diff --git a/.github/jsonnetfile.lock.json b/.github/jsonnetfile.lock.json index 3806723e517b7..395ab9190e3e1 100644 --- a/.github/jsonnetfile.lock.json +++ b/.github/jsonnetfile.lock.json @@ -8,8 +8,8 @@ "subdir": "workflows" } }, - "version": "d3fa90c124d13a4e0359a46c8708704e92ee8a50", - "sum": "TQ2X5sm7o+BCrytzSbJ7Th2YqNZ2ZPx9Wg62x5mEVJ0=" + "version": "634945b73e8eed4f5161ec08810178ddeca7505b", + "sum": "BOnwSjzyOjWwv9ikwJSAgPBNnYHTU2PEDJ0PWY6nr7I=" } ], "legacyImports": false diff --git a/.github/release-workflows.jsonnet b/.github/release-workflows.jsonnet index ff977b7f124e2..27d72bcf477fb 100644 --- a/.github/release-workflows.jsonnet +++ b/.github/release-workflows.jsonnet @@ -42,11 +42,13 @@ local imagePrefix = 'grafana'; skipValidation=false, useGitHubAppToken=true, versioningStrategy='always-bump-patch', - ), false, false + ) + { + name: 'Prepare Patch Release PR', + }, false, false ), 'minor-release-pr.yml': std.manifestYamlDoc( lokiRelease.releasePRWorkflow( - branches=['k[0-9]+', 'main'], + branches=['k[0-9]+'], buildImage=buildImage, checkTemplate=checkTemplate, golangCiLintVersion=golangCiLintVersion, @@ -59,7 +61,28 @@ local imagePrefix = 'grafana'; skipValidation=false, useGitHubAppToken=true, versioningStrategy='always-bump-minor', - ), false, false + ) + { + name: 'Prepare Minor Release PR from Weekly', + }, false, false + ), + 'three-0-release.yml': std.manifestYamlDoc( + lokiRelease.releasePRWorkflow( + branches=['main'], + buildImage=buildImage, + checkTemplate=checkTemplate, + golangCiLintVersion=golangCiLintVersion, + imageBuildTimeoutMin=imageBuildTimeoutMin, + imageJobs=imageJobs, + imagePrefix=imagePrefix, + releaseLibRef=releaseLibRef, + releaseRepo='grafana/loki', + skipArm=false, + skipValidation=false, + useGitHubAppToken=true, + releaseAs='3.0.0-rc.1', + ) + { + name: 'Prepare Loki 3.0 release', + }, false, false ), 'release.yml': std.manifestYamlDoc( lokiRelease.releaseWorkflow( diff --git 
a/.github/vendor/github.com/grafana/loki-release/workflows/build.libsonnet b/.github/vendor/github.com/grafana/loki-release/workflows/build.libsonnet index 6a2749b62ee28..1857836d66655 100644 --- a/.github/vendor/github.com/grafana/loki-release/workflows/build.libsonnet +++ b/.github/vendor/github.com/grafana/loki-release/workflows/build.libsonnet @@ -119,19 +119,36 @@ local releaseLibStep = common.releaseLibStep; + step.withId('version') + step.withRun(||| npm install - npm exec -- release-please release-pr \ - --consider-all-branches \ - --dry-run \ - --dry-run-output release.json \ - --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ - --manifest-file .release-please-manifest.json \ - --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ - --release-type simple \ - --repo-url "${{ env.RELEASE_REPO }}" \ - --separate-pull-requests false \ - --target-branch "${{ steps.extract_branch.outputs.branch }}" \ - --token "${{ steps.github_app_token.outputs.token }}" \ - --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + + if [[ -z "${{ env.RELEASE_AS }}" ]]; then + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + else + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + 
--manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --release-as "${{ env.RELEASE_AS }}" + fi cat release.json @@ -214,5 +231,8 @@ local releaseLibStep = common.releaseLibStep; destination: '${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}', //TODO: make bucket configurable process_gcloudignore: false, }), - ]), + ]) + + job.withOutputs({ + version: '${{ needs.version.outputs.version }}', + }), } diff --git a/.github/vendor/github.com/grafana/loki-release/workflows/main.jsonnet b/.github/vendor/github.com/grafana/loki-release/workflows/main.jsonnet index fb401d3158829..d274d21a0571d 100644 --- a/.github/vendor/github.com/grafana/loki-release/workflows/main.jsonnet +++ b/.github/vendor/github.com/grafana/loki-release/workflows/main.jsonnet @@ -19,6 +19,7 @@ imageBuildTimeoutMin=25, imageJobs={}, imagePrefix='grafana', + releaseAs=null, releaseLibRef='main', releaseRepo='grafana/loki-release', skipArm=false, @@ -53,7 +54,9 @@ SKIP_VALIDATION: skipValidation, USE_GITHUB_APP_TOKEN: useGitHubAppToken, VERSIONING_STRATEGY: versioningStrategy, - }, + } + if releaseAs != null then { + RELEASE_AS: releaseAs, + } else {}, local validationSteps = ['check'], jobs: { check: {} + $.job.withUses(checkTemplate) diff --git a/.github/vendor/github.com/grafana/loki-release/workflows/release.libsonnet b/.github/vendor/github.com/grafana/loki-release/workflows/release.libsonnet index dc9978b2335d6..62f065b40288a 100644 --- a/.github/vendor/github.com/grafana/loki-release/workflows/release.libsonnet +++ b/.github/vendor/github.com/grafana/loki-release/workflows/release.libsonnet @@ -40,12 +40,12 @@ local pullRequestFooter = 'Merging this PR will release the [artifacts](https:// 
--manifest-file .release-please-manifest.json \ --pull-request-footer "%s" \ --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-as "${{ needs.dist.outputs.version }}" \ --release-type simple \ --repo-url "${{ env.RELEASE_REPO }}" \ --separate-pull-requests false \ --target-branch "${{ steps.extract_branch.outputs.branch }}" \ --token "${{ steps.github_app_token.outputs.token }}" \ - --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" \ --dry-run ${{ fromJSON(env.DRY_RUN) }} ||| % pullRequestFooter), diff --git a/.github/workflows/minor-release-pr.yml b/.github/workflows/minor-release-pr.yml index 2704a770dbf91..19f859a6702e1 100644 --- a/.github/workflows/minor-release-pr.yml +++ b/.github/workflows/minor-release-pr.yml @@ -85,18 +85,20 @@ jobs: --manifest-file .release-please-manifest.json \ --pull-request-footer "Merging this PR will release the [artifacts](https://console.cloud.google.com/storage/browser/${BUILD_ARTIFACTS_BUCKET}/${SHA}) of ${SHA}" \ --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-as "${{ needs.dist.outputs.version }}" \ --release-type simple \ --repo-url "${{ env.RELEASE_REPO }}" \ --separate-pull-requests false \ --target-branch "${{ steps.extract_branch.outputs.branch }}" \ --token "${{ steps.github_app_token.outputs.token }}" \ - --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" \ --dry-run ${{ fromJSON(env.DRY_RUN) }} working-directory: "lib" dist: needs: - "version" + outputs: + version: "${{ needs.version.outputs.version }}" runs-on: "ubuntu-latest" steps: - name: "pull code to release" @@ -768,19 +770,36 @@ jobs: name: "get release version" run: | npm install - npm exec -- release-please release-pr \ - --consider-all-branches \ - --dry-run \ - --dry-run-output release.json \ - --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ - --manifest-file .release-please-manifest.json \ - --pull-request-title-pattern 
"chore\${scope}: release\${component} \${version}" \ - --release-type simple \ - --repo-url "${{ env.RELEASE_REPO }}" \ - --separate-pull-requests false \ - --target-branch "${{ steps.extract_branch.outputs.branch }}" \ - --token "${{ steps.github_app_token.outputs.token }}" \ - --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + + if [[ -z "${{ env.RELEASE_AS }}" ]]; then + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + else + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --release-as "${{ env.RELEASE_AS }}" + fi cat release.json @@ -799,12 +818,11 @@ jobs: echo "pr_created=true" >> $GITHUB_OUTPUT fi working-directory: "lib" -name: "create release PR" +name: "Prepare Minor Release PR from Weekly" "on": push: branches: - "k[0-9]+" - - "main" permissions: contents: "write" id-token: "write" diff --git a/.github/workflows/patch-release-pr.yml b/.github/workflows/patch-release-pr.yml 
index a388035df98cf..124ea87ef6958 100644 --- a/.github/workflows/patch-release-pr.yml +++ b/.github/workflows/patch-release-pr.yml @@ -85,18 +85,20 @@ jobs: --manifest-file .release-please-manifest.json \ --pull-request-footer "Merging this PR will release the [artifacts](https://console.cloud.google.com/storage/browser/${BUILD_ARTIFACTS_BUCKET}/${SHA}) of ${SHA}" \ --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-as "${{ needs.dist.outputs.version }}" \ --release-type simple \ --repo-url "${{ env.RELEASE_REPO }}" \ --separate-pull-requests false \ --target-branch "${{ steps.extract_branch.outputs.branch }}" \ --token "${{ steps.github_app_token.outputs.token }}" \ - --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" \ --dry-run ${{ fromJSON(env.DRY_RUN) }} working-directory: "lib" dist: needs: - "version" + outputs: + version: "${{ needs.version.outputs.version }}" runs-on: "ubuntu-latest" steps: - name: "pull code to release" @@ -768,19 +770,36 @@ jobs: name: "get release version" run: | npm install - npm exec -- release-please release-pr \ - --consider-all-branches \ - --dry-run \ - --dry-run-output release.json \ - --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ - --manifest-file .release-please-manifest.json \ - --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ - --release-type simple \ - --repo-url "${{ env.RELEASE_REPO }}" \ - --separate-pull-requests false \ - --target-branch "${{ steps.extract_branch.outputs.branch }}" \ - --token "${{ steps.github_app_token.outputs.token }}" \ - --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + + if [[ -z "${{ env.RELEASE_AS }}" ]]; then + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + 
--pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + else + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --release-as "${{ env.RELEASE_AS }}" + fi cat release.json @@ -799,7 +818,7 @@ jobs: echo "pr_created=true" >> $GITHUB_OUTPUT fi working-directory: "lib" -name: "create release PR" +name: "Prepare Patch Release PR" "on": push: branches: diff --git a/.github/workflows/three-0-release.yml b/.github/workflows/three-0-release.yml new file mode 100644 index 0000000000000..e63fe7d8fa536 --- /dev/null +++ b/.github/workflows/three-0-release.yml @@ -0,0 +1,829 @@ +concurrency: + group: "create-release-pr-${{ github.sha }}" +env: + BUILD_ARTIFACTS_BUCKET: "loki-build-artifacts" + BUILD_TIMEOUT: 40 + CHANGELOG_PATH: "CHANGELOG.md" + DOCKER_USERNAME: "grafana" + DRY_RUN: false + IMAGE_PREFIX: "grafana" + RELEASE_AS: "3.0.0-rc.1" + RELEASE_LIB_REF: "main" + RELEASE_REPO: "grafana/loki" + SKIP_VALIDATION: false + USE_GITHUB_APP_TOKEN: true + VERSIONING_STRATEGY: "always-bump-patch" +jobs: + check: + uses: "grafana/loki-release/.github/workflows/check.yml@main" + with: + build_image: "grafana/loki-build-image:0.33.0" + golang_ci_lint_version: "v1.55.1" + 
release_lib_ref: "main" + skip_validation: false + use_github_app_token: true + create-release-pr: + needs: + - "dist" + - "fluent-bit" + - "fluentd" + - "logcli" + - "logstash" + - "loki" + - "loki-canary" + - "loki-canary-boringcrypto" + - "promtail" + - "querytee" + runs-on: "ubuntu-latest" + steps: + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - id: "extract_branch" + name: "extract branch name" + run: | + echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT + working-directory: "release" + - id: "get_github_app_token" + if: "${{ fromJSON(env.USE_GITHUB_APP_TOKEN) }}" + name: "get github app token" + uses: "actions/create-github-app-token@v1" + with: + app-id: "${{ secrets.APP_ID }}" + owner: "${{ github.repository_owner }}" + private-key: "${{ secrets.APP_PRIVATE_KEY }}" + - id: "github_app_token" + name: "set github token" + run: | + if [[ "${USE_GITHUB_APP_TOKEN}" == "true" ]]; then + echo "token=${{ steps.get_github_app_token.outputs.token }}" >> $GITHUB_OUTPUT + else + echo "token=${{ secrets.GH_TOKEN }}" >> $GITHUB_OUTPUT + fi + - env: + SHA: "${{ github.sha }}" + id: "release" + name: "release please" + run: | + npm install + npm exec -- release-please release-pr \ + --changelog-path "${CHANGELOG_PATH}" \ + --consider-all-branches \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --label "backport main,autorelease: pending,product-approved" \ + --manifest-file .release-please-manifest.json \ + --pull-request-footer "Merging this PR will release the [artifacts](https://console.cloud.google.com/storage/browser/${BUILD_ARTIFACTS_BUCKET}/${SHA}) of ${SHA}" \ + 
--pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" \ + --dry-run ${{ fromJSON(env.DRY_RUN) }} + + working-directory: "lib" + dist: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up Cloud SDK" + uses: "google-github-actions/setup-gcloud@v2" + with: + version: ">= 452.0.0" + - id: "get-secrets" + name: "get nfpm signing keys" + uses: "grafana/shared-workflows/actions/get-vault-secrets@main" + with: + common_secrets: | + NFPM_SIGNING_KEY=packages-gpg:private-key + NFPM_PASSPHRASE=packages-gpg:passphrase + - env: + BUILD_IN_CONTAINER: false + DRONE_TAG: "${{ needs.version.outputs.version }}" + IMAGE_TAG: "${{ needs.version.outputs.version }}" + NFPM_SIGNING_KEY_FILE: "nfpm-private-key.key" + SKIP_ARM: false + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "build artifacts" + run: | + cat < $NFPM_SIGNING_KEY_FILE + make dist packages + EOF + working-directory: "release" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}" + path: "release/dist" + process_gcloudignore: false + fluent-bit: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: 
"grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/fluent-bit/Dockerfile" + outputs: "type=docker,dest=release/images/fluent-bit-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/fluent-bit-plugin-loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/fluent-bit-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + fluentd: + 
needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/fluentd/Dockerfile" + outputs: "type=docker,dest=release/images/fluent-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/fluent-plugin-loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/fluent-plugin-loki-${{ 
needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + logcli: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/logcli/Dockerfile" + outputs: "type=docker,dest=release/images/logcli-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/logcli:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: 
"google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/logcli-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + logstash: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/logstash/Dockerfile" + outputs: "type=docker,dest=release/images/logstash-output-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ 
env.IMAGE_PREFIX }}/logstash-output-loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/logstash-output-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + loki: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/loki/Dockerfile" + outputs: 
"type=docker,dest=release/images/loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + loki-canary: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: 
"docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/loki-canary/Dockerfile" + outputs: "type=docker,dest=release/images/loki-canary-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki-canary:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-canary-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + loki-canary-boringcrypto: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - 
env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/loki-canary-boringcrypto/Dockerfile" + outputs: "type=docker,dest=release/images/loki-canary-boringcrypto-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki-canary-boringcrypto:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-canary-boringcrypto-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + promtail: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + 
platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/promtail/Dockerfile" + outputs: "type=docker,dest=release/images/promtail-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/promtail:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/promtail-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + querytee: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + 
uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/querytee/Dockerfile" + outputs: "type=docker,dest=release/images/loki-query-tee-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki-query-tee:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-query-tee-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + version: + needs: + - "check" + outputs: + pr_created: "${{ steps.version.outputs.pr_created }}" + version: "${{ steps.version.outputs.version }}" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + 
repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - id: "extract_branch" + name: "extract branch name" + run: | + echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT + working-directory: "release" + - id: "get_github_app_token" + if: "${{ fromJSON(env.USE_GITHUB_APP_TOKEN) }}" + name: "get github app token" + uses: "actions/github-app-token@v1" + with: + app-id: "${{ secrets.APP_ID }}" + owner: "${{ github.repository_owner }}" + private-key: "${{ secrets.APP_PRIVATE_KEY }}" + - id: "github_app_token" + name: "set github token" + run: | + if [[ "${USE_GITHUB_APP_TOKEN}" == "true" ]]; then + echo "token=${{ steps.get_github_app_token.outputs.token }}" >> $GITHUB_OUTPUT + else + echo "token=${{ secrets.GH_TOKEN }}" >> $GITHUB_OUTPUT + fi + - id: "version" + name: "get release version" + run: | + npm install + + if [[ -z "${{ env.RELEASE_AS }}" ]]; then + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + else + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ 
+ --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --release-as "${{ env.RELEASE_AS }}" + fi + + cat release.json + + if [[ `jq length release.json` -gt 1 ]]; then + echo 'release-please would create more than 1 PR, so cannot determine correct version' + echo "pr_created=false" >> $GITHUB_OUTPUT + exit 1 + fi + + if [[ `jq length release.json` -eq 0 ]]; then + echo "pr_created=false" >> $GITHUB_OUTPUT + else + version="$(npm run --silent get-version)" + echo "Parsed version: ${version}" + echo "version=${version}" >> $GITHUB_OUTPUT + echo "pr_created=true" >> $GITHUB_OUTPUT + fi + working-directory: "lib" +name: "Prepare Loki 3.0 release" +"on": + push: + branches: + - "main" + - "add-major-release-workflow" +permissions: + contents: "write" + id-token: "write" + pull-requests: "write" diff --git a/.github/workflows/three-zero-release.yml b/.github/workflows/three-zero-release.yml new file mode 100644 index 0000000000000..70b28fbc92bab --- /dev/null +++ b/.github/workflows/three-zero-release.yml @@ -0,0 +1,830 @@ +concurrency: + group: "create-release-pr-${{ github.sha }}" +env: + BUILD_ARTIFACTS_BUCKET: "loki-build-artifacts" + BUILD_TIMEOUT: 40 + CHANGELOG_PATH: "CHANGELOG.md" + DOCKER_USERNAME: "grafana" + DRY_RUN: false + IMAGE_PREFIX: "grafana" + RELEASE_AS: "3.0.0-rc.1" + RELEASE_LIB_REF: "main" + RELEASE_REPO: "grafana/loki" + SKIP_VALIDATION: false + USE_GITHUB_APP_TOKEN: true + VERSIONING_STRATEGY: "always-bump-patch" +jobs: + check: + uses: "grafana/loki-release/.github/workflows/check.yml@main" + with: + build_image: "grafana/loki-build-image:0.33.0" + golang_ci_lint_version: "v1.55.1" + release_lib_ref: "main" + skip_validation: false + use_github_app_token: true + create-release-pr: + needs: + - "dist" + - "fluent-bit" + - "fluentd" + - "logcli" + - "logstash" + - "loki" + - "loki-canary" + - "loki-canary-boringcrypto" + - "promtail" + - 
"querytee" + runs-on: "ubuntu-latest" + steps: + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - id: "extract_branch" + name: "extract branch name" + run: | + echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT + working-directory: "release" + - id: "get_github_app_token" + if: "${{ fromJSON(env.USE_GITHUB_APP_TOKEN) }}" + name: "get github app token" + uses: "actions/github-app-token@v1" + with: + app-id: "${{ secrets.APP_ID }}" + owner: "${{ github.repository_owner }}" + private-key: "${{ secrets.APP_PRIVATE_KEY }}" + - id: "github_app_token" + name: "set github token" + run: | + if [[ "${USE_GITHUB_APP_TOKEN}" == "true" ]]; then + echo "token=${{ steps.get_github_app_token.outputs.token }}" >> $GITHUB_OUTPUT + else + echo "token=${{ secrets.GH_TOKEN }}" >> $GITHUB_OUTPUT + fi + - env: + SHA: "${{ github.sha }}" + id: "release" + name: "release please" + run: | + npm install + npm exec -- release-please release-pr \ + --changelog-path "${CHANGELOG_PATH}" \ + --consider-all-branches \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --label "backport main,autorelease: pending,product-approved" \ + --manifest-file .release-please-manifest.json \ + --pull-request-footer "Merging this PR will release the [artifacts](https://console.cloud.google.com/storage/browser/${BUILD_ARTIFACTS_BUCKET}/${SHA}) of ${SHA}" \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-as "${{ needs.dist.outputs.version }}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ 
steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --dry-run ${{ fromJSON(env.DRY_RUN) }} + + working-directory: "lib" + dist: + needs: + - "version" + outputs: + version: "${{ needs.version.outputs.version }}" + runs-on: "ubuntu-latest" + steps: + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up Cloud SDK" + uses: "google-github-actions/setup-gcloud@v2" + with: + version: ">= 452.0.0" + - id: "get-secrets" + name: "get nfpm signing keys" + uses: "grafana/shared-workflows/actions/get-vault-secrets@main" + with: + common_secrets: | + NFPM_SIGNING_KEY=packages-gpg:private-key + NFPM_PASSPHRASE=packages-gpg:passphrase + - env: + BUILD_IN_CONTAINER: false + DRONE_TAG: "${{ needs.version.outputs.version }}" + IMAGE_TAG: "${{ needs.version.outputs.version }}" + NFPM_SIGNING_KEY_FILE: "nfpm-private-key.key" + SKIP_ARM: false + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "build artifacts" + run: | + cat < $NFPM_SIGNING_KEY_FILE + make dist packages + EOF + working-directory: "release" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}" + path: "release/dist" + process_gcloudignore: false + fluent-bit: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + 
node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/fluent-bit/Dockerfile" + outputs: "type=docker,dest=release/images/fluent-bit-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/fluent-bit-plugin-loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/fluent-bit-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + fluentd: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: 
"grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/fluentd/Dockerfile" + outputs: "type=docker,dest=release/images/fluent-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/fluent-plugin-loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/fluent-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + logcli: + needs: + - 
"version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/logcli/Dockerfile" + outputs: "type=docker,dest=release/images/logcli-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/logcli:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/logcli-${{ needs.version.outputs.version}}-${{ 
steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + logstash: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/logstash/Dockerfile" + outputs: "type=docker,dest=release/images/logstash-output-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/logstash-output-loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + 
uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/logstash-output-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + loki: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/loki/Dockerfile" + outputs: "type=docker,dest=release/images/loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki:${{ 
needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + loki-canary: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/loki-canary/Dockerfile" + outputs: 
"type=docker,dest=release/images/loki-canary-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki-canary:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-canary-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + loki-canary-boringcrypto: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ 
fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/loki-canary-boringcrypto/Dockerfile" + outputs: "type=docker,dest=release/images/loki-canary-boringcrypto-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki-canary-boringcrypto:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-canary-boringcrypto-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + promtail: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ 
matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/promtail/Dockerfile" + outputs: "type=docker,dest=release/images/promtail-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/promtail:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/promtail-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + querytee: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + 
run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/querytee/Dockerfile" + outputs: "type=docker,dest=release/images/loki-query-tee-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki-query-tee:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-query-tee-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + version: + needs: + - "check" + outputs: + pr_created: "${{ steps.version.outputs.pr_created }}" + version: "${{ steps.version.outputs.version }}" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - id: "extract_branch" + name: 
"extract branch name" + run: | + echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT + working-directory: "release" + - id: "get_github_app_token" + if: "${{ fromJSON(env.USE_GITHUB_APP_TOKEN) }}" + name: "get github app token" + uses: "actions/github-app-token@v1" + with: + app-id: "${{ secrets.APP_ID }}" + owner: "${{ github.repository_owner }}" + private-key: "${{ secrets.APP_PRIVATE_KEY }}" + - id: "github_app_token" + name: "set github token" + run: | + if [[ "${USE_GITHUB_APP_TOKEN}" == "true" ]]; then + echo "token=${{ steps.get_github_app_token.outputs.token }}" >> $GITHUB_OUTPUT + else + echo "token=${{ secrets.GH_TOKEN }}" >> $GITHUB_OUTPUT + fi + - id: "version" + name: "get release version" + run: | + npm install + + if [[ -z "${{ env.RELEASE_AS }}" ]]; then + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + else + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token 
}}" \ + --release-as "${{ env.RELEASE_AS }}" + fi + + cat release.json + + if [[ `jq length release.json` -gt 1 ]]; then + echo 'release-please would create more than 1 PR, so cannot determine correct version' + echo "pr_created=false" >> $GITHUB_OUTPUT + exit 1 + fi + + if [[ `jq length release.json` -eq 0 ]]; then + echo "pr_created=false" >> $GITHUB_OUTPUT + else + version="$(npm run --silent get-version)" + echo "Parsed version: ${version}" + echo "version=${version}" >> $GITHUB_OUTPUT + echo "pr_created=true" >> $GITHUB_OUTPUT + fi + working-directory: "lib" +name: "Prepare Loki 3.0 release" +"on": + push: + branches: + - "main" +permissions: + contents: "write" + id-token: "write" + pull-requests: "write" diff --git a/CHANGELOG.md b/CHANGELOG.md index 61dd741057c9e..289045cf2f6bc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -61,6 +61,9 @@ * [11817](https://github.com/grafana/loki/pull/11817) **ashwanthgoli** Ruler: Add support for filtering results of `/prometheus/api/v1/rules` endpoint by rule_name, rule_group, file and type. * [11897](https://github.com/grafana/loki/pull/11897) **ashwanthgoli** Metadata: Introduces a separate split interval of `split_recent_metadata_queries_by_interval` for `recent_metadata_query_window` to help with caching recent metadata query results. * [11970](https://github.com/grafana/loki/pull/11897) **masslessparticle** Ksonnet: Introduces memory limits to the compactor configuration to avoid unbounded memory usage. +* [12318](https://github.com/grafana/loki/pull/12318) **DylanGuedes** Memcached: Add mTLS support. +* [12392](https://github.com/grafana/loki/pull/12392) **sandeepsukhani** Detect name of service emitting logs and add it as a label. +* [12398](https://github.com/grafana/loki/pull/12398) **kolesnikovae** LogQL: Introduces pattern match filter operators. ##### Fixes * [11074](https://github.com/grafana/loki/pull/11074) **hainenber** Fix panic in lambda-promtail due to mishandling of empty DROP_LABELS env var. 
diff --git a/clients/cmd/docker-driver/config.go b/clients/cmd/docker-driver/config.go index 95dd07a6d8e81..d53117ca4872b 100644 --- a/clients/cmd/docker-driver/config.go +++ b/clients/cmd/docker-driver/config.go @@ -19,11 +19,11 @@ import ( "github.com/prometheus/prometheus/model/relabel" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/targets/file" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/file" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/clients/cmd/docker-driver/config_test.go b/clients/cmd/docker-driver/config_test.go index a3920778b622b..f83c560e39391 100644 --- a/clients/cmd/docker-driver/config_test.go +++ b/clients/cmd/docker-driver/config_test.go @@ -11,9 +11,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var jobRename = ` diff --git a/clients/cmd/docker-driver/loki.go b/clients/cmd/docker-driver/loki.go index cc15e71bda2ee..77bc4e5e439b7 100644 --- a/clients/cmd/docker-driver/loki.go +++ b/clients/cmd/docker-driver/loki.go @@ -10,11 +10,11 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - 
"github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var jobName = "docker" diff --git a/clients/cmd/docker-driver/loki_test.go b/clients/cmd/docker-driver/loki_test.go index 0edc7b0c76931..4e61f37cd05c0 100644 --- a/clients/cmd/docker-driver/loki_test.go +++ b/clients/cmd/docker-driver/loki_test.go @@ -7,7 +7,7 @@ import ( "github.com/docker/docker/daemon/logger" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_loki_LogWhenClosed(t *testing.T) { diff --git a/clients/cmd/docker-driver/main.go b/clients/cmd/docker-driver/main.go index 5aba041f6b5fe..06d90b81bda56 100644 --- a/clients/cmd/docker-driver/main.go +++ b/clients/cmd/docker-driver/main.go @@ -12,8 +12,8 @@ import ( dslog "github.com/grafana/dskit/log" "github.com/prometheus/common/version" - _ "github.com/grafana/loki/pkg/util/build" - util_log "github.com/grafana/loki/pkg/util/log" + _ "github.com/grafana/loki/v3/pkg/util/build" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const socketAddress = "/run/docker/plugins/loki.sock" diff --git a/clients/cmd/fluent-bit/buffer.go b/clients/cmd/fluent-bit/buffer.go index a168ccfc142c5..28e9529abff48 100644 --- a/clients/cmd/fluent-bit/buffer.go +++ b/clients/cmd/fluent-bit/buffer.go @@ -5,7 +5,7 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" ) type bufferConfig struct { diff --git a/clients/cmd/fluent-bit/client.go b/clients/cmd/fluent-bit/client.go index 11c2fa1d0386b..828d013d85ae1 100644 --- a/clients/cmd/fluent-bit/client.go +++ b/clients/cmd/fluent-bit/client.go @@ -3,7 +3,7 @@ package main import ( "github.com/go-kit/log" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" ) // NewClient creates a new client based on the fluentbit configuration. 
diff --git a/clients/cmd/fluent-bit/config.go b/clients/cmd/fluent-bit/config.go index 469e18d495d74..84838d03f20f8 100644 --- a/clients/cmd/fluent-bit/config.go +++ b/clients/cmd/fluent-bit/config.go @@ -12,10 +12,10 @@ import ( "github.com/grafana/dskit/log" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/logentry/logql" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/logentry/logql" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - lokiflag "github.com/grafana/loki/pkg/util/flagext" + lokiflag "github.com/grafana/loki/v3/pkg/util/flagext" ) var defaultClientCfg = client.Config{} diff --git a/clients/cmd/fluent-bit/config_test.go b/clients/cmd/fluent-bit/config_test.go index 0d5ec6d592b0e..f52ea18bc96db 100644 --- a/clients/cmd/fluent-bit/config_test.go +++ b/clients/cmd/fluent-bit/config_test.go @@ -12,9 +12,9 @@ import ( "github.com/grafana/dskit/log" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - lokiflag "github.com/grafana/loki/pkg/util/flagext" + lokiflag "github.com/grafana/loki/v3/pkg/util/flagext" ) type fakeConfig map[string]string diff --git a/clients/cmd/fluent-bit/dque.go b/clients/cmd/fluent-bit/dque.go index f7091de893f59..6e5746033254b 100644 --- a/clients/cmd/fluent-bit/dque.go +++ b/clients/cmd/fluent-bit/dque.go @@ -12,10 +12,10 @@ import ( "github.com/joncrlsn/dque" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type dqueConfig struct { diff --git a/clients/cmd/fluent-bit/loki.go b/clients/cmd/fluent-bit/loki.go index ea3de0261f407..6749af1ebf881 100644 
--- a/clients/cmd/fluent-bit/loki.go +++ b/clients/cmd/fluent-bit/loki.go @@ -17,10 +17,10 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var ( diff --git a/clients/cmd/fluent-bit/loki_test.go b/clients/cmd/fluent-bit/loki_test.go index 1bfd21d22ce02..477f6abe1757c 100644 --- a/clients/cmd/fluent-bit/loki_test.go +++ b/clients/cmd/fluent-bit/loki_test.go @@ -11,10 +11,10 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var now = time.Now() diff --git a/clients/cmd/fluent-bit/out_grafana_loki.go b/clients/cmd/fluent-bit/out_grafana_loki.go index d396fddfc8da2..70a58e65b9350 100644 --- a/clients/cmd/fluent-bit/out_grafana_loki.go +++ b/clients/cmd/fluent-bit/out_grafana_loki.go @@ -13,12 +13,12 @@ import ( dslog "github.com/grafana/dskit/log" "github.com/prometheus/common/version" - _ "github.com/grafana/loki/pkg/util/build" + _ "github.com/grafana/loki/v3/pkg/util/build" ) import ( "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" ) var ( diff --git a/clients/cmd/promtail/main.go b/clients/cmd/promtail/main.go index 4492938246b5b..7e00e7ff35db3 100644 --- a/clients/cmd/promtail/main.go +++ b/clients/cmd/promtail/main.go @@ -20,16 +20,16 @@ import ( 
collectors_version "github.com/prometheus/client_golang/prometheus/collectors/version" "github.com/prometheus/common/version" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail" - "github.com/grafana/loki/clients/pkg/promtail/client" - promtail_config "github.com/grafana/loki/clients/pkg/promtail/config" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" + promtail_config "github.com/grafana/loki/v3/clients/pkg/promtail/config" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/cfg" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/cfg" - _ "github.com/grafana/loki/pkg/util/build" - util_log "github.com/grafana/loki/pkg/util/log" + _ "github.com/grafana/loki/v3/pkg/util/build" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func init() { diff --git a/clients/pkg/logentry/logql/parser.go b/clients/pkg/logentry/logql/parser.go index d567f6fce4c8b..924ec1b7bdeab 100644 --- a/clients/pkg/logentry/logql/parser.go +++ b/clients/pkg/logentry/logql/parser.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func init() { diff --git a/clients/pkg/logentry/metric/metricvec.go b/clients/pkg/logentry/metric/metricvec.go index 07f73c20873d3..f004db760f8f6 100644 --- a/clients/pkg/logentry/metric/metricvec.go +++ b/clients/pkg/logentry/metric/metricvec.go @@ -5,7 +5,7 @@ import ( "sync" "time" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" diff --git a/clients/pkg/logentry/stages/decolorize.go b/clients/pkg/logentry/stages/decolorize.go index bac7274b6bad3..a86e6cdeafb2e 100644 --- a/clients/pkg/logentry/stages/decolorize.go 
+++ b/clients/pkg/logentry/stages/decolorize.go @@ -1,7 +1,7 @@ package stages import ( - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) type decolorizeStage struct{} diff --git a/clients/pkg/logentry/stages/decolorize_test.go b/clients/pkg/logentry/stages/decolorize_test.go index 5e7cead0a5275..029cd74c1c1e3 100644 --- a/clients/pkg/logentry/stages/decolorize_test.go +++ b/clients/pkg/logentry/stages/decolorize_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testDecolorizePipeline = ` diff --git a/clients/pkg/logentry/stages/drop.go b/clients/pkg/logentry/stages/drop.go index 19a2e6c378075..462d6c34f6350 100644 --- a/clients/pkg/logentry/stages/drop.go +++ b/clients/pkg/logentry/stages/drop.go @@ -13,7 +13,7 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) const ( diff --git a/clients/pkg/logentry/stages/drop_test.go b/clients/pkg/logentry/stages/drop_test.go index a7e5ffcb5665f..220bb68314df3 100644 --- a/clients/pkg/logentry/stages/drop_test.go +++ b/clients/pkg/logentry/stages/drop_test.go @@ -12,7 +12,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // Not all these are tested but are here to make sure the different types marshal without error diff --git a/clients/pkg/logentry/stages/eventlogmessage_test.go b/clients/pkg/logentry/stages/eventlogmessage_test.go index 4729d5a08f0e6..ed4bedccfc70c 100644 --- a/clients/pkg/logentry/stages/eventlogmessage_test.go +++ b/clients/pkg/logentry/stages/eventlogmessage_test.go @@ -11,7 +11,7 @@ import ( 
"github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testEvtLogMsgYamlDefaults = ` diff --git a/clients/pkg/logentry/stages/extensions.go b/clients/pkg/logentry/stages/extensions.go index f25ffe02e8403..2e49d6bd224b3 100644 --- a/clients/pkg/logentry/stages/extensions.go +++ b/clients/pkg/logentry/stages/extensions.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) const ( @@ -128,7 +128,7 @@ type CriConfig struct { MaxPartialLineSizeTruncate bool `mapstructure:"max_partial_line_size_truncate"` } -// validateDropConfig validates the DropConfig for the dropStage +// validateCriConfig validates the CriConfig for the cri stage func validateCriConfig(cfg *CriConfig) error { if cfg.MaxPartialLines == 0 { cfg.MaxPartialLines = MaxPartialLinesSize diff --git a/clients/pkg/logentry/stages/extensions_test.go b/clients/pkg/logentry/stages/extensions_test.go index 9e2a3f62a56f7..0d03acd3fe3dd 100644 --- a/clients/pkg/logentry/stages/extensions_test.go +++ b/clients/pkg/logentry/stages/extensions_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/clients/pkg/logentry/stages/json_test.go b/clients/pkg/logentry/stages/json_test.go index 31a0c0219e5af..1764387253fb1 100644 --- a/clients/pkg/logentry/stages/json_test.go +++ b/clients/pkg/logentry/stages/json_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/assert" "gopkg.in/yaml.v2" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testJSONYamlSingleStageWithoutSource = ` diff --git 
a/clients/pkg/logentry/stages/labelallow_test.go b/clients/pkg/logentry/stages/labelallow_test.go index a5cbcd8e3ce6b..ebcf451487ef8 100644 --- a/clients/pkg/logentry/stages/labelallow_test.go +++ b/clients/pkg/logentry/stages/labelallow_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_addLabelStage_Process(t *testing.T) { diff --git a/clients/pkg/logentry/stages/labeldrop_test.go b/clients/pkg/logentry/stages/labeldrop_test.go index 215a7888f8c31..70912c7ed1c84 100644 --- a/clients/pkg/logentry/stages/labeldrop_test.go +++ b/clients/pkg/logentry/stages/labeldrop_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_dropLabelStage_Process(t *testing.T) { diff --git a/clients/pkg/logentry/stages/labels_test.go b/clients/pkg/logentry/stages/labels_test.go index 175359606a2f4..27747d8032edd 100644 --- a/clients/pkg/logentry/stages/labels_test.go +++ b/clients/pkg/logentry/stages/labels_test.go @@ -13,7 +13,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testLabelsYaml = ` diff --git a/clients/pkg/logentry/stages/limit.go b/clients/pkg/logentry/stages/limit.go index d5489221e6ac0..49d32cbf04029 100644 --- a/clients/pkg/logentry/stages/limit.go +++ b/clients/pkg/logentry/stages/limit.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" "github.com/go-kit/log" "github.com/mitchellh/mapstructure" diff --git a/clients/pkg/logentry/stages/limit_test.go 
b/clients/pkg/logentry/stages/limit_test.go index b439db4908b2f..0d3519e8c9b4b 100644 --- a/clients/pkg/logentry/stages/limit_test.go +++ b/clients/pkg/logentry/stages/limit_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // Not all these are tested but are here to make sure the different types marshal without error @@ -60,7 +60,7 @@ var testNonAppLogLine = ` var plName = "testPipeline" -// TestLimitPipeline is used to verify we properly parse the yaml config and create a working pipeline +// TestLimitWaitPipeline is used to verify we properly parse the yaml config and create a working pipeline func TestLimitWaitPipeline(t *testing.T) { registry := prometheus.NewRegistry() pl, err := NewPipeline(util_log.Logger, loadConfig(testLimitWaitYaml), &plName, registry) @@ -78,7 +78,7 @@ func TestLimitWaitPipeline(t *testing.T) { assert.Equal(t, out[0].Line, testMatchLogLineApp1) } -// TestLimitPipeline is used to verify we properly parse the yaml config and create a working pipeline +// TestLimitDropPipeline is used to verify we properly parse the yaml config and create a working pipeline func TestLimitDropPipeline(t *testing.T) { registry := prometheus.NewRegistry() pl, err := NewPipeline(util_log.Logger, loadConfig(testLimitDropYaml), &plName, registry) diff --git a/clients/pkg/logentry/stages/logfmt_test.go b/clients/pkg/logentry/stages/logfmt_test.go index 8258eeece501b..ed60d8770d014 100644 --- a/clients/pkg/logentry/stages/logfmt_test.go +++ b/clients/pkg/logentry/stages/logfmt_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "gopkg.in/yaml.v2" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testLogfmtYamlSingleStageWithoutSource = ` diff --git a/clients/pkg/logentry/stages/match.go b/clients/pkg/logentry/stages/match.go 
index 3b4addbb0de12..4007e45da4ecb 100644 --- a/clients/pkg/logentry/stages/match.go +++ b/clients/pkg/logentry/stages/match.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/clients/pkg/logentry/logql" + "github.com/grafana/loki/v3/clients/pkg/logentry/logql" ) const ( diff --git a/clients/pkg/logentry/stages/match_test.go b/clients/pkg/logentry/stages/match_test.go index 558407320c57d..05d65f0bcaff5 100644 --- a/clients/pkg/logentry/stages/match_test.go +++ b/clients/pkg/logentry/stages/match_test.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testMatchYaml = ` diff --git a/clients/pkg/logentry/stages/metrics.go b/clients/pkg/logentry/stages/metrics.go index 14386e3b43a40..827f0cf313a47 100644 --- a/clients/pkg/logentry/stages/metrics.go +++ b/clients/pkg/logentry/stages/metrics.go @@ -15,7 +15,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/logentry/metric" + "github.com/grafana/loki/v3/clients/pkg/logentry/metric" ) const ( diff --git a/clients/pkg/logentry/stages/metrics_test.go b/clients/pkg/logentry/stages/metrics_test.go index 6a14e6c80c1ee..f46ea6839919f 100644 --- a/clients/pkg/logentry/stages/metrics_test.go +++ b/clients/pkg/logentry/stages/metrics_test.go @@ -14,9 +14,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/logentry/metric" + "github.com/grafana/loki/v3/clients/pkg/logentry/metric" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testMetricYaml = ` diff --git a/clients/pkg/logentry/stages/multiline.go b/clients/pkg/logentry/stages/multiline.go index 
199ff438a9390..2f94a2e1822f3 100644 --- a/clients/pkg/logentry/stages/multiline.go +++ b/clients/pkg/logentry/stages/multiline.go @@ -13,9 +13,9 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git a/clients/pkg/logentry/stages/multiline_test.go b/clients/pkg/logentry/stages/multiline_test.go index 33b71c8f5f023..b175f89845dea 100644 --- a/clients/pkg/logentry/stages/multiline_test.go +++ b/clients/pkg/logentry/stages/multiline_test.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_multilineStage_Process(t *testing.T) { diff --git a/clients/pkg/logentry/stages/output_test.go b/clients/pkg/logentry/stages/output_test.go index a7b02714faf74..dc6aac54f0b93 100644 --- a/clients/pkg/logentry/stages/output_test.go +++ b/clients/pkg/logentry/stages/output_test.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testOutputYaml = ` diff --git a/clients/pkg/logentry/stages/pack.go b/clients/pkg/logentry/stages/pack.go index 737fa8d36b796..881650d8c6aa1 100644 --- a/clients/pkg/logentry/stages/pack.go +++ b/clients/pkg/logentry/stages/pack.go @@ -15,7 +15,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logqlmodel" + 
"github.com/grafana/loki/v3/pkg/logqlmodel" ) var ( diff --git a/clients/pkg/logentry/stages/pack_test.go b/clients/pkg/logentry/stages/pack_test.go index b767f90a76063..44935051a9523 100644 --- a/clients/pkg/logentry/stages/pack_test.go +++ b/clients/pkg/logentry/stages/pack_test.go @@ -12,11 +12,11 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // Not all these are tested but are here to make sure the different types marshal without error diff --git a/clients/pkg/logentry/stages/pipeline.go b/clients/pkg/logentry/stages/pipeline.go index c20a7784c511c..1c4d2ba8e5ab4 100644 --- a/clients/pkg/logentry/stages/pipeline.go +++ b/clients/pkg/logentry/stages/pipeline.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "golang.org/x/time/rate" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" ) // PipelineStages contains configuration for each stage within a pipeline diff --git a/clients/pkg/logentry/stages/pipeline_test.go b/clients/pkg/logentry/stages/pipeline_test.go index 51fe66e30c36b..2649de6a83441 100644 --- a/clients/pkg/logentry/stages/pipeline_test.go +++ b/clients/pkg/logentry/stages/pipeline_test.go @@ -14,11 +14,11 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/pkg/logproto" - 
util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/clients/pkg/logentry/stages/regex_test.go b/clients/pkg/logentry/stages/regex_test.go index dc3402e6e7a45..f7fa5390a1959 100644 --- a/clients/pkg/logentry/stages/regex_test.go +++ b/clients/pkg/logentry/stages/regex_test.go @@ -14,7 +14,7 @@ import ( "github.com/stretchr/testify/assert" "gopkg.in/yaml.v2" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testRegexYamlSingleStageWithoutSource = ` diff --git a/clients/pkg/logentry/stages/replace_test.go b/clients/pkg/logentry/stages/replace_test.go index f8feef3c898df..87bb3eecb898a 100644 --- a/clients/pkg/logentry/stages/replace_test.go +++ b/clients/pkg/logentry/stages/replace_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/assert" "gopkg.in/yaml.v2" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testReplaceYamlSingleStageWithoutSource = ` diff --git a/clients/pkg/logentry/stages/sampling_test.go b/clients/pkg/logentry/stages/sampling_test.go index 171277e961d66..9b56eec5c0c5a 100644 --- a/clients/pkg/logentry/stages/sampling_test.go +++ b/clients/pkg/logentry/stages/sampling_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testSampingYaml = ` diff --git a/clients/pkg/logentry/stages/stage.go b/clients/pkg/logentry/stages/stage.go index 1c19face4044d..9de1d4e0a5904 100644 --- a/clients/pkg/logentry/stages/stage.go +++ b/clients/pkg/logentry/stages/stage.go @@ -12,7 +12,7 @@ import ( "github.com/prometheus/common/model" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" ) const 
( diff --git a/clients/pkg/logentry/stages/static_labels_test.go b/clients/pkg/logentry/stages/static_labels_test.go index 9643d3da7aa51..bad2ec68f4a97 100644 --- a/clients/pkg/logentry/stages/static_labels_test.go +++ b/clients/pkg/logentry/stages/static_labels_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_staticLabelStage_Process(t *testing.T) { diff --git a/clients/pkg/logentry/stages/structuredmetadata.go b/clients/pkg/logentry/stages/structuredmetadata.go index cdab88a956c7f..cdf70c01d4fa7 100644 --- a/clients/pkg/logentry/stages/structuredmetadata.go +++ b/clients/pkg/logentry/stages/structuredmetadata.go @@ -5,7 +5,7 @@ import ( "github.com/mitchellh/mapstructure" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func newStructuredMetadataStage(params StageCreationParams) (Stage, error) { diff --git a/clients/pkg/logentry/stages/structuredmetadata_test.go b/clients/pkg/logentry/stages/structuredmetadata_test.go index d9a70300b8b0b..2b48c641ef81d 100644 --- a/clients/pkg/logentry/stages/structuredmetadata_test.go +++ b/clients/pkg/logentry/stages/structuredmetadata_test.go @@ -9,7 +9,8 @@ import ( "github.com/stretchr/testify/require" "github.com/grafana/loki/pkg/push" - util_log "github.com/grafana/loki/pkg/util/log" + + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var pipelineStagesStructuredMetadataUsingMatch = ` diff --git a/clients/pkg/logentry/stages/template_test.go b/clients/pkg/logentry/stages/template_test.go index 96e7f1b06a2df..7977c87ffee66 100644 --- a/clients/pkg/logentry/stages/template_test.go +++ b/clients/pkg/logentry/stages/template_test.go @@ -12,7 +12,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" - util_log 
"github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testTemplateYaml = ` diff --git a/clients/pkg/logentry/stages/tenant.go b/clients/pkg/logentry/stages/tenant.go index 13717ccb29bf6..ed730fbc0c121 100644 --- a/clients/pkg/logentry/stages/tenant.go +++ b/clients/pkg/logentry/stages/tenant.go @@ -10,7 +10,7 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" ) const ( diff --git a/clients/pkg/logentry/stages/tenant_test.go b/clients/pkg/logentry/stages/tenant_test.go index eb02b0bda9db8..8eee783d47ddf 100644 --- a/clients/pkg/logentry/stages/tenant_test.go +++ b/clients/pkg/logentry/stages/tenant_test.go @@ -12,10 +12,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - lokiutil "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + lokiutil "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testTenantYamlExtractedData = ` diff --git a/clients/pkg/logentry/stages/timestamp.go b/clients/pkg/logentry/stages/timestamp.go index 592ae13565643..fb1fb8a27c3b5 100644 --- a/clients/pkg/logentry/stages/timestamp.go +++ b/clients/pkg/logentry/stages/timestamp.go @@ -12,7 +12,7 @@ import ( "github.com/mitchellh/mapstructure" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/clients/pkg/logentry/stages/timestamp_test.go b/clients/pkg/logentry/stages/timestamp_test.go index 73e4fb196b5b5..f3f23dcfcebab 100644 --- a/clients/pkg/logentry/stages/timestamp_test.go +++ b/clients/pkg/logentry/stages/timestamp_test.go @@ -14,8 +14,8 @@ import ( "github.com/stretchr/testify/assert" 
"github.com/stretchr/testify/require" - lokiutil "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + lokiutil "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testTimestampYaml = ` diff --git a/clients/pkg/logentry/stages/util_test.go b/clients/pkg/logentry/stages/util_test.go index b58490cc56fc6..5ce0ae9a7f93a 100644 --- a/clients/pkg/logentry/stages/util_test.go +++ b/clients/pkg/logentry/stages/util_test.go @@ -8,9 +8,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func newEntry(ex map[string]interface{}, lbs model.LabelSet, line string, ts time.Time) Entry { diff --git a/clients/pkg/promtail/api/types.go b/clients/pkg/promtail/api/types.go index 2bb2482da4628..36f9cc484160c 100644 --- a/clients/pkg/promtail/api/types.go +++ b/clients/pkg/promtail/api/types.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // Entry is a log entry with labels. 
diff --git a/clients/pkg/promtail/client/batch.go b/clients/pkg/promtail/client/batch.go index 8681b67bd13f1..a6e7b45dd984b 100644 --- a/clients/pkg/promtail/client/batch.go +++ b/clients/pkg/promtail/client/batch.go @@ -12,9 +12,9 @@ import ( "github.com/prometheus/common/model" "golang.org/x/exp/slices" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git a/clients/pkg/promtail/client/batch_test.go b/clients/pkg/promtail/client/batch_test.go index 56dc9477e8b6a..ec92fbc1c4225 100644 --- a/clients/pkg/promtail/client/batch_test.go +++ b/clients/pkg/promtail/client/batch_test.go @@ -9,9 +9,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestBatch_MaxStreams(t *testing.T) { diff --git a/clients/pkg/promtail/client/client.go b/clients/pkg/promtail/client/client.go index 4dfd11363a824..ea93a604d32fb 100644 --- a/clients/pkg/promtail/client/client.go +++ b/clients/pkg/promtail/client/client.go @@ -20,10 +20,10 @@ import ( "github.com/prometheus/common/config" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - lokiutil "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/build" + lokiutil "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/build" ) const ( diff --git a/clients/pkg/promtail/client/client_test.go b/clients/pkg/promtail/client/client_test.go index 01cbb87cc1116..ea3039879605b 100644 --- a/clients/pkg/promtail/client/client_test.go +++ b/clients/pkg/promtail/client/client_test.go @@ -19,12 +19,13 @@ import ( 
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/utils" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/utils" - "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/push" - lokiflag "github.com/grafana/loki/pkg/util/flagext" + + "github.com/grafana/loki/v3/pkg/logproto" + lokiflag "github.com/grafana/loki/v3/pkg/util/flagext" ) var logEntries = []api.Entry{ diff --git a/clients/pkg/promtail/client/client_writeto.go b/clients/pkg/promtail/client/client_writeto.go index 6fa549dfa2721..bd5ecfc424689 100644 --- a/clients/pkg/promtail/client/client_writeto.go +++ b/clients/pkg/promtail/client/client_writeto.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/prometheus/tsdb/chunks" "github.com/prometheus/prometheus/tsdb/record" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/util" ) // clientWriteTo implements a wal.WriteTo that re-builds entries with the stored series, and the received entries. 
After, diff --git a/clients/pkg/promtail/client/client_writeto_test.go b/clients/pkg/promtail/client/client_writeto_test.go index 2254fbb073658..3693b677f2ccf 100644 --- a/clients/pkg/promtail/client/client_writeto_test.go +++ b/clients/pkg/promtail/client/client_writeto_test.go @@ -18,10 +18,10 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestClientWriter_LogEntriesAreReconstructedAndForwardedCorrectly(t *testing.T) { diff --git a/clients/pkg/promtail/client/config.go b/clients/pkg/promtail/client/config.go index ab36353ba4903..eab0eb8863e65 100644 --- a/clients/pkg/promtail/client/config.go +++ b/clients/pkg/promtail/client/config.go @@ -8,7 +8,7 @@ import ( "github.com/grafana/dskit/flagext" "github.com/prometheus/common/config" - lokiflag "github.com/grafana/loki/pkg/util/flagext" + lokiflag "github.com/grafana/loki/v3/pkg/util/flagext" ) // NOTE the helm chart for promtail and fluent-bit also have defaults for these values, please update to match if you make changes here. diff --git a/clients/pkg/promtail/client/fake/client.go b/clients/pkg/promtail/client/fake/client.go index 33e886c30980c..03257135a585d 100644 --- a/clients/pkg/promtail/client/fake/client.go +++ b/clients/pkg/promtail/client/fake/client.go @@ -3,7 +3,7 @@ package fake import ( "sync" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" ) // Client is a fake client used for testing. 
diff --git a/clients/pkg/promtail/client/logger.go b/clients/pkg/promtail/client/logger.go index 890d51177c26c..ba20055a0d94b 100644 --- a/clients/pkg/promtail/client/logger.go +++ b/clients/pkg/promtail/client/logger.go @@ -12,9 +12,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/limit" - "github.com/grafana/loki/clients/pkg/promtail/wal" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/limit" + "github.com/grafana/loki/v3/clients/pkg/promtail/wal" ) var ( diff --git a/clients/pkg/promtail/client/logger_test.go b/clients/pkg/promtail/client/logger_test.go index 43c710d69088c..c19f39ac75784 100644 --- a/clients/pkg/promtail/client/logger_test.go +++ b/clients/pkg/promtail/client/logger_test.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestNewLogger(t *testing.T) { diff --git a/clients/pkg/promtail/client/manager.go b/clients/pkg/promtail/client/manager.go index 84dc48de350d5..ef08d2c04f528 100644 --- a/clients/pkg/promtail/client/manager.go +++ b/clients/pkg/promtail/client/manager.go @@ -9,9 +9,9 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/limit" - "github.com/grafana/loki/clients/pkg/promtail/wal" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/limit" + "github.com/grafana/loki/v3/clients/pkg/promtail/wal" ) // 
WriterEventsNotifier implements a notifier that's received by the Manager, to which wal.Watcher can subscribe for diff --git a/clients/pkg/promtail/client/manager_test.go b/clients/pkg/promtail/client/manager_test.go index 14165ea503f2b..f11821c82120a 100644 --- a/clients/pkg/promtail/client/manager_test.go +++ b/clients/pkg/promtail/client/manager_test.go @@ -16,14 +16,14 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/limit" - "github.com/grafana/loki/clients/pkg/promtail/utils" - "github.com/grafana/loki/clients/pkg/promtail/wal" - - "github.com/grafana/loki/pkg/logproto" - lokiflag "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/limit" + "github.com/grafana/loki/v3/clients/pkg/promtail/utils" + "github.com/grafana/loki/v3/clients/pkg/promtail/wal" + + "github.com/grafana/loki/v3/pkg/logproto" + lokiflag "github.com/grafana/loki/v3/pkg/util/flagext" ) var testLimitsConfig = limit.Config{ diff --git a/clients/pkg/promtail/config/config.go b/clients/pkg/promtail/config/config.go index 4a720a2dd28f3..615b8e9abaad5 100644 --- a/clients/pkg/promtail/config/config.go +++ b/clients/pkg/promtail/config/config.go @@ -8,16 +8,16 @@ import ( "github.com/go-kit/log/level" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/limit" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/server" - "github.com/grafana/loki/clients/pkg/promtail/targets/file" - "github.com/grafana/loki/clients/pkg/promtail/wal" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/limit" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/server" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/file" + "github.com/grafana/loki/v3/clients/pkg/promtail/wal" - "github.com/grafana/loki/pkg/tracing" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/tracing" + "github.com/grafana/loki/v3/pkg/util/flagext" ) // Options contains cross-cutting promtail configurations diff --git a/clients/pkg/promtail/config/config_test.go b/clients/pkg/promtail/config/config_test.go index 04cd09f56dfc1..32bab70501e39 100644 --- a/clients/pkg/promtail/config/config_test.go +++ b/clients/pkg/promtail/config/config_test.go @@ -11,9 +11,9 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) const testFile = ` diff --git a/clients/pkg/promtail/limit/config.go b/clients/pkg/promtail/limit/config.go index 02589afd86b89..aed6f23c0b041 100644 --- a/clients/pkg/promtail/limit/config.go +++ b/clients/pkg/promtail/limit/config.go @@ -3,7 +3,7 @@ package limit import ( "flag" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) type Config struct { diff --git a/clients/pkg/promtail/positions/positions_test.go b/clients/pkg/promtail/positions/positions_test.go index 1dce97b08ec7e..6a7044a5a868d 100644 --- a/clients/pkg/promtail/positions/positions_test.go +++ b/clients/pkg/promtail/positions/positions_test.go @@ -9,7 +9,7 @@ import ( "github.com/go-kit/log" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + 
util_log "github.com/grafana/loki/v3/pkg/util/log" ) func tempFilename(t *testing.T) string { diff --git a/clients/pkg/promtail/promtail.go b/clients/pkg/promtail/promtail.go index 1ef3368a697e5..ffe774a405be8 100644 --- a/clients/pkg/promtail/promtail.go +++ b/clients/pkg/promtail/promtail.go @@ -14,17 +14,17 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/config" - "github.com/grafana/loki/clients/pkg/promtail/server" - "github.com/grafana/loki/clients/pkg/promtail/targets" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/clients/pkg/promtail/utils" - "github.com/grafana/loki/clients/pkg/promtail/wal" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/config" + "github.com/grafana/loki/v3/clients/pkg/promtail/server" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/utils" + "github.com/grafana/loki/v3/clients/pkg/promtail/wal" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/clients/pkg/promtail/promtail_test.go b/clients/pkg/promtail/promtail_test.go index ca35fc403882a..695f3faeb0f5f 100644 --- a/clients/pkg/promtail/promtail_test.go +++ b/clients/pkg/promtail/promtail_test.go @@ -30,19 +30,19 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/client" - 
"github.com/grafana/loki/clients/pkg/promtail/config" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/server" - pserver "github.com/grafana/loki/clients/pkg/promtail/server" - file2 "github.com/grafana/loki/clients/pkg/promtail/targets/file" - "github.com/grafana/loki/clients/pkg/promtail/targets/testutils" - - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/config" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/server" + pserver "github.com/grafana/loki/v3/clients/pkg/promtail/server" + file2 "github.com/grafana/loki/v3/clients/pkg/promtail/targets/file" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/testutils" + + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var clientMetrics = client.NewMetrics(prometheus.DefaultRegisterer) diff --git a/clients/pkg/promtail/promtail_wal_test.go b/clients/pkg/promtail/promtail_wal_test.go index dc00c398e91c8..dfc7ce7273453 100644 --- a/clients/pkg/promtail/promtail_wal_test.go +++ b/clients/pkg/promtail/promtail_wal_test.go @@ -20,15 +20,16 @@ import ( "github.com/prometheus/prometheus/discovery/targetgroup" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/config" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/utils" - 
"github.com/grafana/loki/clients/pkg/promtail/wal" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/config" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/utils" + "github.com/grafana/loki/v3/clients/pkg/promtail/wal" "github.com/grafana/loki/pkg/push" - util_log "github.com/grafana/loki/pkg/util/log" + + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/clients/pkg/promtail/scrapeconfig/scrapeconfig.go b/clients/pkg/promtail/scrapeconfig/scrapeconfig.go index 262b4b925d25d..a261a9a08a383 100644 --- a/clients/pkg/promtail/scrapeconfig/scrapeconfig.go +++ b/clients/pkg/promtail/scrapeconfig/scrapeconfig.go @@ -27,8 +27,8 @@ import ( "github.com/prometheus/prometheus/discovery/zookeeper" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/discovery/consulagent" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/discovery/consulagent" ) // Config describes a job to scrape. 
diff --git a/clients/pkg/promtail/server/server.go b/clients/pkg/promtail/server/server.go index 1b47247630e05..2e7752812c93c 100644 --- a/clients/pkg/promtail/server/server.go +++ b/clients/pkg/promtail/server/server.go @@ -23,9 +23,9 @@ import ( "github.com/prometheus/client_golang/prometheus/promhttp" "github.com/prometheus/common/version" - "github.com/grafana/loki/clients/pkg/promtail/server/ui" - "github.com/grafana/loki/clients/pkg/promtail/targets" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/server/ui" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) var ( diff --git a/clients/pkg/promtail/server/template.go b/clients/pkg/promtail/server/template.go index 1ed7fde54547d..53013bc485d46 100644 --- a/clients/pkg/promtail/server/template.go +++ b/clients/pkg/promtail/server/template.go @@ -13,7 +13,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/template" - "github.com/grafana/loki/clients/pkg/promtail/server/ui" + "github.com/grafana/loki/v3/clients/pkg/promtail/server/ui" ) // templateOptions is a set of options to render a template. 
diff --git a/clients/pkg/promtail/server/ui/assets_generate.go b/clients/pkg/promtail/server/ui/assets_generate.go index 984a1f9c99e4d..0165b2031f60c 100644 --- a/clients/pkg/promtail/server/ui/assets_generate.go +++ b/clients/pkg/promtail/server/ui/assets_generate.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/alertmanager/pkg/modtimevfs" "github.com/shurcooL/vfsgen" - "github.com/grafana/loki/clients/pkg/promtail/server/ui" + "github.com/grafana/loki/v3/clients/pkg/promtail/server/ui" ) func main() { diff --git a/clients/pkg/promtail/targets/azureeventhubs/parser.go b/clients/pkg/promtail/targets/azureeventhubs/parser.go index cd2ddc7145cbb..0001dc525019e 100644 --- a/clients/pkg/promtail/targets/azureeventhubs/parser.go +++ b/clients/pkg/promtail/targets/azureeventhubs/parser.go @@ -12,9 +12,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type azureMonitorResourceLogs struct { diff --git a/clients/pkg/promtail/targets/azureeventhubs/target_syncer.go b/clients/pkg/promtail/targets/azureeventhubs/target_syncer.go index e16d3b4914a10..bc2175768f460 100644 --- a/clients/pkg/promtail/targets/azureeventhubs/target_syncer.go +++ b/clients/pkg/promtail/targets/azureeventhubs/target_syncer.go @@ -10,10 +10,10 @@ import ( "github.com/go-kit/log" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/kafka" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/targets/kafka" ) func NewSyncer( diff --git a/clients/pkg/promtail/targets/azureeventhubs/target_syncer_test.go b/clients/pkg/promtail/targets/azureeventhubs/target_syncer_test.go index 2113afffd4e2e..1874453cf364b 100644 --- a/clients/pkg/promtail/targets/azureeventhubs/target_syncer_test.go +++ b/clients/pkg/promtail/targets/azureeventhubs/target_syncer_test.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func Test_validateConfig(t *testing.T) { diff --git a/clients/pkg/promtail/targets/azureeventhubs/targetmanager.go b/clients/pkg/promtail/targets/azureeventhubs/targetmanager.go index 2651233942ba2..48f3a3fe8b8e6 100644 --- a/clients/pkg/promtail/targets/azureeventhubs/targetmanager.go +++ b/clients/pkg/promtail/targets/azureeventhubs/targetmanager.go @@ -6,10 +6,10 @@ import ( "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/kafka" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/kafka" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // TargetManager manages a series of kafka targets. 
diff --git a/clients/pkg/promtail/targets/cloudflare/target.go b/clients/pkg/promtail/targets/cloudflare/target.go index 19d1f18758273..bede17bc45327 100644 --- a/clients/pkg/promtail/targets/cloudflare/target.go +++ b/clients/pkg/promtail/targets/cloudflare/target.go @@ -18,12 +18,12 @@ import ( "github.com/prometheus/common/model" "go.uber.org/atomic" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // The minimun window size is 1 minute. diff --git a/clients/pkg/promtail/targets/cloudflare/target_test.go b/clients/pkg/promtail/targets/cloudflare/target_test.go index d275a7e845f10..64cb6c4492e5e 100644 --- a/clients/pkg/promtail/targets/cloudflare/target_test.go +++ b/clients/pkg/promtail/targets/cloudflare/target_test.go @@ -15,9 +15,9 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func Test_CloudflareTarget(t *testing.T) { diff --git a/clients/pkg/promtail/targets/cloudflare/targetmanager.go b/clients/pkg/promtail/targets/cloudflare/targetmanager.go index c60fd6577a5f3..31a05fe0b75d4 100644 --- 
a/clients/pkg/promtail/targets/cloudflare/targetmanager.go +++ b/clients/pkg/promtail/targets/cloudflare/targetmanager.go @@ -3,11 +3,11 @@ package cloudflare import ( "github.com/go-kit/log" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // TargetManager manages a series of cloudflare targets. diff --git a/clients/pkg/promtail/targets/docker/target.go b/clients/pkg/promtail/targets/docker/target.go index 3b3e55cfb8445..3ec9d02a022c6 100644 --- a/clients/pkg/promtail/targets/docker/target.go +++ b/clients/pkg/promtail/targets/docker/target.go @@ -20,11 +20,11 @@ import ( "github.com/prometheus/prometheus/model/relabel" "go.uber.org/atomic" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type Target struct { diff --git a/clients/pkg/promtail/targets/docker/target_group.go b/clients/pkg/promtail/targets/docker/target_group.go index 0b0ea9eef6f56..b9fd8940824d0 100644 --- a/clients/pkg/promtail/targets/docker/target_group.go +++ b/clients/pkg/promtail/targets/docker/target_group.go @@ -15,11 +15,11 @@ import ( 
"github.com/prometheus/prometheus/discovery/targetgroup" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/build" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) const DockerSource = "Docker" diff --git a/clients/pkg/promtail/targets/docker/target_test.go b/clients/pkg/promtail/targets/docker/target_test.go index 27a22871e4541..9bb5c9bfacd57 100644 --- a/clients/pkg/promtail/targets/docker/target_test.go +++ b/clients/pkg/promtail/targets/docker/target_test.go @@ -19,8 +19,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" ) func Test_DockerTarget(t *testing.T) { diff --git a/clients/pkg/promtail/targets/docker/targetmanager.go b/clients/pkg/promtail/targets/docker/targetmanager.go index 2d6b55116584f..6321705b8f142 100644 --- a/clients/pkg/promtail/targets/docker/targetmanager.go +++ b/clients/pkg/promtail/targets/docker/targetmanager.go @@ -9,13 +9,13 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/discovery" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/clients/pkg/promtail/targets/docker/targetmanager_test.go b/clients/pkg/promtail/targets/docker/targetmanager_test.go index 23bca7a923216..224e58d5a8930 100644 --- a/clients/pkg/promtail/targets/docker/targetmanager_test.go +++ b/clients/pkg/promtail/targets/docker/targetmanager_test.go @@ -19,9 +19,9 @@ import ( "github.com/prometheus/prometheus/discovery/moby" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func Test_TargetManager(t *testing.T) { diff --git a/clients/pkg/promtail/targets/file/decompresser.go b/clients/pkg/promtail/targets/file/decompresser.go index 3beb35415fb6b..34d2434d8b04e 100644 --- a/clients/pkg/promtail/targets/file/decompresser.go +++ b/clients/pkg/promtail/targets/file/decompresser.go @@ -23,11 +23,11 @@ import ( "golang.org/x/text/encoding/ianaindex" "golang.org/x/text/transform" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func 
supportedCompressedFormats() map[string]struct{} { diff --git a/clients/pkg/promtail/targets/file/decompresser_test.go b/clients/pkg/promtail/targets/file/decompresser_test.go index 443f14a4a8443..a575922ec6e5c 100644 --- a/clients/pkg/promtail/targets/file/decompresser_test.go +++ b/clients/pkg/promtail/targets/file/decompresser_test.go @@ -11,9 +11,9 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) type noopClient struct { diff --git a/clients/pkg/promtail/targets/file/filetarget.go b/clients/pkg/promtail/targets/file/filetarget.go index 2c52cbead922f..97dc10f148293 100644 --- a/clients/pkg/promtail/targets/file/filetarget.go +++ b/clients/pkg/promtail/targets/file/filetarget.go @@ -14,10 +14,10 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) const ( diff --git a/clients/pkg/promtail/targets/file/filetarget_test.go b/clients/pkg/promtail/targets/file/filetarget_test.go index f3cde7bf819a4..57bc31b0802ee 100644 --- a/clients/pkg/promtail/targets/file/filetarget_test.go +++ b/clients/pkg/promtail/targets/file/filetarget_test.go @@ -20,8 +20,8 @@ import ( 
"github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" ) func TestFileTargetSync(t *testing.T) { diff --git a/clients/pkg/promtail/targets/file/filetargetmanager.go b/clients/pkg/promtail/targets/file/filetargetmanager.go index c56b8fa5f8f4d..a02d0295d2bda 100644 --- a/clients/pkg/promtail/targets/file/filetargetmanager.go +++ b/clients/pkg/promtail/targets/file/filetargetmanager.go @@ -20,13 +20,13 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/clients/pkg/promtail/targets/file/filetargetmanager_test.go b/clients/pkg/promtail/targets/file/filetargetmanager_test.go index 62e41635f1111..d27cd43106fe2 100644 --- a/clients/pkg/promtail/targets/file/filetargetmanager_test.go +++ b/clients/pkg/promtail/targets/file/filetargetmanager_test.go @@ -16,10 +16,10 @@ import ( "github.com/prometheus/prometheus/discovery" "github.com/prometheus/prometheus/discovery/targetgroup" - "github.com/grafana/loki/clients/pkg/promtail/api" - 
"github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func newTestLogDirectories(t *testing.T) string { diff --git a/clients/pkg/promtail/targets/file/tailer.go b/clients/pkg/promtail/targets/file/tailer.go index 387978b6a7707..1e72e35306490 100644 --- a/clients/pkg/promtail/targets/file/tailer.go +++ b/clients/pkg/promtail/targets/file/tailer.go @@ -17,11 +17,11 @@ import ( "golang.org/x/text/encoding/ianaindex" "golang.org/x/text/transform" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" ) type tailer struct { diff --git a/clients/pkg/promtail/targets/gcplog/formatter.go b/clients/pkg/promtail/targets/gcplog/formatter.go index 9c175a7750f42..9435ec4775958 100644 --- a/clients/pkg/promtail/targets/gcplog/formatter.go +++ b/clients/pkg/promtail/targets/gcplog/formatter.go @@ -10,9 +10,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // GCPLogEntry that will be written to the pubsub topic. 
diff --git a/clients/pkg/promtail/targets/gcplog/formatter_test.go b/clients/pkg/promtail/targets/gcplog/formatter_test.go index f70fa1d79d122..d5703890197d2 100644 --- a/clients/pkg/promtail/targets/gcplog/formatter_test.go +++ b/clients/pkg/promtail/targets/gcplog/formatter_test.go @@ -10,9 +10,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestFormat(t *testing.T) { diff --git a/clients/pkg/promtail/targets/gcplog/pull_target.go b/clients/pkg/promtail/targets/gcplog/pull_target.go index 38db550bdf730..671b160c6f4ca 100644 --- a/clients/pkg/promtail/targets/gcplog/pull_target.go +++ b/clients/pkg/promtail/targets/gcplog/pull_target.go @@ -14,9 +14,9 @@ import ( "github.com/prometheus/prometheus/model/relabel" "google.golang.org/api/option" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) var defaultBackoff = backoff.Config{ diff --git a/clients/pkg/promtail/targets/gcplog/pull_target_test.go b/clients/pkg/promtail/targets/gcplog/pull_target_test.go index e81ee20a6938c..81a16e6872bdd 100644 --- a/clients/pkg/promtail/targets/gcplog/pull_target_test.go +++ b/clients/pkg/promtail/targets/gcplog/pull_target_test.go @@ -16,9 +16,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) func TestPullTarget_RunStop(t *testing.T) { diff --git a/clients/pkg/promtail/targets/gcplog/push_target.go b/clients/pkg/promtail/targets/gcplog/push_target.go index 95260fb914a8a..4d0a2d2b8407c 100644 --- a/clients/pkg/promtail/targets/gcplog/push_target.go +++ b/clients/pkg/promtail/targets/gcplog/push_target.go @@ -13,12 +13,12 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/serverutils" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/serverutils" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type pushTarget struct { diff --git a/clients/pkg/promtail/targets/gcplog/push_target_test.go b/clients/pkg/promtail/targets/gcplog/push_target_test.go index 50075820fa497..d6e9ce3f75e0a 100644 --- a/clients/pkg/promtail/targets/gcplog/push_target_test.go +++ b/clients/pkg/promtail/targets/gcplog/push_target_test.go @@ -9,7 +9,7 @@ import ( "testing" "time" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" "github.com/go-kit/log" "github.com/prometheus/client_golang/prometheus" @@ -17,10 +17,10 @@ import ( "github.com/prometheus/prometheus/model/relabel" "github.com/stretchr/testify/require" - lokiClient "github.com/grafana/loki/clients/pkg/promtail/client" - 
"github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/gcplog" + lokiClient "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/gcplog" ) const localhost = "127.0.0.1" diff --git a/clients/pkg/promtail/targets/gcplog/push_translation.go b/clients/pkg/promtail/targets/gcplog/push_translation.go index f96f7171f2185..aae5ee4fb25d6 100644 --- a/clients/pkg/promtail/targets/gcplog/push_translation.go +++ b/clients/pkg/promtail/targets/gcplog/push_translation.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - lokiClient "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + lokiClient "github.com/grafana/loki/v3/clients/pkg/promtail/client" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) // PushMessage is the POST body format sent by GCP PubSub push subscriptions. 
diff --git a/clients/pkg/promtail/targets/gcplog/target.go b/clients/pkg/promtail/targets/gcplog/target.go index b122fb24112f7..1c60e8a3eb2ca 100644 --- a/clients/pkg/promtail/targets/gcplog/target.go +++ b/clients/pkg/promtail/targets/gcplog/target.go @@ -7,9 +7,9 @@ import ( "github.com/prometheus/prometheus/model/relabel" "google.golang.org/api/option" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // Target is a common interface implemented by both GCPLog targets. diff --git a/clients/pkg/promtail/targets/gcplog/target_test.go b/clients/pkg/promtail/targets/gcplog/target_test.go index 1a7cec47131f6..96bf7606985ed 100644 --- a/clients/pkg/promtail/targets/gcplog/target_test.go +++ b/clients/pkg/promtail/targets/gcplog/target_test.go @@ -17,9 +17,9 @@ import ( "github.com/grafana/dskit/server" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func TestNewGCPLogTarget(t *testing.T) { diff --git a/clients/pkg/promtail/targets/gcplog/targetmanager.go b/clients/pkg/promtail/targets/gcplog/targetmanager.go index 71f3b5130a2fc..cf731c6c9f995 100644 --- a/clients/pkg/promtail/targets/gcplog/targetmanager.go +++ b/clients/pkg/promtail/targets/gcplog/targetmanager.go @@ -6,10 +6,10 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/clients/pkg/logentry/stages" - 
"github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // nolint:revive diff --git a/clients/pkg/promtail/targets/gelf/gelftarget.go b/clients/pkg/promtail/targets/gelf/gelftarget.go index baaf8e3911fd9..42298b7588cca 100644 --- a/clients/pkg/promtail/targets/gelf/gelftarget.go +++ b/clients/pkg/promtail/targets/gelf/gelftarget.go @@ -14,11 +14,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // SeverityLevels maps severity levels to severity string levels. 
diff --git a/clients/pkg/promtail/targets/gelf/gelftarget_test.go b/clients/pkg/promtail/targets/gelf/gelftarget_test.go index 86a304ef9a7a0..8bdc5470e28b5 100644 --- a/clients/pkg/promtail/targets/gelf/gelftarget_test.go +++ b/clients/pkg/promtail/targets/gelf/gelftarget_test.go @@ -15,8 +15,8 @@ import ( "github.com/prometheus/prometheus/model/relabel" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func Test_Gelf(t *testing.T) { diff --git a/clients/pkg/promtail/targets/gelf/gelftargetmanager.go b/clients/pkg/promtail/targets/gelf/gelftargetmanager.go index f9824d3152f07..f6b7048287ce0 100644 --- a/clients/pkg/promtail/targets/gelf/gelftargetmanager.go +++ b/clients/pkg/promtail/targets/gelf/gelftargetmanager.go @@ -4,10 +4,10 @@ import ( "github.com/go-kit/log" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // TargetManager manages a series of Gelf Targets. 
diff --git a/clients/pkg/promtail/targets/heroku/target.go b/clients/pkg/promtail/targets/heroku/target.go index 9ab2fdaacfac4..83aceda6b7921 100644 --- a/clients/pkg/promtail/targets/heroku/target.go +++ b/clients/pkg/promtail/targets/heroku/target.go @@ -14,14 +14,14 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - lokiClient "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/serverutils" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" - - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + lokiClient "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/serverutils" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" + + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type Target struct { diff --git a/clients/pkg/promtail/targets/heroku/target_test.go b/clients/pkg/promtail/targets/heroku/target_test.go index c584bedba471d..42657d83ff1b4 100644 --- a/clients/pkg/promtail/targets/heroku/target_test.go +++ b/clients/pkg/promtail/targets/heroku/target_test.go @@ -19,9 +19,9 @@ import ( "github.com/prometheus/prometheus/model/relabel" "github.com/stretchr/testify/require" - lokiClient "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + lokiClient "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) const localhost = "127.0.0.1" diff --git a/clients/pkg/promtail/targets/heroku/targetmanager.go b/clients/pkg/promtail/targets/heroku/targetmanager.go index 5d046ca3d45cf..3ad94bc5a79f0 100644 --- a/clients/pkg/promtail/targets/heroku/targetmanager.go +++ b/clients/pkg/promtail/targets/heroku/targetmanager.go @@ -5,10 +5,10 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) type TargetManager struct { diff --git a/clients/pkg/promtail/targets/journal/journaltarget.go b/clients/pkg/promtail/targets/journal/journaltarget.go index 6630b827e7c14..fa04ac50c3db4 100644 --- a/clients/pkg/promtail/targets/journal/journaltarget.go +++ b/clients/pkg/promtail/targets/journal/journaltarget.go @@ -19,12 +19,12 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git 
a/clients/pkg/promtail/targets/journal/journaltarget_test.go b/clients/pkg/promtail/targets/journal/journaltarget_test.go index b0186d1504f47..768cb7f5c1510 100644 --- a/clients/pkg/promtail/targets/journal/journaltarget_test.go +++ b/clients/pkg/promtail/targets/journal/journaltarget_test.go @@ -20,10 +20,10 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/testutils" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/testutils" ) type mockJournalReader struct { diff --git a/clients/pkg/promtail/targets/journal/journaltargetmanager.go b/clients/pkg/promtail/targets/journal/journaltargetmanager.go index 9e55c37d9268c..f0d99f091db17 100644 --- a/clients/pkg/promtail/targets/journal/journaltargetmanager.go +++ b/clients/pkg/promtail/targets/journal/journaltargetmanager.go @@ -7,10 +7,10 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // JournalTargetManager manages a series of JournalTargets. 
diff --git a/clients/pkg/promtail/targets/journal/journaltargetmanager_linux.go b/clients/pkg/promtail/targets/journal/journaltargetmanager_linux.go index 6940d57ead5d7..52d216e58a090 100644 --- a/clients/pkg/promtail/targets/journal/journaltargetmanager_linux.go +++ b/clients/pkg/promtail/targets/journal/journaltargetmanager_linux.go @@ -7,11 +7,11 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // JournalTargetManager manages a series of JournalTargets. 
diff --git a/clients/pkg/promtail/targets/kafka/consumer.go b/clients/pkg/promtail/targets/kafka/consumer.go index 34cb61da00e7e..f4b8a4d260cf2 100644 --- a/clients/pkg/promtail/targets/kafka/consumer.go +++ b/clients/pkg/promtail/targets/kafka/consumer.go @@ -11,7 +11,7 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/dskit/backoff" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) var defaultBackOff = backoff.Config{ diff --git a/clients/pkg/promtail/targets/kafka/consumer_test.go b/clients/pkg/promtail/targets/kafka/consumer_test.go index 63ab60b1a64f5..7420bdf6c1f11 100644 --- a/clients/pkg/promtail/targets/kafka/consumer_test.go +++ b/clients/pkg/promtail/targets/kafka/consumer_test.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) type DiscovererFn func(sarama.ConsumerGroupSession, sarama.ConsumerGroupClaim) (RunnableTarget, error) diff --git a/clients/pkg/promtail/targets/kafka/formatter.go b/clients/pkg/promtail/targets/kafka/formatter.go index b0f61e4332e3b..f800dbe20b9dd 100644 --- a/clients/pkg/promtail/targets/kafka/formatter.go +++ b/clients/pkg/promtail/targets/kafka/formatter.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) func format(lbs labels.Labels, cfg []*relabel.Config) model.LabelSet { diff --git a/clients/pkg/promtail/targets/kafka/parser.go b/clients/pkg/promtail/targets/kafka/parser.go index 872ea0e45bc05..9ad3b7f8271c0 100644 --- a/clients/pkg/promtail/targets/kafka/parser.go +++ b/clients/pkg/promtail/targets/kafka/parser.go @@ -5,9 +5,9 @@ import ( "github.com/prometheus/common/model" 
"github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // messageParser implements MessageParser. It doesn't modify the content of the original `message.Value`. diff --git a/clients/pkg/promtail/targets/kafka/target.go b/clients/pkg/promtail/targets/kafka/target.go index 519af759b66c7..707cc01ca1947 100644 --- a/clients/pkg/promtail/targets/kafka/target.go +++ b/clients/pkg/promtail/targets/kafka/target.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) type runnableDroppedTarget struct { diff --git a/clients/pkg/promtail/targets/kafka/target_syncer.go b/clients/pkg/promtail/targets/kafka/target_syncer.go index 187f4749ce2df..6afcd24ad7832 100644 --- a/clients/pkg/promtail/targets/kafka/target_syncer.go +++ b/clients/pkg/promtail/targets/kafka/target_syncer.go @@ -16,12 +16,12 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) var TopicPollInterval = 30 * 
time.Second diff --git a/clients/pkg/promtail/targets/kafka/target_syncer_test.go b/clients/pkg/promtail/targets/kafka/target_syncer_test.go index cc1161c63dcb8..1f0255cedf62e 100644 --- a/clients/pkg/promtail/targets/kafka/target_syncer_test.go +++ b/clients/pkg/promtail/targets/kafka/target_syncer_test.go @@ -17,9 +17,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func Test_TopicDiscovery(t *testing.T) { diff --git a/clients/pkg/promtail/targets/kafka/target_test.go b/clients/pkg/promtail/targets/kafka/target_test.go index a62488b04a7cb..0f8061027de3a 100644 --- a/clients/pkg/promtail/targets/kafka/target_test.go +++ b/clients/pkg/promtail/targets/kafka/target_test.go @@ -13,7 +13,7 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" ) // Consumergroup handler diff --git a/clients/pkg/promtail/targets/kafka/targetmanager.go b/clients/pkg/promtail/targets/kafka/targetmanager.go index f16606aefda75..c9cc382503704 100644 --- a/clients/pkg/promtail/targets/kafka/targetmanager.go +++ b/clients/pkg/promtail/targets/kafka/targetmanager.go @@ -5,9 +5,9 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // TargetManager manages a series of kafka targets. diff --git a/clients/pkg/promtail/targets/lokipush/pushtarget.go b/clients/pkg/promtail/targets/lokipush/pushtarget.go index 88c7859bd36e5..63630c6e5ac2d 100644 --- a/clients/pkg/promtail/targets/lokipush/pushtarget.go +++ b/clients/pkg/promtail/targets/lokipush/pushtarget.go @@ -20,14 +20,14 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/serverutils" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" - - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/serverutils" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" + + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type PushTarget struct { diff --git a/clients/pkg/promtail/targets/lokipush/pushtarget_test.go b/clients/pkg/promtail/targets/lokipush/pushtarget_test.go index 147994fb2df1a..3fe48b599a5e4 100644 --- a/clients/pkg/promtail/targets/lokipush/pushtarget_test.go +++ b/clients/pkg/promtail/targets/lokipush/pushtarget_test.go @@ -20,12 +20,12 @@ import ( "github.com/prometheus/prometheus/model/relabel" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const localhost = "127.0.0.1" diff --git a/clients/pkg/promtail/targets/lokipush/pushtargetmanager.go b/clients/pkg/promtail/targets/lokipush/pushtargetmanager.go index be29037544726..e924647c2c073 100644 --- a/clients/pkg/promtail/targets/lokipush/pushtargetmanager.go +++ b/clients/pkg/promtail/targets/lokipush/pushtargetmanager.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/prometheus/util/strutil" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // PushTargetManager manages a series of PushTargets. 
diff --git a/clients/pkg/promtail/targets/lokipush/pushtargetmanager_test.go b/clients/pkg/promtail/targets/lokipush/pushtargetmanager_test.go index 40621d18f5ba0..08730042bf841 100644 --- a/clients/pkg/promtail/targets/lokipush/pushtargetmanager_test.go +++ b/clients/pkg/promtail/targets/lokipush/pushtargetmanager_test.go @@ -5,7 +5,7 @@ import ( "github.com/grafana/dskit/server" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func Test_validateJobName(t *testing.T) { diff --git a/clients/pkg/promtail/targets/manager.go b/clients/pkg/promtail/targets/manager.go index 91ce61a1c84f4..241dd25aaa5cc 100644 --- a/clients/pkg/promtail/targets/manager.go +++ b/clients/pkg/promtail/targets/manager.go @@ -8,23 +8,23 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/azureeventhubs" - "github.com/grafana/loki/clients/pkg/promtail/targets/cloudflare" - "github.com/grafana/loki/clients/pkg/promtail/targets/docker" - "github.com/grafana/loki/clients/pkg/promtail/targets/file" - "github.com/grafana/loki/clients/pkg/promtail/targets/gcplog" - "github.com/grafana/loki/clients/pkg/promtail/targets/gelf" - "github.com/grafana/loki/clients/pkg/promtail/targets/heroku" - "github.com/grafana/loki/clients/pkg/promtail/targets/journal" - "github.com/grafana/loki/clients/pkg/promtail/targets/kafka" - "github.com/grafana/loki/clients/pkg/promtail/targets/lokipush" - "github.com/grafana/loki/clients/pkg/promtail/targets/stdin" - "github.com/grafana/loki/clients/pkg/promtail/targets/syslog" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/clients/pkg/promtail/targets/windows" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/azureeventhubs" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/cloudflare" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/docker" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/file" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/gcplog" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/gelf" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/heroku" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/journal" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/kafka" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/lokipush" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/stdin" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/syslog" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/windows" ) const ( diff --git a/clients/pkg/promtail/targets/stdin/stdin_target_manager.go b/clients/pkg/promtail/targets/stdin/stdin_target_manager.go index 065d6bd93feb0..bcc441950e3a9 100644 --- a/clients/pkg/promtail/targets/stdin/stdin_target_manager.go +++ b/clients/pkg/promtail/targets/stdin/stdin_target_manager.go @@ -15,12 +15,12 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/discovery/targetgroup" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // bufferSize is the size of the buffered reader diff --git a/clients/pkg/promtail/targets/stdin/stdin_target_manager_test.go b/clients/pkg/promtail/targets/stdin/stdin_target_manager_test.go index 58abae3c802b8..8f2135f3aff32 100644 --- a/clients/pkg/promtail/targets/stdin/stdin_target_manager_test.go +++ b/clients/pkg/promtail/targets/stdin/stdin_target_manager_test.go @@ -12,13 +12,13 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_newReaderTarget(t *testing.T) { diff --git a/clients/pkg/promtail/targets/syslog/syslogparser/syslogparser_test.go b/clients/pkg/promtail/targets/syslog/syslogparser/syslogparser_test.go index 89d17c4645f55..f99742de48a24 100644 --- a/clients/pkg/promtail/targets/syslog/syslogparser/syslogparser_test.go +++ b/clients/pkg/promtail/targets/syslog/syslogparser/syslogparser_test.go @@ -9,7 +9,7 @@ import ( "github.com/influxdata/go-syslog/v3/rfc5424" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/targets/syslog/syslogparser" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/syslog/syslogparser" ) var ( diff --git 
a/clients/pkg/promtail/targets/syslog/syslogtarget.go b/clients/pkg/promtail/targets/syslog/syslogtarget.go index 54befebc38931..35ba4d8cf297f 100644 --- a/clients/pkg/promtail/targets/syslog/syslogtarget.go +++ b/clients/pkg/promtail/targets/syslog/syslogtarget.go @@ -15,11 +15,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var ( diff --git a/clients/pkg/promtail/targets/syslog/syslogtarget_test.go b/clients/pkg/promtail/targets/syslog/syslogtarget_test.go index 62b5924626f14..2f06e04321ece 100644 --- a/clients/pkg/promtail/targets/syslog/syslogtarget_test.go +++ b/clients/pkg/promtail/targets/syslog/syslogtarget_test.go @@ -19,9 +19,9 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/syslog/syslogparser" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/syslog/syslogparser" ) var ( diff --git a/clients/pkg/promtail/targets/syslog/syslogtargetmanager.go b/clients/pkg/promtail/targets/syslog/syslogtargetmanager.go index 8a7246a28a464..ffda8b3de4203 100644 --- a/clients/pkg/promtail/targets/syslog/syslogtargetmanager.go +++ b/clients/pkg/promtail/targets/syslog/syslogtargetmanager.go @@ -5,10 +5,10 @@ import ( 
"github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // SyslogTargetManager manages a series of SyslogTargets. diff --git a/clients/pkg/promtail/targets/syslog/transport.go b/clients/pkg/promtail/targets/syslog/transport.go index 67a78136e311b..6b1bdfeb91c11 100644 --- a/clients/pkg/promtail/targets/syslog/transport.go +++ b/clients/pkg/promtail/targets/syslog/transport.go @@ -20,8 +20,8 @@ import ( "github.com/influxdata/go-syslog/v3" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/syslog/syslogparser" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/syslog/syslogparser" ) var ( diff --git a/clients/pkg/promtail/targets/windows/bookmark.go b/clients/pkg/promtail/targets/windows/bookmark.go index b7a4a7698cde4..55ad7f3040526 100644 --- a/clients/pkg/promtail/targets/windows/bookmark.go +++ b/clients/pkg/promtail/targets/windows/bookmark.go @@ -9,7 +9,7 @@ import ( "github.com/spf13/afero" - "github.com/grafana/loki/clients/pkg/promtail/targets/windows/win_eventlog" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/windows/win_eventlog" ) type bookMark struct { diff --git a/clients/pkg/promtail/targets/windows/format.go b/clients/pkg/promtail/targets/windows/format.go index 9fc44cc62a8ba..821aa4ecf0d9b 100644 --- a/clients/pkg/promtail/targets/windows/format.go +++ 
b/clients/pkg/promtail/targets/windows/format.go @@ -9,8 +9,8 @@ import ( jsoniter "github.com/json-iterator/go" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/windows/win_eventlog" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/windows/win_eventlog" ) type Event struct { diff --git a/clients/pkg/promtail/targets/windows/target.go b/clients/pkg/promtail/targets/windows/target.go index c4e1806724a54..42cb298f0995c 100644 --- a/clients/pkg/promtail/targets/windows/target.go +++ b/clients/pkg/promtail/targets/windows/target.go @@ -18,10 +18,10 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/clients/pkg/promtail/targets/windows/win_eventlog" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/windows/win_eventlog" ) var fs = afero.NewOsFs() diff --git a/clients/pkg/promtail/targets/windows/target_test.go b/clients/pkg/promtail/targets/windows/target_test.go index a9a692b21ecfc..c766cb869cd87 100644 --- a/clients/pkg/promtail/targets/windows/target_test.go +++ b/clients/pkg/promtail/targets/windows/target_test.go @@ -14,13 +14,13 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/sys/windows/svc/eventlog" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/windows/win_eventlog" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/windows/win_eventlog" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func init() { diff --git a/clients/pkg/promtail/targets/windows/targetmanager.go b/clients/pkg/promtail/targets/windows/targetmanager.go index 78e98880ca2bf..9bb12ebc15a9c 100644 --- a/clients/pkg/promtail/targets/windows/targetmanager.go +++ b/clients/pkg/promtail/targets/windows/targetmanager.go @@ -8,9 +8,9 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // TargetManager manages a series of windows event targets. 
diff --git a/clients/pkg/promtail/targets/windows/targetmanager_windows.go b/clients/pkg/promtail/targets/windows/targetmanager_windows.go index 78793b26c730a..4bc53bcc42153 100644 --- a/clients/pkg/promtail/targets/windows/targetmanager_windows.go +++ b/clients/pkg/promtail/targets/windows/targetmanager_windows.go @@ -8,10 +8,10 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // TargetManager manages a series of windows event targets. diff --git a/clients/pkg/promtail/targets/windows/win_eventlog/win_eventlog.go b/clients/pkg/promtail/targets/windows/win_eventlog/win_eventlog.go index f2411698d4b14..71ff148de58ff 100644 --- a/clients/pkg/promtail/targets/windows/win_eventlog/win_eventlog.go +++ b/clients/pkg/promtail/targets/windows/win_eventlog/win_eventlog.go @@ -39,7 +39,7 @@ import ( "github.com/influxdata/telegraf/plugins/inputs" "golang.org/x/sys/windows" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var sampleConfig = ` diff --git a/clients/pkg/promtail/utils/entries.go b/clients/pkg/promtail/utils/entries.go index 214422b674bad..10204e8fb37f4 100644 --- a/clients/pkg/promtail/utils/entries.go +++ b/clients/pkg/promtail/utils/entries.go @@ -5,7 +5,7 @@ import ( "sync" "time" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" ) // FanoutEntryHandler implements api.EntryHandler, fanning out received entries to one or multiple 
channels. diff --git a/clients/pkg/promtail/utils/entries_test.go b/clients/pkg/promtail/utils/entries_test.go index 8029e895c2a62..c9b098d9ee4a4 100644 --- a/clients/pkg/promtail/utils/entries_test.go +++ b/clients/pkg/promtail/utils/entries_test.go @@ -8,9 +8,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestFanoutEntryHandler_SuccessfulFanout(t *testing.T) { diff --git a/clients/pkg/promtail/utils/remotewrite_server.go b/clients/pkg/promtail/utils/remotewrite_server.go index 089f4a79a01bb..837d3a8581f10 100644 --- a/clients/pkg/promtail/utils/remotewrite_server.go +++ b/clients/pkg/promtail/utils/remotewrite_server.go @@ -5,8 +5,8 @@ import ( "net/http" "net/http/httptest" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" ) // RemoteWriteRequest wraps the received logs remote write request that is received. diff --git a/clients/pkg/promtail/wal/reader.go b/clients/pkg/promtail/wal/reader.go index b19b2bbecc10b..769c566efbee9 100644 --- a/clients/pkg/promtail/wal/reader.go +++ b/clients/pkg/promtail/wal/reader.go @@ -5,11 +5,11 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/util" - walUtils "github.com/grafana/loki/pkg/util/wal" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/util" + walUtils "github.com/grafana/loki/v3/pkg/util/wal" ) // ReadWAL will read all entries in the WAL located under dir. 
Mainly used for testing diff --git a/clients/pkg/promtail/wal/wal.go b/clients/pkg/promtail/wal/wal.go index af1fa7e3d5098..8e747530470c7 100644 --- a/clients/pkg/promtail/wal/wal.go +++ b/clients/pkg/promtail/wal/wal.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/prometheus/tsdb/wlog" - "github.com/grafana/loki/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/ingester/wal" ) var ( diff --git a/clients/pkg/promtail/wal/watcher.go b/clients/pkg/promtail/wal/watcher.go index 3e8719a235812..926c93c01bcfc 100644 --- a/clients/pkg/promtail/wal/watcher.go +++ b/clients/pkg/promtail/wal/watcher.go @@ -14,7 +14,7 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/prometheus/prometheus/tsdb/wlog" - "github.com/grafana/loki/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/ingester/wal" ) const ( diff --git a/clients/pkg/promtail/wal/watcher_test.go b/clients/pkg/promtail/wal/watcher_test.go index d9a5e04cb0b89..b41880f5d20ff 100644 --- a/clients/pkg/promtail/wal/watcher_test.go +++ b/clients/pkg/promtail/wal/watcher_test.go @@ -13,11 +13,11 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" ) type testWriteTo struct { diff --git a/clients/pkg/promtail/wal/writer.go b/clients/pkg/promtail/wal/writer.go index 8e754a01038f8..e9360645716d9 100644 --- a/clients/pkg/promtail/wal/writer.go +++ b/clients/pkg/promtail/wal/writer.go @@ -16,11 +16,11 @@ import ( "github.com/prometheus/prometheus/tsdb/chunks" "github.com/prometheus/prometheus/tsdb/record" - "github.com/grafana/loki/clients/pkg/promtail/api" 
+ "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/clients/pkg/promtail/wal/writer_test.go b/clients/pkg/promtail/wal/writer_test.go index fbce817f2a26e..a9c637f98b1ce 100644 --- a/clients/pkg/promtail/wal/writer_test.go +++ b/clients/pkg/promtail/wal/writer_test.go @@ -13,9 +13,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestWriter_EntriesAreWrittenToWAL(t *testing.T) { diff --git a/cmd/logcli/main.go b/cmd/logcli/main.go index 56a954cd5b44b..3d2aa85297b3f 100644 --- a/cmd/logcli/main.go +++ b/cmd/logcli/main.go @@ -15,15 +15,15 @@ import ( "github.com/prometheus/common/version" "gopkg.in/alecthomas/kingpin.v2" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/logcli/index" - "github.com/grafana/loki/pkg/logcli/labelquery" - "github.com/grafana/loki/pkg/logcli/output" - "github.com/grafana/loki/pkg/logcli/query" - "github.com/grafana/loki/pkg/logcli/seriesquery" - "github.com/grafana/loki/pkg/logcli/volume" - "github.com/grafana/loki/pkg/logql/syntax" - _ "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/logcli/index" + "github.com/grafana/loki/v3/pkg/logcli/labelquery" + "github.com/grafana/loki/v3/pkg/logcli/output" + "github.com/grafana/loki/v3/pkg/logcli/query" + "github.com/grafana/loki/v3/pkg/logcli/seriesquery" + "github.com/grafana/loki/v3/pkg/logcli/volume" + "github.com/grafana/loki/v3/pkg/logql/syntax" + _ 
"github.com/grafana/loki/v3/pkg/util/build" ) var ( diff --git a/cmd/logql-analyzer/main.go b/cmd/logql-analyzer/main.go index 5031dbad7d894..beed1226709d4 100644 --- a/cmd/logql-analyzer/main.go +++ b/cmd/logql-analyzer/main.go @@ -10,9 +10,9 @@ import ( "github.com/grafana/dskit/server" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/logqlanalyzer" - "github.com/grafana/loki/pkg/sizing" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logqlanalyzer" + "github.com/grafana/loki/v3/pkg/sizing" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func main() { diff --git a/cmd/loki-canary/main.go b/cmd/loki-canary/main.go index 70aad7b8dfd80..061b98321047e 100644 --- a/cmd/loki-canary/main.go +++ b/cmd/loki-canary/main.go @@ -18,10 +18,10 @@ import ( "github.com/prometheus/common/config" "github.com/prometheus/common/version" - "github.com/grafana/loki/pkg/canary/comparator" - "github.com/grafana/loki/pkg/canary/reader" - "github.com/grafana/loki/pkg/canary/writer" - _ "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/canary/comparator" + "github.com/grafana/loki/v3/pkg/canary/reader" + "github.com/grafana/loki/v3/pkg/canary/writer" + _ "github.com/grafana/loki/v3/pkg/util/build" ) const ( diff --git a/cmd/loki/loki-docker-config.yaml b/cmd/loki/loki-docker-config.yaml index b9f80f910236c..c50c147b06f2f 100644 --- a/cmd/loki/loki-docker-config.yaml +++ b/cmd/loki/loki-docker-config.yaml @@ -20,7 +20,7 @@ schema_config: - from: 2020-10-24 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/cmd/loki/loki-local-config.yaml b/cmd/loki/loki-local-config.yaml index cbc04cb4413f3..e448dfd9f1fa3 100644 --- a/cmd/loki/loki-local-config.yaml +++ b/cmd/loki/loki-local-config.yaml @@ -28,7 +28,7 @@ schema_config: - from: 2020-10-24 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 
24h diff --git a/cmd/loki/loki-local-with-memcached.yaml b/cmd/loki/loki-local-with-memcached.yaml index a2f4336cdd484..d69a983d6124a 100644 --- a/cmd/loki/loki-local-with-memcached.yaml +++ b/cmd/loki/loki-local-with-memcached.yaml @@ -16,6 +16,10 @@ common: kvstore: store: inmemory +limits_config: + split_instant_metric_queries_by_interval: '10m' + + query_range: align_queries_with_step: true cache_index_stats_results: true @@ -76,7 +80,7 @@ schema_config: - from: 2020-10-24 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/cmd/loki/main.go b/cmd/loki/main.go index 20a5925acbb4a..250568203be15 100644 --- a/cmd/loki/main.go +++ b/cmd/loki/main.go @@ -16,12 +16,12 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/version" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/util" - _ "github.com/grafana/loki/pkg/util/build" - "github.com/grafana/loki/pkg/util/cfg" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/pkg/util" + _ "github.com/grafana/loki/v3/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/cfg" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) func exit(code int) { diff --git a/cmd/migrate/main.go b/cmd/migrate/main.go index d638adaaa812e..e42468e532b07 100644 --- a/cmd/migrate/main.go +++ b/cmd/migrate/main.go @@ -17,16 +17,16 @@ import ( "github.com/grafana/dskit/user" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/util/cfg" - 
"github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/util/cfg" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) type syncRange struct { diff --git a/cmd/querytee/main.go b/cmd/querytee/main.go index 9007dd6a3e3f2..5acebfed85179 100644 --- a/cmd/querytee/main.go +++ b/cmd/querytee/main.go @@ -10,8 +10,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/collectors" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/tools/querytee" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/tools/querytee" ) type Config struct { diff --git a/docs/sources/configure/_index.md b/docs/sources/configure/_index.md index 36603f1be1d3f..cce92e24eb2f1 100644 --- a/docs/sources/configure/_index.md +++ b/docs/sources/configure/_index.md @@ -1915,11 +1915,6 @@ client: # bloom-gateway-client.grpc [grpc_client_config: ] - # Flag to control whether requests sent to the gateway should be logged or - # not. - # CLI flag: -bloom-gateway-client.log-gateway-requests - [log_gateway_requests: | default = false] - results_cache: # The cache block configures the cache backend. # The CLI flags prefix for this block configuration is: @@ -2356,21 +2351,22 @@ tsdb_shipper: [ingesterdbretainperiod: ] -# Configures Bloom Shipper. +# Configures the bloom shipper component, which contains the store abstraction +# to fetch bloom filters from and put them to object storage. 
bloom_shipper: - # Working directory to store downloaded Bloom Blocks. + # Working directory to store downloaded bloom blocks. Supports multiple + # directories, separated by comma. # CLI flag: -bloom.shipper.working-directory - [working_directory: | default = "bloom-shipper"] + [working_directory: | default = "/data/blooms"] - blocks_downloading_queue: - # The count of parallel workers that download Bloom Blocks. - # CLI flag: -bloom.shipper.blocks-downloading-queue.workers-count - [workers_count: | default = 100] + # Maximum size of bloom pages that should be queried. Larger pages than this + # limit are skipped when querying blooms to limit memory usage. + # CLI flag: -bloom.max-query-page-size + [max_query_page_size: | default = 64MiB] - # Maximum number of task in queue per tenant per bloom-gateway. Enqueuing - # the tasks above this limit will fail an error. - # CLI flag: -bloom.shipper.blocks-downloading-queue.max_tasks_enqueued_per_tenant - [max_tasks_enqueued_per_tenant: | default = 10000] + # The amount of maximum concurrent bloom blocks downloads. + # CLI flag: -bloom.download-parallelism + [download_parallelism: | default = 16] blocks_cache: # Cache for bloom blocks. Soft limit of the cache in bytes. Exceeding this @@ -2403,6 +2399,10 @@ The `chunk_store_config` block configures how chunks will be cached and how long # The CLI flags prefix for this block configuration is: store.chunks-cache [chunk_cache_config: ] +# The cache block configures the cache backend. +# The CLI flags prefix for this block configuration is: store.chunks-cache-l2 +[chunk_cache_config_l2: ] + # Write dedupe cache is deprecated along with legacy index types (aws, # aws-dynamo, bigtable, bigtable-hashed, cassandra, gcp, gcp-columnkey, # grpc-store). 
@@ -2410,6 +2410,11 @@ The `chunk_store_config` block configures how chunks will be cached and how long # The CLI flags prefix for this block configuration is: store.index-cache-write [write_dedupe_cache_config: ] +# Chunks will be handed off to the L2 cache after this duration. 0 to disable L2 +# cache. +# CLI flag: -store.chunks-cache-l2.handoff +[l2_chunk_cache_handoff: | default = 0s] + # Cache index entries older than this period. 0 to disable. # CLI flag: -store.cache-lookups-older-than [cache_lookups_older_than: | default = 0s] @@ -2477,9 +2482,9 @@ The `compactor` block configures the compactor component, which compacts index s # CLI flag: -compactor.delete-request-cancel-period [delete_request_cancel_period: | default = 24h] -# Constrain the size of any single delete request. When a delete request > -# delete_max_interval is input, the request is sharded into smaller requests of -# no more than delete_max_interval +# Constrain the size of any single delete request with line filters. When a +# delete request > delete_max_interval is input, the request is sharded into +# smaller requests of no more than delete_max_interval # CLI flag: -compactor.delete-max-interval [delete_max_interval: | default = 24h] @@ -2695,18 +2700,18 @@ ring: # CLI flag: -bloom-compactor.compaction-interval [compaction_interval: | default = 10m] -# How many index periods (days) to wait before building bloom filters for a -# table. This can be used to lower cost by not re-writing data to object storage -# too frequently since recent data changes more often. -# CLI flag: -bloom-compactor.min-table-compaction-period -[min_table_compaction_period: | default = 1] +# Newest day-table offset (from today, inclusive) to compact. Increase to lower +# cost by not re-writing data to object storage too frequently since recent data +# changes more often at the cost of not having blooms available as quickly. 
+# CLI flag: -bloom-compactor.min-table-offset +[min_table_offset: | default = 1] -# The maximum number of index periods (days) to build bloom filters for a table. -# This can be used to lower cost by not trying to compact older data which -# doesn't change. This can be optimized by aligning it with the maximum -# `reject_old_samples_max_age` setting of any tenant. -# CLI flag: -bloom-compactor.max-table-compaction-period -[max_table_compaction_period: | default = 7] +# Oldest day-table offset (from today, inclusive) to compact. This can be used +# to lower cost by not trying to compact older data which doesn't change. This +# can be optimized by aligning it with the maximum `reject_old_samples_max_age` +# setting of any tenant. +# CLI flag: -bloom-compactor.max-table-offset +[max_table_offset: | default = 2] # Number of workers to run in parallel for compaction. # CLI flag: -bloom-compactor.worker-parallelism @@ -2729,6 +2734,15 @@ ring: # and compact as many tables. # CLI flag: -bloom-compactor.max-compaction-parallelism [max_compaction_parallelism: | default = 1] + +retention: + # Enable bloom retention. + # CLI flag: -bloom-compactor.retention.enabled + [enabled: | default = false] + + # Max lookback days for retention. + # CLI flag: -bloom-compactor.retention.max-lookback-days + [max_lookback_days: | default = 365] ``` ### limits_config @@ -2810,6 +2824,18 @@ The `limits_config` block configures global and per-tenant limits in Loki. # CLI flag: -validation.increment-duplicate-timestamps [increment_duplicate_timestamp: | default = false] +# If no service_name label exists, Loki maps a single label from the configured +# list to service_name. If none of the configured labels exist in the stream, +# label is set to unknown_service. Empty list disables setting the label. 
+# CLI flag: -validation.discover-service-name +[discover_service_name: | default = [service app application name app_kubernetes_io_name container container_name component workload job]] + +# Discover and add log levels during ingestion, if not present already. Levels +# would be added to Structured Metadata with name 'level' and one of the values +# from 'debug', 'info', 'warn', 'error', 'critical', 'fatal'. +# CLI flag: -validation.discover-log-levels +[discover_log_levels: | default = false] + # Maximum number of active streams per user, per ingester. 0 to disable. # CLI flag: -ingester.max-streams-per-user [max_streams_per_user: | default = 0] @@ -2871,11 +2897,18 @@ The `limits_config` block configures global and per-tenant limits in Loki. # CLI flag: -querier.tsdb-max-query-parallelism [tsdb_max_query_parallelism: | default = 128] -# Maximum number of bytes assigned to a single sharded query. Also expressible -# in human readable forms (1GB, etc). +# Target maximum number of bytes assigned to a single sharded query. Also +# expressible in human readable forms (1GB, etc). Note: This is a _target_ and +# not an absolute limit. The actual limit can be higher, but the query planner +# will try to build shards up to this limit. # CLI flag: -querier.tsdb-max-bytes-per-shard [tsdb_max_bytes_per_shard: | default = 600MB] +# sharding strategy to use in query planning. Suggested to use bounded once all +# nodes can recognize it. +# CLI flag: -limits.tsdb-sharding-strategy +[tsdb_sharding_strategy: | default = "power_of_two"] + # Cardinality limit for index queries. # CLI flag: -store.cardinality-limit [cardinality_limit: | default = 100000] @@ -4465,6 +4498,7 @@ The cache block configures the cache backend. 
The supported CLI flags `` - `frontend.series-results-cache` - `frontend.volume-results-cache` - `store.chunks-cache` +- `store.chunks-cache-l2` - `store.index-cache-read` - `store.index-cache-write` @@ -4511,9 +4545,8 @@ memcached_client: # CLI flag: -.memcached.service [service: | default = "memcached"] - # EXPERIMENTAL: Comma separated addresses list in DNS Service Discovery - # format: - # https://cortexmetrics.io/docs/configuration/arguments/#dns-service-discovery + # Comma separated addresses list in DNS Service Discovery format: + # https://grafana.com/docs/mimir/latest/configure/about-dns-service-discovery/#supported-discovery-modes # CLI flag: -.memcached.addresses [addresses: | default = ""] diff --git a/docs/sources/configure/examples/configuration-examples.md b/docs/sources/configure/examples/configuration-examples.md index 6fb77d78a6c4f..eaaf659049dee 100644 --- a/docs/sources/configure/examples/configuration-examples.md +++ b/docs/sources/configure/examples/configuration-examples.md @@ -30,7 +30,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -71,7 +71,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: s3 - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -127,7 +127,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: gcs - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -154,7 +154,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: bos - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -203,12 +203,12 @@ schema_config: period: 24h prefix: index_ - # Starting from 2023-6-15 Loki should store indexes on TSDB with the v12 schema + # Starting from 2023-6-15 Loki should store indexes on TSDB with the v13 schema # using daily periodic tables and chunks on AWS S3. 
- from: "2023-06-15" store: tsdb object_store: s3 - schema: v12 + schema: v13 index: period: 24h prefix: index_ @@ -227,7 +227,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: alibabacloud - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -297,7 +297,7 @@ schema_config: - from: 2020-10-01 store: tsdb object_store: cos - schema: v12 + schema: v13 index: period: 24h prefix: index_ @@ -327,7 +327,7 @@ schema_config: - from: 2020-10-01 store: tsdb object_store: cos - schema: v12 + schema: v13 index: period: 24h prefix: index_ @@ -364,7 +364,7 @@ schema_config: - from: 2020-10-01 store: tsdb object_store: cos - schema: v12 + schema: v13 index: period: 24h prefix: index_ diff --git a/docs/sources/configure/examples/yaml/1-Local-Configuration-Example.yaml b/docs/sources/configure/examples/yaml/1-Local-Configuration-Example.yaml index e4b3ab0c6a28c..4ccba0868d2e2 100644 --- a/docs/sources/configure/examples/yaml/1-Local-Configuration-Example.yaml +++ b/docs/sources/configure/examples/yaml/1-Local-Configuration-Example.yaml @@ -19,7 +19,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/docs/sources/configure/examples/yaml/11-COS-HMAC-Example.yaml b/docs/sources/configure/examples/yaml/11-COS-HMAC-Example.yaml index 642d0f3316dae..98d5f160926bb 100644 --- a/docs/sources/configure/examples/yaml/11-COS-HMAC-Example.yaml +++ b/docs/sources/configure/examples/yaml/11-COS-HMAC-Example.yaml @@ -5,7 +5,7 @@ schema_config: - from: 2020-10-01 store: tsdb object_store: cos - schema: v12 + schema: v13 index: period: 24h prefix: index_ diff --git a/docs/sources/configure/examples/yaml/12-COS-APIKey-Example.yaml b/docs/sources/configure/examples/yaml/12-COS-APIKey-Example.yaml index d50d1c39863cb..2dfa640dae2d9 100644 --- a/docs/sources/configure/examples/yaml/12-COS-APIKey-Example.yaml +++ b/docs/sources/configure/examples/yaml/12-COS-APIKey-Example.yaml @@ 
-5,7 +5,7 @@ schema_config: - from: 2020-10-01 store: tsdb object_store: cos - schema: v12 + schema: v13 index: period: 24h prefix: index_ diff --git a/docs/sources/configure/examples/yaml/13-COS-Trusted-Profile-Example.yaml b/docs/sources/configure/examples/yaml/13-COS-Trusted-Profile-Example.yaml index 90e6b9673353d..f6f14a4577bfb 100644 --- a/docs/sources/configure/examples/yaml/13-COS-Trusted-Profile-Example.yaml +++ b/docs/sources/configure/examples/yaml/13-COS-Trusted-Profile-Example.yaml @@ -11,7 +11,7 @@ schema_config: - from: 2020-10-01 store: tsdb object_store: cos - schema: v12 + schema: v13 index: period: 24h prefix: index_ diff --git a/docs/sources/configure/examples/yaml/2-S3-Cluster-Example.yaml b/docs/sources/configure/examples/yaml/2-S3-Cluster-Example.yaml index 4dd9a3ae04f03..cde37ed5b2e8a 100644 --- a/docs/sources/configure/examples/yaml/2-S3-Cluster-Example.yaml +++ b/docs/sources/configure/examples/yaml/2-S3-Cluster-Example.yaml @@ -20,7 +20,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: s3 - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/docs/sources/configure/examples/yaml/4-GCS-Example.yaml b/docs/sources/configure/examples/yaml/4-GCS-Example.yaml index 7a227d064a417..330e94006aeef 100644 --- a/docs/sources/configure/examples/yaml/4-GCS-Example.yaml +++ b/docs/sources/configure/examples/yaml/4-GCS-Example.yaml @@ -19,7 +19,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: gcs - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/docs/sources/configure/examples/yaml/5-BOS-Example.yaml b/docs/sources/configure/examples/yaml/5-BOS-Example.yaml index 3a024ee849c70..be25b802a0226 100644 --- a/docs/sources/configure/examples/yaml/5-BOS-Example.yaml +++ b/docs/sources/configure/examples/yaml/5-BOS-Example.yaml @@ -6,7 +6,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: bos - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git 
a/docs/sources/configure/examples/yaml/7-Schema-Migration-Snippet.yaml b/docs/sources/configure/examples/yaml/7-Schema-Migration-Snippet.yaml index 37c12e034ba26..dd1976b0107d7 100644 --- a/docs/sources/configure/examples/yaml/7-Schema-Migration-Snippet.yaml +++ b/docs/sources/configure/examples/yaml/7-Schema-Migration-Snippet.yaml @@ -11,12 +11,12 @@ schema_config: period: 24h prefix: index_ - # Starting from 2023-6-15 Loki should store indexes on TSDB with the v12 schema + # Starting from 2023-6-15 Loki should store indexes on TSDB with the v13 schema # using daily periodic tables and chunks on AWS S3. - from: "2023-06-15" store: tsdb object_store: s3 - schema: v12 + schema: v13 index: period: 24h prefix: index_ diff --git a/docs/sources/configure/examples/yaml/8-alibaba-cloud-storage-Snippet.yaml b/docs/sources/configure/examples/yaml/8-alibaba-cloud-storage-Snippet.yaml index 48b7edeb7b0bb..b26f1198d4c81 100644 --- a/docs/sources/configure/examples/yaml/8-alibaba-cloud-storage-Snippet.yaml +++ b/docs/sources/configure/examples/yaml/8-alibaba-cloud-storage-Snippet.yaml @@ -5,7 +5,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: alibabacloud - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/docs/sources/get-started/labels/structured-metadata.md b/docs/sources/get-started/labels/structured-metadata.md index 587306b2d8526..4864576a3fefb 100644 --- a/docs/sources/get-started/labels/structured-metadata.md +++ b/docs/sources/get-started/labels/structured-metadata.md @@ -21,8 +21,8 @@ Structured metadata can also be used to query commonly needed metadata from log You should only use structured metadata in the following situations: - • If you are ingesting data in OpenTelemetry format, using the Grafana Agent or an OpenTelemetry Collector. Structured metadata was designed to support native ingestion of OpenTelemetry data. 
- • If you have high cardinality metadata that should not be used as a label and does not exist in the log line. Some examples might include `process_id` or `thread_id` or Kubernetes pod names. +- If you are ingesting data in OpenTelemetry format, using the Grafana Agent or an OpenTelemetry Collector. Structured metadata was designed to support native ingestion of OpenTelemetry data. +- If you have high cardinality metadata that should not be used as a label and does not exist in the log line. Some examples might include `process_id` or `thread_id` or Kubernetes pod names. It is an antipattern to extract information that already exists in your log lines and put it into structured metadata. diff --git a/docs/sources/operations/storage/retention.md b/docs/sources/operations/storage/retention.md index 96880a43374e6..8a8e86ad52337 100644 --- a/docs/sources/operations/storage/retention.md +++ b/docs/sources/operations/storage/retention.md @@ -72,7 +72,7 @@ schema_config: period: 24h prefix: index_ object_store: gcs - schema: v12 + schema: v13 store: tsdb storage_config: tsdb_shipper: @@ -238,7 +238,7 @@ schema_config: - from: 2018-04-15 store: tsdb object_store: gcs - schema: v12 + schema: v13 index: prefix: loki_index_ period: 24h diff --git a/docs/sources/operations/storage/schema/_index.md b/docs/sources/operations/storage/schema/_index.md index e5a14b3ed8a61..a368497414de1 100644 --- a/docs/sources/operations/storage/schema/_index.md +++ b/docs/sources/operations/storage/schema/_index.md @@ -23,7 +23,7 @@ Here are items to consider when changing the schema; if schema changes are not d Be aware of your relation to UTC when using the current date. Make sure that UTC 00:00:00 has not already passed for your current date. - As an example, assume that the current date is 2022-04-10, and you want to update to the v12 schema, so you restart Loki with 2022-04-11 as the `from` date for the new schema. 
If you forget to take into account that your timezone is UTC -5:00 and it’s currently 20:00 hours in your local timezone, that is actually 2022-04-11T01:00:00 UTC. When Loki starts it will see the new schema and begin to write and store objects following that new schema. If you then try to query data that was written between 00:00:00 and 01:00:00 UTC, Loki will use the new schema and the data will be unreadable, because it was created with the previous schema. + As an example, assume that the current date is 2022-04-10, and you want to update to the v13 schema, so you restart Loki with 2022-04-11 as the `from` date for the new schema. If you forget to take into account that your timezone is UTC -5:00 and it’s currently 20:00 hours in your local timezone, that is actually 2022-04-11T01:00:00 UTC. When Loki starts it will see the new schema and begin to write and store objects following that new schema. If you then try to query data that was written between 00:00:00 and 01:00:00 UTC, Loki will use the new schema and the data will be unreadable, because it was created with the previous schema. - You cannot undo or roll back a schema change. @@ -46,6 +46,6 @@ schema_config: period: 24h prefix: loki_ops_index_ object_store: gcs - schema: v12 + schema: v13 store: tsdb ``` diff --git a/docs/sources/operations/storage/tsdb.md b/docs/sources/operations/storage/tsdb.md index 82e3a6dbf33db..d7c315bdf5e16 100644 --- a/docs/sources/operations/storage/tsdb.md +++ b/docs/sources/operations/storage/tsdb.md @@ -29,7 +29,7 @@ schema_config: period: 24h prefix: index_ object_store: gcs - schema: v12 + schema: v13 store: tsdb storage_config: diff --git a/docs/sources/operations/upgrade.md b/docs/sources/operations/upgrade.md new file mode 100644 index 0000000000000..5a0be8626e6a1 --- /dev/null +++ b/docs/sources/operations/upgrade.md @@ -0,0 +1,11 @@ +--- +title: Upgrade +description: Links to Loki upgrade documentation. 
+weight: +--- + +# Upgrade + +- [Upgrade](https://grafana.com/docs/loki/latest/setup/upgrade/) from one Loki version to a newer version. + +- [Upgrade Helm](https://grafana.com/docs/loki/latest/setup/upgrade/) from Helm v2.x to Helm v3.x. diff --git a/docs/sources/release-notes/v2-9.md b/docs/sources/release-notes/v2-9.md index 68d3da85bc4dd..4140643316297 100644 --- a/docs/sources/release-notes/v2-9.md +++ b/docs/sources/release-notes/v2-9.md @@ -32,16 +32,50 @@ Grafana Labs is excited to announce the release of Loki 2.9.0 Here's a summary o - The `-ingester.unordered-writes` CLI flag is deprecated and will always default to `true` in the next major release. - For the full list of deprecations, see CHANGELOG.md - ## Bug fixes +### 2.9.6 (2024-03-21) + +* Fixed Promtail failures connecting to local Loki installation ([#12184](https://github.com/grafana/loki/issues/12184)) ([8585e35](https://github.com/grafana/loki/commit/8585e3537375c0deb11462d7256f5da23228f5e1)). +* Fixed an issue when using IPv6 where IPv6 addresses were not properly joined with ports. Use `net.JoinHostPort` to support IPv6 addresses. ([#10650](https://github.com/grafana/loki/issues/10650)) ([#11870](https://github.com/grafana/loki/issues/11870)) ([7def3b4](https://github.com/grafana/loki/commit/7def3b4e774252e13ba154ca13f72816a84da7dd)). +* Updated google.golang.org/protobuf to v1.33.0 ([#12269](https://github.com/grafana/loki/issues/12269)) ([#12287](https://github.com/grafana/loki/issues/12287)) ([3186520](https://github.com/grafana/loki/commit/318652035059fdaa40405f263fc9e37b4d38b157)). + +For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). + +### 2.9.5 (2024-02-28) + +* Bumped base images and Go dependencies to address CVEs ([#12092](https://github.com/grafana/loki/issues/12092)) ([eee3598](https://github.com/grafana/loki/commit/eee35983f38fe04543b169ffa8ece76c23c4217b)). 
+ +For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). + +### 2.9.4 (2024-01-24) + +- Fixed a couple of data races that can cause panics due to concurrent read-write access of tenant configs. +- Fixed a bug in the log results cache. +- Fixed the cache to atomically check background cache size limit correctly. +- Fixed the discrepancy between the semantics of logs and metrics queries. +- Fixed promtail default scrape config causing CPU and memory load. +- Updated golang.org/x/crypto to v0.18.0. + +For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). + +### 2.9.3 (2023-12-11) + +* Upgraded otelhttp from 0.40.0 -> 0.44.0 and base alpine image from 3.18.3 -> 3.18.5 to fix a few CVES (CVE-2023-45142, CVE-2022-21698, CVE-2023-5363). +* Fixed querying ingester for label values with a matcher (previously didn't respect the matcher). +* Ensured all lifecycler cfgs ref a valid IPv6 addr and port combination. + +For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). + ### 2.9.2 (2023-10-16) -* Upgrade go to v1.21.3, golang.org/x/net to v0.17.0 and grpc-go to v1.56.3 to patch CVE-2023-39325 / CVE-2023-44487 +* Upgraded go to v1.21.3, golang.org/x/net to v0.17.0 and grpc-go to v1.56.3 to patch CVE-2023-39325 / CVE-2023-44487 -For a full list of all changes and fixes, look at the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). +For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). ### 2.9.1 (2023-09-14) -* Update Docker base images to mitigate security vulnerability CVE-2022-48174 -* Fix bugs in indexshipper (`tsdb`, `boltdb-shipper`) that could result in not showing all ingested logs in query results. 
+* Updated Docker base images to mitigate security vulnerability CVE-2022-48174 +* Fixed bugs in indexshipper (`tsdb`, `boltdb-shipper`) that could result in not showing all ingested logs in query results. + +For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). \ No newline at end of file diff --git a/docs/sources/send-data/_index.md b/docs/sources/send-data/_index.md index 981d98fe1e12c..f3106edf2352c 100644 --- a/docs/sources/send-data/_index.md +++ b/docs/sources/send-data/_index.md @@ -59,6 +59,7 @@ These third-party clients also enable sending logs to Loki: - [promtail-client](https://github.com/afiskon/promtail-client) (Go) - [push-to-loki.py](https://github.com/sleleko/devops-kb/blob/master/python/push-to-loki.py) (Python 3) - [python-logging-loki](https://pypi.org/project/python-logging-loki/) (Python 3) +- [nextlog](https://pypi.org/project/nextlog/) (Python 3) - [Serilog-Sinks-Loki](https://github.com/JosephWoodward/Serilog-Sinks-Loki) (C#) - [Vector Loki Sink](https://vector.dev/docs/reference/configuration/sinks/loki/) - [winston-loki](https://github.com/JaniAnttonen/winston-loki) (JS) diff --git a/docs/sources/send-data/otel/_index.md b/docs/sources/send-data/otel/_index.md index dac87a4fb5d56..fb2bd7b2665ac 100644 --- a/docs/sources/send-data/otel/_index.md +++ b/docs/sources/send-data/otel/_index.md @@ -33,7 +33,7 @@ And enable it in `service.pipelines`: ```yaml service: pipelines: - metrics: + logs: receivers: [...] processors: [...] exporters: [..., otlphttp] @@ -57,7 +57,7 @@ exporters: service: extensions: [basicauth/otlp] pipelines: - metrics: + logs: receivers: [...] processors: [...] 
exporters: [..., otlphttp] diff --git a/docs/sources/send-data/promtail/stages/structured_metadata.md b/docs/sources/send-data/promtail/stages/structured_metadata.md index 70e671bb8ae17..7337bef023b9a 100644 --- a/docs/sources/send-data/promtail/stages/structured_metadata.md +++ b/docs/sources/send-data/promtail/stages/structured_metadata.md @@ -14,6 +14,8 @@ modifies the [structured metadata]({{< relref "../../../get-started/labels/struc {{% admonition type="warning" %}} Structured metadata will be rejected by Loki unless you enable the `allow_structured_metadata` per tenant configuration (in the `limits_config`). + +Structured metadata was added to chunk format V4 which is used if the schema version is greater or equal to **13**. (See Schema Config for more details about schema versions. ) {{% /admonition %}} ## Schema @@ -47,7 +49,7 @@ For the given pipeline: Given the following log line: ```json -{"log":"log message\n","stream":"stderr","traceID":"0242ac120002",time":"2019-04-30T02:12:41.8443515Z"} +{"log":"log message\n","stream":"stderr","traceID":"0242ac120002","time":"2019-04-30T02:12:41.8443515Z"} ``` The first stage would extract `stream` with a value of `stderr` and `traceID` with a value of `0242ac120002` into diff --git a/docs/sources/setup/install/helm/install-monolithic/_index.md b/docs/sources/setup/install/helm/install-monolithic/_index.md index 01a3d6d357e10..e85d6a52159b5 100644 --- a/docs/sources/setup/install/helm/install-monolithic/_index.md +++ b/docs/sources/setup/install/helm/install-monolithic/_index.md @@ -60,10 +60,17 @@ If you set the `singleBinary.replicas` value to 2 or more, this chart configures ruler: loki-ruler admin: loki-admin type: 's3' + bucketNames: + chunks: loki-chunks + ruler: loki-ruler + admin: loki-admin s3: endpoint: foo.aws.com + region: secretAccessKey: supersecret accessKeyId: secret + s3ForcePathStyle: false + insecure: false singleBinary: replicas: 3 ``` diff --git a/docs/sources/setup/install/helm/reference.md 
b/docs/sources/setup/install/helm/reference.md index 0871fe6e20f8c..37d13c936f126 100644 --- a/docs/sources/setup/install/helm/reference.md +++ b/docs/sources/setup/install/helm/reference.md @@ -2861,6 +2861,15 @@ null
 {}
 
+ + + + monitoring.selfMonitoring.podLogs.additionalPipelineStages + list + Additional pipeline stages to process logs after scraping https://grafana.com/docs/agent/latest/operator/api/#pipelinestagespec-a-namemonitoringgrafanacomv1alpha1pipelinestagespeca +
+[]
+
diff --git a/docs/sources/setup/migrate/migrate-to-tsdb/_index.md b/docs/sources/setup/migrate/migrate-to-tsdb/_index.md index b9a1f478d359d..3407345b4acbf 100644 --- a/docs/sources/setup/migrate/migrate-to-tsdb/_index.md +++ b/docs/sources/setup/migrate/migrate-to-tsdb/_index.md @@ -39,7 +39,7 @@ schema_config: - from: 2023-10-20 ① store: tsdb ② object_store: filesystem ③ - schema: v12 ④ + schema: v13 ④ index: prefix: index_ period: 24h @@ -51,7 +51,7 @@ schema_config: ③ This sample configuration uses filesystem as the storage in both the periods. If you want to use a different storage for the TSDB index and chunks, you can specify a different `object_store` in the new period. -④ Update the schema to v12 which is the recommended version at the time of writing. Please refer to the [configure page]({{< relref "../../../configure#period_config" >}}) for the current recommend version. +④ Update the schema to v13 which is the recommended version at the time of writing. Please refer to the [configure page]({{< relref "../../../configure#period_config" >}}) for the current recommend version. ### Configure TSDB shipper diff --git a/docs/sources/storage/_index.md b/docs/sources/storage/_index.md index e7517e5c00353..2c967d2fc4b92 100644 --- a/docs/sources/storage/_index.md +++ b/docs/sources/storage/_index.md @@ -127,7 +127,7 @@ This storage type for indexes is deprecated and may be removed in future major v ## Schema Config Loki aims to be backwards compatible and over the course of its development has had many internal changes that facilitate better and more efficient storage/querying. Loki allows incrementally upgrading to these new storage _schemas_ and can query across them transparently. This makes upgrading a breeze. 
-For instance, this is what it looks like when migrating from BoltDB with v11 schema to TSDB with v12 schema starting 2023-07-01: +For instance, this is what it looks like when migrating from BoltDB with v11 schema to TSDB with v13 schema starting 2023-07-01: ```yaml schema_config: @@ -142,13 +142,13 @@ schema_config: - from: 2023-07-01 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h ``` -For all data ingested before 2023-07-01, Loki used BoltDB with the v11 schema, and then switched after that point to the more effective TSDB with the v12 schema. This dramatically simplifies upgrading, ensuring it's simple to take advantage of new storage optimizations. These configs should be immutable for as long as you care about retention. +For all data ingested before 2023-07-01, Loki used BoltDB with the v11 schema, and then switched after that point to the more effective TSDB with the v13 schema. This dramatically simplifies upgrading, ensuring it's simple to take advantage of new storage optimizations. These configs should be immutable for as long as you care about retention. ## Table Manager (deprecated) @@ -190,7 +190,7 @@ When a new schema is released and you want to gain the advantages it provides, y First, you'll want to create a new [period_config]({{< relref "../configure#period_config" >}}) entry in your [schema_config]({{< relref "../configure#schema_config" >}}). The important thing to remember here is to set this at some point in the _future_ and then roll out the config file changes to Loki. This allows the table manager to create the required table in advance of writes and ensures that existing data isn't queried as if it adheres to the new schema. 
-As an example, let's say it's 2023-07-14 and we want to start using the `v12` schema on the 20th: +As an example, let's say it's 2023-07-14 and we want to start using the `v13` schema on the 20th: ```yaml schema_config: configs: @@ -204,7 +204,7 @@ schema_config: - from: 2023-07-20 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -243,7 +243,7 @@ schema_config: - from: 2020-07-01 store: tsdb object_store: gcs - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -266,7 +266,7 @@ schema_config: - from: 2020-07-01 store: tsdb object_store: aws - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -352,7 +352,7 @@ schema_config: period: 24h prefix: index_ object_store: azure - schema: v12 + schema: v13 store: tsdb storage_config: azure: @@ -388,7 +388,7 @@ schema_config: period: 24h prefix: index_ object_store: azure - schema: v12 + schema: v13 store: tsdb storage_config: azure: @@ -420,7 +420,7 @@ schema_config: period: 24h prefix: loki_index_ object_store: cos - schema: v12 + schema: v13 store: tsdb storage_config: @@ -489,7 +489,7 @@ schema_config: - from: 2020-07-01 store: tsdb object_store: s3 - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/examples/getting-started/loki-config.yaml b/examples/getting-started/loki-config.yaml index d4d00904b2f16..73ca66f78796a 100644 --- a/examples/getting-started/loki-config.yaml +++ b/examples/getting-started/loki-config.yaml @@ -9,7 +9,7 @@ schema_config: - from: 2021-08-01 store: tsdb object_store: s3 - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/go.mod b/go.mod index 2fad4fa4375f8..0d0659a92220b 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/grafana/loki +module github.com/grafana/loki/v3 go 1.21 diff --git a/integration/client/client.go b/integration/client/client.go index 1ad94fd0edbb6..a749789036f07 100644 --- a/integration/client/client.go +++ 
b/integration/client/client.go @@ -23,9 +23,9 @@ import ( "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/pdata/plog/plogotlp" - logcli "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/util/unmarshal" + logcli "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/util/unmarshal" ) const requestTimeout = 30 * time.Second diff --git a/integration/cluster/cluster.go b/integration/cluster/cluster.go index c7a0ba2d17dd7..446f84cfbb130 100644 --- a/integration/cluster/cluster.go +++ b/integration/cluster/cluster.go @@ -23,14 +23,14 @@ import ( "github.com/prometheus/common/model" "gopkg.in/yaml.v2" - "github.com/grafana/loki/integration/util" - - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/cfg" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/integration/util" + + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/cfg" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) var ( @@ -62,6 +62,7 @@ limits_config: ingestion_burst_size_mb: 50 reject_old_samples: false allow_structured_metadata: true + discover_service_name: otlp_config: resource_attributes: attributes_config: @@ -81,8 +82,6 @@ storage_config: cache_location: {{.dataPath}}/tsdb-cache bloom_shipper: working_directory: {{.dataPath}}/bloom-shipper - blocks_downloading_queue: - workers_count: 1 bloom_gateway: enabled: false diff --git a/integration/loki_micro_services_delete_test.go b/integration/loki_micro_services_delete_test.go index d77d7ab115083..ce83cdb4d9f5a 100644 --- 
a/integration/loki_micro_services_delete_test.go +++ b/integration/loki_micro_services_delete_test.go @@ -13,13 +13,14 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + "github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" "github.com/grafana/loki/pkg/push" - "github.com/grafana/loki/pkg/storage" + + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage" ) type pushRequest struct { diff --git a/integration/loki_micro_services_test.go b/integration/loki_micro_services_test.go index 3480757f07930..611fafb15ab7a 100644 --- a/integration/loki_micro_services_test.go +++ b/integration/loki_micro_services_test.go @@ -21,12 +21,12 @@ import ( "golang.org/x/exp/slices" "google.golang.org/protobuf/proto" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + "github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/util/httpreq" - "github.com/grafana/loki/pkg/util/querylimits" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/querylimits" ) func TestMicroServicesIngestQuery(t *testing.T) { diff --git a/integration/loki_rule_eval_test.go b/integration/loki_rule_eval_test.go index 5ee9bf97ac566..00caeef8883c2 100644 --- a/integration/loki_rule_eval_test.go +++ b/integration/loki_rule_eval_test.go @@ -14,10 +14,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + 
"github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" - "github.com/grafana/loki/pkg/ruler" + "github.com/grafana/loki/v3/pkg/ruler" ) // TestLocalRuleEval tests that rules are evaluated locally with an embedded query engine diff --git a/integration/loki_simple_scalable_test.go b/integration/loki_simple_scalable_test.go index f831dcc406c4c..070d3f918a14d 100644 --- a/integration/loki_simple_scalable_test.go +++ b/integration/loki_simple_scalable_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + "github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" ) func TestSimpleScalable_IngestQuery(t *testing.T) { diff --git a/integration/loki_single_binary_test.go b/integration/loki_single_binary_test.go index 7e26f9c4caf72..6aaf64f5b4152 100644 --- a/integration/loki_single_binary_test.go +++ b/integration/loki_single_binary_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + "github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" ) func TestSingleBinaryIngestQuery(t *testing.T) { diff --git a/integration/multi_tenant_queries_test.go b/integration/multi_tenant_queries_test.go index 4c13d6f9e6249..339b380d1b01e 100644 --- a/integration/multi_tenant_queries_test.go +++ b/integration/multi_tenant_queries_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + "github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" ) func TestMultiTenantQuery(t 
*testing.T) { diff --git a/integration/per_request_limits_test.go b/integration/per_request_limits_test.go index 34d9c7e99f44c..482ff0e93fcfa 100644 --- a/integration/per_request_limits_test.go +++ b/integration/per_request_limits_test.go @@ -10,10 +10,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + "github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" - "github.com/grafana/loki/pkg/util/querylimits" + "github.com/grafana/loki/v3/pkg/util/querylimits" ) func TestPerRequestLimits(t *testing.T) { diff --git a/operator/go.mod b/operator/go.mod index f580ea5b6d6d2..4e6b1d65bd67f 100644 --- a/operator/go.mod +++ b/operator/go.mod @@ -150,7 +150,7 @@ require ( google.golang.org/genproto/googleapis/api v0.0.0-20231127180814-3a041ad873d4 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f // indirect google.golang.org/grpc v1.59.0 // indirect - google.golang.org/protobuf v1.32.0 // indirect + google.golang.org/protobuf v1.33.0 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect k8s.io/apiextensions-apiserver v0.28.4 // indirect diff --git a/operator/go.sum b/operator/go.sum index 220e1e2ebb5e9..6d861a1180a9d 100644 --- a/operator/go.sum +++ b/operator/go.sum @@ -1916,8 +1916,8 @@ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= -google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf 
v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/pkg/analytics/reporter.go b/pkg/analytics/reporter.go index 85050237d99ad..d58e727aac7fb 100644 --- a/pkg/analytics/reporter.go +++ b/pkg/analytics/reporter.go @@ -18,8 +18,8 @@ import ( "github.com/grafana/dskit/services" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/util/build" ) const ( diff --git a/pkg/analytics/reporter_test.go b/pkg/analytics/reporter_test.go index 183f2d8246202..a986ac66de05d 100644 --- a/pkg/analytics/reporter_test.go +++ b/pkg/analytics/reporter_test.go @@ -14,7 +14,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" ) func Test_LeaderElection(t *testing.T) { diff --git a/pkg/analytics/seed_test.go b/pkg/analytics/seed_test.go index 4229c508d2dfe..366789354d69d 100644 --- a/pkg/analytics/seed_test.go +++ b/pkg/analytics/seed_test.go @@ -15,7 +15,7 @@ import ( "github.com/grafana/dskit/services" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" ) type dnsProviderMock struct { diff --git a/pkg/analytics/stats.go b/pkg/analytics/stats.go index 2479ae28c804c..e4ea068f0cabb 100644 --- a/pkg/analytics/stats.go +++ 
b/pkg/analytics/stats.go @@ -14,7 +14,7 @@ import ( "sync" "time" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/build" "github.com/cespare/xxhash/v2" jsoniter "github.com/json-iterator/go" diff --git a/pkg/analytics/stats_test.go b/pkg/analytics/stats_test.go index 8c676af358652..b2ba30db7ca85 100644 --- a/pkg/analytics/stats_test.go +++ b/pkg/analytics/stats_test.go @@ -12,7 +12,7 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/build" ) func Test_BuildReport(t *testing.T) { diff --git a/pkg/bloomcompactor/batch.go b/pkg/bloomcompactor/batch.go index a9bf2c6c0cc80..4247fc1e4b52c 100644 --- a/pkg/bloomcompactor/batch.go +++ b/pkg/bloomcompactor/batch.go @@ -9,12 +9,12 @@ import ( "github.com/grafana/dskit/multierror" "golang.org/x/exp/slices" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - logql_log "github.com/grafana/loki/pkg/logql/log" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + logql_log "github.com/grafana/loki/v3/pkg/logql/log" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) type Fetcher[A, B any] interface { diff --git a/pkg/bloomcompactor/batch_test.go b/pkg/bloomcompactor/batch_test.go index bd2cb3378cfba..d64b8313e1066 100644 --- a/pkg/bloomcompactor/batch_test.go +++ b/pkg/bloomcompactor/batch_test.go @@ -7,8 +7,8 @@ import ( "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + 
"github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) func TestBatchedLoader(t *testing.T) { diff --git a/pkg/bloomcompactor/bloomcompactor.go b/pkg/bloomcompactor/bloomcompactor.go index ddfe552cb2ad8..3a99a1d1ad866 100644 --- a/pkg/bloomcompactor/bloomcompactor.go +++ b/pkg/bloomcompactor/bloomcompactor.go @@ -16,13 +16,13 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/bloomutils" - "github.com/grafana/loki/pkg/storage" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - util_ring "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/bloomutils" + "github.com/grafana/loki/v3/pkg/storage" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + util_ring "github.com/grafana/loki/v3/pkg/util/ring" ) var ( @@ -48,15 +48,15 @@ type Compactor struct { tsdbStore TSDBStore // TODO(owen-d): ShardingStrategy - controller *SimpleBloomController + controller *SimpleBloomController + retentionManager *RetentionManager // temporary workaround until bloomStore has implemented read/write shipper interface bloomStore bloomshipper.Store sharding util_ring.TenantSharding - metrics *Metrics - btMetrics *v1.Metrics + metrics *Metrics } func New( @@ -65,9 +65,10 @@ func New( storeCfg storage.Config, clientMetrics storage.ClientMetrics, fetcherProvider stores.ChunkFetcherProvider, - sharding util_ring.TenantSharding, + ring ring.ReadRing, + ringLifeCycler *ring.BasicLifecycler, limits Limits, - store bloomshipper.Store, + store bloomshipper.StoreWithMetrics, logger log.Logger, r prometheus.Registerer, ) (*Compactor, error) { @@ -75,9 
+76,10 @@ func New( cfg: cfg, schemaCfg: schemaCfg, logger: logger, - sharding: sharding, + sharding: util_ring.NewTenantShuffleSharding(ring, ringLifeCycler, limits.BloomCompactorShardSize), limits: limits, bloomStore: store, + metrics: NewMetrics(r, store.BloomMetrics()), } tsdbStore, err := NewTSDBStores(schemaCfg, storeCfg, clientMetrics, logger) @@ -86,10 +88,6 @@ func New( } c.tsdbStore = tsdbStore - // initialize metrics - c.btMetrics = v1.NewMetrics(prometheus.WrapRegistererWithPrefix("loki_bloom_tokenizer_", r)) - c.metrics = NewMetrics(r, c.btMetrics) - chunkLoader := NewStoreChunkLoader( fetcherProvider, c.metrics, @@ -104,6 +102,15 @@ func New( c.logger, ) + c.retentionManager = NewRetentionManager( + c.cfg.RetentionConfig, + c.limits, + c.bloomStore, + newFirstTokenRetentionSharding(ring, ringLifeCycler), + c.metrics, + c.logger, + ) + c.Service = services.NewBasicService(c.starting, c.running, c.stopping) return c, nil } @@ -218,10 +225,17 @@ func (c *Compactor) runOne(ctx context.Context) error { c.metrics.compactionsStarted.Inc() start := time.Now() level.Info(c.logger).Log("msg", "running bloom compaction", "workers", c.cfg.WorkerParallelism) - var workersErr error + var workersErr, retentionErr error var wg sync.WaitGroup input := make(chan *tenantTableRange) + // Launch retention (will return instantly if retention is disabled or not owned by this compactor) + wg.Add(1) + go func() { + retentionErr = c.retentionManager.Apply(ctx) + wg.Done() + }() + tables := c.tables(time.Now()) level.Debug(c.logger).Log("msg", "loaded tables", "tables", tables.TotalDays()) @@ -240,7 +254,7 @@ func (c *Compactor) runOne(ctx context.Context) error { wg.Wait() duration := time.Since(start) - err = multierror.New(workersErr, err, ctx.Err()).Err() + err = multierror.New(retentionErr, workersErr, err, ctx.Err()).Err() if err != nil { level.Error(c.logger).Log("msg", "compaction iteration failed", "err", err, "duration", duration) @@ -258,12 +272,12 @@ func (c 
*Compactor) runOne(ctx context.Context) error { func (c *Compactor) tables(ts time.Time) *dayRangeIterator { // adjust the minimum by one to make it inclusive, which is more intuitive // for a configuration variable - adjustedMin := min(c.cfg.MinTableCompactionPeriod - 1) - minCompactionPeriod := time.Duration(adjustedMin) * config.ObjectStorageIndexRequiredPeriod - maxCompactionPeriod := time.Duration(c.cfg.MaxTableCompactionPeriod) * config.ObjectStorageIndexRequiredPeriod + adjustedMin := min(c.cfg.MinTableOffset - 1) + minCompactionDelta := time.Duration(adjustedMin) * config.ObjectStorageIndexRequiredPeriod + maxCompactionDelta := time.Duration(c.cfg.MaxTableOffset) * config.ObjectStorageIndexRequiredPeriod - from := ts.Add(-maxCompactionPeriod).UnixNano() / int64(config.ObjectStorageIndexRequiredPeriod) * int64(config.ObjectStorageIndexRequiredPeriod) - through := ts.Add(-minCompactionPeriod).UnixNano() / int64(config.ObjectStorageIndexRequiredPeriod) * int64(config.ObjectStorageIndexRequiredPeriod) + from := ts.Add(-maxCompactionDelta).UnixNano() / int64(config.ObjectStorageIndexRequiredPeriod) * int64(config.ObjectStorageIndexRequiredPeriod) + through := ts.Add(-minCompactionDelta).UnixNano() / int64(config.ObjectStorageIndexRequiredPeriod) * int64(config.ObjectStorageIndexRequiredPeriod) fromDay := config.NewDayTime(model.TimeFromUnixNano(from)) throughDay := config.NewDayTime(model.TimeFromUnixNano(through)) diff --git a/pkg/bloomcompactor/bloomcompactor_test.go b/pkg/bloomcompactor/bloomcompactor_test.go index db1221fe58d2f..1734ecfa710f7 100644 --- a/pkg/bloomcompactor/bloomcompactor_test.go +++ b/pkg/bloomcompactor/bloomcompactor_test.go @@ -14,14 +14,14 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/bloomutils" - "github.com/grafana/loki/pkg/chunkenc" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - util_log 
"github.com/grafana/loki/pkg/util/log" - lokiring "github.com/grafana/loki/pkg/util/ring" - util_ring "github.com/grafana/loki/pkg/util/ring" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/bloomutils" + "github.com/grafana/loki/v3/pkg/chunkenc" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + util_log "github.com/grafana/loki/v3/pkg/util/log" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + util_ring "github.com/grafana/loki/v3/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/validation" ) func TestCompactor_ownsTenant(t *testing.T) { @@ -149,6 +149,14 @@ type mockLimits struct { shardSize int } +func (m mockLimits) RetentionPeriod(_ string) time.Duration { + panic("implement me") +} + +func (m mockLimits) StreamRetention(_ string) []validation.StreamRetention { + panic("implement me") +} + func (m mockLimits) AllByUserID() map[string]*validation.Limits { panic("implement me") } diff --git a/pkg/bloomcompactor/config.go b/pkg/bloomcompactor/config.go index a80399503f4e7..8b42cd6834710 100644 --- a/pkg/bloomcompactor/config.go +++ b/pkg/bloomcompactor/config.go @@ -7,8 +7,7 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/downloads" - "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/util/ring" ) const ( @@ -22,16 +21,18 @@ type Config struct { // section and the ingester configuration by default). Ring ring.RingConfig `yaml:"ring,omitempty" doc:"description=Defines the ring to be used by the bloom-compactor servers. 
In case this isn't configured, this block supports inheriting configuration from the common ring section."` // Enabled configures whether bloom-compactors should be used to compact index values into bloomfilters - Enabled bool `yaml:"enabled"` - CompactionInterval time.Duration `yaml:"compaction_interval"` - MinTableCompactionPeriod int `yaml:"min_table_compaction_period"` - MaxTableCompactionPeriod int `yaml:"max_table_compaction_period"` - WorkerParallelism int `yaml:"worker_parallelism"` - RetryMinBackoff time.Duration `yaml:"compaction_retries_min_backoff"` - RetryMaxBackoff time.Duration `yaml:"compaction_retries_max_backoff"` - CompactionRetries int `yaml:"compaction_retries"` + Enabled bool `yaml:"enabled"` + CompactionInterval time.Duration `yaml:"compaction_interval"` + MinTableOffset int `yaml:"min_table_offset"` + MaxTableOffset int `yaml:"max_table_offset"` + WorkerParallelism int `yaml:"worker_parallelism"` + RetryMinBackoff time.Duration `yaml:"compaction_retries_min_backoff"` + RetryMaxBackoff time.Duration `yaml:"compaction_retries_max_backoff"` + CompactionRetries int `yaml:"compaction_retries"` MaxCompactionParallelism int `yaml:"max_compaction_parallelism"` + + RetentionConfig RetentionConfig `yaml:"retention"` } // RegisterFlags registers flags for the Bloom-Compactor configuration. @@ -40,19 +41,19 @@ func (cfg *Config) RegisterFlags(f *flag.FlagSet) { f.DurationVar(&cfg.CompactionInterval, "bloom-compactor.compaction-interval", 10*time.Minute, "Interval at which to re-run the compaction operation.") f.IntVar(&cfg.WorkerParallelism, "bloom-compactor.worker-parallelism", 1, "Number of workers to run in parallel for compaction.") // TODO(owen-d): This is a confusing name. Rename it to `min_table_offset` - f.IntVar(&cfg.MinTableCompactionPeriod, "bloom-compactor.min-table-compaction-period", 1, "How many index periods (days) to wait before building bloom filters for a table. 
This can be used to lower cost by not re-writing data to object storage too frequently since recent data changes more often.") + f.IntVar(&cfg.MinTableOffset, "bloom-compactor.min-table-offset", 1, "Newest day-table offset (from today, inclusive) to compact. Increase to lower cost by not re-writing data to object storage too frequently since recent data changes more often at the cost of not having blooms available as quickly.") // TODO(owen-d): ideally we'd set this per tenant based on their `reject_old_samples_max_age` setting, // but due to how we need to discover tenants, we can't do that yet. Tenant+Period discovery is done by // iterating the table periods in object storage and looking for tenants within that period. // In order to have this done dynamically, we'd need to account for tenant specific overrides, which are also // dynamically reloaded. // I'm doing it the simple way for now. - // TODO(owen-d): This is a confusing name. Rename it to `max_table_offset` - f.IntVar(&cfg.MaxTableCompactionPeriod, "bloom-compactor.max-table-compaction-period", 7, "The maximum number of index periods (days) to build bloom filters for a table. This can be used to lower cost by not trying to compact older data which doesn't change. This can be optimized by aligning it with the maximum `reject_old_samples_max_age` setting of any tenant.") + f.IntVar(&cfg.MaxTableOffset, "bloom-compactor.max-table-offset", 2, "Oldest day-table offset (from today, inclusive) to compact. This can be used to lower cost by not trying to compact older data which doesn't change. 
This can be optimized by aligning it with the maximum `reject_old_samples_max_age` setting of any tenant.") f.DurationVar(&cfg.RetryMinBackoff, "bloom-compactor.compaction-retries-min-backoff", 10*time.Second, "Minimum backoff time between retries.") f.DurationVar(&cfg.RetryMaxBackoff, "bloom-compactor.compaction-retries-max-backoff", time.Minute, "Maximum backoff time between retries.") f.IntVar(&cfg.CompactionRetries, "bloom-compactor.compaction-retries", 3, "Number of retries to perform when compaction fails.") f.IntVar(&cfg.MaxCompactionParallelism, "bloom-compactor.max-compaction-parallelism", 1, "Maximum number of tables to compact in parallel. While increasing this value, please make sure compactor has enough disk space allocated to be able to store and compact as many tables.") + cfg.RetentionConfig.RegisterFlags(f) // Ring skipFlags := []string{ @@ -67,8 +68,12 @@ func (cfg *Config) RegisterFlags(f *flag.FlagSet) { } func (cfg *Config) Validate() error { - if cfg.MinTableCompactionPeriod > cfg.MaxTableCompactionPeriod { - return fmt.Errorf("min_compaction_age must be less than or equal to max_compaction_age") + if err := cfg.RetentionConfig.Validate(); err != nil { + return err + } + + if cfg.MinTableOffset > cfg.MaxTableOffset { + return fmt.Errorf("min-table-offset (%d) must be less than or equal to max-table-offset (%d)", cfg.MinTableOffset, cfg.MaxTableOffset) } if cfg.Ring.ReplicationFactor != ringReplicationFactor { return errors.New("Replication factor must not be changed as it will not take effect") @@ -77,7 +82,7 @@ func (cfg *Config) Validate() error { } type Limits interface { - downloads.Limits + RetentionLimits BloomCompactorShardSize(tenantID string) int BloomCompactorEnabled(tenantID string) bool BloomNGramLength(tenantID string) int diff --git a/pkg/bloomcompactor/controller.go b/pkg/bloomcompactor/controller.go index 1f8770cc216fb..37a7c6bc69b69 100644 --- a/pkg/bloomcompactor/controller.go +++ b/pkg/bloomcompactor/controller.go @@ -12,11 
+12,11 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/chunkenc" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/chunkenc" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" ) type SimpleBloomController struct { diff --git a/pkg/bloomcompactor/controller_test.go b/pkg/bloomcompactor/controller_test.go index 7f81c5abe2d2f..2367ee3cc9566 100644 --- a/pkg/bloomcompactor/controller_test.go +++ b/pkg/bloomcompactor/controller_test.go @@ -8,9 +8,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" ) func Test_findGaps(t *testing.T) { diff --git a/pkg/bloomcompactor/metrics.go b/pkg/bloomcompactor/metrics.go index 30bb6d4022e58..d569a4dbfd82d 100644 --- a/pkg/bloomcompactor/metrics.go +++ b/pkg/bloomcompactor/metrics.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) const ( @@ -43,6 +43,13 @@ type Metrics struct { progress prometheus.Gauge timePerTenant *prometheus.CounterVec + + // 
Retention metrics + retentionRunning prometheus.Gauge + retentionTime *prometheus.HistogramVec + retentionDaysPerIteration *prometheus.HistogramVec + retentionTenantsPerIteration *prometheus.HistogramVec + retentionTenantsExceedingLookback prometheus.Gauge } func NewMetrics(r prometheus.Registerer, bloomMetrics *v1.Metrics) *Metrics { @@ -175,6 +182,47 @@ func NewMetrics(r prometheus.Registerer, bloomMetrics *v1.Metrics) *Metrics { Name: "tenant_compaction_seconds_total", Help: "Time spent processing a tenant.", }, []string{tenantLabel}), + + // Retention + retentionRunning: promauto.With(r).NewGauge(prometheus.GaugeOpts{ + Namespace: metricsNamespace, + Subsystem: metricsSubsystem, + Name: "retention_running", + Help: "1 if retention is running in this compactor.", + }), + + retentionTime: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: metricsNamespace, + Subsystem: metricsSubsystem, + Name: "retention_time_seconds", + Help: "Time this retention process took to complete.", + Buckets: prometheus.DefBuckets, + }, []string{"status"}), + + retentionDaysPerIteration: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: metricsNamespace, + Subsystem: metricsSubsystem, + Name: "retention_days_processed", + Help: "Number of days iterated over during the retention process.", + // 1day -> 5 years, 10 buckets + Buckets: prometheus.ExponentialBucketsRange(1, 365*5, 10), + }, []string{"status"}), + + retentionTenantsPerIteration: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: metricsNamespace, + Subsystem: metricsSubsystem, + Name: "retention_tenants_processed", + Help: "Number of tenants on which retention was applied during the retention process.", + // 1 tenant -> 10k tenants, 10 buckets + Buckets: prometheus.ExponentialBucketsRange(1, 10000, 10), + }, []string{"status"}), + + retentionTenantsExceedingLookback: promauto.With(r).NewGauge(prometheus.GaugeOpts{ + Namespace: metricsNamespace, + Subsystem: 
metricsSubsystem, + Name: "retention_tenants_exceeding_lookback", + Help: "Number of tenants with a retention exceeding the configured retention lookback.", + }), } return &m diff --git a/pkg/bloomcompactor/retention.go b/pkg/bloomcompactor/retention.go new file mode 100644 index 0000000000000..7dd30dece9e8a --- /dev/null +++ b/pkg/bloomcompactor/retention.go @@ -0,0 +1,320 @@ +package bloomcompactor + +import ( + "context" + "flag" + "math" + "slices" + "time" + + "github.com/go-kit/log" + "github.com/go-kit/log/level" + "github.com/grafana/dskit/ring" + "github.com/pkg/errors" + "github.com/prometheus/common/model" + + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + storageconfig "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/validation" +) + +type retentionSharding interface { + OwnsRetention() (bool, error) +} + +type firstTokenRetentionSharding struct { + ring ring.ReadRing + ringLifeCycler *ring.BasicLifecycler +} + +func newFirstTokenRetentionSharding(ring ring.ReadRing, ringLifeCycler *ring.BasicLifecycler) *firstTokenRetentionSharding { + return &firstTokenRetentionSharding{ + ring: ring, + ringLifeCycler: ringLifeCycler, + } +} + +// OwnsRetention returns true if the compactor should apply retention. +// This is determined by checking if the compactor owns the smaller token in the ring. +// Note that during a ring topology change, more than one compactor may attempt to apply retention. +// This is fine since retention consists on deleting old data which should be idempotent. 
+func (s *firstTokenRetentionSharding) OwnsRetention() (bool, error) { + rs, err := s.ring.GetAllHealthy(RingOp) + if err != nil { + return false, errors.Wrap(err, "getting ring healthy instances") + } + if len(rs.Instances) == 0 { + return false, errors.New("no healthy instances in ring") + } + + // Lookup the instance with smaller token + instance := slices.MinFunc(rs.Instances, func(a, b ring.InstanceDesc) int { + smallerA := slices.Min(a.GetTokens()) + smallerB := slices.Min(b.GetTokens()) + if smallerA < smallerB { + return -1 + } + if smallerA > smallerB { + return 1 + } + return 0 + }) + + return instance.GetId() == s.ringLifeCycler.GetInstanceID(), nil +} + +type RetentionConfig struct { + Enabled bool `yaml:"enabled"` + MaxLookbackDays int `yaml:"max_lookback_days"` +} + +func (cfg *RetentionConfig) RegisterFlags(f *flag.FlagSet) { + f.BoolVar(&cfg.Enabled, "bloom-compactor.retention.enabled", false, "Enable bloom retention.") + f.IntVar(&cfg.MaxLookbackDays, "bloom-compactor.retention.max-lookback-days", 365, "Max lookback days for retention.") +} + +func (cfg *RetentionConfig) Validate() error { + if !cfg.Enabled { + return nil + } + + if cfg.MaxLookbackDays < 1 { + return errors.New("max lookback days must be a positive number") + } + return nil +} + +type RetentionLimits interface { + RetentionPeriod(userID string) time.Duration + StreamRetention(userID string) []validation.StreamRetention + AllByUserID() map[string]*validation.Limits + DefaultLimits() *validation.Limits +} + +type RetentionManager struct { + cfg RetentionConfig + limits RetentionLimits + bloomStore bloomshipper.Store + sharding retentionSharding + metrics *Metrics + logger log.Logger + lastDayRun storageconfig.DayTime + + // For testing + now func() model.Time +} + +func NewRetentionManager( + cfg RetentionConfig, + limits RetentionLimits, + bloomStore bloomshipper.Store, + sharding retentionSharding, + metrics *Metrics, + logger log.Logger, +) *RetentionManager { + return 
&RetentionManager{ + cfg: cfg, + limits: limits, + bloomStore: bloomStore, + sharding: sharding, + metrics: metrics, + logger: log.With(logger, "subcomponent", "retention-manager"), + now: model.Now, + lastDayRun: storageconfig.NewDayTime(0), + } +} + +func (r *RetentionManager) Apply(ctx context.Context) error { + if !r.cfg.Enabled { + level.Debug(r.logger).Log("msg", "retention is disabled") + return nil + } + + start := r.now() + today := storageconfig.NewDayTime(start) + if !today.After(r.lastDayRun) { + // We've already run retention for today + return nil + } + + ownsRetention, err := r.sharding.OwnsRetention() + if err != nil { + return errors.Wrap(err, "checking if compactor owns retention") + } + if !ownsRetention { + level.Debug(r.logger).Log("msg", "this compactor doesn't own retention") + return nil + } + + level.Info(r.logger).Log("msg", "Applying retention", "today", today.String(), "lastDayRun", r.lastDayRun.String()) + r.metrics.retentionRunning.Set(1) + defer r.metrics.retentionRunning.Set(0) + + tenantsRetention := retentionByTenant(r.limits) + r.reportTenantsExceedingLookback(tenantsRetention) + + defaultLimits := r.limits.DefaultLimits() + defaultRetention := findLongestRetention(time.Duration(defaultLimits.RetentionPeriod), defaultLimits.StreamRetention) + + smallestRetention := smallestEnabledRetention(defaultRetention, tenantsRetention) + if smallestRetention == 0 { + level.Debug(r.logger).Log("msg", "no retention period set for any tenant, skipping retention") + return nil + } + + // Start day is today minus the smallest retention period. + // Note that the last retention day is exclusive. E.g. 30 days retention means we keep 30 days of data, + // thus we start deleting data from the 31st day onwards. 
+ startDay := storageconfig.NewDayTime(today.Add(-smallestRetention)).Dec() + // End day is today minus the max lookback days + endDay := storageconfig.NewDayTime(today.Add(-time.Duration(r.cfg.MaxLookbackDays) * 24 * time.Hour)) + + var daysProcessed int + tenantsRetentionApplied := make(map[string]struct{}, 100) + for day := startDay; day.After(endDay); day = day.Dec() { + dayLogger := log.With(r.logger, "day", day.String()) + bloomClient, err := r.bloomStore.Client(day.ModelTime()) + if err != nil { + level.Error(dayLogger).Log("msg", "failed to get bloom store client", "err", err) + break + } + objectClient := bloomClient.ObjectClient() + + tenants, err := r.bloomStore.TenantFilesForInterval( + ctx, bloomshipper.NewInterval(day.Bounds()), + func(tenant string, _ client.StorageObject) bool { + // Filter out tenants whose retention hasn't expired yet + globalRetention := r.limits.RetentionPeriod(tenant) + streamRetention := r.limits.StreamRetention(tenant) + tenantRetention := findLongestRetention(globalRetention, streamRetention) + expirationDay := storageconfig.NewDayTime(today.Add(-tenantRetention)) + return day.Before(expirationDay) + }, + ) + if err != nil { + r.metrics.retentionTime.WithLabelValues(statusFailure).Observe(time.Since(start.Time()).Seconds()) + r.metrics.retentionDaysPerIteration.WithLabelValues(statusFailure).Observe(float64(daysProcessed)) + r.metrics.retentionTenantsPerIteration.WithLabelValues(statusFailure).Observe(float64(len(tenantsRetentionApplied))) + return errors.Wrap(err, "getting users for period") + } + + if len(tenants) == 0 { + // No tenants for this day means we can break here since previous + // retention iterations have already deleted all tenants + break + } + + for tenant, objects := range tenants { + if len(objects) == 0 { + continue + } + + tenantLogger := log.With(dayLogger, "tenant", tenant) + level.Info(tenantLogger).Log("msg", "applying retention to tenant", "keys", len(objects)) + + // Note: we cannot delete the 
tenant directory directly because it is not an + // actual key in the object store. Instead, we need to delete all keys one by one. + for _, object := range objects { + if err := objectClient.DeleteObject(ctx, object.Key); err != nil { + r.metrics.retentionTime.WithLabelValues(statusFailure).Observe(time.Since(start.Time()).Seconds()) + r.metrics.retentionDaysPerIteration.WithLabelValues(statusFailure).Observe(float64(daysProcessed)) + r.metrics.retentionTenantsPerIteration.WithLabelValues(statusFailure).Observe(float64(len(tenantsRetentionApplied))) + return errors.Wrapf(err, "deleting key %s", object.Key) + } + } + + tenantsRetentionApplied[tenant] = struct{}{} + } + + daysProcessed++ + } + + r.lastDayRun = today + r.metrics.retentionTime.WithLabelValues(statusSuccess).Observe(time.Since(start.Time()).Seconds()) + r.metrics.retentionDaysPerIteration.WithLabelValues(statusSuccess).Observe(float64(daysProcessed)) + r.metrics.retentionTenantsPerIteration.WithLabelValues(statusSuccess).Observe(float64(len(tenantsRetentionApplied))) + level.Info(r.logger).Log("msg", "finished applying retention", "daysProcessed", daysProcessed, "tenants", len(tenantsRetentionApplied)) + + return nil +} + +func (r *RetentionManager) reportTenantsExceedingLookback(retentionByTenant map[string]time.Duration) { + if len(retentionByTenant) == 0 { + r.metrics.retentionTenantsExceedingLookback.Set(0) + return + } + + var tenantsExceedingLookback int + for tenant, retention := range retentionByTenant { + if retention > time.Duration(r.cfg.MaxLookbackDays)*24*time.Hour { + level.Warn(r.logger).Log("msg", "tenant retention exceeds max lookback days", "tenant", tenant, "retention", retention.String()) + } + tenantsExceedingLookback++ + } + + r.metrics.retentionTenantsExceedingLookback.Set(float64(tenantsExceedingLookback)) +} + +func findLongestRetention(globalRetention time.Duration, streamRetention []validation.StreamRetention) time.Duration { + if len(streamRetention) == 0 { + return 
globalRetention + } + + maxStreamRetention := slices.MaxFunc(streamRetention, func(a, b validation.StreamRetention) int { + return int(a.Period - b.Period) + }) + + if time.Duration(maxStreamRetention.Period) > globalRetention { + return time.Duration(maxStreamRetention.Period) + } + return globalRetention +} + +func retentionByTenant(limits RetentionLimits) map[string]time.Duration { + all := limits.AllByUserID() + if len(all) == 0 { + return nil + } + + retentions := make(map[string]time.Duration, len(all)) + for tenant, lim := range all { + retention := findLongestRetention(time.Duration(lim.RetentionPeriod), lim.StreamRetention) + if retention == 0 { + continue + } + retentions[tenant] = retention + } + + return retentions +} + +// smallestEnabledRetention returns the smallest retention period across all tenants and the default. +func smallestEnabledRetention(defaultRetention time.Duration, perTenantRetention map[string]time.Duration) time.Duration { + if len(perTenantRetention) == 0 { + return defaultRetention + } + + smallest := time.Duration(math.MaxInt64) + if defaultRetention != 0 { + smallest = defaultRetention + } + + for _, retention := range perTenantRetention { + // Skip unlimited retention + if retention == 0 { + continue + } + + if retention < smallest { + smallest = retention + } + } + + if smallest == time.Duration(math.MaxInt64) { + // No tenant nor defaults configures a retention + return 0 + } + + return smallest +} diff --git a/pkg/bloomcompactor/retention_test.go b/pkg/bloomcompactor/retention_test.go new file mode 100644 index 0000000000000..26ad6b3d2e4a6 --- /dev/null +++ b/pkg/bloomcompactor/retention_test.go @@ -0,0 +1,880 @@ +package bloomcompactor + +import ( + "context" + "flag" + "fmt" + "math" + "os" + "testing" + "time" + + "github.com/go-kit/log" + "github.com/grafana/dskit/services" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/common/model" + "github.com/stretchr/testify/require" + + 
"github.com/grafana/loki/v3/pkg/storage" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + storageconfig "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" + util_log "github.com/grafana/loki/v3/pkg/util/log" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/validation" +) + +var testTime = parseDayTime("2024-12-31").ModelTime() + +func TestRetention(t *testing.T) { + for _, tc := range []struct { + name string + ownsRetention bool + cfg RetentionConfig + lim mockRetentionLimits + prePopulate func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) + expectErr bool + check func(t *testing.T, bloomStore *bloomshipper.BloomStore) + }{ + { + name: "retention disabled", + ownsRetention: true, + cfg: RetentionConfig{ + Enabled: false, + MaxLookbackDays: 2 * 365, + }, + lim: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + "2": 200 * 24 * time.Hour, + "3": 500 * 24 * time.Hour, + }, + }, + prePopulate: func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 200) + putMetasForLastNDays(t, schemaCfg, bloomStore, "2", testTime, 50) + putMetasForLastNDays(t, schemaCfg, bloomStore, "3", testTime, 500) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 200, len(metas[0])) + metas = getGroupedMetasForLastNDays(t, bloomStore, "2", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 50, len(metas[0])) + metas = 
getGroupedMetasForLastNDays(t, bloomStore, "3", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 500, len(metas[0])) + }, + }, + { + name: "compactor does not own retention", + ownsRetention: false, + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 2 * 365, + }, + lim: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + "2": 200 * 24 * time.Hour, + "3": 500 * 24 * time.Hour, + }, + }, + prePopulate: func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 200) + putMetasForLastNDays(t, schemaCfg, bloomStore, "2", testTime, 50) + putMetasForLastNDays(t, schemaCfg, bloomStore, "3", testTime, 500) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 200, len(metas[0])) + metas = getGroupedMetasForLastNDays(t, bloomStore, "2", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 50, len(metas[0])) + metas = getGroupedMetasForLastNDays(t, bloomStore, "3", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 500, len(metas[0])) + }, + }, + { + name: "unlimited retention", + ownsRetention: true, + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 2 * 365, + }, + lim: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 0, + }, + }, + prePopulate: func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 200) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 200, len(metas[0])) + }, + }, + { + name: "default retention", + ownsRetention: true, + cfg: RetentionConfig{ 
+ Enabled: true, + MaxLookbackDays: 2 * 365, + }, + lim: mockRetentionLimits{ + defaultRetention: 30 * 24 * time.Hour, + }, + prePopulate: func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 200) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 31, len(metas[0])) + }, + }, + { + name: "retention lookback smaller than max retention", + ownsRetention: true, + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 100, + }, + lim: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + "2": 20 * 24 * time.Hour, + "3": 200 * 24 * time.Hour, + "4": 400 * 24 * time.Hour, + }, + streamRetention: map[string][]validation.StreamRetention{ + "1": { + { + Period: model.Duration(30 * 24 * time.Hour), + }, + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + "2": { + { + Period: model.Duration(10 * 24 * time.Hour), + }, + }, + }, + }, + prePopulate: func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 200) + putMetasForLastNDays(t, schemaCfg, bloomStore, "2", testTime, 50) + putMetasForLastNDays(t, schemaCfg, bloomStore, "3", testTime, 500) + putMetasForLastNDays(t, schemaCfg, bloomStore, "4", testTime, 500) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + // Tenant 1 has 40 days of retention, and we wrote 200 days of metas + // We should get two groups: 0th-40th and 101th-200th + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 2, len(metas)) + require.Equal(t, 41, len(metas[0])) // 0-40th day + require.Equal(t, 100, len(metas[1])) // 100th-200th day + + // Tenant 2 has 20 days of retention, and we wrote 50 
days of metas + // We should get one group: 0th-20th + metas = getGroupedMetasForLastNDays(t, bloomStore, "2", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 21, len(metas[0])) // 0th-20th + + // Tenant 3 has 200 days of retention, and we wrote 500 days of metas + // Since the manager looks up to 100 days, we shouldn't have deleted any metas + metas = getGroupedMetasForLastNDays(t, bloomStore, "3", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 500, len(metas[0])) // 0th-500th + + // Tenant 4 has 400 days of retention, and we wrote 500 days of metas + // Since the manager looks up to 100 days, we shouldn't have deleted any metas + metas = getGroupedMetasForLastNDays(t, bloomStore, "4", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 500, len(metas[0])) // 0th-500th + }, + }, + { + name: "retention lookback bigger than max retention", + ownsRetention: true, + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 2 * 365, + }, + lim: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + "2": 20 * 24 * time.Hour, + "3": 200 * 24 * time.Hour, + "4": 400 * 24 * time.Hour, + }, + streamRetention: map[string][]validation.StreamRetention{ + "1": { + { + Period: model.Duration(30 * 24 * time.Hour), + }, + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + "2": { + { + Period: model.Duration(10 * 24 * time.Hour), + }, + }, + }, + }, + prePopulate: func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 200) + putMetasForLastNDays(t, schemaCfg, bloomStore, "2", testTime, 50) + putMetasForLastNDays(t, schemaCfg, bloomStore, "3", testTime, 500) + putMetasForLastNDays(t, schemaCfg, bloomStore, "4", testTime, 500) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + // Tenant 1 has 40 days of retention, and we wrote 200 days of metas + // We 
should get one groups: 0th-40th + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 41, len(metas[0])) // 0-40th day + + // Tenant 2 has 20 days of retention, and we wrote 50 days of metas + // We should get one group: 0th-20th + metas = getGroupedMetasForLastNDays(t, bloomStore, "2", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 21, len(metas[0])) // 0th-20th + + // Tenant 3 has 200 days of retention, and we wrote 500 days of metas + // We should get one group: 0th-200th + metas = getGroupedMetasForLastNDays(t, bloomStore, "3", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 201, len(metas[0])) // 0th-200th + + // Tenant 4 has 400 days of retention, and we wrote 500 days of metas + // Since the manager looks up to 100 days, we shouldn't have deleted any metas + metas = getGroupedMetasForLastNDays(t, bloomStore, "4", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 401, len(metas[0])) // 0th-400th + }, + }, + { + name: "hit no tenants in table", + ownsRetention: true, + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 2 * 365, + }, + lim: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + }, + }, + prePopulate: func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + // Place metas with a gap of 50 days. 
[0th-100th], [151th-200th] + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 100) + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime.Add(-150*24*time.Hour), 50) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + // We should get two groups: 0th-30th and 151th-200th + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 2, len(metas)) + require.Equal(t, 31, len(metas[0])) // 0th-30th day + require.Equal(t, 50, len(metas[1])) // 151th-200th day + }, + }, + } { + t.Run(tc.name, func(t *testing.T) { + bloomStore, schema, _, err := NewMockBloomStore(t) + require.NoError(t, err) + + rm := NewRetentionManager( + tc.cfg, + tc.lim, + bloomStore, + mockSharding{ + ownsRetention: tc.ownsRetention, + }, + NewMetrics(nil, v1.NewMetrics(nil)), + util_log.Logger, + ) + rm.now = func() model.Time { + return testTime + } + + tc.prePopulate(t, schema, bloomStore) + + err = rm.Apply(context.Background()) + if tc.expectErr { + require.Error(t, err) + return + } + require.NoError(t, err) + + tc.check(t, bloomStore) + }) + } +} + +func TestRetentionRunsOncePerDay(t *testing.T) { + bloomStore, schema, _, err := NewMockBloomStore(t) + require.NoError(t, err) + + rm := NewRetentionManager( + RetentionConfig{ + Enabled: true, + MaxLookbackDays: 365, + }, + mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + }, + }, + bloomStore, + mockSharding{ + ownsRetention: true, + }, + NewMetrics(nil, v1.NewMetrics(nil)), + util_log.Logger, + ) + rm.now = func() model.Time { + return testTime + } + + // Write metas for the last 100 days and run retention + putMetasForLastNDays(t, schema, bloomStore, "1", testTime, 100) + err = rm.Apply(context.Background()) + require.NoError(t, err) + + // We should get only the first 30 days of metas + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 100) + require.Equal(t, 1, len(metas)) + require.Equal(t, 31, len(metas[0])) 
// 0th-30th day + + // We now change the now() time to be a bit later in the day + rm.now = func() model.Time { + return testTime.Add(1 * time.Hour) + } + + // Write metas again and run retention. Since we already ran retention at now()'s day, + // Apply should be a noop, and therefore we should be able to get all the 100 days of metas + putMetasForLastNDays(t, schema, bloomStore, "1", testTime, 100) + err = rm.Apply(context.Background()) + require.NoError(t, err) + + metas = getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 100) + require.Equal(t, 1, len(metas)) + require.Equal(t, 100, len(metas[0])) + + // We now change the now() time to be the next day, retention should run again + rm.now = func() model.Time { + return testTime.Add(24 * time.Hour) + } + err = rm.Apply(context.Background()) + require.NoError(t, err) + + // We should only see the first 30 days of metas + metas = getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 100) + require.Equal(t, 1, len(metas)) + require.Equal(t, 30, len(metas[0])) // 0th-30th day +} + +func TestOwnsRetention(t *testing.T) { + for _, tc := range []struct { + name string + numCompactors int + }{ + { + name: "single compactor", + numCompactors: 1, + }, + { + name: "multiple compactors", + numCompactors: 100, + }, + } { + t.Run(tc.name, func(t *testing.T) { + var ringManagers []*lokiring.RingManager + for i := 0; i < tc.numCompactors; i++ { + var cfg Config + cfg.RegisterFlags(flag.NewFlagSet("ring", flag.PanicOnError)) + cfg.Ring.KVStore.Store = "inmemory" + cfg.Ring.InstanceID = fmt.Sprintf("bloom-compactor-%d", i) + cfg.Ring.InstanceAddr = fmt.Sprintf("localhost-%d", i) + + ringManager, err := lokiring.NewRingManager("bloom-compactor", lokiring.ServerMode, cfg.Ring, 1, cfg.Ring.NumTokens, util_log.Logger, prometheus.NewRegistry()) + require.NoError(t, err) + require.NoError(t, ringManager.StartAsync(context.Background())) + + ringManagers = append(ringManagers, ringManager) + } + t.Cleanup(func() { + // Stop 
all rings and wait for them to stop. + for _, ringManager := range ringManagers { + ringManager.StopAsync() + require.Eventually(t, func() bool { + return ringManager.State() == services.Terminated + }, 1*time.Minute, 100*time.Millisecond) + } + }) + + // Wait for all rings to see each other. + for _, ringManager := range ringManagers { + require.Eventually(t, func() bool { + running := ringManager.State() == services.Running + discovered := ringManager.Ring.InstancesCount() == tc.numCompactors + return running && discovered + }, 1*time.Minute, 100*time.Millisecond) + } + + var shardings []retentionSharding + for _, ringManager := range ringManagers { + shardings = append(shardings, newFirstTokenRetentionSharding(ringManager.Ring, ringManager.RingLifecycler)) + } + + var ownsRetention int + for _, sharding := range shardings { + owns, err := sharding.OwnsRetention() + require.NoError(t, err) + if owns { + ownsRetention++ + } + } + + require.Equal(t, 1, ownsRetention) + }) + } +} + +func TestFindLongestRetention(t *testing.T) { + for _, tc := range []struct { + name string + globalRetention time.Duration + streamRetention []validation.StreamRetention + expectedRetention time.Duration + }{ + { + name: "no retention", + expectedRetention: 0, + }, + { + name: "global retention", + globalRetention: 30 * 24 * time.Hour, + expectedRetention: 30 * 24 * time.Hour, + }, + { + name: "stream retention", + streamRetention: []validation.StreamRetention{ + { + Period: model.Duration(30 * 24 * time.Hour), + }, + }, + expectedRetention: 30 * 24 * time.Hour, + }, + { + name: "two stream retention", + streamRetention: []validation.StreamRetention{ + { + Period: model.Duration(30 * 24 * time.Hour), + }, + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + expectedRetention: 40 * 24 * time.Hour, + }, + { + name: "stream retention bigger than global", + globalRetention: 20 * 24 * time.Hour, + streamRetention: []validation.StreamRetention{ + { + Period: model.Duration(30 * 24 * 
time.Hour), + }, + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + expectedRetention: 40 * 24 * time.Hour, + }, + { + name: "global retention bigger than stream", + globalRetention: 40 * 24 * time.Hour, + streamRetention: []validation.StreamRetention{ + { + Period: model.Duration(20 * 24 * time.Hour), + }, + { + Period: model.Duration(30 * 24 * time.Hour), + }, + }, + expectedRetention: 40 * 24 * time.Hour, + }, + } { + t.Run(tc.name, func(t *testing.T) { + retention := findLongestRetention(tc.globalRetention, tc.streamRetention) + require.Equal(t, tc.expectedRetention, retention) + }) + } +} + +func TestSmallestRetention(t *testing.T) { + for _, tc := range []struct { + name string + limits RetentionLimits + expectedRetention time.Duration + expectedHasRetention bool + }{ + { + name: "no retention", + limits: mockRetentionLimits{}, + expectedRetention: 0, + }, + { + name: "default global retention", + limits: mockRetentionLimits{ + defaultRetention: 30 * 24 * time.Hour, + }, + expectedRetention: 30 * 24 * time.Hour, + }, + { + name: "default stream retention", + limits: mockRetentionLimits{ + defaultStreamRetention: []validation.StreamRetention{ + { + Period: model.Duration(30 * 24 * time.Hour), + }, + }, + }, + expectedRetention: 30 * 24 * time.Hour, + }, + { + name: "tenant configured unlimited", + limits: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 0, + }, + defaultRetention: 30 * 24 * time.Hour, + }, + expectedRetention: 30 * 24 * time.Hour, + }, + { + name: "no default one tenant", + limits: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + }, + streamRetention: map[string][]validation.StreamRetention{ + "1": { + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + }, + }, + expectedRetention: 40 * 24 * time.Hour, + }, + { + name: "no default two tenants", + limits: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + "2": 20 * 24 * 
time.Hour, + }, + streamRetention: map[string][]validation.StreamRetention{ + "1": { + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + "2": { + { + Period: model.Duration(10 * 24 * time.Hour), + }, + }, + }, + }, + expectedRetention: 20 * 24 * time.Hour, + }, + { + name: "default bigger than tenant", + limits: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 10 * 24 * time.Hour, + }, + streamRetention: map[string][]validation.StreamRetention{ + "1": { + { + Period: model.Duration(20 * 24 * time.Hour), + }, + }, + }, + defaultRetention: 40 * 24 * time.Hour, + defaultStreamRetention: []validation.StreamRetention{ + { + Period: model.Duration(30 * 24 * time.Hour), + }, + }, + }, + expectedRetention: 20 * 24 * time.Hour, + }, + { + name: "tenant bigger than default", + limits: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + }, + streamRetention: map[string][]validation.StreamRetention{ + "1": { + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + }, + defaultRetention: 10 * 24 * time.Hour, + defaultStreamRetention: []validation.StreamRetention{ + { + Period: model.Duration(20 * 24 * time.Hour), + }, + }, + }, + expectedRetention: 20 * 24 * time.Hour, + }, + } { + t.Run(tc.name, func(t *testing.T) { + defaultLim := tc.limits.DefaultLimits() + defaultRetention := findLongestRetention(time.Duration(defaultLim.RetentionPeriod), defaultLim.StreamRetention) + tenantsRetention := retentionByTenant(tc.limits) + + retention := smallestEnabledRetention(defaultRetention, tenantsRetention) + require.Equal(t, tc.expectedRetention, retention) + }) + } +} + +func TestRetentionConfigValidate(t *testing.T) { + for _, tc := range []struct { + name string + cfg RetentionConfig + expectErr bool + }{ + { + name: "enabled and valid", + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 2 * 365, + }, + expectErr: false, + }, + { + name: "invalid max lookback days", + cfg: RetentionConfig{ + Enabled: 
true, + MaxLookbackDays: 0, + }, + expectErr: true, + }, + { + name: "disabled and invalid", + cfg: RetentionConfig{ + Enabled: false, + MaxLookbackDays: 0, + }, + expectErr: false, + }, + } { + t.Run(tc.name, func(t *testing.T) { + err := tc.cfg.Validate() + if tc.expectErr { + require.Error(t, err) + return + } + require.NoError(t, err) + }) + } +} + +func putMetasForLastNDays(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore, tenant string, start model.Time, days int) { + const metasPerDay = 2 + + startDay := storageconfig.NewDayTime(start) + endDay := storageconfig.NewDayTime(startDay.Add(-time.Duration(days) * 24 * time.Hour)) + for day := startDay; day.After(endDay); day = day.Dec() { + period, err := schemaCfg.SchemaForTime(day.ModelTime()) + require.NoError(t, err) + + dayTable := storageconfig.NewDayTable(day, period.IndexTables.Prefix) + bloomClient, err := bloomStore.Client(dayTable.ModelTime()) + require.NoErrorf(t, err, "failed to get bloom client for day %d: %s", day, err) + + for i := 0; i < metasPerDay; i++ { + err = bloomClient.PutMeta(context.Background(), bloomshipper.Meta{ + MetaRef: bloomshipper.MetaRef{ + Ref: bloomshipper.Ref{ + TenantID: tenant, + TableName: dayTable.String(), + Bounds: v1.NewBounds(model.Fingerprint(i*100), model.Fingerprint(i*100+100)), + }, + }, + Blocks: []bloomshipper.BlockRef{}, + }) + require.NoError(t, err) + } + } +} + +// getMetasForLastNDays returns groups of continuous metas for the last N days. 
+func getGroupedMetasForLastNDays(t *testing.T, bloomStore *bloomshipper.BloomStore, tenant string, start model.Time, days int) [][][]bloomshipper.Meta { + metasGrouped := make([][][]bloomshipper.Meta, 0) + currentGroup := make([][]bloomshipper.Meta, 0) + + startDay := storageconfig.NewDayTime(start) + endDay := storageconfig.NewDayTime(startDay.Add(-time.Duration(days) * 24 * time.Hour)) + + for day := startDay; day.After(endDay); day = day.Dec() { + metas, err := bloomStore.FetchMetas(context.Background(), bloomshipper.MetaSearchParams{ + TenantID: tenant, + Interval: bloomshipper.NewInterval(day.Bounds()), + Keyspace: v1.NewBounds(0, math.MaxUint64), + }) + require.NoError(t, err) + if len(metas) == 0 { + // We have reached the end of the metas group: cut a new group + if len(currentGroup) > 0 { + metasGrouped = append(metasGrouped, currentGroup) + currentGroup = make([][]bloomshipper.Meta, 0) + } + continue + } + currentGroup = append(currentGroup, metas) + } + + // Append the last group if it's not empty + if len(currentGroup) > 0 { + metasGrouped = append(metasGrouped, currentGroup) + } + + return metasGrouped +} + +func NewMockBloomStore(t *testing.T) (*bloomshipper.BloomStore, storageconfig.SchemaConfig, string, error) { + workDir := t.TempDir() + return NewMockBloomStoreWithWorkDir(t, workDir) +} + +func NewMockBloomStoreWithWorkDir(t *testing.T, workDir string) (*bloomshipper.BloomStore, storageconfig.SchemaConfig, string, error) { + schemaCfg := storageconfig.SchemaConfig{ + Configs: []storageconfig.PeriodConfig{ + { + ObjectType: storageconfig.StorageTypeFileSystem, + From: storageconfig.DayTime{ + Time: testTime.Add(-2 * 365 * 24 * time.Hour), // -2 year + }, + IndexTables: storageconfig.IndexPeriodicTableConfig{ + PeriodicTableConfig: storageconfig.PeriodicTableConfig{ + Period: 24 * time.Hour, + Prefix: "schema_a_table_", + }}, + }, + { + ObjectType: storageconfig.StorageTypeFileSystem, + From: storageconfig.DayTime{ + Time: testTime.Add(-365 * 24 * 
time.Hour), // -1 year + }, + IndexTables: storageconfig.IndexPeriodicTableConfig{ + PeriodicTableConfig: storageconfig.PeriodicTableConfig{ + Period: 24 * time.Hour, + Prefix: "schema_b_table_", + }}, + }, + }, + } + + storageConfig := storage.Config{ + FSConfig: local.FSConfig{ + Directory: workDir, + }, + BloomShipperConfig: config.Config{ + WorkingDirectory: []string{workDir}, + DownloadParallelism: 1, + BlocksCache: config.BlocksCacheConfig{ + SoftLimit: 1 << 20, + HardLimit: 2 << 20, + TTL: time.Hour, + PurgeInterval: time.Hour, + }, + }, + } + + reg := prometheus.NewPedanticRegistry() + metrics := storage.NewClientMetrics() + t.Cleanup(metrics.Unregister) + logger := log.NewLogfmtLogger(os.Stderr) + + metasCache := cache.NewMockCache() + blocksCache := bloomshipper.NewFsBlocksCache(storageConfig.BloomShipperConfig.BlocksCache, prometheus.NewPedanticRegistry(), logger) + + store, err := bloomshipper.NewBloomStore(schemaCfg.Configs, storageConfig, metrics, metasCache, blocksCache, reg, logger) + if err == nil { + t.Cleanup(store.Stop) + } + + return store, schemaCfg, workDir, err +} + +type mockRetentionLimits struct { + retention map[string]time.Duration + streamRetention map[string][]validation.StreamRetention + defaultRetention time.Duration + defaultStreamRetention []validation.StreamRetention +} + +func (m mockRetentionLimits) RetentionPeriod(tenant string) time.Duration { + return m.retention[tenant] +} + +func (m mockRetentionLimits) StreamRetention(tenant string) []validation.StreamRetention { + return m.streamRetention[tenant] +} + +func (m mockRetentionLimits) AllByUserID() map[string]*validation.Limits { + tenants := make(map[string]*validation.Limits, len(m.retention)) + + for tenant, retention := range m.retention { + if _, ok := tenants[tenant]; !ok { + tenants[tenant] = &validation.Limits{} + } + tenants[tenant].RetentionPeriod = model.Duration(retention) + } + + for tenant, streamRetention := range m.streamRetention { + if _, ok := 
tenants[tenant]; !ok { + tenants[tenant] = &validation.Limits{} + } + tenants[tenant].StreamRetention = streamRetention + } + + return tenants +} + +func (m mockRetentionLimits) DefaultLimits() *validation.Limits { + return &validation.Limits{ + RetentionPeriod: model.Duration(m.defaultRetention), + StreamRetention: m.defaultStreamRetention, + } +} + +type mockSharding struct { + ownsRetention bool +} + +func (m mockSharding) OwnsRetention() (bool, error) { + return m.ownsRetention, nil +} diff --git a/pkg/bloomcompactor/spec.go b/pkg/bloomcompactor/spec.go index 1f2e58dabcceb..6c7e095dbed83 100644 --- a/pkg/bloomcompactor/spec.go +++ b/pkg/bloomcompactor/spec.go @@ -10,13 +10,13 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/logproto" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" ) // inclusive range @@ -217,7 +217,7 @@ func (b *LazyBlockBuilderIterator) Next() bool { return false } - b.curr = v1.NewBlock(reader) + b.curr = v1.NewBlock(reader, b.metrics.bloomMetrics) return true } diff --git a/pkg/bloomcompactor/spec_test.go b/pkg/bloomcompactor/spec_test.go index 29f579a8e777d..b35d82b9d3f41 100644 --- a/pkg/bloomcompactor/spec_test.go +++ b/pkg/bloomcompactor/spec_test.go @@ -10,9 +10,9 @@ import ( 
"github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/chunkenc" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) func blocksFromSchema(t *testing.T, n int, options v1.BlockOptions) (res []*v1.Block, data []v1.SeriesWithBloom, refs []bloomshipper.BlockRef) { @@ -50,7 +50,7 @@ func blocksFromSchemaWithRange(t *testing.T, n int, options v1.BlockOptions, fro _, err = builder.BuildFrom(itr) require.Nil(t, err) - res = append(res, v1.NewBlock(reader)) + res = append(res, v1.NewBlock(reader, v1.NewMetrics(nil))) ref := genBlockRef(data[minIdx].Series.Fingerprint, data[maxIdx-1].Series.Fingerprint) t.Log("create block", ref) refs = append(refs, ref) @@ -74,7 +74,7 @@ func dummyBloomGen(t *testing.T, opts v1.BlockOptions, store v1.Iterator[*v1.Ser for i, b := range blocks { bqs = append(bqs, &bloomshipper.CloseableBlockQuerier{ BlockRef: refs[i], - BlockQuerier: v1.NewBlockQuerier(b), + BlockQuerier: v1.NewBlockQuerier(b, false, v1.DefaultMaxPageSize), }) } @@ -152,7 +152,7 @@ func TestSimpleBloomGenerator(t *testing.T) { expectedRefs := v1.PointerSlice(data) outputRefs := make([]*v1.SeriesWithBloom, 0, len(data)) for _, block := range outputBlocks { - bq := block.Querier() + bq := v1.NewBlockQuerier(block, false, v1.DefaultMaxPageSize) for bq.Next() { outputRefs = append(outputRefs, bq.At()) } diff --git a/pkg/bloomcompactor/tracker.go b/pkg/bloomcompactor/tracker.go index 34f726f322a09..1c9bde0a4ae71 100644 --- a/pkg/bloomcompactor/tracker.go +++ b/pkg/bloomcompactor/tracker.go @@ -8,8 +8,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" + v1 
"github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" ) type tableRangeProgress struct { diff --git a/pkg/bloomcompactor/tracker_test.go b/pkg/bloomcompactor/tracker_test.go index 494073e7cc520..e23eb55d6dc64 100644 --- a/pkg/bloomcompactor/tracker_test.go +++ b/pkg/bloomcompactor/tracker_test.go @@ -6,8 +6,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" ) func mkTblRange(tenant string, tbl config.DayTime, from, through model.Fingerprint) *tenantTableRange { diff --git a/pkg/bloomcompactor/tsdb.go b/pkg/bloomcompactor/tsdb.go index ddfd78c2974b4..3ad359bc31227 100644 --- a/pkg/bloomcompactor/tsdb.go +++ b/pkg/bloomcompactor/tsdb.go @@ -14,13 +14,14 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - baseStore "github.com/grafana/loki/pkg/storage" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/chunkenc" + baseStore "github.com/grafana/loki/v3/pkg/storage" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) const ( @@ 
-121,34 +122,22 @@ func (b *BloomTSDBStore) LoadTSDB( } }() - return NewTSDBSeriesIter(ctx, idx, bounds) + return NewTSDBSeriesIter(ctx, tenant, idx, bounds) } -// TSDBStore is an interface for interacting with the TSDB, -// modeled off a relevant subset of the `tsdb.TSDBIndex` struct -type forSeries interface { - ForSeries( - ctx context.Context, - fpFilter index.FingerprintFilter, - from model.Time, - through model.Time, - fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta), - matchers ...*labels.Matcher, - ) error -} - -func NewTSDBSeriesIter(ctx context.Context, f forSeries, bounds v1.FingerprintBounds) (v1.Iterator[*v1.Series], error) { +func NewTSDBSeriesIter(ctx context.Context, user string, f sharding.ForSeries, bounds v1.FingerprintBounds) (v1.Iterator[*v1.Series], error) { // TODO(salvacorts): Create a pool series := make([]*v1.Series, 0, 100) if err := f.ForSeries( ctx, + user, bounds, 0, math.MaxInt64, - func(_ labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) { + func(_ labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { select { case <-ctx.Done(): - return + return true default: res := &v1.Series{ Fingerprint: fp, @@ -163,6 +152,7 @@ func NewTSDBSeriesIter(ctx context.Context, f forSeries, bounds v1.FingerprintBo } series = append(series, res) + return false } }, labels.MustNewMatcher(labels.MatchEqual, "", ""), diff --git a/pkg/bloomcompactor/tsdb_test.go b/pkg/bloomcompactor/tsdb_test.go index 91ad1719375ac..a18e36ddb6d15 100644 --- a/pkg/bloomcompactor/tsdb_test.go +++ b/pkg/bloomcompactor/tsdb_test.go @@ -9,18 +9,19 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) type forSeriesTestImpl 
[]*v1.Series func (f forSeriesTestImpl) ForSeries( _ context.Context, + _ string, _ index.FingerprintFilter, _ model.Time, _ model.Time, - fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta), + fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta) bool, _ ...*labels.Matcher, ) error { for i := range f { @@ -61,7 +62,7 @@ func TestTSDBSeriesIter(t *testing.T) { }, } srcItr := v1.NewSliceIter(input) - itr, err := NewTSDBSeriesIter(context.Background(), forSeriesTestImpl(input), v1.NewBounds(0, math.MaxUint64)) + itr, err := NewTSDBSeriesIter(context.Background(), "", forSeriesTestImpl(input), v1.NewBounds(0, math.MaxUint64)) require.NoError(t, err) v1.EqualIterators[*v1.Series]( @@ -78,7 +79,7 @@ func TestTSDBSeriesIter_Expiry(t *testing.T) { t.Run("expires on creation", func(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) cancel() - itr, err := NewTSDBSeriesIter(ctx, forSeriesTestImpl{ + itr, err := NewTSDBSeriesIter(ctx, "", forSeriesTestImpl{ {}, // a single entry }, v1.NewBounds(0, math.MaxUint64)) require.Error(t, err) @@ -87,7 +88,7 @@ func TestTSDBSeriesIter_Expiry(t *testing.T) { t.Run("expires during consumption", func(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) - itr, err := NewTSDBSeriesIter(ctx, forSeriesTestImpl{ + itr, err := NewTSDBSeriesIter(ctx, "", forSeriesTestImpl{ {}, {}, }, v1.NewBounds(0, math.MaxUint64)) diff --git a/pkg/bloomcompactor/versioned_range.go b/pkg/bloomcompactor/versioned_range.go index 0c399025f610f..03da12f1d7da5 100644 --- a/pkg/bloomcompactor/versioned_range.go +++ b/pkg/bloomcompactor/versioned_range.go @@ -5,8 +5,8 @@ import ( "github.com/prometheus/common/model" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) type tsdbToken struct { diff --git 
a/pkg/bloomcompactor/versioned_range_test.go b/pkg/bloomcompactor/versioned_range_test.go index 6c4329a0dba99..a85418bc6e1e5 100644 --- a/pkg/bloomcompactor/versioned_range_test.go +++ b/pkg/bloomcompactor/versioned_range_test.go @@ -6,9 +6,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" ) func Test_TsdbTokenRange(t *testing.T) { diff --git a/pkg/bloomgateway/bloomgateway.go b/pkg/bloomgateway/bloomgateway.go index 97f555cf43073..d2ac958f424aa 100644 --- a/pkg/bloomgateway/bloomgateway.go +++ b/pkg/bloomgateway/bloomgateway.go @@ -56,13 +56,12 @@ import ( "github.com/prometheus/client_golang/prometheus" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/queue" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/queue" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) var errGatewayUnhealthy = errors.New("bloom-gateway is unhealthy in the ring") @@ -196,9 +195,6 @@ func (g *Gateway) stopping(_ error) error { // FilterChunkRefs implements BloomGatewayServer func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunkRefRequest) 
(*logproto.FilterChunkRefResponse, error) { - sp, ctx := opentracing.StartSpanFromContext(ctx, "bloomgateway.FilterChunkRefs") - defer sp.Finish() - tenantID, err := tenant.TenantID(ctx) if err != nil { return nil, err @@ -206,8 +202,17 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk logger := log.With(g.logger, "tenant", tenantID) + sp, ctx := opentracing.StartSpanFromContext(ctx, "bloomgateway.FilterChunkRefs") + stats, ctx := ContextWithEmptyStats(ctx) + defer func() { + level.Info(logger).Log(stats.KVArgs()...) + sp.LogKV(stats.KVArgs()...) + sp.Finish() + }() + // start time == end time --> empty response if req.From.Equal(req.Through) { + stats.Status = labelSuccess return &logproto.FilterChunkRefResponse{ ChunkRefs: []*logproto.GroupedChunkRefs{}, }, nil @@ -215,23 +220,28 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk // start time > end time --> error response if req.Through.Before(req.From) { + stats.Status = labelFailure return nil, errors.New("from time must not be after through time") } filters := v1.ExtractTestableLineFilters(req.Plan.AST) + stats.NumFilters = len(filters) g.metrics.receivedFilters.Observe(float64(len(filters))) // Shortcut if request does not contain filters if len(filters) == 0 { + stats.Status = labelSuccess return &logproto.FilterChunkRefResponse{ ChunkRefs: req.Refs, }, nil } seriesByDay := partitionRequest(req) + stats.NumTasks = len(seriesByDay) // no tasks --> empty response if len(seriesByDay) == 0 { + stats.Status = labelSuccess return &logproto.FilterChunkRefResponse{ ChunkRefs: []*logproto.GroupedChunkRefs{}, }, nil @@ -253,15 +263,6 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk // TODO(owen-d): include capacity in constructor? 
task.responses = responsesPool.Get(len(seriesForDay.series)) - - level.Debug(g.logger).Log( - "msg", "created task for day", - "task", task.ID, - "day", seriesForDay.day, - "interval", seriesForDay.interval.String(), - "nSeries", len(seriesForDay.series), - "filters", JoinFunc(filters, ";", func(e syntax.LineFilterExpr) string { return e.String() }), - ) tasks = append(tasks, task) } @@ -283,13 +284,14 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk // When enqueuing, we also add the task to the pending tasks _ = g.pendingTasks.Inc() }); err != nil { + stats.Status = labelFailure return nil, errors.Wrap(err, "failed to enqueue task") } // TODO(owen-d): use `concurrency` lib, bound parallelism go g.consumeTask(ctx, task, tasksCh) } - sp.LogKV("enqueue_duration", time.Since(queueStart).String()) + sp.LogKV("msg", "enqueued tasks", "duration", time.Since(queueStart).String()) remaining := len(tasks) @@ -303,10 +305,12 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk for remaining > 0 { select { case <-ctx.Done(): + stats.Status = "cancel" return nil, errors.Wrap(ctx.Err(), "request failed") case task := <-tasksCh: level.Info(logger).Log("msg", "task done", "task", task.ID, "err", task.Err()) if task.Err() != nil { + stats.Status = labelFailure return nil, errors.Wrap(task.Err(), "request failed") } responses = append(responses, task.responses) @@ -316,7 +320,10 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk sp.LogKV("msg", "received all responses") + start := time.Now() filtered := filterChunkRefs(req, responses) + duration := time.Since(start) + stats.AddPostProcessingTime(duration) // free up the responses for _, resp := range responses { @@ -333,13 +340,14 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk g.metrics.requestedChunks.Observe(float64(preFilterChunks)) g.metrics.filteredChunks.Observe(float64(preFilterChunks - 
postFilterChunks)) - level.Info(logger).Log( - "msg", "return filtered chunk refs", - "requested_series", preFilterSeries, - "filtered_series", preFilterSeries-postFilterSeries, - "requested_chunks", preFilterChunks, - "filtered_chunks", preFilterChunks-postFilterChunks, - ) + stats.Status = "success" + stats.SeriesRequested = preFilterSeries + stats.SeriesFiltered = preFilterSeries - postFilterSeries + stats.ChunksRequested = preFilterChunks + stats.ChunksFiltered = preFilterChunks - postFilterChunks + + sp.LogKV("msg", "return filtered chunk refs") + return &logproto.FilterChunkRefResponse{ChunkRefs: filtered}, nil } diff --git a/pkg/bloomgateway/bloomgateway_test.go b/pkg/bloomgateway/bloomgateway_test.go index 45c9a3926c157..edaa2ea7f0c82 100644 --- a/pkg/bloomgateway/bloomgateway_test.go +++ b/pkg/bloomgateway/bloomgateway_test.go @@ -20,17 +20,17 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - bloomshipperconfig "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" - lokiring "github.com/grafana/loki/pkg/util/ring" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + bloomshipperconfig 
"github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/validation" ) func groupRefs(t *testing.T, chunkRefs []*logproto.ChunkRef) []*logproto.GroupedChunkRefs { @@ -72,10 +72,8 @@ func setupBloomStore(t *testing.T) *bloomshipper.BloomStore { } storageCfg := storage.Config{ BloomShipperConfig: bloomshipperconfig.Config{ - WorkingDirectory: t.TempDir(), - BlocksDownloadingQueue: bloomshipperconfig.DownloadingQueueConfig{ - WorkersCount: 1, - }, + WorkingDirectory: []string{t.TempDir()}, + DownloadParallelism: 1, BlocksCache: bloomshipperconfig.BlocksCacheConfig{ SoftLimit: flagext.Bytes(10 << 20), HardLimit: flagext.Bytes(20 << 20), diff --git a/pkg/bloomgateway/cache.go b/pkg/bloomgateway/cache.go index 6c573cb47d6de..60124f353e2a6 100644 --- a/pkg/bloomgateway/cache.go +++ b/pkg/bloomgateway/cache.go @@ -11,9 +11,9 @@ import ( "golang.org/x/exp/slices" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) const ( @@ -46,6 +46,7 @@ func newCacheKeyGen(limits CacheLimits) keyGen { return keyGen{limits} } +// TODO(owen-d): need to implement our own key-generation which accounts for fingerprint ranges requested. 
func (k keyGen) GenerateCacheKey(ctx context.Context, tenant string, r resultscache.Request) string { return resultscache.ConstSplitter(k.BloomGatewayCacheKeyInterval(tenant)).GenerateCacheKey(ctx, tenant, r) } diff --git a/pkg/bloomgateway/cache_test.go b/pkg/bloomgateway/cache_test.go index bf1a8dbaa365b..3694a20ca09dc 100644 --- a/pkg/bloomgateway/cache_test.go +++ b/pkg/bloomgateway/cache_test.go @@ -11,13 +11,13 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/constants" ) // Range is 1000-4000 diff --git a/pkg/bloomgateway/client.go b/pkg/bloomgateway/client.go index 6497848923ab1..ff3ef1defb736 100644 --- a/pkg/bloomgateway/client.go +++ b/pkg/bloomgateway/client.go @@ -25,16 +25,17 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/bloomutils" - "github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/queue" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/constants" + 
"github.com/grafana/loki/v3/pkg/bloomutils" + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/queue" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( @@ -55,9 +56,6 @@ var ( } }, } - - // NB(chaudum): Should probably be configurable, but I don't want yet another user setting. - maxQueryParallelism = 10 ) type ringGetBuffers struct { @@ -106,10 +104,6 @@ type ClientConfig struct { // GRPCClientConfig configures the gRPC connection between the Bloom Gateway client and the server. GRPCClientConfig grpcclient.Config `yaml:"grpc_client_config"` - // LogGatewayRequests configures if requests sent to the gateway should be logged or not. - // The log messages are of type debug and contain the address of the gateway and the relevant tenant. - LogGatewayRequests bool `yaml:"log_gateway_requests"` - // Ring is the Bloom Gateway ring used to find the appropriate Bloom Gateway instance // this client should talk to. 
Ring ring.ReadRing `yaml:"-"` @@ -129,7 +123,6 @@ func (i *ClientConfig) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet) { i.GRPCClientConfig.RegisterFlagsWithPrefix(prefix+"grpc", f) i.Cache.RegisterFlagsWithPrefix(prefix+"cache.", f) f.BoolVar(&i.CacheResults, prefix+"cache_results", false, "Flag to control whether to cache bloom gateway client requests/responses.") - f.BoolVar(&i.LogGatewayRequests, prefix+"log-gateway-requests", false, "Flag to control whether requests sent to the gateway should be logged or not.") } func (i *ClientConfig) Validate() error { @@ -257,7 +250,7 @@ func (c *GatewayClient) FilterChunks(ctx context.Context, tenant string, from, t results := make([][]*logproto.GroupedChunkRefs, len(servers)) count := 0 - err = concurrency.ForEachJob(ctx, len(servers), maxQueryParallelism, func(ctx context.Context, i int) error { + err = concurrency.ForEachJob(ctx, len(servers), len(servers), func(ctx context.Context, i int) error { rs := servers[i] // randomize order of addresses so we don't hotspot the first server in the list @@ -270,9 +263,6 @@ func (c *GatewayClient) FilterChunks(ctx context.Context, tenant string, from, t "from", from.Time(), "through", through.Time(), "num_refs", len(rs.groups), - "refs", JoinFunc(rs.groups, ",", func(e *logproto.GroupedChunkRefs) string { - return model.Fingerprint(e.Fingerprint).String() - }), "plan", plan.String(), "plan_hash", plan.Hash(), ) @@ -357,6 +347,16 @@ func replicationSetsWithBounds(subRing ring.ReadRing, instances []ring.InstanceD return nil, errors.Wrap(err, "bloom gateway get ring") } + if len(tr) == 0 { + level.Warn(util_log.Logger).Log( + "subroutine", "replicationSetsWithBounds", + "msg", "instance has no token ranges - should not be possible", + "instance", inst.Id, + "n_instances", len(instances), + ) + continue + } + // NB(owen-d): this will send requests to the wrong nodes if RF>1 since it only checks the // first token when assigning replicasets rs, err := subRing.Get(tr[0], 
BlocksOwnerRead, bufDescs, bufHosts, bufZones) diff --git a/pkg/bloomgateway/client_test.go b/pkg/bloomgateway/client_test.go index e4b905c37b12c..d1de9dbab84e2 100644 --- a/pkg/bloomgateway/client_test.go +++ b/pkg/bloomgateway/client_test.go @@ -14,12 +14,12 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/bloomutils" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/bloomutils" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/validation" ) func rs(id int, tokens ...uint32) ring.ReplicationSet { diff --git a/pkg/bloomgateway/config.go b/pkg/bloomgateway/config.go index 356bc782fb839..9eaa6771e674f 100644 --- a/pkg/bloomgateway/config.go +++ b/pkg/bloomgateway/config.go @@ -3,7 +3,7 @@ package bloomgateway import ( "flag" - "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/util/ring" ) // Config configures the Bloom Gateway component. 
diff --git a/pkg/bloomgateway/multiplexing.go b/pkg/bloomgateway/multiplexing.go index 97e0b0aa6d66f..fab8fd867765a 100644 --- a/pkg/bloomgateway/multiplexing.go +++ b/pkg/bloomgateway/multiplexing.go @@ -9,11 +9,11 @@ import ( "github.com/oklog/ulid" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) const ( diff --git a/pkg/bloomgateway/multiplexing_test.go b/pkg/bloomgateway/multiplexing_test.go index af79f37b358b4..27c0bbbe5ec69 100644 --- a/pkg/bloomgateway/multiplexing_test.go +++ b/pkg/bloomgateway/multiplexing_test.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) func TestTask(t *testing.T) { diff --git a/pkg/bloomgateway/processor.go b/pkg/bloomgateway/processor.go index e49679fe61ea8..401bd9210e0ae 100644 --- a/pkg/bloomgateway/processor.go +++ b/pkg/bloomgateway/processor.go @@ -12,9 +12,9 @@ import ( "github.com/grafana/dskit/concurrency" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - 
"github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) func newProcessor(id string, concurrency int, store bloomshipper.Store, logger log.Logger, metrics *workerMetrics) *processor { @@ -45,7 +45,7 @@ func (p *processor) runWithBounds(ctx context.Context, tasks []Task, bounds v1.M "msg", "process tasks with bounds", "tenant", tenant, "tasks", len(tasks), - "bounds", JoinFunc(bounds, ",", func(e v1.FingerprintBounds) string { return e.String() }), + "bounds", len(bounds), ) for ts, tasks := range group(tasks, func(t Task) config.DayTime { return t.table }) { @@ -73,30 +73,65 @@ func (p *processor) processTasks(ctx context.Context, tenant string, day config. Interval: interval, Keyspace: v1.NewBounds(minFpRange.Min, maxFpRange.Max), } + + start := time.Now() metas, err := p.store.FetchMetas(ctx, metaSearch) + duration := time.Since(start) + level.Debug(p.logger).Log("msg", "fetched metas", "count", len(metas), "duration", duration, "err", err) + + for _, t := range tasks { + FromContext(t.ctx).AddMetasFetchTime(duration) + } + if err != nil { return err } blocksRefs := bloomshipper.BlocksForMetas(metas, interval, keyspaces) - level.Info(p.logger).Log("msg", "blocks for metas", "num_metas", len(metas), "num_blocks", len(blocksRefs)) - return p.processBlocks(ctx, partitionTasks(tasks, blocksRefs)) -} -func (p *processor) processBlocks(ctx context.Context, data []blockWithTasks) error { + data := partitionTasks(tasks, blocksRefs) + refs := make([]bloomshipper.BlockRef, 0, len(data)) for _, block := range data { refs = append(refs, block.ref) } - start := time.Now() - bqs, err := p.store.FetchBlocks(ctx, refs, bloomshipper.WithFetchAsync(true), bloomshipper.WithIgnoreNotFound(true)) - level.Debug(p.logger).Log("msg", "fetch blocks", "count", len(bqs), "duration", time.Since(start), 
"err", err) + start = time.Now() + bqs, err := p.store.FetchBlocks( + ctx, + refs, + bloomshipper.WithFetchAsync(true), + bloomshipper.WithIgnoreNotFound(true), + // NB(owen-d): we relinquish bloom pages to a pool + // after iteration for performance (alloc reduction). + // This is safe to do here because we do not capture + // the underlying bloom []byte outside of iteration + bloomshipper.WithPool(true), + ) + duration = time.Since(start) + level.Debug(p.logger).Log("msg", "fetched blocks", "count", len(refs), "duration", duration, "err", err) + + for _, t := range tasks { + FromContext(t.ctx).AddBlocksFetchTime(duration) + } if err != nil { return err } + start = time.Now() + res := p.processBlocks(ctx, bqs, data) + duration = time.Since(start) + + for _, t := range tasks { + FromContext(t.ctx).AddProcessingTime(duration) + } + + return res +} + +func (p *processor) processBlocks(ctx context.Context, bqs []*bloomshipper.CloseableBlockQuerier, data []blockWithTasks) error { + defer func() { for i := range bqs { if bqs[i] == nil { @@ -114,13 +149,6 @@ func (p *processor) processBlocks(ctx context.Context, data []blockWithTasks) er } block := data[i] - level.Debug(p.logger).Log( - "msg", "process block with tasks", - "job", i+1, - "of_jobs", len(bqs), - "block", block.ref, - "num_tasks", len(block.tasks), - ) if !block.ref.Bounds.Equal(bq.Bounds) { return errors.Errorf("block and querier bounds differ: %s vs %s", block.ref.Bounds, bq.Bounds) @@ -143,22 +171,12 @@ func (p *processor) processBlock(_ context.Context, blockQuerier *v1.BlockQuerie tokenizer := v1.NewNGramTokenizer(schema.NGramLen(), 0) iters := make([]v1.PeekingIterator[v1.Request], 0, len(tasks)) - // collect spans & run single defer to avoid blowing call stack - // if there are many tasks - spans := make([]opentracing.Span, 0, len(tasks)) - defer func() { - for _, sp := range spans { - sp.Finish() - } - }() - for _, task := range tasks { - // add spans for each task context for this block - sp, _ := 
opentracing.StartSpanFromContext(task.ctx, "bloomgateway.ProcessBlock") - spans = append(spans, sp) - md, _ := blockQuerier.Metadata() - blk := bloomshipper.BlockRefFrom(task.Tenant, task.table.String(), md) - sp.LogKV("block", blk.String()) + if sp := opentracing.SpanFromContext(task.ctx); sp != nil { + md, _ := blockQuerier.Metadata() + blk := bloomshipper.BlockRefFrom(task.Tenant, task.table.String(), md) + sp.LogKV("process block", blk.String(), "series", len(task.series)) + } it := v1.NewPeekingIter(task.RequestIter(tokenizer)) iters = append(iters, it) @@ -168,10 +186,18 @@ func (p *processor) processBlock(_ context.Context, blockQuerier *v1.BlockQuerie start := time.Now() err = fq.Run() + duration := time.Since(start) + if err != nil { - p.metrics.blockQueryLatency.WithLabelValues(p.id, labelFailure).Observe(time.Since(start).Seconds()) + p.metrics.blockQueryLatency.WithLabelValues(p.id, labelFailure).Observe(duration.Seconds()) } else { - p.metrics.blockQueryLatency.WithLabelValues(p.id, labelSuccess).Observe(time.Since(start).Seconds()) + p.metrics.blockQueryLatency.WithLabelValues(p.id, labelSuccess).Observe(duration.Seconds()) + } + + for _, task := range tasks { + stats := FromContext(task.ctx) + stats.AddTotalProcessingTime(duration) + stats.IncProcessedBlocks() } return err diff --git a/pkg/bloomgateway/processor_test.go b/pkg/bloomgateway/processor_test.go index d9e6a799045e3..69259a9193945 100644 --- a/pkg/bloomgateway/processor_test.go +++ b/pkg/bloomgateway/processor_test.go @@ -7,16 +7,18 @@ import ( "time" "github.com/go-kit/log" + "github.com/opentracing/opentracing-go" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/util/constants" + 
"github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/util/constants" ) var _ bloomshipper.Store = &dummyStore{} @@ -54,6 +56,10 @@ func (s *dummyStore) FetchMetas(_ context.Context, _ bloomshipper.MetaSearchPara return s.metas, nil } +func (s *dummyStore) TenantFilesForInterval(_ context.Context, _ bloomshipper.Interval, _ func(tenant string, object client.StorageObject) bool) (map[string][]client.StorageObject, error) { + return nil, nil +} + func (s *dummyStore) Fetcher(_ model.Time) (*bloomshipper.Fetcher, error) { return nil, nil } @@ -88,6 +94,9 @@ func (s *dummyStore) FetchBlocks(_ context.Context, refs []bloomshipper.BlockRef func TestProcessor(t *testing.T) { ctx := context.Background() + sp, ctx := opentracing.StartSpanFromContext(ctx, "TestProcessor") + t.Cleanup(sp.Finish) + tenant := "fake" now := mktime("2024-01-27 12:00") metrics := newWorkerMetrics(prometheus.NewPedanticRegistry(), constants.Loki, "bloom_gatway") diff --git a/pkg/bloomgateway/querier.go b/pkg/bloomgateway/querier.go index 32f14b7e668c9..a9d494beb0c7b 100644 --- a/pkg/bloomgateway/querier.go +++ b/pkg/bloomgateway/querier.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/plan" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/plan" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/util/constants" ) type querierMetrics struct { diff --git a/pkg/bloomgateway/querier_test.go b/pkg/bloomgateway/querier_test.go index 0d7872927cc42..f952f3733233e 100644 --- 
a/pkg/bloomgateway/querier_test.go +++ b/pkg/bloomgateway/querier_test.go @@ -10,9 +10,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" ) type noopClient struct { diff --git a/pkg/bloomgateway/stats.go b/pkg/bloomgateway/stats.go new file mode 100644 index 0000000000000..09f78841e544a --- /dev/null +++ b/pkg/bloomgateway/stats.go @@ -0,0 +1,137 @@ +package bloomgateway + +import ( + "context" + "time" + + "go.uber.org/atomic" +) + +type Stats struct { + Status string + NumTasks, NumFilters int + ChunksRequested, ChunksFiltered int + SeriesRequested, SeriesFiltered int + QueueTime *atomic.Duration + MetasFetchTime, BlocksFetchTime *atomic.Duration + ProcessingTime, TotalProcessingTime *atomic.Duration + PostProcessingTime *atomic.Duration + ProcessedBlocks *atomic.Int32 +} + +type statsKey int + +var ctxKey = statsKey(0) + +// ContextWithEmptyStats returns a context with empty stats. +func ContextWithEmptyStats(ctx context.Context) (*Stats, context.Context) { + stats := &Stats{ + Status: "unknown", + ProcessedBlocks: atomic.NewInt32(0), + QueueTime: atomic.NewDuration(0), + MetasFetchTime: atomic.NewDuration(0), + BlocksFetchTime: atomic.NewDuration(0), + ProcessingTime: atomic.NewDuration(0), + TotalProcessingTime: atomic.NewDuration(0), + PostProcessingTime: atomic.NewDuration(0), + } + ctx = context.WithValue(ctx, ctxKey, stats) + return stats, ctx +} + +// FromContext gets the Stats out of the Context. Returns nil if stats have not +// been initialised in the context. 
+func FromContext(ctx context.Context) *Stats { + o := ctx.Value(ctxKey) + if o == nil { + return nil + } + return o.(*Stats) +} + +// aggregates the total duration +func (s *Stats) Duration() (dur time.Duration) { + dur += s.QueueTime.Load() + dur += s.MetasFetchTime.Load() + dur += s.BlocksFetchTime.Load() + dur += s.ProcessingTime.Load() + dur += s.PostProcessingTime.Load() + return +} + +func (s *Stats) KVArgs() []any { + if s == nil { + return []any{} + } + chunksRemaining := s.ChunksRequested - s.ChunksFiltered + filterRatio := float64(s.ChunksFiltered) / float64(max(s.ChunksRequested, 1)) + + return []any{ + "msg", "stats-report", + "status", s.Status, + "tasks", s.NumTasks, + "filters", s.NumFilters, + "blocks_processed", s.ProcessedBlocks.Load(), + "series_requested", s.SeriesRequested, + "series_filtered", s.SeriesFiltered, + "chunks_requested", s.ChunksRequested, + "chunks_filtered", s.ChunksFiltered, + "chunks_remaining", chunksRemaining, + "filter_ratio", filterRatio, + "queue_time", s.QueueTime.Load(), + "metas_fetch_time", s.MetasFetchTime.Load(), + "blocks_fetch_time", s.BlocksFetchTime.Load(), + "processing_time", s.ProcessingTime.Load(), + "post_processing_time", s.PostProcessingTime.Load(), + "duration", s.Duration(), + } +} + +func (s *Stats) AddQueueTime(t time.Duration) { + if s == nil { + return + } + s.QueueTime.Add(t) +} + +func (s *Stats) AddMetasFetchTime(t time.Duration) { + if s == nil { + return + } + s.MetasFetchTime.Add(t) +} + +func (s *Stats) AddBlocksFetchTime(t time.Duration) { + if s == nil { + return + } + s.BlocksFetchTime.Add(t) +} + +func (s *Stats) AddProcessingTime(t time.Duration) { + if s == nil { + return + } + s.ProcessingTime.Add(t) +} + +func (s *Stats) AddTotalProcessingTime(t time.Duration) { + if s == nil { + return + } + s.TotalProcessingTime.Add(t) +} + +func (s *Stats) AddPostProcessingTime(t time.Duration) { + if s == nil { + return + } + s.PostProcessingTime.Add(t) +} + +func (s *Stats) IncProcessedBlocks() { 
+ if s == nil { + return + } + s.ProcessedBlocks.Inc() +} diff --git a/pkg/bloomgateway/util.go b/pkg/bloomgateway/util.go index 5f2f2e31f79c2..bf5f5ee1501e4 100644 --- a/pkg/bloomgateway/util.go +++ b/pkg/bloomgateway/util.go @@ -7,10 +7,10 @@ import ( "github.com/prometheus/common/model" "golang.org/x/exp/slices" - "github.com/grafana/loki/pkg/logproto" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/logproto" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) func getDayTime(ts model.Time) time.Time { @@ -78,7 +78,7 @@ func partitionTasks(tasks []Task, blocks []bloomshipper.BlockRef) []blockWithTas }) // All fingerprints fall outside of the consumer's range - if min == len(refs) || max == 0 { + if min == len(refs) || max == 0 || min == max { continue } diff --git a/pkg/bloomgateway/util_test.go b/pkg/bloomgateway/util_test.go index 0eb94f68c7dbf..ed293566886a7 100644 --- a/pkg/bloomgateway/util_test.go +++ b/pkg/bloomgateway/util_test.go @@ -7,10 +7,10 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/logproto" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) func parseDayTime(s string) config.DayTime { @@ -136,6 +136,26 @@ func TestPartitionTasks(t *testing.T) { require.Len(t, res.tasks[0].series, 90) } }) + + t.Run("block series before and after task series", func(t *testing.T) { + 
bounds := []bloomshipper.BlockRef{ + mkBlockRef(100, 200), + } + + tasks := []Task{ + { + series: []*logproto.GroupedChunkRefs{ + {Fingerprint: 50}, + {Fingerprint: 75}, + {Fingerprint: 250}, + {Fingerprint: 300}, + }, + }, + } + + results := partitionTasks(tasks, bounds) + require.Len(t, results, 0) + }) } func TestPartitionRequest(t *testing.T) { @@ -334,7 +354,7 @@ func createBlocks(t *testing.T, tenant string, n int, from, through model.Time, // } // } querier := &bloomshipper.CloseableBlockQuerier{ - BlockQuerier: v1.NewBlockQuerier(block), + BlockQuerier: v1.NewBlockQuerier(block, false, v1.DefaultMaxPageSize), BlockRef: blockRef, } queriers = append(queriers, querier) diff --git a/pkg/bloomgateway/worker.go b/pkg/bloomgateway/worker.go index 52de8155d7783..fab243f29613a 100644 --- a/pkg/bloomgateway/worker.go +++ b/pkg/bloomgateway/worker.go @@ -11,9 +11,9 @@ import ( "github.com/prometheus/common/model" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/queue" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/queue" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) const ( @@ -103,6 +103,7 @@ func (w *worker) running(_ context.Context) error { level.Debug(w.logger).Log("msg", "dequeued task", "task", task.ID) _ = w.pending.Dec() w.metrics.queueDuration.WithLabelValues(w.id).Observe(time.Since(task.enqueueTime).Seconds()) + FromContext(task.ctx).AddQueueTime(time.Since(task.enqueueTime)) tasks = append(tasks, task) first, last := getFirstLast(task.series) diff --git a/pkg/bloomutils/ring.go b/pkg/bloomutils/ring.go index 9858f63e6ba3d..9743298e89b4d 100644 --- a/pkg/bloomutils/ring.go +++ b/pkg/bloomutils/ring.go @@ -13,7 +13,7 @@ import ( "golang.org/x/exp/constraints" "golang.org/x/exp/slices" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 
"github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) var ( diff --git a/pkg/bloomutils/ring_test.go b/pkg/bloomutils/ring_test.go index a6ef7374f527f..8a373696c7c92 100644 --- a/pkg/bloomutils/ring_test.go +++ b/pkg/bloomutils/ring_test.go @@ -8,7 +8,7 @@ import ( "github.com/grafana/dskit/ring" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func uint64Range(min, max uint64) Range[uint64] { diff --git a/pkg/canary/comparator/comparator.go b/pkg/canary/comparator/comparator.go index e7234df191f9c..8f57af09ba783 100644 --- a/pkg/canary/comparator/comparator.go +++ b/pkg/canary/comparator/comparator.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/canary/reader" + "github.com/grafana/loki/v3/pkg/canary/reader" ) const ( diff --git a/pkg/canary/reader/reader.go b/pkg/canary/reader/reader.go index 0725a658b4680..4576ca7a70ed8 100644 --- a/pkg/canary/reader/reader.go +++ b/pkg/canary/reader/reader.go @@ -23,10 +23,10 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/common/config" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/util/build" - "github.com/grafana/loki/pkg/util/unmarshal" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/unmarshal" ) var ( diff --git a/pkg/canary/writer/push.go b/pkg/canary/writer/push.go index 799f265451f97..15d0b1ba8d6f4 100644 --- a/pkg/canary/writer/push.go +++ b/pkg/canary/writer/push.go @@ -19,8 +19,8 @@ import ( "github.com/prometheus/common/config" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util/build" + 
"github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util/build" ) const ( diff --git a/pkg/canary/writer/push_test.go b/pkg/canary/writer/push_test.go index 89204f2a00a82..b61272eb2ec37 100644 --- a/pkg/canary/writer/push_test.go +++ b/pkg/canary/writer/push_test.go @@ -17,8 +17,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/pkg/chunkenc/dumb_chunk.go b/pkg/chunkenc/dumb_chunk.go index 793e0b751829b..e2d520df6e024 100644 --- a/pkg/chunkenc/dumb_chunk.go +++ b/pkg/chunkenc/dumb_chunk.go @@ -6,10 +6,10 @@ import ( "sort" "time" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/util/filter" ) const ( diff --git a/pkg/chunkenc/facade.go b/pkg/chunkenc/facade.go index d66e994539f28..22a6e760d590b 100644 --- a/pkg/chunkenc/facade.go +++ b/pkg/chunkenc/facade.go @@ -6,8 +6,8 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/filter" ) // GzipLogChunk is a cortex encoding type for our chunks. 
diff --git a/pkg/chunkenc/hash_test.go b/pkg/chunkenc/hash_test.go index 0ca899a72cbe9..e75251f57cb5c 100644 --- a/pkg/chunkenc/hash_test.go +++ b/pkg/chunkenc/hash_test.go @@ -8,7 +8,7 @@ import ( "github.com/segmentio/fasthash/fnv1a" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc/testdata" + "github.com/grafana/loki/v3/pkg/chunkenc/testdata" ) var res uint64 diff --git a/pkg/chunkenc/interface.go b/pkg/chunkenc/interface.go index 0985f4a883c22..b96d9f705d092 100644 --- a/pkg/chunkenc/interface.go +++ b/pkg/chunkenc/interface.go @@ -8,10 +8,10 @@ import ( "strings" "time" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/util/filter" ) // Errors returned by the chunk interface. diff --git a/pkg/chunkenc/memchunk.go b/pkg/chunkenc/memchunk.go index df59db7c755c8..107e3c71a97d5 100644 --- a/pkg/chunkenc/memchunk.go +++ b/pkg/chunkenc/memchunk.go @@ -17,13 +17,13 @@ import ( "github.com/pkg/errors" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/chunkenc/memchunk_test.go b/pkg/chunkenc/memchunk_test.go index 
071d7bab21538..8fc3eaab5ab34 100644 --- a/pkg/chunkenc/memchunk_test.go +++ b/pkg/chunkenc/memchunk_test.go @@ -18,15 +18,16 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc/testdata" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" "github.com/grafana/loki/pkg/push" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/filter" + + "github.com/grafana/loki/v3/pkg/chunkenc/testdata" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/filter" ) var testEncoding = []Encoding{ diff --git a/pkg/chunkenc/pool.go b/pkg/chunkenc/pool.go index 4b6cf7abb90bc..486bef44b3da8 100644 --- a/pkg/chunkenc/pool.go +++ b/pkg/chunkenc/pool.go @@ -14,7 +14,7 @@ import ( "github.com/pierrec/lz4/v4" "github.com/prometheus/prometheus/util/pool" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // WriterPool is a pool of io.Writer diff --git a/pkg/chunkenc/symbols.go b/pkg/chunkenc/symbols.go index cb1c5586775ae..bed4035400c74 100644 --- a/pkg/chunkenc/symbols.go +++ b/pkg/chunkenc/symbols.go @@ -11,7 +11,7 @@ import ( "github.com/pkg/errors" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) // symbol holds reference to a label name and value pair diff --git a/pkg/chunkenc/unordered.go b/pkg/chunkenc/unordered.go index 883be05154f2d..788f9c0a7c45b 100644 --- a/pkg/chunkenc/unordered.go +++ b/pkg/chunkenc/unordered.go @@ -14,10 +14,10 @@ import ( 
"github.com/pkg/errors" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) var noopStreamPipeline = log.NewNoopPipeline().ForStream(labels.Labels{}) diff --git a/pkg/chunkenc/unordered_test.go b/pkg/chunkenc/unordered_test.go index d92c2f1054bb9..f4930952660fc 100644 --- a/pkg/chunkenc/unordered_test.go +++ b/pkg/chunkenc/unordered_test.go @@ -12,10 +12,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) func iterEq(t *testing.T, exp []entry, got iter.EntryIterator) { diff --git a/pkg/chunkenc/util_test.go b/pkg/chunkenc/util_test.go index 3b5118495ddc6..a1860f9ae297a 100644 --- a/pkg/chunkenc/util_test.go +++ b/pkg/chunkenc/util_test.go @@ -4,8 +4,8 @@ import ( "math/rand" "time" - "github.com/grafana/loki/pkg/chunkenc/testdata" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/chunkenc/testdata" + "github.com/grafana/loki/v3/pkg/logproto" ) func logprotoEntry(ts int64, line string) *logproto.Entry { diff --git a/pkg/compactor/client/grpc.go b/pkg/compactor/client/grpc.go index 89fdbd7015eec..6a03b6b4a0c40 100644 --- a/pkg/compactor/client/grpc.go +++ b/pkg/compactor/client/grpc.go @@ -12,8 +12,8 @@ import ( "github.com/prometheus/common/model" "google.golang.org/grpc" - deletion_grpc 
"github.com/grafana/loki/pkg/compactor/client/grpc" - "github.com/grafana/loki/pkg/compactor/deletion" + deletion_grpc "github.com/grafana/loki/v3/pkg/compactor/client/grpc" + "github.com/grafana/loki/v3/pkg/compactor/deletion" ) type GRPCConfig struct { diff --git a/pkg/compactor/client/http.go b/pkg/compactor/client/http.go index ea30094055519..7e8120067ae80 100644 --- a/pkg/compactor/client/http.go +++ b/pkg/compactor/client/http.go @@ -13,8 +13,8 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/dskit/crypto/tls" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/compactor/compactor.go b/pkg/compactor/compactor.go index 75bd575e2c77c..f6fa95f10a4ad 100644 --- a/pkg/compactor/compactor.go +++ b/pkg/compactor/compactor.go @@ -19,18 +19,18 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" - lokiring "github.com/grafana/loki/pkg/util/ring" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + chunk_util 
"github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log "github.com/grafana/loki/v3/pkg/util/log" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/validation" ) // Here is how the generic compactor works: @@ -102,7 +102,7 @@ func (cfg *Config) RegisterFlags(f *flag.FlagSet) { f.StringVar(&cfg.DeleteRequestStoreKeyPrefix, "compactor.delete-request-store.key-prefix", "index/", "Path prefix for storing delete requests.") f.IntVar(&cfg.DeleteBatchSize, "compactor.delete-batch-size", 70, "The max number of delete requests to run per compaction cycle.") f.DurationVar(&cfg.DeleteRequestCancelPeriod, "compactor.delete-request-cancel-period", 24*time.Hour, "Allow cancellation of delete request until duration after they are created. Data would be deleted only after delete requests have been older than this duration. Ideally this should be set to at least 24h.") - f.DurationVar(&cfg.DeleteMaxInterval, "compactor.delete-max-interval", 24*time.Hour, "Constrain the size of any single delete request. When a delete request > delete_max_interval is input, the request is sharded into smaller requests of no more than delete_max_interval") + f.DurationVar(&cfg.DeleteMaxInterval, "compactor.delete-max-interval", 24*time.Hour, "Constrain the size of any single delete request with line filters. When a delete request > delete_max_interval is input, the request is sharded into smaller requests of no more than delete_max_interval") f.DurationVar(&cfg.RetentionTableTimeout, "compactor.retention-table-timeout", 0, "The maximum amount of time to spend running retention and deletion on any given table in the index.") f.IntVar(&cfg.MaxCompactionParallelism, "compactor.max-compaction-parallelism", 1, "Maximum number of tables to compact in parallel. 
While increasing this value, please make sure compactor has enough disk space allocated to be able to store and compact as many tables.") f.IntVar(&cfg.UploadParallelism, "compactor.upload-parallelism", 10, "Number of upload/remove operations to execute in parallel when finalizing a compaction. NOTE: This setting is per compaction operation, which can be executed in parallel. The upper bound on the number of concurrent uploads is upload_parallelism * max_compaction_parallelism.") diff --git a/pkg/compactor/compactor_test.go b/pkg/compactor/compactor_test.go index cfcc55e456d0d..3fccbb237b78f 100644 --- a/pkg/compactor/compactor_test.go +++ b/pkg/compactor/compactor_test.go @@ -15,12 +15,12 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/constants" - loki_net "github.com/grafana/loki/pkg/util/net" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/constants" + loki_net "github.com/grafana/loki/v3/pkg/util/net" + "github.com/grafana/loki/v3/pkg/validation" ) const indexTablePrefix = "table_" diff --git a/pkg/compactor/deletion/delete_request.go b/pkg/compactor/deletion/delete_request.go index 5af9716a74ed3..9ce7f381fb105 100644 --- a/pkg/compactor/deletion/delete_request.go +++ b/pkg/compactor/deletion/delete_request.go @@ -7,10 +7,10 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" + 
"github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type timeInterval struct { diff --git a/pkg/compactor/deletion/delete_request_test.go b/pkg/compactor/deletion/delete_request_test.go index bd83f95913825..f67a06dc483fb 100644 --- a/pkg/compactor/deletion/delete_request_test.go +++ b/pkg/compactor/deletion/delete_request_test.go @@ -12,9 +12,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util/filter" ) const ( diff --git a/pkg/compactor/deletion/delete_requests_client.go b/pkg/compactor/deletion/delete_requests_client.go index 62b6f509880e2..8395b33cd9f39 100644 --- a/pkg/compactor/deletion/delete_requests_client.go +++ b/pkg/compactor/deletion/delete_requests_client.go @@ -7,7 +7,7 @@ import ( "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/log" ) type CompactorClient interface { diff --git a/pkg/compactor/deletion/delete_requests_manager.go b/pkg/compactor/deletion/delete_requests_manager.go index 0e22439dd2a6b..ba99625b2dd96 100644 --- a/pkg/compactor/deletion/delete_requests_manager.go +++ b/pkg/compactor/deletion/delete_requests_manager.go @@ -12,10 +12,10 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/compactor/deletionmode" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor/deletionmode" + 
"github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( @@ -126,10 +126,25 @@ func (d *DeleteRequestsManager) loadDeleteRequestsToProcess() error { return err } + reqCount := 0 for i := range deleteRequests { deleteRequest := deleteRequests[i] - if i >= d.batchSize { - logBatchTruncation(i, len(deleteRequests)) + maxRetentionInterval := getMaxRetentionInterval(deleteRequest.UserID, d.limits) + // retention interval 0 means retain the data forever + if maxRetentionInterval != 0 { + oldestRetainedLogTimestamp := model.Now().Add(-maxRetentionInterval) + if deleteRequest.StartTime.Before(oldestRetainedLogTimestamp) && deleteRequest.EndTime.Before(oldestRetainedLogTimestamp) { + level.Info(util_log.Logger).Log( + "msg", "Marking delete request with interval beyond retention period as processed", + "delete_request_id", deleteRequest.RequestID, + "user", deleteRequest.UserID, + ) + d.markRequestAsProcessed(deleteRequest) + continue + } + } + if reqCount >= d.batchSize { + logBatchTruncation(reqCount, len(deleteRequests)) break } @@ -149,6 +164,7 @@ func (d *DeleteRequestsManager) loadDeleteRequestsToProcess() error { if deleteRequest.EndTime > ur.requestsInterval.End { ur.requestsInterval.End = deleteRequest.EndTime } + reqCount++ } return nil @@ -305,6 +321,28 @@ func (d *DeleteRequestsManager) MarkPhaseTimedOut() { d.deleteRequestsToProcess = map[string]*userDeleteRequests{} } +func (d *DeleteRequestsManager) markRequestAsProcessed(deleteRequest DeleteRequest) { + if err := d.deleteRequestsStore.UpdateStatus(context.Background(), deleteRequest, StatusProcessed); err != nil { + level.Error(util_log.Logger).Log( + "msg", "failed to mark delete request for user as processed", + "delete_request_id", deleteRequest.RequestID, + "sequence_num", deleteRequest.SequenceNum, + "user", deleteRequest.UserID, + "err", err, + "deleted_lines", deleteRequest.DeletedLines, + ) + } 
else { + level.Info(util_log.Logger).Log( + "msg", "delete request for user marked as processed", + "delete_request_id", deleteRequest.RequestID, + "sequence_num", deleteRequest.SequenceNum, + "user", deleteRequest.UserID, + "deleted_lines", deleteRequest.DeletedLines, + ) + d.metrics.deleteRequestsProcessedTotal.WithLabelValues(deleteRequest.UserID).Inc() + } +} + func (d *DeleteRequestsManager) MarkPhaseFinished() { d.deleteRequestsToProcessMtx.Lock() defer d.deleteRequestsToProcessMtx.Unlock() @@ -315,25 +353,7 @@ func (d *DeleteRequestsManager) MarkPhaseFinished() { } for _, deleteRequest := range userDeleteRequests.requests { - if err := d.deleteRequestsStore.UpdateStatus(context.Background(), *deleteRequest, StatusProcessed); err != nil { - level.Error(util_log.Logger).Log( - "msg", "failed to mark delete request for user as processed", - "delete_request_id", deleteRequest.RequestID, - "sequence_num", deleteRequest.SequenceNum, - "user", deleteRequest.UserID, - "err", err, - "deleted_lines", deleteRequest.DeletedLines, - ) - } else { - level.Info(util_log.Logger).Log( - "msg", "delete request for user marked as processed", - "delete_request_id", deleteRequest.RequestID, - "sequence_num", deleteRequest.SequenceNum, - "user", deleteRequest.UserID, - "deleted_lines", deleteRequest.DeletedLines, - ) - } - d.metrics.deleteRequestsProcessedTotal.WithLabelValues(deleteRequest.UserID).Inc() + d.markRequestAsProcessed(*deleteRequest) } } } @@ -355,3 +375,21 @@ func (d *DeleteRequestsManager) IntervalMayHaveExpiredChunks(_ model.Interval, u func (d *DeleteRequestsManager) DropFromIndex(_ retention.ChunkEntry, _ model.Time, _ model.Time) bool { return false } + +func getMaxRetentionInterval(userID string, limits Limits) time.Duration { + maxRetention := model.Duration(limits.RetentionPeriod(userID)) + if maxRetention == 0 { + return 0 + } + + for _, streamRetention := range limits.StreamRetention(userID) { + if streamRetention.Period == 0 { + return 0 + } + if 
streamRetention.Period > maxRetention { + maxRetention = streamRetention.Period + } + } + + return time.Duration(maxRetention) +} diff --git a/pkg/compactor/deletion/delete_requests_manager_test.go b/pkg/compactor/deletion/delete_requests_manager_test.go index c2777c9801b7e..04aa986ac492d 100644 --- a/pkg/compactor/deletion/delete_requests_manager_test.go +++ b/pkg/compactor/deletion/delete_requests_manager_test.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/compactor/deletionmode" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/compactor/deletionmode" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util/filter" ) const testUserID = "test-user" @@ -41,12 +41,13 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { } for _, tc := range []struct { - name string - deletionMode deletionmode.Mode - deleteRequestsFromStore []DeleteRequest - batchSize int - expectedResp resp - expectedDeletionRangeByUser map[string]model.Interval + name string + deletionMode deletionmode.Mode + deleteRequestsFromStore []DeleteRequest + batchSize int + expectedResp resp + expectedDeletionRangeByUser map[string]model.Interval + expectedRequestsMarkedAsProcessed []int }{ { name: "no delete requests", @@ -66,6 +67,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -77,6 +79,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "no relevant delete requests", @@ -88,6 +91,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), 
StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -99,6 +103,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "delete request not matching labels", @@ -110,6 +115,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: `{fizz="buzz"}`, StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -121,6 +127,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "whole chunk deleted by single request", @@ -132,6 +139,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -143,6 +151,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "whole chunk deleted by single request with line filters", @@ -154,6 +163,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithLineFilters, StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -168,6 +178,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "whole chunk deleted by single request with structured metadata filters", @@ -179,6 +190,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -193,6 +205,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "whole chunk deleted by single request with line and structured metadata filters", @@ 
-204,6 +217,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithLineAndStructuredMetadataFilters, StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -218,6 +232,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "deleted interval out of range", @@ -229,6 +244,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-48 * time.Hour), EndTime: now.Add(-24 * time.Hour), + Status: StatusReceived, }, }, expectedResp: resp{ @@ -240,6 +256,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now.Add(-24 * time.Hour), }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "deleted interval out of range(with multiple user requests)", @@ -251,12 +268,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-48 * time.Hour), EndTime: now.Add(-24 * time.Hour), + Status: StatusReceived, }, { UserID: "different-user", Query: lblFoo.String(), StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -272,6 +291,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple delete requests with one deleting the whole chunk", @@ -283,12 +303,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-48 * time.Hour), EndTime: now.Add(-24 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-12 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -300,6 +322,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple delete requests with line filters and one 
deleting the whole chunk", @@ -311,12 +334,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithLineFilters, StartTime: now.Add(-48 * time.Hour), EndTime: now.Add(-24 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithLineFilters, StartTime: now.Add(-12 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -331,6 +356,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple delete requests with structured metadata filters and one deleting the whole chunk", @@ -342,12 +368,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-48 * time.Hour), EndTime: now.Add(-24 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-12 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -362,6 +390,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple delete requests causing multiple holes", @@ -373,24 +402,28 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-11 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-10 * time.Hour), EndTime: now.Add(-8 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-6 * time.Hour), EndTime: now.Add(-5 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-2 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -412,6 +445,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + 
expectedRequestsMarkedAsProcessed: []int{0, 1, 2, 3}, }, { name: "multiple overlapping requests deleting the whole chunk", @@ -423,12 +457,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-6 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-8 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -443,6 +479,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple overlapping requests with line filters deleting the whole chunk", @@ -454,12 +491,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithLineFilters, StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-6 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithLineFilters, StartTime: now.Add(-8 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -474,6 +513,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple overlapping requests with structured metadata filters deleting the whole chunk", @@ -485,12 +525,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-6 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-8 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -505,6 +547,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple non-overlapping requests deleting the whole chunk", @@ -516,18 +559,21 @@ func 
TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-12 * time.Hour), EndTime: now.Add(-6*time.Hour) - 1, + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-6 * time.Hour), EndTime: now.Add(-4*time.Hour) - 1, + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-4 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -542,6 +588,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1, 2}, }, { name: "multiple non-overlapping requests with line filter deleting the whole chunk", @@ -553,18 +600,21 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithLineFilters, StartTime: now.Add(-12 * time.Hour), EndTime: now.Add(-6*time.Hour) - 1, + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithLineFilters, StartTime: now.Add(-6 * time.Hour), EndTime: now.Add(-4*time.Hour) - 1, + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithLineFilters, StartTime: now.Add(-4 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -579,6 +629,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1, 2}, }, { name: "multiple non-overlapping requests with structured metadata filter deleting the whole chunk", @@ -590,18 +641,21 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-12 * time.Hour), EndTime: now.Add(-6*time.Hour) - 1, + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-6 * time.Hour), EndTime: now.Add(-4*time.Hour) - 1, + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-4 * 
time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -616,6 +670,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1, 2}, }, { name: "deletes are disabled", @@ -627,24 +682,28 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-11 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-10 * time.Hour), EndTime: now.Add(-8 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-6 * time.Hour), EndTime: now.Add(-5 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-2 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -661,24 +720,28 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-11 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-10 * time.Hour), EndTime: now.Add(-8 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-6 * time.Hour), EndTime: now.Add(-5 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-2 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -695,24 +758,28 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-2 * time.Hour), EndTime: now, + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-6 * time.Hour), EndTime: now.Add(-5 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-10 * time.Hour), EndTime: now.Add(-8 * time.Hour), + Status: 
StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-11 * time.Hour), + Status: StatusReceived, }, }, expectedResp: resp{ @@ -733,10 +800,108 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now.Add(-8 * time.Hour), }, }, + expectedRequestsMarkedAsProcessed: []int{2, 3}, + }, + { + name: "Deletes beyond retention are marked as processed straight away without being batched for processing", + deletionMode: deletionmode.FilterAndDelete, + batchSize: 2, + deleteRequestsFromStore: []DeleteRequest{ + { + UserID: "different-user", + Query: lblFoo.String(), + StartTime: now.Add(-14 * 24 * time.Hour), + EndTime: now.Add(-10 * 24 * time.Hour), + Status: StatusReceived, + }, + { + UserID: testUserID, + Query: lblFoo.String(), + StartTime: now.Add(-14 * 24 * time.Hour), + EndTime: now.Add(-10 * 24 * time.Hour), + Status: StatusReceived, + }, + { + UserID: testUserID, + Query: lblFoo.String(), + StartTime: now.Add(-2 * time.Hour), + EndTime: now, + Status: StatusReceived, + }, + { + UserID: testUserID, + Query: lblFoo.String(), + StartTime: now.Add(-6 * time.Hour), + EndTime: now.Add(-5 * time.Hour), + Status: StatusReceived, + }, + { + UserID: testUserID, + Query: lblFoo.String(), + StartTime: now.Add(-10 * time.Hour), + EndTime: now.Add(-8 * time.Hour), + Status: StatusReceived, + }, + { + UserID: testUserID, + Query: lblFoo.String(), + StartTime: now.Add(-13 * time.Hour), + EndTime: now.Add(-11 * time.Hour), + Status: StatusReceived, + }, + }, + expectedResp: resp{ + isExpired: true, + expectedFilter: func(ts time.Time, s string, _ ...labels.Label) bool { + tsUnixNano := ts.UnixNano() + if (now.Add(-13*time.Hour).UnixNano() <= tsUnixNano && tsUnixNano <= now.Add(-11*time.Hour).UnixNano()) || + (now.Add(-10*time.Hour).UnixNano() <= tsUnixNano && tsUnixNano <= now.Add(-8*time.Hour).UnixNano()) { + return true + } + + return false + }, + }, + expectedDeletionRangeByUser: map[string]model.Interval{ 
+ testUserID: { + Start: now.Add(-13 * time.Hour), + End: now.Add(-8 * time.Hour), + }, + }, + expectedRequestsMarkedAsProcessed: []int{0, 1, 4, 5}, + }, + { + name: "All deletes beyond retention", + deletionMode: deletionmode.FilterAndDelete, + batchSize: 2, + deleteRequestsFromStore: []DeleteRequest{ + { + UserID: "different-user", + Query: lblFoo.String(), + StartTime: now.Add(-14 * 24 * time.Hour), + EndTime: now.Add(-10 * 24 * time.Hour), + Status: StatusReceived, + }, + { + UserID: testUserID, + Query: lblFoo.String(), + StartTime: now.Add(-14 * 24 * time.Hour), + EndTime: now.Add(-10 * 24 * time.Hour), + Status: StatusReceived, + }, + }, + expectedResp: resp{ + isExpired: false, + }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, } { t.Run(tc.name, func(t *testing.T) { - mgr := NewDeleteRequestsManager(&mockDeleteRequestsStore{deleteRequests: tc.deleteRequestsFromStore}, time.Hour, tc.batchSize, &fakeLimits{mode: tc.deletionMode.String()}, nil) + mockDeleteRequestsStore := &mockDeleteRequestsStore{deleteRequests: tc.deleteRequestsFromStore} + mgr := NewDeleteRequestsManager(mockDeleteRequestsStore, time.Hour, tc.batchSize, &fakeLimits{defaultLimit: limit{ + retentionPeriod: 7 * 24 * time.Hour, + deletionMode: tc.deletionMode.String(), + }}, nil) require.NoError(t, mgr.loadDeleteRequestsToProcess()) for _, deleteRequests := range mgr.deleteRequestsToProcess { @@ -749,28 +914,38 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { require.Equal(t, tc.expectedResp.isExpired, isExpired) if tc.expectedResp.expectedFilter == nil { require.Nil(t, filterFunc) - return - } - require.NotNil(t, filterFunc) + } else { + require.NotNil(t, filterFunc) - for start := chunkEntry.From; start <= chunkEntry.Through; start = start.Add(time.Minute) { - line := "foo bar" - if start.Time().Minute()%2 == 1 { - line = "fizz buzz" + for start := chunkEntry.From; start <= chunkEntry.Through; start = start.Add(time.Minute) { + line := "foo bar" + if start.Time().Minute()%2 
== 1 { + line = "fizz buzz" + } + // mix of empty, ding=dong and ping=pong as structured metadata + var structuredMetadata []labels.Label + if start.Time().Minute()%3 == 0 { + structuredMetadata = []labels.Label{{Name: lblPing, Value: lblPong}} + } else if start.Time().Minute()%2 == 0 { + structuredMetadata = []labels.Label{{Name: "ting", Value: "tong"}} + } + require.Equal(t, tc.expectedResp.expectedFilter(start.Time(), line, structuredMetadata...), filterFunc(start.Time(), line, structuredMetadata...), "line", line, "time", start.Time(), "now", now.Time()) } - // mix of empty, ding=dong and ping=pong as structured metadata - var structuredMetadata []labels.Label - if start.Time().Minute()%3 == 0 { - structuredMetadata = []labels.Label{{Name: lblPing, Value: lblPong}} - } else if start.Time().Minute()%2 == 0 { - structuredMetadata = []labels.Label{{Name: "ting", Value: "tong"}} + + require.Equal(t, len(tc.expectedDeletionRangeByUser), len(mgr.deleteRequestsToProcess)) + for userID, dr := range tc.expectedDeletionRangeByUser { + require.Equal(t, dr, mgr.deleteRequestsToProcess[userID].requestsInterval) } - require.Equal(t, tc.expectedResp.expectedFilter(start.Time(), line, structuredMetadata...), filterFunc(start.Time(), line, structuredMetadata...), "line", line, "time", start.Time(), "now", now.Time()) } - require.Equal(t, len(tc.expectedDeletionRangeByUser), len(mgr.deleteRequestsToProcess)) - for userID, dr := range tc.expectedDeletionRangeByUser { - require.Equal(t, dr, mgr.deleteRequestsToProcess[userID].requestsInterval) + mgr.MarkPhaseFinished() + + processedRequests, err := mockDeleteRequestsStore.GetDeleteRequestsByStatus(context.Background(), StatusProcessed) + require.NoError(t, err) + require.Len(t, processedRequests, len(tc.expectedRequestsMarkedAsProcessed)) + + for i, reqIdx := range tc.expectedRequestsMarkedAsProcessed { + require.True(t, requestsAreEqual(tc.deleteRequestsFromStore[reqIdx], processedRequests[i])) } }) } @@ -782,18 +957,18 @@ func 
TestDeleteRequestsManager_IntervalMayHaveExpiredChunks(t *testing.T) { hasChunks bool user string }{ - {[]DeleteRequest{{Query: `0`, UserID: "test-user", StartTime: 0, EndTime: 100}}, true, "test-user"}, - {[]DeleteRequest{{Query: `1`, UserID: "test-user", StartTime: 200, EndTime: 400}}, true, "test-user"}, - {[]DeleteRequest{{Query: `2`, UserID: "test-user", StartTime: 400, EndTime: 500}}, true, "test-user"}, - {[]DeleteRequest{{Query: `3`, UserID: "test-user", StartTime: 500, EndTime: 700}}, true, "test-user"}, - {[]DeleteRequest{{Query: `3`, UserID: "other-user", StartTime: 500, EndTime: 700}}, false, "test-user"}, - {[]DeleteRequest{{Query: `4`, UserID: "test-user", StartTime: 700, EndTime: 900}}, true, "test-user"}, - {[]DeleteRequest{{Query: `4`, UserID: "", StartTime: 700, EndTime: 900}}, true, ""}, + {[]DeleteRequest{{Query: `0`, UserID: "test-user", StartTime: 0, EndTime: 100, Status: StatusReceived}}, true, "test-user"}, + {[]DeleteRequest{{Query: `1`, UserID: "test-user", StartTime: 200, EndTime: 400, Status: StatusReceived}}, true, "test-user"}, + {[]DeleteRequest{{Query: `2`, UserID: "test-user", StartTime: 400, EndTime: 500, Status: StatusReceived}}, true, "test-user"}, + {[]DeleteRequest{{Query: `3`, UserID: "test-user", StartTime: 500, EndTime: 700, Status: StatusReceived}}, true, "test-user"}, + {[]DeleteRequest{{Query: `3`, UserID: "other-user", StartTime: 500, EndTime: 700, Status: StatusReceived}}, false, "test-user"}, + {[]DeleteRequest{{Query: `4`, UserID: "test-user", StartTime: 700, EndTime: 900, Status: StatusReceived}}, true, "test-user"}, + {[]DeleteRequest{{Query: `4`, UserID: "", StartTime: 700, EndTime: 900, Status: StatusReceived}}, true, ""}, {[]DeleteRequest{}, false, ""}, } for _, tc := range tt { - mgr := NewDeleteRequestsManager(&mockDeleteRequestsStore{deleteRequests: tc.deleteRequestsFromStore}, time.Hour, 70, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}, nil) + mgr := 
NewDeleteRequestsManager(&mockDeleteRequestsStore{deleteRequests: tc.deleteRequestsFromStore}, time.Hour, 70, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}, nil) require.NoError(t, mgr.loadDeleteRequestsToProcess()) interval := model.Interval{Start: 300, End: 600} @@ -823,8 +998,14 @@ type mockDeleteRequestsStore struct { genNumber string } -func (m *mockDeleteRequestsStore) GetDeleteRequestsByStatus(_ context.Context, _ DeleteRequestStatus) ([]DeleteRequest, error) { - return m.deleteRequests, nil +func (m *mockDeleteRequestsStore) GetDeleteRequestsByStatus(_ context.Context, status DeleteRequestStatus) ([]DeleteRequest, error) { + reqs := make([]DeleteRequest, 0, len(m.deleteRequests)) + for i := range m.deleteRequests { + if m.deleteRequests[i].Status == status { + reqs = append(reqs, m.deleteRequests[i]) + } + } + return reqs, nil } func (m *mockDeleteRequestsStore) AddDeleteRequestGroup(_ context.Context, reqs []DeleteRequest) ([]DeleteRequest, error) { @@ -854,3 +1035,24 @@ func (m *mockDeleteRequestsStore) GetAllDeleteRequestsForUser(_ context.Context, func (m *mockDeleteRequestsStore) GetCacheGenerationNumber(_ context.Context, _ string) (string, error) { return m.genNumber, m.getErr } + +func (m *mockDeleteRequestsStore) UpdateStatus(_ context.Context, req DeleteRequest, newStatus DeleteRequestStatus) error { + for i := range m.deleteRequests { + if requestsAreEqual(m.deleteRequests[i], req) { + m.deleteRequests[i].Status = newStatus + } + } + + return nil +} + +func requestsAreEqual(req1, req2 DeleteRequest) bool { + if req1.UserID == req2.UserID && + req1.Query == req2.Query && + req1.StartTime == req2.StartTime && + req1.EndTime == req2.EndTime { + return true + } + + return false +} diff --git a/pkg/compactor/deletion/delete_requests_store.go b/pkg/compactor/deletion/delete_requests_store.go index 20212467c15d7..ee8f324d6b0be 100644 --- a/pkg/compactor/deletion/delete_requests_store.go +++ 
b/pkg/compactor/deletion/delete_requests_store.go @@ -17,8 +17,8 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" ) type ( diff --git a/pkg/compactor/deletion/delete_requests_store_test.go b/pkg/compactor/deletion/delete_requests_store_test.go index 0bf4cb27f7bd8..fa02a44bc7598 100644 --- a/pkg/compactor/deletion/delete_requests_store_test.go +++ b/pkg/compactor/deletion/delete_requests_store_test.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" ) func TestDeleteRequestsStore(t *testing.T) { diff --git a/pkg/compactor/deletion/delete_requests_table.go b/pkg/compactor/deletion/delete_requests_table.go index 3143a51cc5444..80a47a5e6435b 100644 --- a/pkg/compactor/deletion/delete_requests_table.go +++ b/pkg/compactor/deletion/delete_requests_table.go @@ -13,12 +13,12 @@ import ( "github.com/go-kit/log/level" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - shipper_util "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + 
"github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + shipper_util "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type deleteRequestsTable struct { diff --git a/pkg/compactor/deletion/delete_requests_table_test.go b/pkg/compactor/deletion/delete_requests_table_test.go index 4b30a9bc67b1f..b5fcacaa5d383 100644 --- a/pkg/compactor/deletion/delete_requests_table_test.go +++ b/pkg/compactor/deletion/delete_requests_table_test.go @@ -8,11 +8,11 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/testutil" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/testutil" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" ) func TestDeleteRequestsTable(t *testing.T) { diff --git a/pkg/compactor/deletion/grpc_request_handler.go b/pkg/compactor/deletion/grpc_request_handler.go index 94ba7c163524b..bf68c397043b4 100644 --- a/pkg/compactor/deletion/grpc_request_handler.go +++ b/pkg/compactor/deletion/grpc_request_handler.go @@ -8,8 +8,8 @@ import ( "github.com/grafana/dskit/tenant" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/compactor/client/grpc" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor/client/grpc" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type GRPCRequestHandler struct { diff --git 
a/pkg/compactor/deletion/grpc_request_handler_test.go b/pkg/compactor/deletion/grpc_request_handler_test.go index f0b2002e8d590..612777e9101cc 100644 --- a/pkg/compactor/deletion/grpc_request_handler_test.go +++ b/pkg/compactor/deletion/grpc_request_handler_test.go @@ -17,8 +17,8 @@ import ( "google.golang.org/grpc/status" "google.golang.org/grpc/test/bufconn" - compactor_client_grpc "github.com/grafana/loki/pkg/compactor/client/grpc" - "github.com/grafana/loki/pkg/compactor/deletionmode" + compactor_client_grpc "github.com/grafana/loki/v3/pkg/compactor/client/grpc" + "github.com/grafana/loki/v3/pkg/compactor/deletionmode" ) func server(t *testing.T, h *GRPCRequestHandler) (compactor_client_grpc.CompactorClient, func()) { @@ -74,7 +74,7 @@ func TestGRPCGetDeleteRequests(t *testing.T) { t.Run("it gets all the delete requests for the user", func(t *testing.T) { store := &mockDeleteRequestsStore{} store.getAllResult = []DeleteRequest{{RequestID: "test-request-1", Status: StatusReceived}, {RequestID: "test-request-2", Status: StatusReceived}} - h := NewGRPCRequestHandler(store, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(store, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -96,7 +96,7 @@ func TestGRPCGetDeleteRequests(t *testing.T) { {RequestID: "test-request-2", CreatedAt: now.Add(time.Minute), StartTime: now.Add(30 * time.Minute), EndTime: now.Add(90 * time.Minute)}, {RequestID: "test-request-1", CreatedAt: now, StartTime: now.Add(time.Hour), EndTime: now.Add(2 * time.Hour)}, } - h := NewGRPCRequestHandler(store, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(store, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -124,7 +124,7 @@ func TestGRPCGetDeleteRequests(t *testing.T) { {RequestID: 
"test-request-2", CreatedAt: now.Add(time.Minute), Status: StatusProcessed}, {RequestID: "test-request-3", CreatedAt: now.Add(2 * time.Minute), Status: StatusReceived}, } - h := NewGRPCRequestHandler(store, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(store, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -145,7 +145,7 @@ func TestGRPCGetDeleteRequests(t *testing.T) { t.Run("error getting from store", func(t *testing.T) { store := &mockDeleteRequestsStore{} store.getAllErr = errors.New("something bad") - h := NewGRPCRequestHandler(store, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(store, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -162,7 +162,7 @@ func TestGRPCGetDeleteRequests(t *testing.T) { t.Run("validation", func(t *testing.T) { t.Run("no org id", func(t *testing.T) { - h := NewGRPCRequestHandler(&mockDeleteRequestsStore{}, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(&mockDeleteRequestsStore{}, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -178,7 +178,7 @@ func TestGRPCGetCacheGenNumbers(t *testing.T) { t.Run("get gen number", func(t *testing.T) { store := &mockDeleteRequestsStore{} store.genNumber = "123" - h := NewGRPCRequestHandler(store, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(store, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -195,7 +195,7 @@ func TestGRPCGetCacheGenNumbers(t *testing.T) { t.Run("error getting from store", func(t *testing.T) { store := &mockDeleteRequestsStore{} store.getErr = 
errors.New("something bad") - h := NewGRPCRequestHandler(store, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(store, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -212,7 +212,7 @@ func TestGRPCGetCacheGenNumbers(t *testing.T) { t.Run("validation", func(t *testing.T) { t.Run("no org id", func(t *testing.T) { - h := NewGRPCRequestHandler(&mockDeleteRequestsStore{}, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(&mockDeleteRequestsStore{}, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) diff --git a/pkg/compactor/deletion/metrics.go b/pkg/compactor/deletion/metrics.go index b3196948830b9..9d89f46c88d9d 100644 --- a/pkg/compactor/deletion/metrics.go +++ b/pkg/compactor/deletion/metrics.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type DeleteRequestClientMetrics struct { diff --git a/pkg/compactor/deletion/request_handler.go b/pkg/compactor/deletion/request_handler.go index db5a22a83d544..c4c0e1105341c 100644 --- a/pkg/compactor/deletion/request_handler.go +++ b/pkg/compactor/deletion/request_handler.go @@ -10,15 +10,14 @@ import ( "sort" "time" - "github.com/grafana/loki/pkg/util" - "github.com/go-kit/log/level" + "github.com/grafana/dskit/tenant" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/dskit/tenant" - - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // DeleteRequestHandler provides handlers for delete requests 
@@ -49,7 +48,7 @@ func (dm *DeleteRequestHandler) AddDeleteRequestHandler(w http.ResponseWriter, r } params := r.URL.Query() - query, err := query(params) + query, parsedExpr, err := query(params) if err != nil { http.Error(w, err.Error(), http.StatusBadRequest) return @@ -67,13 +66,19 @@ func (dm *DeleteRequestHandler) AddDeleteRequestHandler(w http.ResponseWriter, r return } - interval, err := dm.interval(params, startTime, endTime) - if err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - return + var shardByInterval time.Duration + if parsedExpr.HasFilter() { + var err error + shardByInterval, err = dm.interval(params, startTime, endTime) + if err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + } else { + shardByInterval = endTime.Sub(startTime) + time.Minute } - deleteRequests := shardDeleteRequestsByInterval(startTime, endTime, query, userID, interval) + deleteRequests := shardDeleteRequestsByInterval(startTime, endTime, query, userID, shardByInterval) createdDeleteRequests, err := dm.deleteRequestsStore.AddDeleteRequestGroup(ctx, deleteRequests) if err != nil { level.Error(util_log.Logger).Log("msg", "error adding delete request to the store", "err", err) @@ -92,7 +97,7 @@ func (dm *DeleteRequestHandler) AddDeleteRequestHandler(w http.ResponseWriter, r "delete_request_id", createdDeleteRequests[0].RequestID, "user", userID, "query", query, - "interval", interval.String(), + "interval", shardByInterval.String(), ) dm.metrics.deleteRequestsReceivedTotal.WithLabelValues(userID).Inc() @@ -315,17 +320,18 @@ func (dm *DeleteRequestHandler) GetCacheGenerationNumberHandler(w http.ResponseW } } -func query(params url.Values) (string, error) { +func query(params url.Values) (string, syntax.LogSelectorExpr, error) { query := params.Get("query") if len(query) == 0 { - return "", errors.New("query not set") + return "", nil, errors.New("query not set") } - if _, err := parseDeletionQuery(query); err != nil { - return "", err + 
parsedExpr, err := parseDeletionQuery(query) + if err != nil { + return "", nil, err } - return query, nil + return query, parsedExpr, nil } func startTime(params url.Values) (model.Time, error) { diff --git a/pkg/compactor/deletion/request_handler_test.go b/pkg/compactor/deletion/request_handler_test.go index 1aaf0b582b366..1df1446bd4e55 100644 --- a/pkg/compactor/deletion/request_handler_test.go +++ b/pkg/compactor/deletion/request_handler_test.go @@ -10,14 +10,12 @@ import ( "testing" "time" + "github.com/grafana/dskit/user" "github.com/pkg/errors" - "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/util" - - "github.com/grafana/dskit/user" + "github.com/grafana/loki/v3/pkg/util" ) func TestAddDeleteRequestHandler(t *testing.T) { @@ -50,14 +48,14 @@ func TestAddDeleteRequestHandler(t *testing.T) { require.Equal(t, w.Code, http.StatusInternalServerError) }) - t.Run("it shards deletes based on a query param", func(t *testing.T) { + t.Run("it only shards deletes with line filter based on a query param", func(t *testing.T) { store := &mockDeleteRequestsStore{} h := NewDeleteRequestHandler(store, 0, nil) from := model.TimeFromUnix(model.Now().Add(-3 * time.Hour).Unix()) to := model.TimeFromUnix(from.Add(3 * time.Hour).Unix()) - req := buildRequest("org-id", `{foo="bar"}`, unixString(from), unixString(to)) + req := buildRequest("org-id", `{foo="bar"} |= "foo"`, unixString(from), unixString(to)) params := req.URL.Query() params.Set("max_interval", "1h") req.URL.RawQuery = params.Encode() @@ -87,7 +85,7 @@ func TestAddDeleteRequestHandler(t *testing.T) { from := model.TimeFromUnix(model.Now().Add(-3 * time.Hour).Unix()) to := model.TimeFromUnix(from.Add(3 * time.Hour).Unix()) - req := buildRequest("org-id", `{foo="bar"}`, unixString(from), unixString(to)) + req := buildRequest("org-id", `{foo="bar"} |= "foo"`, unixString(from), unixString(to)) w := httptest.NewRecorder() h.AddDeleteRequestHandler(w, req) @@ -107,6 
+105,27 @@ func TestAddDeleteRequestHandler(t *testing.T) { } }) + t.Run("it does not shard deletes without line filter", func(t *testing.T) { + store := &mockDeleteRequestsStore{} + h := NewDeleteRequestHandler(store, 0, nil) + + from := model.TimeFromUnix(model.Now().Add(-3 * time.Hour).Unix()) + to := model.TimeFromUnix(from.Add(3 * time.Hour).Unix()) + + req := buildRequest("org-id", `{foo="bar"}`, unixString(from), unixString(to)) + params := req.URL.Query() + params.Set("max_interval", "1h") + req.URL.RawQuery = params.Encode() + + w := httptest.NewRecorder() + h.AddDeleteRequestHandler(w, req) + + require.Equal(t, w.Code, http.StatusNoContent) + require.Len(t, store.addReqs, 1) + require.Equal(t, from, store.addReqs[0].StartTime) + require.Equal(t, to, store.addReqs[0].EndTime) + }) + t.Run("it works with RFC3339", func(t *testing.T) { store := &mockDeleteRequestsStore{} h := NewDeleteRequestHandler(store, 0, nil) @@ -166,11 +185,11 @@ func TestAddDeleteRequestHandler(t *testing.T) { {"org-id", `{foo="bar"}`, "0000000000", "0000000000001", "", "invalid end time: require unix seconds or RFC3339 format\n"}, {"org-id", `{foo="bar"}`, "0000000000", fmt.Sprint(time.Now().Add(time.Hour).Unix())[:10], "", "deletes in the future are not allowed\n"}, {"org-id", `{foo="bar"}`, "0000000001", "0000000000", "", "start time can't be greater than end time\n"}, - {"org-id", `{foo="bar"}`, "0000000000", "0000000001", "not-a-duration", "invalid max_interval: valid time units are 's', 'm', 'h'\n"}, - {"org-id", `{foo="bar"}`, "0000000000", "0000000001", "1ms", "invalid max_interval: valid time units are 's', 'm', 'h'\n"}, - {"org-id", `{foo="bar"}`, "0000000000", "0000000001", "1h", "max_interval can't be greater than 1m0s\n"}, - {"org-id", `{foo="bar"}`, "0000000000", "0000000001", "30s", "max_interval can't be greater than the interval to be deleted (1s)\n"}, - {"org-id", `{foo="bar"}`, "0000000000", "0000000000", "", "difference between start time and end time must be at 
least one second\n"}, + {"org-id", `{foo="bar"} |= "foo"`, "0000000000", "0000000001", "not-a-duration", "invalid max_interval: valid time units are 's', 'm', 'h'\n"}, + {"org-id", `{foo="bar"} |= "foo"`, "0000000000", "0000000001", "1ms", "invalid max_interval: valid time units are 's', 'm', 'h'\n"}, + {"org-id", `{foo="bar"} |= "foo"`, "0000000000", "0000000001", "1h", "max_interval can't be greater than 1m0s\n"}, + {"org-id", `{foo="bar"} |= "foo"`, "0000000000", "0000000001", "30s", "max_interval can't be greater than the interval to be deleted (1s)\n"}, + {"org-id", `{foo="bar"} |= "foo"`, "0000000000", "0000000000", "", "difference between start time and end time must be at least one second\n"}, } { t.Run(strings.TrimSpace(tc.error), func(t *testing.T) { req := buildRequest(tc.orgID, tc.query, tc.startTime, tc.endTime) diff --git a/pkg/compactor/deletion/tenant_delete_requests_client.go b/pkg/compactor/deletion/tenant_delete_requests_client.go index 29b6a56922868..495ece96e181c 100644 --- a/pkg/compactor/deletion/tenant_delete_requests_client.go +++ b/pkg/compactor/deletion/tenant_delete_requests_client.go @@ -2,12 +2,17 @@ package deletion import ( "context" + "time" + + "github.com/grafana/loki/v3/pkg/validation" ) const deletionNotAvailableMsg = "deletion is not available for this tenant" type Limits interface { DeletionMode(userID string) string + RetentionPeriod(userID string) time.Duration + StreamRetention(userID string) []validation.StreamRetention } type perTenantDeleteRequestsClient struct { diff --git a/pkg/compactor/deletion/tenant_delete_requests_client_test.go b/pkg/compactor/deletion/tenant_delete_requests_client_test.go index ba063f713b844..20e97d463f4f8 100644 --- a/pkg/compactor/deletion/tenant_delete_requests_client_test.go +++ b/pkg/compactor/deletion/tenant_delete_requests_client_test.go @@ -3,6 +3,7 @@ package deletion import ( "context" "testing" + "time" "github.com/stretchr/testify/require" ) @@ -13,7 +14,7 @@ func 
TestTenantDeleteRequestsClient(t *testing.T) { RequestID: "test-request", }}, } - perTenantClient := NewPerTenantDeleteRequestsClient(fakeClient, limits) + perTenantClient := NewPerTenantDeleteRequestsClient(fakeClient, defaultLimits) t.Run("tenant enabled", func(t *testing.T) { reqs, err := perTenantClient.GetAllDeleteRequestsForUser(context.Background(), "1") @@ -39,10 +40,11 @@ func (c *fakeRequestsClient) GetAllDeleteRequestsForUser(_ context.Context, _ st } var ( - limits = &fakeLimits{ - limits: map[string]string{ - "1": "filter-only", - "2": "disabled", + defaultLimits = &fakeLimits{ + tenantLimits: map[string]limit{ + "1": {deletionMode: "filter-only"}, + "2": {deletionMode: "disabled"}, + "3": {retentionPeriod: time.Hour}, }, } ) diff --git a/pkg/compactor/deletion/tenant_request_handler_test.go b/pkg/compactor/deletion/tenant_request_handler_test.go index e979a5c8c4d00..c57dc84ba4caf 100644 --- a/pkg/compactor/deletion/tenant_request_handler_test.go +++ b/pkg/compactor/deletion/tenant_request_handler_test.go @@ -4,16 +4,19 @@ import ( "net/http" "net/http/httptest" "testing" + "time" "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" + + "github.com/grafana/loki/v3/pkg/validation" ) func TestDeleteRequestHandlerDeletionMiddleware(t *testing.T) { fl := &fakeLimits{ - limits: map[string]string{ - "1": "filter-only", - "2": "disabled", + tenantLimits: map[string]limit{ + "1": {deletionMode: "filter-only"}, + "2": {deletionMode: "disabled"}, }, } @@ -47,15 +50,34 @@ func TestDeleteRequestHandlerDeletionMiddleware(t *testing.T) { require.Equal(t, http.StatusBadRequest, res.Result().StatusCode) } +type limit struct { + deletionMode string + retentionPeriod time.Duration + streamRetention []validation.StreamRetention +} + type fakeLimits struct { - limits map[string]string - mode string + tenantLimits map[string]limit + defaultLimit limit } -func (f *fakeLimits) DeletionMode(userID string) string { - if f.mode != "" { - return f.mode +func (f 
*fakeLimits) getLimitForUser(userID string) limit { + limit := f.defaultLimit + if override, ok := f.tenantLimits[userID]; ok { + limit = override } - return f.limits[userID] + return limit +} + +func (f *fakeLimits) DeletionMode(userID string) string { + return f.getLimitForUser(userID).deletionMode +} + +func (f *fakeLimits) RetentionPeriod(userID string) time.Duration { + return f.getLimitForUser(userID).retentionPeriod +} + +func (f *fakeLimits) StreamRetention(userID string) []validation.StreamRetention { + return f.getLimitForUser(userID).streamRetention } diff --git a/pkg/compactor/deletion/util.go b/pkg/compactor/deletion/util.go index 18d5822141f14..c20da8a4a2602 100644 --- a/pkg/compactor/deletion/util.go +++ b/pkg/compactor/deletion/util.go @@ -3,8 +3,8 @@ package deletion import ( "errors" - "github.com/grafana/loki/pkg/compactor/deletionmode" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/compactor/deletionmode" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) var ( diff --git a/pkg/compactor/generationnumber/gennumber_loader.go b/pkg/compactor/generationnumber/gennumber_loader.go index ec8bb0bcab567..c2edb62dc1664 100644 --- a/pkg/compactor/generationnumber/gennumber_loader.go +++ b/pkg/compactor/generationnumber/gennumber_loader.go @@ -10,7 +10,7 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/log" ) const reloadDuration = 5 * time.Minute diff --git a/pkg/compactor/generationnumber/metrics.go b/pkg/compactor/generationnumber/metrics.go index c71f1b5821562..ccd06d74f8a73 100644 --- a/pkg/compactor/generationnumber/metrics.go +++ b/pkg/compactor/generationnumber/metrics.go @@ -3,7 +3,7 @@ package generationnumber import ( "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) // Make this package level 
because we want several instances of a loader to be able to report metrics diff --git a/pkg/compactor/index_set.go b/pkg/compactor/index_set.go index 9b6267d60739d..7102aef564259 100644 --- a/pkg/compactor/index_set.go +++ b/pkg/compactor/index_set.go @@ -12,12 +12,12 @@ import ( "github.com/go-kit/log/level" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type IndexSet interface { diff --git a/pkg/compactor/retention/expiration.go b/pkg/compactor/retention/expiration.go index 9111a6c7afee1..45029f9652c5a 100644 --- a/pkg/compactor/retention/expiration.go +++ b/pkg/compactor/retention/expiration.go @@ -8,9 +8,9 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) // IntervalFilter contains the interval to delete diff --git a/pkg/compactor/retention/expiration_test.go b/pkg/compactor/retention/expiration_test.go index ee61eb102b7ba..3cc69f88ae613 100644 --- a/pkg/compactor/retention/expiration_test.go +++ b/pkg/compactor/retention/expiration_test.go @@ -9,7 
+9,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) type retentionLimit struct { diff --git a/pkg/compactor/retention/marker.go b/pkg/compactor/retention/marker.go index 84e461cd7bc5d..bad9b46d1c57b 100644 --- a/pkg/compactor/retention/marker.go +++ b/pkg/compactor/retention/marker.go @@ -15,9 +15,9 @@ import ( "github.com/go-kit/log/level" "go.etcd.io/bbolt" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - shipper_util "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" - util_log "github.com/grafana/loki/pkg/util/log" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + shipper_util "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/compactor/retention/retention.go b/pkg/compactor/retention/retention.go index 703e7e0182e6a..0a4aba59be474 100644 --- a/pkg/compactor/retention/retention.go +++ b/pkg/compactor/retention/retention.go @@ -15,13 +15,13 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var chunkBucket = []byte("chunks") 
diff --git a/pkg/compactor/retention/retention_test.go b/pkg/compactor/retention/retention_test.go index 36faaaf332dc8..6c261d34799e5 100644 --- a/pkg/compactor/retention/retention_test.go +++ b/pkg/compactor/retention/retention_test.go @@ -20,14 +20,14 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - ingesterclient "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + ingesterclient "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) type mockChunkClient struct { diff --git a/pkg/compactor/retention/util_test.go b/pkg/compactor/retention/util_test.go index bb2f0fe2e0a77..3597a11565adb 100644 --- a/pkg/compactor/retention/util_test.go +++ b/pkg/compactor/retention/util_test.go @@ -14,14 +14,14 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + 
"github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func dayFromTime(t model.Time) config.DayTime { diff --git a/pkg/compactor/table.go b/pkg/compactor/table.go index b7b94627c7415..c371a5db88f59 100644 --- a/pkg/compactor/table.go +++ b/pkg/compactor/table.go @@ -12,11 +12,11 @@ import ( "github.com/grafana/dskit/concurrency" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/compactor/retention" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor/retention" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/compactor/table_test.go b/pkg/compactor/table_test.go index c08c0390de8e7..462511eca4782 100644 --- a/pkg/compactor/table_test.go +++ b/pkg/compactor/table_test.go @@ -12,10 +12,10 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + 
"github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" ) const ( diff --git a/pkg/compactor/testutil.go b/pkg/compactor/testutil.go index 094eea2903ad0..4ebba27f64bfa 100644 --- a/pkg/compactor/testutil.go +++ b/pkg/compactor/testutil.go @@ -17,12 +17,12 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/testutil" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/testutil" ) const ( diff --git a/pkg/configs/client/client.go b/pkg/configs/client/client.go index fb530ed020e72..5592fbe1b83dc 100644 --- a/pkg/configs/client/client.go +++ b/pkg/configs/client/client.go @@ -19,8 +19,8 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/common/version" - "github.com/grafana/loki/pkg/configs/userconfig" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/configs/userconfig" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/configs/client/configs_test.go b/pkg/configs/client/configs_test.go index 96e1fa9f7d3cf..311c33ca91ad9 100644 --- a/pkg/configs/client/configs_test.go +++ b/pkg/configs/client/configs_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/configs/userconfig" + 
"github.com/grafana/loki/v3/pkg/configs/userconfig" ) var response = `{ diff --git a/pkg/configs/userconfig/config.go b/pkg/configs/userconfig/config.go index 2bb33c824f263..e7d22e033a8ec 100644 --- a/pkg/configs/userconfig/config.go +++ b/pkg/configs/userconfig/config.go @@ -13,7 +13,7 @@ import ( "github.com/prometheus/prometheus/rules" "gopkg.in/yaml.v3" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // An ID is the ID of a single users's Cortex configuration. When a diff --git a/pkg/configs/userconfig/config_test.go b/pkg/configs/userconfig/config_test.go index 0b304f28288db..ac81d47e4ee98 100644 --- a/pkg/configs/userconfig/config_test.go +++ b/pkg/configs/userconfig/config_test.go @@ -18,7 +18,7 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v3" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var legacyRulesFile = `ALERT TestAlert diff --git a/pkg/distributor/distributor.go b/pkg/distributor/distributor.go index 818c0fe735ae8..d721a60f3070e 100644 --- a/pkg/distributor/distributor.go +++ b/pkg/distributor/distributor.go @@ -15,6 +15,7 @@ import ( "github.com/go-kit/log/level" "github.com/gogo/status" "github.com/prometheus/prometheus/model/labels" + "go.opentelemetry.io/collector/pdata/plog" "google.golang.org/grpc/codes" "github.com/grafana/dskit/httpgrpc" @@ -32,28 +33,38 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/distributor/shardstreams" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/logproto" - 
"github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - lokiring "github.com/grafana/loki/pkg/util/ring" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/distributor/shardstreams" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/validation" ) const ( ringKey = "distributor" ringAutoForgetUnhealthyPeriods = 2 + + labelServiceName = "service_name" + serviceUnknown = "unknown_service" + labelLevel = "level" + logLevelDebug = "debug" + logLevelInfo = "info" + logLevelWarn = "warn" + logLevelError = "error" + logLevelFatal = "fatal" + logLevelCritical = "critical" ) var ( @@ -348,7 +359,7 @@ func (d *Distributor) Push(ctx context.Context, req *logproto.PushRequest) (*log d.truncateLines(validationContext, &stream) var lbs labels.Labels - lbs, stream.Labels, stream.Hash, err = d.parseStreamLabels(validationContext, stream.Labels, &stream) + lbs, stream.Labels, stream.Hash, err = d.parseStreamLabels(validationContext, stream.Labels, stream) if err != nil { d.writeFailuresManager.Log(tenantID, err) validationErrors.Add(err) @@ -364,6 +375,7 @@ func (d *Distributor) Push(ctx context.Context, req *logproto.PushRequest) 
(*log n := 0 pushSize := 0 prevTs := stream.Entries[0].Timestamp + addLogLevel := validationContext.allowStructuredMetadata && validationContext.discoverLogLevels && !lbs.Has(labelLevel) for _, entry := range stream.Entries { if err := d.validator.ValidateEntry(ctx, validationContext, lbs, entry); err != nil { d.writeFailuresManager.Log(tenantID, err) @@ -371,6 +383,14 @@ func (d *Distributor) Push(ctx context.Context, req *logproto.PushRequest) (*log continue } + structuredMetadata := logproto.FromLabelAdaptersToLabels(entry.StructuredMetadata) + if addLogLevel && !structuredMetadata.Has(labelLevel) { + logLevel := detectLogLevelFromLogEntry(entry, structuredMetadata) + entry.StructuredMetadata = append(entry.StructuredMetadata, logproto.LabelAdapter{ + Name: labelLevel, + Value: logLevel, + }) + } stream.Entries[n] = entry // If configured for this tenant, increment duplicate timestamps. Note, this is imperfect @@ -425,7 +445,7 @@ func (d *Distributor) Push(ctx context.Context, req *logproto.PushRequest) (*log if d.usageTracker != nil { for _, stream := range req.Streams { - lbs, _, _, err := d.parseStreamLabels(validationContext, stream.Labels, &stream) + lbs, _, _, err := d.parseStreamLabels(validationContext, stream.Labels, stream) if err != nil { continue } @@ -717,7 +737,7 @@ type labelData struct { hash uint64 } -func (d *Distributor) parseStreamLabels(vContext validationContext, key string, stream *logproto.Stream) (labels.Labels, string, uint64, error) { +func (d *Distributor) parseStreamLabels(vContext validationContext, key string, stream logproto.Stream) (labels.Labels, string, uint64, error) { if val, ok := d.labelCache.Get(key); ok { labelVal := val.(labelData) return labelVal.ls, labelVal.ls.String(), labelVal.hash, nil @@ -728,10 +748,24 @@ func (d *Distributor) parseStreamLabels(vContext validationContext, key string, return nil, "", 0, fmt.Errorf(validation.InvalidLabelsErrorMsg, key, err) } - if err := d.validator.ValidateLabels(vContext, ls, 
*stream); err != nil { + if err := d.validator.ValidateLabels(vContext, ls, stream); err != nil { return nil, "", 0, err } + // We do not want to count service_name added by us in the stream limit so adding it after validating original labels. + if !ls.Has(labelServiceName) && len(vContext.discoverServiceName) > 0 { + serviceName := serviceUnknown + for _, labelName := range vContext.discoverServiceName { + if labelVal := ls.Get(labelName); labelVal != "" { + serviceName = labelVal + break + } + } + + ls = labels.NewBuilder(ls).Set(labelServiceName, serviceName).Labels() + stream.Labels = ls.String() + } + lsHash := ls.Hash() d.labelCache.Add(key, labelData{ls, lsHash}) @@ -821,3 +855,56 @@ func newRingAndLifecycler(cfg RingConfig, instanceCount *atomic.Uint32, logger l func (d *Distributor) HealthyInstancesCount() int { return int(d.healthyInstancesCount.Load()) } + +func detectLogLevelFromLogEntry(entry logproto.Entry, structuredMetadata labels.Labels) string { + // otlp logs have a severity number, using which we are defining the log levels. 
+ // Significance of severity number is explained in otel docs here https://opentelemetry.io/docs/specs/otel/logs/data-model/#field-severitynumber + if otlpSeverityNumberTxt := structuredMetadata.Get(push.OTLPSeverityNumber); otlpSeverityNumberTxt != "" { + otlpSeverityNumber, err := strconv.Atoi(otlpSeverityNumberTxt) + if err != nil { + return logLevelInfo + } + if otlpSeverityNumber <= int(plog.SeverityNumberDebug4) { + return logLevelDebug + } else if otlpSeverityNumber <= int(plog.SeverityNumberInfo4) { + return logLevelInfo + } else if otlpSeverityNumber <= int(plog.SeverityNumberWarn4) { + return logLevelWarn + } else if otlpSeverityNumber <= int(plog.SeverityNumberError4) { + return logLevelError + } else if otlpSeverityNumber <= int(plog.SeverityNumberFatal4) { + return logLevelFatal + } + return logLevelInfo + } + + return extractLogLevelFromLogLine(entry.Line) +} + +func extractLogLevelFromLogLine(log string) string { + if strings.Contains(log, `:"err"`) || strings.Contains(log, `:"ERR"`) || + strings.Contains(log, "=err") || strings.Contains(log, "=ERR") || + strings.Contains(log, "err:") || strings.Contains(log, "ERR:") || + strings.Contains(log, "error") || strings.Contains(log, "ERROR") { + return logLevelError + } + if strings.Contains(log, `:"warn"`) || strings.Contains(log, `:"WARN"`) || + strings.Contains(log, "=warn") || strings.Contains(log, "=WARN") || + strings.Contains(log, "warn:") || strings.Contains(log, "WARN:") || + strings.Contains(log, "warning") || strings.Contains(log, "WARNING") { + return logLevelWarn + } + if strings.Contains(log, `:"critical"`) || strings.Contains(log, `:"CRITICAL"`) || + strings.Contains(log, "=critical") || strings.Contains(log, "=CRITICAL") || + strings.Contains(log, "CRITICAL:") || strings.Contains(log, "critical:") { + return logLevelCritical + } + if strings.Contains(log, `:"debug"`) || strings.Contains(log, `:"DEBUG"`) || + strings.Contains(log, "=debug") || strings.Contains(log, "=DEBUG") || + 
strings.Contains(log, "debug:") || strings.Contains(log, "DEBUG:") { + return logLevelDebug + } + + // Default to info if no specific level is found + return logLevelInfo +} diff --git a/pkg/distributor/distributor_ring.go b/pkg/distributor/distributor_ring.go index 0c7451ebdea2f..0beffd91791a3 100644 --- a/pkg/distributor/distributor_ring.go +++ b/pkg/distributor/distributor_ring.go @@ -14,7 +14,7 @@ import ( "github.com/grafana/dskit/netutil" "github.com/grafana/dskit/ring" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // RingConfig masks the ring lifecycler config which contains diff --git a/pkg/distributor/distributor_test.go b/pkg/distributor/distributor_test.go index 04747ffb72334..b51c545166f25 100644 --- a/pkg/distributor/distributor_test.go +++ b/pkg/distributor/distributor_test.go @@ -23,23 +23,28 @@ import ( "github.com/grafana/dskit/user" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" + "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "go.opentelemetry.io/collector/pdata/plog" "google.golang.org/grpc" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/ingester" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/util/constants" - fe "github.com/grafana/loki/pkg/util/flagext" - loki_flagext "github.com/grafana/loki/pkg/util/flagext" - util_log "github.com/grafana/loki/pkg/util/log" - loki_net "github.com/grafana/loki/pkg/util/net" - "github.com/grafana/loki/pkg/util/test" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/pkg/push" + + "github.com/grafana/loki/v3/pkg/ingester" + "github.com/grafana/loki/v3/pkg/ingester/client" + loghttp_push "github.com/grafana/loki/v3/pkg/loghttp/push" + 
"github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/util/constants" + fe "github.com/grafana/loki/v3/pkg/util/flagext" + loki_flagext "github.com/grafana/loki/v3/pkg/util/flagext" + util_log "github.com/grafana/loki/v3/pkg/util/log" + loki_net "github.com/grafana/loki/v3/pkg/util/net" + "github.com/grafana/loki/v3/pkg/util/test" + "github.com/grafana/loki/v3/pkg/validation" ) var ( @@ -98,6 +103,7 @@ func TestDistributor(t *testing.T) { t.Run(fmt.Sprintf("[%d](lines=%v)", i, tc.lines), func(t *testing.T) { limits := &validation.Limits{} flagext.DefaultValues(limits) + limits.DiscoverServiceName = nil limits.IngestionRateMB = ingestionRateLimit limits.IngestionBurstSizeMB = ingestionRateLimit limits.MaxLineSize = fe.ByteSize(tc.maxLineSize) @@ -134,13 +140,19 @@ func TestDistributor(t *testing.T) { func Test_IncrementTimestamp(t *testing.T) { incrementingDisabled := &validation.Limits{} flagext.DefaultValues(incrementingDisabled) + incrementingDisabled.DiscoverServiceName = nil incrementingDisabled.RejectOldSamples = false incrementingEnabled := &validation.Limits{} flagext.DefaultValues(incrementingEnabled) + incrementingEnabled.DiscoverServiceName = nil incrementingEnabled.RejectOldSamples = false incrementingEnabled.IncrementDuplicateTimestamp = true + defaultLimits := &validation.Limits{} + flagext.DefaultValues(defaultLimits) + now := time.Now() + tests := map[string]struct { limits *validation.Limits push *logproto.PushRequest @@ -386,6 +398,34 @@ func Test_IncrementTimestamp(t *testing.T) { }, }, }, + "default limit adding service_name label": { + limits: defaultLimits, + push: &logproto.PushRequest{ + Streams: []logproto.Stream{ + { + Labels: "{job=\"foo\"}", + Entries: []logproto.Entry{ + {Timestamp: now.Add(-2 * time.Second), Line: "hey1"}, + {Timestamp: now.Add(-time.Second), Line: "hey2"}, + {Timestamp: now, Line: "hey3"}, + }, + }, + }, + 
}, + expectedPush: &logproto.PushRequest{ + Streams: []logproto.Stream{ + { + Labels: "{job=\"foo\", service_name=\"foo\"}", + Hash: 0x86ca305b6d86e8b0, + Entries: []logproto.Entry{ + {Timestamp: now.Add(-2 * time.Second), Line: "hey1"}, + {Timestamp: now.Add(-time.Second), Line: "hey2"}, + {Timestamp: now, Line: "hey3"}, + }, + }, + }, + }, + }, } for testName, testData := range tests { @@ -405,6 +445,7 @@ func Test_IncrementTimestamp(t *testing.T) { func TestDistributorPushConcurrently(t *testing.T) { limits := &validation.Limits{} flagext.DefaultValues(limits) + limits.DiscoverServiceName = nil distributors, ingesters := prepare(t, 1, 5, limits, nil) @@ -497,6 +538,7 @@ func TestDistributorPushErrors(t *testing.T) { func Test_SortLabelsOnPush(t *testing.T) { limits := &validation.Limits{} flagext.DefaultValues(limits) + limits.DiscoverServiceName = nil ingester := &mockIngester{} distributors, _ := prepare(t, 1, 5, limits, func(addr string) (ring_client.PoolClient, error) { return ingester, nil }) @@ -788,13 +830,136 @@ func Benchmark_SortLabelsOnPush(b *testing.B) { for n := 0; n < b.N; n++ { stream := request.Streams[0] stream.Labels = `{buzz="f", a="b"}` - _, _, _, err := d.parseStreamLabels(vCtx, stream.Labels, &stream) + _, _, _, err := d.parseStreamLabels(vCtx, stream.Labels, stream) if err != nil { panic("parseStreamLabels fail,err:" + err.Error()) } } } +func TestParseStreamLabels(t *testing.T) { + defaultLimit := &validation.Limits{} + flagext.DefaultValues(defaultLimit) + + for _, tc := range []struct { + name string + origLabels string + expectedLabels labels.Labels + expectedErr error + generateLimits func() *validation.Limits + }{ + { + name: "service name label mapping disabled", + generateLimits: func() *validation.Limits { + limits := &validation.Limits{} + flagext.DefaultValues(limits) + limits.DiscoverServiceName = nil + return limits + }, + origLabels: `{foo="bar"}`, + expectedLabels: labels.Labels{ + { + Name: "foo", + Value: "bar", + }, + }, 
+ }, + { + name: "no labels defined - service name label mapping disabled", + generateLimits: func() *validation.Limits { + limits := &validation.Limits{} + flagext.DefaultValues(limits) + limits.DiscoverServiceName = nil + return limits + }, + origLabels: `{}`, + expectedErr: fmt.Errorf(validation.MissingLabelsErrorMsg), + }, + { + name: "service name label enabled", + origLabels: `{foo="bar"}`, + generateLimits: func() *validation.Limits { + return defaultLimit + }, + expectedLabels: labels.Labels{ + { + Name: "foo", + Value: "bar", + }, + { + Name: labelServiceName, + Value: serviceUnknown, + }, + }, + }, + { + name: "service name label should not get counted against max labels count", + origLabels: `{foo="bar"}`, + generateLimits: func() *validation.Limits { + limits := &validation.Limits{} + flagext.DefaultValues(limits) + limits.MaxLabelNamesPerSeries = 1 + return limits + }, + expectedLabels: labels.Labels{ + { + Name: "foo", + Value: "bar", + }, + { + Name: labelServiceName, + Value: serviceUnknown, + }, + }, + }, + { + name: "use label service as service name", + origLabels: `{container="nginx", foo="bar", service="auth"}`, + generateLimits: func() *validation.Limits { + return defaultLimit + }, + expectedLabels: labels.Labels{ + { + Name: "container", + Value: "nginx", + }, + { + Name: "foo", + Value: "bar", + }, + { + Name: "service", + Value: "auth", + }, + { + Name: labelServiceName, + Value: "auth", + }, + }, + }, + } { + limits := tc.generateLimits() + distributors, _ := prepare(&testing.T{}, 1, 5, limits, nil) + d := distributors[0] + + vCtx := d.validator.getValidationContextForTime(testTime, "123") + + t.Run(tc.name, func(t *testing.T) { + lbs, lbsString, hash, err := d.parseStreamLabels(vCtx, tc.origLabels, logproto.Stream{ + Labels: tc.origLabels, + }) + if tc.expectedErr != nil { + require.Equal(t, tc.expectedErr, err) + return + } + require.NoError(t, err) + require.Equal(t, tc.expectedLabels.String(), lbsString) + require.Equal(t, 
tc.expectedLabels, lbs) + require.Equal(t, tc.expectedLabels.Hash(), hash) + }) + } +} + func Benchmark_Push(b *testing.B) { limits := &validation.Limits{} flagext.DefaultValues(limits) @@ -1330,3 +1495,146 @@ func TestDistributorTee(t *testing.T) { require.Equal(t, "test", tee.tenant) } } + +func Test_DetectLogLevels(t *testing.T) { + setup := func(discoverLogLevels bool) (*validation.Limits, *mockIngester) { + limits := &validation.Limits{} + flagext.DefaultValues(limits) + + limits.DiscoverLogLevels = discoverLogLevels + limits.DiscoverServiceName = nil + limits.AllowStructuredMetadata = true + return limits, &mockIngester{} + } + + t.Run("log level detection disabled", func(t *testing.T) { + limits, ingester := setup(false) + distributors, _ := prepare(t, 1, 5, limits, func(addr string) (ring_client.PoolClient, error) { return ingester, nil }) + + writeReq := makeWriteRequestWithLabels(1, 10, []string{`{foo="bar"}`}) + _, err := distributors[0].Push(ctx, writeReq) + require.NoError(t, err) + topVal := ingester.Peek() + require.Equal(t, `{foo="bar"}`, topVal.Streams[0].Labels) + require.Len(t, topVal.Streams[0].Entries[0].StructuredMetadata, 0) + }) + + t.Run("log level detection enabled", func(t *testing.T) { + limits, ingester := setup(true) + distributors, _ := prepare(t, 1, 5, limits, func(addr string) (ring_client.PoolClient, error) { return ingester, nil }) + + writeReq := makeWriteRequestWithLabels(1, 10, []string{`{foo="bar"}`}) + _, err := distributors[0].Push(ctx, writeReq) + require.NoError(t, err) + topVal := ingester.Peek() + require.Equal(t, `{foo="bar"}`, topVal.Streams[0].Labels) + require.Equal(t, push.LabelsAdapter{ + { + Name: labelLevel, + Value: logLevelInfo, + }, + }, topVal.Streams[0].Entries[0].StructuredMetadata) + }) + + t.Run("log level detection enabled but log level already present in stream", func(t *testing.T) { + limits, ingester := setup(true) + distributors, _ := prepare(t, 1, 5, limits, func(addr string) 
(ring_client.PoolClient, error) { return ingester, nil }) + + writeReq := makeWriteRequestWithLabels(1, 10, []string{`{foo="bar", level="debug"}`}) + _, err := distributors[0].Push(ctx, writeReq) + require.NoError(t, err) + topVal := ingester.Peek() + require.Equal(t, `{foo="bar", level="debug"}`, topVal.Streams[0].Labels) + require.Len(t, topVal.Streams[0].Entries[0].StructuredMetadata, 0) + }) + + t.Run("log level detection enabled but log level already present as structured metadata", func(t *testing.T) { + limits, ingester := setup(true) + distributors, _ := prepare(t, 1, 5, limits, func(addr string) (ring_client.PoolClient, error) { return ingester, nil }) + + writeReq := makeWriteRequestWithLabels(1, 10, []string{`{foo="bar"}`}) + writeReq.Streams[0].Entries[0].StructuredMetadata = push.LabelsAdapter{ + { + Name: labelLevel, + Value: logLevelWarn, + }, + } + _, err := distributors[0].Push(ctx, writeReq) + require.NoError(t, err) + topVal := ingester.Peek() + require.Equal(t, `{foo="bar"}`, topVal.Streams[0].Labels) + require.Equal(t, push.LabelsAdapter{ + { + Name: labelLevel, + Value: logLevelWarn, + }, + }, topVal.Streams[0].Entries[0].StructuredMetadata) + }) +} + +func Test_detectLogLevelFromLogEntry(t *testing.T) { + for _, tc := range []struct { + name string + entry logproto.Entry + expectedLogLevel string + }{ + { + name: "use severity number from otlp logs", + entry: logproto.Entry{ + Line: "error", + StructuredMetadata: push.LabelsAdapter{ + { + Name: loghttp_push.OTLPSeverityNumber, + Value: fmt.Sprintf("%d", plog.SeverityNumberDebug3), + }, + }, + }, + expectedLogLevel: logLevelDebug, + }, + { + name: "invalid severity number should not cause any issues", + entry: logproto.Entry{ + StructuredMetadata: push.LabelsAdapter{ + { + Name: loghttp_push.OTLPSeverityNumber, + Value: "foo", + }, + }, + }, + expectedLogLevel: logLevelInfo, + }, + { + name: "non otlp without any of the log level keywords in log line", + entry: logproto.Entry{ + Line: "foo", + 
}, + expectedLogLevel: logLevelInfo, + }, + { + name: "non otlp with log level keywords in log line", + entry: logproto.Entry{ + Line: "this is a warning log", + }, + expectedLogLevel: logLevelWarn, + }, + { + name: "json log line with an error", + entry: logproto.Entry{ + Line: `{"foo":"bar","level":"error"}`, + }, + expectedLogLevel: logLevelError, + }, + { + name: "logfmt log line with a warn", + entry: logproto.Entry{ + Line: `foo=bar level=warn`, + }, + expectedLogLevel: logLevelWarn, + }, + } { + t.Run(tc.name, func(t *testing.T) { + detectedLogLevel := detectLogLevelFromLogEntry(tc.entry, logproto.FromLabelAdaptersToLabels(tc.entry.StructuredMetadata)) + require.Equal(t, tc.expectedLogLevel, detectedLogLevel) + }) + } +} diff --git a/pkg/distributor/http.go b/pkg/distributor/http.go index 54c9588662367..00c3ba53a2806 100644 --- a/pkg/distributor/http.go +++ b/pkg/distributor/http.go @@ -8,13 +8,13 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/dskit/httpgrpc" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/loghttp/push" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/loghttp/push" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) // PushHandler reads a snappy-compressed proto from the HTTP body. 
diff --git a/pkg/distributor/http_test.go b/pkg/distributor/http_test.go index 23b2993c5b213..0ecf70fa9a498 100644 --- a/pkg/distributor/http_test.go +++ b/pkg/distributor/http_test.go @@ -9,13 +9,13 @@ import ( "github.com/grafana/dskit/user" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/grafana/dskit/flagext" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) func TestDistributorRingHandler(t *testing.T) { diff --git a/pkg/distributor/ingestion_rate_strategy_test.go b/pkg/distributor/ingestion_rate_strategy_test.go index e87c5ce69f8e2..657d34290984a 100644 --- a/pkg/distributor/ingestion_rate_strategy_test.go +++ b/pkg/distributor/ingestion_rate_strategy_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/distributor/instance_count_test.go b/pkg/distributor/instance_count_test.go index 92abf94c45061..7f861a262284a 100644 --- a/pkg/distributor/instance_count_test.go +++ b/pkg/distributor/instance_count_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestInstanceCountDelegateCounting(t *testing.T) { diff --git a/pkg/distributor/limits.go b/pkg/distributor/limits.go index 6db6995662dd2..05734db4184f0 100644 --- a/pkg/distributor/limits.go +++ b/pkg/distributor/limits.go @@ -3,9 +3,9 @@ package distributor import ( "time" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/distributor/shardstreams" - "github.com/grafana/loki/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/compactor/retention" + 
"github.com/grafana/loki/v3/pkg/distributor/shardstreams" + "github.com/grafana/loki/v3/pkg/loghttp/push" ) // Limits is an interface for distributor limits/related configs @@ -22,6 +22,8 @@ type Limits interface { RejectOldSamplesMaxAge(userID string) time.Duration IncrementDuplicateTimestamps(userID string) bool + DiscoverServiceName(userID string) []string + DiscoverLogLevels(userID string) bool ShardStreams(userID string) *shardstreams.Config IngestionRateStrategy() string diff --git a/pkg/distributor/ratestore.go b/pkg/distributor/ratestore.go index 2eb9e9f062326..0fe5c3aca0bce 100644 --- a/pkg/distributor/ratestore.go +++ b/pkg/distributor/ratestore.go @@ -15,9 +15,9 @@ import ( "github.com/opentracing/opentracing-go" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type poolClientFactory interface { diff --git a/pkg/distributor/ratestore_metrics.go b/pkg/distributor/ratestore_metrics.go index 2e31e3934f0bc..fce24ee6617b0 100644 --- a/pkg/distributor/ratestore_metrics.go +++ b/pkg/distributor/ratestore_metrics.go @@ -5,7 +5,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type ratestoreMetrics struct { diff --git a/pkg/distributor/ratestore_test.go b/pkg/distributor/ratestore_test.go index ab01fbc21593b..af9fa9f0adb70 100644 --- a/pkg/distributor/ratestore_test.go +++ b/pkg/distributor/ratestore_test.go @@ -7,16 +7,16 @@ import ( "testing" "time" - "github.com/grafana/loki/pkg/distributor/shardstreams" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/distributor/shardstreams" + 
"github.com/grafana/loki/v3/pkg/validation" "github.com/stretchr/testify/require" - client2 "github.com/grafana/loki/pkg/ingester/client" + client2 "github.com/grafana/loki/v3/pkg/ingester/client" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/grafana/dskit/ring" "github.com/grafana/dskit/ring/client" diff --git a/pkg/distributor/shardstreams/config.go b/pkg/distributor/shardstreams/config.go index 6a92472451543..90f1e65600a9b 100644 --- a/pkg/distributor/shardstreams/config.go +++ b/pkg/distributor/shardstreams/config.go @@ -3,7 +3,7 @@ package shardstreams import ( "flag" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) type Config struct { diff --git a/pkg/distributor/streamsharder_test.go b/pkg/distributor/streamsharder_test.go index 1bd03d9fdd82f..eff8b476c0866 100644 --- a/pkg/distributor/streamsharder_test.go +++ b/pkg/distributor/streamsharder_test.go @@ -3,7 +3,7 @@ package distributor import ( "fmt" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type StreamSharderMock struct { diff --git a/pkg/distributor/tee.go b/pkg/distributor/tee.go index 460f9622b2ea2..04acb1e22c0df 100644 --- a/pkg/distributor/tee.go +++ b/pkg/distributor/tee.go @@ -4,3 +4,24 @@ package distributor type Tee interface { Duplicate(tenant string, streams []KeyedStream) } + +// WrapTee wraps a new Tee around an existing Tee. 
+func WrapTee(existing, new Tee) Tee { + if existing == nil { + return new + } + if multi, ok := existing.(*multiTee); ok { + return &multiTee{append(multi.tees, new)} + } + return &multiTee{tees: []Tee{existing, new}} +} + +type multiTee struct { + tees []Tee +} + +func (m *multiTee) Duplicate(tenant string, streams []KeyedStream) { + for _, tee := range m.tees { + tee.Duplicate(tenant, streams) + } +} diff --git a/pkg/distributor/tee_test.go b/pkg/distributor/tee_test.go new file mode 100644 index 0000000000000..f953e09b75111 --- /dev/null +++ b/pkg/distributor/tee_test.go @@ -0,0 +1,48 @@ +package distributor + +import ( + "testing" + + "github.com/stretchr/testify/mock" + + "github.com/grafana/loki/pkg/push" +) + +type mockedTee struct { + mock.Mock +} + +func (m *mockedTee) Duplicate(tenant string, streams []KeyedStream) { + m.Called(tenant, streams) +} + +func TestWrapTee(t *testing.T) { + tee1 := new(mockedTee) + tee2 := new(mockedTee) + tee3 := new(mockedTee) + streams := []KeyedStream{ + { + HashKey: 1, + Stream: push.Stream{}, + }, + } + tee1.On("Duplicate", "1", streams).Once() + tee1.On("Duplicate", "2", streams).Once() + tee2.On("Duplicate", "2", streams).Once() + tee1.On("Duplicate", "3", streams).Once() + tee2.On("Duplicate", "3", streams).Once() + tee3.On("Duplicate", "3", streams).Once() + + wrappedTee := WrapTee(nil, tee1) + wrappedTee.Duplicate("1", streams) + + wrappedTee = WrapTee(wrappedTee, tee2) + wrappedTee.Duplicate("2", streams) + + wrappedTee = WrapTee(wrappedTee, tee3) + wrappedTee.Duplicate("3", streams) + + tee1.AssertExpectations(t) + tee2.AssertExpectations(t) + tee3.AssertExpectations(t) +} diff --git a/pkg/distributor/validator.go b/pkg/distributor/validator.go index 7e7006c836201..2ef4c78cff94a 100644 --- a/pkg/distributor/validator.go +++ b/pkg/distributor/validator.go @@ -9,9 +9,9 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/logproto" - 
"github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/validation" ) const ( @@ -43,6 +43,8 @@ type validationContext struct { maxLabelValueLength int incrementDuplicateTimestamps bool + discoverServiceName []string + discoverLogLevels bool allowStructuredMetadata bool maxStructuredMetadataSize int @@ -63,6 +65,8 @@ func (v Validator) getValidationContextForTime(now time.Time, userID string) val maxLabelNameLength: v.MaxLabelNameLength(userID), maxLabelValueLength: v.MaxLabelValueLength(userID), incrementDuplicateTimestamps: v.IncrementDuplicateTimestamps(userID), + discoverServiceName: v.DiscoverServiceName(userID), + discoverLogLevels: v.DiscoverLogLevels(userID), allowStructuredMetadata: v.AllowStructuredMetadata(userID), maxStructuredMetadataSize: v.MaxStructuredMetadataSize(userID), maxStructuredMetadataCount: v.MaxStructuredMetadataCount(userID), diff --git a/pkg/distributor/validator_test.go b/pkg/distributor/validator_test.go index 0bfdd7d4d0568..9e51099dfad38 100644 --- a/pkg/distributor/validator_test.go +++ b/pkg/distributor/validator_test.go @@ -11,10 +11,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" "github.com/grafana/loki/pkg/push" - "github.com/grafana/loki/pkg/validation" + + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/pkg/distributor/writefailures/cfg.go b/pkg/distributor/writefailures/cfg.go index 393000f2573a2..727c4a1dda0eb 100644 --- a/pkg/distributor/writefailures/cfg.go +++ b/pkg/distributor/writefailures/cfg.go @@ -3,7 +3,7 @@ package writefailures import ( "flag" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) type Cfg struct { diff 
--git a/pkg/distributor/writefailures/manager.go b/pkg/distributor/writefailures/manager.go index 2804c47e9522b..f02ab2e57d76f 100644 --- a/pkg/distributor/writefailures/manager.go +++ b/pkg/distributor/writefailures/manager.go @@ -8,7 +8,7 @@ import ( "github.com/grafana/dskit/limiter" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/runtime" + "github.com/grafana/loki/v3/pkg/runtime" ) type Manager struct { diff --git a/pkg/distributor/writefailures/manager_test.go b/pkg/distributor/writefailures/manager_test.go index 6f5f1eee38419..fb3d7577953a7 100644 --- a/pkg/distributor/writefailures/manager_test.go +++ b/pkg/distributor/writefailures/manager_test.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/util/flagext" ) func TestWriteFailuresLogging(t *testing.T) { diff --git a/pkg/distributor/writefailures/metrics.go b/pkg/distributor/writefailures/metrics.go index e62d6f19b4f79..1d4c1a2fe85c7 100644 --- a/pkg/distributor/writefailures/metrics.go +++ b/pkg/distributor/writefailures/metrics.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type metrics struct { diff --git a/pkg/ingester/checkpoint.go b/pkg/ingester/checkpoint.go index e2c8ef2c18681..9de54888f4748 100644 --- a/pkg/ingester/checkpoint.go +++ b/pkg/ingester/checkpoint.go @@ -20,11 +20,11 @@ import ( "github.com/prometheus/prometheus/tsdb/wlog" prompool "github.com/prometheus/prometheus/util/pool" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" - util_log 
"github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/pool" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/pool" ) var ( diff --git a/pkg/ingester/checkpoint.pb.go b/pkg/ingester/checkpoint.pb.go index 4f66c217b5ef1..b4dbe51913ace 100644 --- a/pkg/ingester/checkpoint.pb.go +++ b/pkg/ingester/checkpoint.pb.go @@ -10,8 +10,8 @@ import ( proto "github.com/gogo/protobuf/proto" _ "github.com/gogo/protobuf/types" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" - _ "github.com/grafana/loki/pkg/logproto" - github_com_grafana_loki_pkg_logproto "github.com/grafana/loki/pkg/logproto" + _ "github.com/grafana/loki/v3/pkg/logproto" + github_com_grafana_loki_v3_pkg_logproto "github.com/grafana/loki/v3/pkg/logproto" io "io" math "math" math_bits "math/bits" @@ -139,9 +139,9 @@ func (m *Chunk) GetHead() []byte { type Series struct { UserID string `protobuf:"bytes,1,opt,name=userID,proto3" json:"userID,omitempty"` // post mapped fingerprint is necessary because subsequent wal writes will reference it. - Fingerprint uint64 `protobuf:"varint,2,opt,name=fingerprint,proto3" json:"fingerprint,omitempty"` - Labels []github_com_grafana_loki_pkg_logproto.LabelAdapter `protobuf:"bytes,3,rep,name=labels,proto3,customtype=github.com/grafana/loki/pkg/logproto.LabelAdapter" json:"labels"` - Chunks []Chunk `protobuf:"bytes,4,rep,name=chunks,proto3" json:"chunks"` + Fingerprint uint64 `protobuf:"varint,2,opt,name=fingerprint,proto3" json:"fingerprint,omitempty"` + Labels []github_com_grafana_loki_v3_pkg_logproto.LabelAdapter `protobuf:"bytes,3,rep,name=labels,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" json:"labels"` + Chunks []Chunk `protobuf:"bytes,4,rep,name=chunks,proto3" json:"chunks"` // most recently pushed timestamp. 
To time.Time `protobuf:"bytes,5,opt,name=to,proto3,stdtime" json:"to"` // most recently pushed line. @@ -242,40 +242,40 @@ func init() { func init() { proto.RegisterFile("pkg/ingester/checkpoint.proto", fileDescriptor_00f4b7152db9bdb5) } var fileDescriptor_00f4b7152db9bdb5 = []byte{ - // 517 bytes of a gzipped FileDescriptorProto + // 521 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x31, 0x8f, 0xd3, 0x30, - 0x14, 0x8e, 0xdb, 0x5c, 0xae, 0x75, 0x61, 0xb1, 0x4e, 0xc8, 0x14, 0xe1, 0x56, 0x37, 0x75, 0x4a, - 0x44, 0x61, 0x80, 0x05, 0xe9, 0x7a, 0x08, 0x09, 0xa9, 0x03, 0x0a, 0xc7, 0xc2, 0x82, 0xdc, 0xc4, - 0x75, 0xa2, 0xa6, 0x71, 0x64, 0xbb, 0x43, 0x37, 0x7e, 0xc2, 0x0d, 0xfc, 0x08, 0x7e, 0xca, 0x8d, - 0x1d, 0x4f, 0x20, 0x1d, 0x34, 0x5d, 0x18, 0xef, 0x27, 0x20, 0x3b, 0x49, 0x29, 0x1b, 0xdd, 0xde, - 0xf7, 0x9e, 0xbf, 0xf7, 0xc9, 0xdf, 0xfb, 0xe0, 0xd3, 0x62, 0xc1, 0x83, 0x34, 0xe7, 0x4c, 0x69, - 0x26, 0x83, 0x28, 0x61, 0xd1, 0xa2, 0x10, 0x69, 0xae, 0xfd, 0x42, 0x0a, 0x2d, 0xd0, 0xc3, 0x4c, - 0x2c, 0xd2, 0xcf, 0xcd, 0xbc, 0x7f, 0xc6, 0x05, 0x17, 0x76, 0x12, 0x98, 0xaa, 0x7a, 0xd4, 0x1f, - 0x70, 0x21, 0x78, 0xc6, 0x02, 0x8b, 0x66, 0xab, 0x79, 0xa0, 0xd3, 0x25, 0x53, 0x9a, 0x2e, 0x8b, - 0xfa, 0xc1, 0x13, 0x23, 0x92, 0x09, 0x5e, 0x31, 0x9b, 0xa2, 0x1a, 0x9e, 0xff, 0x68, 0xc1, 0x93, - 0xcb, 0x64, 0x95, 0x2f, 0xd0, 0x4b, 0xe8, 0xce, 0xa5, 0x58, 0x62, 0x30, 0x04, 0xa3, 0xde, 0xb8, - 0xef, 0x57, 0x6b, 0xfd, 0x66, 0xad, 0x7f, 0xd5, 0xac, 0x9d, 0x74, 0x6e, 0xee, 0x06, 0xce, 0xf5, - 0xcf, 0x01, 0x08, 0x2d, 0x03, 0xbd, 0x80, 0x2d, 0x2d, 0x70, 0xeb, 0x08, 0x5e, 0x4b, 0x0b, 0x34, - 0x81, 0xdd, 0x79, 0xb6, 0x52, 0x09, 0x8b, 0x2f, 0x34, 0x6e, 0x1f, 0x41, 0xfe, 0x4b, 0x43, 0x6f, - 0x61, 0x2f, 0xa3, 0x4a, 0x7f, 0x2c, 0x62, 0xaa, 0x59, 0x8c, 0xdd, 0x23, 0xb6, 0x1c, 0x12, 0xd1, - 0x23, 0xe8, 0x45, 0x99, 0x50, 0x2c, 0xc6, 0x27, 0x43, 0x30, 0xea, 0x84, 0x35, 0x32, 0x7d, 0xb5, - 0xce, 0x23, 0x16, 0x63, 0xaf, 0xea, 0x57, 0x08, 
0x21, 0xe8, 0xc6, 0x54, 0x53, 0x7c, 0x3a, 0x04, - 0xa3, 0x07, 0xa1, 0xad, 0x4d, 0x2f, 0x61, 0x34, 0xc6, 0x9d, 0xaa, 0x67, 0xea, 0xf3, 0xaf, 0x6d, - 0xe8, 0x7d, 0x60, 0x32, 0x65, 0xca, 0xac, 0x5a, 0x29, 0x26, 0xdf, 0xbd, 0xb1, 0x06, 0x77, 0xc3, - 0x1a, 0xa1, 0x21, 0xec, 0xcd, 0xcd, 0x85, 0x65, 0x21, 0xd3, 0x5c, 0x5b, 0x17, 0xdd, 0xf0, 0xb0, - 0x85, 0x32, 0xe8, 0x65, 0x74, 0xc6, 0x32, 0x85, 0xdb, 0xc3, 0xf6, 0xa8, 0x37, 0x7e, 0xec, 0xef, - 0x6f, 0x38, 0x65, 0x9c, 0x46, 0xeb, 0xa9, 0x99, 0xbe, 0xa7, 0xa9, 0x9c, 0xbc, 0x32, 0xdf, 0xfb, - 0x7e, 0x37, 0x78, 0xc6, 0x53, 0x9d, 0xac, 0x66, 0x7e, 0x24, 0x96, 0x01, 0x97, 0x74, 0x4e, 0x73, - 0x1a, 0x98, 0x2c, 0x05, 0x87, 0x51, 0xf0, 0x2d, 0xef, 0x22, 0xa6, 0x85, 0x66, 0x32, 0xac, 0x35, - 0xd0, 0x18, 0x7a, 0x91, 0xc9, 0x83, 0xc2, 0xae, 0x55, 0x3b, 0xf3, 0xff, 0x09, 0xa1, 0x6f, 0xc3, - 0x32, 0x71, 0x8d, 0x50, 0x58, 0xbf, 0xac, 0x03, 0x70, 0x72, 0x64, 0x00, 0xfa, 0xb0, 0x63, 0x6e, - 0x30, 0x4d, 0x73, 0x66, 0xed, 0xed, 0x86, 0x7b, 0x8c, 0x30, 0x3c, 0x65, 0xb9, 0x96, 0xeb, 0x4b, - 0x6d, 0x3d, 0x6e, 0x87, 0x0d, 0x34, 0xb1, 0x49, 0x52, 0x9e, 0x30, 0xa5, 0xaf, 0x94, 0xf5, 0xfa, - 0xbf, 0x63, 0xb3, 0xa7, 0x4d, 0x5e, 0x6f, 0xb6, 0xc4, 0xb9, 0xdd, 0x12, 0xe7, 0x7e, 0x4b, 0xc0, - 0x97, 0x92, 0x80, 0x6f, 0x25, 0x01, 0x37, 0x25, 0x01, 0x9b, 0x92, 0x80, 0x5f, 0x25, 0x01, 0xbf, - 0x4b, 0xe2, 0xdc, 0x97, 0x04, 0x5c, 0xef, 0x88, 0xb3, 0xd9, 0x11, 0xe7, 0x76, 0x47, 0x9c, 0x4f, - 0x9d, 0xc6, 0x83, 0x99, 0x67, 0x85, 0x9e, 0xff, 0x09, 0x00, 0x00, 0xff, 0xff, 0xda, 0xcc, 0xb5, - 0xf6, 0xbf, 0x03, 0x00, 0x00, + 0x14, 0x8e, 0xdb, 0x5c, 0xae, 0xe7, 0xc2, 0x62, 0x9d, 0x90, 0x29, 0xc2, 0xad, 0x6e, 0xea, 0x94, + 0x48, 0xbd, 0x1b, 0x18, 0x10, 0xd2, 0xf5, 0x10, 0x12, 0x52, 0x07, 0x14, 0x8e, 0x85, 0x05, 0xb9, + 0x89, 0xeb, 0x44, 0x4d, 0xe3, 0xc8, 0x76, 0x91, 0xba, 0xf1, 0x13, 0x6e, 0xe3, 0x2f, 0xf0, 0x53, + 0x6e, 0xec, 0x78, 0x02, 0xe9, 0xa0, 0xe9, 0xc2, 0x78, 0x3f, 0x01, 0xd9, 0x49, 0x4a, 0xd9, 0xe8, + 0xf6, 0xbe, 0xf7, 0xfc, 0xbd, 0x4f, 0xfe, 0xde, 0x07, 
0x9f, 0x17, 0x73, 0x1e, 0xa4, 0x39, 0x67, + 0x4a, 0x33, 0x19, 0x44, 0x09, 0x8b, 0xe6, 0x85, 0x48, 0x73, 0xed, 0x17, 0x52, 0x68, 0x81, 0x1e, + 0x67, 0x62, 0x9e, 0x7e, 0x6a, 0xe6, 0xbd, 0x53, 0x2e, 0xb8, 0xb0, 0x93, 0xc0, 0x54, 0xd5, 0xa3, + 0x5e, 0x9f, 0x0b, 0xc1, 0x33, 0x16, 0x58, 0x34, 0x5d, 0xce, 0x02, 0x9d, 0x2e, 0x98, 0xd2, 0x74, + 0x51, 0xd4, 0x0f, 0x9e, 0x19, 0x91, 0x4c, 0xf0, 0x8a, 0xd9, 0x14, 0xd5, 0xf0, 0xec, 0x47, 0x0b, + 0x1e, 0x5d, 0x25, 0xcb, 0x7c, 0x8e, 0x5e, 0x40, 0x77, 0x26, 0xc5, 0x02, 0x83, 0x01, 0x18, 0x76, + 0x47, 0x3d, 0xbf, 0x5a, 0xeb, 0x37, 0x6b, 0xfd, 0xeb, 0x66, 0xed, 0xb8, 0x73, 0x7b, 0xdf, 0x77, + 0x6e, 0x7e, 0xf6, 0x41, 0x68, 0x19, 0xe8, 0x02, 0xb6, 0xb4, 0xc0, 0xad, 0x03, 0x78, 0x2d, 0x2d, + 0xd0, 0x18, 0x9e, 0xcc, 0xb2, 0xa5, 0x4a, 0x58, 0x7c, 0xa9, 0x71, 0xfb, 0x00, 0xf2, 0x5f, 0x1a, + 0x7a, 0x03, 0xbb, 0x19, 0x55, 0xfa, 0x43, 0x11, 0x53, 0xcd, 0x62, 0xec, 0x1e, 0xb0, 0x65, 0x9f, + 0x88, 0x9e, 0x40, 0x2f, 0xca, 0x84, 0x62, 0x31, 0x3e, 0x1a, 0x80, 0x61, 0x27, 0xac, 0x91, 0xe9, + 0xab, 0x55, 0x1e, 0xb1, 0x18, 0x7b, 0x55, 0xbf, 0x42, 0x08, 0x41, 0x37, 0xa6, 0x9a, 0xe2, 0xe3, + 0x01, 0x18, 0x3e, 0x0a, 0x6d, 0x6d, 0x7a, 0x09, 0xa3, 0x31, 0xee, 0x54, 0x3d, 0x53, 0x9f, 0x7d, + 0x6d, 0x43, 0xef, 0x3d, 0x93, 0x29, 0x53, 0x66, 0xd5, 0x52, 0x31, 0xf9, 0xf6, 0xb5, 0x35, 0xf8, + 0x24, 0xac, 0x11, 0x1a, 0xc0, 0xee, 0xcc, 0x5c, 0x58, 0x16, 0x32, 0xcd, 0xb5, 0x75, 0xd1, 0x0d, + 0xf7, 0x5b, 0x48, 0x40, 0x2f, 0xa3, 0x53, 0x96, 0x29, 0xdc, 0x1e, 0xb4, 0x87, 0xdd, 0xd1, 0x53, + 0x7f, 0x77, 0xc3, 0x09, 0xe3, 0x34, 0x5a, 0x4d, 0xcc, 0xf4, 0x1d, 0x4d, 0xe5, 0xf8, 0xa5, 0xf9, + 0xde, 0xf7, 0xfb, 0xfe, 0x05, 0x4f, 0x75, 0xb2, 0x9c, 0xfa, 0x91, 0x58, 0x04, 0x5c, 0xd2, 0x19, + 0xcd, 0x69, 0x60, 0xb2, 0x14, 0x7c, 0x3e, 0x0f, 0xf6, 0xd3, 0xe0, 0x5b, 0xea, 0x65, 0x4c, 0x0b, + 0xcd, 0x64, 0x58, 0xcb, 0xa0, 0x11, 0xf4, 0x22, 0x13, 0x09, 0x85, 0x5d, 0x2b, 0x78, 0xea, 0xff, + 0x93, 0x43, 0xdf, 0xe6, 0x65, 0xec, 0x1a, 0xad, 0xb0, 0x7e, 0x59, 0x67, 0xe0, 0xe8, 0xc0, 
0x0c, + 0xf4, 0x60, 0xc7, 0x9c, 0x61, 0x92, 0xe6, 0xcc, 0x3a, 0x7c, 0x12, 0xee, 0x30, 0xc2, 0xf0, 0x98, + 0xe5, 0x5a, 0xae, 0xae, 0xb4, 0xb5, 0xb9, 0x1d, 0x36, 0xd0, 0x24, 0x27, 0x49, 0x79, 0xc2, 0x94, + 0xbe, 0x56, 0xd6, 0xee, 0xff, 0x4e, 0xce, 0x8e, 0x36, 0x7e, 0xb5, 0xde, 0x10, 0xe7, 0x6e, 0x43, + 0x9c, 0x87, 0x0d, 0x01, 0x5f, 0x4a, 0x02, 0xbe, 0x95, 0x04, 0xdc, 0x96, 0x04, 0xac, 0x4b, 0x02, + 0x7e, 0x95, 0x04, 0xfc, 0x2e, 0x89, 0xf3, 0x50, 0x12, 0x70, 0xb3, 0x25, 0xce, 0x7a, 0x4b, 0x9c, + 0xbb, 0x2d, 0x71, 0x3e, 0x76, 0x1a, 0x0f, 0xa6, 0x9e, 0x15, 0x3a, 0xff, 0x13, 0x00, 0x00, 0xff, + 0xff, 0x4e, 0xf6, 0xfc, 0x95, 0xc2, 0x03, 0x00, 0x00, } func (this *Chunk) Equal(that interface{}) bool { @@ -1142,7 +1142,7 @@ func (m *Series) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, github_com_grafana_loki_pkg_logproto.LabelAdapter{}) + m.Labels = append(m.Labels, github_com_grafana_loki_v3_pkg_logproto.LabelAdapter{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/ingester/checkpoint.proto b/pkg/ingester/checkpoint.proto index 48e100b32ab53..92dae8499ec04 100644 --- a/pkg/ingester/checkpoint.proto +++ b/pkg/ingester/checkpoint.proto @@ -42,7 +42,7 @@ message Series { uint64 fingerprint = 2; repeated logproto.LegacyLabelPair labels = 3 [ (gogoproto.nullable) = false, - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.LabelAdapter" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" ]; repeated Chunk chunks = 4 [(gogoproto.nullable) = false]; // most recently pushed timestamp. 
diff --git a/pkg/ingester/checkpoint_test.go b/pkg/ingester/checkpoint_test.go index 2cf46d921ce94..e8871e7a13918 100644 --- a/pkg/ingester/checkpoint_test.go +++ b/pkg/ingester/checkpoint_test.go @@ -15,15 +15,15 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) // small util for ensuring data exists as we expect diff --git a/pkg/ingester/chunk_test.go b/pkg/ingester/chunk_test.go index c18c64fe67762..4523bc8cc1d8b 100644 --- a/pkg/ingester/chunk_test.go +++ b/pkg/ingester/chunk_test.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" ) func testIteratorForward(t *testing.T, iter iter.EntryIterator, from, through int64) { diff --git a/pkg/ingester/client/client.go b/pkg/ingester/client/client.go index 
861a925d6b176..2c4329b56c93e 100644 --- a/pkg/ingester/client/client.go +++ b/pkg/ingester/client/client.go @@ -5,7 +5,7 @@ import ( "io" "time" - "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/util/server" "github.com/grafana/dskit/grpcclient" "github.com/grafana/dskit/middleware" @@ -16,8 +16,8 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/logproto" ) var ingesterClientRequestDuration = promauto.NewHistogramVec(prometheus.HistogramOpts{ diff --git a/pkg/ingester/client/compat.go b/pkg/ingester/client/compat.go index 4d6c3cacc3fd8..136c486694330 100644 --- a/pkg/ingester/client/compat.go +++ b/pkg/ingester/client/compat.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git a/pkg/ingester/encoding_test.go b/pkg/ingester/encoding_test.go index 200f97b6413b9..4bb1aab0b8da6 100644 --- a/pkg/ingester/encoding_test.go +++ b/pkg/ingester/encoding_test.go @@ -8,9 +8,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" ) func fillChunk(t testing.TB, c chunkenc.Chunk) { diff --git a/pkg/ingester/flush.go b/pkg/ingester/flush.go index 1326d916961ec..f9904ca8409e5 100644 --- a/pkg/ingester/flush.go +++ b/pkg/ingester/flush.go @@ -17,10 +17,10 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/chunkenc" - 
"github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/ingester/flush_test.go b/pkg/ingester/flush_test.go index e3060f873b875..a4255f7510b7a 100644 --- a/pkg/ingester/flush_test.go +++ b/pkg/ingester/flush_test.go @@ -22,21 +22,22 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + 
"github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) const ( @@ -364,6 +365,14 @@ func (s *testStore) GetChunks(_ context.Context, _ string, _, _ model.Time, _ ch return nil, nil, nil } +func (s *testStore) GetShards(_ context.Context, _ string, _, _ model.Time, _ uint64, _ chunk.Predicate) (*logproto.ShardsResponse, error) { + return nil, nil +} + +func (s *testStore) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} + func (s *testStore) GetSchemaConfigs() []config.PeriodConfig { return defaultPeriodConfigs } diff --git a/pkg/ingester/index/bitprefix.go b/pkg/ingester/index/bitprefix.go index 8235c2821d6ca..38df9381e49d0 100644 --- a/pkg/ingester/index/bitprefix.go +++ b/pkg/ingester/index/bitprefix.go @@ -7,9 +7,9 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // BitPrefixInvertedIndex is another inverted index implementation @@ -48,7 +48,7 @@ func NewBitPrefixWithShards(totalShards uint32) (*BitPrefixInvertedIndex, error) }, nil } -func (ii *BitPrefixInvertedIndex) getShards(shard *astmapper.ShardAnnotation) ([]*indexShard, bool) { +func (ii *BitPrefixInvertedIndex) getShards(shard *logql.Shard) ([]*indexShard, bool) { if shard == nil { return ii.shards, false } @@ -63,13 +63,18 @@ func (ii *BitPrefixInvertedIndex) getShards(shard *astmapper.ShardAnnotation) ([ // Conversely, if the requested shard is 1_of_2, but the index has a factor of 4, // we can _exactly_ match ob1 => (ob10, ob11) and know all fingerprints in those // resulting shards have the requested ob1 prefix (don't need to filter). 
- var filter bool - if shard.Of > len(ii.shards) { - filter = true + // NB(owen-d): this only applies when using the old power-of-two shards, + // which are superseded by the new bounded sharding strategy. + filter := true + + switch shard.Variant() { + case logql.PowerOfTwoVersion: + if int(shard.PowerOfTwo.Of) <= len(ii.shards) { + filter = false + } } - requestedShard := shard.TSDB() - minFp, maxFp := requestedShard.GetFromThrough() + minFp, maxFp := shard.GetFromThrough() // Determine how many bits we need to take from // the requested shard's min/max fingerprint values @@ -102,12 +107,17 @@ func (ii *BitPrefixInvertedIndex) shardForFP(fp model.Fingerprint) int { return int(fp >> (64 - localShard.RequiredBits())) } -func (ii *BitPrefixInvertedIndex) validateShard(shard *astmapper.ShardAnnotation) error { +func (ii *BitPrefixInvertedIndex) validateShard(shard *logql.Shard) error { if shard == nil { return nil } - return shard.TSDB().Validate() + switch shard.Variant() { + case logql.PowerOfTwoVersion: + return shard.PowerOfTwo.Validate() + } + return nil + } // Add a fingerprint under the specified labels. @@ -119,7 +129,7 @@ func (ii *BitPrefixInvertedIndex) Add(labels []logproto.LabelAdapter, fp model.F } // Lookup all fingerprints for the provided matchers. -func (ii *BitPrefixInvertedIndex) Lookup(matchers []*labels.Matcher, shard *astmapper.ShardAnnotation) ([]model.Fingerprint, error) { +func (ii *BitPrefixInvertedIndex) Lookup(matchers []*labels.Matcher, shard *logql.Shard) ([]model.Fingerprint, error) { if err := ii.validateShard(shard); err != nil { return nil, err } @@ -143,7 +153,7 @@ func (ii *BitPrefixInvertedIndex) Lookup(matchers []*labels.Matcher, shard *astm // Because bit prefix order is also ascending order, // the merged fingerprints from ascending shards are also in order. 
if filter { - minFP, maxFP := shard.TSDB().GetFromThrough() + minFP, maxFP := shard.GetFromThrough() minIdx := sort.Search(len(result), func(i int) bool { return result[i] >= minFP }) @@ -159,7 +169,7 @@ func (ii *BitPrefixInvertedIndex) Lookup(matchers []*labels.Matcher, shard *astm } // LabelNames returns all label names. -func (ii *BitPrefixInvertedIndex) LabelNames(shard *astmapper.ShardAnnotation) ([]string, error) { +func (ii *BitPrefixInvertedIndex) LabelNames(shard *logql.Shard) ([]string, error) { if err := ii.validateShard(shard); err != nil { return nil, err } @@ -171,7 +181,6 @@ func (ii *BitPrefixInvertedIndex) LabelNames(shard *astmapper.ShardAnnotation) ( // Therefore it's more performant to request shard factors lower or equal to the // inverted index factor if filter { - s := shard.TSDB() extractor = func(x unlockIndex) (results []string) { @@ -179,7 +188,7 @@ func (ii *BitPrefixInvertedIndex) LabelNames(shard *astmapper.ShardAnnotation) ( for name, entry := range x { for _, valEntry := range entry.fps { for _, fp := range valEntry.fps { - if s.Match(fp) { + if shard.Match(fp) { results = append(results, name) continue outer } @@ -201,7 +210,7 @@ func (ii *BitPrefixInvertedIndex) LabelNames(shard *astmapper.ShardAnnotation) ( } // LabelValues returns the values for the given label. 
-func (ii *BitPrefixInvertedIndex) LabelValues(name string, shard *astmapper.ShardAnnotation) ([]string, error) { +func (ii *BitPrefixInvertedIndex) LabelValues(name string, shard *logql.Shard) ([]string, error) { if err := ii.validateShard(shard); err != nil { return nil, err } @@ -209,7 +218,6 @@ func (ii *BitPrefixInvertedIndex) LabelValues(name string, shard *astmapper.Shar var extractor func(indexEntry) []string shards, filter := ii.getShards(shard) if filter { - s := shard.TSDB() extractor = func(x indexEntry) []string { results := make([]string, 0, len(x.fps)) @@ -217,7 +225,7 @@ func (ii *BitPrefixInvertedIndex) LabelValues(name string, shard *astmapper.Shar outer: for val, valEntry := range x.fps { for _, fp := range valEntry.fps { - if s.Match(fp) { + if shard.Match(fp) { results = append(results, val) continue outer } diff --git a/pkg/ingester/index/bitprefix_test.go b/pkg/ingester/index/bitprefix_test.go index d4afb9f635727..9832e15ed60c7 100644 --- a/pkg/ingester/index/bitprefix_test.go +++ b/pkg/ingester/index/bitprefix_test.go @@ -2,6 +2,7 @@ package index import ( "fmt" + "math" "sort" "testing" @@ -9,33 +10,33 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func Test_BitPrefixGetShards(t *testing.T) { for _, tt := range []struct { total uint32 filter bool - shard *astmapper.ShardAnnotation + shard *logql.Shard expected []uint32 }{ // equal factors - {16, false, &astmapper.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0}}, - {16, false, &astmapper.ShardAnnotation{Shard: 4, Of: 16}, []uint32{4}}, - {16, false, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15}}, 
+ {16, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 0, Of: 16}).Ptr(), []uint32{0}}, + {16, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 4, Of: 16}).Ptr(), []uint32{4}}, + {16, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{15}}, // idx factor a larger factor of 2 - {32, false, &astmapper.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0, 1}}, - {32, false, &astmapper.ShardAnnotation{Shard: 4, Of: 16}, []uint32{8, 9}}, - {32, false, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{30, 31}}, - {64, false, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{60, 61, 62, 63}}, + {32, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 0, Of: 16}).Ptr(), []uint32{0, 1}}, + {32, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 4, Of: 16}).Ptr(), []uint32{8, 9}}, + {32, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{30, 31}}, + {64, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{60, 61, 62, 63}}, // // idx factor a smaller factor of 2 - {8, true, &astmapper.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0}}, - {8, true, &astmapper.ShardAnnotation{Shard: 4, Of: 16}, []uint32{2}}, - {8, true, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{7}}, + {8, true, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 0, Of: 16}).Ptr(), []uint32{0}}, + {8, true, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 4, Of: 16}).Ptr(), []uint32{2}}, + {8, true, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{7}}, } { tt := tt t.Run(tt.shard.String()+fmt.Sprintf("_total_%d", tt.total), func(t *testing.T) { @@ -52,11 +53,105 @@ func Test_BitPrefixGetShards(t *testing.T) { } } +func Test_BitPrefixGetShards_Bounded(t *testing.T) { + for _, tt := range []struct { + total uint32 + shard *logql.Shard + expected []uint32 + }{ + { + 4, + logql.NewBoundedShard( + 
logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 0b01 << 62, + Max: 0b10 << 62, + }, + }, + ).Ptr(), + []uint32{1, 2}, + }, + { + 4, + logql.NewBoundedShard( + logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 0b10 << 62, + Max: 0b11 << 62, + }, + }, + ).Ptr(), + []uint32{2, 3}, + }, + { + 8, + logql.NewBoundedShard( + logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 0b00 << 62, + Max: 0b101 << 61, + }, + }, + ).Ptr(), + []uint32{0, 1, 2, 3, 4, 5}, + }, + { + 8, + logql.NewBoundedShard( + logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 0b00 << 62, + Max: 0b110 << 61, + }, + }, + ).Ptr(), + []uint32{0, 1, 2, 3, 4, 5, 6}, + }, + { + 8, + logql.NewBoundedShard( + logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 0b00 << 62, + Max: 0b111 << 61, + }, + }, + ).Ptr(), + []uint32{0, 1, 2, 3, 4, 5, 6, 7}, + }, + { + 8, + logql.NewBoundedShard( + logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 0, + Max: math.MaxUint64, + }, + }, + ).Ptr(), + []uint32{0, 1, 2, 3, 4, 5, 6, 7}, + }, + } { + t.Run(tt.shard.String()+fmt.Sprintf("_total_%d", tt.total), func(t *testing.T) { + ii, err := NewBitPrefixWithShards(tt.total) + require.Nil(t, err) + res, filter := ii.getShards(tt.shard) + require.True(t, filter) // always need to filter bounded shards + resInt := []uint32{} + for _, r := range res { + resInt = append(resInt, r.shard) + } + require.Equal(t, tt.expected, resInt) + }) + } + +} + func Test_BitPrefixValidateShards(t *testing.T) { ii, err := NewBitPrefixWithShards(32) require.Nil(t, err) - require.NoError(t, ii.validateShard(&astmapper.ShardAnnotation{Shard: 1, Of: 16})) - require.Error(t, ii.validateShard(&astmapper.ShardAnnotation{Shard: 1, Of: 15})) + require.NoError(t, ii.validateShard(logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 1, Of: 16}).Ptr())) + require.Error(t, ii.validateShard(logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 1, Of: 15}).Ptr())) } func Test_BitPrefixCreation(t *testing.T) { @@ -116,10 +211,10 @@ func 
Test_BitPrefix_hash_mapping(t *testing.T) { []*labels.Matcher{{Type: labels.MatchEqual, Name: "compose_project", Value: "loki-tsdb-storage-s3"}}, - &astmapper.ShardAnnotation{ - Shard: int(expShard), - Of: requestedFactor, - }, + logql.NewPowerOfTwoShard(index.ShardAnnotation{ + Shard: expShard, + Of: uint32(requestedFactor), + }).Ptr(), ) require.NoError(t, err) require.Len(t, res, 1) @@ -147,7 +242,7 @@ func Test_BitPrefixNoMatcherLookup(t *testing.T) { require.Nil(t, err) expShard := uint32(fp >> (64 - index.NewShard(0, 16).RequiredBits())) ii.Add(logproto.FromLabelsToLabelAdapters(lbs), fp) - ids, err = ii.Lookup(nil, &astmapper.ShardAnnotation{Shard: int(expShard), Of: 16}) + ids, err = ii.Lookup(nil, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: expShard, Of: 16}).Ptr()) require.Nil(t, err) require.Equal(t, fp, ids[0]) } @@ -169,12 +264,12 @@ func Test_BitPrefixConsistentMapping(t *testing.T) { b.Add(logproto.FromLabelsToLabelAdapters(lbs), fp) } - shardMax := 8 - for i := 0; i < shardMax; i++ { - shard := &astmapper.ShardAnnotation{ + shardMax := uint32(8) + for i := uint32(0); i < shardMax; i++ { + shard := logql.NewPowerOfTwoShard(index.ShardAnnotation{ Shard: i, Of: shardMax, - } + }).Ptr() aIDs, err := a.Lookup([]*labels.Matcher{ labels.MustNewMatcher(labels.MatchEqual, "foo", "bar"), diff --git a/pkg/ingester/index/index.go b/pkg/ingester/index/index.go index aff7352c8d59c..67246b081c544 100644 --- a/pkg/ingester/index/index.go +++ b/pkg/ingester/index/index.go @@ -17,9 +17,10 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/stores/series" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/storage/stores/series" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) const 
DefaultIndexShards = 32 @@ -28,9 +29,9 @@ var ErrInvalidShardQuery = errors.New("incompatible index shard query") type Interface interface { Add(labels []logproto.LabelAdapter, fp model.Fingerprint) labels.Labels - Lookup(matchers []*labels.Matcher, shard *astmapper.ShardAnnotation) ([]model.Fingerprint, error) - LabelNames(shard *astmapper.ShardAnnotation) ([]string, error) - LabelValues(name string, shard *astmapper.ShardAnnotation) ([]string, error) + Lookup(matchers []*labels.Matcher, shard *logql.Shard) ([]model.Fingerprint, error) + LabelNames(shard *logql.Shard) ([]string, error) + LabelValues(name string, shard *logql.Shard) ([]string, error) Delete(labels labels.Labels, fp model.Fingerprint) } @@ -55,15 +56,15 @@ func NewWithShards(totalShards uint32) *InvertedIndex { } } -func (ii *InvertedIndex) getShards(shard *astmapper.ShardAnnotation) []*indexShard { +func (ii *InvertedIndex) getShards(shard *index.ShardAnnotation) []*indexShard { if shard == nil { return ii.shards } - totalRequested := int(ii.totalShards) / shard.Of + totalRequested := ii.totalShards / shard.Of result := make([]*indexShard, totalRequested) var j int - for i := 0; i < totalRequested; i++ { + for i := uint32(0); i < totalRequested; i++ { subShard := ((shard.Shard) + (i * shard.Of)) result[j] = ii.shards[subShard] j++ @@ -71,14 +72,20 @@ func (ii *InvertedIndex) getShards(shard *astmapper.ShardAnnotation) []*indexSha return result } -func (ii *InvertedIndex) validateShard(shard *astmapper.ShardAnnotation) error { +func (ii *InvertedIndex) validateShard(shard *logql.Shard) (*index.ShardAnnotation, error) { if shard == nil { - return nil + return nil, nil + } + + s := shard.PowerOfTwo + if s == nil { + return nil, errors.New("inverted index only supports shard annotations with `PowerOfTwo`") } - if int(ii.totalShards)%shard.Of != 0 || uint32(shard.Of) > ii.totalShards { - return fmt.Errorf("%w index_shard:%d query_shard:%v", ErrInvalidShardQuery, ii.totalShards, shard) + + if 
ii.totalShards%s.Of != 0 || s.Of > ii.totalShards { + return nil, fmt.Errorf("%w index_shard:%d query_shard:%v", ErrInvalidShardQuery, ii.totalShards, s) } - return nil + return s, nil } // Add a fingerprint under the specified labels. @@ -150,8 +157,9 @@ func labelsString(b *bytes.Buffer, ls labels.Labels) { } // Lookup all fingerprints for the provided matchers. -func (ii *InvertedIndex) Lookup(matchers []*labels.Matcher, shard *astmapper.ShardAnnotation) ([]model.Fingerprint, error) { - if err := ii.validateShard(shard); err != nil { +func (ii *InvertedIndex) Lookup(matchers []*labels.Matcher, s *logql.Shard) ([]model.Fingerprint, error) { + shard, err := ii.validateShard(s) + if err != nil { return nil, err } @@ -175,8 +183,9 @@ func (ii *InvertedIndex) Lookup(matchers []*labels.Matcher, shard *astmapper.Sha } // LabelNames returns all label names. -func (ii *InvertedIndex) LabelNames(shard *astmapper.ShardAnnotation) ([]string, error) { - if err := ii.validateShard(shard); err != nil { +func (ii *InvertedIndex) LabelNames(s *logql.Shard) ([]string, error) { + shard, err := ii.validateShard(s) + if err != nil { return nil, err } shards := ii.getShards(shard) @@ -190,8 +199,9 @@ func (ii *InvertedIndex) LabelNames(shard *astmapper.ShardAnnotation) ([]string, } // LabelValues returns the values for the given label. 
-func (ii *InvertedIndex) LabelValues(name string, shard *astmapper.ShardAnnotation) ([]string, error) { - if err := ii.validateShard(shard); err != nil { +func (ii *InvertedIndex) LabelValues(name string, s *logql.Shard) ([]string, error) { + shard, err := ii.validateShard(s) + if err != nil { return nil, err } shards := ii.getShards(shard) diff --git a/pkg/ingester/index/index_test.go b/pkg/ingester/index/index_test.go index bc6aaeebf344b..23873cbfc3fdf 100644 --- a/pkg/ingester/index/index_test.go +++ b/pkg/ingester/index/index_test.go @@ -9,27 +9,28 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/util" ) func Test_GetShards(t *testing.T) { for _, tt := range []struct { total uint32 - shard *astmapper.ShardAnnotation + shard *index.ShardAnnotation expected []uint32 }{ // equal factors - {16, &astmapper.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0}}, - {16, &astmapper.ShardAnnotation{Shard: 4, Of: 16}, []uint32{4}}, - {16, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15}}, + {16, &index.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0}}, + {16, &index.ShardAnnotation{Shard: 4, Of: 16}, []uint32{4}}, + {16, &index.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15}}, // idx factor a larger multiple of schema factor - {32, &astmapper.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0, 16}}, - {32, &astmapper.ShardAnnotation{Shard: 4, Of: 16}, []uint32{4, 20}}, - {32, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15, 31}}, - {64, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15, 31, 47, 63}}, + {32, &index.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0, 16}}, + {32, 
&index.ShardAnnotation{Shard: 4, Of: 16}, []uint32{4, 20}}, + {32, &index.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15, 31}}, + {64, &index.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15, 31, 47, 63}}, } { tt := tt t.Run(tt.shard.String()+fmt.Sprintf("_total_%d", tt.total), func(t *testing.T) { @@ -46,7 +47,10 @@ func Test_GetShards(t *testing.T) { func Test_ValidateShards(t *testing.T) { ii := NewWithShards(32) - require.NoError(t, ii.validateShard(&astmapper.ShardAnnotation{Shard: 1, Of: 16})) + _, err := ii.validateShard( + logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 1, Of: 16}).Ptr(), + ) + require.NoError(t, err) } var ( @@ -108,7 +112,8 @@ func Test_hash_mapping(t *testing.T) { ii := NewWithShards(shard) ii.Add(logproto.FromLabelsToLabelAdapters(lbs), 1) - res, err := ii.Lookup([]*labels.Matcher{{Type: labels.MatchEqual, Name: "compose_project", Value: "loki-tsdb-storage-s3"}}, &astmapper.ShardAnnotation{Shard: int(labelsSeriesIDHash(lbs) % 16), Of: 16}) + x := logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: labelsSeriesIDHash(lbs) % 16, Of: 16}) + res, err := ii.Lookup([]*labels.Matcher{{Type: labels.MatchEqual, Name: "compose_project", Value: "loki-tsdb-storage-s3"}}, &x) require.NoError(t, err) require.Len(t, res, 1) require.Equal(t, model.Fingerprint(1), res[0]) @@ -131,7 +136,8 @@ func Test_NoMatcherLookup(t *testing.T) { // with shard param ii = NewWithShards(16) ii.Add(logproto.FromLabelsToLabelAdapters(lbs), 1) - ids, err = ii.Lookup(nil, &astmapper.ShardAnnotation{Shard: int(labelsSeriesIDHash(lbs) % 16), Of: 16}) + x := logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: labelsSeriesIDHash(lbs) % 16, Of: 16}) + ids, err = ii.Lookup(nil, &x) require.Nil(t, err) require.Equal(t, model.Fingerprint(1), ids[0]) } @@ -151,10 +157,10 @@ func Test_ConsistentMapping(t *testing.T) { shardMax := 8 for i := 0; i < shardMax; i++ { - shard := &astmapper.ShardAnnotation{ - Shard: i, - Of: shardMax, - } + shard := 
logql.NewPowerOfTwoShard(index.ShardAnnotation{ + Shard: uint32(i), + Of: uint32(shardMax), + }).Ptr() aIDs, err := a.Lookup([]*labels.Matcher{ labels.MustNewMatcher(labels.MatchEqual, "foo", "bar"), diff --git a/pkg/ingester/index/multi.go b/pkg/ingester/index/multi.go index db4a7b642f6fe..2d94d2e9ea3e7 100644 --- a/pkg/ingester/index/multi.go +++ b/pkg/ingester/index/multi.go @@ -7,9 +7,9 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/storage/config" ) type periodIndex struct { @@ -80,15 +80,15 @@ func (m *Multi) Delete(labels labels.Labels, fp model.Fingerprint) { } -func (m *Multi) Lookup(t time.Time, matchers []*labels.Matcher, shard *astmapper.ShardAnnotation) ([]model.Fingerprint, error) { +func (m *Multi) Lookup(t time.Time, matchers []*labels.Matcher, shard *logql.Shard) ([]model.Fingerprint, error) { return m.indexFor(t).Lookup(matchers, shard) } -func (m *Multi) LabelNames(t time.Time, shard *astmapper.ShardAnnotation) ([]string, error) { +func (m *Multi) LabelNames(t time.Time, shard *logql.Shard) ([]string, error) { return m.indexFor(t).LabelNames(shard) } -func (m *Multi) LabelValues(t time.Time, name string, shard *astmapper.ShardAnnotation) ([]string, error) { +func (m *Multi) LabelValues(t time.Time, name string, shard *logql.Shard) ([]string, error) { return m.indexFor(t).LabelValues(name, shard) } @@ -111,14 +111,14 @@ func (noopInvertedIndex) Add(_ []logproto.LabelAdapter, _ model.Fingerprint) lab func (noopInvertedIndex) Delete(_ labels.Labels, _ model.Fingerprint) {} -func (noopInvertedIndex) Lookup(_ []*labels.Matcher, _ *astmapper.ShardAnnotation) ([]model.Fingerprint, error) { +func (noopInvertedIndex) Lookup(_ []*labels.Matcher, _ 
*logql.Shard) ([]model.Fingerprint, error) { return nil, nil } -func (noopInvertedIndex) LabelNames(_ *astmapper.ShardAnnotation) ([]string, error) { +func (noopInvertedIndex) LabelNames(_ *logql.Shard) ([]string, error) { return nil, nil } -func (noopInvertedIndex) LabelValues(_ string, _ *astmapper.ShardAnnotation) ([]string, error) { +func (noopInvertedIndex) LabelValues(_ string, _ *logql.Shard) ([]string, error) { return nil, nil } diff --git a/pkg/ingester/index/multi_test.go b/pkg/ingester/index/multi_test.go index dc4568dab63b2..d78f132bc975e 100644 --- a/pkg/ingester/index/multi_test.go +++ b/pkg/ingester/index/multi_test.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func MustParseDayTime(s string) config.DayTime { @@ -129,7 +129,9 @@ func TestMultiIndex(t *testing.T) { []*labels.Matcher{ labels.MustNewMatcher(labels.MatchEqual, "foo", "foo"), }, - &astmapper.ShardAnnotation{Shard: int(expShard), Of: int(factor)}, + logql.NewPowerOfTwoShard( + index.ShardAnnotation{Shard: expShard, Of: factor}, + ).Ptr(), ) require.Nil(t, err) @@ -144,7 +146,7 @@ func TestMultiIndex(t *testing.T) { []*labels.Matcher{ labels.MustNewMatcher(labels.MatchEqual, "foo", "foo"), }, - &astmapper.ShardAnnotation{Shard: int(expShard), Of: int(factor)}, + logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: expShard, Of: factor}).Ptr(), ) require.Nil(t, err) diff --git a/pkg/ingester/ingester.go b/pkg/ingester/ingester.go index 0cd76c55a2035..1aede454d53e3 100644 --- 
a/pkg/ingester/ingester.go +++ b/pkg/ingester/ingester.go @@ -12,7 +12,7 @@ import ( "sync" "time" - lokilog "github.com/grafana/loki/pkg/logql/log" + lokilog "github.com/grafana/loki/v3/pkg/logql/log" "github.com/go-kit/log" "github.com/go-kit/log/level" @@ -29,28 +29,28 @@ import ( "github.com/prometheus/prometheus/model/labels" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/ingester/index" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores" - indexstore "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - index_stats "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/wal" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/ingester/index" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/runtime" + 
"github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores" + indexstore "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + index_stats "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/wal" ) const ( @@ -1359,3 +1359,15 @@ func adjustQueryStartTime(maxLookBackPeriod time.Duration, start, now time.Time) } return start } + +func (i *Ingester) GetDetectedFields(_ context.Context, _ *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) { + return &logproto.DetectedFieldsResponse{ + Fields: []*logproto.DetectedField{ + { + Label: "foo", + Type: logproto.DetectedFieldString, + Cardinality: 1, + }, + }, + }, nil +} diff --git a/pkg/ingester/ingester_test.go b/pkg/ingester/ingester_test.go index 1f62821e1cc8b..bf7922fb61dfc 100644 --- a/pkg/ingester/ingester_test.go +++ b/pkg/ingester/ingester_test.go @@ -28,23 +28,24 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/ingester/index" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util/constants" - 
"github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/ingester/index" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) func TestPrepareShutdownMarkerPathNotSet(t *testing.T) { @@ -478,6 +479,14 @@ func (s *mockStore) Stats(_ context.Context, _ string, _, _ model.Time, _ ...*la }, nil } +func (s *mockStore) GetShards(_ context.Context, _ string, _, _ model.Time, _ uint64, _ chunk.Predicate) (*logproto.ShardsResponse, error) { + return nil, nil +} + +func (s *mockStore) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} + func (s *mockStore) Volume(_ context.Context, _ string, _, _ model.Time, limit int32, _ []string, _ string, _ ...*labels.Matcher) (*logproto.VolumeResponse, error) { return &logproto.VolumeResponse{ Volumes: []logproto.Volume{ diff --git a/pkg/ingester/instance.go b/pkg/ingester/instance.go index 64678da85a540..e0218c1a4d947 100644 --- a/pkg/ingester/instance.go +++ b/pkg/ingester/instance.go @@ -10,7 +10,7 @@ import ( "syscall" "time" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" "github.com/go-kit/log/level" 
"github.com/grafana/dskit/httpgrpc" @@ -26,29 +26,28 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/index" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/deletion" - util_log "github.com/grafana/loki/pkg/util/log" - mathutil "github.com/grafana/loki/pkg/util/math" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/index" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" + 
"github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/deletion" + util_log "github.com/grafana/loki/v3/pkg/util/log" + mathutil "github.com/grafana/loki/v3/pkg/util/math" + "github.com/grafana/loki/v3/pkg/validation" ) const ( @@ -504,17 +503,10 @@ func (i *instance) QuerySample(ctx context.Context, req logql.SelectSampleParams stats := stats.FromContext(ctx) var iters []iter.SampleIterator - var shard *astmapper.ShardAnnotation - shards, err := logql.ParseShards(req.Shards) + shard, err := parseShardFromRequest(req.Shards) if err != nil { return nil, err } - if len(shards) > 1 { - return nil, errors.New("only one shard per ingester query is supported") - } - if len(shards) == 1 { - shard = &shards[0] - } selector, err := expr.Selector() if err != nil { return nil, err @@ -823,11 +815,11 @@ func (i *instance) forMatchingStreams( // and is used to select the correct inverted index ts time.Time, matchers []*labels.Matcher, - shards *astmapper.ShardAnnotation, + shard *logql.Shard, fn func(*stream) error, ) error { filters, matchers := util.SplitFiltersAndMatchers(matchers) - ids, err := i.index.Lookup(ts, matchers, shards) + ids, err := i.index.Lookup(ts, matchers, shard) if err != nil { return err } @@ -934,9 +926,9 @@ func (i *instance) openTailersCount() uint32 { return uint32(len(i.tailers)) } -func parseShardFromRequest(reqShards []string) (*astmapper.ShardAnnotation, error) { - var shard *astmapper.ShardAnnotation - shards, err := logql.ParseShards(reqShards) +func parseShardFromRequest(reqShards []string) (*logql.Shard, error) { + var shard *logql.Shard + shards, _, err := logql.ParseShards(reqShards) if err != nil { return nil, err } diff --git a/pkg/ingester/instance_test.go b/pkg/ingester/instance_test.go index 254779511465f..ed78943c23c4d 100644 --- a/pkg/ingester/instance_test.go +++ b/pkg/ingester/instance_test.go @@ -10,12 +10,12 @@ import ( "testing" "time" - "github.com/grafana/loki/pkg/util/httpreq" + 
"github.com/grafana/loki/v3/pkg/util/httpreq" "github.com/grafana/dskit/tenant" "github.com/grafana/dskit/user" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" "github.com/grafana/dskit/flagext" "github.com/pkg/errors" @@ -23,18 +23,18 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/distributor/shardstreams" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/querier/plan" - loki_runtime "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/distributor/shardstreams" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/plan" + loki_runtime "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) func defaultConfig() *Config { diff --git a/pkg/ingester/limiter.go b/pkg/ingester/limiter.go index e48c2a018d277..193209a54f6b9 100644 --- a/pkg/ingester/limiter.go +++ b/pkg/ingester/limiter.go @@ -8,8 +8,8 @@ import ( "golang.org/x/time/rate" - "github.com/grafana/loki/pkg/distributor/shardstreams" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/distributor/shardstreams" + 
"github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/ingester/limiter_test.go b/pkg/ingester/limiter_test.go index b9646bb27d18f..6186e910663e0 100644 --- a/pkg/ingester/limiter_test.go +++ b/pkg/ingester/limiter_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/time/rate" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) func TestLimiter_AssertMaxStreamsPerUser(t *testing.T) { diff --git a/pkg/ingester/mapper.go b/pkg/ingester/mapper.go index 5677a2a08dbec..ced7c0d6833e6 100644 --- a/pkg/ingester/mapper.go +++ b/pkg/ingester/mapper.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "go.uber.org/atomic" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const maxMappedFP = 1 << 20 // About 1M fingerprints reserved for mapping. diff --git a/pkg/ingester/metrics.go b/pkg/ingester/metrics.go index e3d3a41c1a592..8b005860555f1 100644 --- a/pkg/ingester/metrics.go +++ b/pkg/ingester/metrics.go @@ -4,9 +4,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) type ingesterMetrics struct { diff --git a/pkg/ingester/recovery.go b/pkg/ingester/recovery.go index f6809f2a2bdf8..a93151e0e6fca 100644 --- a/pkg/ingester/recovery.go +++ b/pkg/ingester/recovery.go @@ -13,8 +13,8 @@ import ( "github.com/prometheus/prometheus/tsdb/wlog" "golang.org/x/net/context" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" ) type WALReader interface { diff 
--git a/pkg/ingester/recovery_test.go b/pkg/ingester/recovery_test.go index 525d39ce1d8ff..fd8f05136d6f5 100644 --- a/pkg/ingester/recovery_test.go +++ b/pkg/ingester/recovery_test.go @@ -17,14 +17,14 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" - loki_runtime "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" + loki_runtime "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) type MemoryWALReader struct { diff --git a/pkg/ingester/replay_controller.go b/pkg/ingester/replay_controller.go index b5bf6d7b56a0c..3982921300638 100644 --- a/pkg/ingester/replay_controller.go +++ b/pkg/ingester/replay_controller.go @@ -7,7 +7,7 @@ import ( "github.com/go-kit/log/level" "go.uber.org/atomic" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type replayFlusher struct { diff --git a/pkg/ingester/replay_controller_test.go b/pkg/ingester/replay_controller_test.go index 5559022eae97f..0cde538d7c503 100644 --- a/pkg/ingester/replay_controller_test.go +++ b/pkg/ingester/replay_controller_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type dumbFlusher struct { diff --git a/pkg/ingester/stream.go 
b/pkg/ingester/stream.go index 81ce436929251..d7a29b73e802d 100644 --- a/pkg/ingester/stream.go +++ b/pkg/ingester/stream.go @@ -15,16 +15,16 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util/flagext" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util/flagext" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) var ErrEntriesExist = errors.New("duplicate push - entries already exist") diff --git a/pkg/ingester/stream_rate_calculator.go b/pkg/ingester/stream_rate_calculator.go index ef6578ed5d1be..e021850a9bd55 100644 --- a/pkg/ingester/stream_rate_calculator.go +++ b/pkg/ingester/stream_rate_calculator.go @@ -4,7 +4,7 @@ import ( "sync" "time" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git a/pkg/ingester/stream_test.go b/pkg/ingester/stream_test.go index d1b01f22746c2..26eef4e3a7936 100644 --- a/pkg/ingester/stream_test.go +++ b/pkg/ingester/stream_test.go @@ -14,13 +14,13 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/iter" - 
"github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util/flagext" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/pkg/ingester/streams_map_test.go b/pkg/ingester/streams_map_test.go index 2468ffd7c79d8..d98369ff152a9 100644 --- a/pkg/ingester/streams_map_test.go +++ b/pkg/ingester/streams_map_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) func TestStreamsMap(t *testing.T) { diff --git a/pkg/ingester/tailer.go b/pkg/ingester/tailer.go index 25fdfdb740d7a..80cceba78fca6 100644 --- a/pkg/ingester/tailer.go +++ b/pkg/ingester/tailer.go @@ -10,11 +10,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "golang.org/x/net/context" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/ingester/tailer_test.go b/pkg/ingester/tailer_test.go index fa44cc0a7dcb8..1f49ec0095086 100644 --- a/pkg/ingester/tailer_test.go +++ b/pkg/ingester/tailer_test.go @@ -12,8 +12,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - 
"github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func TestTailer_RoundTrip(t *testing.T) { diff --git a/pkg/ingester/wal.go b/pkg/ingester/wal.go index 2bb60d65749d1..5a32aee050325 100644 --- a/pkg/ingester/wal.go +++ b/pkg/ingester/wal.go @@ -10,9 +10,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/prometheus/tsdb/wlog" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/util/flagext" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/util/flagext" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/ingester/wal/encoding.go b/pkg/ingester/wal/encoding.go index fee0b8b5f314d..a21ce57bf34b1 100644 --- a/pkg/ingester/wal/encoding.go +++ b/pkg/ingester/wal/encoding.go @@ -7,8 +7,8 @@ import ( "github.com/prometheus/prometheus/tsdb/chunks" "github.com/prometheus/prometheus/tsdb/record" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util/encoding" ) // RecordType represents the type of the WAL/Checkpoint record. 
diff --git a/pkg/ingester/wal/encoding_test.go b/pkg/ingester/wal/encoding_test.go index 9b36c1c79917f..a72e1c160565d 100644 --- a/pkg/ingester/wal/encoding_test.go +++ b/pkg/ingester/wal/encoding_test.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var ( diff --git a/pkg/iter/cache.go b/pkg/iter/cache.go index db26244e60d2a..a6e12dffbce20 100644 --- a/pkg/iter/cache.go +++ b/pkg/iter/cache.go @@ -1,7 +1,7 @@ package iter import ( - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type CacheEntryIterator interface { diff --git a/pkg/iter/cache_test.go b/pkg/iter/cache_test.go index 9befe34383538..23ee9cb7d9954 100644 --- a/pkg/iter/cache_test.go +++ b/pkg/iter/cache_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func Test_CachedIterator(t *testing.T) { diff --git a/pkg/iter/categorized_labels_iterator.go b/pkg/iter/categorized_labels_iterator.go index 1e95cad09a16e..c91aa49911163 100644 --- a/pkg/iter/categorized_labels_iterator.go +++ b/pkg/iter/categorized_labels_iterator.go @@ -5,8 +5,8 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) type categorizeLabelsIterator struct { diff --git a/pkg/iter/categorized_labels_iterator_test.go b/pkg/iter/categorized_labels_iterator_test.go index 18259edfbf169..790ca5413aba6 100644 --- a/pkg/iter/categorized_labels_iterator_test.go +++ b/pkg/iter/categorized_labels_iterator_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + 
"github.com/grafana/loki/v3/pkg/logproto" ) func TestNewCategorizeLabelsIterator(t *testing.T) { diff --git a/pkg/iter/entry_iterator.go b/pkg/iter/entry_iterator.go index fa67da6a3bc0a..7c373ddeac7cd 100644 --- a/pkg/iter/entry_iterator.go +++ b/pkg/iter/entry_iterator.go @@ -7,10 +7,10 @@ import ( "sync" "time" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/loser" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/loser" ) // EntryIterator iterates over entries in time-order. diff --git a/pkg/iter/entry_iterator_test.go b/pkg/iter/entry_iterator_test.go index c900f898f1be4..3c64c01e296e8 100644 --- a/pkg/iter/entry_iterator_test.go +++ b/pkg/iter/entry_iterator_test.go @@ -12,8 +12,8 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) const ( diff --git a/pkg/iter/iterator.go b/pkg/iter/iterator.go index 8d0bde08ec652..61c727428c71a 100644 --- a/pkg/iter/iterator.go +++ b/pkg/iter/iterator.go @@ -1,6 +1,6 @@ package iter -import "github.com/grafana/loki/pkg/logproto" +import "github.com/grafana/loki/v3/pkg/logproto" // Iterator iterates over data in time-order. 
type Iterator interface { diff --git a/pkg/iter/sample_iterator.go b/pkg/iter/sample_iterator.go index 632ed9106df15..261b75a0b33c9 100644 --- a/pkg/iter/sample_iterator.go +++ b/pkg/iter/sample_iterator.go @@ -6,9 +6,9 @@ import ( "io" "sync" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util" ) // SampleIterator iterates over samples in time-order. diff --git a/pkg/iter/sample_iterator_test.go b/pkg/iter/sample_iterator_test.go index ec739e4d5a290..da3113c547e66 100644 --- a/pkg/iter/sample_iterator_test.go +++ b/pkg/iter/sample_iterator_test.go @@ -14,8 +14,8 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" ) func TestNewPeekingSampleIterator(t *testing.T) { diff --git a/pkg/logcli/client/client.go b/pkg/logcli/client/client.go index 964abc13d30bd..73ddccd7efd17 100644 --- a/pkg/logcli/client/client.go +++ b/pkg/logcli/client/client.go @@ -19,12 +19,12 @@ import ( "github.com/grafana/dskit/backoff" - "github.com/grafana/loki/pkg/logcli/volume" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/logcli/volume" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/build" ) const ( diff --git a/pkg/logcli/client/file.go b/pkg/logcli/client/file.go index 82274ef79fb8d..dd0432a79e172 100644 --- 
a/pkg/logcli/client/file.go +++ b/pkg/logcli/client/file.go @@ -11,15 +11,15 @@ import ( "github.com/gorilla/websocket" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logcli/volume" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - logqllog "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/marshal" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logcli/volume" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + logqllog "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/util/validation" "github.com/grafana/dskit/user" "github.com/prometheus/prometheus/model/labels" diff --git a/pkg/logcli/client/file_test.go b/pkg/logcli/client/file_test.go index 1e5a2ab77c630..1a1eac9fa7a7d 100644 --- a/pkg/logcli/client/file_test.go +++ b/pkg/logcli/client/file_test.go @@ -9,8 +9,8 @@ import ( "testing" "time" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/pkg/logcli/index/stats.go b/pkg/logcli/index/stats.go index f67c6283ba556..723513a26cf25 100644 --- a/pkg/logcli/index/stats.go +++ b/pkg/logcli/index/stats.go @@ -7,8 +7,8 @@ import ( "github.com/fatih/color" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/logproto" ) type StatsQuery struct { diff --git a/pkg/logcli/index/volume.go b/pkg/logcli/index/volume.go index 
85a378d20d255..b6a3205706912 100644 --- a/pkg/logcli/index/volume.go +++ b/pkg/logcli/index/volume.go @@ -3,11 +3,11 @@ package index import ( "log" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/logcli/output" - "github.com/grafana/loki/pkg/logcli/print" - "github.com/grafana/loki/pkg/logcli/volume" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/logcli/output" + "github.com/grafana/loki/v3/pkg/logcli/print" + "github.com/grafana/loki/v3/pkg/logcli/volume" + "github.com/grafana/loki/v3/pkg/loghttp" ) // GetVolume executes a volume query and prints the results diff --git a/pkg/logcli/labelquery/labels.go b/pkg/logcli/labelquery/labels.go index 38e0a500df861..2759a2bb8cf22 100644 --- a/pkg/logcli/labelquery/labels.go +++ b/pkg/logcli/labelquery/labels.go @@ -5,8 +5,8 @@ import ( "log" "time" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/loghttp" ) // LabelQuery contains all necessary fields to execute label queries and print out the results diff --git a/pkg/logcli/output/default.go b/pkg/logcli/output/default.go index d6edf3c30d34a..cfa9f2ad34a1e 100644 --- a/pkg/logcli/output/default.go +++ b/pkg/logcli/output/default.go @@ -8,7 +8,7 @@ import ( "github.com/fatih/color" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) // DefaultOutput provides logs and metadata in human readable format diff --git a/pkg/logcli/output/default_test.go b/pkg/logcli/output/default_test.go index 61e87633362c6..121b6d4816007 100644 --- a/pkg/logcli/output/default_test.go +++ b/pkg/logcli/output/default_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) func TestDefaultOutput_Format(t *testing.T) { diff --git 
a/pkg/logcli/output/jsonl.go b/pkg/logcli/output/jsonl.go index 6c9372cf2194f..793c47eeafb39 100644 --- a/pkg/logcli/output/jsonl.go +++ b/pkg/logcli/output/jsonl.go @@ -7,7 +7,7 @@ import ( "log" "time" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) // JSONLOutput prints logs and metadata as JSON Lines, suitable for scripts diff --git a/pkg/logcli/output/jsonl_test.go b/pkg/logcli/output/jsonl_test.go index bd984e912b87d..22e81fd29ea9a 100644 --- a/pkg/logcli/output/jsonl_test.go +++ b/pkg/logcli/output/jsonl_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) func TestJSONLOutput_Format(t *testing.T) { diff --git a/pkg/logcli/output/output.go b/pkg/logcli/output/output.go index 92d3a214c230e..be82e6e97e69a 100644 --- a/pkg/logcli/output/output.go +++ b/pkg/logcli/output/output.go @@ -8,7 +8,7 @@ import ( "github.com/fatih/color" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) // Blue color is excluded since we are already printing timestamp diff --git a/pkg/logcli/output/raw.go b/pkg/logcli/output/raw.go index 22aba03e35914..4431858efb7ea 100644 --- a/pkg/logcli/output/raw.go +++ b/pkg/logcli/output/raw.go @@ -5,7 +5,7 @@ import ( "io" "time" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) // RawOutput prints logs in their original form, without any metadata diff --git a/pkg/logcli/output/raw_test.go b/pkg/logcli/output/raw_test.go index 3c45708374639..844e8e811afc5 100644 --- a/pkg/logcli/output/raw_test.go +++ b/pkg/logcli/output/raw_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) func TestRawOutput_Format(t *testing.T) { diff --git a/pkg/logcli/print/print.go b/pkg/logcli/print/print.go index 6528b2c7ec4b3..0f7d5d131151e 100644 --- 
a/pkg/logcli/print/print.go +++ b/pkg/logcli/print/print.go @@ -11,11 +11,11 @@ import ( "github.com/fatih/color" - "github.com/grafana/loki/pkg/logcli/output" - "github.com/grafana/loki/pkg/logcli/util" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logcli/output" + "github.com/grafana/loki/v3/pkg/logcli/util" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) type QueryResultPrinter struct { diff --git a/pkg/logcli/print/print_test.go b/pkg/logcli/print/print_test.go index 91ada1c6e5687..737e494545c6f 100644 --- a/pkg/logcli/print/print_test.go +++ b/pkg/logcli/print/print_test.go @@ -6,8 +6,8 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/util/marshal" ) func Test_commonLabels(t *testing.T) { diff --git a/pkg/logcli/query/query.go b/pkg/logcli/query/query.go index 7e8c86e08ad15..84934e57730cc 100644 --- a/pkg/logcli/query/query.go +++ b/pkg/logcli/query/query.go @@ -16,22 +16,22 @@ import ( "github.com/prometheus/client_golang/prometheus" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/logcli/output" - "github.com/grafana/loki/pkg/logcli/print" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/storage" - chunk "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/util/cfg" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - 
"github.com/grafana/loki/pkg/util/marshal" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/logcli/output" + "github.com/grafana/loki/v3/pkg/logcli/print" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/pkg/storage" + chunk "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/util/cfg" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/validation" ) const schemaConfigFilename = "schemaconfig" diff --git a/pkg/logcli/query/query_test.go b/pkg/logcli/query/query_test.go index 605155bca3db2..32a6538558cae 100644 --- a/pkg/logcli/query/query_test.go +++ b/pkg/logcli/query/query_test.go @@ -17,17 +17,17 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logcli/output" - "github.com/grafana/loki/pkg/logcli/volume" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/logcli/output" + "github.com/grafana/loki/v3/pkg/logcli/volume" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/loki" + 
"github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/marshal" ) func Test_batch(t *testing.T) { diff --git a/pkg/logcli/query/tail.go b/pkg/logcli/query/tail.go index c1c092a9e1166..b65e546b904e3 100644 --- a/pkg/logcli/query/tail.go +++ b/pkg/logcli/query/tail.go @@ -13,11 +13,11 @@ import ( "github.com/gorilla/websocket" "github.com/grafana/dskit/backoff" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/logcli/output" - "github.com/grafana/loki/pkg/logcli/util" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/util/unmarshal" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/logcli/output" + "github.com/grafana/loki/v3/pkg/logcli/util" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/util/unmarshal" ) // TailQuery connects to the Loki websocket endpoint and tails logs diff --git a/pkg/logcli/seriesquery/series.go b/pkg/logcli/seriesquery/series.go index 94bd3cc842a05..c5d639ca6af1c 100644 --- a/pkg/logcli/seriesquery/series.go +++ b/pkg/logcli/seriesquery/series.go @@ -8,8 +8,8 @@ import ( "text/tabwriter" "time" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/loghttp" ) // SeriesQuery contains all necessary fields to execute label queries and print out the results diff --git a/pkg/logcli/util/util.go b/pkg/logcli/util/util.go index cb3a70f924a98..cf276d5ab365a 100644 --- a/pkg/logcli/util/util.go +++ b/pkg/logcli/util/util.go @@ -1,6 +1,6 @@ package util -import "github.com/grafana/loki/pkg/loghttp" +import "github.com/grafana/loki/v3/pkg/loghttp" func MatchLabels(on bool, l loghttp.LabelSet, names []string) loghttp.LabelSet { ret := 
loghttp.LabelSet{} diff --git a/pkg/loghttp/labels.go b/pkg/loghttp/labels.go index 98bad4e957869..b2c5a343637be 100644 --- a/pkg/loghttp/labels.go +++ b/pkg/loghttp/labels.go @@ -9,7 +9,7 @@ import ( "github.com/gorilla/mux" "github.com/grafana/jsonparser" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // LabelResponse represents the http json response to a label query @@ -86,3 +86,16 @@ func ParseLabelQuery(r *http.Request) (*logproto.LabelRequest, error) { req.Query = query(r) return req, nil } + +func ParseDetectedLabelsQuery(r *http.Request) (*logproto.DetectedLabelsRequest, error) { + start, end, err := bounds(r) + if err != nil { + return nil, err + } + + return &logproto.DetectedLabelsRequest{ + Start: &start, + End: &end, + Query: query(r), + }, nil +} diff --git a/pkg/loghttp/labels_test.go b/pkg/loghttp/labels_test.go index c2f0f0315b30b..c5b5c837f057b 100644 --- a/pkg/loghttp/labels_test.go +++ b/pkg/loghttp/labels_test.go @@ -9,7 +9,7 @@ import ( "github.com/gorilla/mux" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestParseLabelQuery(t *testing.T) { diff --git a/pkg/loghttp/legacy/tail.go b/pkg/loghttp/legacy/tail.go index a51629cb7c2ac..06e4b8b1b181a 100644 --- a/pkg/loghttp/legacy/tail.go +++ b/pkg/loghttp/legacy/tail.go @@ -3,7 +3,7 @@ package loghttp import ( "time" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // DroppedEntry represents a dropped entry in a tail call diff --git a/pkg/loghttp/params.go b/pkg/loghttp/params.go index df97a5c2e37fe..4f34992df592b 100644 --- a/pkg/loghttp/params.go +++ b/pkg/loghttp/params.go @@ -8,16 +8,18 @@ import ( "strings" "time" + "github.com/c2h5oh/datasize" "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - 
"github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) const ( defaultQueryLimit = 100 + defaultFieldLimit = 1000 defaultSince = 1 * time.Hour defaultDirection = logproto.BACKWARD ) @@ -33,6 +35,28 @@ func limit(r *http.Request) (uint32, error) { return uint32(l), nil } +func lineLimit(r *http.Request) (uint32, error) { + l, err := parseInt(r.Form.Get("line_limit"), defaultQueryLimit) + if err != nil { + return 0, err + } + if l <= 0 { + return 0, errors.New("limit must be a positive value") + } + return uint32(l), nil +} + +func fieldLimit(r *http.Request) (uint32, error) { + l, err := parseInt(r.Form.Get("field_limit"), defaultFieldLimit) + if err != nil { + return 0, err + } + if l <= 0 { + return 0, errors.New("limit must be a positive value") + } + return uint32(l), nil +} + func query(r *http.Request) string { return r.Form.Get("query") } @@ -192,7 +216,7 @@ func parseRegexQuery(httpRequest *http.Request) (string, error) { if err != nil { return "", err } - newExpr, err := syntax.AddFilterExpr(expr, labels.MatchRegexp, "", regexp) + newExpr, err := syntax.AddFilterExpr(expr, log.LineMatchRegexp, "", regexp) if err != nil { return "", err } @@ -200,3 +224,21 @@ func parseRegexQuery(httpRequest *http.Request) (string, error) { } return query, nil } + +func parseBytes(r *http.Request, field string, optional bool) (val datasize.ByteSize, err error) { + s := r.Form.Get(field) + + if s == "" { + if !optional { + return 0, fmt.Errorf("missing %s", field) + } + return val, nil + } + + if err := val.UnmarshalText([]byte(s)); err != nil { + return 0, errors.Wrapf(err, "invalid %s: %s", field, s) + } + + return val, nil + +} diff --git a/pkg/loghttp/params_test.go b/pkg/loghttp/params_test.go index 873fdff36bb98..3456fdc2ed802 100644 --- a/pkg/loghttp/params_test.go +++ b/pkg/loghttp/params_test.go @@ -10,7 +10,7 @@ import ( 
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestHttp_defaultQueryRangeStep(t *testing.T) { diff --git a/pkg/loghttp/push/otlp.go b/pkg/loghttp/push/otlp.go index 8136d6995dc62..a361bbbf196de 100644 --- a/pkg/loghttp/push/otlp.go +++ b/pkg/loghttp/push/otlp.go @@ -18,15 +18,18 @@ import ( "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/pdata/plog/plogotlp" - "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/push" - loki_util "github.com/grafana/loki/pkg/util" + + "github.com/grafana/loki/v3/pkg/logproto" + loki_util "github.com/grafana/loki/v3/pkg/util" ) const ( pbContentType = "application/x-protobuf" gzipContentEncoding = "gzip" attrServiceName = "service.name" + + OTLPSeverityNumber = "severity_number" ) func newPushStats() *Stats { @@ -151,6 +154,10 @@ func otlpToLokiPushRequest(ctx context.Context, ld plog.Logs, userID string, ten retentionPeriodForUser := tenantsRetention.RetentionPeriodFor(userID, lbs) stats.StructuredMetadataBytes[retentionPeriodForUser] += int64(resourceAttributesAsStructuredMetadataSize) + if tracker != nil { + tracker.ReceivedBytesAdd(ctx, userID, retentionPeriodForUser, lbs, float64(resourceAttributesAsStructuredMetadataSize)) + } + stats.ResourceAndSourceMetadataLabels[retentionPeriodForUser] = append(stats.ResourceAndSourceMetadataLabels[retentionPeriodForUser], resourceAttributesAsStructuredMetadata...) 
for j := 0; j < sls.Len(); j++ { @@ -202,6 +209,10 @@ func otlpToLokiPushRequest(ctx context.Context, ld plog.Logs, userID string, ten scopeAttributesAsStructuredMetadataSize := labelsSize(scopeAttributesAsStructuredMetadata) stats.StructuredMetadataBytes[retentionPeriodForUser] += int64(scopeAttributesAsStructuredMetadataSize) + if tracker != nil { + tracker.ReceivedBytesAdd(ctx, userID, retentionPeriodForUser, lbs, float64(scopeAttributesAsStructuredMetadataSize)) + } + stats.ResourceAndSourceMetadataLabels[retentionPeriodForUser] = append(stats.ResourceAndSourceMetadataLabels[retentionPeriodForUser], scopeAttributesAsStructuredMetadata...) for k := 0; k < logs.Len(); k++ { log := logs.At(k) @@ -279,7 +290,7 @@ func otlpLogToPushEntry(log plog.LogRecord, otlpConfig OTLPConfig) push.Entry { if severityNum := log.SeverityNumber(); severityNum != plog.SeverityNumberUnspecified { structuredMetadata = append(structuredMetadata, push.LabelAdapter{ - Name: "severity_number", + Name: OTLPSeverityNumber, Value: fmt.Sprintf("%d", severityNum), }) } diff --git a/pkg/loghttp/push/otlp_test.go b/pkg/loghttp/push/otlp_test.go index 5202505fd1bf1..bcdeb18d17069 100644 --- a/pkg/loghttp/push/otlp_test.go +++ b/pkg/loghttp/push/otlp_test.go @@ -13,8 +13,9 @@ import ( "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" - "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/push" + + "github.com/grafana/loki/v3/pkg/logproto" ) func TestOTLPToLokiPushRequest(t *testing.T) { @@ -26,7 +27,6 @@ func TestOTLPToLokiPushRequest(t *testing.T) { expectedPushRequest logproto.PushRequest expectedStats Stats otlpConfig OTLPConfig - tracker UsageTracker }{ { name: "no logs", @@ -129,7 +129,6 @@ func TestOTLPToLokiPushRequest(t *testing.T) { { name: "service.name not defined in resource attributes", otlpConfig: DefaultOTLPConfig(defaultGlobalOTLPConfig), - tracker: NewMockTracker(), generateLogs: func() plog.Logs { ld := plog.NewLogs() 
ld.ResourceLogs().AppendEmpty().Resource().Attributes().PutStr("service.namespace", "foo") @@ -164,32 +163,7 @@ func TestOTLPToLokiPushRequest(t *testing.T) { }, StreamLabelsSize: 47, MostRecentEntryTimestamp: now, - /* - logLinesBytesCustomTrackers: []customTrackerPair{ - { - Labels: []labels.Label{ - {Name: "service_namespace", Value: "foo"}, - {Name: "tracker", Value: "foo"}, - }, - Bytes: map[time.Duration]int64{ - time.Hour: 9, - }, - }, - }, - structuredMetadataBytesCustomTrackers: []customTrackerPair{ - { - Labels: []labels.Label{ - {Name: "service_namespace", Value: "foo"}, - {Name: "tracker", Value: "foo"}, - }, - Bytes: map[time.Duration]int64{ - time.Hour: 0, - }, - }, - }, - */ }, - //expectedTrackedUsaged: }, { name: "resource attributes and scope attributes stored as structured metadata", @@ -518,9 +492,19 @@ func TestOTLPToLokiPushRequest(t *testing.T) { } { t.Run(tc.name, func(t *testing.T) { stats := newPushStats() - pushReq := otlpToLokiPushRequest(context.Background(), tc.generateLogs(), "foo", fakeRetention{}, tc.otlpConfig, tc.tracker, stats) + tracker := NewMockTracker() + pushReq := otlpToLokiPushRequest(context.Background(), tc.generateLogs(), "foo", fakeRetention{}, tc.otlpConfig, tracker, stats) require.Equal(t, tc.expectedPushRequest, *pushReq) require.Equal(t, tc.expectedStats, *stats) + + totalBytes := 0.0 + for _, b := range stats.LogLinesBytes { + totalBytes += float64(b) + } + for _, b := range stats.StructuredMetadataBytes { + totalBytes += float64(b) + } + require.Equal(t, totalBytes, tracker.Total(), "Total tracked bytes must equal total bytes of the stats.") }) } } diff --git a/pkg/loghttp/push/push.go b/pkg/loghttp/push/push.go index f4b8771e5fec8..c63b32c6111bb 100644 --- a/pkg/loghttp/push/push.go +++ b/pkg/loghttp/push/push.go @@ -20,15 +20,16 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/analytics" - 
"github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util" - loki_util "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/unmarshal" - unmarshal2 "github.com/grafana/loki/pkg/util/unmarshal/legacy" + loki_util "github.com/grafana/loki/v3/pkg/util" + + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/unmarshal" + unmarshal2 "github.com/grafana/loki/v3/pkg/util/unmarshal/legacy" ) var ( diff --git a/pkg/loghttp/push/push_test.go b/pkg/loghttp/push/push_test.go index 9f470fc0eb9e4..ac83492d62eba 100644 --- a/pkg/loghttp/push/push_test.go +++ b/pkg/loghttp/push/push_test.go @@ -17,7 +17,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // GZip source string and return compressed string @@ -226,6 +226,7 @@ func TestParseRequest(t *testing.T) { assert.NotNil(t, data, "Should give data for %d", index) require.Equal(t, test.expectedStructuredMetadataBytes, structuredMetadataBytesReceived) require.Equal(t, test.expectedBytes, bytesReceived) + require.Equalf(t, tracker.Total(), float64(bytesReceived), "tracked usage bytes must equal bytes received metric") require.Equal(t, test.expectedLines, linesReceived) require.Equal(t, float64(test.expectedStructuredMetadataBytes), testutil.ToFloat64(structuredMetadataBytesIngested.WithLabelValues("fake", ""))) require.Equal(t, float64(test.expectedBytes), testutil.ToFloat64(bytesIngested.WithLabelValues("fake", ""))) @@ -257,6 +258,14 @@ func NewMockTracker() 
*MockCustomTracker { } } +func (t *MockCustomTracker) Total() float64 { + total := float64(0) + for _, v := range t.receivedBytes { + total += v + } + return total +} + // DiscardedBytesAdd implements CustomTracker. func (t *MockCustomTracker) DiscardedBytesAdd(_ context.Context, _, _ string, labels labels.Labels, value float64) { t.discardedBytes[labels.String()] += value diff --git a/pkg/loghttp/query.go b/pkg/loghttp/query.go index 854ccd5ae7116..5d16aefc1e43e 100644 --- a/pkg/loghttp/query.go +++ b/pkg/loghttp/query.go @@ -8,18 +8,19 @@ import ( "time" "unsafe" + "github.com/c2h5oh/datasize" "github.com/grafana/jsonparser" json "github.com/json-iterator/go" "github.com/prometheus/common/model" "github.com/grafana/dskit/httpgrpc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" ) var ( @@ -503,6 +504,17 @@ func ParseIndexStatsQuery(r *http.Request) (*RangeQuery, error) { return ParseRangeQuery(r) } +func ParseIndexShardsQuery(r *http.Request) (*RangeQuery, datasize.ByteSize, error) { + // TODO(owen-d): use a specific type/validation instead + // of using range query parameters (superset) + parsed, err := ParseRangeQuery(r) + if err != nil { + return nil, 0, err + } + targetBytes, err := parseBytes(r, "targetBytesPerShard", true) + return parsed, targetBytes, err +} + func NewVolumeRangeQueryWithDefaults(matchers string) *logproto.VolumeRequest { start, end, _ := determineBounds(time.Now(), "", "", "") step := 
(time.Duration(defaultQueryRangeStep(start, end)) * time.Second).Milliseconds() @@ -605,6 +617,48 @@ func ParseVolumeRangeQuery(r *http.Request) (*VolumeRangeQuery, error) { }, nil } +func ParseDetectedFieldsQuery(r *http.Request) (*logproto.DetectedFieldsRequest, error) { + var err error + result := &logproto.DetectedFieldsRequest{} + + result.Query = query(r) + result.Start, result.End, err = bounds(r) + if err != nil { + return nil, err + } + + if result.End.Before(result.Start) { + return nil, errEndBeforeStart + } + + result.LineLimit, err = lineLimit(r) + if err != nil { + return nil, err + } + + result.FieldLimit, err = fieldLimit(r) + if err != nil { + return nil, err + } + + step, err := step(r, result.Start, result.End) + result.Step = step.Milliseconds() + if err != nil { + return nil, err + } + + if result.Step <= 0 { + return nil, errZeroOrNegativeStep + } + + // For safety, limit the number of returned points per timeseries. + // This is sufficient for 60s resolution for a week or 1h resolution for a year. 
+ if (result.End.Sub(result.Start) / step) > 11000 { + return nil, errStepTooSmall + } + return result, nil +} + func targetLabels(r *http.Request) []string { lbls := strings.Split(r.Form.Get("targetLabels"), ",") if (len(lbls) == 1 && lbls[0] == "") || len(lbls) == 0 { diff --git a/pkg/loghttp/query_test.go b/pkg/loghttp/query_test.go index e94199352f12e..889a8900eac76 100644 --- a/pkg/loghttp/query_test.go +++ b/pkg/loghttp/query_test.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) func TestParseRangeQuery(t *testing.T) { diff --git a/pkg/loghttp/series.go b/pkg/loghttp/series.go index 2cb22ec6cfeca..1faef6d6e540a 100644 --- a/pkg/loghttp/series.go +++ b/pkg/loghttp/series.go @@ -5,8 +5,8 @@ import ( "sort" "strings" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" ) type SeriesResponse struct { diff --git a/pkg/loghttp/series_test.go b/pkg/loghttp/series_test.go index 403b0c19af981..928e5350ccb52 100644 --- a/pkg/loghttp/series_test.go +++ b/pkg/loghttp/series_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestParseAndValidateSeriesQuery(t *testing.T) { diff --git a/pkg/loghttp/tail.go b/pkg/loghttp/tail.go index 9ad2219b10979..658ae112cce07 100644 --- a/pkg/loghttp/tail.go +++ b/pkg/loghttp/tail.go @@ -10,9 +10,9 @@ import ( "github.com/grafana/dskit/httpgrpc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + 
"github.com/grafana/loki/v3/pkg/querier/plan" ) const ( diff --git a/pkg/loghttp/tail_test.go b/pkg/loghttp/tail_test.go index 6fe7163116675..06fd23f4f5f9d 100644 --- a/pkg/loghttp/tail_test.go +++ b/pkg/loghttp/tail_test.go @@ -8,9 +8,9 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" ) func TestParseTailQuery(t *testing.T) { diff --git a/pkg/logproto/bloomgateway.pb.go b/pkg/logproto/bloomgateway.pb.go index 98a22fd13168f..1ca062e722b33 100644 --- a/pkg/logproto/bloomgateway.pb.go +++ b/pkg/logproto/bloomgateway.pb.go @@ -8,8 +8,8 @@ import ( fmt "fmt" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" - github_com_grafana_loki_pkg_logql_syntax "github.com/grafana/loki/pkg/logql/syntax" - github_com_grafana_loki_pkg_querier_plan "github.com/grafana/loki/pkg/querier/plan" + github_com_grafana_loki_v3_pkg_logql_syntax "github.com/grafana/loki/v3/pkg/logql/syntax" + github_com_grafana_loki_v3_pkg_querier_plan "github.com/grafana/loki/v3/pkg/querier/plan" github_com_prometheus_common_model "github.com/prometheus/common/model" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -37,8 +37,8 @@ type FilterChunkRefRequest struct { Through github_com_prometheus_common_model.Time `protobuf:"varint,2,opt,name=through,proto3,customtype=github.com/prometheus/common/model.Time" json:"through"` Refs []*GroupedChunkRefs `protobuf:"bytes,3,rep,name=refs,proto3" json:"refs,omitempty"` // TODO(salvacorts): Delete this field once the weekly release is done. 
- Filters []github_com_grafana_loki_pkg_logql_syntax.LineFilter `protobuf:"bytes,4,rep,name=filters,proto3,customtype=github.com/grafana/loki/pkg/logql/syntax.LineFilter" json:"filters"` - Plan github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,5,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan"` + Filters []github_com_grafana_loki_v3_pkg_logql_syntax.LineFilter `protobuf:"bytes,4,rep,name=filters,proto3,customtype=github.com/grafana/loki/v3/pkg/logql/syntax.LineFilter" json:"filters"` + Plan github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,5,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan"` } func (m *FilterChunkRefRequest) Reset() { *m = FilterChunkRefRequest{} } @@ -237,40 +237,41 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/bloomgateway.proto", fileDescriptor_a50b5dd1dbcd1415) } var fileDescriptor_a50b5dd1dbcd1415 = []byte{ - // 525 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x53, 0xbb, 0x6e, 0x13, 0x41, - 0x14, 0xdd, 0xc1, 0x26, 0x8f, 0x31, 0x2f, 0x8d, 0x42, 0xb4, 0x32, 0xd2, 0x78, 0x65, 0x21, 0x70, - 0xb5, 0x2b, 0x39, 0x0d, 0x82, 0xce, 0x91, 0x88, 0x90, 0x28, 0x60, 0x40, 0x14, 0x29, 0x90, 0xd6, - 0xce, 0xdd, 0x87, 0xbc, 0x3b, 0xb3, 0x9e, 0x99, 0x15, 0xb8, 0xe3, 0x13, 0xf8, 0x08, 0x0a, 0xbe, - 0x80, 0x6f, 0x48, 0xe9, 0x32, 0xa2, 0x88, 0xf0, 0xba, 0xa1, 0xcc, 0x27, 0x20, 0xcf, 0x7a, 0xb3, - 0x76, 0x04, 0x44, 0xa2, 0xa2, 0x9a, 0xc7, 0xbd, 0xe7, 0x9e, 0x7b, 0xee, 0x03, 0x77, 0xb2, 0x71, - 0xe8, 0x25, 0x22, 0xcc, 0xa4, 0xd0, 0xc2, 0x1b, 0x26, 0x42, 0xa4, 0xa1, 0xaf, 0xe1, 0x83, 0x3f, - 0x75, 0xcd, 0x17, 0xd9, 0xa9, 0x8c, 0xed, 0xbd, 0x50, 0x84, 0xa2, 0xf4, 0x5b, 0xde, 0x4a, 0x7b, - 0xfb, 0xc1, 0x46, 0x80, 0xea, 0x52, 0x1a, 0xbb, 0x5f, 0x1a, 0xf8, 0xfe, 0xf3, 0x38, 0xd1, 0x20, - 0x0f, 0xa3, 0x9c, 0x8f, 0x19, 0x04, 0x0c, 0x26, 0x39, 0x28, 0x4d, 0x0e, 
0x71, 0x33, 0x90, 0x22, - 0xb5, 0x91, 0x83, 0x7a, 0x8d, 0x81, 0x77, 0x7a, 0xde, 0xb1, 0xbe, 0x9f, 0x77, 0x1e, 0x87, 0xb1, - 0x8e, 0xf2, 0xa1, 0x3b, 0x12, 0xa9, 0x97, 0x49, 0x91, 0x82, 0x8e, 0x20, 0x57, 0xde, 0x48, 0xa4, - 0xa9, 0xe0, 0x5e, 0x2a, 0x4e, 0x20, 0x71, 0xdf, 0xc6, 0x29, 0x30, 0x03, 0x26, 0x2f, 0xf0, 0xb6, - 0x8e, 0xa4, 0xc8, 0xc3, 0xc8, 0xbe, 0xf1, 0x6f, 0x71, 0x2a, 0x3c, 0x71, 0x71, 0x53, 0x42, 0xa0, - 0xec, 0x86, 0xd3, 0xe8, 0xb5, 0xfa, 0x6d, 0xf7, 0x52, 0xc8, 0x91, 0x14, 0x79, 0x06, 0x27, 0x55, - 0xfe, 0x8a, 0x19, 0x3f, 0x32, 0xc6, 0xdb, 0x81, 0x11, 0xa6, 0xec, 0xa6, 0x81, 0xec, 0xd5, 0x90, - 0x97, 0x31, 0x87, 0x52, 0xf5, 0xe0, 0xd9, 0x2a, 0xa1, 0x83, 0xb5, 0x84, 0x42, 0xe9, 0x07, 0x3e, - 0xf7, 0xbd, 0x44, 0x8c, 0x63, 0x6f, 0x55, 0xbd, 0x49, 0xe2, 0xa9, 0x29, 0xd7, 0xfe, 0xc7, 0x35, - 0x30, 0xab, 0x18, 0xc8, 0x7b, 0xdc, 0xcc, 0x12, 0x9f, 0xdb, 0x37, 0x1d, 0xd4, 0x6b, 0xf5, 0xef, - 0xd4, 0x4c, 0xaf, 0x12, 0x9f, 0x0f, 0x9e, 0xae, 0x38, 0xfa, 0x7f, 0xe3, 0x98, 0xe4, 0x20, 0x63, - 0x90, 0xde, 0x32, 0x8e, 0xfb, 0x3a, 0x07, 0x39, 0x5d, 0x62, 0x99, 0x89, 0xdb, 0x65, 0x78, 0xff, - 0x6a, 0x97, 0x54, 0x26, 0xb8, 0x02, 0xf2, 0x04, 0xef, 0x8e, 0x2a, 0xe5, 0x36, 0xba, 0xb6, 0x36, - 0xb5, 0x73, 0xf7, 0x1b, 0xc2, 0x3b, 0x6f, 0x22, 0x21, 0x35, 0x83, 0xe0, 0xbf, 0xeb, 0x76, 0x1b, - 0xef, 0x8c, 0x22, 0x18, 0x8d, 0x55, 0x9e, 0xda, 0x0d, 0x07, 0xf5, 0x6e, 0xb3, 0xcb, 0x77, 0x57, - 0xe3, 0x7b, 0x57, 0x75, 0x11, 0x07, 0xb7, 0x82, 0x98, 0x87, 0x20, 0x33, 0x19, 0x73, 0x6d, 0x64, - 0x34, 0xd9, 0xfa, 0x17, 0xd9, 0xc7, 0x5b, 0x1a, 0xb8, 0xcf, 0xb5, 0xc9, 0x6d, 0x97, 0xad, 0x5e, - 0xe4, 0xd1, 0xc6, 0x5c, 0x91, 0xba, 0x76, 0x55, 0x6d, 0xca, 0x79, 0xea, 0x07, 0xf8, 0xd6, 0x60, - 0xb9, 0x7c, 0x47, 0xe5, 0xf2, 0x91, 0x77, 0xf8, 0xee, 0x66, 0x4b, 0x14, 0xe9, 0xd4, 0xe0, 0xdf, - 0xee, 0x54, 0xdb, 0xf9, 0xb3, 0x43, 0xd9, 0xce, 0xae, 0x35, 0x38, 0x9e, 0xcd, 0xa9, 0x75, 0x36, - 0xa7, 0xd6, 0xc5, 0x9c, 0xa2, 0x4f, 0x05, 0x45, 0x5f, 0x0b, 0x8a, 0x4e, 0x0b, 0x8a, 0x66, 0x05, - 0x45, 0x3f, 
0x0a, 0x8a, 0x7e, 0x16, 0xd4, 0xba, 0x28, 0x28, 0xfa, 0xbc, 0xa0, 0xd6, 0x6c, 0x41, - 0xad, 0xb3, 0x05, 0xb5, 0x8e, 0x1f, 0x5e, 0x33, 0xbe, 0x86, 0x74, 0xb8, 0x65, 0x8e, 0x83, 0x5f, - 0x01, 0x00, 0x00, 0xff, 0xff, 0xbe, 0xe2, 0x64, 0x8a, 0x54, 0x04, 0x00, 0x00, + // 529 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x53, 0xbb, 0x6e, 0x13, 0x4d, + 0x14, 0xde, 0xf9, 0xed, 0x3f, 0x71, 0xc6, 0xdc, 0x34, 0x0a, 0xd1, 0xca, 0x48, 0xe3, 0x95, 0x0b, + 0xe2, 0x6a, 0x47, 0x72, 0x04, 0xa2, 0x81, 0xc2, 0x91, 0x88, 0x90, 0x28, 0x60, 0x40, 0x14, 0x48, + 0x14, 0x6b, 0xe7, 0xec, 0x45, 0xde, 0x9d, 0x59, 0xcf, 0xce, 0x02, 0xee, 0x78, 0x04, 0x1e, 0x03, + 0xf1, 0x00, 0x3c, 0x43, 0x4a, 0x97, 0x11, 0x45, 0x84, 0xd7, 0x0d, 0x65, 0x1e, 0x01, 0x79, 0xd6, + 0x1b, 0xdb, 0x11, 0xc8, 0x12, 0x15, 0xd5, 0x5c, 0xce, 0xf9, 0xce, 0x77, 0xbe, 0x73, 0xc1, 0xed, + 0x74, 0x14, 0xb0, 0x58, 0x06, 0xa9, 0x92, 0x5a, 0xb2, 0x41, 0x2c, 0x65, 0x12, 0x78, 0x1a, 0x3e, + 0x78, 0x13, 0xd7, 0x7c, 0x91, 0x46, 0x65, 0x6c, 0xed, 0x07, 0x32, 0x90, 0xa5, 0xdf, 0xe2, 0x56, + 0xda, 0x5b, 0xf7, 0x36, 0x02, 0x54, 0x97, 0xd2, 0xd8, 0xf9, 0x5a, 0xc3, 0x77, 0x9f, 0x46, 0xb1, + 0x06, 0x75, 0x1c, 0xe6, 0x62, 0xc4, 0xc1, 0xe7, 0x30, 0xce, 0x21, 0xd3, 0xe4, 0x18, 0xd7, 0x7d, + 0x25, 0x13, 0x1b, 0x39, 0xa8, 0x5b, 0xeb, 0xb3, 0xb3, 0x8b, 0xb6, 0xf5, 0xfd, 0xa2, 0x7d, 0x18, + 0x44, 0x3a, 0xcc, 0x07, 0xee, 0x50, 0x26, 0x2c, 0x55, 0x32, 0x01, 0x1d, 0x42, 0x9e, 0xb1, 0xa1, + 0x4c, 0x12, 0x29, 0x58, 0x22, 0x4f, 0x21, 0x76, 0x5f, 0x47, 0x09, 0x70, 0x03, 0x26, 0xcf, 0xf0, + 0xae, 0x0e, 0x95, 0xcc, 0x83, 0xd0, 0xfe, 0xef, 0xef, 0xe2, 0x54, 0x78, 0xe2, 0xe2, 0xba, 0x02, + 0x3f, 0xb3, 0x6b, 0x4e, 0xad, 0xdb, 0xec, 0xb5, 0xdc, 0x2b, 0x21, 0x27, 0x4a, 0xe6, 0x29, 0x9c, + 0x56, 0xf9, 0x67, 0xdc, 0xf8, 0x11, 0x81, 0x77, 0x7d, 0x23, 0x2c, 0xb3, 0xeb, 0x06, 0xb2, 0xbf, + 0x82, 0x3c, 0x8f, 0x04, 0x94, 0xaa, 0xfb, 0x4f, 0x96, 0x09, 0x3d, 0x5c, 0x4b, 0x28, 0x50, 0x9e, + 0xef, 0x09, 0x8f, 
0xc5, 0x72, 0x14, 0xb1, 0xf7, 0x47, 0x6c, 0x59, 0xc0, 0x71, 0xcc, 0xb2, 0x89, + 0xd0, 0xde, 0xc7, 0x35, 0x3c, 0xaf, 0x48, 0x88, 0x87, 0xeb, 0x69, 0xec, 0x09, 0xfb, 0x7f, 0x07, + 0x75, 0x9b, 0xbd, 0x5b, 0x2b, 0xb2, 0x17, 0xb1, 0x27, 0xfa, 0x8f, 0x97, 0x34, 0x0f, 0xb6, 0xd0, + 0x8c, 0x73, 0x50, 0x11, 0x28, 0xb6, 0x08, 0xe5, 0xbe, 0xcc, 0x41, 0x4d, 0x16, 0x70, 0x6e, 0x42, + 0x77, 0x38, 0x3e, 0xb8, 0xde, 0xab, 0x2c, 0x95, 0x22, 0x03, 0xf2, 0x08, 0xef, 0x0d, 0x2b, 0xfd, + 0x36, 0xda, 0x5a, 0xa1, 0x95, 0x73, 0xe7, 0x1b, 0xc2, 0x8d, 0x57, 0xa1, 0x54, 0x9a, 0x83, 0xff, + 0xcf, 0xf5, 0xbc, 0x85, 0x1b, 0xc3, 0x10, 0x86, 0xa3, 0x2c, 0x4f, 0xec, 0x9a, 0x83, 0xba, 0x37, + 0xf9, 0xd5, 0xbb, 0xa3, 0xf1, 0x9d, 0xeb, 0xba, 0x88, 0x83, 0x9b, 0x7e, 0x24, 0x02, 0x50, 0xa9, + 0x8a, 0x84, 0x36, 0x32, 0xea, 0x7c, 0xfd, 0x8b, 0x1c, 0xe0, 0x1d, 0x0d, 0xc2, 0x13, 0xda, 0xe4, + 0xb6, 0xc7, 0x97, 0x2f, 0x72, 0x7f, 0x63, 0xba, 0xc8, 0xaa, 0x76, 0x55, 0x6d, 0xca, 0xa9, 0xea, + 0xf9, 0xf8, 0x46, 0x7f, 0xb1, 0x82, 0x27, 0xe5, 0x0a, 0x92, 0x37, 0xf8, 0xf6, 0x66, 0x4b, 0x32, + 0xd2, 0x5e, 0x81, 0x7f, 0xbb, 0x59, 0x2d, 0xe7, 0xcf, 0x0e, 0x65, 0x3b, 0x3b, 0x56, 0xff, 0xdd, + 0x74, 0x46, 0xad, 0xf3, 0x19, 0xb5, 0x2e, 0x67, 0x14, 0x7d, 0x2a, 0x28, 0xfa, 0x52, 0x50, 0x74, + 0x56, 0x50, 0x34, 0x2d, 0x28, 0xfa, 0x51, 0x50, 0xf4, 0xb3, 0xa0, 0xd6, 0x65, 0x41, 0xd1, 0xe7, + 0x39, 0xb5, 0xa6, 0x73, 0x6a, 0x9d, 0xcf, 0xa9, 0xf5, 0xf6, 0x70, 0xfb, 0x10, 0x1b, 0xde, 0xc1, + 0x8e, 0x39, 0x8e, 0x7e, 0x05, 0x00, 0x00, 0xff, 0xff, 0x1b, 0x54, 0xbe, 0xd4, 0x5d, 0x04, 0x00, + 0x00, } func (this *FilterChunkRefRequest) Equal(that interface{}) bool { @@ -1053,7 +1054,7 @@ func (m *FilterChunkRefRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Filters = append(m.Filters, github_com_grafana_loki_pkg_logql_syntax.LineFilter{}) + m.Filters = append(m.Filters, github_com_grafana_loki_v3_pkg_logql_syntax.LineFilter{}) if err := 
m.Filters[len(m.Filters)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/logproto/bloomgateway.proto b/pkg/logproto/bloomgateway.proto index 13d5c25e763f6..ffef97efe7ccd 100644 --- a/pkg/logproto/bloomgateway.proto +++ b/pkg/logproto/bloomgateway.proto @@ -5,7 +5,7 @@ package logproto; import "gogoproto/gogo.proto"; import "pkg/logproto/logproto.proto"; -option go_package = "github.com/grafana/loki/pkg/logproto"; +option go_package = "github.com/grafana/loki/v3/pkg/logproto"; message FilterChunkRefRequest { int64 from = 1 [ @@ -19,11 +19,11 @@ message FilterChunkRefRequest { repeated GroupedChunkRefs refs = 3; // TODO(salvacorts): Delete this field once the weekly release is done. repeated LineFilter filters = 4 [ - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logql/syntax.LineFilter", + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logql/syntax.LineFilter", (gogoproto.nullable) = false ]; Plan plan = 5 [ - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan", + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan", (gogoproto.nullable) = false ]; } diff --git a/pkg/logproto/compat.go b/pkg/logproto/compat.go index 0e65a90da02fa..82979824a5f57 100644 --- a/pkg/logproto/compat.go +++ b/pkg/logproto/compat.go @@ -11,6 +11,7 @@ import ( "time" "unsafe" + "github.com/c2h5oh/datasize" "github.com/cespare/xxhash/v2" jsoniter "github.com/json-iterator/go" "github.com/opentracing/opentracing-go" @@ -19,9 +20,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/timestamp" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + 
"github.com/grafana/loki/v3/pkg/util" ) // ToWriteRequest converts matched slices of Labels, Samples and Metadata into a WriteRequest proto. @@ -354,6 +355,9 @@ func (m *FilterChunkRefRequest) GetStep() int64 { return 0 } +// TODO(owen-d): why does this return the hash of all the refs instead of the query? +// The latter should be significantly cheaper, more helpful (readable), and just as correct +// at being a unique identifier for the request. // GetQuery returns the query of the request. // The query is the hash for the input chunks refs and the filter expressions. func (m *FilterChunkRefRequest) GetQuery() string { @@ -402,3 +406,42 @@ func (m *FilterChunkRefRequest) WithStartEndForCache(start, end time.Time) resul return &clone } + +func (m *ShardsRequest) GetCachingOptions() (res definitions.CachingOptions) { return } + +func (m *ShardsRequest) GetStart() time.Time { + return time.Unix(0, m.From.UnixNano()) +} + +func (m *ShardsRequest) GetEnd() time.Time { + return time.Unix(0, m.Through.UnixNano()) +} + +func (m *ShardsRequest) GetStep() int64 { return 0 } + +func (m *ShardsRequest) WithStartEnd(start, end time.Time) definitions.Request { + clone := *m + clone.From = model.TimeFromUnixNano(start.UnixNano()) + clone.Through = model.TimeFromUnixNano(end.UnixNano()) + return &clone +} + +func (m *ShardsRequest) WithQuery(query string) definitions.Request { + clone := *m + clone.Query = query + return &clone +} + +func (m *ShardsRequest) WithStartEndForCache(start, end time.Time) resultscache.Request { + return m.WithStartEnd(start, end).(resultscache.Request) +} + +func (m *ShardsRequest) LogToSpan(sp opentracing.Span) { + fields := []otlog.Field{ + otlog.String("from", timestamp.Time(int64(m.From)).String()), + otlog.String("through", timestamp.Time(int64(m.Through)).String()), + otlog.String("query", m.GetQuery()), + otlog.String("target_bytes_per_shard", datasize.ByteSize(m.TargetBytesPerShard).HumanReadable()), + } + sp.LogFields(fields...) 
+} diff --git a/pkg/logproto/compat_test.go b/pkg/logproto/compat_test.go index d4de93638f827..83b2e61787fa0 100644 --- a/pkg/logproto/compat_test.go +++ b/pkg/logproto/compat_test.go @@ -5,15 +5,19 @@ import ( "fmt" "math" "testing" + "time" "unsafe" jsoniter "github.com/json-iterator/go" + "github.com/opentracing/opentracing-go/mocktracer" + "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/model/timestamp" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" ) // This test verifies that jsoninter uses our custom method for marshalling. @@ -338,6 +342,54 @@ func TestFilterChunkRefRequestGetQuery(t *testing.T) { } } +func TestIndexStatsRequestSpanLogging(t *testing.T) { + now := time.Now() + end := now.Add(1000 * time.Second) + req := IndexStatsRequest{ + From: model.Time(now.UnixMilli()), + Through: model.Time(end.UnixMilli()), + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for _, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "start" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + if field.Key == "end" { + require.Equal(t, timestamp.Time(end.UnixMilli()).String(), field.ValueString) + + } + } + } +} + +func TestVolumeRequest(t *testing.T) { + now := time.Now() + end := now.Add(1000 * time.Second) + req := VolumeRequest{ + From: model.Time(now.UnixMilli()), + Through: model.Time(end.UnixMilli()), + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for _, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "start" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + if field.Key == "end" { + require.Equal(t, 
timestamp.Time(end.UnixMilli()).String(), field.ValueString) + + } + } + } +} + func benchmarkMergeLabelResponses(b *testing.B, responses []*LabelResponse) { b.ReportAllocs() for n := 0; n < b.N; n++ { diff --git a/pkg/logproto/extensions.go b/pkg/logproto/extensions.go index 9a5f3f8e61af6..19e1f7be3b0c5 100644 --- a/pkg/logproto/extensions.go +++ b/pkg/logproto/extensions.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // This is the separator define in the Prometheus Labels.Hash function. @@ -133,3 +133,25 @@ func (m *IndexStatsResponse) LoggingKeyValues() []interface{} { "entries", m.Entries, } } + +func (m *Shard) SpaceFor(stats *IndexStatsResponse, targetShardBytes uint64) bool { + curDelta := max(m.Stats.Bytes, targetShardBytes) - min(m.Stats.Bytes, targetShardBytes) + updated := m.Stats.Bytes + stats.Bytes + newDelta := max(updated, targetShardBytes) - min(updated, targetShardBytes) + return newDelta <= curDelta +} + +type DetectedFieldType string + +const ( + DetectedFieldString DetectedFieldType = "string" + DetectedFieldInt DetectedFieldType = "int" + DetectedFieldFloat DetectedFieldType = "float" + DetectedFieldBoolean DetectedFieldType = "boolean" + DetectedFieldDuration DetectedFieldType = "duration" + DetectedFieldBytes DetectedFieldType = "bytes" +) + +func (d DetectedFieldType) String() string { + return string(d) +} diff --git a/pkg/logproto/extensions_test.go b/pkg/logproto/extensions_test.go new file mode 100644 index 0000000000000..d1c96c76bbed3 --- /dev/null +++ b/pkg/logproto/extensions_test.go @@ -0,0 +1,42 @@ +package logproto + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestShard_SpaceFor(t *testing.T) { + target := uint64(100) + shard := Shard{ + Stats: &IndexStatsResponse{ + Bytes: 
50, + }, + } + + for _, tc := range []struct { + desc string + bytes uint64 + exp bool + }{ + { + desc: "full shard", + bytes: 50, + exp: true, + }, + { + desc: "overflow equal to underflow accepts", + bytes: 100, + exp: true, + }, + { + desc: "overflow", + bytes: 101, + exp: false, + }, + } { + t.Run(tc.desc, func(t *testing.T) { + require.Equal(t, shard.SpaceFor(&IndexStatsResponse{Bytes: tc.bytes}, target), tc.exp) + }) + } +} diff --git a/pkg/logproto/indexgateway.pb.go b/pkg/logproto/indexgateway.pb.go index 86b2665e86b17..bd2650fbc01a9 100644 --- a/pkg/logproto/indexgateway.pb.go +++ b/pkg/logproto/indexgateway.pb.go @@ -6,11 +6,18 @@ package logproto import ( context "context" fmt "fmt" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" + stats "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + github_com_prometheus_common_model "github.com/prometheus/common/model" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" + io "io" math "math" + math_bits "math/bits" + reflect "reflect" + strings "strings" ) // Reference imports to suppress errors if they are not otherwise used. @@ -24,33 +31,454 @@ var _ = math.Inf // proto package needs to be updated. 
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package +type ShardsRequest struct { + From github_com_prometheus_common_model.Time `protobuf:"varint,1,opt,name=from,proto3,customtype=github.com/prometheus/common/model.Time" json:"from"` + Through github_com_prometheus_common_model.Time `protobuf:"varint,2,opt,name=through,proto3,customtype=github.com/prometheus/common/model.Time" json:"through"` + Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query"` + TargetBytesPerShard uint64 `protobuf:"varint,4,opt,name=target_bytes_per_shard,json=targetBytesPerShard,proto3" json:"targetBytesPerShard"` +} + +func (m *ShardsRequest) Reset() { *m = ShardsRequest{} } +func (*ShardsRequest) ProtoMessage() {} +func (*ShardsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_d27585148d0a52c8, []int{0} +} +func (m *ShardsRequest) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ShardsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_ShardsRequest.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *ShardsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ShardsRequest.Merge(m, src) +} +func (m *ShardsRequest) XXX_Size() int { + return m.Size() +} +func (m *ShardsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ShardsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ShardsRequest proto.InternalMessageInfo + +func (m *ShardsRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func (m *ShardsRequest) GetTargetBytesPerShard() uint64 { + if m != nil { + return m.TargetBytesPerShard + } + return 0 +} + +type ShardsResponse struct { + Shards []Shard `protobuf:"bytes,1,rep,name=shards,proto3" json:"shards"` + Statistics stats.Result `protobuf:"bytes,2,opt,name=statistics,proto3" json:"statistics"` +} + 
+func (m *ShardsResponse) Reset() { *m = ShardsResponse{} } +func (*ShardsResponse) ProtoMessage() {} +func (*ShardsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_d27585148d0a52c8, []int{1} +} +func (m *ShardsResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ShardsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_ShardsResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *ShardsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ShardsResponse.Merge(m, src) +} +func (m *ShardsResponse) XXX_Size() int { + return m.Size() +} +func (m *ShardsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ShardsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ShardsResponse proto.InternalMessageInfo + +func (m *ShardsResponse) GetShards() []Shard { + if m != nil { + return m.Shards + } + return nil +} + +func (m *ShardsResponse) GetStatistics() stats.Result { + if m != nil { + return m.Statistics + } + return stats.Result{} +} + +type Shard struct { + Bounds FPBounds `protobuf:"bytes,1,opt,name=bounds,proto3" json:"bounds"` + Stats *IndexStatsResponse `protobuf:"bytes,2,opt,name=stats,proto3" json:"stats"` +} + +func (m *Shard) Reset() { *m = Shard{} } +func (*Shard) ProtoMessage() {} +func (*Shard) Descriptor() ([]byte, []int) { + return fileDescriptor_d27585148d0a52c8, []int{2} +} +func (m *Shard) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *Shard) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_Shard.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *Shard) XXX_Merge(src proto.Message) { + xxx_messageInfo_Shard.Merge(m, src) +} +func 
(m *Shard) XXX_Size() int { + return m.Size() +} +func (m *Shard) XXX_DiscardUnknown() { + xxx_messageInfo_Shard.DiscardUnknown(m) +} + +var xxx_messageInfo_Shard proto.InternalMessageInfo + +func (m *Shard) GetBounds() FPBounds { + if m != nil { + return m.Bounds + } + return FPBounds{} +} + +func (m *Shard) GetStats() *IndexStatsResponse { + if m != nil { + return m.Stats + } + return nil +} + +// FPBounds is identical to the definition in `pkg/storage/bloom/v1/bounds.FingerprintBounds` +// which ensures we can cast between them without allocations. +type FPBounds struct { + Min github_com_prometheus_common_model.Fingerprint `protobuf:"varint,1,opt,name=min,proto3,casttype=github.com/prometheus/common/model.Fingerprint" json:"min"` + Max github_com_prometheus_common_model.Fingerprint `protobuf:"varint,2,opt,name=max,proto3,casttype=github.com/prometheus/common/model.Fingerprint" json:"max"` +} + +func (m *FPBounds) Reset() { *m = FPBounds{} } +func (*FPBounds) ProtoMessage() {} +func (*FPBounds) Descriptor() ([]byte, []int) { + return fileDescriptor_d27585148d0a52c8, []int{3} +} +func (m *FPBounds) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *FPBounds) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_FPBounds.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *FPBounds) XXX_Merge(src proto.Message) { + xxx_messageInfo_FPBounds.Merge(m, src) +} +func (m *FPBounds) XXX_Size() int { + return m.Size() +} +func (m *FPBounds) XXX_DiscardUnknown() { + xxx_messageInfo_FPBounds.DiscardUnknown(m) +} + +var xxx_messageInfo_FPBounds proto.InternalMessageInfo + +func (m *FPBounds) GetMin() github_com_prometheus_common_model.Fingerprint { + if m != nil { + return m.Min + } + return 0 +} + +func (m *FPBounds) GetMax() github_com_prometheus_common_model.Fingerprint { + if m != nil 
{ + return m.Max + } + return 0 +} + +func init() { + proto.RegisterType((*ShardsRequest)(nil), "indexgatewaypb.ShardsRequest") + proto.RegisterType((*ShardsResponse)(nil), "indexgatewaypb.ShardsResponse") + proto.RegisterType((*Shard)(nil), "indexgatewaypb.Shard") + proto.RegisterType((*FPBounds)(nil), "indexgatewaypb.FPBounds") +} + func init() { proto.RegisterFile("pkg/logproto/indexgateway.proto", fileDescriptor_d27585148d0a52c8) } var fileDescriptor_d27585148d0a52c8 = []byte{ - // 361 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xb1, 0x4e, 0xc2, 0x40, - 0x18, 0xc7, 0xef, 0x12, 0x63, 0xf4, 0x34, 0x0e, 0xb7, 0x40, 0x40, 0xcf, 0xc4, 0x38, 0xe8, 0x42, - 0x8d, 0xbe, 0x80, 0xd1, 0x84, 0x86, 0x04, 0x4d, 0xc4, 0x84, 0x81, 0xc1, 0x78, 0xc5, 0x8f, 0xd2, - 0x50, 0x7a, 0xb5, 0xbd, 0x46, 0xd9, 0x7c, 0x04, 0x1f, 0xc3, 0x87, 0xf0, 0x01, 0x1c, 0x19, 0x19, - 0xe5, 0x58, 0x1c, 0x79, 0x04, 0xc3, 0x35, 0x85, 0x03, 0x4b, 0xe2, 0x04, 0xfd, 0xfd, 0x7f, 0xdf, - 0xff, 0x4b, 0xef, 0x4a, 0x0e, 0xc3, 0x9e, 0x6b, 0xf9, 0xc2, 0x0d, 0x23, 0x21, 0x85, 0xe5, 0x05, - 0x4f, 0xf0, 0xea, 0x72, 0x09, 0x2f, 0x7c, 0x50, 0xd1, 0x88, 0xee, 0x99, 0x2c, 0x74, 0x4a, 0xe5, - 0xa5, 0x81, 0xec, 0x4f, 0x2a, 0x9f, 0x7f, 0x6e, 0x90, 0xdd, 0xda, 0xcc, 0xb7, 0x53, 0x9f, 0xd6, - 0x08, 0xb9, 0x4b, 0x20, 0x1a, 0x68, 0x48, 0xcb, 0x95, 0xb9, 0xbf, 0xa0, 0x0d, 0x78, 0x4e, 0x20, - 0x96, 0xa5, 0xfd, 0xfc, 0x30, 0x0e, 0x45, 0x10, 0xc3, 0x19, 0xa6, 0x75, 0xb2, 0x63, 0x83, 0xbc, - 0xee, 0x26, 0x41, 0xaf, 0x01, 0x1d, 0x6a, 0xe8, 0x06, 0xce, 0xca, 0x0e, 0xd6, 0xa4, 0x69, 0xdb, - 0x11, 0xa2, 0x55, 0xb2, 0x6d, 0x83, 0xbc, 0x87, 0xc8, 0x83, 0x98, 0x96, 0x96, 0xec, 0x14, 0x66, - 0x4d, 0xe5, 0xdc, 0x6c, 0xde, 0xf3, 0x40, 0x0a, 0x75, 0xee, 0x80, 0x7f, 0xcb, 0xfb, 0x10, 0x57, - 0x45, 0x74, 0x03, 0x32, 0xf2, 0xda, 0xb3, 0x27, 0x7a, 0xb2, 0x98, 0x5c, 0xa3, 0x64, 0x3b, 0x0a, - 0x2b, 0xa6, 0xd1, 0xff, 0x48, 0x8a, 0x1a, 0x35, 0xb9, 0x9f, 0xac, 0x2e, 0x38, 0x5d, 0x19, 
0xcb, - 0x71, 0xfe, 0xb1, 0xc1, 0x26, 0x5b, 0xb3, 0x17, 0x93, 0x5c, 0xc6, 0xe6, 0x05, 0xe9, 0xe3, 0xd7, - 0x34, 0xe7, 0x82, 0xcc, 0x70, 0x5e, 0x74, 0xa9, 0x8f, 0xb4, 0x29, 0xfc, 0xa4, 0x0f, 0xd4, 0x58, - 0x98, 0x92, 0xac, 0xa5, 0xf8, 0x37, 0xc8, 0x1a, 0xae, 0x5a, 0xc3, 0x31, 0x43, 0xa3, 0x31, 0x43, - 0xd3, 0x31, 0xc3, 0x6f, 0x8a, 0xe1, 0x0f, 0xc5, 0xf0, 0x97, 0x62, 0x78, 0xa8, 0x18, 0xfe, 0x56, - 0x0c, 0xff, 0x28, 0x86, 0xa6, 0x8a, 0xe1, 0xf7, 0x09, 0x43, 0xc3, 0x09, 0x43, 0xa3, 0x09, 0x43, - 0xad, 0x63, 0xd7, 0x93, 0xdd, 0xc4, 0xa9, 0xb4, 0x45, 0xdf, 0x72, 0x23, 0xde, 0xe1, 0x01, 0xb7, - 0x7c, 0xd1, 0xf3, 0x2c, 0xf3, 0x4b, 0x75, 0x36, 0xf5, 0xcf, 0xc5, 0x6f, 0x00, 0x00, 0x00, 0xff, - 0xff, 0x7a, 0x1a, 0x28, 0xb4, 0xf1, 0x02, 0x00, 0x00, + // 737 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0xbf, 0x4f, 0xdb, 0x4c, + 0x18, 0xf6, 0x91, 0x84, 0x0f, 0x8e, 0x0f, 0x86, 0xfb, 0x7e, 0x60, 0x05, 0xb0, 0xa3, 0x2c, 0x5f, + 0xbe, 0xc5, 0xae, 0x60, 0xa8, 0x5a, 0x09, 0x89, 0xba, 0x52, 0x22, 0x44, 0x5a, 0x51, 0x83, 0x18, + 0x2a, 0xb5, 0xd4, 0x09, 0x87, 0x63, 0x61, 0xfb, 0xc2, 0xdd, 0xb9, 0x0d, 0x5b, 0xc7, 0xaa, 0x53, + 0xd5, 0xff, 0xa0, 0x52, 0x87, 0xfe, 0x29, 0x8c, 0x8c, 0xa8, 0x83, 0x55, 0xc2, 0x52, 0x65, 0x62, + 0xee, 0x54, 0xf9, 0xce, 0x4e, 0x4c, 0x08, 0x12, 0xed, 0xe2, 0x3b, 0x3f, 0xef, 0xf3, 0x3e, 0xef, + 0x4f, 0x27, 0x50, 0xef, 0x1e, 0xb9, 0xa6, 0x4f, 0xdc, 0x2e, 0x25, 0x9c, 0x98, 0x5e, 0x78, 0x80, + 0x7b, 0xae, 0xc3, 0xf1, 0x1b, 0xe7, 0xc4, 0x10, 0x10, 0x5a, 0xc8, 0x63, 0xdd, 0x56, 0xf9, 0x6f, + 0x97, 0xb8, 0x44, 0xb2, 0x93, 0x9b, 0x64, 0x95, 0x97, 0xae, 0xc9, 0x64, 0x97, 0xd4, 0x58, 0x49, + 0x8d, 0xc7, 0x7e, 0x40, 0x0e, 0xb0, 0x6f, 0x32, 0xee, 0x70, 0x26, 0x9f, 0x92, 0x51, 0xfd, 0x34, + 0x05, 0xe7, 0x77, 0x3a, 0x0e, 0x3d, 0x60, 0x36, 0x3e, 0x8e, 0x30, 0xe3, 0x68, 0x0b, 0x16, 0x0f, + 0x29, 0x09, 0x54, 0x50, 0x01, 0xb5, 0x82, 0x75, 0xff, 0x34, 0xd6, 0x95, 0xaf, 0xb1, 0xfe, 0x9f, + 0xeb, 0xf1, 0x4e, 0xd4, 
0x32, 0xda, 0x24, 0x30, 0xbb, 0x94, 0x04, 0x98, 0x77, 0x70, 0xc4, 0xcc, + 0x36, 0x09, 0x02, 0x12, 0x9a, 0x42, 0xdd, 0xd8, 0xf5, 0x02, 0x3c, 0x88, 0x75, 0xe1, 0x6e, 0x8b, + 0x27, 0xda, 0x85, 0x7f, 0xf0, 0x0e, 0x25, 0x91, 0xdb, 0x51, 0xa7, 0x84, 0xde, 0xc3, 0x5f, 0xd7, + 0xcb, 0x14, 0xec, 0xec, 0x82, 0x74, 0x58, 0x3a, 0x8e, 0x30, 0x3d, 0x51, 0x0b, 0x15, 0x50, 0x9b, + 0xb5, 0x66, 0x07, 0xb1, 0x2e, 0x01, 0x5b, 0x1e, 0xa8, 0x09, 0xff, 0xe5, 0x0e, 0x75, 0x31, 0xdf, + 0x6f, 0x9d, 0x70, 0xcc, 0xf6, 0xbb, 0x98, 0xee, 0xb3, 0xa4, 0x4a, 0xb5, 0x58, 0x01, 0xb5, 0xa2, + 0xb5, 0x38, 0x88, 0xf5, 0xbf, 0x24, 0xc3, 0x4a, 0x08, 0xdb, 0x98, 0x8a, 0x26, 0xd8, 0x93, 0xc0, + 0xea, 0x47, 0x00, 0x17, 0xb2, 0x1e, 0xb1, 0x2e, 0x09, 0x19, 0x46, 0xeb, 0x70, 0x5a, 0xe8, 0x31, + 0x15, 0x54, 0x0a, 0xb5, 0xb9, 0xd5, 0x7f, 0x8c, 0xeb, 0xc3, 0x32, 0x04, 0xdf, 0x5a, 0x48, 0xaa, + 0x1d, 0xc4, 0x7a, 0x4a, 0xb6, 0xd3, 0x13, 0x3d, 0x82, 0x30, 0x19, 0x82, 0xc7, 0xb8, 0xd7, 0x66, + 0xa2, 0x33, 0x73, 0xab, 0xf3, 0x86, 0x9c, 0x8b, 0x8d, 0x59, 0xe4, 0x73, 0x0b, 0xa5, 0xae, 0x39, + 0xa2, 0x9d, 0xbb, 0x57, 0xdf, 0x01, 0x58, 0x12, 0x41, 0xd0, 0x06, 0x9c, 0x6e, 0x91, 0x28, 0x14, + 0xb9, 0x24, 0x42, 0xea, 0x78, 0x2e, 0xf5, 0x6d, 0x4b, 0xd8, 0x47, 0xe9, 0x48, 0xbe, 0x9d, 0x9e, + 0x68, 0x1d, 0x96, 0x44, 0xec, 0x34, 0x93, 0x65, 0x63, 0xb8, 0x46, 0x9b, 0x89, 0xd2, 0x4e, 0x62, + 0xcb, 0x4a, 0x97, 0xdd, 0x16, 0x74, 0x5b, 0x1e, 0xd5, 0xcf, 0x00, 0xce, 0x64, 0x31, 0xd0, 0x16, + 0x2c, 0x04, 0x5e, 0x28, 0x52, 0x29, 0x5a, 0x0f, 0x06, 0xb1, 0x9e, 0xbc, 0xfe, 0x88, 0x75, 0xe3, + 0x0e, 0x03, 0xaf, 0x7b, 0xa1, 0x8b, 0x69, 0x97, 0x7a, 0x21, 0xb7, 0x13, 0x37, 0x21, 0xe6, 0xf4, + 0x44, 0x5a, 0x99, 0x98, 0xd3, 0xfb, 0x2d, 0x31, 0xa7, 0xb7, 0xfa, 0xbe, 0x04, 0xff, 0x14, 0xf5, + 0x34, 0x64, 0x67, 0xd0, 0x26, 0x84, 0xcf, 0x92, 0x75, 0x11, 0x20, 0x5a, 0x1a, 0x55, 0x3d, 0x42, + 0xd3, 0x8f, 0xa2, 0xbc, 0x3c, 0xd9, 0x28, 0x5b, 0x72, 0x0f, 0xa0, 0x26, 0x9c, 0x6b, 0x60, 0xfe, + 0xb8, 0x13, 0x85, 0x47, 0x36, 0x3e, 0x44, 0x39, 0x7a, 0x0e, 
0xce, 0xc4, 0x56, 0x6e, 0xb1, 0x4a, + 0xb5, 0xaa, 0x82, 0xea, 0x70, 0xb6, 0x81, 0xf9, 0x0e, 0xa6, 0x1e, 0x66, 0xa8, 0x7c, 0x8d, 0x2d, + 0xc1, 0x4c, 0x69, 0x69, 0xa2, 0x6d, 0xa8, 0xf3, 0x12, 0x2e, 0x36, 0x9d, 0x16, 0xf6, 0x9f, 0x3a, + 0x01, 0x66, 0x75, 0x42, 0x9f, 0x60, 0x4e, 0xbd, 0x76, 0xf2, 0x86, 0x6a, 0x23, 0xcf, 0x5b, 0x28, + 0x59, 0x8c, 0xc5, 0x31, 0x66, 0x4e, 0xff, 0x15, 0x54, 0x05, 0xb4, 0xe7, 0xf8, 0xd1, 0x78, 0x80, + 0xff, 0xc7, 0xdc, 0x26, 0x70, 0xee, 0x10, 0xa1, 0x01, 0x67, 0x92, 0xc2, 0x92, 0x35, 0xcb, 0x0f, + 0x28, 0xbf, 0x96, 0x37, 0x06, 0x74, 0x73, 0x67, 0xab, 0x0a, 0xda, 0x10, 0x2d, 0xdd, 0x23, 0x7e, + 0x14, 0x60, 0x94, 0x0b, 0x28, 0x91, 0x4c, 0x45, 0xbd, 0x69, 0x18, 0x2a, 0x34, 0xe5, 0x50, 0xe4, + 0x07, 0xbc, 0x32, 0xf1, 0x7b, 0x1f, 0x66, 0xa3, 0xdd, 0x66, 0xce, 0x16, 0xc6, 0x7a, 0x71, 0x76, + 0xa1, 0x29, 0xe7, 0x17, 0x9a, 0x72, 0x75, 0xa1, 0x81, 0xb7, 0x7d, 0x0d, 0x7c, 0xe9, 0x6b, 0xe0, + 0xb4, 0xaf, 0x81, 0xb3, 0xbe, 0x06, 0xbe, 0xf5, 0x35, 0xf0, 0xbd, 0xaf, 0x29, 0x57, 0x7d, 0x0d, + 0x7c, 0xb8, 0xd4, 0x94, 0xb3, 0x4b, 0x4d, 0x39, 0xbf, 0xd4, 0x94, 0xe7, 0xf9, 0x5f, 0x4d, 0x97, + 0x3a, 0x87, 0x4e, 0xe8, 0x98, 0x3e, 0x39, 0xf2, 0xcc, 0xd7, 0x6b, 0x66, 0xfe, 0x7f, 0xa0, 0x35, + 0x2d, 0x8e, 0xb5, 0x9f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x1e, 0x17, 0x36, 0xcf, 0x65, 0x06, 0x00, + 0x00, +} + +func (this *ShardsRequest) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*ShardsRequest) + if !ok { + that2, ok := that.(ShardsRequest) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.From.Equal(that1.From) { + return false + } + if !this.Through.Equal(that1.Through) { + return false + } + if this.Query != that1.Query { + return false + } + if this.TargetBytesPerShard != that1.TargetBytesPerShard { + return false + } + return true +} +func (this *ShardsResponse) Equal(that interface{}) bool { + if that == 
nil { + return this == nil + } + + that1, ok := that.(*ShardsResponse) + if !ok { + that2, ok := that.(ShardsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if len(this.Shards) != len(that1.Shards) { + return false + } + for i := range this.Shards { + if !this.Shards[i].Equal(&that1.Shards[i]) { + return false + } + } + if !this.Statistics.Equal(&that1.Statistics) { + return false + } + return true +} +func (this *Shard) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*Shard) + if !ok { + that2, ok := that.(Shard) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.Bounds.Equal(&that1.Bounds) { + return false + } + if !this.Stats.Equal(that1.Stats) { + return false + } + return true +} +func (this *FPBounds) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*FPBounds) + if !ok { + that2, ok := that.(FPBounds) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if this.Min != that1.Min { + return false + } + if this.Max != that1.Max { + return false + } + return true +} +func (this *ShardsRequest) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 8) + s = append(s, "&logproto.ShardsRequest{") + s = append(s, "From: "+fmt.Sprintf("%#v", this.From)+",\n") + s = append(s, "Through: "+fmt.Sprintf("%#v", this.Through)+",\n") + s = append(s, "Query: "+fmt.Sprintf("%#v", this.Query)+",\n") + s = append(s, "TargetBytesPerShard: "+fmt.Sprintf("%#v", this.TargetBytesPerShard)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *ShardsResponse) GoString() string { + if this == nil { + return "nil" + } + s := 
make([]string, 0, 6) + s = append(s, "&logproto.ShardsResponse{") + if this.Shards != nil { + vs := make([]*Shard, len(this.Shards)) + for i := range vs { + vs[i] = &this.Shards[i] + } + s = append(s, "Shards: "+fmt.Sprintf("%#v", vs)+",\n") + } + s = append(s, "Statistics: "+strings.Replace(this.Statistics.GoString(), `&`, ``, 1)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *Shard) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 6) + s = append(s, "&logproto.Shard{") + s = append(s, "Bounds: "+strings.Replace(this.Bounds.GoString(), `&`, ``, 1)+",\n") + if this.Stats != nil { + s = append(s, "Stats: "+fmt.Sprintf("%#v", this.Stats)+",\n") + } + s = append(s, "}") + return strings.Join(s, "") +} +func (this *FPBounds) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 6) + s = append(s, "&logproto.FPBounds{") + s = append(s, "Min: "+fmt.Sprintf("%#v", this.Min)+",\n") + s = append(s, "Max: "+fmt.Sprintf("%#v", this.Max)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func valueToGoStringIndexgateway(v interface{}, typ string) string { + rv := reflect.ValueOf(v) + if rv.IsNil() { + return "nil" + } + pv := reflect.Indirect(rv).Interface() + return fmt.Sprintf("func(v %v) *%v { return &v } ( %#v )", typ, typ, pv) } // Reference imports to suppress errors if they are not otherwise used. @@ -79,6 +507,9 @@ type IndexGatewayClient interface { // Note: this MUST be the same as the variant defined in // logproto.proto on the Querier service. GetVolume(ctx context.Context, in *VolumeRequest, opts ...grpc.CallOption) (*VolumeResponse, error) + // GetShards is an optimized implemented shard-planning implementation + // on the index gateway and not on the ingester. 
+ GetShards(ctx context.Context, in *ShardsRequest, opts ...grpc.CallOption) (IndexGateway_GetShardsClient, error) } type indexGatewayClient struct { @@ -175,6 +606,38 @@ func (c *indexGatewayClient) GetVolume(ctx context.Context, in *VolumeRequest, o return out, nil } +func (c *indexGatewayClient) GetShards(ctx context.Context, in *ShardsRequest, opts ...grpc.CallOption) (IndexGateway_GetShardsClient, error) { + stream, err := c.cc.NewStream(ctx, &_IndexGateway_serviceDesc.Streams[1], "/indexgatewaypb.IndexGateway/GetShards", opts...) + if err != nil { + return nil, err + } + x := &indexGatewayGetShardsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type IndexGateway_GetShardsClient interface { + Recv() (*ShardsResponse, error) + grpc.ClientStream +} + +type indexGatewayGetShardsClient struct { + grpc.ClientStream +} + +func (x *indexGatewayGetShardsClient) Recv() (*ShardsResponse, error) { + m := new(ShardsResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + // IndexGatewayServer is the server API for IndexGateway service. type IndexGatewayServer interface { /// QueryIndex reads the indexes required for given query & sends back the batch of rows @@ -191,6 +654,9 @@ type IndexGatewayServer interface { // Note: this MUST be the same as the variant defined in // logproto.proto on the Querier service. GetVolume(context.Context, *VolumeRequest) (*VolumeResponse, error) + // GetShards is an optimized implemented shard-planning implementation + // on the index gateway and not on the ingester. + GetShards(*ShardsRequest, IndexGateway_GetShardsServer) error } // UnimplementedIndexGatewayServer can be embedded to have forward compatible implementations. 
@@ -218,6 +684,9 @@ func (*UnimplementedIndexGatewayServer) GetStats(ctx context.Context, req *Index func (*UnimplementedIndexGatewayServer) GetVolume(ctx context.Context, req *VolumeRequest) (*VolumeResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method GetVolume not implemented") } +func (*UnimplementedIndexGatewayServer) GetShards(req *ShardsRequest, srv IndexGateway_GetShardsServer) error { + return status.Errorf(codes.Unimplemented, "method GetShards not implemented") +} func RegisterIndexGatewayServer(s *grpc.Server, srv IndexGatewayServer) { s.RegisterService(&_IndexGateway_serviceDesc, srv) @@ -352,6 +821,27 @@ func _IndexGateway_GetVolume_Handler(srv interface{}, ctx context.Context, dec f return interceptor(ctx, in, info, handler) } +func _IndexGateway_GetShards_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(ShardsRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(IndexGatewayServer).GetShards(m, &indexGatewayGetShardsServer{stream}) +} + +type IndexGateway_GetShardsServer interface { + Send(*ShardsResponse) error + grpc.ServerStream +} + +type indexGatewayGetShardsServer struct { + grpc.ServerStream +} + +func (x *indexGatewayGetShardsServer) Send(m *ShardsResponse) error { + return x.ServerStream.SendMsg(m) +} + var _IndexGateway_serviceDesc = grpc.ServiceDesc{ ServiceName: "indexgatewaypb.IndexGateway", HandlerType: (*IndexGatewayServer)(nil), @@ -387,6 +877,912 @@ var _IndexGateway_serviceDesc = grpc.ServiceDesc{ Handler: _IndexGateway_QueryIndex_Handler, ServerStreams: true, }, + { + StreamName: "GetShards", + Handler: _IndexGateway_GetShards_Handler, + ServerStreams: true, + }, }, Metadata: "pkg/logproto/indexgateway.proto", } + +func (m *ShardsRequest) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ShardsRequest) 
MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ShardsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.TargetBytesPerShard != 0 { + i = encodeVarintIndexgateway(dAtA, i, uint64(m.TargetBytesPerShard)) + i-- + dAtA[i] = 0x20 + } + if len(m.Query) > 0 { + i -= len(m.Query) + copy(dAtA[i:], m.Query) + i = encodeVarintIndexgateway(dAtA, i, uint64(len(m.Query))) + i-- + dAtA[i] = 0x1a + } + if m.Through != 0 { + i = encodeVarintIndexgateway(dAtA, i, uint64(m.Through)) + i-- + dAtA[i] = 0x10 + } + if m.From != 0 { + i = encodeVarintIndexgateway(dAtA, i, uint64(m.From)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func (m *ShardsResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ShardsResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ShardsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + { + size, err := m.Statistics.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintIndexgateway(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + if len(m.Shards) > 0 { + for iNdEx := len(m.Shards) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.Shards[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintIndexgateway(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + +func (m *Shard) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *Shard) MarshalTo(dAtA 
[]byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *Shard) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Stats != nil { + { + size, err := m.Stats.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintIndexgateway(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + { + size, err := m.Bounds.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintIndexgateway(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + +func (m *FPBounds) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *FPBounds) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *FPBounds) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Max != 0 { + i = encodeVarintIndexgateway(dAtA, i, uint64(m.Max)) + i-- + dAtA[i] = 0x10 + } + if m.Min != 0 { + i = encodeVarintIndexgateway(dAtA, i, uint64(m.Min)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func encodeVarintIndexgateway(dAtA []byte, offset int, v uint64) int { + offset -= sovIndexgateway(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return base +} +func (m *ShardsRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.From != 0 { + n += 1 + sovIndexgateway(uint64(m.From)) + } + if m.Through != 0 { + n += 1 + sovIndexgateway(uint64(m.Through)) + } + l = len(m.Query) + if l > 0 { + n += 1 + l + sovIndexgateway(uint64(l)) + } + if m.TargetBytesPerShard != 0 { + n += 1 + sovIndexgateway(uint64(m.TargetBytesPerShard)) + } + return n +} + 
+func (m *ShardsResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Shards) > 0 { + for _, e := range m.Shards { + l = e.Size() + n += 1 + l + sovIndexgateway(uint64(l)) + } + } + l = m.Statistics.Size() + n += 1 + l + sovIndexgateway(uint64(l)) + return n +} + +func (m *Shard) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = m.Bounds.Size() + n += 1 + l + sovIndexgateway(uint64(l)) + if m.Stats != nil { + l = m.Stats.Size() + n += 1 + l + sovIndexgateway(uint64(l)) + } + return n +} + +func (m *FPBounds) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Min != 0 { + n += 1 + sovIndexgateway(uint64(m.Min)) + } + if m.Max != 0 { + n += 1 + sovIndexgateway(uint64(m.Max)) + } + return n +} + +func sovIndexgateway(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} +func sozIndexgateway(x uint64) (n int) { + return sovIndexgateway(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (this *ShardsRequest) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&ShardsRequest{`, + `From:` + fmt.Sprintf("%v", this.From) + `,`, + `Through:` + fmt.Sprintf("%v", this.Through) + `,`, + `Query:` + fmt.Sprintf("%v", this.Query) + `,`, + `TargetBytesPerShard:` + fmt.Sprintf("%v", this.TargetBytesPerShard) + `,`, + `}`, + }, "") + return s +} +func (this *ShardsResponse) String() string { + if this == nil { + return "nil" + } + repeatedStringForShards := "[]Shard{" + for _, f := range this.Shards { + repeatedStringForShards += strings.Replace(strings.Replace(f.String(), "Shard", "Shard", 1), `&`, ``, 1) + "," + } + repeatedStringForShards += "}" + s := strings.Join([]string{`&ShardsResponse{`, + `Shards:` + repeatedStringForShards + `,`, + `Statistics:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.Statistics), "Result", "stats.Result", 1), `&`, ``, 1) + `,`, + `}`, + }, "") + return s +} +func (this *Shard) String() string { + if this == nil 
{ + return "nil" + } + s := strings.Join([]string{`&Shard{`, + `Bounds:` + strings.Replace(strings.Replace(this.Bounds.String(), "FPBounds", "FPBounds", 1), `&`, ``, 1) + `,`, + `Stats:` + strings.Replace(fmt.Sprintf("%v", this.Stats), "IndexStatsResponse", "IndexStatsResponse", 1) + `,`, + `}`, + }, "") + return s +} +func (this *FPBounds) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&FPBounds{`, + `Min:` + fmt.Sprintf("%v", this.Min) + `,`, + `Max:` + fmt.Sprintf("%v", this.Max) + `,`, + `}`, + }, "") + return s +} +func valueToStringIndexgateway(v interface{}) string { + rv := reflect.ValueOf(v) + if rv.IsNil() { + return "nil" + } + pv := reflect.Indirect(rv).Interface() + return fmt.Sprintf("*%v", pv) +} +func (m *ShardsRequest) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ShardsRequest: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ShardsRequest: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field From", wireType) + } + m.From = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.From |= github_com_prometheus_common_model.Time(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Through", wireType) + } + m.Through = 0 + for shift := 
uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Through |= github_com_prometheus_common_model.Time(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Query", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthIndexgateway + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthIndexgateway + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Query = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 4: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field TargetBytesPerShard", wireType) + } + m.TargetBytesPerShard = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.TargetBytesPerShard |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipIndexgateway(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ShardsResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 
ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ShardsResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ShardsResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Shards", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthIndexgateway + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthIndexgateway + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Shards = append(m.Shards, Shard{}) + if err := m.Shards[len(m.Shards)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Statistics", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthIndexgateway + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthIndexgateway + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.Statistics.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipIndexgateway(dAtA[iNdEx:]) + if err 
!= nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *Shard) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Shard: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Shard: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Bounds", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthIndexgateway + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthIndexgateway + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.Bounds.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Stats", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + 
if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthIndexgateway + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthIndexgateway + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Stats == nil { + m.Stats = &IndexStatsResponse{} + } + if err := m.Stats.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipIndexgateway(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *FPBounds) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: FPBounds: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: FPBounds: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Min", wireType) + } + m.Min = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Min |= github_com_prometheus_common_model.Fingerprint(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Max", wireType) + } + m.Max = 0 + 
for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Max |= github_com_prometheus_common_model.Fingerprint(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipIndexgateway(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func skipIndexgateway(dAtA []byte) (n int, err error) { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + iNdEx++ + if dAtA[iNdEx-1] < 0x80 { + break + } + } + return iNdEx, nil + case 1: + iNdEx += 8 + return iNdEx, nil + case 2: + var length int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + if length < 0 { + return 0, ErrInvalidLengthIndexgateway + } + iNdEx += length + if iNdEx < 0 { + return 0, ErrInvalidLengthIndexgateway + } + return iNdEx, nil + case 3: + for { + var innerWire uint64 + var start int = iNdEx + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowIndexgateway + } 
+ if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + innerWire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + innerWireType := int(innerWire & 0x7) + if innerWireType == 4 { + break + } + next, err := skipIndexgateway(dAtA[start:]) + if err != nil { + return 0, err + } + iNdEx = start + next + if iNdEx < 0 { + return 0, ErrInvalidLengthIndexgateway + } + } + return iNdEx, nil + case 4: + return iNdEx, nil + case 5: + iNdEx += 4 + return iNdEx, nil + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + } + panic("unreachable") +} + +var ( + ErrInvalidLengthIndexgateway = fmt.Errorf("proto: negative length found during unmarshaling") + ErrIntOverflowIndexgateway = fmt.Errorf("proto: integer overflow") +) diff --git a/pkg/logproto/indexgateway.proto b/pkg/logproto/indexgateway.proto index af34e03a279cb..1b54542d8f5a9 100644 --- a/pkg/logproto/indexgateway.proto +++ b/pkg/logproto/indexgateway.proto @@ -2,9 +2,11 @@ syntax = "proto3"; package indexgatewaypb; +import "gogoproto/gogo.proto"; import "pkg/logproto/logproto.proto"; +import "pkg/logqlmodel/stats/stats.proto"; -option go_package = "github.com/grafana/loki/pkg/logproto"; +option go_package = "github.com/grafana/loki/v3/pkg/logproto"; // This exists in a different file to retain proto namespacing compatibility with it's prior definition, but has been relocated to the logproto go pkg. service IndexGateway { @@ -25,4 +27,55 @@ service IndexGateway { // Note: this MUST be the same as the variant defined in // logproto.proto on the Querier service. rpc GetVolume(logproto.VolumeRequest) returns (logproto.VolumeResponse) {} + + // GetShards is an optimized implemented shard-planning implementation + // on the index gateway and not on the ingester. 
+ rpc GetShards(ShardsRequest) returns (stream ShardsResponse); +} + +message ShardsRequest { + int64 from = 1 [ + (gogoproto.customtype) = "github.com/prometheus/common/model.Time", + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "from" + ]; + int64 through = 2 [ + (gogoproto.customtype) = "github.com/prometheus/common/model.Time", + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "through" + ]; + string query = 3 [(gogoproto.jsontag) = "query"]; + uint64 target_bytes_per_shard = 4 [(gogoproto.jsontag) = "targetBytesPerShard"]; +} + +message ShardsResponse { + repeated Shard shards = 1 [ + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "shards" + ]; + stats.Result statistics = 2 [ + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "statistics" + ]; +} + +message Shard { + FPBounds bounds = 1 [ + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "bounds" + ]; + logproto.IndexStatsResponse stats = 2 [(gogoproto.jsontag) = "stats"]; +} + +// FPBounds is identical to the definition in `pkg/storage/bloom/v1/bounds.FingerprintBounds` +// which ensures we can cast between them without allocations. 
+message FPBounds { + uint64 min = 1 [ + (gogoproto.casttype) = "github.com/prometheus/common/model.Fingerprint", + (gogoproto.jsontag) = "min" + ]; + uint64 max = 2 [ + (gogoproto.casttype) = "github.com/prometheus/common/model.Fingerprint", + (gogoproto.jsontag) = "max" + ]; } diff --git a/pkg/logproto/logproto.pb.go b/pkg/logproto/logproto.pb.go index d50ae7d1e5db4..3459fa836529b 100644 --- a/pkg/logproto/logproto.pb.go +++ b/pkg/logproto/logproto.pb.go @@ -12,11 +12,11 @@ import ( proto "github.com/gogo/protobuf/proto" _ "github.com/gogo/protobuf/types" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" - github_com_grafana_loki_pkg_logql_syntax "github.com/grafana/loki/pkg/logql/syntax" - stats "github.com/grafana/loki/pkg/logqlmodel/stats" _ "github.com/grafana/loki/pkg/push" github_com_grafana_loki_pkg_push "github.com/grafana/loki/pkg/push" - github_com_grafana_loki_pkg_querier_plan "github.com/grafana/loki/pkg/querier/plan" + github_com_grafana_loki_v3_pkg_logql_syntax "github.com/grafana/loki/v3/pkg/logql/syntax" + stats "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + github_com_grafana_loki_v3_pkg_querier_plan "github.com/grafana/loki/v3/pkg/querier/plan" github_com_prometheus_common_model "github.com/prometheus/common/model" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -217,14 +217,14 @@ func (m *StreamRate) GetPushes() uint32 { } type QueryRequest struct { - Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` // Deprecated: Do not use. 
- Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` - Start time.Time `protobuf:"bytes,3,opt,name=start,proto3,stdtime" json:"start"` - End time.Time `protobuf:"bytes,4,opt,name=end,proto3,stdtime" json:"end"` - Direction Direction `protobuf:"varint,5,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` - Shards []string `protobuf:"bytes,7,rep,name=shards,proto3" json:"shards,omitempty"` - Deletes []*Delete `protobuf:"bytes,8,rep,name=deletes,proto3" json:"deletes,omitempty"` - Plan *github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,9,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` + Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` // Deprecated: Do not use. + Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` + Start time.Time `protobuf:"bytes,3,opt,name=start,proto3,stdtime" json:"start"` + End time.Time `protobuf:"bytes,4,opt,name=end,proto3,stdtime" json:"end"` + Direction Direction `protobuf:"varint,5,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` + Shards []string `protobuf:"bytes,7,rep,name=shards,proto3" json:"shards,omitempty"` + Deletes []*Delete `protobuf:"bytes,8,rep,name=deletes,proto3" json:"deletes,omitempty"` + Plan *github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,9,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` } func (m *QueryRequest) Reset() { *m = QueryRequest{} } @@ -310,12 +310,12 @@ func (m *QueryRequest) GetDeletes() []*Delete { } type SampleQueryRequest struct { - Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` // Deprecated: Do not use. 
- Start time.Time `protobuf:"bytes,2,opt,name=start,proto3,stdtime" json:"start"` - End time.Time `protobuf:"bytes,3,opt,name=end,proto3,stdtime" json:"end"` - Shards []string `protobuf:"bytes,4,rep,name=shards,proto3" json:"shards,omitempty"` - Deletes []*Delete `protobuf:"bytes,5,rep,name=deletes,proto3" json:"deletes,omitempty"` - Plan *github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,6,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` + Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` // Deprecated: Do not use. + Start time.Time `protobuf:"bytes,2,opt,name=start,proto3,stdtime" json:"start"` + End time.Time `protobuf:"bytes,3,opt,name=end,proto3,stdtime" json:"end"` + Shards []string `protobuf:"bytes,4,rep,name=shards,proto3" json:"shards,omitempty"` + Deletes []*Delete `protobuf:"bytes,5,rep,name=deletes,proto3" json:"deletes,omitempty"` + Plan *github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,6,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` } func (m *SampleQueryRequest) Reset() { *m = SampleQueryRequest{} } @@ -386,6 +386,7 @@ func (m *SampleQueryRequest) GetDeletes() []*Delete { return nil } +// TODO(owen-d): fix. This will break rollouts as soon as the internal repr is changed. type Plan struct { Raw []byte `protobuf:"bytes,1,opt,name=raw,proto3" json:"raw,omitempty"` } @@ -865,11 +866,11 @@ func (m *Series) GetStreamHash() uint64 { } type TailRequest struct { - Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` // Deprecated: Do not use. 
- DelayFor uint32 `protobuf:"varint,3,opt,name=delayFor,proto3" json:"delayFor,omitempty"` - Limit uint32 `protobuf:"varint,4,opt,name=limit,proto3" json:"limit,omitempty"` - Start time.Time `protobuf:"bytes,5,opt,name=start,proto3,stdtime" json:"start"` - Plan *github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,6,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` // Deprecated: Do not use. + DelayFor uint32 `protobuf:"varint,3,opt,name=delayFor,proto3" json:"delayFor,omitempty"` + Limit uint32 `protobuf:"varint,4,opt,name=limit,proto3" json:"limit,omitempty"` + Start time.Time `protobuf:"bytes,5,opt,name=start,proto3,stdtime" json:"start"` + Plan *github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,6,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` } func (m *TailRequest) Reset() { *m = TailRequest{} } @@ -1740,6 +1741,7 @@ func (m *LabelNamesForMetricNameRequest) GetMetricName() string { return "" } +// TODO(owen-d): fix. This will break rollouts as soon as the internal repr is changed. type LineFilter struct { Raw []byte `protobuf:"bytes,1,opt,name=raw,proto3" json:"raw,omitempty"` } @@ -1788,8 +1790,8 @@ type GetChunkRefRequest struct { Through github_com_prometheus_common_model.Time `protobuf:"varint,2,opt,name=through,proto3,customtype=github.com/prometheus/common/model.Time" json:"through"` Matchers string `protobuf:"bytes,3,opt,name=matchers,proto3" json:"matchers,omitempty"` // TODO(salvacorts): Delete this field once the weekly release is done. 
- Filters []github_com_grafana_loki_pkg_logql_syntax.LineFilter `protobuf:"bytes,4,rep,name=filters,proto3,customtype=github.com/grafana/loki/pkg/logql/syntax.LineFilter" json:"filters"` - Plan github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,5,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan"` + Filters []github_com_grafana_loki_v3_pkg_logql_syntax.LineFilter `protobuf:"bytes,4,rep,name=filters,proto3,customtype=github.com/grafana/loki/v3/pkg/logql/syntax.LineFilter" json:"filters"` + Plan github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,5,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan"` } func (m *GetChunkRefRequest) Reset() { *m = GetChunkRefRequest{} } @@ -2510,6 +2512,336 @@ func (m *Volume) GetVolume() uint64 { return 0 } +type DetectedFieldsRequest struct { + Start time.Time `protobuf:"bytes,1,opt,name=start,proto3,stdtime" json:"start"` + End time.Time `protobuf:"bytes,2,opt,name=end,proto3,stdtime" json:"end"` + Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query,omitempty"` + LineLimit uint32 `protobuf:"varint,4,opt,name=lineLimit,proto3" json:"lineLimit,omitempty"` + FieldLimit uint32 `protobuf:"varint,5,opt,name=fieldLimit,proto3" json:"fieldLimit,omitempty"` + Step int64 `protobuf:"varint,6,opt,name=step,proto3" json:"step,omitempty"` +} + +func (m *DetectedFieldsRequest) Reset() { *m = DetectedFieldsRequest{} } +func (*DetectedFieldsRequest) ProtoMessage() {} +func (*DetectedFieldsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_c28a5f14f1f4c79a, []int{45} +} +func (m *DetectedFieldsRequest) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedFieldsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedFieldsRequest.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := 
m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedFieldsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedFieldsRequest.Merge(m, src) +} +func (m *DetectedFieldsRequest) XXX_Size() int { + return m.Size() +} +func (m *DetectedFieldsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedFieldsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedFieldsRequest proto.InternalMessageInfo + +func (m *DetectedFieldsRequest) GetStart() time.Time { + if m != nil { + return m.Start + } + return time.Time{} +} + +func (m *DetectedFieldsRequest) GetEnd() time.Time { + if m != nil { + return m.End + } + return time.Time{} +} + +func (m *DetectedFieldsRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func (m *DetectedFieldsRequest) GetLineLimit() uint32 { + if m != nil { + return m.LineLimit + } + return 0 +} + +func (m *DetectedFieldsRequest) GetFieldLimit() uint32 { + if m != nil { + return m.FieldLimit + } + return 0 +} + +func (m *DetectedFieldsRequest) GetStep() int64 { + if m != nil { + return m.Step + } + return 0 +} + +type DetectedFieldsResponse struct { + Fields []*DetectedField `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"` +} + +func (m *DetectedFieldsResponse) Reset() { *m = DetectedFieldsResponse{} } +func (*DetectedFieldsResponse) ProtoMessage() {} +func (*DetectedFieldsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_c28a5f14f1f4c79a, []int{46} +} +func (m *DetectedFieldsResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedFieldsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedFieldsResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedFieldsResponse) XXX_Merge(src 
proto.Message) { + xxx_messageInfo_DetectedFieldsResponse.Merge(m, src) +} +func (m *DetectedFieldsResponse) XXX_Size() int { + return m.Size() +} +func (m *DetectedFieldsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedFieldsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedFieldsResponse proto.InternalMessageInfo + +func (m *DetectedFieldsResponse) GetFields() []*DetectedField { + if m != nil { + return m.Fields + } + return nil +} + +type DetectedField struct { + Label string `protobuf:"bytes,1,opt,name=label,proto3" json:"label,omitempty"` + Type DetectedFieldType `protobuf:"bytes,2,opt,name=type,proto3,casttype=DetectedFieldType" json:"type,omitempty"` + Cardinality uint64 `protobuf:"varint,3,opt,name=cardinality,proto3" json:"cardinality,omitempty"` +} + +func (m *DetectedField) Reset() { *m = DetectedField{} } +func (*DetectedField) ProtoMessage() {} +func (*DetectedField) Descriptor() ([]byte, []int) { + return fileDescriptor_c28a5f14f1f4c79a, []int{47} +} +func (m *DetectedField) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedField.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedField) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedField.Merge(m, src) +} +func (m *DetectedField) XXX_Size() int { + return m.Size() +} +func (m *DetectedField) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedField.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedField proto.InternalMessageInfo + +func (m *DetectedField) GetLabel() string { + if m != nil { + return m.Label + } + return "" +} + +func (m *DetectedField) GetType() DetectedFieldType { + if m != nil { + return m.Type + } + return "" +} + +func (m *DetectedField) GetCardinality() uint64 { + if m != nil { 
+ return m.Cardinality + } + return 0 +} + +type DetectedLabelsRequest struct { + Start *time.Time `protobuf:"bytes,1,opt,name=start,proto3,stdtime" json:"start,omitempty"` + End *time.Time `protobuf:"bytes,2,opt,name=end,proto3,stdtime" json:"end,omitempty"` + Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query,omitempty"` +} + +func (m *DetectedLabelsRequest) Reset() { *m = DetectedLabelsRequest{} } +func (*DetectedLabelsRequest) ProtoMessage() {} +func (*DetectedLabelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_c28a5f14f1f4c79a, []int{48} +} +func (m *DetectedLabelsRequest) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedLabelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedLabelsRequest.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedLabelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedLabelsRequest.Merge(m, src) +} +func (m *DetectedLabelsRequest) XXX_Size() int { + return m.Size() +} +func (m *DetectedLabelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedLabelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedLabelsRequest proto.InternalMessageInfo + +func (m *DetectedLabelsRequest) GetStart() *time.Time { + if m != nil { + return m.Start + } + return nil +} + +func (m *DetectedLabelsRequest) GetEnd() *time.Time { + if m != nil { + return m.End + } + return nil +} + +func (m *DetectedLabelsRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +type DetectedLabelsResponse struct { + DetectedLabels []*DetectedLabel `protobuf:"bytes,1,rep,name=detectedLabels,proto3" json:"detectedLabels,omitempty"` +} + +func (m *DetectedLabelsResponse) Reset() { *m = DetectedLabelsResponse{} } +func (*DetectedLabelsResponse) ProtoMessage() {} 
+func (*DetectedLabelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_c28a5f14f1f4c79a, []int{49} +} +func (m *DetectedLabelsResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedLabelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedLabelsResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedLabelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedLabelsResponse.Merge(m, src) +} +func (m *DetectedLabelsResponse) XXX_Size() int { + return m.Size() +} +func (m *DetectedLabelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedLabelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedLabelsResponse proto.InternalMessageInfo + +func (m *DetectedLabelsResponse) GetDetectedLabels() []*DetectedLabel { + if m != nil { + return m.DetectedLabels + } + return nil +} + +type DetectedLabel struct { + Label string `protobuf:"bytes,1,opt,name=label,proto3" json:"label,omitempty"` +} + +func (m *DetectedLabel) Reset() { *m = DetectedLabel{} } +func (*DetectedLabel) ProtoMessage() {} +func (*DetectedLabel) Descriptor() ([]byte, []int) { + return fileDescriptor_c28a5f14f1f4c79a, []int{50} +} +func (m *DetectedLabel) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedLabel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedLabel.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedLabel) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedLabel.Merge(m, src) +} +func (m *DetectedLabel) XXX_Size() int { + return m.Size() +} +func (m *DetectedLabel) XXX_DiscardUnknown() { + 
xxx_messageInfo_DetectedLabel.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedLabel proto.InternalMessageInfo + +func (m *DetectedLabel) GetLabel() string { + if m != nil { + return m.Label + } + return "" +} + func init() { proto.RegisterEnum("logproto.Direction", Direction_name, Direction_value) proto.RegisterType((*StreamRatesRequest)(nil), "logproto.StreamRatesRequest") @@ -2558,155 +2890,173 @@ func init() { proto.RegisterType((*VolumeRequest)(nil), "logproto.VolumeRequest") proto.RegisterType((*VolumeResponse)(nil), "logproto.VolumeResponse") proto.RegisterType((*Volume)(nil), "logproto.Volume") + proto.RegisterType((*DetectedFieldsRequest)(nil), "logproto.DetectedFieldsRequest") + proto.RegisterType((*DetectedFieldsResponse)(nil), "logproto.DetectedFieldsResponse") + proto.RegisterType((*DetectedField)(nil), "logproto.DetectedField") + proto.RegisterType((*DetectedLabelsRequest)(nil), "logproto.DetectedLabelsRequest") + proto.RegisterType((*DetectedLabelsResponse)(nil), "logproto.DetectedLabelsResponse") + proto.RegisterType((*DetectedLabel)(nil), "logproto.DetectedLabel") } func init() { proto.RegisterFile("pkg/logproto/logproto.proto", fileDescriptor_c28a5f14f1f4c79a) } var fileDescriptor_c28a5f14f1f4c79a = []byte{ - // 2278 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x19, 0x4d, 0x6f, 0x1b, 0xc7, - 0x95, 0x4b, 0x2e, 0xbf, 0x1e, 0x29, 0x59, 0x1e, 0x31, 0x36, 0x41, 0xdb, 0xa4, 0x3c, 0x48, 0x1d, - 0xc1, 0x71, 0xc8, 0x58, 0x6e, 0xdc, 0xd4, 0x6e, 0xd0, 0x9a, 0x52, 0xec, 0xc8, 0x96, 0x3f, 0x32, - 0x72, 0xdd, 0xc2, 0x68, 0x6b, 0xac, 0xc4, 0x11, 0x45, 0x88, 0xbb, 0x4b, 0xef, 0x0e, 0x63, 0x0b, - 0xe8, 0xa1, 0x7f, 0x20, 0x68, 0x6e, 0x45, 0x2f, 0x45, 0x0f, 0x05, 0x52, 0xa0, 0xe8, 0xa5, 0x3f, - 0xa0, 0xbd, 0xf4, 0xe0, 0xde, 0xdc, 0x5b, 0x90, 0x03, 0x5b, 0xcb, 0x97, 0x42, 0xa7, 0xdc, 0x72, - 0x2d, 0xe6, 0x6b, 0x77, 0x96, 0xa2, 0xdd, 0x50, 0x75, 0x51, 0xf8, 0xc2, 0x9d, 0x79, 0xf3, 0xe6, - 0xcd, 0xfb, 
0x9a, 0xf7, 0x31, 0x84, 0x13, 0x83, 0x9d, 0x6e, 0xab, 0xef, 0x77, 0x07, 0x81, 0xcf, - 0xfc, 0x68, 0xd0, 0x14, 0xbf, 0xa8, 0xa0, 0xe7, 0xb5, 0x4a, 0xd7, 0xef, 0xfa, 0x12, 0x87, 0x8f, - 0xe4, 0x7a, 0xad, 0xd1, 0xf5, 0xfd, 0x6e, 0x9f, 0xb6, 0xc4, 0x6c, 0x63, 0xb8, 0xd5, 0x62, 0x3d, - 0x97, 0x86, 0xcc, 0x71, 0x07, 0x0a, 0x61, 0x41, 0x51, 0x7f, 0xd8, 0x77, 0xfd, 0x0e, 0xed, 0xb7, - 0x42, 0xe6, 0xb0, 0x50, 0xfe, 0x2a, 0x8c, 0x79, 0x8e, 0x31, 0x18, 0x86, 0xdb, 0xe2, 0x47, 0x02, - 0x71, 0x05, 0xd0, 0x3a, 0x0b, 0xa8, 0xe3, 0x12, 0x87, 0xd1, 0x90, 0xd0, 0x87, 0x43, 0x1a, 0x32, - 0x7c, 0x13, 0xe6, 0x13, 0xd0, 0x70, 0xe0, 0x7b, 0x21, 0x45, 0x17, 0xa1, 0x14, 0xc6, 0xe0, 0xaa, - 0xb5, 0x90, 0x59, 0x2c, 0x2d, 0x55, 0x9a, 0x91, 0x28, 0xf1, 0x1e, 0x62, 0x22, 0xe2, 0xdf, 0x58, - 0x00, 0xf1, 0x1a, 0xaa, 0x03, 0xc8, 0xd5, 0x8f, 0x9c, 0x70, 0xbb, 0x6a, 0x2d, 0x58, 0x8b, 0x36, - 0x31, 0x20, 0xe8, 0x1c, 0x1c, 0x8d, 0x67, 0xb7, 0xfc, 0xf5, 0x6d, 0x27, 0xe8, 0x54, 0xd3, 0x02, - 0xed, 0xe0, 0x02, 0x42, 0x60, 0x07, 0x0e, 0xa3, 0xd5, 0xcc, 0x82, 0xb5, 0x98, 0x21, 0x62, 0x8c, - 0x8e, 0x41, 0x8e, 0x51, 0xcf, 0xf1, 0x58, 0xd5, 0x5e, 0xb0, 0x16, 0x8b, 0x44, 0xcd, 0x38, 0x9c, - 0xcb, 0x4e, 0xc3, 0x6a, 0x76, 0xc1, 0x5a, 0x9c, 0x21, 0x6a, 0x86, 0x3f, 0xcf, 0x40, 0xf9, 0xe3, - 0x21, 0x0d, 0x76, 0x95, 0x02, 0x50, 0x1d, 0x0a, 0x21, 0xed, 0xd3, 0x4d, 0xe6, 0x07, 0x82, 0xc1, - 0x62, 0x3b, 0x5d, 0xb5, 0x48, 0x04, 0x43, 0x15, 0xc8, 0xf6, 0x7b, 0x6e, 0x8f, 0x09, 0xb6, 0x66, - 0x88, 0x9c, 0xa0, 0x4b, 0x90, 0x0d, 0x99, 0x13, 0x30, 0xc1, 0x4b, 0x69, 0xa9, 0xd6, 0x94, 0x46, - 0x6b, 0x6a, 0xa3, 0x35, 0xef, 0x6a, 0xa3, 0xb5, 0x0b, 0x4f, 0x46, 0x8d, 0xd4, 0x67, 0xff, 0x68, - 0x58, 0x44, 0x6e, 0x41, 0x17, 0x21, 0x43, 0xbd, 0x8e, 0xe0, 0xf7, 0x9b, 0xee, 0xe4, 0x1b, 0xd0, - 0x79, 0x28, 0x76, 0x7a, 0x01, 0xdd, 0x64, 0x3d, 0xdf, 0x13, 0x52, 0xcd, 0x2e, 0xcd, 0xc7, 0x16, - 0x59, 0xd1, 0x4b, 0x24, 0xc6, 0x42, 0xe7, 0x20, 0x17, 0x72, 0xd5, 0x85, 0xd5, 0xfc, 0x42, 0x66, - 0xb1, 0xd8, 0xae, 0xec, 0x8f, 0x1a, 0x73, 0x12, 
0x72, 0xce, 0x77, 0x7b, 0x8c, 0xba, 0x03, 0xb6, - 0x4b, 0x14, 0x0e, 0x3a, 0x0b, 0xf9, 0x0e, 0xed, 0x53, 0x6e, 0xf0, 0x82, 0x30, 0xf8, 0x9c, 0x41, - 0x5e, 0x2c, 0x10, 0x8d, 0x80, 0xee, 0x83, 0x3d, 0xe8, 0x3b, 0x5e, 0xb5, 0x28, 0xa4, 0x98, 0x8d, - 0x11, 0xef, 0xf4, 0x1d, 0xaf, 0x7d, 0xf1, 0xcb, 0x51, 0x63, 0xa9, 0xdb, 0x63, 0xdb, 0xc3, 0x8d, - 0xe6, 0xa6, 0xef, 0xb6, 0xba, 0x81, 0xb3, 0xe5, 0x78, 0x4e, 0xab, 0xef, 0xef, 0xf4, 0x5a, 0xdc, - 0x39, 0x1f, 0x0e, 0x69, 0xd0, 0xa3, 0x41, 0x8b, 0xd3, 0x68, 0x0a, 0x7b, 0xf0, 0x7d, 0x44, 0xd0, - 0xbc, 0x6e, 0x17, 0x72, 0x73, 0x79, 0x3c, 0x4a, 0x03, 0x5a, 0x77, 0xdc, 0x41, 0x9f, 0x4e, 0x65, - 0xaf, 0xc8, 0x32, 0xe9, 0x43, 0x5b, 0x26, 0x33, 0xad, 0x65, 0x62, 0x35, 0xdb, 0xd3, 0xa9, 0x39, - 0xfb, 0x4d, 0xd5, 0x9c, 0x7b, 0xf5, 0x6a, 0xc6, 0x55, 0xb0, 0xf9, 0x0c, 0xcd, 0x41, 0x26, 0x70, - 0x1e, 0x09, 0x65, 0x96, 0x09, 0x1f, 0xe2, 0x35, 0xc8, 0x49, 0x46, 0x50, 0x6d, 0x5c, 0xdb, 0xc9, - 0x9b, 0x11, 0x6b, 0x3a, 0xa3, 0x75, 0x38, 0x17, 0xeb, 0x30, 0x23, 0xb4, 0x83, 0x7f, 0x6b, 0xc1, - 0x8c, 0x32, 0xa1, 0x8a, 0x2e, 0x1b, 0x90, 0x97, 0xb7, 0x5b, 0x47, 0x96, 0xe3, 0xe3, 0x91, 0xe5, - 0x4a, 0xc7, 0x19, 0x30, 0x1a, 0xb4, 0x5b, 0x4f, 0x46, 0x0d, 0xeb, 0xcb, 0x51, 0xe3, 0xad, 0x97, - 0x49, 0x29, 0x82, 0x9c, 0x8a, 0x3a, 0x9a, 0x30, 0x7a, 0x5b, 0x70, 0xc7, 0x42, 0xe5, 0x07, 0x47, - 0x9a, 0x32, 0x40, 0xae, 0x7a, 0x5d, 0x1a, 0x72, 0xca, 0x36, 0x37, 0x21, 0x91, 0x38, 0xf8, 0xe7, - 0x30, 0x9f, 0x70, 0x35, 0xc5, 0xe7, 0xfb, 0x90, 0x0b, 0xb9, 0x02, 0x35, 0x9b, 0x86, 0xa1, 0xd6, - 0x05, 0xbc, 0x3d, 0xab, 0xf8, 0xcb, 0xc9, 0x39, 0x51, 0xf8, 0xd3, 0x9d, 0xfe, 0x57, 0x0b, 0xca, - 0x6b, 0xce, 0x06, 0xed, 0x6b, 0x1f, 0x47, 0x60, 0x7b, 0x8e, 0x4b, 0x95, 0xc6, 0xc5, 0x98, 0x07, - 0xb4, 0x4f, 0x9c, 0xfe, 0x90, 0x4a, 0x92, 0x05, 0xa2, 0x66, 0xd3, 0x46, 0x22, 0xeb, 0xd0, 0x91, - 0xc8, 0x8a, 0xfd, 0xbd, 0x02, 0x59, 0xee, 0x59, 0xbb, 0x22, 0x0a, 0x15, 0x89, 0x9c, 0xe0, 0xb7, - 0x60, 0x46, 0x49, 0xa1, 0xd4, 0x17, 0xb3, 0xcc, 0xd5, 0x57, 0xd4, 0x2c, 0x63, 0x17, 
0x72, 0x52, - 0xdb, 0xe8, 0x4d, 0x28, 0x46, 0xd9, 0x4d, 0x48, 0x9b, 0x69, 0xe7, 0xf6, 0x47, 0x8d, 0x34, 0x0b, - 0x49, 0xbc, 0x80, 0x1a, 0x90, 0x15, 0x3b, 0x85, 0xe4, 0x56, 0xbb, 0xb8, 0x3f, 0x6a, 0x48, 0x00, - 0x91, 0x1f, 0x74, 0x12, 0xec, 0x6d, 0x9e, 0x60, 0xb8, 0x0a, 0xec, 0x76, 0x61, 0x7f, 0xd4, 0x10, - 0x73, 0x22, 0x7e, 0xf1, 0x35, 0x28, 0xaf, 0xd1, 0xae, 0xb3, 0xb9, 0xab, 0x0e, 0xad, 0x68, 0x72, - 0xfc, 0x40, 0x4b, 0xd3, 0x38, 0x0d, 0xe5, 0xe8, 0xc4, 0x07, 0x6e, 0xa8, 0x9c, 0xba, 0x14, 0xc1, - 0x6e, 0x86, 0xf8, 0xd7, 0x16, 0x28, 0x3b, 0x23, 0x0c, 0xb9, 0x3e, 0x97, 0x35, 0x54, 0x31, 0x08, - 0xf6, 0x47, 0x0d, 0x05, 0x21, 0xea, 0x8b, 0x2e, 0x43, 0x3e, 0x14, 0x27, 0x72, 0x62, 0xe3, 0xee, - 0x23, 0x16, 0xda, 0x47, 0xb8, 0x1b, 0xec, 0x8f, 0x1a, 0x1a, 0x91, 0xe8, 0x01, 0x6a, 0x26, 0x32, - 0xa7, 0x14, 0x6c, 0x76, 0x7f, 0xd4, 0x30, 0xa0, 0x66, 0x26, 0xc5, 0x5f, 0x5b, 0x50, 0xba, 0xeb, - 0xf4, 0x22, 0x17, 0xaa, 0x6a, 0x13, 0xc5, 0x31, 0x52, 0x02, 0xf8, 0x95, 0xee, 0xd0, 0xbe, 0xb3, - 0x7b, 0xd5, 0x0f, 0x04, 0xdd, 0x19, 0x12, 0xcd, 0xe3, 0x64, 0x67, 0x4f, 0x4c, 0x76, 0xd9, 0xe9, - 0x43, 0xea, 0xff, 0x30, 0x80, 0x5d, 0xb7, 0x0b, 0xe9, 0xb9, 0x0c, 0xfe, 0xa3, 0x05, 0x65, 0x29, - 0xb9, 0x72, 0xbb, 0x9f, 0x40, 0x4e, 0x2a, 0x46, 0xc8, 0xfe, 0x92, 0xe0, 0xf2, 0xf6, 0x34, 0x81, - 0x45, 0xd1, 0x44, 0xdf, 0x87, 0xd9, 0x4e, 0xe0, 0x0f, 0x06, 0xb4, 0xb3, 0xae, 0x42, 0x58, 0x7a, - 0x3c, 0x84, 0xad, 0x98, 0xeb, 0x64, 0x0c, 0x1d, 0xff, 0xcd, 0x82, 0x19, 0x15, 0x2d, 0x94, 0xad, - 0x22, 0xfd, 0x5a, 0x87, 0x4e, 0x59, 0xe9, 0x69, 0x53, 0xd6, 0x31, 0xc8, 0x75, 0x03, 0x7f, 0x38, - 0x08, 0xab, 0x19, 0x79, 0x37, 0xe5, 0x6c, 0xba, 0x54, 0x86, 0xaf, 0xc3, 0xac, 0x16, 0xe5, 0x05, - 0x21, 0xb3, 0x36, 0x1e, 0x32, 0x57, 0x3b, 0xd4, 0x63, 0xbd, 0xad, 0x5e, 0x14, 0x04, 0x15, 0x3e, - 0xfe, 0xa5, 0x05, 0x73, 0xe3, 0x28, 0x68, 0xc5, 0xb8, 0x67, 0x9c, 0xdc, 0x99, 0x17, 0x93, 0x6b, - 0x8a, 0xe0, 0x13, 0x7e, 0xe8, 0xb1, 0x60, 0x57, 0x93, 0x96, 0x7b, 0x6b, 0xef, 0x41, 0xc9, 0x58, - 0xe4, 0x29, 0x6a, 0x87, 
0xaa, 0x9b, 0x41, 0xf8, 0x30, 0x0e, 0x09, 0x69, 0x19, 0xd0, 0xc4, 0x04, - 0xff, 0xca, 0x82, 0x99, 0x84, 0x2d, 0xd1, 0xfb, 0x60, 0x6f, 0x05, 0xbe, 0x3b, 0x95, 0xa1, 0xc4, - 0x0e, 0xf4, 0x6d, 0x48, 0x33, 0x7f, 0x2a, 0x33, 0xa5, 0x99, 0xcf, 0xad, 0xa4, 0xc4, 0xcf, 0xc8, - 0xea, 0x56, 0xce, 0xf0, 0x7b, 0x50, 0x14, 0x02, 0xdd, 0x71, 0x7a, 0xc1, 0xc4, 0x6c, 0x31, 0x59, - 0xa0, 0xcb, 0x70, 0x44, 0x46, 0xc2, 0xc9, 0x9b, 0xcb, 0x93, 0x36, 0x97, 0xf5, 0xe6, 0x13, 0x90, - 0x5d, 0xde, 0x1e, 0x7a, 0x3b, 0x7c, 0x4b, 0xc7, 0x61, 0x8e, 0xde, 0xc2, 0xc7, 0xf8, 0x0d, 0x98, - 0xe7, 0x77, 0x90, 0x06, 0xe1, 0xb2, 0x3f, 0xf4, 0x98, 0xee, 0x2e, 0xce, 0x41, 0x25, 0x09, 0x56, - 0x5e, 0x52, 0x81, 0xec, 0x26, 0x07, 0x08, 0x1a, 0x33, 0x44, 0x4e, 0xf0, 0xef, 0x2c, 0x40, 0xd7, - 0x28, 0x13, 0xa7, 0xac, 0xae, 0x44, 0xd7, 0xa3, 0x06, 0x05, 0xd7, 0x61, 0x9b, 0xdb, 0x34, 0x08, - 0x75, 0x0d, 0xa2, 0xe7, 0xff, 0x8f, 0x6a, 0x0f, 0x9f, 0x87, 0xf9, 0x04, 0x97, 0x4a, 0xa6, 0x1a, - 0x14, 0x36, 0x15, 0x4c, 0xe5, 0xbb, 0x68, 0x8e, 0xff, 0x94, 0x86, 0x82, 0xd8, 0x40, 0xe8, 0x16, - 0x3a, 0x0f, 0xa5, 0xad, 0x9e, 0xd7, 0xa5, 0xc1, 0x20, 0xe8, 0x29, 0x15, 0xd8, 0xed, 0x23, 0xfb, - 0xa3, 0x86, 0x09, 0x26, 0xe6, 0x04, 0xbd, 0x03, 0xf9, 0x61, 0x48, 0x83, 0x07, 0x3d, 0x79, 0xd3, - 0x8b, 0xed, 0xca, 0xde, 0xa8, 0x91, 0xfb, 0x61, 0x48, 0x83, 0xd5, 0x15, 0x9e, 0x79, 0x86, 0x62, - 0x44, 0xe4, 0xb7, 0x83, 0x6e, 0x28, 0x37, 0x15, 0x45, 0x58, 0xfb, 0x3b, 0x9c, 0xfd, 0xb1, 0x50, - 0x37, 0x08, 0x7c, 0x97, 0xb2, 0x6d, 0x3a, 0x0c, 0x5b, 0x9b, 0xbe, 0xeb, 0xfa, 0x5e, 0x4b, 0xf4, - 0x92, 0x42, 0x68, 0x9e, 0x3e, 0xf9, 0x76, 0xe5, 0xb9, 0x77, 0x21, 0xcf, 0xb6, 0x03, 0x7f, 0xd8, - 0xdd, 0x16, 0x59, 0x21, 0xd3, 0xbe, 0x34, 0x3d, 0x3d, 0x4d, 0x81, 0xe8, 0x01, 0x3a, 0xcd, 0xb5, - 0x45, 0x37, 0x77, 0xc2, 0xa1, 0x2b, 0x3b, 0xb4, 0x76, 0x76, 0x7f, 0xd4, 0xb0, 0xde, 0x21, 0x11, - 0x18, 0x7f, 0x9a, 0x86, 0x86, 0x70, 0xd4, 0x7b, 0xa2, 0x6c, 0xb8, 0xea, 0x07, 0x37, 0x29, 0x0b, - 0x7a, 0x9b, 0xb7, 0x1c, 0x97, 0x6a, 0xdf, 0x68, 0x40, 0xc9, 
0x15, 0xc0, 0x07, 0xc6, 0x15, 0x00, - 0x37, 0xc2, 0x43, 0xa7, 0x00, 0xc4, 0x9d, 0x91, 0xeb, 0xf2, 0x36, 0x14, 0x05, 0x44, 0x2c, 0x2f, - 0x27, 0x34, 0xd5, 0x9a, 0x52, 0x32, 0xa5, 0xa1, 0xd5, 0x71, 0x0d, 0x4d, 0x4d, 0x27, 0x52, 0x8b, - 0xe9, 0xeb, 0xd9, 0xa4, 0xaf, 0xe3, 0xbf, 0x5b, 0x50, 0x5f, 0xd3, 0x9c, 0x1f, 0x52, 0x1d, 0x5a, - 0xde, 0xf4, 0x2b, 0x92, 0x37, 0xf3, 0xdf, 0xc9, 0x8b, 0xeb, 0x00, 0x6b, 0x3d, 0x8f, 0x5e, 0xed, - 0xf5, 0x19, 0x0d, 0x26, 0x74, 0x22, 0x9f, 0x66, 0xe2, 0x90, 0x40, 0xe8, 0x96, 0x96, 0x73, 0xd9, - 0x88, 0xc3, 0xaf, 0x42, 0x8c, 0xf4, 0x2b, 0x34, 0x5b, 0x66, 0x2c, 0x44, 0xed, 0x40, 0x7e, 0x4b, - 0x88, 0x27, 0x53, 0x6a, 0xe2, 0x19, 0x25, 0x96, 0xbd, 0x7d, 0x59, 0x1d, 0x7e, 0xe1, 0x65, 0x05, - 0x89, 0x78, 0xf5, 0x69, 0x85, 0xbb, 0x1e, 0x73, 0x1e, 0x1b, 0x9b, 0x89, 0x3e, 0x01, 0xfd, 0x4c, - 0x95, 0x5b, 0xd9, 0x89, 0xe5, 0x96, 0xbe, 0xb9, 0x87, 0xef, 0x19, 0x3f, 0x88, 0x63, 0x9f, 0x30, - 0x87, 0x8a, 0x7d, 0x67, 0xc0, 0x0e, 0xe8, 0x96, 0x4e, 0xd2, 0x28, 0x3e, 0x36, 0xc2, 0x14, 0xeb, - 0xf8, 0xcf, 0x16, 0xcc, 0x5d, 0xa3, 0x2c, 0x59, 0xfe, 0xbc, 0x46, 0xc6, 0xc4, 0x1f, 0xc1, 0x51, - 0x83, 0x7f, 0x25, 0xfd, 0x85, 0xb1, 0x9a, 0xe7, 0x8d, 0x58, 0xfe, 0x55, 0xaf, 0x43, 0x1f, 0xab, - 0x5e, 0x31, 0x59, 0xee, 0xdc, 0x81, 0x92, 0xb1, 0x88, 0xae, 0x8c, 0x15, 0x3a, 0xc6, 0xcb, 0x4e, - 0x94, 0xac, 0xdb, 0x15, 0x25, 0x93, 0xec, 0x16, 0x55, 0x19, 0x1b, 0x15, 0x05, 0xeb, 0x80, 0x84, - 0xb9, 0x04, 0x59, 0x33, 0x2d, 0x09, 0xe8, 0x8d, 0xa8, 0xe2, 0x89, 0xe6, 0xe8, 0x34, 0xd8, 0x81, - 0xff, 0x48, 0x57, 0xb0, 0x33, 0xf1, 0x91, 0xc4, 0x7f, 0x44, 0xc4, 0x12, 0xbe, 0x0c, 0x19, 0xe2, - 0x3f, 0x42, 0x75, 0x80, 0xc0, 0xf1, 0xba, 0xf4, 0x5e, 0xd4, 0x38, 0x95, 0x89, 0x01, 0x79, 0x41, - 0xc9, 0xb0, 0x0c, 0x47, 0x4d, 0x8e, 0xa4, 0xb9, 0x9b, 0x90, 0xff, 0x78, 0x68, 0xaa, 0xab, 0x32, - 0xa6, 0x2e, 0xd9, 0x83, 0x6b, 0x24, 0xee, 0x33, 0x10, 0xc3, 0xd1, 0x49, 0x28, 0x32, 0x67, 0xa3, - 0x4f, 0x6f, 0xc5, 0x01, 0x2e, 0x06, 0xf0, 0x55, 0xde, 0xf3, 0xdd, 0x33, 0x6a, 0x9f, 0x18, 0x80, - 
0xce, 0xc2, 0x5c, 0xcc, 0xf3, 0x9d, 0x80, 0x6e, 0xf5, 0x1e, 0x0b, 0x0b, 0x97, 0xc9, 0x01, 0x38, - 0x5a, 0x84, 0x23, 0x31, 0x6c, 0x5d, 0xd4, 0x18, 0xb6, 0x40, 0x1d, 0x07, 0x73, 0xdd, 0x08, 0x71, - 0x3f, 0x7c, 0x38, 0x74, 0xfa, 0xe2, 0xe6, 0x95, 0x89, 0x01, 0xc1, 0x7f, 0xb1, 0xe0, 0xa8, 0x34, - 0x35, 0xef, 0xf6, 0x5f, 0x47, 0xaf, 0xff, 0xdc, 0x02, 0x64, 0x4a, 0xa0, 0x5c, 0xeb, 0x5b, 0xe6, - 0x33, 0x0e, 0x2f, 0x62, 0x4a, 0xa2, 0x95, 0x95, 0xa0, 0xf8, 0x25, 0x06, 0x43, 0x4e, 0x14, 0x42, - 0xb2, 0xa7, 0xb6, 0x65, 0xaf, 0x2c, 0x21, 0x44, 0x7d, 0x79, 0x8b, 0xbf, 0xb1, 0xcb, 0x68, 0xa8, - 0x3a, 0x5d, 0xd1, 0xe2, 0x0b, 0x00, 0x91, 0x1f, 0x7e, 0x16, 0xf5, 0x98, 0xf0, 0x1a, 0x3b, 0x3e, - 0x4b, 0x81, 0x88, 0x1e, 0xe0, 0x3f, 0xa4, 0x61, 0xe6, 0x9e, 0xdf, 0x1f, 0xc6, 0x29, 0xf1, 0x75, - 0x4a, 0x15, 0x89, 0xf6, 0x3b, 0xab, 0xdb, 0x6f, 0x04, 0x76, 0xc8, 0xe8, 0x40, 0x78, 0x56, 0x86, - 0x88, 0x31, 0xc2, 0x50, 0x66, 0x4e, 0xd0, 0xa5, 0x4c, 0xf6, 0x35, 0xd5, 0x9c, 0x28, 0x38, 0x13, - 0x30, 0xb4, 0x00, 0x25, 0xa7, 0xdb, 0x0d, 0x68, 0xd7, 0x61, 0xb4, 0xbd, 0x5b, 0xcd, 0x8b, 0xc3, - 0x4c, 0x10, 0xfe, 0x31, 0xcc, 0x6a, 0x65, 0x29, 0x93, 0xbe, 0x0b, 0xf9, 0x4f, 0x04, 0x64, 0xc2, - 0x93, 0x97, 0x44, 0x55, 0x61, 0x4c, 0xa3, 0x25, 0xdf, 0xc7, 0x35, 0xcf, 0xf8, 0x3a, 0xe4, 0x24, - 0x3a, 0x3a, 0x69, 0x76, 0x27, 0xf2, 0x6d, 0x86, 0xcf, 0x55, 0xab, 0x81, 0x21, 0x27, 0x09, 0x29, - 0xc3, 0x0b, 0xdf, 0x90, 0x10, 0xa2, 0xbe, 0x67, 0xcf, 0x40, 0x31, 0x7a, 0xdc, 0x46, 0x25, 0xc8, - 0x5f, 0xbd, 0x4d, 0x7e, 0x74, 0x85, 0xac, 0xcc, 0xa5, 0x50, 0x19, 0x0a, 0xed, 0x2b, 0xcb, 0x37, - 0xc4, 0xcc, 0x5a, 0xfa, 0xda, 0xd6, 0x91, 0x25, 0x40, 0xdf, 0x83, 0xac, 0x0c, 0x17, 0xc7, 0x62, - 0xfe, 0xcd, 0x67, 0xe4, 0xda, 0xf1, 0x03, 0x70, 0xa9, 0x01, 0x9c, 0x7a, 0xd7, 0x42, 0xb7, 0xa0, - 0x24, 0x80, 0xea, 0xc1, 0xe8, 0xe4, 0xf8, 0xbb, 0x4d, 0x82, 0xd2, 0xa9, 0x17, 0xac, 0x1a, 0xf4, - 0x2e, 0x41, 0x56, 0xd8, 0xc4, 0xe4, 0xc6, 0x7c, 0xf0, 0x33, 0xb9, 0x49, 0x3c, 0xa1, 0xe1, 0x14, - 0xfa, 0x2e, 0xd8, 0xbc, 0x85, 0x42, 
0x46, 0x52, 0x31, 0xde, 0x79, 0x6a, 0xc7, 0xc6, 0xc1, 0xc6, - 0xb1, 0x1f, 0x44, 0xcf, 0x55, 0xc7, 0xc7, 0xdb, 0x66, 0xbd, 0xbd, 0x7a, 0x70, 0x21, 0x3a, 0xf9, - 0xb6, 0x7c, 0x57, 0xd1, 0xcd, 0x1b, 0x3a, 0x95, 0x3c, 0x6a, 0xac, 0xd7, 0xab, 0xd5, 0x5f, 0xb4, - 0x1c, 0x11, 0x5c, 0x83, 0x92, 0xd1, 0x38, 0x99, 0x6a, 0x3d, 0xd8, 0xf5, 0x99, 0x6a, 0x9d, 0xd0, - 0x6d, 0xe1, 0x14, 0xba, 0x06, 0x05, 0x9e, 0x8a, 0x79, 0x44, 0x42, 0x27, 0xc6, 0x33, 0xae, 0x11, - 0x69, 0x6b, 0x27, 0x27, 0x2f, 0x46, 0x84, 0x7e, 0x00, 0xc5, 0x6b, 0x94, 0x29, 0x77, 0x3d, 0x3e, - 0xee, 0xef, 0x13, 0x34, 0x95, 0xbc, 0x33, 0x38, 0xb5, 0xf4, 0x53, 0xfd, 0xa7, 0xd7, 0x8a, 0xc3, - 0x1c, 0x74, 0x1b, 0x66, 0x05, 0x63, 0xd1, 0xbf, 0x62, 0x09, 0x07, 0x3a, 0xf0, 0x17, 0x5c, 0xc2, - 0x81, 0x0e, 0xfe, 0x15, 0x87, 0x53, 0xed, 0xfb, 0x4f, 0x9f, 0xd5, 0x53, 0x5f, 0x3c, 0xab, 0xa7, - 0xbe, 0x7a, 0x56, 0xb7, 0x7e, 0xb1, 0x57, 0xb7, 0x7e, 0xbf, 0x57, 0xb7, 0x9e, 0xec, 0xd5, 0xad, - 0xa7, 0x7b, 0x75, 0xeb, 0x9f, 0x7b, 0x75, 0xeb, 0x5f, 0x7b, 0xf5, 0xd4, 0x57, 0x7b, 0x75, 0xeb, - 0xb3, 0xe7, 0xf5, 0xd4, 0xd3, 0xe7, 0xf5, 0xd4, 0x17, 0xcf, 0xeb, 0xa9, 0xfb, 0x6f, 0xfe, 0x87, - 0x42, 0x52, 0x36, 0xba, 0x39, 0xf1, 0xb9, 0xf0, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x3e, 0xbe, - 0x5b, 0x4c, 0xb3, 0x1c, 0x00, 0x00, + // 2472 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x39, 0xcb, 0x6f, 0x1b, 0xc7, + 0xf9, 0x5c, 0x72, 0xf9, 0xfa, 0x48, 0xc9, 0xf2, 0x88, 0x96, 0x09, 0xc6, 0x21, 0xe5, 0xc1, 0x2f, + 0xb6, 0x7e, 0x8e, 0x23, 0xc6, 0x76, 0xed, 0x26, 0x76, 0xdd, 0xd4, 0x94, 0x62, 0x47, 0xb6, 0xfc, + 0xc8, 0xc8, 0x75, 0xd3, 0xa2, 0x86, 0xb1, 0x22, 0x47, 0xd4, 0xc2, 0xe4, 0x2e, 0xbd, 0x3b, 0xb4, + 0x2d, 0xa0, 0x87, 0xfe, 0x03, 0x45, 0x03, 0xf4, 0x50, 0xf4, 0x52, 0xb4, 0x40, 0x81, 0x16, 0x2d, + 0x7a, 0xe9, 0x1f, 0xd0, 0x5e, 0x7a, 0x70, 0x6f, 0xce, 0x2d, 0xc8, 0x81, 0xad, 0xe5, 0x4b, 0xa1, + 0x53, 0x80, 0xde, 0x72, 0x2a, 0xe6, 0xb5, 0x2f, 0x51, 0x75, 0xa8, 0x38, 0x08, 0x7c, 
0x21, 0x67, + 0xbe, 0xf9, 0xe6, 0x9b, 0xf9, 0x1e, 0xf3, 0xbd, 0x16, 0x5e, 0x1b, 0xdc, 0xef, 0x36, 0x7b, 0x6e, + 0x77, 0xe0, 0xb9, 0xcc, 0x0d, 0x06, 0x8b, 0xe2, 0x17, 0x15, 0xf4, 0xbc, 0x56, 0xe9, 0xba, 0x5d, + 0x57, 0xe2, 0xf0, 0x91, 0x5c, 0xaf, 0x35, 0xba, 0xae, 0xdb, 0xed, 0xd1, 0xa6, 0x98, 0xad, 0x0f, + 0x37, 0x9a, 0xcc, 0xee, 0x53, 0x9f, 0x59, 0xfd, 0x81, 0x42, 0x98, 0x57, 0xd4, 0x1f, 0xf4, 0xfa, + 0x6e, 0x87, 0xf6, 0x9a, 0x3e, 0xb3, 0x98, 0x2f, 0x7f, 0x15, 0xc6, 0x2c, 0xc7, 0x18, 0x0c, 0xfd, + 0x4d, 0xf1, 0x23, 0x81, 0xb8, 0x02, 0x68, 0x8d, 0x79, 0xd4, 0xea, 0x13, 0x8b, 0x51, 0x9f, 0xd0, + 0x07, 0x43, 0xea, 0x33, 0x7c, 0x1d, 0x66, 0x63, 0x50, 0x7f, 0xe0, 0x3a, 0x3e, 0x45, 0xe7, 0xa0, + 0xe4, 0x87, 0xe0, 0xaa, 0x31, 0x9f, 0x59, 0x28, 0x9d, 0xae, 0x2c, 0x06, 0xac, 0x84, 0x7b, 0x48, + 0x14, 0x11, 0xff, 0xda, 0x00, 0x08, 0xd7, 0x50, 0x1d, 0x40, 0xae, 0x7e, 0x60, 0xf9, 0x9b, 0x55, + 0x63, 0xde, 0x58, 0x30, 0x49, 0x04, 0x82, 0x4e, 0xc2, 0xc1, 0x70, 0x76, 0xc3, 0x5d, 0xdb, 0xb4, + 0xbc, 0x4e, 0x35, 0x2d, 0xd0, 0x76, 0x2f, 0x20, 0x04, 0xa6, 0x67, 0x31, 0x5a, 0xcd, 0xcc, 0x1b, + 0x0b, 0x19, 0x22, 0xc6, 0x68, 0x0e, 0x72, 0x8c, 0x3a, 0x96, 0xc3, 0xaa, 0xe6, 0xbc, 0xb1, 0x50, + 0x24, 0x6a, 0xc6, 0xe1, 0x9c, 0x77, 0xea, 0x57, 0xb3, 0xf3, 0xc6, 0xc2, 0x14, 0x51, 0x33, 0xfc, + 0xc7, 0x0c, 0x94, 0x3f, 0x1c, 0x52, 0x6f, 0x4b, 0x09, 0x00, 0xd5, 0xa1, 0xe0, 0xd3, 0x1e, 0x6d, + 0x33, 0xd7, 0x13, 0x17, 0x2c, 0xb6, 0xd2, 0x55, 0x83, 0x04, 0x30, 0x54, 0x81, 0x6c, 0xcf, 0xee, + 0xdb, 0x4c, 0x5c, 0x6b, 0x8a, 0xc8, 0x09, 0x3a, 0x0f, 0x59, 0x9f, 0x59, 0x1e, 0x13, 0x77, 0x29, + 0x9d, 0xae, 0x2d, 0x4a, 0xa5, 0x2d, 0x6a, 0xa5, 0x2d, 0xde, 0xd6, 0x4a, 0x6b, 0x15, 0x9e, 0x8c, + 0x1a, 0xa9, 0x8f, 0xff, 0xd9, 0x30, 0x88, 0xdc, 0x82, 0xce, 0x41, 0x86, 0x3a, 0x1d, 0x71, 0xdf, + 0x2f, 0xbb, 0x93, 0x6f, 0x40, 0xa7, 0xa0, 0xd8, 0xb1, 0x3d, 0xda, 0x66, 0xb6, 0xeb, 0x08, 0xae, + 0xa6, 0x4f, 0xcf, 0x86, 0x1a, 0x59, 0xd6, 0x4b, 0x24, 0xc4, 0x42, 0x27, 0x21, 0xe7, 0x73, 0xd1, + 0xf9, 0xd5, 0xfc, 0x7c, 
0x66, 0xa1, 0xd8, 0xaa, 0xec, 0x8c, 0x1a, 0x33, 0x12, 0x72, 0xd2, 0xed, + 0xdb, 0x8c, 0xf6, 0x07, 0x6c, 0x8b, 0x28, 0x1c, 0x74, 0x02, 0xf2, 0x1d, 0xda, 0xa3, 0x5c, 0xe1, + 0x05, 0xa1, 0xf0, 0x99, 0x08, 0x79, 0xb1, 0x40, 0x34, 0x02, 0xba, 0x0b, 0xe6, 0xa0, 0x67, 0x39, + 0xd5, 0xa2, 0xe0, 0x62, 0x3a, 0x44, 0xbc, 0xd5, 0xb3, 0x9c, 0xd6, 0xbb, 0x9f, 0x8d, 0x1a, 0x67, + 0xbb, 0x36, 0xdb, 0x1c, 0xae, 0x2f, 0xb6, 0xdd, 0x7e, 0xb3, 0xeb, 0x59, 0x1b, 0x96, 0x63, 0x35, + 0x7b, 0xee, 0x7d, 0xbb, 0xf9, 0xf0, 0x4c, 0x93, 0xdb, 0xe7, 0x83, 0x21, 0xf5, 0x6c, 0xea, 0x35, + 0x39, 0x99, 0x45, 0xa1, 0x12, 0xbe, 0x95, 0x08, 0xb2, 0x57, 0xcd, 0x42, 0x6e, 0x26, 0x8f, 0x9f, + 0xa5, 0x01, 0xad, 0x59, 0xfd, 0x41, 0x8f, 0x4e, 0xa4, 0xb2, 0x40, 0x39, 0xe9, 0x7d, 0x2b, 0x27, + 0x33, 0xa9, 0x72, 0x42, 0x49, 0x9b, 0x93, 0x49, 0x3a, 0xfb, 0x65, 0x25, 0x9d, 0xfb, 0x5a, 0x24, + 0x8d, 0xab, 0x60, 0xf2, 0x19, 0x9a, 0x81, 0x8c, 0x67, 0x3d, 0x12, 0xf2, 0x2c, 0x13, 0x3e, 0xc4, + 0xab, 0x90, 0x93, 0x77, 0x41, 0xb5, 0xa4, 0xc0, 0xe3, 0xef, 0x23, 0x14, 0x76, 0x46, 0x8b, 0x71, + 0x26, 0x14, 0x63, 0x46, 0x08, 0x08, 0xff, 0xc6, 0x80, 0x29, 0xa5, 0x45, 0xe5, 0x63, 0xd6, 0x21, + 0x2f, 0xdf, 0xb8, 0xf6, 0x2f, 0x87, 0x93, 0xfe, 0xe5, 0x52, 0xc7, 0x1a, 0x30, 0xea, 0xb5, 0x9a, + 0x4f, 0x46, 0x0d, 0xe3, 0xb3, 0x51, 0xe3, 0xf8, 0x5e, 0x8c, 0x6a, 0x7f, 0xa7, 0xfd, 0x92, 0x26, + 0x8c, 0xde, 0x14, 0xb7, 0x63, 0xbe, 0x32, 0x85, 0x03, 0x8b, 0xd2, 0x4d, 0xae, 0x38, 0x5d, 0xea, + 0x73, 0xca, 0x26, 0xd7, 0x22, 0x91, 0x38, 0xf8, 0x27, 0x30, 0x1b, 0xb3, 0x36, 0x75, 0xcf, 0x77, + 0x20, 0xe7, 0x73, 0x01, 0xea, 0x6b, 0x46, 0x74, 0xb5, 0x26, 0xe0, 0xad, 0x69, 0x75, 0xbf, 0x9c, + 0x9c, 0x13, 0x85, 0x3f, 0xd9, 0xe9, 0x7f, 0x37, 0xa0, 0xbc, 0x6a, 0xad, 0xd3, 0x9e, 0x36, 0x73, + 0x04, 0xa6, 0x63, 0xf5, 0xa9, 0x92, 0xb8, 0x18, 0x73, 0xb7, 0xf6, 0xd0, 0xea, 0x0d, 0xa9, 0x24, + 0x59, 0x20, 0x6a, 0x36, 0xa9, 0x3f, 0x32, 0xf6, 0xed, 0x8f, 0x8c, 0xd0, 0xe4, 0x2b, 0x90, 0xe5, + 0x96, 0xb5, 0x25, 0x7c, 0x51, 0x91, 0xc8, 0x09, 0x3e, 0x0e, 
0x53, 0x8a, 0x0b, 0x25, 0xbe, 0xf0, + 0xca, 0x5c, 0x7c, 0x45, 0x7d, 0x65, 0xdc, 0x87, 0x9c, 0x94, 0x36, 0xfa, 0x3f, 0x28, 0x06, 0x31, + 0x4e, 0x70, 0x9b, 0x69, 0xe5, 0x76, 0x46, 0x8d, 0x34, 0xf3, 0x49, 0xb8, 0x80, 0x1a, 0x90, 0x15, + 0x3b, 0x05, 0xe7, 0x46, 0xab, 0xb8, 0x33, 0x6a, 0x48, 0x00, 0x91, 0x7f, 0xe8, 0x08, 0x98, 0x9b, + 0x3c, 0xcc, 0x70, 0x11, 0x98, 0xad, 0xc2, 0xce, 0xa8, 0x21, 0xe6, 0x44, 0xfc, 0xe2, 0x2b, 0x50, + 0x5e, 0xa5, 0x5d, 0xab, 0xbd, 0xa5, 0x0e, 0xad, 0x68, 0x72, 0xfc, 0x40, 0x43, 0xd3, 0x38, 0x0a, + 0xe5, 0xe0, 0xc4, 0x7b, 0x7d, 0x5f, 0x19, 0x75, 0x29, 0x80, 0x5d, 0xf7, 0xf1, 0xaf, 0x0c, 0x50, + 0x7a, 0x46, 0x18, 0x72, 0x3d, 0xce, 0xab, 0xaf, 0xdc, 0x10, 0xec, 0x8c, 0x1a, 0x0a, 0x42, 0xd4, + 0x3f, 0xba, 0x00, 0x79, 0x5f, 0x9c, 0xc8, 0x89, 0x25, 0xcd, 0x47, 0x2c, 0xb4, 0x0e, 0x70, 0x33, + 0xd8, 0x19, 0x35, 0x34, 0x22, 0xd1, 0x03, 0xb4, 0x18, 0x8b, 0x9f, 0x92, 0xb1, 0xe9, 0x9d, 0x51, + 0x23, 0x02, 0x8d, 0xc6, 0x53, 0xfc, 0x85, 0x01, 0xa5, 0xdb, 0x96, 0x1d, 0x98, 0x50, 0x55, 0xab, + 0x28, 0x74, 0x93, 0x12, 0xc0, 0x9f, 0x74, 0x87, 0xf6, 0xac, 0xad, 0xcb, 0xae, 0x27, 0xe8, 0x4e, + 0x91, 0x60, 0x1e, 0x86, 0x3c, 0x73, 0x6c, 0xc8, 0xcb, 0x4e, 0xee, 0x55, 0xbf, 0x5e, 0x1f, 0x76, + 0xd5, 0x2c, 0xa4, 0x67, 0x32, 0xf8, 0xcf, 0x06, 0x94, 0x25, 0xf3, 0xca, 0xf2, 0x7e, 0x0c, 0x39, + 0x29, 0x1b, 0xc1, 0xfe, 0xff, 0xf0, 0x2f, 0x6f, 0x4e, 0xe2, 0x5b, 0x14, 0x4d, 0xf4, 0x1e, 0x4c, + 0x77, 0x3c, 0x77, 0x30, 0xa0, 0x9d, 0x35, 0xe5, 0xc5, 0xd2, 0x49, 0x2f, 0xb6, 0x1c, 0x5d, 0x27, + 0x09, 0x74, 0xfc, 0x0f, 0x03, 0xa6, 0x94, 0xc3, 0x50, 0xea, 0x0a, 0x44, 0x6c, 0xec, 0x3b, 0x70, + 0xa5, 0x27, 0x0d, 0x5c, 0x73, 0x90, 0xeb, 0x7a, 0xee, 0x70, 0xe0, 0x57, 0x33, 0xf2, 0x79, 0xca, + 0xd9, 0x64, 0x01, 0x0d, 0x5f, 0x85, 0x69, 0xcd, 0xca, 0x1e, 0x5e, 0xb3, 0x96, 0xf4, 0x9a, 0x2b, + 0x1d, 0xea, 0x30, 0x7b, 0xc3, 0x0e, 0xfc, 0xa0, 0xc2, 0xc7, 0x3f, 0x37, 0x60, 0x26, 0x89, 0x82, + 0x96, 0x23, 0x4f, 0x8d, 0x93, 0x3b, 0xb6, 0x37, 0xb9, 0x45, 0xe1, 0x7f, 0xfc, 0xf7, 0x1d, 0xe6, + 
0x6d, 0x69, 0xd2, 0x72, 0x6f, 0xed, 0x2c, 0x94, 0x22, 0x8b, 0x3c, 0x4a, 0xdd, 0xa7, 0xea, 0x71, + 0x10, 0x3e, 0x0c, 0xbd, 0x42, 0x5a, 0xfa, 0x34, 0x31, 0xc1, 0xbf, 0x34, 0x60, 0x2a, 0xa6, 0x4b, + 0xf4, 0x0e, 0x98, 0x1b, 0x9e, 0xdb, 0x9f, 0x48, 0x51, 0x62, 0x07, 0xfa, 0x16, 0xa4, 0x99, 0x3b, + 0x91, 0x9a, 0xd2, 0xcc, 0xe5, 0x5a, 0x52, 0xec, 0x67, 0x64, 0x9a, 0x2b, 0x67, 0xf8, 0x2c, 0x14, + 0x05, 0x43, 0xb7, 0x2c, 0xdb, 0x1b, 0x1b, 0x30, 0xc6, 0x33, 0x74, 0x01, 0x0e, 0x48, 0x67, 0x38, + 0x7e, 0x73, 0x79, 0xdc, 0xe6, 0xb2, 0xde, 0xfc, 0x1a, 0x64, 0x97, 0x36, 0x87, 0xce, 0x7d, 0xbe, + 0xa5, 0x63, 0x31, 0x4b, 0x6f, 0xe1, 0x63, 0x7c, 0x08, 0x66, 0xf9, 0x1b, 0xa4, 0x9e, 0xbf, 0xe4, + 0x0e, 0x1d, 0xa6, 0xcb, 0x8c, 0x93, 0x50, 0x89, 0x83, 0x95, 0x95, 0x54, 0x20, 0xdb, 0xe6, 0x00, + 0x41, 0x63, 0x8a, 0xc8, 0x09, 0xfe, 0x9d, 0x01, 0xe8, 0x0a, 0x65, 0xe2, 0x94, 0x95, 0xe5, 0xe0, + 0x79, 0xd4, 0xa0, 0xd0, 0xb7, 0x58, 0x7b, 0x93, 0x7a, 0xbe, 0x4e, 0x43, 0xf4, 0xfc, 0x9b, 0xc8, + 0xf9, 0xf0, 0x29, 0x98, 0x8d, 0xdd, 0x52, 0xf1, 0x54, 0x83, 0x42, 0x5b, 0xc1, 0x54, 0xc8, 0x0b, + 0xe6, 0xf8, 0x2f, 0x69, 0x28, 0x88, 0x0d, 0x84, 0x6e, 0xa0, 0x53, 0x50, 0xda, 0xb0, 0x9d, 0x2e, + 0xf5, 0x06, 0x9e, 0xad, 0x44, 0x60, 0xb6, 0x0e, 0xec, 0x8c, 0x1a, 0x51, 0x30, 0x89, 0x4e, 0xd0, + 0x5b, 0x90, 0x1f, 0xfa, 0xd4, 0xbb, 0x67, 0xcb, 0x97, 0x5e, 0x6c, 0x55, 0xb6, 0x47, 0x8d, 0xdc, + 0xf7, 0x7d, 0xea, 0xad, 0x2c, 0xf3, 0xe0, 0x33, 0x14, 0x23, 0x22, 0xff, 0x3b, 0xe8, 0x9a, 0x32, + 0x53, 0x91, 0x87, 0xb5, 0xbe, 0xcd, 0xaf, 0x9f, 0x70, 0x75, 0x03, 0xcf, 0xed, 0x53, 0xb6, 0x49, + 0x87, 0x7e, 0xb3, 0xed, 0xf6, 0xfb, 0xae, 0xd3, 0x14, 0x45, 0xa5, 0x60, 0x9a, 0x47, 0x50, 0xbe, + 0x5d, 0x59, 0xee, 0x6d, 0xc8, 0xb3, 0x4d, 0xcf, 0x1d, 0x76, 0x37, 0x45, 0x60, 0xc8, 0xb4, 0xce, + 0x4f, 0x4e, 0x4f, 0x53, 0x20, 0x7a, 0x80, 0x8e, 0x72, 0x69, 0xd1, 0xf6, 0x7d, 0x7f, 0xd8, 0x97, + 0xa5, 0x5a, 0x2b, 0xbb, 0x33, 0x6a, 0x18, 0x6f, 0x91, 0x00, 0x8c, 0x7f, 0x96, 0x86, 0x86, 0x30, + 0xd4, 0x3b, 0x22, 0x73, 0xb8, 0xec, 
0x7a, 0xd7, 0x29, 0xf3, 0xec, 0xf6, 0x0d, 0xab, 0x4f, 0xb5, + 0x6d, 0x34, 0xa0, 0xd4, 0x17, 0xc0, 0x7b, 0x91, 0x27, 0x00, 0xfd, 0x00, 0x0f, 0xbd, 0x0e, 0x20, + 0xde, 0x8c, 0x5c, 0x97, 0xaf, 0xa1, 0x28, 0x20, 0x62, 0x79, 0x29, 0x26, 0xa9, 0xe6, 0x84, 0x9c, + 0x29, 0x09, 0xad, 0x24, 0x25, 0x34, 0x31, 0x9d, 0x40, 0x2c, 0x51, 0x5b, 0xcf, 0xc6, 0x6d, 0x1d, + 0x7f, 0x62, 0x40, 0x7d, 0x55, 0xdf, 0x7c, 0x9f, 0xe2, 0xd0, 0xfc, 0xa6, 0x5f, 0x12, 0xbf, 0x99, + 0xaf, 0xc6, 0x2f, 0xae, 0x03, 0xac, 0xda, 0x0e, 0xbd, 0x6c, 0xf7, 0x18, 0xf5, 0xc6, 0x14, 0x23, + 0xbf, 0xc8, 0x84, 0x2e, 0x81, 0xd0, 0x0d, 0xcd, 0xe7, 0x52, 0xc4, 0x0f, 0xbf, 0x0c, 0x36, 0xd2, + 0x2f, 0x51, 0x6d, 0x99, 0x84, 0x8b, 0x72, 0x20, 0xbf, 0x21, 0xd8, 0x93, 0x21, 0x35, 0xd6, 0x4f, + 0x09, 0x79, 0x6f, 0x7d, 0x57, 0x1d, 0x7e, 0xee, 0x05, 0x19, 0x91, 0xe8, 0x00, 0x35, 0xfd, 0x2d, + 0x87, 0x59, 0x8f, 0x23, 0xfb, 0x89, 0x3e, 0x04, 0x59, 0x2a, 0xe9, 0xca, 0x8e, 0x4d, 0xba, 0x2e, + 0xaa, 0x63, 0xbe, 0x52, 0xf1, 0x78, 0x31, 0xf4, 0x80, 0x42, 0x29, 0xca, 0x03, 0x1e, 0x03, 0xd3, + 0xa3, 0x1b, 0x3a, 0x54, 0xa3, 0xf0, 0xe4, 0x00, 0x53, 0xac, 0xe3, 0xbf, 0x1a, 0x30, 0x73, 0x85, + 0xb2, 0x78, 0x12, 0xf4, 0x0a, 0xa9, 0x14, 0x7f, 0x00, 0x07, 0x23, 0xf7, 0x57, 0xdc, 0x9f, 0x49, + 0x64, 0x3e, 0x87, 0x42, 0xfe, 0x57, 0x9c, 0x0e, 0x7d, 0xac, 0x8a, 0xc6, 0x78, 0xd2, 0x73, 0x0b, + 0x4a, 0x91, 0x45, 0x74, 0x29, 0x91, 0xee, 0x44, 0x1a, 0x3d, 0x41, 0xc8, 0x6e, 0x55, 0x14, 0x4f, + 0xb2, 0x6c, 0x54, 0xc9, 0x6c, 0x90, 0x1a, 0xac, 0x01, 0x12, 0xea, 0x12, 0x64, 0xa3, 0xc1, 0x49, + 0x40, 0xaf, 0x05, 0x79, 0x4f, 0x30, 0x47, 0x47, 0xc1, 0xf4, 0xdc, 0x47, 0x3a, 0x8f, 0x9d, 0x0a, + 0x8f, 0x24, 0xee, 0x23, 0x22, 0x96, 0xf0, 0x05, 0xc8, 0x10, 0xf7, 0x11, 0xaa, 0x03, 0x78, 0x96, + 0xd3, 0xa5, 0x77, 0x82, 0x0a, 0xaa, 0x4c, 0x22, 0x90, 0x3d, 0x12, 0x87, 0x25, 0x38, 0x18, 0xbd, + 0x91, 0x54, 0xf7, 0x22, 0xe4, 0x3f, 0x1c, 0x46, 0xc5, 0x55, 0x49, 0x88, 0x4b, 0x16, 0xe3, 0x1a, + 0x89, 0xdb, 0x0c, 0x84, 0x70, 0x74, 0x04, 0x8a, 0xcc, 0x5a, 0xef, 0xd1, 
0x1b, 0xa1, 0x9b, 0x0b, + 0x01, 0x7c, 0x95, 0x17, 0x7f, 0x77, 0x22, 0x19, 0x50, 0x08, 0x40, 0x27, 0x60, 0x26, 0xbc, 0xf3, + 0x2d, 0x8f, 0x6e, 0xd8, 0x8f, 0x85, 0x86, 0xcb, 0x64, 0x17, 0x1c, 0x2d, 0xc0, 0x81, 0x10, 0xb6, + 0x26, 0x32, 0x0d, 0x53, 0xa0, 0x26, 0xc1, 0x5c, 0x36, 0x82, 0xdd, 0xf7, 0x1f, 0x0c, 0xad, 0x9e, + 0x78, 0x7c, 0x65, 0x12, 0x81, 0xe0, 0xbf, 0x19, 0x70, 0x50, 0xaa, 0x9a, 0x97, 0xfd, 0xaf, 0xa2, + 0xd5, 0xff, 0xde, 0x00, 0x14, 0xe5, 0x40, 0x99, 0xd6, 0x1b, 0xd1, 0x7e, 0x0e, 0x4f, 0x65, 0x4a, + 0xa2, 0xa6, 0x95, 0xa0, 0xb0, 0x25, 0x83, 0x21, 0x27, 0xd2, 0x21, 0x59, 0x5c, 0x9b, 0xb2, 0x68, + 0x96, 0x10, 0xa2, 0xfe, 0x79, 0xad, 0xbf, 0xbe, 0xc5, 0xa8, 0xaf, 0x4a, 0x5e, 0x51, 0xeb, 0x0b, + 0x00, 0x91, 0x7f, 0xfc, 0x2c, 0xea, 0x30, 0x61, 0x35, 0x66, 0x78, 0x96, 0x02, 0x11, 0x3d, 0xc0, + 0x7f, 0x4a, 0xc3, 0xd4, 0x1d, 0xb7, 0x37, 0x0c, 0x03, 0xe3, 0xab, 0x14, 0x30, 0x62, 0x75, 0x78, + 0x56, 0xd7, 0xe1, 0x08, 0x4c, 0x9f, 0xd1, 0x81, 0xb0, 0xac, 0x0c, 0x11, 0x63, 0x84, 0xa1, 0xcc, + 0x2c, 0xaf, 0x4b, 0x99, 0xac, 0x6e, 0xaa, 0x39, 0x91, 0x76, 0xc6, 0x60, 0x68, 0x1e, 0x4a, 0x56, + 0xb7, 0xeb, 0xd1, 0xae, 0xc5, 0x68, 0x6b, 0xab, 0x9a, 0x17, 0x87, 0x45, 0x41, 0xf8, 0x23, 0x98, + 0xd6, 0xc2, 0x52, 0x2a, 0x7d, 0x1b, 0xf2, 0x0f, 0x05, 0x64, 0x4c, 0xef, 0x4b, 0xa2, 0x2a, 0x37, + 0xa6, 0xd1, 0xe2, 0xed, 0x72, 0x7d, 0x67, 0x7c, 0x15, 0x72, 0x12, 0x1d, 0x1d, 0x89, 0xd6, 0x28, + 0xb2, 0x49, 0xc3, 0xe7, 0xaa, 0xe0, 0xc0, 0x90, 0x93, 0x84, 0x94, 0xe2, 0x85, 0x6d, 0x48, 0x08, + 0x51, 0xff, 0xf8, 0x3f, 0x06, 0x1c, 0x5a, 0xa6, 0x8c, 0xb6, 0x19, 0xed, 0x5c, 0xb6, 0x69, 0xaf, + 0xf3, 0x8d, 0x96, 0xcf, 0x41, 0x13, 0x2c, 0x13, 0x69, 0x82, 0x71, 0xbf, 0xd3, 0xb3, 0x1d, 0xba, + 0x1a, 0xe9, 0xa2, 0x84, 0x00, 0xee, 0x21, 0x36, 0xf8, 0xc5, 0xe5, 0xb2, 0xfc, 0x3e, 0x11, 0x81, + 0x04, 0x1a, 0xce, 0x85, 0x1a, 0xc6, 0x2b, 0x30, 0x97, 0x64, 0x5a, 0xe9, 0xa8, 0x09, 0x39, 0xb1, + 0x77, 0x4c, 0x17, 0x35, 0xb6, 0x83, 0x28, 0x34, 0xec, 0xc1, 0x54, 0x6c, 0x41, 0xe8, 0x8c, 0xdb, + 0x88, 0xf2, 
0x9f, 0x72, 0x82, 0xfe, 0x1f, 0x4c, 0xb6, 0x35, 0x50, 0x6e, 0xb3, 0x75, 0xe8, 0x8b, + 0x51, 0xe3, 0x60, 0x6c, 0xdb, 0xed, 0xad, 0x01, 0x25, 0x02, 0x85, 0x9b, 0x56, 0xdb, 0xf2, 0x3a, + 0xb6, 0x63, 0xf5, 0x6c, 0x26, 0x45, 0x61, 0x92, 0x28, 0x08, 0xff, 0x36, 0xa2, 0x34, 0x69, 0x8f, + 0xfb, 0x54, 0x9a, 0xb1, 0x6f, 0xa5, 0x19, 0x2f, 0x50, 0x1a, 0xfe, 0x61, 0x28, 0x62, 0x7d, 0x45, + 0x25, 0xe2, 0xf7, 0x60, 0xba, 0x13, 0x5b, 0xd9, 0x5b, 0xd4, 0xb2, 0xf7, 0x99, 0x40, 0xc7, 0x6f, + 0x84, 0x22, 0x17, 0x90, 0xf1, 0x22, 0x3f, 0x71, 0x0c, 0x8a, 0xc1, 0x67, 0x1c, 0x54, 0x82, 0xfc, + 0xe5, 0x9b, 0xe4, 0x07, 0x97, 0xc8, 0xf2, 0x4c, 0x0a, 0x95, 0xa1, 0xd0, 0xba, 0xb4, 0x74, 0x4d, + 0xcc, 0x8c, 0xd3, 0x9f, 0x64, 0x75, 0xd0, 0xf4, 0xd0, 0x77, 0x20, 0x2b, 0x23, 0xe1, 0x5c, 0x78, + 0x99, 0xe8, 0xd7, 0x92, 0xda, 0xe1, 0x5d, 0x70, 0xc9, 0x15, 0x4e, 0xbd, 0x6d, 0xa0, 0x1b, 0x50, + 0x12, 0x40, 0xd5, 0x14, 0x3d, 0x92, 0xec, 0x4d, 0xc6, 0x28, 0xbd, 0xbe, 0xc7, 0x6a, 0x84, 0xde, + 0x79, 0xc8, 0x4a, 0x06, 0xe7, 0x12, 0x09, 0xcb, 0x98, 0xdb, 0xc4, 0xda, 0xc4, 0x38, 0x85, 0xde, + 0x05, 0xf3, 0xb6, 0x65, 0xf7, 0x50, 0x24, 0x5f, 0x8a, 0xf4, 0x32, 0x6b, 0x73, 0x49, 0x70, 0xe4, + 0xd8, 0x8b, 0x41, 0x4b, 0xf6, 0x70, 0xb2, 0x2f, 0xa4, 0xb7, 0x57, 0x77, 0x2f, 0x04, 0x27, 0xdf, + 0x94, 0x8d, 0x43, 0xdd, 0x9d, 0x40, 0xaf, 0xc7, 0x8f, 0x4a, 0x34, 0x33, 0x6a, 0xf5, 0xbd, 0x96, + 0x03, 0x82, 0xab, 0x50, 0x8a, 0x74, 0x06, 0xa2, 0x62, 0xdd, 0xdd, 0xd6, 0x88, 0x8a, 0x75, 0x4c, + 0x3b, 0x01, 0xa7, 0xd0, 0x15, 0x28, 0xf0, 0x2c, 0x93, 0x07, 0x5b, 0xf4, 0x5a, 0x32, 0x99, 0x8c, + 0x24, 0x11, 0xb5, 0x23, 0xe3, 0x17, 0x03, 0x42, 0xdf, 0x83, 0xe2, 0x15, 0xca, 0x94, 0x27, 0x3e, + 0x9c, 0x74, 0xe5, 0x63, 0x24, 0x15, 0x0f, 0x07, 0x38, 0x85, 0x3e, 0x12, 0x09, 0x6f, 0xdc, 0x13, + 0xa1, 0xc6, 0x1e, 0x1e, 0x27, 0xb8, 0xd7, 0xfc, 0xde, 0x08, 0x9a, 0xf2, 0xe9, 0xbb, 0xfa, 0xc3, + 0xf1, 0xb2, 0xc5, 0x2c, 0x74, 0x13, 0xa6, 0x05, 0xcb, 0xc1, 0x97, 0xe5, 0x98, 0x69, 0xee, 0xfa, + 0x8c, 0x1d, 0x33, 0xcd, 0xdd, 0x9f, 0xb3, 0x71, 
0xaa, 0x75, 0xf7, 0xe9, 0xb3, 0x7a, 0xea, 0xd3, + 0x67, 0xf5, 0xd4, 0xe7, 0xcf, 0xea, 0xc6, 0x4f, 0xb7, 0xeb, 0xc6, 0x1f, 0xb6, 0xeb, 0xc6, 0x93, + 0xed, 0xba, 0xf1, 0x74, 0xbb, 0x6e, 0xfc, 0x6b, 0xbb, 0x6e, 0xfc, 0x7b, 0xbb, 0x9e, 0xfa, 0x7c, + 0xbb, 0x6e, 0x7c, 0xfc, 0xbc, 0x9e, 0x7a, 0xfa, 0xbc, 0x9e, 0xfa, 0xf4, 0x79, 0x3d, 0xf5, 0xa3, + 0xe3, 0x2f, 0xae, 0xc1, 0xa4, 0xb7, 0xc9, 0x89, 0xbf, 0x33, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, + 0x05, 0xeb, 0x00, 0x3f, 0xfa, 0x1f, 0x00, 0x00, } func (x Direction) String() string { @@ -4190,43 +4540,232 @@ func (this *Volume) Equal(that interface{}) bool { } return true } -func (this *StreamRatesRequest) GoString() string { - if this == nil { - return "nil" +func (this *DetectedFieldsRequest) Equal(that interface{}) bool { + if that == nil { + return this == nil } - s := make([]string, 0, 4) - s = append(s, "&logproto.StreamRatesRequest{") - s = append(s, "}") - return strings.Join(s, "") -} -func (this *StreamRatesResponse) GoString() string { - if this == nil { - return "nil" + + that1, ok := that.(*DetectedFieldsRequest) + if !ok { + that2, ok := that.(DetectedFieldsRequest) + if ok { + that1 = &that2 + } else { + return false + } } - s := make([]string, 0, 5) - s = append(s, "&logproto.StreamRatesResponse{") - if this.StreamRates != nil { - s = append(s, "StreamRates: "+fmt.Sprintf("%#v", this.StreamRates)+",\n") + if that1 == nil { + return this == nil + } else if this == nil { + return false } - s = append(s, "}") - return strings.Join(s, "") -} -func (this *StreamRate) GoString() string { - if this == nil { - return "nil" + if !this.Start.Equal(that1.Start) { + return false } - s := make([]string, 0, 9) - s = append(s, "&logproto.StreamRate{") - s = append(s, "StreamHash: "+fmt.Sprintf("%#v", this.StreamHash)+",\n") - s = append(s, "StreamHashNoShard: "+fmt.Sprintf("%#v", this.StreamHashNoShard)+",\n") - s = append(s, "Rate: "+fmt.Sprintf("%#v", this.Rate)+",\n") - s = append(s, "Tenant: "+fmt.Sprintf("%#v", 
this.Tenant)+",\n") - s = append(s, "Pushes: "+fmt.Sprintf("%#v", this.Pushes)+",\n") - s = append(s, "}") - return strings.Join(s, "") -} -func (this *QueryRequest) GoString() string { - if this == nil { + if !this.End.Equal(that1.End) { + return false + } + if this.Query != that1.Query { + return false + } + if this.LineLimit != that1.LineLimit { + return false + } + if this.FieldLimit != that1.FieldLimit { + return false + } + if this.Step != that1.Step { + return false + } + return true +} +func (this *DetectedFieldsResponse) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedFieldsResponse) + if !ok { + that2, ok := that.(DetectedFieldsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if len(this.Fields) != len(that1.Fields) { + return false + } + for i := range this.Fields { + if !this.Fields[i].Equal(that1.Fields[i]) { + return false + } + } + return true +} +func (this *DetectedField) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedField) + if !ok { + that2, ok := that.(DetectedField) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if this.Label != that1.Label { + return false + } + if this.Type != that1.Type { + return false + } + if this.Cardinality != that1.Cardinality { + return false + } + return true +} +func (this *DetectedLabelsRequest) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedLabelsRequest) + if !ok { + that2, ok := that.(DetectedLabelsRequest) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if that1.Start == nil { + if this.Start != nil { + return false + } + } 
else if !this.Start.Equal(*that1.Start) { + return false + } + if that1.End == nil { + if this.End != nil { + return false + } + } else if !this.End.Equal(*that1.End) { + return false + } + if this.Query != that1.Query { + return false + } + return true +} +func (this *DetectedLabelsResponse) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedLabelsResponse) + if !ok { + that2, ok := that.(DetectedLabelsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if len(this.DetectedLabels) != len(that1.DetectedLabels) { + return false + } + for i := range this.DetectedLabels { + if !this.DetectedLabels[i].Equal(that1.DetectedLabels[i]) { + return false + } + } + return true +} +func (this *DetectedLabel) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedLabel) + if !ok { + that2, ok := that.(DetectedLabel) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if this.Label != that1.Label { + return false + } + return true +} +func (this *StreamRatesRequest) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 4) + s = append(s, "&logproto.StreamRatesRequest{") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *StreamRatesResponse) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 5) + s = append(s, "&logproto.StreamRatesResponse{") + if this.StreamRates != nil { + s = append(s, "StreamRates: "+fmt.Sprintf("%#v", this.StreamRates)+",\n") + } + s = append(s, "}") + return strings.Join(s, "") +} +func (this *StreamRate) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 9) + s = append(s, "&logproto.StreamRate{") + s = append(s, "StreamHash: 
"+fmt.Sprintf("%#v", this.StreamHash)+",\n") + s = append(s, "StreamHashNoShard: "+fmt.Sprintf("%#v", this.StreamHashNoShard)+",\n") + s = append(s, "Rate: "+fmt.Sprintf("%#v", this.Rate)+",\n") + s = append(s, "Tenant: "+fmt.Sprintf("%#v", this.Tenant)+",\n") + s = append(s, "Pushes: "+fmt.Sprintf("%#v", this.Pushes)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *QueryRequest) GoString() string { + if this == nil { return "nil" } s := make([]string, 0, 12) @@ -4771,6 +5310,79 @@ func (this *Volume) GoString() string { s = append(s, "}") return strings.Join(s, "") } +func (this *DetectedFieldsRequest) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 10) + s = append(s, "&logproto.DetectedFieldsRequest{") + s = append(s, "Start: "+fmt.Sprintf("%#v", this.Start)+",\n") + s = append(s, "End: "+fmt.Sprintf("%#v", this.End)+",\n") + s = append(s, "Query: "+fmt.Sprintf("%#v", this.Query)+",\n") + s = append(s, "LineLimit: "+fmt.Sprintf("%#v", this.LineLimit)+",\n") + s = append(s, "FieldLimit: "+fmt.Sprintf("%#v", this.FieldLimit)+",\n") + s = append(s, "Step: "+fmt.Sprintf("%#v", this.Step)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *DetectedFieldsResponse) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 5) + s = append(s, "&logproto.DetectedFieldsResponse{") + if this.Fields != nil { + s = append(s, "Fields: "+fmt.Sprintf("%#v", this.Fields)+",\n") + } + s = append(s, "}") + return strings.Join(s, "") +} +func (this *DetectedField) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 7) + s = append(s, "&logproto.DetectedField{") + s = append(s, "Label: "+fmt.Sprintf("%#v", this.Label)+",\n") + s = append(s, "Type: "+fmt.Sprintf("%#v", this.Type)+",\n") + s = append(s, "Cardinality: "+fmt.Sprintf("%#v", this.Cardinality)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *DetectedLabelsRequest) 
GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 7) + s = append(s, "&logproto.DetectedLabelsRequest{") + s = append(s, "Start: "+fmt.Sprintf("%#v", this.Start)+",\n") + s = append(s, "End: "+fmt.Sprintf("%#v", this.End)+",\n") + s = append(s, "Query: "+fmt.Sprintf("%#v", this.Query)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *DetectedLabelsResponse) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 5) + s = append(s, "&logproto.DetectedLabelsResponse{") + if this.DetectedLabels != nil { + s = append(s, "DetectedLabels: "+fmt.Sprintf("%#v", this.DetectedLabels)+",\n") + } + s = append(s, "}") + return strings.Join(s, "") +} +func (this *DetectedLabel) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 5) + s = append(s, "&logproto.DetectedLabel{") + s = append(s, "Label: "+fmt.Sprintf("%#v", this.Label)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} func valueToGoStringLogproto(v interface{}, typ string) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -4805,6 +5417,7 @@ type QuerierClient interface { // Note: this MUST be the same as the variant defined in // indexgateway.proto on the IndexGateway service. GetVolume(ctx context.Context, in *VolumeRequest, opts ...grpc.CallOption) (*VolumeResponse, error) + GetDetectedFields(ctx context.Context, in *DetectedFieldsRequest, opts ...grpc.CallOption) (*DetectedFieldsResponse, error) } type querierClient struct { @@ -4965,6 +5578,15 @@ func (c *querierClient) GetVolume(ctx context.Context, in *VolumeRequest, opts . return out, nil } +func (c *querierClient) GetDetectedFields(ctx context.Context, in *DetectedFieldsRequest, opts ...grpc.CallOption) (*DetectedFieldsResponse, error) { + out := new(DetectedFieldsResponse) + err := c.cc.Invoke(ctx, "/logproto.Querier/GetDetectedFields", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + // QuerierServer is the server API for Querier service. type QuerierServer interface { Query(*QueryRequest, Querier_QueryServer) error @@ -4980,6 +5602,7 @@ type QuerierServer interface { // Note: this MUST be the same as the variant defined in // indexgateway.proto on the IndexGateway service. GetVolume(context.Context, *VolumeRequest) (*VolumeResponse, error) + GetDetectedFields(context.Context, *DetectedFieldsRequest) (*DetectedFieldsResponse, error) } // UnimplementedQuerierServer can be embedded to have forward compatible implementations. @@ -5013,6 +5636,9 @@ func (*UnimplementedQuerierServer) GetStats(ctx context.Context, req *IndexStats func (*UnimplementedQuerierServer) GetVolume(ctx context.Context, req *VolumeRequest) (*VolumeResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method GetVolume not implemented") } +func (*UnimplementedQuerierServer) GetDetectedFields(ctx context.Context, req *DetectedFieldsRequest) (*DetectedFieldsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetDetectedFields not implemented") +} func RegisterQuerierServer(s *grpc.Server, srv QuerierServer) { s.RegisterService(&_Querier_serviceDesc, srv) @@ -5189,6 +5815,24 @@ func _Querier_GetVolume_Handler(srv interface{}, ctx context.Context, dec func(i return interceptor(ctx, in, info, handler) } +func _Querier_GetDetectedFields_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DetectedFieldsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(QuerierServer).GetDetectedFields(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/logproto.Querier/GetDetectedFields", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(QuerierServer).GetDetectedFields(ctx, 
req.(*DetectedFieldsRequest)) + } + return interceptor(ctx, in, info, handler) +} + var _Querier_serviceDesc = grpc.ServiceDesc{ ServiceName: "logproto.Querier", HandlerType: (*QuerierServer)(nil), @@ -5217,6 +5861,10 @@ var _Querier_serviceDesc = grpc.ServiceDesc{ MethodName: "GetVolume", Handler: _Querier_GetVolume_Handler, }, + { + MethodName: "GetDetectedFields", + Handler: _Querier_GetDetectedFields_Handler, + }, }, Streams: []grpc.StreamDesc{ { @@ -7324,12 +7972,269 @@ func (m *Volume) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } -func encodeVarintLogproto(dAtA []byte, offset int, v uint64) int { - offset -= sovLogproto(v) - base := offset - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 +func (m *DetectedFieldsRequest) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DetectedFieldsRequest) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedFieldsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Step != 0 { + i = encodeVarintLogproto(dAtA, i, uint64(m.Step)) + i-- + dAtA[i] = 0x30 + } + if m.FieldLimit != 0 { + i = encodeVarintLogproto(dAtA, i, uint64(m.FieldLimit)) + i-- + dAtA[i] = 0x28 + } + if m.LineLimit != 0 { + i = encodeVarintLogproto(dAtA, i, uint64(m.LineLimit)) + i-- + dAtA[i] = 0x20 + } + if len(m.Query) > 0 { + i -= len(m.Query) + copy(dAtA[i:], m.Query) + i = encodeVarintLogproto(dAtA, i, uint64(len(m.Query))) + i-- + dAtA[i] = 0x1a + } + n21, err21 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.End, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.End):]) + if err21 != nil { + return 0, err21 + } + i -= n21 + i = encodeVarintLogproto(dAtA, i, uint64(n21)) + i-- + dAtA[i] = 0x12 + n22, err22 := 
github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Start, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.Start):]) + if err22 != nil { + return 0, err22 + } + i -= n22 + i = encodeVarintLogproto(dAtA, i, uint64(n22)) + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + +func (m *DetectedFieldsResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DetectedFieldsResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedFieldsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Fields) > 0 { + for iNdEx := len(m.Fields) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.Fields[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintLogproto(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + +func (m *DetectedField) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DetectedField) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedField) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Cardinality != 0 { + i = encodeVarintLogproto(dAtA, i, uint64(m.Cardinality)) + i-- + dAtA[i] = 0x18 + } + if len(m.Type) > 0 { + i -= len(m.Type) + copy(dAtA[i:], m.Type) + i = encodeVarintLogproto(dAtA, i, uint64(len(m.Type))) + i-- + dAtA[i] = 0x12 + } + if len(m.Label) > 0 { + i -= len(m.Label) + copy(dAtA[i:], m.Label) + i = encodeVarintLogproto(dAtA, i, uint64(len(m.Label))) + i-- + dAtA[i] = 0xa + } + return 
len(dAtA) - i, nil +} + +func (m *DetectedLabelsRequest) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DetectedLabelsRequest) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedLabelsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Query) > 0 { + i -= len(m.Query) + copy(dAtA[i:], m.Query) + i = encodeVarintLogproto(dAtA, i, uint64(len(m.Query))) + i-- + dAtA[i] = 0x1a + } + if m.End != nil { + n23, err23 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.End, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.End):]) + if err23 != nil { + return 0, err23 + } + i -= n23 + i = encodeVarintLogproto(dAtA, i, uint64(n23)) + i-- + dAtA[i] = 0x12 + } + if m.Start != nil { + n24, err24 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.Start, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.Start):]) + if err24 != nil { + return 0, err24 + } + i -= n24 + i = encodeVarintLogproto(dAtA, i, uint64(n24)) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *DetectedLabelsResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DetectedLabelsResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedLabelsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.DetectedLabels) > 0 { + for iNdEx := len(m.DetectedLabels) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.DetectedLabels[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + 
i -= size + i = encodeVarintLogproto(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + +func (m *DetectedLabel) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DetectedLabel) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedLabel) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Label) > 0 { + i -= len(m.Label) + copy(dAtA[i:], m.Label) + i = encodeVarintLogproto(dAtA, i, uint64(len(m.Label))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func encodeVarintLogproto(dAtA []byte, offset int, v uint64) int { + offset -= sovLogproto(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 offset++ } dAtA[offset] = uint8(v) @@ -8217,53 +9122,163 @@ func (m *Volume) Size() (n int) { return n } -func sovLogproto(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 -} -func sozLogproto(x uint64) (n int) { - return sovLogproto(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} -func (this *StreamRatesRequest) String() string { - if this == nil { - return "nil" +func (m *DetectedFieldsRequest) Size() (n int) { + if m == nil { + return 0 } - s := strings.Join([]string{`&StreamRatesRequest{`, - `}`, - }, "") - return s -} -func (this *StreamRatesResponse) String() string { - if this == nil { - return "nil" + var l int + _ = l + l = github_com_gogo_protobuf_types.SizeOfStdTime(m.Start) + n += 1 + l + sovLogproto(uint64(l)) + l = github_com_gogo_protobuf_types.SizeOfStdTime(m.End) + n += 1 + l + sovLogproto(uint64(l)) + l = len(m.Query) + if l > 0 { + n += 1 + l + sovLogproto(uint64(l)) } - repeatedStringForStreamRates := "[]*StreamRate{" - for _, f := range this.StreamRates { - 
repeatedStringForStreamRates += strings.Replace(f.String(), "StreamRate", "StreamRate", 1) + "," + if m.LineLimit != 0 { + n += 1 + sovLogproto(uint64(m.LineLimit)) } - repeatedStringForStreamRates += "}" - s := strings.Join([]string{`&StreamRatesResponse{`, - `StreamRates:` + repeatedStringForStreamRates + `,`, - `}`, - }, "") - return s -} -func (this *StreamRate) String() string { - if this == nil { - return "nil" + if m.FieldLimit != 0 { + n += 1 + sovLogproto(uint64(m.FieldLimit)) } - s := strings.Join([]string{`&StreamRate{`, - `StreamHash:` + fmt.Sprintf("%v", this.StreamHash) + `,`, - `StreamHashNoShard:` + fmt.Sprintf("%v", this.StreamHashNoShard) + `,`, - `Rate:` + fmt.Sprintf("%v", this.Rate) + `,`, - `Tenant:` + fmt.Sprintf("%v", this.Tenant) + `,`, - `Pushes:` + fmt.Sprintf("%v", this.Pushes) + `,`, - `}`, - }, "") - return s -} -func (this *QueryRequest) String() string { - if this == nil { - return "nil" + if m.Step != 0 { + n += 1 + sovLogproto(uint64(m.Step)) + } + return n +} + +func (m *DetectedFieldsResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Fields) > 0 { + for _, e := range m.Fields { + l = e.Size() + n += 1 + l + sovLogproto(uint64(l)) + } + } + return n +} + +func (m *DetectedField) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Label) + if l > 0 { + n += 1 + l + sovLogproto(uint64(l)) + } + l = len(m.Type) + if l > 0 { + n += 1 + l + sovLogproto(uint64(l)) + } + if m.Cardinality != 0 { + n += 1 + sovLogproto(uint64(m.Cardinality)) + } + return n +} + +func (m *DetectedLabelsRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Start != nil { + l = github_com_gogo_protobuf_types.SizeOfStdTime(*m.Start) + n += 1 + l + sovLogproto(uint64(l)) + } + if m.End != nil { + l = github_com_gogo_protobuf_types.SizeOfStdTime(*m.End) + n += 1 + l + sovLogproto(uint64(l)) + } + l = len(m.Query) + if l > 0 { + n += 1 + l + sovLogproto(uint64(l)) + } 
+ return n +} + +func (m *DetectedLabelsResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.DetectedLabels) > 0 { + for _, e := range m.DetectedLabels { + l = e.Size() + n += 1 + l + sovLogproto(uint64(l)) + } + } + return n +} + +func (m *DetectedLabel) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Label) + if l > 0 { + n += 1 + l + sovLogproto(uint64(l)) + } + return n +} + +func sovLogproto(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} +func sozLogproto(x uint64) (n int) { + return sovLogproto(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (this *StreamRatesRequest) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&StreamRatesRequest{`, + `}`, + }, "") + return s +} +func (this *StreamRatesResponse) String() string { + if this == nil { + return "nil" + } + repeatedStringForStreamRates := "[]*StreamRate{" + for _, f := range this.StreamRates { + repeatedStringForStreamRates += strings.Replace(f.String(), "StreamRate", "StreamRate", 1) + "," + } + repeatedStringForStreamRates += "}" + s := strings.Join([]string{`&StreamRatesResponse{`, + `StreamRates:` + repeatedStringForStreamRates + `,`, + `}`, + }, "") + return s +} +func (this *StreamRate) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&StreamRate{`, + `StreamHash:` + fmt.Sprintf("%v", this.StreamHash) + `,`, + `StreamHashNoShard:` + fmt.Sprintf("%v", this.StreamHashNoShard) + `,`, + `Rate:` + fmt.Sprintf("%v", this.Rate) + `,`, + `Tenant:` + fmt.Sprintf("%v", this.Tenant) + `,`, + `Pushes:` + fmt.Sprintf("%v", this.Pushes) + `,`, + `}`, + }, "") + return s +} +func (this *QueryRequest) String() string { + if this == nil { + return "nil" } repeatedStringForDeletes := "[]*Delete{" for _, f := range this.Deletes { @@ -8820,6 +9835,85 @@ func (this *Volume) String() string { }, "") return s } +func (this *DetectedFieldsRequest) String() string { 
+ if this == nil { + return "nil" + } + s := strings.Join([]string{`&DetectedFieldsRequest{`, + `Start:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.Start), "Timestamp", "types.Timestamp", 1), `&`, ``, 1) + `,`, + `End:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.End), "Timestamp", "types.Timestamp", 1), `&`, ``, 1) + `,`, + `Query:` + fmt.Sprintf("%v", this.Query) + `,`, + `LineLimit:` + fmt.Sprintf("%v", this.LineLimit) + `,`, + `FieldLimit:` + fmt.Sprintf("%v", this.FieldLimit) + `,`, + `Step:` + fmt.Sprintf("%v", this.Step) + `,`, + `}`, + }, "") + return s +} +func (this *DetectedFieldsResponse) String() string { + if this == nil { + return "nil" + } + repeatedStringForFields := "[]*DetectedField{" + for _, f := range this.Fields { + repeatedStringForFields += strings.Replace(f.String(), "DetectedField", "DetectedField", 1) + "," + } + repeatedStringForFields += "}" + s := strings.Join([]string{`&DetectedFieldsResponse{`, + `Fields:` + repeatedStringForFields + `,`, + `}`, + }, "") + return s +} +func (this *DetectedField) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&DetectedField{`, + `Label:` + fmt.Sprintf("%v", this.Label) + `,`, + `Type:` + fmt.Sprintf("%v", this.Type) + `,`, + `Cardinality:` + fmt.Sprintf("%v", this.Cardinality) + `,`, + `}`, + }, "") + return s +} +func (this *DetectedLabelsRequest) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&DetectedLabelsRequest{`, + `Start:` + strings.Replace(fmt.Sprintf("%v", this.Start), "Timestamp", "types.Timestamp", 1) + `,`, + `End:` + strings.Replace(fmt.Sprintf("%v", this.End), "Timestamp", "types.Timestamp", 1) + `,`, + `Query:` + fmt.Sprintf("%v", this.Query) + `,`, + `}`, + }, "") + return s +} +func (this *DetectedLabelsResponse) String() string { + if this == nil { + return "nil" + } + repeatedStringForDetectedLabels := "[]*DetectedLabel{" + for _, f := range this.DetectedLabels { + 
repeatedStringForDetectedLabels += strings.Replace(f.String(), "DetectedLabel", "DetectedLabel", 1) + "," + } + repeatedStringForDetectedLabels += "}" + s := strings.Join([]string{`&DetectedLabelsResponse{`, + `DetectedLabels:` + repeatedStringForDetectedLabels + `,`, + `}`, + }, "") + return s +} +func (this *DetectedLabel) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&DetectedLabel{`, + `Label:` + fmt.Sprintf("%v", this.Label) + `,`, + `}`, + }, "") + return s +} func valueToStringLogproto(v interface{}) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -9390,7 +10484,7 @@ func (m *QueryRequest) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Plan == nil { - m.Plan = &github_com_grafana_loki_pkg_querier_plan.QueryPlan{} + m.Plan = &github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan{} } if err := m.Plan.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -9643,7 +10737,7 @@ func (m *SampleQueryRequest) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Plan == nil { - m.Plan = &github_com_grafana_loki_pkg_querier_plan.QueryPlan{} + m.Plan = &github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan{} } if err := m.Plan.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -10902,7 +11996,7 @@ func (m *TailRequest) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Plan == nil { - m.Plan = &github_com_grafana_loki_pkg_querier_plan.QueryPlan{} + m.Plan = &github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan{} } if err := m.Plan.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -13052,7 +14146,7 @@ func (m *GetChunkRefRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Filters = append(m.Filters, github_com_grafana_loki_pkg_logql_syntax.LineFilter{}) + m.Filters = append(m.Filters, github_com_grafana_loki_v3_pkg_logql_syntax.LineFilter{}) if err := m.Filters[len(m.Filters)-1].Unmarshal(dAtA[iNdEx:postIndex]); err 
!= nil { return err } @@ -14731,6 +15825,766 @@ func (m *Volume) Unmarshal(dAtA []byte) error { } return nil } +func (m *DetectedFieldsRequest) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedFieldsRequest: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedFieldsRequest: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Start", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(&m.Start, dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field End", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + msglen + if postIndex < 0 { 
+ return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(&m.End, dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Query", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Query = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 4: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field LineLimit", wireType) + } + m.LineLimit = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.LineLimit |= uint32(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 5: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field FieldLimit", wireType) + } + m.FieldLimit = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.FieldLimit |= uint32(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 6: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Step", wireType) + } + m.Step = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Step |= 
int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipLogproto(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *DetectedFieldsResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedFieldsResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedFieldsResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Fields", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Fields = append(m.Fields, &DetectedField{}) + if err := m.Fields[len(m.Fields)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipLogproto(dAtA[iNdEx:]) + if err != nil { + return err + } + 
if skippy < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *DetectedField) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedField: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedField: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Label", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Label = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Type", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 
0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Type = DetectedFieldType(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Cardinality", wireType) + } + m.Cardinality = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Cardinality |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipLogproto(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *DetectedLabelsRequest) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedLabelsRequest: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedLabelsRequest: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Start", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 
64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Start == nil { + m.Start = new(time.Time) + } + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(m.Start, dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field End", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.End == nil { + m.End = new(time.Time) + } + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(m.End, dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Query", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Query = string(dAtA[iNdEx:postIndex]) 
+ iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipLogproto(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *DetectedLabelsResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedLabelsResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedLabelsResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field DetectedLabels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.DetectedLabels = append(m.DetectedLabels, &DetectedLabel{}) + if err := m.DetectedLabels[len(m.DetectedLabels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipLogproto(dAtA[iNdEx:]) + if err != nil { + return err + 
} + if skippy < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *DetectedLabel) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedLabel: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedLabel: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Label", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Label = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipLogproto(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return 
io.ErrUnexpectedEOF + } + return nil +} func skipLogproto(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 diff --git a/pkg/logproto/logproto.proto b/pkg/logproto/logproto.proto index bf175168cfd93..892e0f599ebae 100644 --- a/pkg/logproto/logproto.proto +++ b/pkg/logproto/logproto.proto @@ -7,7 +7,7 @@ import "google/protobuf/timestamp.proto"; import "pkg/logqlmodel/stats/stats.proto"; import "pkg/push/push.proto"; -option go_package = "github.com/grafana/loki/pkg/logproto"; +option go_package = "github.com/grafana/loki/v3/pkg/logproto"; service Querier { rpc Query(QueryRequest) returns (stream QueryResponse) {} @@ -30,6 +30,8 @@ service Querier { // Note: this MUST be the same as the variant defined in // indexgateway.proto on the IndexGateway service. rpc GetVolume(VolumeRequest) returns (VolumeResponse) {} + + rpc GetDetectedFields(DetectedFieldsRequest) returns (DetectedFieldsResponse) {} } service StreamData { @@ -65,7 +67,7 @@ message QueryRequest { reserved 6; repeated string shards = 7 [(gogoproto.jsontag) = "shards,omitempty"]; repeated Delete deletes = 8; - Plan plan = 9 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan"]; + Plan plan = 9 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan"]; } message SampleQueryRequest { @@ -80,9 +82,10 @@ message SampleQueryRequest { ]; repeated string shards = 4 [(gogoproto.jsontag) = "shards,omitempty"]; repeated Delete deletes = 5; - Plan plan = 6 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan"]; + Plan plan = 6 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan"]; } +// TODO(owen-d): fix. This will break rollouts as soon as the internal repr is changed. 
message Plan { bytes raw = 1; } @@ -162,7 +165,7 @@ message TailRequest { (gogoproto.stdtime) = true, (gogoproto.nullable) = false ]; - Plan plan = 6 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan"]; + Plan plan = 6 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan"]; } message TailResponse { @@ -297,6 +300,7 @@ message LabelNamesForMetricNameRequest { ]; } +// TODO(owen-d): fix. This will break rollouts as soon as the internal repr is changed. message LineFilter { bytes raw = 1; } @@ -313,11 +317,11 @@ message GetChunkRefRequest { string matchers = 3; // TODO(salvacorts): Delete this field once the weekly release is done. repeated LineFilter filters = 4 [ - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logql/syntax.LineFilter", + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logql/syntax.LineFilter", (gogoproto.nullable) = false ]; Plan plan = 5 [ - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan", + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan", (gogoproto.nullable) = false ]; } @@ -418,3 +422,48 @@ message Volume { string name = 1 [(gogoproto.jsontag) = "name"]; uint64 volume = 3 [(gogoproto.jsontag) = "volume"]; } + +message DetectedFieldsRequest { + google.protobuf.Timestamp start = 1 [ + (gogoproto.stdtime) = true, + (gogoproto.nullable) = false + ]; + google.protobuf.Timestamp end = 2 [ + (gogoproto.stdtime) = true, + (gogoproto.nullable) = false + ]; + string query = 3; // Naming this query instead of match because this should be with queryrangebase.Request interface + uint32 lineLimit = 4; + uint32 fieldLimit = 5; + int64 step = 6; +} + +message DetectedFieldsResponse { + repeated DetectedField fields = 1; +} + +message DetectedField { + string label = 1; + string type = 2 [(gogoproto.casttype) = "DetectedFieldType"]; + uint64 cardinality = 3; +} + +message DetectedLabelsRequest { + google.protobuf.Timestamp start 
= 1 [ + (gogoproto.stdtime) = true, + (gogoproto.nullable) = true + ]; + google.protobuf.Timestamp end = 2 [ + (gogoproto.stdtime) = true, + (gogoproto.nullable) = true + ]; + string query = 3; +} + +message DetectedLabelsResponse { + repeated DetectedLabel detectedLabels = 1; +} + +message DetectedLabel { + string label = 1; +} diff --git a/pkg/logproto/metrics.pb.go b/pkg/logproto/metrics.pb.go index 49e45e03a9b40..facb92220d05c 100644 --- a/pkg/logproto/metrics.pb.go +++ b/pkg/logproto/metrics.pb.go @@ -342,46 +342,47 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/metrics.proto", fileDescriptor_d2388e514bd0aa0e) } var fileDescriptor_d2388e514bd0aa0e = []byte{ - // 623 bytes of a gzipped FileDescriptorProto + // 627 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x53, 0xcf, 0x6e, 0xd3, 0x4e, - 0x10, 0xf6, 0x26, 0x69, 0x92, 0x4e, 0xff, 0xfc, 0xac, 0x55, 0xf5, 0xc3, 0x04, 0x69, 0x53, 0x0c, - 0x87, 0x1e, 0x50, 0x22, 0x15, 0x09, 0x04, 0xe2, 0x92, 0xa0, 0x34, 0x54, 0x34, 0x7f, 0x58, 0x3b, - 0x54, 0xf4, 0x12, 0x6d, 0xd3, 0xad, 0xbb, 0xaa, 0x1d, 0x1b, 0xdb, 0x41, 0xca, 0x8d, 0x17, 0x40, - 0xe2, 0xcc, 0x13, 0xf0, 0x04, 0x3c, 0x43, 0x8f, 0x3d, 0x56, 0x1c, 0x2a, 0xea, 0x5e, 0x7a, 0xec, - 0x23, 0x20, 0xaf, 0x9d, 0x38, 0x45, 0xe2, 0xc6, 0xc9, 0x33, 0xf3, 0x7d, 0xdf, 0xcc, 0xe8, 0x1b, - 0x2f, 0x54, 0xbc, 0x53, 0xab, 0x6e, 0xbb, 0x96, 0xe7, 0xbb, 0xa1, 0x5b, 0x77, 0x78, 0xe8, 0x8b, - 0x51, 0x50, 0x93, 0x19, 0x2e, 0xcf, 0xea, 0x95, 0x0d, 0xcb, 0xb5, 0xdc, 0x84, 0x12, 0x47, 0x09, - 0x5e, 0x79, 0x70, 0x47, 0x3b, 0x0b, 0x12, 0x50, 0xff, 0x91, 0x83, 0xd5, 0x7d, 0x5f, 0x84, 0x9c, - 0xf2, 0x8f, 0x13, 0x1e, 0x84, 0xb8, 0x0f, 0x10, 0x0a, 0x87, 0x07, 0xdc, 0x17, 0x3c, 0xd0, 0xd0, - 0x66, 0x7e, 0x6b, 0x65, 0x7b, 0xa3, 0x36, 0x57, 0x99, 0xc2, 0xe1, 0x86, 0xc4, 0x9a, 0x95, 0xb3, - 0xcb, 0xaa, 0xf2, 0xf3, 0xb2, 0x8a, 0xfb, 0x3e, 0x67, 0xb6, 0xed, 0x8e, 0xcc, 0xb9, 0x8e, 0x2e, - 0xf4, 0xc0, 0x2f, 0xa0, 0x68, 0xb8, 
0x13, 0x7f, 0xc4, 0xb5, 0xdc, 0x26, 0xda, 0x5a, 0xdf, 0x7e, - 0x98, 0x75, 0x5b, 0x9c, 0x5c, 0x4b, 0x48, 0xad, 0xf1, 0xc4, 0xa1, 0xa9, 0x00, 0xbf, 0x84, 0xb2, - 0xc3, 0x43, 0x76, 0xc4, 0x42, 0xa6, 0xe5, 0xe5, 0x2a, 0x5a, 0x26, 0xee, 0x48, 0x17, 0x3a, 0x29, - 0xde, 0x2c, 0x9c, 0x5d, 0x56, 0x11, 0x9d, 0xf3, 0xf1, 0x2b, 0xa8, 0x04, 0xa7, 0xc2, 0x1b, 0xda, - 0xec, 0x90, 0xdb, 0xc3, 0x31, 0x73, 0xf8, 0xf0, 0x13, 0xb3, 0xc5, 0x11, 0x0b, 0x85, 0x3b, 0xd6, - 0x6e, 0x4a, 0x9b, 0x68, 0xab, 0x4c, 0xef, 0xc5, 0x94, 0xbd, 0x98, 0xd1, 0x65, 0x0e, 0x7f, 0x3f, - 0xc7, 0xf5, 0x2a, 0x40, 0xb6, 0x0f, 0x2e, 0x41, 0xbe, 0xd1, 0xdf, 0x55, 0x15, 0x5c, 0x86, 0x02, - 0x1d, 0xec, 0xb5, 0x54, 0xa4, 0xff, 0x07, 0x6b, 0xe9, 0xf6, 0x81, 0xe7, 0x8e, 0x03, 0xae, 0x7f, - 0x41, 0x00, 0x99, 0x3b, 0xb8, 0x0d, 0x45, 0x39, 0x79, 0xe6, 0xe1, 0xfd, 0x6c, 0xf1, 0x3d, 0x6e, - 0xb1, 0xd1, 0x54, 0x4e, 0xed, 0x33, 0xe1, 0x37, 0x37, 0x52, 0x23, 0x57, 0x65, 0xa9, 0x71, 0xc4, - 0xbc, 0x90, 0xfb, 0x34, 0x95, 0xe3, 0x67, 0x50, 0x0a, 0x98, 0xe3, 0xd9, 0x3c, 0xd0, 0x72, 0xb2, - 0xd3, 0xff, 0x7f, 0x76, 0x32, 0x24, 0x2c, 0x0d, 0x50, 0xe8, 0x8c, 0xac, 0x7f, 0xcb, 0xc1, 0xfa, - 0x5d, 0x8b, 0xf0, 0x73, 0x28, 0x84, 0x53, 0x8f, 0x6b, 0x48, 0xde, 0xe1, 0xd1, 0xdf, 0xac, 0x4c, - 0x53, 0x73, 0xea, 0x71, 0x2a, 0x05, 0xf8, 0x09, 0xe0, 0xe4, 0x9f, 0x1b, 0x1e, 0x33, 0x47, 0xd8, - 0x53, 0x69, 0xa7, 0x3c, 0xe7, 0x32, 0x55, 0x13, 0x64, 0x47, 0x02, 0xb1, 0x8b, 0x18, 0x43, 0xe1, - 0x84, 0xdb, 0x9e, 0x56, 0x90, 0xb8, 0x8c, 0xe3, 0xda, 0x64, 0x2c, 0x42, 0x6d, 0x29, 0xa9, 0xc5, - 0xb1, 0x3e, 0x05, 0xc8, 0x26, 0xe1, 0x15, 0x28, 0x0d, 0xba, 0x6f, 0xbb, 0xbd, 0xfd, 0xae, 0xaa, - 0xc4, 0xc9, 0xeb, 0xde, 0xa0, 0x6b, 0xb6, 0xa8, 0x8a, 0xf0, 0x32, 0x2c, 0xb5, 0x1b, 0x83, 0x76, - 0x4b, 0xcd, 0xe1, 0x35, 0x58, 0x7e, 0xb3, 0x6b, 0x98, 0xbd, 0x36, 0x6d, 0x74, 0xd4, 0x3c, 0xc6, - 0xb0, 0x2e, 0x91, 0xac, 0x56, 0x88, 0xa5, 0xc6, 0xa0, 0xd3, 0x69, 0xd0, 0x0f, 0xea, 0x52, 0x7c, - 0xaf, 0xdd, 0xee, 0x4e, 0x4f, 0x2d, 0xe2, 0x55, 0x28, 0x1b, 0x66, 0xc3, 
0x6c, 0x19, 0x2d, 0x53, - 0x2d, 0xe9, 0xef, 0xa0, 0x98, 0x8c, 0xfe, 0x67, 0x77, 0x6a, 0x1e, 0x9c, 0x5f, 0x11, 0xe5, 0xe2, - 0x8a, 0x28, 0xb7, 0x57, 0x04, 0x7d, 0x8e, 0x08, 0xfa, 0x1e, 0x11, 0x74, 0x16, 0x11, 0x74, 0x1e, - 0x11, 0xf4, 0x2b, 0x22, 0xe8, 0x26, 0x22, 0xca, 0x6d, 0x44, 0xd0, 0xd7, 0x6b, 0xa2, 0x9c, 0x5f, - 0x13, 0xe5, 0xe2, 0x9a, 0x28, 0x07, 0x8f, 0x2d, 0x11, 0x9e, 0x4c, 0x0e, 0x6b, 0x23, 0xd7, 0xa9, - 0x5b, 0x3e, 0x3b, 0x66, 0x63, 0x56, 0xb7, 0xdd, 0x53, 0x51, 0x5f, 0x7c, 0xb4, 0x87, 0x45, 0xf9, - 0x79, 0xfa, 0x3b, 0x00, 0x00, 0xff, 0xff, 0x8b, 0x68, 0x28, 0x2b, 0x07, 0x04, 0x00, 0x00, + 0x10, 0xf6, 0x26, 0x69, 0x92, 0x4e, 0xff, 0xfc, 0xac, 0x55, 0xf5, 0xc3, 0x04, 0x69, 0x53, 0xcc, + 0x81, 0x1e, 0x50, 0x22, 0xb5, 0x12, 0x08, 0xc4, 0x25, 0x41, 0x69, 0xa8, 0x68, 0xfe, 0xb0, 0x76, + 0xa8, 0x40, 0x42, 0xd1, 0x36, 0xdd, 0xba, 0xab, 0xda, 0xb1, 0xb1, 0x9d, 0x4a, 0xb9, 0xf1, 0x02, + 0x48, 0x9c, 0x79, 0x02, 0x9e, 0x80, 0x67, 0xe8, 0xb1, 0xc7, 0x8a, 0x43, 0x45, 0xdd, 0x4b, 0x8f, + 0x7d, 0x04, 0xe4, 0xb5, 0x13, 0xb7, 0x48, 0xdc, 0x38, 0x79, 0x66, 0xbe, 0xef, 0x9b, 0x19, 0x7d, + 0xe3, 0x85, 0x8a, 0x77, 0x6c, 0xd5, 0x6d, 0xd7, 0xf2, 0x7c, 0x37, 0x74, 0xeb, 0x0e, 0x0f, 0x7d, + 0x31, 0x0a, 0x6a, 0x32, 0xc3, 0xe5, 0x59, 0xbd, 0xb2, 0x66, 0xb9, 0x96, 0x9b, 0x50, 0xe2, 0x28, + 0xc1, 0x2b, 0x0f, 0xee, 0x68, 0x67, 0x41, 0x02, 0xea, 0x3f, 0x72, 0xb0, 0xbc, 0xe7, 0x8b, 0x90, + 0x53, 0xfe, 0x69, 0xc2, 0x83, 0x10, 0xf7, 0x01, 0x42, 0xe1, 0xf0, 0x80, 0xfb, 0x82, 0x07, 0x1a, + 0x5a, 0xcf, 0x6f, 0x2c, 0x6d, 0xae, 0xd5, 0xe6, 0x2a, 0x53, 0x38, 0xdc, 0x90, 0x58, 0xb3, 0x72, + 0x7a, 0x51, 0x55, 0x7e, 0x5e, 0x54, 0x71, 0xdf, 0xe7, 0xcc, 0xb6, 0xdd, 0x91, 0x39, 0xd7, 0xd1, + 0x5b, 0x3d, 0xf0, 0x73, 0x28, 0x1a, 0xee, 0xc4, 0x1f, 0x71, 0x2d, 0xb7, 0x8e, 0x36, 0x56, 0x37, + 0x1f, 0x66, 0xdd, 0x6e, 0x4f, 0xae, 0x25, 0xa4, 0xd6, 0x78, 0xe2, 0xd0, 0x54, 0x80, 0x5f, 0x40, + 0xd9, 0xe1, 0x21, 0x3b, 0x60, 0x21, 0xd3, 0xf2, 0x72, 0x15, 0x2d, 0x13, 0x77, 0xa4, 0x0b, 0x9d, + 0x14, 0x6f, 0x16, 
0x4e, 0x2f, 0xaa, 0x88, 0xce, 0xf9, 0xf8, 0x25, 0x54, 0x82, 0x63, 0xe1, 0x0d, + 0x6d, 0xb6, 0xcf, 0xed, 0xe1, 0x98, 0x39, 0x7c, 0x78, 0xc2, 0x6c, 0x71, 0xc0, 0x42, 0xe1, 0x8e, + 0xb5, 0xeb, 0xd2, 0x3a, 0xda, 0x28, 0xd3, 0x7b, 0x31, 0x65, 0x37, 0x66, 0x74, 0x99, 0xc3, 0xdf, + 0xcd, 0x71, 0xbd, 0x0a, 0x90, 0xed, 0x83, 0x4b, 0x90, 0x6f, 0xf4, 0x77, 0x54, 0x05, 0x97, 0xa1, + 0x40, 0x07, 0xbb, 0x2d, 0x15, 0xe9, 0xff, 0xc1, 0x4a, 0xba, 0x7d, 0xe0, 0xb9, 0xe3, 0x80, 0xeb, + 0x5f, 0x10, 0x40, 0xe6, 0x0e, 0x6e, 0x43, 0x51, 0x4e, 0x9e, 0x79, 0x78, 0x3f, 0x5b, 0x7c, 0x97, + 0x5b, 0x6c, 0x34, 0x95, 0x53, 0xfb, 0x4c, 0xf8, 0xcd, 0xb5, 0xd4, 0xc8, 0x65, 0x59, 0x6a, 0x1c, + 0x30, 0x2f, 0xe4, 0x3e, 0x4d, 0xe5, 0xf8, 0x29, 0x94, 0x02, 0xe6, 0x78, 0x36, 0x0f, 0xb4, 0x9c, + 0xec, 0xf4, 0xff, 0x9f, 0x9d, 0x0c, 0x09, 0x4b, 0x03, 0x14, 0x3a, 0x23, 0xeb, 0xdf, 0x72, 0xb0, + 0x7a, 0xd7, 0x22, 0xfc, 0x0c, 0x0a, 0xe1, 0xd4, 0xe3, 0x1a, 0x92, 0x77, 0x78, 0xf4, 0x37, 0x2b, + 0xd3, 0xd4, 0x9c, 0x7a, 0x9c, 0x4a, 0x01, 0x7e, 0x02, 0x38, 0xf9, 0xe7, 0x86, 0x87, 0xcc, 0x11, + 0xf6, 0x54, 0xda, 0x29, 0xcf, 0xb9, 0x48, 0xd5, 0x04, 0xd9, 0x96, 0x40, 0xec, 0x22, 0xc6, 0x50, + 0x38, 0xe2, 0xb6, 0xa7, 0x15, 0x24, 0x2e, 0xe3, 0xb8, 0x36, 0x19, 0x8b, 0x50, 0x5b, 0x48, 0x6a, + 0x71, 0xac, 0x4f, 0x01, 0xb2, 0x49, 0x78, 0x09, 0x4a, 0x83, 0xee, 0x9b, 0x6e, 0x6f, 0xaf, 0xab, + 0x2a, 0x71, 0xf2, 0xaa, 0x37, 0xe8, 0x9a, 0x2d, 0xaa, 0x22, 0xbc, 0x08, 0x0b, 0xed, 0xc6, 0xa0, + 0xdd, 0x52, 0x73, 0x78, 0x05, 0x16, 0x5f, 0xef, 0x18, 0x66, 0xaf, 0x4d, 0x1b, 0x1d, 0x35, 0x8f, + 0x31, 0xac, 0x4a, 0x24, 0xab, 0x15, 0x62, 0xa9, 0x31, 0xe8, 0x74, 0x1a, 0xf4, 0xbd, 0xba, 0x10, + 0xdf, 0x6b, 0xa7, 0xbb, 0xdd, 0x53, 0x8b, 0x78, 0x19, 0xca, 0x86, 0xd9, 0x30, 0x5b, 0x46, 0xcb, + 0x54, 0x4b, 0xfa, 0x5b, 0x28, 0x26, 0xa3, 0xff, 0xd9, 0x9d, 0x9a, 0x1f, 0xcf, 0x2e, 0x89, 0x72, + 0x7e, 0x49, 0x94, 0x9b, 0x4b, 0x82, 0x3e, 0x47, 0x04, 0x7d, 0x8f, 0x08, 0x3a, 0x8d, 0x08, 0x3a, + 0x8b, 0x08, 0xfa, 0x15, 0x11, 0x74, 0x1d, 0x11, 0xe5, 
0x26, 0x22, 0xe8, 0xeb, 0x15, 0x51, 0xce, + 0xae, 0x88, 0x72, 0x7e, 0x45, 0x94, 0x0f, 0x8f, 0x2d, 0x11, 0x1e, 0x4d, 0xf6, 0x6b, 0x23, 0xd7, + 0xa9, 0x5b, 0x3e, 0x3b, 0x64, 0x63, 0x56, 0xb7, 0xdd, 0x63, 0x51, 0x3f, 0xd9, 0xaa, 0xdf, 0x7e, + 0xb7, 0xfb, 0x45, 0xf9, 0xd9, 0xfa, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x39, 0xa1, 0x2c, 0xef, 0x0a, + 0x04, 0x00, 0x00, } func (x WriteRequest_SourceEnum) String() string { diff --git a/pkg/logproto/metrics.proto b/pkg/logproto/metrics.proto index d0cfef0a69f58..1fc92f836312d 100644 --- a/pkg/logproto/metrics.proto +++ b/pkg/logproto/metrics.proto @@ -5,7 +5,7 @@ package logproto; import "gogoproto/gogo.proto"; import "pkg/logproto/logproto.proto"; -option go_package = "github.com/grafana/loki/pkg/logproto"; +option go_package = "github.com/grafana/loki/v3/pkg/logproto"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; diff --git a/pkg/logproto/sketch.pb.go b/pkg/logproto/sketch.pb.go index c555d64d55970..ef0fa986b1e33 100644 --- a/pkg/logproto/sketch.pb.go +++ b/pkg/logproto/sketch.pb.go @@ -656,46 +656,47 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/sketch.proto", fileDescriptor_7f9fd40e59b87ff3) } var fileDescriptor_7f9fd40e59b87ff3 = []byte{ - // 623 bytes of a gzipped FileDescriptorProto + // 626 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0x41, 0x4f, 0xd4, 0x4e, - 0x14, 0xef, 0xfc, 0x77, 0xff, 0xcb, 0xf2, 0x16, 0x88, 0x8e, 0xc4, 0xd4, 0xc5, 0x4c, 0xd6, 0xc6, - 0x28, 0xd1, 0xb8, 0x9b, 0x40, 0x42, 0x38, 0x83, 0x07, 0x12, 0x45, 0x71, 0x20, 0xc6, 0x70, 0x31, - 0x43, 0x3b, 0x74, 0x27, 0xdb, 0x76, 0x9a, 0xce, 0x2c, 0xe0, 0xcd, 0x4f, 0x60, 0x8c, 0x9f, 0xc2, - 0xab, 0x1f, 0xc1, 0x9b, 0x47, 0x8e, 0x1c, 0xa5, 0x5c, 0x3c, 0xf2, 0x11, 0xcc, 0x4c, 0xdb, 0x85, - 0x2e, 0x31, 0x7a, 0xda, 0x79, 0xbf, 0xf7, 0x7b, 0xbf, 0xf9, 0xcd, 0x7b, 0x7d, 0x0b, 0xf7, 0xd2, - 0x51, 0x38, 0x88, 0x64, 0x98, 0x66, 0x52, 0xcb, 0x81, 0x1a, 0x71, 
0xed, 0x0f, 0xfb, 0x36, 0xc0, - 0xed, 0x0a, 0xee, 0x2e, 0xd5, 0x48, 0xd5, 0xa1, 0xa0, 0x79, 0xaf, 0x60, 0xf1, 0xcd, 0x98, 0x25, - 0x5a, 0x44, 0x7c, 0xd7, 0x96, 0x6f, 0x33, 0x9d, 0x89, 0x13, 0xbc, 0x06, 0xad, 0x23, 0x16, 0x8d, - 0xb9, 0x72, 0x51, 0xaf, 0xb1, 0xdc, 0x59, 0x21, 0xfd, 0x49, 0x61, 0x9d, 0xff, 0x96, 0xfb, 0x5a, - 0x66, 0xb4, 0x64, 0x7b, 0x3b, 0xd3, 0x7a, 0x45, 0x1e, 0xaf, 0xc3, 0x8c, 0x62, 0x71, 0x1a, 0xfd, - 0x5d, 0x70, 0xd7, 0xd2, 0x68, 0x45, 0xf7, 0x3e, 0xa1, 0x69, 0xc9, 0x82, 0x81, 0x1f, 0x01, 0x3a, - 0x74, 0x51, 0x0f, 0x2d, 0x77, 0x56, 0xdc, 0x3f, 0x89, 0x51, 0x74, 0x88, 0x1f, 0xc0, 0x9c, 0x16, - 0x31, 0x57, 0x9a, 0xc5, 0xe9, 0xfb, 0x58, 0xb9, 0xff, 0xf5, 0xd0, 0x72, 0x83, 0x76, 0x26, 0xd8, - 0xb6, 0xc2, 0x4f, 0xa1, 0x15, 0x73, 0x9d, 0x09, 0xdf, 0x6d, 0x58, 0x73, 0x77, 0xae, 0xf4, 0x5e, - 0xb2, 0x03, 0x1e, 0xed, 0x30, 0x91, 0xd1, 0x92, 0xe2, 0x85, 0xb0, 0x50, 0xbf, 0x04, 0x3f, 0x83, - 0x19, 0x1d, 0x88, 0x90, 0x2b, 0x5d, 0xfa, 0xb9, 0x7d, 0x55, 0xbf, 0xf7, 0xdc, 0x26, 0xb6, 0x1c, - 0x5a, 0x71, 0xf0, 0x7d, 0x68, 0x07, 0x41, 0x31, 0x2c, 0x6b, 0x66, 0x6e, 0xcb, 0xa1, 0x13, 0x64, - 0xa3, 0x0d, 0xad, 0xe2, 0xe4, 0x7d, 0x47, 0x30, 0x53, 0x96, 0xe3, 0x5b, 0xd0, 0x88, 0x45, 0x62, - 0xe5, 0x11, 0x35, 0x47, 0x8b, 0xb0, 0x13, 0x2b, 0x60, 0x10, 0x76, 0x82, 0x7b, 0xd0, 0xf1, 0x65, - 0x9c, 0x66, 0x5c, 0x29, 0x21, 0x13, 0xb7, 0x61, 0x33, 0xd7, 0x21, 0xbc, 0x0e, 0xb3, 0x69, 0x26, - 0x7d, 0xae, 0x14, 0x0f, 0xdc, 0xa6, 0x7d, 0x6a, 0xf7, 0x86, 0xd5, 0xfe, 0x26, 0x4f, 0x74, 0x26, - 0x45, 0x40, 0xaf, 0xc8, 0xdd, 0x35, 0x68, 0x57, 0x30, 0xc6, 0xd0, 0x8c, 0x39, 0xab, 0xcc, 0xd8, - 0x33, 0xbe, 0x0b, 0xad, 0x63, 0x2e, 0xc2, 0xa1, 0x2e, 0x0d, 0x95, 0x91, 0xf7, 0x0e, 0x16, 0x36, - 0xe5, 0x38, 0xd1, 0xdb, 0x22, 0x29, 0x9b, 0xb5, 0x08, 0xff, 0x07, 0x3c, 0xd5, 0x43, 0x5b, 0x3e, - 0x4f, 0x8b, 0xc0, 0xa0, 0xc7, 0x22, 0xd0, 0x45, 0x43, 0xe6, 0x69, 0x11, 0xe0, 0x2e, 0xb4, 0x7d, - 0x53, 0xcd, 0x33, 0x65, 0x27, 0x33, 0x4f, 0x27, 0xb1, 0xf7, 0x0d, 0x41, 0x73, 0x4f, 0xa6, 0x2f, - 0xf0, 
0x13, 0x68, 0xf8, 0xb1, 0xba, 0xf9, 0x25, 0xd4, 0xef, 0xa5, 0x86, 0x84, 0x1f, 0x43, 0x33, - 0x12, 0xca, 0x98, 0x9c, 0x1a, 0xb3, 0x51, 0xea, 0xdb, 0x31, 0x5b, 0x82, 0xe9, 0xe5, 0xf0, 0x43, - 0xca, 0xb3, 0x48, 0x86, 0x91, 0x0c, 0x6d, 0x2f, 0xe7, 0xe8, 0x75, 0xa8, 0xbb, 0x02, 0x4d, 0xc3, - 0x37, 0xce, 0xf9, 0x11, 0x4f, 0x8a, 0xd1, 0xcf, 0xd2, 0x22, 0x30, 0xa8, 0x75, 0x5a, 0xbd, 0xc7, - 0x06, 0xde, 0x17, 0x04, 0x60, 0x6e, 0x2a, 0x97, 0x6c, 0x75, 0x6a, 0xc9, 0x96, 0xea, 0x7e, 0x0a, - 0x56, 0xbf, 0xbe, 0x61, 0xdd, 0xd7, 0xd0, 0x2a, 0x77, 0xca, 0x83, 0xa6, 0x96, 0xe9, 0xa8, 0x7c, - 0xf9, 0x42, 0xbd, 0x98, 0xda, 0xdc, 0x3f, 0x7c, 0xfc, 0x1b, 0xfb, 0xa7, 0xe7, 0xc4, 0x39, 0x3b, - 0x27, 0xce, 0xe5, 0x39, 0x41, 0x1f, 0x73, 0x82, 0xbe, 0xe6, 0x04, 0xfd, 0xc8, 0x09, 0x3a, 0xcd, - 0x09, 0xfa, 0x99, 0x13, 0xf4, 0x2b, 0x27, 0xce, 0x65, 0x4e, 0xd0, 0xe7, 0x0b, 0xe2, 0x9c, 0x5e, - 0x10, 0xe7, 0xec, 0x82, 0x38, 0xfb, 0x0f, 0x43, 0xa1, 0x87, 0xe3, 0x83, 0xbe, 0x2f, 0xe3, 0x41, - 0x98, 0xb1, 0x43, 0x96, 0xb0, 0x41, 0x24, 0x47, 0x62, 0x70, 0xfd, 0xdf, 0xe6, 0xa0, 0x65, 0x7f, - 0x56, 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, 0x24, 0x9c, 0x74, 0xb7, 0xa9, 0x04, 0x00, 0x00, + 0x14, 0xef, 0xfc, 0x77, 0xff, 0xcb, 0xf2, 0x16, 0x88, 0x8e, 0xc4, 0xd4, 0xc5, 0x4c, 0xd6, 0x1e, + 0x84, 0x68, 0xdc, 0x4d, 0x20, 0x21, 0x9c, 0xc1, 0x03, 0x89, 0xa2, 0x38, 0x10, 0x63, 0x4c, 0x8c, + 0x19, 0xda, 0xa1, 0x3b, 0xd9, 0xb6, 0xd3, 0x74, 0x66, 0x01, 0x6f, 0x7e, 0x02, 0x63, 0xfc, 0x14, + 0x5e, 0xfd, 0x08, 0xde, 0x3c, 0x72, 0xe4, 0x28, 0xe5, 0xe2, 0x91, 0x8f, 0x60, 0x66, 0xda, 0x2e, + 0x74, 0x89, 0xd1, 0xd3, 0xce, 0xfb, 0xbd, 0xdf, 0xfb, 0xcd, 0x6f, 0xde, 0xeb, 0x5b, 0xb8, 0x97, + 0x8e, 0xc2, 0x41, 0x24, 0xc3, 0x34, 0x93, 0x5a, 0x0e, 0xd4, 0x88, 0x6b, 0x7f, 0xd8, 0xb7, 0x01, + 0x6e, 0x57, 0x70, 0x77, 0xa9, 0x46, 0xaa, 0x0e, 0x05, 0xcd, 0x7b, 0x01, 0x8b, 0xaf, 0xc6, 0x2c, + 0xd1, 0x22, 0xe2, 0x7b, 0xb6, 0x7c, 0x87, 0xe9, 0x4c, 0x9c, 0xe0, 0x75, 0x68, 0x1d, 0xb1, 0x68, + 0xcc, 0x95, 0x8b, 0x7a, 0x8d, 0x95, 0xce, 0x2a, 
0xe9, 0x4f, 0x0a, 0xeb, 0xfc, 0xd7, 0xdc, 0xd7, + 0x32, 0xa3, 0x25, 0xdb, 0xdb, 0x9d, 0xd6, 0x2b, 0xf2, 0x78, 0x03, 0x66, 0x14, 0x8b, 0xd3, 0xe8, + 0xef, 0x82, 0x7b, 0x96, 0x46, 0x2b, 0xba, 0xf7, 0x09, 0x4d, 0x4b, 0x16, 0x0c, 0xfc, 0x10, 0xd0, + 0xa1, 0x8b, 0x7a, 0x68, 0xa5, 0xb3, 0xea, 0xfe, 0x49, 0x8c, 0xa2, 0x43, 0xfc, 0x00, 0xe6, 0xb4, + 0x88, 0xb9, 0xd2, 0x2c, 0x4e, 0xdf, 0xc7, 0xca, 0xfd, 0xaf, 0x87, 0x56, 0x1a, 0xb4, 0x33, 0xc1, + 0x76, 0x14, 0x7e, 0x0c, 0xad, 0x98, 0xeb, 0x4c, 0xf8, 0x6e, 0xc3, 0x9a, 0xbb, 0x73, 0xa5, 0xf7, + 0x9c, 0x1d, 0xf0, 0x68, 0x97, 0x89, 0x8c, 0x96, 0x14, 0x2f, 0x84, 0x85, 0xfa, 0x25, 0xf8, 0x09, + 0xcc, 0xe8, 0x40, 0x84, 0x5c, 0xe9, 0xd2, 0xcf, 0xed, 0xab, 0xfa, 0xfd, 0xa7, 0x36, 0xb1, 0xed, + 0xd0, 0x8a, 0x83, 0xef, 0x43, 0x3b, 0x08, 0x8a, 0x61, 0x59, 0x33, 0x73, 0xdb, 0x0e, 0x9d, 0x20, + 0x9b, 0x6d, 0x68, 0x15, 0x27, 0xef, 0x3b, 0x82, 0x99, 0xb2, 0x1c, 0xdf, 0x82, 0x46, 0x2c, 0x12, + 0x2b, 0x8f, 0xa8, 0x39, 0x5a, 0x84, 0x9d, 0x58, 0x01, 0x83, 0xb0, 0x13, 0xdc, 0x83, 0x8e, 0x2f, + 0xe3, 0x34, 0xe3, 0x4a, 0x09, 0x99, 0xb8, 0x0d, 0x9b, 0xb9, 0x0e, 0xe1, 0x0d, 0x98, 0x4d, 0x33, + 0xe9, 0x73, 0xa5, 0x78, 0xe0, 0x36, 0xed, 0x53, 0xbb, 0x37, 0xac, 0xf6, 0xb7, 0x78, 0xa2, 0x33, + 0x29, 0x02, 0x7a, 0x45, 0xee, 0xae, 0x43, 0xbb, 0x82, 0x31, 0x86, 0x66, 0xcc, 0x59, 0x65, 0xc6, + 0x9e, 0xf1, 0x5d, 0x68, 0x1d, 0x73, 0x11, 0x0e, 0x75, 0x69, 0xa8, 0x8c, 0xbc, 0x37, 0xb0, 0xb0, + 0x25, 0xc7, 0x89, 0xde, 0x11, 0x49, 0xd9, 0xac, 0x45, 0xf8, 0x3f, 0xe0, 0xa9, 0x1e, 0xda, 0xf2, + 0x79, 0x5a, 0x04, 0x06, 0x3d, 0x16, 0x81, 0x2e, 0x1a, 0x32, 0x4f, 0x8b, 0x00, 0x77, 0xa1, 0xed, + 0x9b, 0x6a, 0x9e, 0x29, 0x3b, 0x99, 0x79, 0x3a, 0x89, 0xbd, 0x6f, 0x08, 0x9a, 0xfb, 0x32, 0x7d, + 0x86, 0x1f, 0x41, 0xc3, 0x8f, 0xd5, 0xcd, 0x2f, 0xa1, 0x7e, 0x2f, 0x35, 0x24, 0xbc, 0x0c, 0xcd, + 0x48, 0x28, 0x63, 0x72, 0x6a, 0xcc, 0x46, 0xa9, 0x6f, 0xc7, 0x6c, 0x09, 0xa6, 0x97, 0xc3, 0x0f, + 0x29, 0xcf, 0x22, 0x19, 0x46, 0x32, 0xb4, 0xbd, 0x9c, 0xa3, 0xd7, 0xa1, 0xee, 0x2a, 
0x34, 0x0d, + 0xdf, 0x38, 0xe7, 0x47, 0x3c, 0x29, 0x46, 0x3f, 0x4b, 0x8b, 0xc0, 0xa0, 0xd6, 0x69, 0xf5, 0x1e, + 0x1b, 0x78, 0x5f, 0x10, 0x80, 0xb9, 0xa9, 0x5c, 0xb2, 0xb5, 0xa9, 0x25, 0x5b, 0xaa, 0xfb, 0x29, + 0x58, 0xfd, 0xfa, 0x86, 0x75, 0x5f, 0x42, 0xab, 0xdc, 0x29, 0x0f, 0x9a, 0x5a, 0xa6, 0xa3, 0xf2, + 0xe5, 0x0b, 0xf5, 0x62, 0x6a, 0x73, 0xff, 0xf0, 0xf1, 0x6f, 0xbe, 0x3b, 0x3d, 0x27, 0xce, 0xd9, + 0x39, 0x71, 0x2e, 0xcf, 0x09, 0xfa, 0x98, 0x13, 0xf4, 0x35, 0x27, 0xe8, 0x47, 0x4e, 0xd0, 0x69, + 0x4e, 0xd0, 0xcf, 0x9c, 0xa0, 0x5f, 0x39, 0x71, 0x2e, 0x73, 0x82, 0x3e, 0x5f, 0x10, 0xe7, 0xf4, + 0x82, 0x38, 0x67, 0x17, 0xc4, 0x79, 0xbb, 0x1c, 0x0a, 0x3d, 0x1c, 0x1f, 0xf4, 0x7d, 0x19, 0x0f, + 0xc2, 0x8c, 0x1d, 0xb2, 0x84, 0x0d, 0x22, 0x39, 0x12, 0x83, 0xa3, 0xb5, 0xc1, 0xf5, 0x3f, 0x9c, + 0x83, 0x96, 0xfd, 0x59, 0xfb, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x9e, 0x82, 0x05, 0x2f, 0xac, 0x04, + 0x00, 0x00, } func (this *QuantileSketchMatrix) Equal(that interface{}) bool { diff --git a/pkg/logproto/sketch.proto b/pkg/logproto/sketch.proto index d8ffeb0110340..e551716db3742 100644 --- a/pkg/logproto/sketch.proto +++ b/pkg/logproto/sketch.proto @@ -4,7 +4,7 @@ package logproto; import "pkg/logproto/logproto.proto"; -option go_package = "github.com/grafana/loki/pkg/logproto"; +option go_package = "github.com/grafana/loki/v3/pkg/logproto"; message QuantileSketchMatrix { repeated QuantileSketchVector values = 1; diff --git a/pkg/logql/accumulator.go b/pkg/logql/accumulator.go index 9e9784cb037ef..613a99fc43fd9 100644 --- a/pkg/logql/accumulator.go +++ b/pkg/logql/accumulator.go @@ -7,12 +7,12 @@ import ( "sort" "time" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/metadata" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/logproto" + 
"github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/metadata" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/util/math" ) // NewBufferedAccumulator returns an accumulator which aggregates all query diff --git a/pkg/logql/accumulator_test.go b/pkg/logql/accumulator_test.go index d827e3ea02e71..b9b8b86760d04 100644 --- a/pkg/logql/accumulator_test.go +++ b/pkg/logql/accumulator_test.go @@ -10,9 +10,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/sketch" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/sketch" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func TestAccumulatedStreams(t *testing.T) { diff --git a/pkg/logql/blocker.go b/pkg/logql/blocker.go index 9a07113c40dd3..eaa6e1d7b1291 100644 --- a/pkg/logql/blocker.go +++ b/pkg/logql/blocker.go @@ -8,9 +8,9 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/regexp" - "github.com/grafana/loki/pkg/util" - logutil "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/util" + logutil "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/validation" ) type queryBlocker struct { diff --git a/pkg/logql/blocker_test.go b/pkg/logql/blocker_test.go index 9fa586a02db80..c39d77c074031 100644 --- a/pkg/logql/blocker_test.go +++ b/pkg/logql/blocker_test.go @@ -10,10 +10,10 @@ import ( "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logproto" + 
"github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/validation" ) func TestEngine_ExecWithBlockedQueries(t *testing.T) { diff --git a/pkg/logql/downstream.go b/pkg/logql/downstream.go index 11f94132a4bf6..eea9b28d3059b 100644 --- a/pkg/logql/downstream.go +++ b/pkg/logql/downstream.go @@ -10,15 +10,14 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/prometheus/promql" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/metadata" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/metadata" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) /* @@ -75,7 +74,7 @@ func (ng *DownstreamEngine) Query(ctx context.Context, p Params) Query { // DownstreamSampleExpr is a SampleExpr which signals downstream computation type DownstreamSampleExpr struct { - shard *astmapper.ShardAnnotation + shard *Shard syntax.SampleExpr } @@ -108,7 +107,7 @@ func (d DownstreamSampleExpr) Pretty(level int) string { // DownstreamLogSelectorExpr is a LogSelectorExpr which signals downstream computation type DownstreamLogSelectorExpr struct { - shard *astmapper.ShardAnnotation + shard *Shard syntax.LogSelectorExpr } @@ -302,50 +301,6 @@ func (e *QuantileSketchMergeExpr) Walk(f syntax.WalkFn) { } } -type Shards []astmapper.ShardAnnotation - -func (xs Shards) Encode() (encoded []string) { - for _, shard := range xs { 
- encoded = append(encoded, shard.String()) - } - - return encoded -} - -// ParseShards parses a list of string encoded shards -func ParseShards(strs []string) (Shards, error) { - if len(strs) == 0 { - return nil, nil - } - shards := make([]astmapper.ShardAnnotation, 0, len(strs)) - - for _, str := range strs { - shard, err := astmapper.ParseShard(str) - if err != nil { - return nil, err - } - shards = append(shards, shard) - } - return shards, nil -} - -func ParseShardCount(strs []string) int { - if len(strs) == 0 { - return 0 - } - - for _, str := range strs { - shard, err := astmapper.ParseShard(str) - if err != nil { - continue - } - - return shard.Of - } - - return 0 -} - type Downstreamable interface { Downstreamer(context.Context) Downstreamer } @@ -435,7 +390,7 @@ func (ev *DownstreamEvaluator) NewStepEvaluator( case DownstreamSampleExpr: // downstream to a querier - var shards []astmapper.ShardAnnotation + var shards Shards if e.shard != nil { shards = append(shards, *e.shard) } @@ -443,7 +398,7 @@ func (ev *DownstreamEvaluator) NewStepEvaluator( results, err := ev.Downstream(ctx, []DownstreamQuery{{ Params: ParamsWithShardsOverride{ Params: ParamsWithExpressionOverride{Params: params, ExpressionOverride: e.SampleExpr}, - ShardsOverride: Shards(shards).Encode(), + ShardsOverride: shards.Encode(), }, }}, acc) if err != nil { diff --git a/pkg/logql/downstream_test.go b/pkg/logql/downstream_test.go index b3b8c6f37e48a..fa179502d6b7b 100644 --- a/pkg/logql/downstream_test.go +++ b/pkg/logql/downstream_test.go @@ -13,9 +13,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) var nilShardMetrics = 
NewShardMapperMetrics(nil) @@ -96,7 +96,8 @@ func TestMappingEquivalence(t *testing.T) { qry := regular.Query(params) ctx := user.InjectOrgID(context.Background(), "fake") - mapper := NewShardMapper(ConstantShards(shards), nilShardMetrics, []string{}) + strategy := NewPowerOfTwoStrategy(ConstantShards(shards)) + mapper := NewShardMapper(strategy, nilShardMetrics, []string{}) // TODO (callum) refactor this test so that we won't need to set every // possible sharding config option to true when we have multiple in the future if tc.approximate { @@ -166,7 +167,8 @@ func TestMappingEquivalenceSketches(t *testing.T) { qry := regular.Query(params) ctx := user.InjectOrgID(context.Background(), "fake") - mapper := NewShardMapper(ConstantShards(shards), nilShardMetrics, []string{ShardQuantileOverTime}) + strategy := NewPowerOfTwoStrategy(ConstantShards(shards)) + mapper := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) _, _, mapped, err := mapper.Parse(params.GetExpression()) require.NoError(t, err) @@ -200,7 +202,8 @@ func TestMappingEquivalenceSketches(t *testing.T) { qry := regular.Query(params) ctx := user.InjectOrgID(context.Background(), "fake") - mapper := NewShardMapper(ConstantShards(shards), nilShardMetrics, []string{ShardQuantileOverTime}) + strategy := NewPowerOfTwoStrategy(ConstantShards(shards)) + mapper := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) _, _, mapped, err := mapper.Parse(params.GetExpression()) require.NoError(t, err) @@ -265,7 +268,8 @@ func TestShardCounter(t *testing.T) { require.NoError(t, err) ctx := user.InjectOrgID(context.Background(), "fake") - mapper := NewShardMapper(ConstantShards(shards), nilShardMetrics, []string{ShardQuantileOverTime}) + strategy := NewPowerOfTwoStrategy(ConstantShards(shards)) + mapper := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) noop, _, mapped, err := mapper.Parse(params.GetExpression()) require.NoError(t, err) @@ -620,10 
+624,10 @@ func TestFormat_ShardedExpr(t *testing.T) { name: "ConcatSampleExpr", in: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 3, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -636,10 +640,10 @@ func TestFormat_ShardedExpr(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 3, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -652,10 +656,10 @@ func TestFormat_ShardedExpr(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 3, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -701,7 +705,8 @@ func TestPrettierWithoutShards(t *testing.T) { q := `((quantile_over_time(0.5,{foo="bar"} | json | unwrap bytes[1d]) by (cluster) > 42) and (count by (cluster)(max_over_time({foo="baz"} |= "error" | json | unwrap bytes[1d]) by (cluster,namespace)) > 10))` e := syntax.MustParseExpr(q) - mapper := NewShardMapper(ConstantShards(4), nilShardMetrics, []string{}) + strategy := NewPowerOfTwoStrategy(ConstantShards(4)) + mapper := NewShardMapper(strategy, nilShardMetrics, []string{}) _, _, mapped, err := mapper.Parse(e) require.NoError(t, err) got := syntax.Prettify(mapped) @@ -738,42 +743,3 @@ and 10` assert.Equal(t, expected, got) } - -func TestParseShardCount(t *testing.T) { - for _, st := range []struct { - name string - shards []string - expected int - }{ - { - name: "empty shards", - shards: []string{}, - expected: 0, - }, - { - name: "single shard", - shards: 
[]string{"0_of_3"}, - expected: 3, - }, - { - name: "single shard with error", - shards: []string{"0_of_"}, - expected: 0, - }, - { - name: "multiple shards", - shards: []string{"0_of_3", "0_of_4"}, - expected: 3, - }, - { - name: "multiple shards with errors", - shards: []string{"_of_3", "0_of_4"}, - expected: 4, - }, - } { - t.Run(st.name, func(t *testing.T) { - require.Equal(t, st.expected, ParseShardCount(st.shards)) - }) - - } -} diff --git a/pkg/logql/engine.go b/pkg/logql/engine.go index a9f3dabe14eed..fd89591794359 100644 --- a/pkg/logql/engine.go +++ b/pkg/logql/engine.go @@ -13,7 +13,7 @@ import ( "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/logqlmodel/metadata" + "github.com/grafana/loki/v3/pkg/logqlmodel/metadata" "github.com/go-kit/log" "github.com/go-kit/log/level" @@ -26,18 +26,18 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/httpreq" - logutil "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/server" - "github.com/grafana/loki/pkg/util/spanlogger" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/httpreq" + logutil "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/server" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/util/validation" ) const ( diff 
--git a/pkg/logql/engine_test.go b/pkg/logql/engine_test.go index 1391b40ff4248..2e354bdf5b8d3 100644 --- a/pkg/logql/engine_test.go +++ b/pkg/logql/engine_test.go @@ -11,9 +11,9 @@ import ( "testing" "time" - "github.com/grafana/loki/pkg/logqlmodel/metadata" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/logqlmodel/metadata" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" "github.com/go-kit/log" "github.com/grafana/dskit/user" @@ -24,13 +24,13 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) var ( diff --git a/pkg/logql/evaluator.go b/pkg/logql/evaluator.go index 903c4a5555b2d..eb7958691ac64 100644 --- a/pkg/logql/evaluator.go +++ b/pkg/logql/evaluator.go @@ -13,12 +13,12 @@ import ( "github.com/prometheus/prometheus/promql" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + 
"github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/util" ) type QueryRangeType string diff --git a/pkg/logql/evaluator_test.go b/pkg/logql/evaluator_test.go index e31d587252066..7a9eae4e3506f 100644 --- a/pkg/logql/evaluator_test.go +++ b/pkg/logql/evaluator_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func TestDefaultEvaluator_DivideByZero(t *testing.T) { diff --git a/pkg/logql/explain_test.go b/pkg/logql/explain_test.go index 307aa10cfa98d..d6984683aec1b 100644 --- a/pkg/logql/explain_test.go +++ b/pkg/logql/explain_test.go @@ -10,7 +10,7 @@ import ( "github.com/grafana/dskit/user" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func TestExplain(t *testing.T) { @@ -28,7 +28,8 @@ func TestExplain(t *testing.T) { defaultEv := NewDefaultEvaluator(querier, 30*time.Second) downEv := &DownstreamEvaluator{Downstreamer: MockDownstreamer{regular}, defaultEvaluator: defaultEv} - mapper := NewShardMapper(ConstantShards(4), nilShardMetrics, []string{ShardQuantileOverTime}) + strategy := NewPowerOfTwoStrategy(ConstantShards(4)) + mapper := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) _, _, expr, err := mapper.Parse(syntax.MustParseExpr(query)) require.NoError(t, err) diff --git a/pkg/logql/limits.go b/pkg/logql/limits.go index 9075d9320ca43..f9742dac53ef0 100644 --- a/pkg/logql/limits.go +++ b/pkg/logql/limits.go @@ -5,7 +5,7 @@ import ( "math" "time" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/util/validation" ) var ( diff --git a/pkg/logql/log/drop_labels.go b/pkg/logql/log/drop_labels.go index 7e6b5e0b14155..0f0fcdee942f0 100644 --- a/pkg/logql/log/drop_labels.go +++ b/pkg/logql/log/drop_labels.go @@ -3,7 +3,7 @@ package log 
import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) type DropLabels struct { diff --git a/pkg/logql/log/drop_labels_test.go b/pkg/logql/log/drop_labels_test.go index 9eee5f55dd201..bce8487fd93a7 100644 --- a/pkg/logql/log/drop_labels_test.go +++ b/pkg/logql/log/drop_labels_test.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func Test_DropLabels(t *testing.T) { diff --git a/pkg/logql/log/filter.go b/pkg/logql/log/filter.go index 7b613947c8b8b..dbe5c5e99ce29 100644 --- a/pkg/logql/log/filter.go +++ b/pkg/logql/log/filter.go @@ -11,9 +11,42 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logql/log/pattern" + "github.com/grafana/loki/v3/pkg/util" ) +// LineMatchType is an enum for line matching types. +type LineMatchType int + +// Possible LineMatchTypes. +const ( + LineMatchEqual LineMatchType = iota + LineMatchNotEqual + LineMatchRegexp + LineMatchNotRegexp + LineMatchPattern + LineMatchNotPattern +) + +func (t LineMatchType) String() string { + switch t { + case LineMatchEqual: + return "|=" + case LineMatchNotEqual: + return "!=" + case LineMatchRegexp: + return "|~" + case LineMatchNotRegexp: + return "!~" + case LineMatchPattern: + return "|>" + case LineMatchNotPattern: + return "!>" + default: + return "" + } +} + // Checker is an interface that matches against the input line or regexp. type Checker interface { Test(line []byte, caseInsensitive bool, equal bool) bool @@ -517,16 +550,20 @@ func (f containsAllFilter) Matches(test Checker) bool { } // NewFilter creates a new line filter from a match string and type. 
-func NewFilter(match string, mt labels.MatchType) (Filterer, error) { +func NewFilter(match string, mt LineMatchType) (Filterer, error) { switch mt { - case labels.MatchRegexp: + case LineMatchRegexp: return parseRegexpFilter(match, true, false) - case labels.MatchNotRegexp: + case LineMatchNotRegexp: return parseRegexpFilter(match, false, false) - case labels.MatchEqual: + case LineMatchEqual: return newContainsFilter([]byte(match), false), nil - case labels.MatchNotEqual: + case LineMatchNotEqual: return NewNotFilter(newContainsFilter([]byte(match), false)), nil + case LineMatchPattern: + return newPatternFilterer([]byte(match), true) + case LineMatchNotPattern: + return newPatternFilterer([]byte(match), false) default: return nil, fmt.Errorf("unknown matcher: %v", match) } @@ -757,3 +794,37 @@ func (s *RegexSimplifier) simplifyConcatAlternate(reg *syntax.Regexp, literal [] } return nil, false } + +type patternFilter struct { + matcher *pattern.Matcher + pattern []byte +} + +func newPatternFilterer(p []byte, match bool) (MatcherFilterer, error) { + m, err := pattern.ParseLineFilter(p) + if err != nil { + return nil, err + } + filter := &patternFilter{ + matcher: m, + pattern: p, + } + if !match { + return NewNotFilter(filter), nil + } + return filter, nil +} + +func (f *patternFilter) Filter(line []byte) bool { return f.matcher.Test(line) } + +func (f *patternFilter) Matches(test Checker) bool { + return test.Test(f.pattern, false, false) +} + +func (f *patternFilter) ToStage() Stage { + return StageFunc{ + process: func(_ int64, line []byte, _ *LabelsBuilder) ([]byte, bool) { + return line, f.Filter(line) + }, + } +} diff --git a/pkg/logql/log/fmt.go b/pkg/logql/log/fmt.go index 34a1bb32c5e48..c69aa3d40bb01 100644 --- a/pkg/logql/log/fmt.go +++ b/pkg/logql/log/fmt.go @@ -13,7 +13,7 @@ import ( "github.com/Masterminds/sprig/v3" "github.com/grafana/regexp" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) const ( diff 
--git a/pkg/logql/log/fmt_test.go b/pkg/logql/log/fmt_test.go index 637caec29a469..2028d2e00bf8f 100644 --- a/pkg/logql/log/fmt_test.go +++ b/pkg/logql/log/fmt_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func Test_lineFormatter_Format(t *testing.T) { diff --git a/pkg/logql/log/ip.go b/pkg/logql/log/ip.go index 1508432d245c5..851cc1a9fa6c7 100644 --- a/pkg/logql/log/ip.go +++ b/pkg/logql/log/ip.go @@ -6,7 +6,6 @@ import ( "net/netip" "unicode" - "github.com/prometheus/prometheus/model/labels" "go4.org/netipx" ) @@ -27,14 +26,14 @@ type IPMatcher interface{} type IPLineFilter struct { ip *ipFilter - ty labels.MatchType + ty LineMatchType } // NewIPLineFilter is used to construct ip filter as a `LineFilter` -func NewIPLineFilter(pattern string, ty labels.MatchType) (*IPLineFilter, error) { +func NewIPLineFilter(pattern string, ty LineMatchType) (*IPLineFilter, error) { // check if `ty` supported in ip matcher. 
switch ty { - case labels.MatchEqual, labels.MatchNotEqual: + case LineMatchEqual, LineMatchNotEqual: default: return nil, ErrIPFilterInvalidOperation } @@ -69,8 +68,8 @@ func (f *IPLineFilter) RequiredLabelNames() []string { return []string{} // empty for line filter } -func (f *IPLineFilter) filterTy(line []byte, ty labels.MatchType) bool { - if ty == labels.MatchNotEqual { +func (f *IPLineFilter) filterTy(line []byte, ty LineMatchType) bool { + if ty == LineMatchNotEqual { return !f.ip.filter(line) } return f.ip.filter(line) diff --git a/pkg/logql/log/ip_test.go b/pkg/logql/log/ip_test.go index 105b3badd58f0..32b98169f7a60 100644 --- a/pkg/logql/log/ip_test.go +++ b/pkg/logql/log/ip_test.go @@ -189,7 +189,7 @@ func Test_IPLineFilterTy(t *testing.T) { cases := []struct { name string pat string - ty labels.MatchType + ty LineMatchType line []byte expectedMatch bool @@ -199,21 +199,21 @@ func Test_IPLineFilterTy(t *testing.T) { { name: "equal operator", pat: "192.168.0.1", - ty: labels.MatchEqual, + ty: LineMatchEqual, line: []byte("192.168.0.1"), expectedMatch: true, }, { name: "not equal operator", pat: "192.168.0.2", - ty: labels.MatchNotEqual, + ty: LineMatchNotEqual, line: []byte("192.168.0.1"), // match because !=ip("192.168.0.2") expectedMatch: true, }, { name: "regex not equal", pat: "192.168.0.2", - ty: labels.MatchNotRegexp, // not supported + ty: LineMatchNotRegexp, // not supported line: []byte("192.168.0.1"), fail: true, err: ErrIPFilterInvalidOperation, @@ -221,7 +221,7 @@ func Test_IPLineFilterTy(t *testing.T) { { name: "regex equal", pat: "192.168.0.2", - ty: labels.MatchRegexp, // not supported + ty: LineMatchRegexp, // not supported line: []byte("192.168.0.1"), fail: true, err: ErrIPFilterInvalidOperation, diff --git a/pkg/logql/log/keep_labels.go b/pkg/logql/log/keep_labels.go index 43ed2ab666abd..67c93ecca8fd2 100644 --- a/pkg/logql/log/keep_labels.go +++ b/pkg/logql/log/keep_labels.go @@ -3,7 +3,7 @@ package log import ( 
"github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) type KeepLabels struct { diff --git a/pkg/logql/log/keep_labels_test.go b/pkg/logql/log/keep_labels_test.go index 3f502e76c901c..11d70f0ac6549 100644 --- a/pkg/logql/log/keep_labels_test.go +++ b/pkg/logql/log/keep_labels_test.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func Test_KeepLabels(t *testing.T) { diff --git a/pkg/logql/log/label_filter.go b/pkg/logql/log/label_filter.go index a89f324008e16..49e8cbf092378 100644 --- a/pkg/logql/log/label_filter.go +++ b/pkg/logql/log/label_filter.go @@ -10,7 +10,7 @@ import ( "github.com/dustin/go-humanize" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) var ( @@ -173,10 +173,6 @@ func NewBytesLabelFilter(t LabelFilterType, name string, b uint64) *BytesLabelFi } func (d *BytesLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([]byte, bool) { - if lbs.HasErr() { - // if there's an error only the string matchers can filter it out. - return line, true - } v, ok := lbs.Get(d.Name) if !ok { // we have not found this label. 
@@ -184,8 +180,11 @@ func (d *BytesLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([] } value, err := humanize.ParseBytes(v) if err != nil { - lbs.SetErr(errLabelFilter) - lbs.SetErrorDetails(err.Error()) + // Don't overwrite what might be a more useful error + if !lbs.HasErr() { + lbs.SetErr(errLabelFilter) + lbs.SetErrorDetails(err.Error()) + } return line, true } switch d.Type { @@ -202,7 +201,9 @@ func (d *BytesLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([] case LabelFilterLesserThanOrEqual: return line, value <= d.Value default: - lbs.SetErr(errLabelFilter) + if !lbs.HasErr() { + lbs.SetErr(errLabelFilter) + } return line, true } } @@ -240,10 +241,6 @@ func NewDurationLabelFilter(t LabelFilterType, name string, d time.Duration) *Du } func (d *DurationLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([]byte, bool) { - if lbs.HasErr() { - // if there's an error only the string matchers can filter out. - return line, true - } v, ok := lbs.Get(d.Name) if !ok { // we have not found this label. 
@@ -251,8 +248,11 @@ func (d *DurationLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) } value, err := time.ParseDuration(v) if err != nil { - lbs.SetErr(errLabelFilter) - lbs.SetErrorDetails(err.Error()) + // Don't overwrite what might be a more useful error + if !lbs.HasErr() { + lbs.SetErr(errLabelFilter) + lbs.SetErrorDetails(err.Error()) + } return line, true } switch d.Type { @@ -269,7 +269,9 @@ func (d *DurationLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) case LabelFilterLesserThanOrEqual: return line, value <= d.Value default: - lbs.SetErr(errLabelFilter) + if !lbs.HasErr() { + lbs.SetErr(errLabelFilter) + } return line, true } } @@ -302,10 +304,6 @@ func NewNumericLabelFilter(t LabelFilterType, name string, v float64) *NumericLa } func (n *NumericLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([]byte, bool) { - if lbs.HasErr() { - // if there's an error only the string matchers can filter out. - return line, true - } v, ok := lbs.Get(n.Name) if !ok { // we have not found this label. 
@@ -313,8 +311,11 @@ func (n *NumericLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ( } value, err := strconv.ParseFloat(v, 64) if err != nil { - lbs.SetErr(errLabelFilter) - lbs.SetErrorDetails(err.Error()) + // Don't overwrite what might be a more useful error + if !lbs.HasErr() { + lbs.SetErr(errLabelFilter) + lbs.SetErrorDetails(err.Error()) + } return line, true } switch n.Type { @@ -331,7 +332,9 @@ func (n *NumericLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ( case LabelFilterLesserThanOrEqual: return line, value <= n.Value default: - lbs.SetErr(errLabelFilter) + if !lbs.HasErr() { + lbs.SetErr(errLabelFilter) + } return line, true } diff --git a/pkg/logql/log/label_filter_test.go b/pkg/logql/log/label_filter_test.go index 3a2e2480c33f2..b6364dc0c3fd0 100644 --- a/pkg/logql/log/label_filter_test.go +++ b/pkg/logql/log/label_filter_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func TestBinary_Filter(t *testing.T) { diff --git a/pkg/logql/log/labels.go b/pkg/logql/log/labels.go index d93af8a845e83..c68fe1af0e5b5 100644 --- a/pkg/logql/log/labels.go +++ b/pkg/logql/log/labels.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) const MaxInternedStrings = 1024 diff --git a/pkg/logql/log/labels_test.go b/pkg/logql/log/labels_test.go index e42a330dcbc3a..97c9a8899c223 100644 --- a/pkg/logql/log/labels_test.go +++ b/pkg/logql/log/labels_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func TestLabelsBuilder_Get(t *testing.T) { diff --git a/pkg/logql/log/metrics_extraction_test.go b/pkg/logql/log/metrics_extraction_test.go 
index f059271cb8c65..9cc5ff4411f51 100644 --- a/pkg/logql/log/metrics_extraction_test.go +++ b/pkg/logql/log/metrics_extraction_test.go @@ -346,7 +346,7 @@ func TestNewLineSampleExtractor(t *testing.T) { require.Equal(t, 1., f) assertLabelResult(t, lbs, l) - stage := mustFilter(NewFilter("foo", labels.MatchEqual)).ToStage() + stage := mustFilter(NewFilter("foo", LineMatchEqual)).ToStage() se, err = NewLineSampleExtractor(BytesExtractor, []Stage{stage}, []string{"namespace"}, false, false) require.NoError(t, err) @@ -404,7 +404,7 @@ func TestNewLineSampleExtractorWithStructuredMetadata(t *testing.T) { se, err = NewLineSampleExtractor(BytesExtractor, []Stage{ NewStringLabelFilter(labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")), NewStringLabelFilter(labels.MustNewMatcher(labels.MatchEqual, "user", "bob")), - mustFilter(NewFilter("foo", labels.MatchEqual)).ToStage(), + mustFilter(NewFilter("foo", LineMatchEqual)).ToStage(), }, []string{"foo"}, false, false) require.NoError(t, err) diff --git a/pkg/logql/log/parser.go b/pkg/logql/log/parser.go index 90d4a4bebf8ab..9a5ae1395069c 100644 --- a/pkg/logql/log/parser.go +++ b/pkg/logql/log/parser.go @@ -8,10 +8,10 @@ import ( "github.com/grafana/jsonparser" - "github.com/grafana/loki/pkg/logql/log/jsonexpr" - "github.com/grafana/loki/pkg/logql/log/logfmt" - "github.com/grafana/loki/pkg/logql/log/pattern" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logql/log/jsonexpr" + "github.com/grafana/loki/v3/pkg/logql/log/logfmt" + "github.com/grafana/loki/v3/pkg/logql/log/pattern" + "github.com/grafana/loki/v3/pkg/logqlmodel" "github.com/grafana/regexp" jsoniter "github.com/json-iterator/go" @@ -373,7 +373,7 @@ func (l *LogfmtParser) Process(_ int64, line []byte, lbs *LabelsBuilder) ([]byte func (l *LogfmtParser) RequiredLabelNames() []string { return []string{} } type PatternParser struct { - matcher pattern.Matcher + matcher *pattern.Matcher names []string } diff --git 
a/pkg/logql/log/parser_hints.go b/pkg/logql/log/parser_hints.go index 3fd4cff2b3323..32a789250df94 100644 --- a/pkg/logql/log/parser_hints.go +++ b/pkg/logql/log/parser_hints.go @@ -3,7 +3,7 @@ package log import ( "strings" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func NoParserHints() ParserHint { diff --git a/pkg/logql/log/parser_hints_test.go b/pkg/logql/log/parser_hints_test.go index 42d0134bc1d8f..96bfc15b38639 100644 --- a/pkg/logql/log/parser_hints_test.go +++ b/pkg/logql/log/parser_hints_test.go @@ -4,12 +4,12 @@ package log_test import ( "testing" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) var ( diff --git a/pkg/logql/log/parser_test.go b/pkg/logql/log/parser_test.go index f8cf6373a152f..3e5de0f709418 100644 --- a/pkg/logql/log/parser_test.go +++ b/pkg/logql/log/parser_test.go @@ -5,7 +5,7 @@ import ( "sort" "testing" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" diff --git a/pkg/logql/log/pattern/ast.go b/pkg/logql/log/pattern/ast.go index b4cf8e813f968..677db07dcfbef 100644 --- a/pkg/logql/log/pattern/ast.go +++ b/pkg/logql/log/pattern/ast.go @@ -20,6 +20,21 @@ func (e expr) validate() error { return ErrNoCapture } // Consecutive captures are not allowed. 
+ if err := e.validateNoConsecutiveCaptures(); err != nil { + return err + } + caps := e.captures() + uniq := map[string]struct{}{} + for _, c := range caps { + if _, ok := uniq[c]; ok { + return fmt.Errorf("duplicate capture name (%s): %w", c, ErrInvalidExpr) + } + uniq[c] = struct{}{} + } + return nil +} + +func (e expr) validateNoConsecutiveCaptures() error { for i, n := range e { if i+1 >= len(e) { break @@ -30,21 +45,21 @@ func (e expr) validate() error { } } } + return nil +} - caps := e.captures() - uniq := map[string]struct{}{} - for _, c := range caps { - if _, ok := uniq[c]; ok { - return fmt.Errorf("duplicate capture name (%s): %w", c, ErrInvalidExpr) +func (e expr) validateNoNamedCaptures() error { + for i, n := range e { + if c, ok := e[i].(capture); ok && !c.isUnnamed() { + return fmt.Errorf("%w: found '%s'", ErrCaptureNotAllowed, n.String()) } - uniq[c] = struct{}{} } return nil } func (e expr) captures() (captures []string) { for _, n := range e { - if c, ok := n.(capture); ok && !c.isUnamed() { + if c, ok := n.(capture); ok && !c.isUnnamed() { captures = append(captures, c.Name()) } } @@ -65,8 +80,8 @@ func (c capture) Name() string { return string(c) } -func (c capture) isUnamed() bool { - return string(c) == underscore +func (c capture) isUnnamed() bool { + return len(c) == 1 && c[0] == underscore[0] } type literals []byte diff --git a/pkg/logql/log/pattern/parser.go b/pkg/logql/log/pattern/parser.go index d1bc2515c9cb2..b2a868f7af76c 100644 --- a/pkg/logql/log/pattern/parser.go +++ b/pkg/logql/log/pattern/parser.go @@ -19,8 +19,12 @@ func init() { } func parseExpr(input string) (expr, error) { + return parseExprBytes([]byte(input)) +} + +func parseExprBytes(input []byte) (expr, error) { l := newLexer() - l.setData([]byte(input)) + l.setData(input) e := exprNewParser().Parse(l) if e != 0 || len(l.errs) > 0 { return nil, l.errs[0] diff --git a/pkg/logql/log/pattern/pattern.go b/pkg/logql/log/pattern/pattern.go index b08c91b610af9..83dc6a473a165 
100644 --- a/pkg/logql/log/pattern/pattern.go +++ b/pkg/logql/log/pattern/pattern.go @@ -6,23 +6,19 @@ import ( ) var ( - ErrNoCapture = errors.New("at least one capture is required") - ErrInvalidExpr = errors.New("invalid expression") + ErrNoCapture = errors.New("at least one capture is required") + ErrCaptureNotAllowed = errors.New("named captures are not allowed") + ErrInvalidExpr = errors.New("invalid expression") ) -type Matcher interface { - Matches(in []byte) [][]byte - Names() []string -} - -type matcher struct { +type Matcher struct { e expr captures [][]byte names []string } -func New(in string) (Matcher, error) { +func New(in string) (*Matcher, error) { e, err := parseExpr(in) if err != nil { return nil, err @@ -30,16 +26,47 @@ func New(in string) (Matcher, error) { if err := e.validate(); err != nil { return nil, err } - return &matcher{ + return &Matcher{ e: e, captures: make([][]byte, 0, e.captureCount()), names: e.captures(), }, nil } +func ParseLineFilter(in []byte) (*Matcher, error) { + if len(in) == 0 { + return new(Matcher), nil + } + e, err := parseExprBytes(in) + if err != nil { + return nil, err + } + if err = e.validateNoConsecutiveCaptures(); err != nil { + return nil, err + } + if err = e.validateNoNamedCaptures(); err != nil { + return nil, err + } + return &Matcher{e: e}, nil +} + +func ParseLiterals(in string) ([][]byte, error) { + e, err := parseExpr(in) + if err != nil { + return nil, err + } + lit := make([][]byte, 0, len(e)) + for _, n := range e { + if l, ok := n.(literals); ok { + lit = append(lit, l) + } + } + return lit, nil +} + // Matches matches the given line with the provided pattern. // Matches invalidates the previous returned captures array. -func (m *matcher) Matches(in []byte) [][]byte { +func (m *Matcher) Matches(in []byte) [][]byte { if len(in) == 0 { return nil } @@ -62,7 +89,7 @@ func (m *matcher) Matches(in []byte) [][]byte { // from now we have capture - literals - capture ... (literals)? 
for len(expr) != 0 { if len(expr) == 1 { // we're ending on a capture. - if !(expr[0].(capture)).isUnamed() { + if !(expr[0].(capture)).isUnnamed() { captures = append(captures, in) } return captures @@ -73,13 +100,13 @@ func (m *matcher) Matches(in []byte) [][]byte { i := bytes.Index(in, ls) if i == -1 { // if a capture is missed we return up to the end as the capture. - if !capt.isUnamed() { + if !capt.isUnnamed() { captures = append(captures, in) } return captures } - if capt.isUnamed() { + if capt.isUnnamed() { in = in[len(ls)+i:] continue } @@ -90,6 +117,42 @@ func (m *matcher) Matches(in []byte) [][]byte { return captures } -func (m *matcher) Names() []string { +func (m *Matcher) Names() []string { return m.names } + +func (m *Matcher) Test(in []byte) bool { + if len(in) == 0 || len(m.e) == 0 { + // An empty line can only match an empty pattern. + return len(in) == 0 && len(m.e) == 0 + } + var off int + for i := 0; i < len(m.e); i++ { + lit, ok := m.e[i].(literals) + if !ok { + continue + } + j := bytes.Index(in[off:], lit) + if j == -1 { + return false + } + if i != 0 && j == 0 { + // This means we either have repetitive literals, or an empty + // capture. Either way, the line does not match the pattern. + return false + } + off += j + len(lit) + } + // If we end up on a literal, we only consider the test successful if + // the remaining input is empty. Otherwise, if we end up on a capture, + // the remainder (the captured text) must not be empty. + // + // For example, "foo bar baz" does not match "<_> bar", but it matches + // "<_> baz" and "foo <_>". + // + // Empty captures are not allowed as well: " bar " does not match + // "<_> bar <_>", but matches "<_>bar<_>". 
+ _, reqRem := m.e[len(m.e)-1].(capture) + hasRem := off != len(in) + return reqRem == hasRem +} diff --git a/pkg/logql/log/pattern/pattern_test.go b/pkg/logql/log/pattern/pattern_test.go index da0c6a180527f..0d1c47f0bea29 100644 --- a/pkg/logql/log/pattern/pattern_test.go +++ b/pkg/logql/log/pattern/pattern_test.go @@ -4,6 +4,7 @@ import ( "fmt" "testing" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -11,97 +12,144 @@ var fixtures = []struct { expr string in string expected []string + matches bool }{ { "foo bar", "foo buzz bar", []string{"buzz"}, + true, }, { "foo bar", "foo buzz bar", []string{"buzz", ""}, + false, + }, + { + "foo bar", + "foo buzz bar", + []string{"", "buzz"}, + false, }, { " bar", " bar", []string{"", ""}, + false, + }, + { + "bar", + " bar ", + []string{" ", " "}, + true, + }, + { + " bar", + " bar ", + []string{"", " "}, + false, + }, + { + "bar ", + " bar ", + []string{" ", ""}, + false, + }, + { + "", + " bar ", + []string{" bar "}, + true, }, { "?<_>", `/api/plugins/versioncheck?slugIn=snuids-trafficlights-panel,input,gel&grafanaVersion=7.0.0-beta1`, []string{"/api/plugins/versioncheck"}, + true, }, { "?<_>", `/api/plugins/status`, []string{"/api/plugins/status"}, + false, }, { // Common Log Format ` [<_>] " <_>" `, `127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`, []string{"127.0.0.1", "user-identifier", "frank", "GET", "/apache_pb.gif", "200", "2326"}, + true, }, { // Combined Log Format ` - - [<_>] " <_>" `, `35.191.8.106 - - [19/May/2021:07:21:49 +0000] "GET /api/plugins/versioncheck?slugIn=snuids-trafficlights-panel,input,gel&grafanaVersion=7.0.0-beta1 HTTP/1.1" 200 107 "-" "Go-http-client/2.0" "80.153.74.144, 34.120.177.193" "TLSv1.3" "DE" "DEBW"`, []string{"35.191.8.106", "GET", "/api/plugins/versioncheck?slugIn=snuids-trafficlights-panel,input,gel&grafanaVersion=7.0.0-beta1", "200", "107"}, + false, }, { // MySQL `<_> [] [] [] `, 
`2020-08-06T14:25:02.835618Z 0 [Note] [MY-012487] [InnoDB] DDL log recovery : begin`, []string{"0", "Note", "MY-012487", "InnoDB"}, + false, }, { // MySQL `<_> [] `, `2021-05-19T07:40:12.215792Z 42761518 [Note] Aborted connection 42761518 to db: 'hosted_grafana' user: 'hosted_grafana' host: '10.36.4.122' (Got an error reading communication packets)`, []string{"42761518", "Note"}, + false, }, { // Kubernetes api-server ` <_> <_> ] `, `W0519 07:46:47.647050 1 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {https://kubernetes-etcd-1.kubernetes-etcd:2379 0 }. Err :connection error: desc = "transport: Error while dialing dial tcp 10.32.85.85:2379: connect: connection refused". Reconnecting...`, []string{"W0519", "clientconn.go:1223"}, + false, }, { // Cassandra ` []<_> in .<_>`, `INFO [Service Thread] 2021-05-19 07:40:12,130 GCInspector.java:284 - ParNew GC in 248ms. CMS Old Gen: 5043436640 -> 5091062064; Par Eden Space: 671088640 -> 0; Par Survivor Space: 70188280 -> 60139760`, []string{"INFO", "Service Thread", "248ms"}, + true, }, { // Cortex & Loki distributor `<_> msg=" () "`, `level=debug ts=2021-05-19T07:54:26.864644382Z caller=logging.go:66 traceID=7fbb92fd0eb9c65d msg="POST /loki/api/v1/push (204) 1.238734ms"`, []string{"POST", "/loki/api/v1/push", "204", "1.238734ms"}, + true, }, { // Etcd `<_> <_> | : <_> peer <_> tcp :<_>`, `2021-05-19 08:16:50.181436 W | rafthttp: health check for peer fd8275e521cfb532 could not connect: dial tcp 10.32.85.85:2380: connect: connection refused`, []string{"W", "rafthttp", "fd8275e521cfb532", "10.32.85.85"}, + true, }, { // Kafka `<_>] [Log partition=, dir=] `, `[2021-05-19 08:35:28,681] INFO [Log partition=p-636-L-fs-117, dir=/data/kafka-logs] Deleting segment 455976081 (kafka.log.Log)`, []string{"INFO", "p-636-L-fs-117", "/data/kafka-logs"}, + false, }, { // Elastic `<_>][][] [] []`, `[2021-05-19T06:54:06,994][INFO ][o.e.c.m.MetaDataMappingService] [1f605d47-8454-4bfb-a67f-49f318bf837a] 
[usage-stats-2021.05.19/O2Je9IbmR8CqFyUvNpTttA] update_mapping [report]`, []string{"INFO ", "o.e.c.m.MetaDataMappingService", "1f605d47-8454-4bfb-a67f-49f318bf837a", "usage-stats-2021.05.19/O2Je9IbmR8CqFyUvNpTttA"}, + false, }, { // Envoy `<_> " <_>" <_> "" "" <_> <_> ""`, `[2016-04-15T20:17:00.310Z] "POST /api/v1/locations HTTP/2" 204 - 154 0 226 100 "10.0.35.28" "nsq2http" "cc21d9b0-cf5c-432b-8c7e-98aeb7988cd2" "locations" "tcp://10.0.2.1:80"`, []string{"POST", "/api/v1/locations", "204", "154", "0", "226", "100", "10.0.35.28", "nsq2http", "tcp://10.0.2.1:80"}, + true, }, } @@ -112,12 +160,14 @@ func Test_matcher_Matches(t *testing.T) { t.Parallel() m, err := New(tt.expr) require.NoError(t, err) - actual := m.Matches([]byte(tt.in)) + line := []byte(tt.in) + assert.Equal(t, tt.matches, m.Test(line)) + actual := m.Matches(line) var actualStrings []string for _, a := range actual { actualStrings = append(actualStrings, string(a)) } - require.Equal(t, tt.expected, actualStrings) + assert.Equal(t, tt.expected, actualStrings) }) } } @@ -162,3 +212,40 @@ func Test_Error(t *testing.T) { }) } } + +func Test_ParseLineFilter(t *testing.T) { + for _, tt := range []struct { + name string + err error + }{ + {"<_>", nil}, // Meaningless, but valid: matches everything. + {"", nil}, // Empty pattern matches empty lines. 
+ {"foo <_> bar <_>", nil}, + {" bar <_>", fmt.Errorf("%w: found ''", ErrCaptureNotAllowed)}, + {"", fmt.Errorf("%w: found ''", ErrCaptureNotAllowed)}, + } { + t.Run(tt.name, func(t *testing.T) { + _, err := ParseLineFilter([]byte(tt.name)) + require.Equal(t, tt.err, err) + }) + } +} + +func Test_ParseLiterals(t *testing.T) { + for _, tt := range []struct { + pattern string + lit [][]byte + err error + }{ + {"<_>", [][]byte{}, nil}, + {"", nil, newParseError("syntax error: unexpected $end, expecting IDENTIFIER or LITERAL", 1, 1)}, + {"foo <_> bar <_>", [][]byte{[]byte("foo "), []byte(" bar ")}, nil}, + {"", [][]byte{}, nil}, + } { + t.Run(tt.pattern, func(t *testing.T) { + lit, err := ParseLiterals(tt.pattern) + require.Equal(t, tt.err, err) + require.Equal(t, tt.lit, lit) + }) + } +} diff --git a/pkg/logql/log/pipeline_test.go b/pkg/logql/log/pipeline_test.go index 12a1a61fcc2cf..ffa5df0d50b98 100644 --- a/pkg/logql/log/pipeline_test.go +++ b/pkg/logql/log/pipeline_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func TestNoopPipeline(t *testing.T) { @@ -240,7 +240,7 @@ func newPipelineFilter(start, end int64, lbls, structuredMetadata labels.Labels, stages = append(stages, s) }) - stages = append(stages, mustFilter(NewFilter(filter, labels.MatchEqual)).ToStage()) + stages = append(stages, mustFilter(NewFilter(filter, LineMatchEqual)).ToStage()) return PipelineFilter{start, end, matchers, NewPipeline(stages)} } @@ -527,7 +527,7 @@ func Benchmark_Pipeline(b *testing.B) { b.ReportAllocs() stages := []Stage{ - mustFilter(NewFilter("metrics.go", labels.MatchEqual)).ToStage(), + mustFilter(NewFilter("metrics.go", LineMatchEqual)).ToStage(), NewLogfmtParser(false, false), NewAndLabelFilter( NewDurationLabelFilter(LabelFilterGreaterThan, "duration", 10*time.Millisecond), @@ -611,7 +611,7 @@ func jsonBenchmark(b 
*testing.B, parser Stage) { b.ReportAllocs() p := NewPipeline([]Stage{ - mustFilter(NewFilter("metrics.go", labels.MatchEqual)).ToStage(), + mustFilter(NewFilter("metrics.go", LineMatchEqual)).ToStage(), parser, }) line := []byte(`{"ts":"2020-12-27T09:15:54.333026285Z","error":"action could not be completed", "context":{"file": "metrics.go"}}`) @@ -643,7 +643,7 @@ func invalidJSONBenchmark(b *testing.B, parser Stage) { b.ReportAllocs() p := NewPipeline([]Stage{ - mustFilter(NewFilter("invalid json", labels.MatchEqual)).ToStage(), + mustFilter(NewFilter("invalid json", LineMatchEqual)).ToStage(), parser, }) line := []byte(`invalid json`) @@ -696,7 +696,7 @@ func logfmtBenchmark(b *testing.B, parser Stage) { b.ReportAllocs() p := NewPipeline([]Stage{ - mustFilter(NewFilter("ts", labels.MatchEqual)).ToStage(), + mustFilter(NewFilter("ts", LineMatchEqual)).ToStage(), parser, }) diff --git a/pkg/logql/mapper_metrics.go b/pkg/logql/mapper_metrics.go index 3588231700a63..33cc406dd1ab3 100644 --- a/pkg/logql/mapper_metrics.go +++ b/pkg/logql/mapper_metrics.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) // expression type used in metrics @@ -42,9 +42,10 @@ func newMapperMetrics(registerer prometheus.Registerer, mapper string) *MapperMe ConstLabels: prometheus.Labels{"mapper": mapper}, }, []string{"type"}), DownstreamFactor: promauto.With(registerer).NewHistogram(prometheus.HistogramOpts{ - Namespace: constants.Loki, - Name: "query_frontend_shard_factor", - Help: "Number of downstream queries per request", + Namespace: constants.Loki, + Name: "query_frontend_shard_factor", + Help: "Number of downstream queries per request", + // 1 -> 65k shards Buckets: prometheus.ExponentialBuckets(1, 4, 8), ConstLabels: prometheus.Labels{"mapper": mapper}, }), diff --git a/pkg/logql/matchers.go 
b/pkg/logql/matchers.go index 2fbf14573069c..f0bdef46a9d41 100644 --- a/pkg/logql/matchers.go +++ b/pkg/logql/matchers.go @@ -4,7 +4,7 @@ import ( "github.com/pkg/errors" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) // MatchForSeriesRequest extracts and parses multiple matcher groups from a slice of strings. diff --git a/pkg/logql/metrics.go b/pkg/logql/metrics.go index 1399ac15ef9f5..e9921a07c2944 100644 --- a/pkg/logql/metrics.go +++ b/pkg/logql/metrics.go @@ -2,10 +2,12 @@ package logql import ( "context" + "fmt" "strconv" "strings" "time" + "github.com/c2h5oh/datasize" "github.com/dustin/go-humanize" "github.com/go-kit/log" "github.com/go-kit/log/level" @@ -13,16 +15,16 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" promql_parser "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - logql_stats "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/httpreq" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + logql_stats "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/httpreq" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( @@ -32,6 +34,7 @@ const ( QueryTypeLabels = "labels" QueryTypeSeries = "series" QueryTypeStats = "stats" + 
QueryTypeShards = "shards" QueryTypeVolume = "volume" latencyTypeSlow = "slow" @@ -128,6 +131,11 @@ func RecordRangeAndInstantQueryMetrics( logValues := make([]interface{}, 0, 50) + var bloomRatio float64 // what % are filtered + if stats.Index.TotalChunks > 0 { + bloomRatio = float64(stats.Index.TotalChunks-stats.Index.PostFilterChunks) / float64(stats.Index.TotalChunks) + } + logValues = append(logValues, []interface{}{ "latency", latencyType, // this can be used to filter log lines. "query", query, @@ -154,6 +162,7 @@ func RecordRangeAndInstantQueryMetrics( "splits", stats.Summary.Splits, "shards", stats.Summary.Shards, "query_referenced_structured_metadata", stats.QueryReferencedStructuredMetadata(), + "pipeline_wrapper_filtered_lines", stats.PipelineWrapperFilteredLines(), "chunk_refs_fetch_time", stats.ChunkRefsFetchTime(), "cache_chunk_req", stats.Caches.Chunk.EntriesRequested, "cache_chunk_hit", stats.Caches.Chunk.EntriesFound, @@ -191,6 +200,9 @@ func RecordRangeAndInstantQueryMetrics( "ingester_post_filter_lines", stats.Ingester.Store.Chunk.GetPostFilterLines(), // Time spent being blocked on congestion control. "congestion_control_latency", stats.CongestionControlLatency(), + "index_total_chunks", stats.Index.TotalChunks, + "index_post_bloom_filter_chunks", stats.Index.PostFilterChunks, + "index_bloom_filter_ratio", fmt.Sprintf("%.2f", bloomRatio), }...) logValues = append(logValues, tagsToKeyValues(queryTags)...) 
@@ -370,6 +382,58 @@ func RecordStatsQueryMetrics(ctx context.Context, log log.Logger, start, end tim execLatency.WithLabelValues(status, queryType, "").Observe(stats.Summary.ExecTime) } +func RecordShardsQueryMetrics( + ctx context.Context, + log log.Logger, + start, + end time.Time, + query string, + targetBytesPerShard uint64, + status string, + shards int, + stats logql_stats.Result, +) { + var ( + logger = fixLogger(ctx, log) + latencyType = latencyTypeFast + queryType = QueryTypeShards + ) + + // Tag throughput metric by latency type based on a threshold. + // Latency below the threshold is fast, above is slow. + if stats.Summary.ExecTime > slowQueryThresholdSecond { + latencyType = latencyTypeSlow + } + + var bloomRatio float64 // what % are filtered + if stats.Index.TotalChunks > 0 { + bloomRatio = float64(stats.Index.TotalChunks-stats.Index.PostFilterChunks) / float64(stats.Index.TotalChunks) + } + logValues := make([]interface{}, 0, 15) + logValues = append(logValues, + "latency", latencyType, + "query_type", queryType, + "start", start.Format(time.RFC3339Nano), + "end", end.Format(time.RFC3339Nano), + "start_delta", time.Since(start), + "end_delta", time.Since(end), + "length", end.Sub(start), + "duration", time.Duration(int64(stats.Summary.ExecTime*float64(time.Second))), + "status", status, + "query", query, + "query_hash", util.HashedQuery(query), + "target_bytes_per_shard", datasize.ByteSize(targetBytesPerShard).HumanReadable(), + "shards", shards, + "index_total_chunks", stats.Index.TotalChunks, + "index_post_bloom_filter_chunks", stats.Index.PostFilterChunks, + "index_bloom_filter_ratio", fmt.Sprintf("%.2f", bloomRatio), + ) + + level.Info(logger).Log(logValues...) 
+ + execLatency.WithLabelValues(status, queryType, "").Observe(stats.Summary.ExecTime) +} + func RecordVolumeQueryMetrics(ctx context.Context, log log.Logger, start, end time.Time, query string, limit uint32, step time.Duration, status string, stats logql_stats.Result) { var ( logger = fixLogger(ctx, log) @@ -416,6 +480,36 @@ func RecordVolumeQueryMetrics(ctx context.Context, log log.Logger, start, end ti execLatency.WithLabelValues(status, queryType, "").Observe(stats.Summary.ExecTime) } +func RecordDetectedFieldsQueryMetrics(ctx context.Context, log log.Logger, start, end time.Time, query string, status string, stats logql_stats.Result) { + var ( + logger = fixLogger(ctx, log) + latencyType = latencyTypeFast + queryType = QueryTypeVolume + ) + + // Tag throughput metric by latency type based on a threshold. + // Latency below the threshold is fast, above is slow. + if stats.Summary.ExecTime > slowQueryThresholdSecond { + latencyType = latencyTypeSlow + } + + level.Info(logger).Log( + "latency", latencyType, + "query_type", queryType, + "query", query, + "query_hash", util.HashedQuery(query), + "start", start.Format(time.RFC3339Nano), + "end", end.Format(time.RFC3339Nano), + "start_delta", time.Since(start), + "end_delta", time.Since(end), + "length", end.Sub(start), + "status", status, + // "duration", time.Duration(int64(stats.Summary.ExecTime*float64(time.Second))), + ) + //TODO(twhitney): add stats and exec time + // execLatency.WithLabelValues(status, queryType, "").Observe(stats.Summary.ExecTime) +} + func recordUsageStats(queryType string, stats logql_stats.Result) { if queryType == QueryTypeMetric { bytePerSecondMetricUsage.Record(float64(stats.Summary.BytesProcessedPerSecond)) @@ -484,3 +578,7 @@ func extractShard(shards []string) *astmapper.ShardAnnotation { return &shard } + +func RecordDetectedLabelsQueryMetrics(_ context.Context, _ log.Logger, _ time.Time, _ time.Time, _ string, _ string, _ logql_stats.Result) { + // TODO(shantanu) log metrics here +} 
diff --git a/pkg/logql/metrics_test.go b/pkg/logql/metrics_test.go index c08844eabeabc..44094e27f5d4b 100644 --- a/pkg/logql/metrics_test.go +++ b/pkg/logql/metrics_test.go @@ -15,13 +15,13 @@ import ( "github.com/stretchr/testify/require" "github.com/uber/jaeger-client-go" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/httpreq" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/httpreq" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestQueryType(t *testing.T) { diff --git a/pkg/logql/optimize.go b/pkg/logql/optimize.go index 9b885b0fd229c..ef930be799664 100644 --- a/pkg/logql/optimize.go +++ b/pkg/logql/optimize.go @@ -1,6 +1,6 @@ package logql -import "github.com/grafana/loki/pkg/logql/syntax" +import "github.com/grafana/loki/v3/pkg/logql/syntax" // optimizeSampleExpr Attempt to optimize the SampleExpr to another that will run faster but will produce the same result. 
func optimizeSampleExpr(expr syntax.SampleExpr) (syntax.SampleExpr, error) { diff --git a/pkg/logql/optimize_test.go b/pkg/logql/optimize_test.go index b4005e6d1f158..a457f180d4fa3 100644 --- a/pkg/logql/optimize_test.go +++ b/pkg/logql/optimize_test.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func Test_optimizeSampleExpr(t *testing.T) { diff --git a/pkg/logql/quantile_over_time_sketch.go b/pkg/logql/quantile_over_time_sketch.go index 24a8a05d89ede..42288830c2ddc 100644 --- a/pkg/logql/quantile_over_time_sketch.go +++ b/pkg/logql/quantile_over_time_sketch.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/prometheus/promql" promql_parser "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/sketch" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/sketch" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) const ( diff --git a/pkg/logql/quantile_over_time_sketch_test.go b/pkg/logql/quantile_over_time_sketch_test.go index 488ebdec26f06..5692575bd2904 100644 --- a/pkg/logql/quantile_over_time_sketch_test.go +++ b/pkg/logql/quantile_over_time_sketch_test.go @@ -11,9 +11,9 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/sketch" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/sketch" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func TestProbabilisticMQuantileMatrixSerialization(t *testing.T) { diff --git a/pkg/logql/range_vector.go b/pkg/logql/range_vector.go index 484949718f090..44a8651577549 100644 --- a/pkg/logql/range_vector.go 
+++ b/pkg/logql/range_vector.go @@ -11,9 +11,9 @@ import ( "github.com/prometheus/prometheus/promql" promql_parser "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logql/vector" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/vector" ) // BatchRangeVectorAggregator aggregates samples for a given range of samples. diff --git a/pkg/logql/range_vector_test.go b/pkg/logql/range_vector_test.go index c7176bed2ab90..fb28ea5c9c0c9 100644 --- a/pkg/logql/range_vector_test.go +++ b/pkg/logql/range_vector_test.go @@ -13,11 +13,11 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/sketch" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logql/vector" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/sketch" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/vector" ) var samples = []logproto.Sample{ diff --git a/pkg/logql/rangemapper.go b/pkg/logql/rangemapper.go index f898e19d2ea1e..bec1711226109 100644 --- a/pkg/logql/rangemapper.go +++ b/pkg/logql/rangemapper.go @@ -9,8 +9,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/logql/syntax" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var splittableVectorOp = map[string]struct{}{ diff --git a/pkg/logql/rangemapper_test.go b/pkg/logql/rangemapper_test.go index 5e95486a8c8e2..5365c7b2b73f0 100644 --- a/pkg/logql/rangemapper_test.go +++ b/pkg/logql/rangemapper_test.go @@ -6,7 +6,7 
@@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func Test_SplitRangeInterval(t *testing.T) { diff --git a/pkg/logql/shardmapper.go b/pkg/logql/shardmapper.go index a8fb04c826a6a..fbd0dbaa83ebb 100644 --- a/pkg/logql/shardmapper.go +++ b/pkg/logql/shardmapper.go @@ -7,35 +7,22 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/util/math" - - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) -type ShardResolver interface { - Shards(expr syntax.Expr) (int, uint64, error) - GetStats(e syntax.Expr) (stats.Stats, error) -} - -type ConstantShards int - -func (s ConstantShards) Shards(_ syntax.Expr) (int, uint64, error) { return int(s), 0, nil } -func (s ConstantShards) GetStats(_ syntax.Expr) (stats.Stats, error) { return stats.Stats{}, nil } - const ( ShardQuantileOverTime = "quantile_over_time" ) type ShardMapper struct { - shards ShardResolver + shards ShardingStrategy metrics *MapperMetrics quantileOverTimeSharding bool } -func NewShardMapper(resolver ShardResolver, metrics *MapperMetrics, shardAggregation []string) ShardMapper { +func NewShardMapper(strategy ShardingStrategy, metrics *MapperMetrics, shardAggregation []string) ShardMapper { quantileOverTimeSharding := false for _, a := range shardAggregation { if a == ShardQuantileOverTime { @@ -43,7 +30,7 @@ func NewShardMapper(resolver ShardResolver, metrics *MapperMetrics, shardAggrega } } return ShardMapper{ - shards: resolver, + shards: strategy, metrics: metrics, quantileOverTimeSharding: quantileOverTimeSharding, } @@ 
-148,71 +135,70 @@ func (m ShardMapper) mapBinOpExpr(e *syntax.BinOpExpr, r *downstreamRecorder, to e.RHS = rhsSampleExpr // We take the maximum bytes per shard of both sides of the operation - bytesPerShard := uint64(math.Max(int(lhsBytesPerShard), int(rhsBytesPerShard))) + bytesPerShard := uint64(max(int(lhsBytesPerShard), int(rhsBytesPerShard))) return e, bytesPerShard, nil } func (m ShardMapper) mapLogSelectorExpr(expr syntax.LogSelectorExpr, r *downstreamRecorder) (syntax.LogSelectorExpr, uint64, error) { var head *ConcatLogSelectorExpr - shards, bytesPerShard, err := m.shards.Shards(expr) + shards, maxBytesPerShard, err := m.shards.Shards(expr) if err != nil { return nil, 0, err } - if shards == 0 { + if len(shards) == 0 { return &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ shard: nil, LogSelectorExpr: expr, }, - }, bytesPerShard, nil + }, maxBytesPerShard, nil } - for i := shards - 1; i >= 0; i-- { + + for i := len(shards) - 1; i >= 0; i-- { head = &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ - Shard: i, - Of: shards, - }, + shard: &shards[i], LogSelectorExpr: expr, }, next: head, } } - r.Add(shards, StreamsKey) - return head, bytesPerShard, nil + r.Add(len(shards), StreamsKey) + return head, maxBytesPerShard, nil } func (m ShardMapper) mapSampleExpr(expr syntax.SampleExpr, r *downstreamRecorder) (syntax.SampleExpr, uint64, error) { var head *ConcatSampleExpr - shards, bytesPerShard, err := m.shards.Shards(expr) + shards, maxBytesPerShard, err := m.shards.Shards(expr) + if err != nil { return nil, 0, err } - if shards == 0 { + + if len(shards) == 0 { return &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ shard: nil, SampleExpr: expr, }, - }, bytesPerShard, nil + }, maxBytesPerShard, nil } - for shard := shards - 1; shard >= 0; shard-- { + + for i := len(shards) - 1; i >= 0; i-- { head = &ConcatSampleExpr{ DownstreamSampleExpr: 
DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ - Shard: shard, - Of: shards, - }, + shard: &shards[i], SampleExpr: expr, }, next: head, } } - r.Add(shards, MetricsKey) - return head, bytesPerShard, nil + r.Add(len(shards), MetricsKey) + + return head, maxBytesPerShard, nil } // turn a vector aggr into a wrapped+sharded variant, @@ -273,7 +259,7 @@ func (m ShardMapper) mapVectorAggregationExpr(expr *syntax.VectorAggregationExpr } // We take the maximum bytes per shard of both sides of the operation - bytesPerShard := uint64(math.Max(int(lhsBytesPerShard), int(rhsBytesPerShard))) + bytesPerShard := uint64(max(int(lhsBytesPerShard), int(rhsBytesPerShard))) return &syntax.BinOpExpr{ SampleExpr: lhs, @@ -307,7 +293,7 @@ func (m ShardMapper) mapVectorAggregationExpr(expr *syntax.VectorAggregationExpr "msg", "unexpected operation which appears shardable, ignoring", "operation", expr.Operation, ) - exprStats, err := m.shards.GetStats(expr) + exprStats, err := m.shards.Resolver().GetStats(expr) if err != nil { return nil, 0, err } @@ -366,7 +352,7 @@ var rangeMergeMap = map[string]string{ func (m ShardMapper) mapRangeAggregationExpr(expr *syntax.RangeAggregationExpr, r *downstreamRecorder, topLevel bool) (syntax.SampleExpr, uint64, error) { if !expr.Shardable(topLevel) { - return noOp(expr, m.shards) + return noOp(expr, m.shards.Resolver()) } switch expr.Operation { @@ -442,7 +428,7 @@ func (m ShardMapper) mapRangeAggregationExpr(expr *syntax.RangeAggregationExpr, } // We take the maximum bytes per shard of both sides of the operation - bytesPerShard := uint64(math.Max(int(lhsBytesPerShard), int(rhsBytesPerShard))) + bytesPerShard := uint64(max(int(lhsBytesPerShard), int(rhsBytesPerShard))) return &syntax.BinOpExpr{ SampleExpr: lhs, @@ -452,7 +438,7 @@ func (m ShardMapper) mapRangeAggregationExpr(expr *syntax.RangeAggregationExpr, case syntax.OpRangeTypeQuantile: if !m.quantileOverTimeSharding { - return noOp(expr, m.shards) + return noOp(expr, 
m.shards.Resolver()) } potentialConflict := syntax.ReducesLabels(expr) @@ -460,12 +446,15 @@ func (m ShardMapper) mapRangeAggregationExpr(expr *syntax.RangeAggregationExpr, return m.mapSampleExpr(expr, r) } - shards, bytesPerShard, err := m.shards.Shards(expr) + // TODO(owen-d): integrate bounded sharding with quantile over time + // I'm not doing this now because it uses a separate code path and may not handle + // bounded shards in the same way + shards, bytesPerShard, err := m.shards.Resolver().Shards(expr) if err != nil { return nil, 0, err } if shards == 0 { - return noOp(expr, m.shards) + return noOp(expr, m.shards.Resolver()) } // quantile_over_time() by (foo) -> @@ -475,11 +464,12 @@ func (m ShardMapper) mapRangeAggregationExpr(expr *syntax.RangeAggregationExpr, downstreams := make([]DownstreamSampleExpr, 0, shards) expr.Operation = syntax.OpRangeTypeQuantileSketch for shard := shards - 1; shard >= 0; shard-- { + s := NewPowerOfTwoShard(index.ShardAnnotation{ + Shard: uint32(shard), + Of: uint32(shards), + }) downstreams = append(downstreams, DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ - Shard: shard, - Of: shards, - }, + shard: &s, SampleExpr: expr, }) } @@ -493,7 +483,7 @@ func (m ShardMapper) mapRangeAggregationExpr(expr *syntax.RangeAggregationExpr, default: // don't shard if there's not an appropriate optimization - return noOp(expr, m.shards) + return noOp(expr, m.shards.Resolver()) } } diff --git a/pkg/logql/shardmapper_test.go b/pkg/logql/shardmapper_test.go index e7fbb9d5204c3..355f839bac55f 100644 --- a/pkg/logql/shardmapper_test.go +++ b/pkg/logql/shardmapper_test.go @@ -7,9 +7,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + 
"github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func TestShardedStringer(t *testing.T) { @@ -20,20 +21,20 @@ func TestShardedStringer(t *testing.T) { { in: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), LogSelectorExpr: &syntax.MatchersExpr{ Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, }, }, next: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), LogSelectorExpr: &syntax.MatchersExpr{ Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, }, @@ -51,7 +52,9 @@ func TestShardedStringer(t *testing.T) { } func TestMapSampleExpr(t *testing.T) { - m := NewShardMapper(ConstantShards(2), nilShardMetrics, []string{ShardQuantileOverTime}) + + strategy := NewPowerOfTwoStrategy(ConstantShards(2)) + m := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) for _, tc := range []struct { in syntax.SampleExpr @@ -69,10 +72,10 @@ func TestMapSampleExpr(t *testing.T) { }, out: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -85,10 +88,10 @@ func TestMapSampleExpr(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -113,7 +116,8 @@ func 
TestMapSampleExpr(t *testing.T) { } func TestMappingStrings(t *testing.T) { - m := NewShardMapper(ConstantShards(2), nilShardMetrics, []string{ShardQuantileOverTime}) + strategy := NewPowerOfTwoStrategy(ConstantShards(2)) + m := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) for _, tc := range []struct { in string out string @@ -452,7 +456,8 @@ func TestMappingStrings_NoProbabilisticSharding(t *testing.T) { }, } { t.Run(tc.in, func(t *testing.T) { - shardedMapper := NewShardMapper(ConstantShards(2), nilShardMetrics, []string{ShardQuantileOverTime}) + + shardedMapper := NewShardMapper(NewPowerOfTwoStrategy(ConstantShards(2)), nilShardMetrics, []string{ShardQuantileOverTime}) ast, err := syntax.ParseExpr(tc.in) require.Nil(t, err) @@ -462,7 +467,7 @@ func TestMappingStrings_NoProbabilisticSharding(t *testing.T) { require.Equal(t, removeWhiteSpace(tc.out), removeWhiteSpace(sharded.String())) - unshardedMapper := NewShardMapper(ConstantShards(2), nilShardMetrics, []string{}) + unshardedMapper := NewShardMapper(NewPowerOfTwoStrategy(ConstantShards(2)), nilShardMetrics, []string{}) ast, err = syntax.ParseExpr(tc.in) require.Nil(t, err) @@ -476,7 +481,8 @@ func TestMappingStrings_NoProbabilisticSharding(t *testing.T) { } func TestMapping(t *testing.T) { - m := NewShardMapper(ConstantShards(2), nilShardMetrics, []string{}) + strategy := NewPowerOfTwoStrategy(ConstantShards(2)) + m := NewShardMapper(strategy, nilShardMetrics, []string{}) for _, tc := range []struct { in string @@ -487,20 +493,20 @@ func TestMapping(t *testing.T) { in: `{foo="bar"}`, expr: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), LogSelectorExpr: &syntax.MatchersExpr{ Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, }, }, next: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: 
DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), LogSelectorExpr: &syntax.MatchersExpr{ Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, }, @@ -513,10 +519,10 @@ func TestMapping(t *testing.T) { in: `{foo="bar"} |= "error"`, expr: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), LogSelectorExpr: &syntax.PipelineExpr{ Left: &syntax.MatchersExpr{ Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, @@ -524,7 +530,7 @@ func TestMapping(t *testing.T) { MultiStages: syntax.MultiStageExpr{ &syntax.LineFilterExpr{ LineFilter: syntax.LineFilter{ - Ty: labels.MatchEqual, + Ty: log.LineMatchEqual, Match: "error", Op: "", }, @@ -534,10 +540,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), LogSelectorExpr: &syntax.PipelineExpr{ Left: &syntax.MatchersExpr{ Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, @@ -545,7 +551,7 @@ func TestMapping(t *testing.T) { MultiStages: syntax.MultiStageExpr{ &syntax.LineFilterExpr{ LineFilter: syntax.LineFilter{ - Ty: labels.MatchEqual, + Ty: log.LineMatchEqual, Match: "error", Op: "", }, @@ -561,10 +567,10 @@ func TestMapping(t *testing.T) { in: `rate({foo="bar"}[5m])`, expr: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -577,10 +583,10 @@ func TestMapping(t *testing.T) { }, next: 
&ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -599,10 +605,10 @@ func TestMapping(t *testing.T) { in: `count_over_time({foo="bar"}[5m])`, expr: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeCount, Left: &syntax.LogRange{ @@ -615,10 +621,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeCount, Left: &syntax.LogRange{ @@ -640,10 +646,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeSum, @@ -660,10 +666,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeSum, @@ -691,10 +697,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeTopK, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ 
Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -707,10 +713,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -733,10 +739,10 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -753,10 +759,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -785,10 +791,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeSum, @@ -805,10 +811,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeSum, @@ -832,10 +838,10 @@ func TestMapping(t *testing.T) 
{ Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -852,10 +858,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -892,10 +898,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -914,10 +920,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -954,10 +960,10 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeSum, @@ -974,10 +980,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ 
Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeSum, @@ -1009,10 +1015,10 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -1029,10 +1035,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -1071,10 +1077,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1093,10 +1099,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1123,10 +1129,10 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -1143,10 
+1149,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -1185,10 +1191,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1207,10 +1213,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1236,10 +1242,10 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -1256,10 +1262,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -1291,10 +1297,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ 
+ shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1316,10 +1322,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1350,10 +1356,10 @@ func TestMapping(t *testing.T) { }, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1372,10 +1378,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1461,10 +1467,10 @@ func TestMapping(t *testing.T) { RHS: &syntax.VectorAggregationExpr{ Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Left: &syntax.RangeAggregationExpr{ Left: &syntax.LogRange{ @@ -1484,10 +1490,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Left: &syntax.RangeAggregationExpr{ Left: &syntax.LogRange{ @@ -1661,7 +1667,8 
@@ func TestStringTrimming(t *testing.T) { }, } { t.Run(tc.expr.String(), func(t *testing.T) { - m := NewShardMapper(ConstantShards(tc.shards), nilShardMetrics, []string{ShardQuantileOverTime}) + strategy := NewPowerOfTwoStrategy(ConstantShards(tc.shards)) + m := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) _, _, mappedExpr, err := m.Parse(tc.expr) require.Nil(t, err) require.Equal(t, removeWhiteSpace(tc.expected), removeWhiteSpace(mappedExpr.String())) @@ -1680,7 +1687,7 @@ func TestShardTopk(t *testing.T) { sum_over_time({job="foo"} | json | unwrap bytes(bytes)[1m]) ) )` - m := NewShardMapper(ConstantShards(5), nilShardMetrics, []string{ShardQuantileOverTime}) + m := NewShardMapper(NewPowerOfTwoStrategy(ConstantShards(5)), nilShardMetrics, []string{ShardQuantileOverTime}) _, _, mappedExpr, err := m.Parse(syntax.MustParseExpr(expr)) require.NoError(t, err) diff --git a/pkg/logql/shards.go b/pkg/logql/shards.go new file mode 100644 index 0000000000000..75281aa3c95b3 --- /dev/null +++ b/pkg/logql/shards.go @@ -0,0 +1,250 @@ +package logql + +import ( + "encoding/json" + + "github.com/grafana/dskit/multierror" + "github.com/pkg/errors" + "github.com/prometheus/common/model" + + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" +) + +type Shards []Shard + +type ShardVersion uint8 + +const ( + PowerOfTwoVersion ShardVersion = iota + BoundedVersion +) + +func (v ShardVersion) Strategy(resolver ShardResolver, defaultTargetShardBytes uint64) ShardingStrategy { + switch v { + case BoundedVersion: + return NewDynamicBoundsStrategy(resolver, defaultTargetShardBytes) + 
default: + // TODO(owen-d): refactor, ugly, etc, but the power of two strategy already populated + // the default target shard bytes through it's resolver + return NewPowerOfTwoStrategy(resolver) + } +} + +func (v ShardVersion) String() string { + switch v { + case PowerOfTwoVersion: + return "power_of_two" + case BoundedVersion: + return "bounded" + default: + return "unknown" + } +} + +var validStrategies = map[string]ShardVersion{ + PowerOfTwoVersion.String(): PowerOfTwoVersion, + BoundedVersion.String(): BoundedVersion, +} + +func ParseShardVersion(s string) (ShardVersion, error) { + v, ok := validStrategies[s] + if !ok { + return PowerOfTwoVersion, errors.Errorf("invalid shard version %s", s) + } + return v, nil +} + +type ShardResolver interface { + Shards(expr syntax.Expr) (int, uint64, error) + ShardingRanges(expr syntax.Expr, targetBytesPerShard uint64) ([]logproto.Shard, error) + GetStats(e syntax.Expr) (stats.Stats, error) +} + +type ConstantShards int + +func (s ConstantShards) Shards(_ syntax.Expr) (int, uint64, error) { return int(s), 0, nil } +func (s ConstantShards) ShardingRanges(_ syntax.Expr, _ uint64) ([]logproto.Shard, error) { + return sharding.LinearShards(int(s), 0), nil +} +func (s ConstantShards) GetStats(_ syntax.Expr) (stats.Stats, error) { return stats.Stats{}, nil } + +type ShardingStrategy interface { + Shards(expr syntax.Expr) (shards Shards, maxBytesPerShard uint64, err error) + Resolver() ShardResolver +} + +type DynamicBoundsStrategy struct { + resolver ShardResolver + targetBytesPerShard uint64 +} + +func (s DynamicBoundsStrategy) Shards(expr syntax.Expr) (Shards, uint64, error) { + shards, err := s.resolver.ShardingRanges(expr, s.targetBytesPerShard) + if err != nil { + return nil, 0, err + } + + var maxBytes uint64 + res := make(Shards, 0, len(shards)) + for _, shard := range shards { + if shard.Stats != nil { + maxBytes = max(maxBytes, shard.Stats.Bytes) + } + res = append(res, NewBoundedShard(shard)) + } + + return res, 
maxBytes, nil +} + +func (s DynamicBoundsStrategy) Resolver() ShardResolver { + return s.resolver +} + +func NewDynamicBoundsStrategy(resolver ShardResolver, targetBytesPerShard uint64) DynamicBoundsStrategy { + return DynamicBoundsStrategy{resolver: resolver, targetBytesPerShard: targetBytesPerShard} +} + +type PowerOfTwoStrategy struct { + resolver ShardResolver +} + +func NewPowerOfTwoStrategy(resolver ShardResolver) PowerOfTwoStrategy { + return PowerOfTwoStrategy{resolver: resolver} +} + +func (s PowerOfTwoStrategy) Resolver() ShardResolver { + return s.resolver +} + +func (s PowerOfTwoStrategy) Shards(expr syntax.Expr) (Shards, uint64, error) { + factor, bytesPerShard, err := s.resolver.Shards(expr) + if err != nil { + return nil, 0, err + } + + if factor == 0 { + return nil, bytesPerShard, nil + } + + res := make(Shards, 0, factor) + for i := 0; i < factor; i++ { + res = append(res, NewPowerOfTwoShard(index.ShardAnnotation{Of: uint32(factor), Shard: uint32(i)})) + } + return res, bytesPerShard, nil +} + +// Shard represents a shard annotation +// It holds either a power of two shard (legacy) or a bounded shard +type Shard struct { + PowerOfTwo *index.ShardAnnotation + Bounded *logproto.Shard +} + +func (s *Shard) Variant() ShardVersion { + if s.Bounded != nil { + return BoundedVersion + } + + return PowerOfTwoVersion +} + +// implement FingerprintFilter +func (s *Shard) Match(fp model.Fingerprint) bool { + if s.Bounded != nil { + return v1.BoundsFromProto(s.Bounded.Bounds).Match(fp) + } + + return s.PowerOfTwo.Match(fp) +} + +func (s *Shard) GetFromThrough() (model.Fingerprint, model.Fingerprint) { + if s.Bounded != nil { + return v1.BoundsFromProto(s.Bounded.Bounds).GetFromThrough() + } + + return s.PowerOfTwo.GetFromThrough() +} + +// convenience method for unaddressability concerns using constructors in literals (tests) +func (s Shard) Ptr() *Shard { + return &s +} + +func NewBoundedShard(shard logproto.Shard) Shard { + return Shard{Bounded: &shard} +} + 
+func NewPowerOfTwoShard(shard index.ShardAnnotation) Shard { + return Shard{PowerOfTwo: &shard} +} + +func (s Shard) String() string { + if s.Bounded != nil { + b, err := json.Marshal(s.Bounded) + if err != nil { + panic(err) + } + return string(b) + } + + return s.PowerOfTwo.String() +} + +func (xs Shards) Encode() (encoded []string) { + for _, shard := range xs { + encoded = append(encoded, shard.String()) + } + + return encoded +} + +// ParseShards parses a list of string encoded shards +func ParseShards(strs []string) (Shards, ShardVersion, error) { + if len(strs) == 0 { + return nil, PowerOfTwoVersion, nil + } + shards := make(Shards, 0, len(strs)) + + var prevVersion ShardVersion + for i, str := range strs { + shard, version, err := ParseShard(str) + if err != nil { + return nil, PowerOfTwoVersion, err + } + + if i == 0 { + prevVersion = version + } else if prevVersion != version { + return nil, PowerOfTwoVersion, errors.New("shards must be of the same version") + } + shards = append(shards, shard) + } + return shards, prevVersion, nil +} + +func ParseShard(s string) (Shard, ShardVersion, error) { + + var bounded logproto.Shard + v2Err := json.Unmarshal([]byte(s), &bounded) + if v2Err == nil { + return Shard{Bounded: &bounded}, BoundedVersion, nil + } + + old, v1Err := astmapper.ParseShard(s) + casted := old.TSDB() + if v1Err == nil { + return Shard{PowerOfTwo: &casted}, PowerOfTwoVersion, nil + } + + err := errors.Wrap( + multierror.New(v1Err, v2Err).Err(), + "failed to parse shard", + ) + return Shard{}, PowerOfTwoVersion, err +} diff --git a/pkg/logql/shards_test.go b/pkg/logql/shards_test.go new file mode 100644 index 0000000000000..d9f5b62ebb178 --- /dev/null +++ b/pkg/logql/shards_test.go @@ -0,0 +1,188 @@ +package logql + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" +) + +func TestShardString(t 
*testing.T) { + for _, rc := range []struct { + shard Shard + exp string + }{ + { + shard: Shard{ + PowerOfTwo: &index.ShardAnnotation{ + Shard: 1, + Of: 2, + }, + }, + exp: "1_of_2", + }, + { + shard: Shard{ + Bounded: &logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }, + }, + exp: `{"bounds":{"min":1,"max":2},"stats":null}`, + }, + { + shard: Shard{ + Bounded: &logproto.Shard{ + Stats: &logproto.IndexStatsResponse{ + Bytes: 1, + }, + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }, + }, + exp: `{"bounds":{"min":1,"max":2},"stats":{"streams":0,"chunks":0,"bytes":1,"entries":0}}`, + }, + { + // when more than one are present, + // return the newest successful version (v2) + shard: Shard{ + Bounded: &logproto.Shard{ + Stats: &logproto.IndexStatsResponse{ + Bytes: 1, + }, + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }, + PowerOfTwo: &index.ShardAnnotation{ + Shard: 1, + Of: 2, + }, + }, + exp: `{"bounds":{"min":1,"max":2},"stats":{"streams":0,"chunks":0,"bytes":1,"entries":0}}`, + }, + } { + t.Run(fmt.Sprintf("%+v", rc.shard), func(t *testing.T) { + require.Equal(t, rc.exp, rc.shard.String()) + }) + } +} + +func TestParseShard(t *testing.T) { + for _, rc := range []struct { + str string + version ShardVersion + exp Shard + }{ + { + str: "1_of_2", + version: PowerOfTwoVersion, + exp: Shard{ + PowerOfTwo: &index.ShardAnnotation{ + Shard: 1, + Of: 2, + }, + }, + }, + { + str: `{"bounds":{"min":1,"max":2},"stats":null}`, + version: BoundedVersion, + exp: Shard{ + Bounded: &logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }, + }, + }, + { + str: `{"bounds":{"min":1,"max":2},"stats":{"streams":0,"chunks":0,"bytes":1,"entries":0}}`, + version: BoundedVersion, + exp: Shard{ + Bounded: &logproto.Shard{ + Stats: &logproto.IndexStatsResponse{ + Bytes: 1, + }, + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }, + }, + }, + } { + t.Run(rc.str, func(t *testing.T) { + shard, version, err := ParseShard(rc.str) 
+ require.NoError(t, err) + require.Equal(t, rc.version, version) + require.Equal(t, rc.exp, shard) + }) + } +} + +func TestParseShards(t *testing.T) { + for _, rc := range []struct { + strs []string + version ShardVersion + exp Shards + err bool + }{ + { + strs: []string{"1_of_2", "1_of_2"}, + version: PowerOfTwoVersion, + exp: Shards{ + NewPowerOfTwoShard(index.ShardAnnotation{ + Shard: 1, + Of: 2, + }), + NewPowerOfTwoShard(index.ShardAnnotation{ + Shard: 1, + Of: 2, + }), + }, + }, + { + strs: []string{`{"bounds":{"min":1,"max":2},"stats":null}`, `{"bounds":{"min":1,"max":2},"stats":null}`}, + version: BoundedVersion, + exp: Shards{ + NewBoundedShard(logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }), + NewBoundedShard(logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }), + }, + }, + { + strs: []string{`{"bounds":{"min":1,"max":2},"stats":null}`, "1_of_2"}, + version: PowerOfTwoVersion, + err: true, + }, + } { + t.Run(fmt.Sprintf("%+v", rc.strs), func(t *testing.T) { + shards, version, err := ParseShards(rc.strs) + if rc.err { + require.Error(t, err) + return + } + require.NoError(t, err) + require.Equal(t, rc.version, version) + require.Equal(t, rc.exp, shards) + }) + } +} diff --git a/pkg/logql/sketch/quantile.go b/pkg/logql/sketch/quantile.go index 3b8b0f22fc8e0..093923a591366 100644 --- a/pkg/logql/sketch/quantile.go +++ b/pkg/logql/sketch/quantile.go @@ -10,7 +10,7 @@ import ( "github.com/DataDog/sketches-go/ddsketch/store" "github.com/influxdata/tdigest" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // QuantileSketch estimates quantiles over time. 
diff --git a/pkg/logql/sketch/series.go b/pkg/logql/sketch/series.go index 684e3080d6df5..0f434bd341c52 100644 --- a/pkg/logql/sketch/series.go +++ b/pkg/logql/sketch/series.go @@ -3,7 +3,7 @@ package sketch import ( "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ValueTypeTopKMatrix = "topk_matrix" diff --git a/pkg/logql/sketch/topk.go b/pkg/logql/sketch/topk.go index 021ab632ab552..e5efad409727b 100644 --- a/pkg/logql/sketch/topk.go +++ b/pkg/logql/sketch/topk.go @@ -6,7 +6,7 @@ import ( "sort" "unsafe" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/axiomhq/hyperloglog" "github.com/go-kit/log" diff --git a/pkg/logql/syntax/ast.go b/pkg/logql/syntax/ast.go index 060fc3cd11711..b0649570e8334 100644 --- a/pkg/logql/syntax/ast.go +++ b/pkg/logql/syntax/ast.go @@ -8,7 +8,7 @@ import ( "strings" "time" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" "github.com/pkg/errors" "github.com/prometheus/common/model" @@ -17,8 +17,8 @@ import ( "github.com/grafana/regexp/syntax" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) // Expr is the root expression which can be a SampleExpr or LogSelectorExpr @@ -329,7 +329,7 @@ func (e *PipelineExpr) HasFilter() bool { } type LineFilter struct { - Ty labels.MatchType + Ty log.LineMatchType Match string Op string } @@ -342,7 +342,7 @@ type LineFilterExpr struct { implicit } -func newLineFilterExpr(ty labels.MatchType, op, match string) *LineFilterExpr { +func newLineFilterExpr(ty log.LineMatchType, op, match string) *LineFilterExpr { return &LineFilterExpr{ LineFilter: LineFilter{ Ty: ty, @@ -355,7 +355,7 @@ func newLineFilterExpr(ty labels.MatchType, op, match string) *LineFilterExpr { func newOrLineFilter(left, right *LineFilterExpr) 
*LineFilterExpr { right.Ty = left.Ty - if left.Ty == labels.MatchEqual || left.Ty == labels.MatchRegexp { + if left.Ty == log.LineMatchEqual || left.Ty == log.LineMatchRegexp || left.Ty == log.LineMatchPattern { left.Or = right right.IsOrChild = true return left @@ -389,7 +389,7 @@ func (e *LineFilterExpr) Accept(v RootVisitor) { } // AddFilterExpr adds a filter expression to a logselector expression. -func AddFilterExpr(expr LogSelectorExpr, ty labels.MatchType, op, match string) (LogSelectorExpr, error) { +func AddFilterExpr(expr LogSelectorExpr, ty log.LineMatchType, op, match string) (LogSelectorExpr, error) { filter := newLineFilterExpr(ty, op, match) switch e := expr.(type) { case *MatchersExpr: @@ -412,16 +412,7 @@ func (e *LineFilterExpr) String() string { } if !e.IsOrChild { // Only write the type when we're not chaining "or" filters - switch e.Ty { - case labels.MatchRegexp: - sb.WriteString("|~") - case labels.MatchNotRegexp: - sb.WriteString("!~") - case labels.MatchEqual: - sb.WriteString("|=") - case labels.MatchNotEqual: - sb.WriteString("!=") - } + sb.WriteString(e.Ty.String()) sb.WriteString(" ") } diff --git a/pkg/logql/syntax/ast_test.go b/pkg/logql/syntax/ast_test.go index ece470516eb45..2ba435e0fe2de 100644 --- a/pkg/logql/syntax/ast_test.go +++ b/pkg/logql/syntax/ast_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) var labelBar, _ = ParseLabels("{app=\"bar\"}") @@ -24,7 +24,7 @@ func Test_logSelectorExpr_String(t *testing.T) { {`{foo="bar"}`, false}, {`{foo="bar", bar!="baz"}`, false}, {`{foo="bar", bar!="baz"} != "bip" !~ ".+bop"`, true}, - {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap"`, true}, + {`{foo="bar"} |= "baz" |~ "blip" |> "qux" !> "waldo" != "flip" !~ "flap"`, true}, {`{foo="bar", bar!="baz"} |= ""`, false}, {`{foo="bar", bar!="baz"} |= "" |= ip("::1")`, true}, {`{foo="bar", 
bar!="baz"} |= "" != ip("127.0.0.1")`, true}, @@ -32,7 +32,10 @@ func Test_logSelectorExpr_String(t *testing.T) { {`{foo="bar", bar!="baz"} |~ ".*"`, false}, {`{foo="bar", bar!="baz"} |= "" |= ""`, false}, {`{foo="bar", bar!="baz"} |~ "" |= "" |~ ".*"`, false}, - {`{foo="bar", bar!="baz"} != "bip" !~ ".+bop" | json`, true}, + {`{foo="bar", bar!="baz"} |> ""`, true}, + {`{foo="bar", bar!="baz"} |> "<_>"`, true}, + {`{foo="bar", bar!="baz"} |> "<_>" !> "<_> <_>"`, true}, + {`{foo="bar", bar!="baz"} != "bip" !~ ".+bop" |> "<_> bop <_>" | json`, true}, {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | logfmt`, true}, {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | logfmt --strict`, true}, {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | logfmt --strict --keep-empty`, true}, @@ -275,6 +278,7 @@ func Test_NilFilterDoesntPanic(t *testing.T) { `{namespace="dev", container_name="cart"} |= "bleep" |= "" |= "bloop"`, `{namespace="dev", container_name="cart"} |= "bleep" |= "" |= "bloop"`, `{namespace="dev", container_name="cart"} |= "bleep" |= "bloop" |= ""`, + `{namespace="dev", container_name="cart"} !> ""`, } { t.Run(tc, func(t *testing.T) { expr, err := ParseLogSelector(tc, true) @@ -355,6 +359,20 @@ func Test_FilterMatcher(t *testing.T) { }, []linecheck{{"foo", true}, {"bar", false}, {"foobar", true}}, }, + { + `{app="foo"} |> "foo <_>"`, + []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "app", "foo"), + }, + []linecheck{{"foo bar", true}, {"foo", false}}, + }, + { + `{app="foo"} !> "foo <_>"`, + []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "app", "foo"), + }, + []linecheck{{"foo bar", false}, {"foo", true}}, + }, { `{app="foo"} |~ "foo\\.bar\\.baz"`, []*labels.Matcher{ @@ -425,6 +443,20 @@ func Test_FilterMatcher(t *testing.T) { }, []linecheck{{"foo", false}, {"bar", true}, {"127.0.0.2", true}, {"127.0.0.1", false}}, }, + { + `{app="foo"} |> "foo" or "bar"`, + []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "app", "foo"), + }, + 
[]linecheck{{"foo", true}, {"bar", true}, {"none", false}}, + }, + { + `{app="foo"} !> "foo" or "bar"`, + []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "app", "foo"), + }, + []linecheck{{"foo", false}, {"bar", false}, {"none", true}}, + }, } { tt := tt t.Run(tt.q, func(t *testing.T) { @@ -449,16 +481,18 @@ func Test_FilterMatcher(t *testing.T) { func TestOrLineFilterTypes(t *testing.T) { for _, tt := range []struct { - ty labels.MatchType + ty log.LineMatchType }{ - {labels.MatchEqual}, - {labels.MatchNotEqual}, - {labels.MatchRegexp}, - {labels.MatchNotRegexp}, + {log.LineMatchEqual}, + {log.LineMatchNotEqual}, + {log.LineMatchRegexp}, + {log.LineMatchNotRegexp}, + {log.LineMatchPattern}, + {log.LineMatchNotPattern}, } { t.Run("right inherits left's type", func(t *testing.T) { left := &LineFilterExpr{LineFilter: LineFilter{Ty: tt.ty, Match: "something"}} - right := &LineFilterExpr{LineFilter: LineFilter{Ty: labels.MatchEqual, Match: "something"}} + right := &LineFilterExpr{LineFilter: LineFilter{Ty: log.LineMatchEqual, Match: "something"}} _ = newOrLineFilter(left, right) require.Equal(t, tt.ty, right.Ty) @@ -523,6 +557,14 @@ func TestStringer(t *testing.T) { in: `{app="foo"} |~ ip("127.0.0.1") or "foo"`, out: `{app="foo"} |~ ip("127.0.0.1") or "foo"`, }, + { + in: `{app="foo"} |> "foo <_> baz" or "foo <_>"`, + out: `{app="foo"} |> "foo <_> baz" or "foo <_>"`, + }, + { + in: `{app="foo"} |> "foo <_> baz" or "foo <_>" |> "foo <_> baz"`, + out: `{app="foo"} |> "foo <_> baz" or "foo <_>" |> "foo <_> baz"`, + }, { // !(A || B) == !A && !B in: `{app="foo"} != "foo" or "bar"`, out: `{app="foo"} != "foo" != "bar"`, @@ -539,6 +581,10 @@ func TestStringer(t *testing.T) { in: `{app="foo"} !~ ip("127.0.0.1") or "foo"`, out: `{app="foo"} !~ ip("127.0.0.1") !~ "foo"`, }, + { + in: `{app="foo"} !> "<_> foo <_>" or "foo <_>" !> "foo <_> baz"`, + out: `{app="foo"} !> "<_> foo <_>" !> "foo <_>" !> "foo <_> baz"`, + }, } { t.Run(tc.in, func(t *testing.T) { expr, err := 
ParseExpr(tc.in) @@ -563,19 +609,19 @@ func BenchmarkContainsFilter(b *testing.B) { }{ { "AllMatches", - `{app="foo"} |= "foo" |= "hello" |= "world" |= "bar"`, + `{app="foo"} |= "foo" |= "hello" |= "world" |= "bar" |> "<_> world <_>"`, }, { "OneMatches", - `{app="foo"} |= "foo" |= "not" |= "in" |= "there"`, + `{app="foo"} |= "foo" |= "not" |= "in" |= "there" |> "yet"`, }, { "MixedFiltersTrue", - `{app="foo"} |= "foo" != "not" |~ "hello.*bar" != "there" |= "world"`, + `{app="foo"} |= "foo" != "not" |~ "hello.*bar" != "there" |= "world" |> "<_> more than one <_>"`, }, { "MixedFiltersFalse", - `{app="foo"} |= "baz" != "not" |~ "hello.*bar" != "there" |= "world"`, + `{app="foo"} |= "baz" != "not" |~ "hello.*bar" != "there" |= "world" !> "<_> more than one"`, }, { "GreedyRegex", diff --git a/pkg/logql/syntax/clone.go b/pkg/logql/syntax/clone.go index a93aa53d599ac..d047218b0b607 100644 --- a/pkg/logql/syntax/clone.go +++ b/pkg/logql/syntax/clone.go @@ -3,7 +3,7 @@ package syntax import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) type cloneVisitor struct { diff --git a/pkg/logql/syntax/clone_test.go b/pkg/logql/syntax/clone_test.go index 58dc6efb03e2c..cfed2134c6bbc 100644 --- a/pkg/logql/syntax/clone_test.go +++ b/pkg/logql/syntax/clone_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) func TestClone(t *testing.T) { diff --git a/pkg/logql/syntax/expr.y b/pkg/logql/syntax/expr.y index 7e801480f4808..7f443831159bb 100644 --- a/pkg/logql/syntax/expr.y +++ b/pkg/logql/syntax/expr.y @@ -4,14 +4,14 @@ package syntax import ( "time" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) %} %union{ Expr Expr - Filter labels.MatchType + Filter 
log.LineMatchType Grouping *Grouping Labels []string LogExpr LogSelectorExpr @@ -134,7 +134,7 @@ import ( %token BYTES %token IDENTIFIER STRING NUMBER PARSER_FLAG %token DURATION RANGE -%token MATCHERS LABELS EQ RE NRE OPEN_BRACE CLOSE_BRACE OPEN_BRACKET CLOSE_BRACKET COMMA DOT PIPE_MATCH PIPE_EXACT +%token MATCHERS LABELS EQ RE NRE NPA OPEN_BRACE CLOSE_BRACE OPEN_BRACKET CLOSE_BRACKET COMMA DOT PIPE_MATCH PIPE_EXACT PIPE_PATTERN OPEN_PARENTHESIS CLOSE_PARENTHESIS BY WITHOUT COUNT_OVER_TIME RATE RATE_COUNTER SUM SORT SORT_DESC AVG MAX MIN COUNT STDDEV STDVAR BOTTOMK TOPK BYTES_OVER_TIME BYTES_RATE BOOL JSON REGEXP LOGFMT PIPE LINE_FMT LABEL_FMT UNWRAP AVG_OVER_TIME SUM_OVER_TIME MIN_OVER_TIME MAX_OVER_TIME STDVAR_OVER_TIME STDDEV_OVER_TIME QUANTILE_OVER_TIME BYTES_CONV DURATION_CONV DURATION_SECONDS_CONV @@ -239,10 +239,12 @@ labelReplaceExpr: ; filter: - PIPE_MATCH { $$ = labels.MatchRegexp } - | PIPE_EXACT { $$ = labels.MatchEqual } - | NRE { $$ = labels.MatchNotRegexp } - | NEQ { $$ = labels.MatchNotEqual } + PIPE_MATCH { $$ = log.LineMatchRegexp } + | PIPE_EXACT { $$ = log.LineMatchEqual } + | PIPE_PATTERN { $$ = log.LineMatchPattern } + | NRE { $$ = log.LineMatchNotRegexp } + | NEQ { $$ = log.LineMatchNotEqual } + | NPA { $$ = log.LineMatchNotPattern } ; selector: @@ -287,9 +289,9 @@ filterOp: ; orFilter: - STRING { $$ = newLineFilterExpr(labels.MatchEqual, "", $1) } - | filterOp OPEN_PARENTHESIS STRING CLOSE_PARENTHESIS { $$ = newLineFilterExpr(labels.MatchEqual, $1, $3) } - | STRING OR orFilter { $$ = newOrLineFilter(newLineFilterExpr(labels.MatchEqual, "", $1), $3) } + STRING { $$ = newLineFilterExpr(log.LineMatchEqual, "", $1) } + | filterOp OPEN_PARENTHESIS STRING CLOSE_PARENTHESIS { $$ = newLineFilterExpr(log.LineMatchEqual, $1, $3) } + | STRING OR orFilter { $$ = newOrLineFilter(newLineFilterExpr(log.LineMatchEqual, "", $1), $3) } ; lineFilter: diff --git a/pkg/logql/syntax/expr.y.go b/pkg/logql/syntax/expr.y.go index 1f38ab579f10b..2d322514a75fc 100644 
--- a/pkg/logql/syntax/expr.y.go +++ b/pkg/logql/syntax/expr.y.go @@ -4,9 +4,8 @@ package syntax import __yyfmt__ "fmt" - import ( - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" "github.com/prometheus/prometheus/model/labels" "time" ) @@ -14,7 +13,7 @@ import ( type exprSymType struct { yys int Expr Expr - Filter labels.MatchType + Filter log.LineMatchType Grouping *Grouping Labels []string LogExpr LogSelectorExpr @@ -88,83 +87,85 @@ const LABELS = 57354 const EQ = 57355 const RE = 57356 const NRE = 57357 -const OPEN_BRACE = 57358 -const CLOSE_BRACE = 57359 -const OPEN_BRACKET = 57360 -const CLOSE_BRACKET = 57361 -const COMMA = 57362 -const DOT = 57363 -const PIPE_MATCH = 57364 -const PIPE_EXACT = 57365 -const OPEN_PARENTHESIS = 57366 -const CLOSE_PARENTHESIS = 57367 -const BY = 57368 -const WITHOUT = 57369 -const COUNT_OVER_TIME = 57370 -const RATE = 57371 -const RATE_COUNTER = 57372 -const SUM = 57373 -const SORT = 57374 -const SORT_DESC = 57375 -const AVG = 57376 -const MAX = 57377 -const MIN = 57378 -const COUNT = 57379 -const STDDEV = 57380 -const STDVAR = 57381 -const BOTTOMK = 57382 -const TOPK = 57383 -const BYTES_OVER_TIME = 57384 -const BYTES_RATE = 57385 -const BOOL = 57386 -const JSON = 57387 -const REGEXP = 57388 -const LOGFMT = 57389 -const PIPE = 57390 -const LINE_FMT = 57391 -const LABEL_FMT = 57392 -const UNWRAP = 57393 -const AVG_OVER_TIME = 57394 -const SUM_OVER_TIME = 57395 -const MIN_OVER_TIME = 57396 -const MAX_OVER_TIME = 57397 -const STDVAR_OVER_TIME = 57398 -const STDDEV_OVER_TIME = 57399 -const QUANTILE_OVER_TIME = 57400 -const BYTES_CONV = 57401 -const DURATION_CONV = 57402 -const DURATION_SECONDS_CONV = 57403 -const FIRST_OVER_TIME = 57404 -const LAST_OVER_TIME = 57405 -const ABSENT_OVER_TIME = 57406 -const VECTOR = 57407 -const LABEL_REPLACE = 57408 -const UNPACK = 57409 -const OFFSET = 57410 -const PATTERN = 57411 -const IP = 57412 -const ON = 57413 -const IGNORING = 57414 -const GROUP_LEFT = 
57415 -const GROUP_RIGHT = 57416 -const DECOLORIZE = 57417 -const DROP = 57418 -const KEEP = 57419 -const OR = 57420 -const AND = 57421 -const UNLESS = 57422 -const CMP_EQ = 57423 -const NEQ = 57424 -const LT = 57425 -const LTE = 57426 -const GT = 57427 -const GTE = 57428 -const ADD = 57429 -const SUB = 57430 -const MUL = 57431 -const DIV = 57432 -const MOD = 57433 -const POW = 57434 +const NPA = 57358 +const OPEN_BRACE = 57359 +const CLOSE_BRACE = 57360 +const OPEN_BRACKET = 57361 +const CLOSE_BRACKET = 57362 +const COMMA = 57363 +const DOT = 57364 +const PIPE_MATCH = 57365 +const PIPE_EXACT = 57366 +const PIPE_PATTERN = 57367 +const OPEN_PARENTHESIS = 57368 +const CLOSE_PARENTHESIS = 57369 +const BY = 57370 +const WITHOUT = 57371 +const COUNT_OVER_TIME = 57372 +const RATE = 57373 +const RATE_COUNTER = 57374 +const SUM = 57375 +const SORT = 57376 +const SORT_DESC = 57377 +const AVG = 57378 +const MAX = 57379 +const MIN = 57380 +const COUNT = 57381 +const STDDEV = 57382 +const STDVAR = 57383 +const BOTTOMK = 57384 +const TOPK = 57385 +const BYTES_OVER_TIME = 57386 +const BYTES_RATE = 57387 +const BOOL = 57388 +const JSON = 57389 +const REGEXP = 57390 +const LOGFMT = 57391 +const PIPE = 57392 +const LINE_FMT = 57393 +const LABEL_FMT = 57394 +const UNWRAP = 57395 +const AVG_OVER_TIME = 57396 +const SUM_OVER_TIME = 57397 +const MIN_OVER_TIME = 57398 +const MAX_OVER_TIME = 57399 +const STDVAR_OVER_TIME = 57400 +const STDDEV_OVER_TIME = 57401 +const QUANTILE_OVER_TIME = 57402 +const BYTES_CONV = 57403 +const DURATION_CONV = 57404 +const DURATION_SECONDS_CONV = 57405 +const FIRST_OVER_TIME = 57406 +const LAST_OVER_TIME = 57407 +const ABSENT_OVER_TIME = 57408 +const VECTOR = 57409 +const LABEL_REPLACE = 57410 +const UNPACK = 57411 +const OFFSET = 57412 +const PATTERN = 57413 +const IP = 57414 +const ON = 57415 +const IGNORING = 57416 +const GROUP_LEFT = 57417 +const GROUP_RIGHT = 57418 +const DECOLORIZE = 57419 +const DROP = 57420 +const KEEP = 57421 +const OR = 57422 
+const AND = 57423 +const UNLESS = 57424 +const CMP_EQ = 57425 +const NEQ = 57426 +const LT = 57427 +const LTE = 57428 +const GT = 57429 +const GTE = 57430 +const ADD = 57431 +const SUB = 57432 +const MUL = 57433 +const DIV = 57434 +const MOD = 57435 +const POW = 57436 var exprToknames = [...]string{ "$end", @@ -182,6 +183,7 @@ var exprToknames = [...]string{ "EQ", "RE", "NRE", + "NPA", "OPEN_BRACE", "CLOSE_BRACE", "OPEN_BRACKET", @@ -190,6 +192,7 @@ var exprToknames = [...]string{ "DOT", "PIPE_MATCH", "PIPE_EXACT", + "PIPE_PATTERN", "OPEN_PARENTHESIS", "CLOSE_PARENTHESIS", "BY", @@ -266,7 +269,6 @@ const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 - var exprExca = [...]int{ -1, 1, 1, -1, @@ -275,121 +277,122 @@ var exprExca = [...]int{ const exprPrivate = 57344 -const exprLast = 592 +const exprLast = 608 var exprAct = [...]int{ - 287, 226, 82, 4, 212, 64, 180, 124, 202, 187, - 73, 198, 195, 63, 235, 5, 150, 185, 75, 2, - 56, 78, 48, 49, 50, 57, 58, 61, 62, 59, + 289, 228, 84, 4, 214, 64, 182, 126, 204, 189, + 75, 200, 197, 63, 237, 5, 152, 187, 77, 2, + 56, 80, 48, 49, 50, 57, 58, 61, 62, 59, 60, 51, 52, 53, 54, 55, 56, 49, 50, 57, 58, 61, 62, 59, 60, 51, 52, 53, 54, 55, 56, 57, 58, 61, 62, 59, 60, 51, 52, 53, - 54, 55, 56, 51, 52, 53, 54, 55, 56, 107, - 146, 148, 149, 113, 53, 54, 55, 56, 205, 148, - 149, 281, 215, 137, 290, 154, 164, 165, 213, 293, - 138, 159, 67, 293, 71, 214, 152, 295, 71, 162, - 163, 69, 70, 292, 345, 69, 70, 364, 161, 364, - 92, 384, 166, 167, 168, 169, 170, 171, 172, 173, - 174, 175, 176, 177, 178, 179, 71, 228, 379, 290, - 337, 228, 304, 69, 70, 192, 134, 354, 189, 147, - 200, 204, 83, 84, 372, 367, 211, 206, 209, 210, - 207, 208, 140, 217, 140, 128, 134, 291, 108, 228, - 233, 72, 371, 337, 139, 72, 227, 291, 292, 229, - 230, 182, 344, 238, 222, 128, 120, 121, 119, 225, - 129, 131, 295, 81, 71, 83, 84, 369, 246, 247, - 248, 69, 70, 72, 296, 292, 222, 329, 122, 225, - 123, 292, 250, 357, 71, 292, 130, 132, 133, 
347, - 304, 69, 70, 304, 71, 353, 71, 228, 352, 299, - 283, 69, 70, 69, 70, 134, 285, 288, 328, 294, - 181, 297, 237, 107, 300, 113, 301, 228, 71, 289, - 152, 286, 237, 298, 128, 69, 70, 66, 304, 228, - 222, 72, 314, 351, 302, 134, 237, 308, 310, 313, - 315, 316, 312, 241, 200, 204, 323, 318, 322, 290, - 182, 72, 231, 223, 128, 253, 311, 134, 134, 304, - 338, 72, 237, 72, 306, 304, 326, 142, 141, 330, - 305, 332, 334, 182, 336, 107, 128, 128, 361, 335, - 346, 331, 309, 237, 107, 72, 264, 348, 219, 265, - 237, 263, 134, 325, 324, 282, 13, 120, 121, 119, - 245, 129, 131, 239, 153, 244, 382, 182, 183, 181, - 236, 128, 358, 359, 340, 341, 342, 107, 360, 122, - 243, 123, 242, 216, 362, 363, 158, 130, 132, 133, - 368, 183, 181, 151, 157, 260, 156, 218, 261, 16, - 259, 88, 13, 87, 374, 80, 375, 376, 13, 378, - 153, 255, 262, 350, 251, 303, 6, 257, 380, 256, - 21, 22, 23, 36, 45, 46, 37, 39, 40, 38, - 41, 42, 43, 44, 24, 25, 254, 240, 232, 224, - 252, 79, 234, 377, 26, 27, 28, 29, 30, 31, - 32, 13, 366, 77, 33, 34, 35, 47, 19, 6, - 365, 258, 343, 21, 22, 23, 36, 45, 46, 37, - 39, 40, 38, 41, 42, 43, 44, 24, 25, 17, - 18, 279, 333, 160, 280, 155, 278, 26, 27, 28, - 29, 30, 31, 32, 13, 89, 86, 33, 34, 35, - 47, 19, 6, 320, 321, 383, 21, 22, 23, 36, - 45, 46, 37, 39, 40, 38, 41, 42, 43, 44, - 24, 25, 17, 18, 276, 85, 373, 277, 381, 275, - 26, 27, 28, 29, 30, 31, 32, 370, 356, 355, - 33, 34, 35, 47, 19, 93, 94, 95, 96, 97, - 98, 99, 100, 101, 102, 103, 104, 105, 106, 144, - 188, 125, 327, 249, 273, 17, 18, 274, 270, 272, - 349, 271, 317, 269, 143, 267, 3, 145, 268, 188, - 266, 319, 186, 74, 196, 126, 307, 284, 221, 220, - 219, 218, 193, 191, 190, 203, 199, 188, 79, 196, - 111, 112, 194, 116, 201, 118, 197, 117, 115, 114, - 184, 65, 135, 127, 136, 109, 110, 91, 90, 11, - 10, 9, 20, 12, 15, 8, 339, 14, 7, 76, - 68, 1, + 54, 55, 56, 51, 52, 53, 54, 55, 56, 109, + 148, 150, 151, 115, 53, 54, 55, 56, 207, 150, + 151, 283, 217, 139, 166, 167, 292, 156, 215, 295, + 216, 72, 74, 161, 
72, 74, 164, 165, 154, 69, + 70, 71, 69, 70, 71, 297, 347, 140, 67, 294, + 163, 366, 136, 366, 168, 169, 170, 171, 172, 173, + 174, 175, 176, 177, 178, 179, 180, 181, 184, 230, + 339, 292, 94, 130, 255, 136, 339, 194, 85, 86, + 191, 149, 202, 206, 293, 136, 386, 369, 213, 208, + 211, 212, 209, 210, 142, 219, 130, 363, 306, 141, + 73, 184, 235, 73, 356, 381, 130, 239, 229, 374, + 294, 231, 232, 142, 110, 240, 294, 122, 123, 121, + 227, 131, 133, 297, 294, 72, 74, 185, 183, 316, + 248, 249, 250, 69, 70, 71, 373, 298, 371, 124, + 295, 125, 136, 359, 252, 72, 74, 132, 134, 135, + 293, 349, 239, 69, 70, 71, 72, 74, 184, 224, + 230, 183, 285, 130, 69, 70, 71, 346, 287, 290, + 330, 296, 136, 299, 314, 109, 302, 115, 303, 304, + 230, 291, 154, 288, 331, 300, 306, 72, 74, 243, + 294, 230, 355, 130, 73, 69, 70, 71, 306, 310, + 312, 315, 317, 318, 354, 233, 202, 206, 325, 320, + 324, 292, 340, 83, 73, 85, 86, 185, 183, 136, + 144, 266, 230, 221, 267, 73, 265, 262, 328, 220, + 263, 332, 261, 334, 336, 184, 338, 109, 239, 306, + 130, 337, 348, 333, 239, 353, 109, 224, 227, 350, + 327, 306, 306, 72, 74, 239, 73, 308, 307, 239, + 313, 69, 70, 71, 13, 224, 311, 143, 342, 343, + 344, 379, 301, 155, 360, 361, 384, 241, 326, 109, + 362, 238, 284, 153, 247, 246, 364, 365, 230, 264, + 225, 245, 370, 13, 244, 260, 218, 160, 159, 158, + 16, 90, 155, 89, 72, 74, 376, 82, 377, 378, + 13, 380, 69, 70, 71, 352, 253, 305, 259, 6, + 382, 258, 73, 21, 22, 23, 36, 45, 46, 37, + 39, 40, 38, 41, 42, 43, 44, 24, 25, 66, + 256, 242, 234, 226, 257, 254, 368, 26, 27, 28, + 29, 30, 31, 32, 81, 146, 367, 33, 34, 35, + 47, 19, 236, 281, 345, 335, 282, 79, 280, 322, + 323, 145, 13, 73, 147, 278, 162, 88, 279, 87, + 277, 6, 17, 18, 385, 21, 22, 23, 36, 45, + 46, 37, 39, 40, 38, 41, 42, 43, 44, 24, + 25, 275, 272, 127, 276, 273, 274, 271, 383, 26, + 27, 28, 29, 30, 31, 32, 372, 358, 357, 33, + 34, 35, 47, 19, 157, 269, 329, 375, 270, 190, + 268, 3, 251, 190, 13, 319, 188, 321, 76, 309, + 198, 128, 286, 6, 
17, 18, 136, 21, 22, 23, + 36, 45, 46, 37, 39, 40, 38, 41, 42, 43, + 44, 24, 25, 223, 222, 221, 220, 130, 195, 193, + 192, 26, 27, 28, 29, 30, 31, 32, 91, 351, + 205, 33, 34, 35, 47, 19, 201, 190, 122, 123, + 121, 81, 131, 133, 198, 113, 114, 196, 118, 203, + 120, 199, 119, 117, 116, 186, 17, 18, 65, 137, + 124, 129, 125, 138, 111, 112, 93, 92, 132, 134, + 135, 11, 10, 9, 20, 12, 15, 8, 95, 96, + 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, + 107, 108, 341, 14, 7, 78, 68, 1, } var exprPact = [...]int{ - 352, -1000, -56, -1000, -1000, 199, 352, -1000, -1000, -1000, - -1000, -1000, -1000, 396, 341, 159, -1000, 478, 449, 339, - 337, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + 353, -1000, -58, -1000, -1000, 349, 353, -1000, -1000, -1000, + -1000, -1000, -1000, 409, 341, 247, -1000, 432, 430, 337, + 335, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 66, 66, - 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, - 66, 66, 66, 199, -1000, 223, 272, 5, 84, -1000, - -1000, -1000, -1000, 263, 262, -56, 517, -1000, -1000, 57, - 346, 438, 332, 330, 322, -1000, -1000, 352, 436, 352, - 28, 13, -1000, 352, 352, 352, 352, 352, 352, 352, - 352, 352, 352, 352, 352, 352, 352, -1000, -1000, -1000, - -1000, -1000, -1000, 273, -1000, -1000, -1000, -1000, -1000, 534, - 552, 548, -1000, 547, -1000, -1000, -1000, -1000, 220, 546, - -1000, 554, 551, 550, 65, -1000, -1000, 82, 4, 319, - -1000, -1000, -1000, -1000, -1000, 553, 545, 544, 543, 542, - 248, 379, 189, 300, 247, 378, 395, 305, 298, 377, - 238, -42, 318, 316, 301, 296, -30, -30, -15, -15, - -72, -72, -72, -72, -24, -24, -24, -24, -24, -24, - 273, 220, 220, 220, 515, 354, -1000, -1000, 387, 354, - -1000, -1000, 250, -1000, 376, -1000, 358, 359, -1000, 57, - -1000, 357, -1000, 57, -1000, 351, 302, 531, 524, 520, - 480, 437, -1000, 3, 291, 82, 541, -1000, -1000, -1000, - -1000, 
-1000, -1000, 116, 300, 201, 157, 83, 131, 169, - 194, 116, 352, 229, 355, 265, -1000, -1000, 259, -1000, - 540, -1000, 277, 251, 237, 227, 307, 273, 151, -1000, - 354, 552, 526, -1000, 539, 458, 551, 550, 290, -1000, - -1000, -1000, 289, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 86, 86, + 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, + 86, 86, 86, 349, -1000, 76, 501, 3, 101, -1000, + -1000, -1000, -1000, -1000, -1000, 300, 253, -58, 413, -1000, + -1000, 57, 336, 477, 333, 332, 331, -1000, -1000, 353, + 429, 353, 23, 9, -1000, 353, 353, 353, 353, 353, + 353, 353, 353, 353, 353, 353, 353, 353, 353, -1000, + -1000, -1000, -1000, -1000, -1000, 197, -1000, -1000, -1000, -1000, + -1000, 488, 542, 524, -1000, 523, -1000, -1000, -1000, -1000, + 227, 522, -1000, 549, 541, 535, 65, -1000, -1000, 82, + 2, 330, -1000, -1000, -1000, -1000, -1000, 546, 520, 519, + 518, 517, 323, 382, 298, 307, 238, 381, 415, 314, + 310, 380, 222, -44, 328, 325, 319, 318, -32, -32, + -17, -17, -74, -74, -74, -74, -26, -26, -26, -26, + -26, -26, 197, 227, 227, 227, 484, 355, -1000, -1000, + 392, 355, -1000, -1000, 107, -1000, 379, -1000, 391, 360, + -1000, 57, -1000, 357, -1000, 57, -1000, 283, 277, 481, + 458, 457, 431, 419, -1000, 1, 316, 82, 496, -1000, + -1000, -1000, -1000, -1000, -1000, 110, 307, 201, 134, 190, + 130, 170, 305, 110, 353, 212, 356, 291, -1000, -1000, + 290, -1000, 493, -1000, 299, 293, 207, 162, 274, 197, + 140, -1000, 355, 542, 489, -1000, 495, 424, 541, 535, + 312, -1000, -1000, -1000, 284, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, 82, 516, -1000, 203, -1000, 172, 111, 55, 111, - 433, 16, 220, 16, 153, 275, 412, 147, 79, -1000, - -1000, 184, -1000, 352, 525, -1000, -1000, 353, 228, -1000, - 193, -1000, -1000, 190, -1000, 112, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, 493, 492, -1000, 178, -1000, 116, - 55, 111, 55, -1000, 
-1000, 273, -1000, 16, -1000, 274, - -1000, -1000, -1000, 61, 410, 402, 120, 116, 162, -1000, - 491, -1000, -1000, -1000, -1000, 137, 119, -1000, -1000, 55, - -1000, 481, 59, 55, 46, 16, 16, 393, -1000, -1000, - 349, -1000, -1000, 103, 55, -1000, -1000, 16, 482, -1000, - -1000, 306, 459, 86, -1000, + -1000, -1000, -1000, 82, 480, -1000, 203, -1000, 217, 232, + 59, 232, 416, 16, 227, 16, 126, 267, 414, 200, + 79, -1000, -1000, 184, -1000, 353, 534, -1000, -1000, 354, + 278, -1000, 237, -1000, -1000, 225, -1000, 137, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, 472, 471, -1000, 176, + -1000, 110, 59, 232, 59, -1000, -1000, 197, -1000, 16, + -1000, 131, -1000, -1000, -1000, 61, 406, 396, 120, 110, + 171, -1000, 470, -1000, -1000, -1000, -1000, 169, 142, -1000, + -1000, 59, -1000, 482, 63, 59, 52, 16, 16, 321, + -1000, -1000, 350, -1000, -1000, 138, 59, -1000, -1000, 16, + 462, -1000, -1000, 315, 438, 119, -1000, } var exprPgo = [...]int{ - 0, 591, 18, 590, 2, 14, 536, 3, 16, 7, - 589, 588, 587, 586, 15, 585, 584, 583, 582, 95, - 581, 580, 579, 455, 578, 577, 576, 575, 13, 5, - 574, 573, 572, 6, 571, 92, 4, 570, 569, 568, - 567, 566, 11, 565, 564, 8, 563, 12, 562, 9, - 17, 561, 560, 1, 545, 521, 0, + 0, 607, 18, 606, 2, 14, 491, 3, 16, 7, + 605, 604, 603, 602, 15, 587, 586, 585, 584, 90, + 583, 582, 581, 538, 577, 576, 575, 574, 13, 5, + 573, 571, 569, 6, 568, 108, 4, 565, 564, 563, + 562, 561, 11, 560, 559, 8, 558, 12, 557, 9, + 17, 556, 555, 1, 501, 463, 0, } var exprR1 = [...]int{ @@ -399,23 +402,23 @@ var exprR1 = [...]int{ 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 53, 53, 53, 13, 13, 13, 11, 11, 11, 11, 15, 15, 15, 15, 15, 15, 22, 3, 3, 3, - 3, 14, 14, 14, 10, 10, 9, 9, 9, 9, - 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, - 29, 29, 29, 19, 36, 36, 36, 35, 35, 35, - 34, 34, 34, 37, 37, 27, 27, 26, 26, 26, - 26, 52, 51, 51, 38, 39, 47, 47, 48, 48, - 48, 46, 33, 33, 33, 33, 33, 33, 33, 33, - 33, 49, 49, 50, 50, 55, 55, 54, 54, 32, - 32, 32, 32, 32, 32, 32, 30, 30, 30, 
30, - 30, 30, 30, 31, 31, 31, 31, 31, 31, 31, - 42, 42, 41, 41, 40, 45, 45, 44, 44, 43, - 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, - 20, 20, 20, 20, 20, 24, 24, 25, 25, 25, - 25, 23, 23, 23, 23, 23, 23, 23, 23, 21, - 21, 21, 17, 18, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 12, 12, 12, 12, 12, + 3, 3, 3, 14, 14, 14, 10, 10, 9, 9, + 9, 9, 28, 28, 29, 29, 29, 29, 29, 29, + 29, 29, 29, 29, 29, 19, 36, 36, 36, 35, + 35, 35, 34, 34, 34, 37, 37, 27, 27, 26, + 26, 26, 26, 52, 51, 51, 38, 39, 47, 47, + 48, 48, 48, 46, 33, 33, 33, 33, 33, 33, + 33, 33, 33, 49, 49, 50, 50, 55, 55, 54, + 54, 32, 32, 32, 32, 32, 32, 32, 30, 30, + 30, 30, 30, 30, 30, 31, 31, 31, 31, 31, + 31, 31, 42, 42, 41, 41, 40, 45, 45, 44, + 44, 43, 20, 20, 20, 20, 20, 20, 20, 20, + 20, 20, 20, 20, 20, 20, 20, 24, 24, 25, + 25, 25, 25, 23, 23, 23, 23, 23, 23, 23, + 23, 21, 21, 21, 17, 18, 16, 16, 16, 16, + 16, 16, 16, 16, 16, 16, 16, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, - 56, 5, 5, 4, 4, 4, 4, + 12, 12, 56, 5, 5, 4, 4, 4, 4, } var exprR2 = [...]int{ @@ -425,107 +428,107 @@ var exprR2 = [...]int{ 4, 5, 6, 7, 3, 4, 4, 5, 3, 2, 3, 6, 3, 1, 1, 1, 4, 6, 5, 7, 4, 5, 5, 6, 7, 7, 12, 1, 1, 1, - 1, 3, 3, 2, 1, 3, 3, 3, 3, 3, - 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 1, 1, 4, 3, 2, 5, 4, - 1, 3, 2, 1, 2, 1, 2, 1, 2, 1, - 2, 2, 3, 2, 2, 1, 3, 3, 1, 3, - 3, 2, 1, 1, 1, 1, 3, 2, 3, 3, - 3, 3, 1, 1, 3, 6, 6, 1, 1, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 1, 1, 1, 3, 3, 2, 1, 3, 3, 3, + 3, 3, 1, 2, 1, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 1, 1, 4, 3, 2, + 5, 4, 1, 3, 2, 1, 2, 1, 2, 1, + 2, 1, 2, 2, 3, 2, 2, 1, 3, 3, + 1, 3, 3, 2, 1, 1, 1, 1, 3, 2, + 3, 3, 3, 3, 1, 1, 3, 6, 6, 1, + 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 1, 1, 1, 3, 2, 1, 1, 1, 3, 2, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 0, 1, 5, 4, 5, - 4, 1, 1, 2, 4, 5, 2, 4, 5, 1, - 2, 2, 4, 1, 1, 1, 1, 1, 1, 1, + 3, 3, 1, 1, 1, 3, 2, 1, 1, 1, + 3, 2, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 0, 1, 5, + 4, 5, 4, 1, 1, 2, 
4, 5, 2, 4, + 5, 1, 2, 2, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 2, 1, 3, 4, 4, 3, 3, + 1, 1, 2, 1, 3, 4, 4, 3, 3, } var exprChk = [...]int{ - -1000, -1, -2, -6, -7, -14, 24, -11, -15, -20, - -21, -22, -17, 16, -12, -16, 7, 87, 88, 66, - -18, 28, 29, 30, 42, 43, 52, 53, 54, 55, - 56, 57, 58, 62, 63, 64, 31, 34, 37, 35, - 36, 38, 39, 40, 41, 32, 33, 65, 78, 79, - 80, 87, 88, 89, 90, 91, 92, 81, 82, 85, - 86, 83, 84, -28, -29, -34, 48, -35, -3, 22, - 23, 15, 82, -7, -6, -2, -10, 17, -9, 5, - 24, 24, -4, 26, 27, 7, 7, 24, 24, -23, - -24, -25, 44, -23, -23, -23, -23, -23, -23, -23, - -23, -23, -23, -23, -23, -23, -23, -29, -35, -27, - -26, -52, -51, -33, -38, -39, -46, -40, -43, 47, - 45, 46, 67, 69, -9, -55, -54, -31, 24, 49, - 75, 50, 76, 77, 5, -32, -30, 78, 6, -19, - 70, 25, 25, 17, 2, 20, 13, 82, 14, 15, - -8, 7, -14, 24, -7, 7, 24, 24, 24, -7, - 7, -2, 71, 72, 73, 74, -2, -2, -2, -2, + -1000, -1, -2, -6, -7, -14, 26, -11, -15, -20, + -21, -22, -17, 17, -12, -16, 7, 89, 90, 68, + -18, 30, 31, 32, 44, 45, 54, 55, 56, 57, + 58, 59, 60, 64, 65, 66, 33, 36, 39, 37, + 38, 40, 41, 42, 43, 34, 35, 67, 80, 81, + 82, 89, 90, 91, 92, 93, 94, 83, 84, 87, + 88, 85, 86, -28, -29, -34, 50, -35, -3, 23, + 24, 25, 15, 84, 16, -7, -6, -2, -10, 18, + -9, 5, 26, 26, -4, 28, 29, 7, 7, 26, + 26, -23, -24, -25, 46, -23, -23, -23, -23, -23, + -23, -23, -23, -23, -23, -23, -23, -23, -23, -29, + -35, -27, -26, -52, -51, -33, -38, -39, -46, -40, + -43, 49, 47, 48, 69, 71, -9, -55, -54, -31, + 26, 51, 77, 52, 78, 79, 5, -32, -30, 80, + 6, -19, 72, 27, 27, 18, 2, 21, 13, 84, + 14, 15, -8, 7, -14, 26, -7, 7, 26, 26, + 26, -7, 7, -2, 73, 74, 75, 76, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, - -33, 79, 20, 78, -37, -50, 8, -49, 5, -50, - 6, 6, -33, 6, -48, -47, 5, -41, -42, 5, - -9, -44, -45, 5, -9, 13, 82, 85, 86, 83, - 84, 81, -36, 6, -19, 78, 24, -9, 6, 6, - 6, 6, 2, 25, 20, 10, -53, -28, 48, -14, - -8, 25, 20, -7, 7, -5, 25, 5, -5, 25, - 20, 
25, 24, 24, 24, 24, -33, -33, -33, 8, - -50, 20, 13, 25, 20, 13, 20, 20, 70, 9, - 4, 7, 70, 9, 4, 7, 9, 4, 7, 9, - 4, 7, 9, 4, 7, 9, 4, 7, 9, 4, - 7, 78, 24, -36, 6, -4, -8, -56, -53, -28, - 68, 10, 48, 10, -53, 51, 25, -53, -28, 25, - -4, -7, 25, 20, 20, 25, 25, 6, -5, 25, - -5, 25, 25, -5, 25, -5, -49, 6, -47, 2, - 5, 6, -42, -45, 24, 24, -36, 6, 25, 25, - -53, -28, -53, 9, -56, -33, -56, 10, 5, -13, - 59, 60, 61, 10, 25, 25, -53, 25, -7, 5, - 20, 25, 25, 25, 25, 6, 6, 25, -4, -53, - -56, 24, -56, -53, 48, 10, 10, 25, -4, 25, - 6, 25, 25, 5, -53, -56, -56, 10, 20, 25, - -56, 6, 20, 6, 25, + -2, -2, -33, 81, 21, 80, -37, -50, 8, -49, + 5, -50, 6, 6, -33, 6, -48, -47, 5, -41, + -42, 5, -9, -44, -45, 5, -9, 13, 84, 87, + 88, 85, 86, 83, -36, 6, -19, 80, 26, -9, + 6, 6, 6, 6, 2, 27, 21, 10, -53, -28, + 50, -14, -8, 27, 21, -7, 7, -5, 27, 5, + -5, 27, 21, 27, 26, 26, 26, 26, -33, -33, + -33, 8, -50, 21, 13, 27, 21, 13, 21, 21, + 72, 9, 4, 7, 72, 9, 4, 7, 9, 4, + 7, 9, 4, 7, 9, 4, 7, 9, 4, 7, + 9, 4, 7, 80, 26, -36, 6, -4, -8, -56, + -53, -28, 70, 10, 50, 10, -53, 53, 27, -53, + -28, 27, -4, -7, 27, 21, 21, 27, 27, 6, + -5, 27, -5, 27, 27, -5, 27, -5, -49, 6, + -47, 2, 5, 6, -42, -45, 26, 26, -36, 6, + 27, 27, -53, -28, -53, 9, -56, -33, -56, 10, + 5, -13, 61, 62, 63, 10, 27, 27, -53, 27, + -7, 5, 21, 27, 27, 27, 27, 6, 6, 27, + -4, -53, -56, 26, -56, -53, 50, 10, 10, 27, + -4, 27, 6, 27, 27, 5, -53, -56, -56, 10, + 21, 27, -56, 6, 21, 6, 27, } var exprDef = [...]int{ 0, -2, 1, 2, 3, 11, 0, 4, 5, 6, - 7, 8, 9, 0, 0, 0, 189, 0, 0, 0, - 0, 205, 206, 207, 208, 209, 210, 211, 212, 213, - 214, 215, 216, 217, 218, 219, 194, 195, 196, 197, - 198, 199, 200, 201, 202, 203, 204, 193, 175, 175, - 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, - 175, 175, 175, 12, 70, 72, 0, 90, 0, 57, - 58, 59, 60, 3, 2, 0, 0, 63, 64, 0, - 0, 0, 0, 0, 0, 190, 191, 0, 0, 0, - 181, 182, 176, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 71, 92, 73, - 74, 75, 76, 77, 78, 79, 80, 81, 82, 95, - 97, 
0, 99, 0, 112, 113, 114, 115, 0, 0, - 105, 0, 0, 0, 0, 127, 128, 0, 87, 0, - 83, 10, 13, 61, 62, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 3, 189, 0, 0, 0, 3, - 0, 160, 0, 0, 183, 186, 161, 162, 163, 164, + 7, 8, 9, 0, 0, 0, 191, 0, 0, 0, + 0, 207, 208, 209, 210, 211, 212, 213, 214, 215, + 216, 217, 218, 219, 220, 221, 196, 197, 198, 199, + 200, 201, 202, 203, 204, 205, 206, 195, 177, 177, + 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, + 177, 177, 177, 12, 72, 74, 0, 92, 0, 57, + 58, 59, 60, 61, 62, 3, 2, 0, 0, 65, + 66, 0, 0, 0, 0, 0, 0, 192, 193, 0, + 0, 0, 183, 184, 178, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 73, + 94, 75, 76, 77, 78, 79, 80, 81, 82, 83, + 84, 97, 99, 0, 101, 0, 114, 115, 116, 117, + 0, 0, 107, 0, 0, 0, 0, 129, 130, 0, + 89, 0, 85, 10, 13, 63, 64, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 3, 191, 0, 0, + 0, 3, 0, 162, 0, 0, 185, 188, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, - 117, 0, 0, 0, 96, 103, 93, 123, 122, 101, - 98, 100, 0, 104, 111, 108, 0, 154, 152, 150, - 151, 159, 157, 155, 156, 0, 0, 0, 0, 0, - 0, 0, 91, 84, 0, 0, 0, 65, 66, 67, - 68, 69, 39, 46, 0, 14, 0, 0, 0, 0, - 0, 50, 0, 3, 189, 0, 225, 221, 0, 226, - 0, 192, 0, 0, 0, 0, 118, 119, 120, 94, - 102, 0, 0, 116, 0, 0, 0, 0, 0, 134, - 141, 148, 0, 133, 140, 147, 129, 136, 143, 130, - 137, 144, 131, 138, 145, 132, 139, 146, 135, 142, - 149, 0, 0, 89, 0, 48, 0, 15, 18, 34, - 0, 22, 0, 26, 0, 0, 0, 0, 0, 38, - 52, 3, 51, 0, 0, 223, 224, 0, 0, 178, - 0, 180, 184, 0, 187, 0, 124, 121, 109, 110, - 106, 107, 153, 158, 0, 0, 86, 0, 88, 47, - 19, 35, 36, 220, 23, 42, 27, 30, 40, 0, - 43, 44, 45, 16, 0, 0, 0, 53, 3, 222, - 0, 177, 179, 185, 188, 0, 0, 85, 49, 37, - 31, 0, 17, 20, 0, 24, 28, 0, 54, 55, - 0, 125, 126, 0, 21, 25, 29, 32, 0, 41, - 33, 0, 0, 0, 56, + 175, 176, 119, 0, 0, 0, 98, 105, 95, 125, + 124, 103, 100, 102, 0, 106, 113, 110, 0, 156, + 154, 152, 153, 161, 159, 157, 158, 0, 0, 0, + 0, 0, 0, 0, 93, 86, 0, 0, 0, 67, + 68, 69, 70, 71, 39, 46, 0, 14, 0, 0, + 0, 0, 0, 50, 0, 3, 
191, 0, 227, 223, + 0, 228, 0, 194, 0, 0, 0, 0, 120, 121, + 122, 96, 104, 0, 0, 118, 0, 0, 0, 0, + 0, 136, 143, 150, 0, 135, 142, 149, 131, 138, + 145, 132, 139, 146, 133, 140, 147, 134, 141, 148, + 137, 144, 151, 0, 0, 91, 0, 48, 0, 15, + 18, 34, 0, 22, 0, 26, 0, 0, 0, 0, + 0, 38, 52, 3, 51, 0, 0, 225, 226, 0, + 0, 180, 0, 182, 186, 0, 189, 0, 126, 123, + 111, 112, 108, 109, 155, 160, 0, 0, 88, 0, + 90, 47, 19, 35, 36, 222, 23, 42, 27, 30, + 40, 0, 43, 44, 45, 16, 0, 0, 0, 53, + 3, 224, 0, 179, 181, 187, 190, 0, 0, 87, + 49, 37, 31, 0, 17, 20, 0, 24, 28, 0, + 54, 55, 0, 127, 128, 0, 21, 25, 29, 32, + 0, 41, 33, 0, 0, 0, 56, } var exprTok1 = [...]int{ @@ -542,7 +545,7 @@ var exprTok2 = [...]int{ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, - 92, + 92, 93, 94, } var exprTok3 = [...]int{ 0, @@ -554,7 +557,6 @@ var exprErrorMessages = [...]struct { msg string }{} - /* parser for yacc output */ var ( @@ -1162,855 +1164,865 @@ exprdefault: case 57: exprDollar = exprS[exprpt-1 : exprpt+1] { - exprVAL.Filter = labels.MatchRegexp + exprVAL.Filter = log.LineMatchRegexp } case 58: exprDollar = exprS[exprpt-1 : exprpt+1] { - exprVAL.Filter = labels.MatchEqual + exprVAL.Filter = log.LineMatchEqual } case 59: exprDollar = exprS[exprpt-1 : exprpt+1] { - exprVAL.Filter = labels.MatchNotRegexp + exprVAL.Filter = log.LineMatchPattern } case 60: exprDollar = exprS[exprpt-1 : exprpt+1] { - exprVAL.Filter = labels.MatchNotEqual + exprVAL.Filter = log.LineMatchNotRegexp } case 61: + exprDollar = exprS[exprpt-1 : exprpt+1] + { + exprVAL.Filter = log.LineMatchNotEqual + } + case 62: + exprDollar = exprS[exprpt-1 : exprpt+1] + { + exprVAL.Filter = log.LineMatchNotPattern + } + case 63: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Selector = exprDollar[2].Matchers } - case 62: + case 64: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Selector = exprDollar[2].Matchers } - case 63: + case 65: exprDollar = 
exprS[exprpt-2 : exprpt+1] { } - case 64: + case 66: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } - case 65: + case 67: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } - case 66: + case 68: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } - case 67: + case 69: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } - case 68: + case 70: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) } - case 69: + case 71: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } - case 70: + case 72: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.PipelineExpr = MultiStageExpr{exprDollar[1].PipelineStage} } - case 71: + case 73: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, exprDollar[2].PipelineStage) } - case 72: + case 74: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.PipelineStage = exprDollar[1].LineFilters } - case 73: + case 75: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].LogfmtParser } - case 74: + case 76: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].LabelParser } - case 75: + case 77: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].JSONExpressionParser } - case 76: + case 78: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].LogfmtExpressionParser } - case 77: + case 79: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = &LabelFilterExpr{LabelFilterer: exprDollar[2].LabelFilter} } - case 78: + case 80: exprDollar = 
exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].LineFormatExpr } - case 79: + case 81: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].DecolorizeExpr } - case 80: + case 82: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].LabelFormatExpr } - case 81: + case 83: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].DropLabelsExpr } - case 82: + case 84: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].KeepLabelsExpr } - case 83: + case 85: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.FilterOp = OpFilterIP } - case 84: + case 86: exprDollar = exprS[exprpt-1 : exprpt+1] { - exprVAL.OrFilter = newLineFilterExpr(labels.MatchEqual, "", exprDollar[1].str) + exprVAL.OrFilter = newLineFilterExpr(log.LineMatchEqual, "", exprDollar[1].str) } - case 85: + case 87: exprDollar = exprS[exprpt-4 : exprpt+1] { - exprVAL.OrFilter = newLineFilterExpr(labels.MatchEqual, exprDollar[1].FilterOp, exprDollar[3].str) + exprVAL.OrFilter = newLineFilterExpr(log.LineMatchEqual, exprDollar[1].FilterOp, exprDollar[3].str) } - case 86: + case 88: exprDollar = exprS[exprpt-3 : exprpt+1] { - exprVAL.OrFilter = newOrLineFilter(newLineFilterExpr(labels.MatchEqual, "", exprDollar[1].str), exprDollar[3].OrFilter) + exprVAL.OrFilter = newOrLineFilter(newLineFilterExpr(log.LineMatchEqual, "", exprDollar[1].str), exprDollar[3].OrFilter) } - case 87: + case 89: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LineFilter = newLineFilterExpr(exprDollar[1].Filter, "", exprDollar[2].str) } - case 88: + case 90: exprDollar = exprS[exprpt-5 : exprpt+1] { exprVAL.LineFilter = newLineFilterExpr(exprDollar[1].Filter, exprDollar[2].FilterOp, exprDollar[4].str) } - case 89: + case 91: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.LineFilter = newOrLineFilter(newLineFilterExpr(exprDollar[1].Filter, "", exprDollar[2].str), exprDollar[4].OrFilter) } - case 90: + case 92: 
exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LineFilters = exprDollar[1].LineFilter } - case 91: + case 93: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LineFilters = newOrLineFilter(exprDollar[1].LineFilter, exprDollar[3].OrFilter) } - case 92: + case 94: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LineFilters = newNestedLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].LineFilter) } - case 93: + case 95: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.ParserFlags = []string{exprDollar[1].str} } - case 94: + case 96: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.ParserFlags = append(exprDollar[1].ParserFlags, exprDollar[2].str) } - case 95: + case 97: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LogfmtParser = newLogfmtParserExpr(nil) } - case 96: + case 98: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LogfmtParser = newLogfmtParserExpr(exprDollar[2].ParserFlags) } - case 97: + case 99: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") } - case 98: + case 100: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) } - case 99: + case 101: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeUnpack, "") } - case 100: + case 102: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LabelParser = newLabelParserExpr(OpParserTypePattern, exprDollar[2].str) } - case 101: + case 103: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.JSONExpressionParser = newJSONExpressionParser(exprDollar[2].LabelExtractionExpressionList) } - case 102: + case 104: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LogfmtExpressionParser = newLogfmtExpressionParser(exprDollar[3].LabelExtractionExpressionList, exprDollar[2].ParserFlags) } - case 103: + case 105: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LogfmtExpressionParser = 
newLogfmtExpressionParser(exprDollar[2].LabelExtractionExpressionList, nil) } - case 104: + case 106: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LineFormatExpr = newLineFmtExpr(exprDollar[2].str) } - case 105: + case 107: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.DecolorizeExpr = newDecolorizeExpr() } - case 106: + case 108: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelFormat = log.NewRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) } - case 107: + case 109: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelFormat = log.NewTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) } - case 108: + case 110: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelsFormat = []log.LabelFmt{exprDollar[1].LabelFormat} } - case 109: + case 111: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelsFormat = append(exprDollar[1].LabelsFormat, exprDollar[3].LabelFormat) } - case 111: + case 113: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LabelFormatExpr = newLabelFmtExpr(exprDollar[2].LabelsFormat) } - case 112: + case 114: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelFilter = log.NewStringLabelFilter(exprDollar[1].Matcher) } - case 113: + case 115: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelFilter = exprDollar[1].IPLabelFilter } - case 114: + case 116: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelFilter = exprDollar[1].UnitFilter } - case 115: + case 117: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelFilter = exprDollar[1].NumberFilter } - case 116: + case 118: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelFilter = exprDollar[2].LabelFilter } - case 117: + case 119: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LabelFilter = log.NewAndLabelFilter(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) } - case 118: + case 120: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelFilter = log.NewAndLabelFilter(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 119: + case 
121: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelFilter = log.NewAndLabelFilter(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 120: + case 122: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelFilter = log.NewOrLabelFilter(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 121: + case 123: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelExtractionExpression = log.NewLabelExtractionExpr(exprDollar[1].str, exprDollar[3].str) } - case 122: + case 124: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelExtractionExpression = log.NewLabelExtractionExpr(exprDollar[1].str, exprDollar[1].str) } - case 123: + case 125: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelExtractionExpressionList = []log.LabelExtractionExpr{exprDollar[1].LabelExtractionExpression} } - case 124: + case 126: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelExtractionExpressionList = append(exprDollar[1].LabelExtractionExpressionList, exprDollar[3].LabelExtractionExpression) } - case 125: + case 127: exprDollar = exprS[exprpt-6 : exprpt+1] { exprVAL.IPLabelFilter = log.NewIPLabelFilter(exprDollar[5].str, exprDollar[1].str, log.LabelFilterEqual) } - case 126: + case 128: exprDollar = exprS[exprpt-6 : exprpt+1] { exprVAL.IPLabelFilter = log.NewIPLabelFilter(exprDollar[5].str, exprDollar[1].str, log.LabelFilterNotEqual) } - case 127: + case 129: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.UnitFilter = exprDollar[1].DurationFilter } - case 128: + case 130: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.UnitFilter = exprDollar[1].BytesFilter } - case 129: + case 131: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) } - case 130: + case 132: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - 
case 131: + case 133: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterLesserThan, exprDollar[1].str, exprDollar[3].duration) } - case 132: + case 134: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - case 133: + case 135: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterNotEqual, exprDollar[1].str, exprDollar[3].duration) } - case 134: + case 136: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 135: + case 137: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 136: + case 138: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterGreaterThan, exprDollar[1].str, exprDollar[3].bytes) } - case 137: + case 139: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].bytes) } - case 138: + case 140: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterLesserThan, exprDollar[1].str, exprDollar[3].bytes) } - case 139: + case 141: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].bytes) } - case 140: + case 142: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterNotEqual, exprDollar[1].str, exprDollar[3].bytes) } - case 141: + case 143: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterEqual, 
exprDollar[1].str, exprDollar[3].bytes) } - case 142: + case 144: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterEqual, exprDollar[1].str, exprDollar[3].bytes) } - case 143: + case 145: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 144: + case 146: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 145: + case 147: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 146: + case 148: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 147: + case 149: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 148: + case 150: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 149: + case 151: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 150: + case 152: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.DropLabel = log.NewDropLabel(nil, exprDollar[1].str) } - case 151: + case 153: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.DropLabel = log.NewDropLabel(exprDollar[1].Matcher, "") } - case 152: + case 154: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.DropLabels = []log.DropLabel{exprDollar[1].DropLabel} } - case 153: + 
case 155: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DropLabels = append(exprDollar[1].DropLabels, exprDollar[3].DropLabel) } - case 154: + case 156: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.DropLabelsExpr = newDropLabelsExpr(exprDollar[2].DropLabels) } - case 155: + case 157: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.KeepLabel = log.NewKeepLabel(nil, exprDollar[1].str) } - case 156: + case 158: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.KeepLabel = log.NewKeepLabel(exprDollar[1].Matcher, "") } - case 157: + case 159: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.KeepLabels = []log.KeepLabel{exprDollar[1].KeepLabel} } - case 158: + case 160: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.KeepLabels = append(exprDollar[1].KeepLabels, exprDollar[3].KeepLabel) } - case 159: + case 161: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.KeepLabelsExpr = newKeepLabelsExpr(exprDollar[2].KeepLabels) } - case 160: + case 162: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 161: + case 163: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 162: + case 164: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 163: + case 165: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 164: + case 166: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 165: + case 167: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, 
exprDollar[1].Expr, exprDollar[4].Expr) } - case 166: + case 168: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 167: + case 169: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 168: + case 170: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 169: + case 171: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 170: + case 172: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 171: + case 173: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 172: + case 174: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 173: + case 175: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 174: + case 176: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 175: + case 177: exprDollar = exprS[exprpt-0 : exprpt+1] { exprVAL.BoolModifier = &BinOpOptions{VectorMatching: &VectorMatching{Card: CardOneToOne}} } - case 176: + case 178: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.BoolModifier = &BinOpOptions{VectorMatching: &VectorMatching{Card: CardOneToOne}, ReturnBool: true} } - case 
177: + case 179: exprDollar = exprS[exprpt-5 : exprpt+1] { exprVAL.OnOrIgnoringModifier = exprDollar[1].BoolModifier exprVAL.OnOrIgnoringModifier.VectorMatching.On = true exprVAL.OnOrIgnoringModifier.VectorMatching.MatchingLabels = exprDollar[4].Labels } - case 178: + case 180: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.OnOrIgnoringModifier = exprDollar[1].BoolModifier exprVAL.OnOrIgnoringModifier.VectorMatching.On = true } - case 179: + case 181: exprDollar = exprS[exprpt-5 : exprpt+1] { exprVAL.OnOrIgnoringModifier = exprDollar[1].BoolModifier exprVAL.OnOrIgnoringModifier.VectorMatching.MatchingLabels = exprDollar[4].Labels } - case 180: + case 182: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.OnOrIgnoringModifier = exprDollar[1].BoolModifier } - case 181: + case 183: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].BoolModifier } - case 182: + case 184: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier } - case 183: + case 185: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier exprVAL.BinOpModifier.VectorMatching.Card = CardManyToOne } - case 184: + case 186: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier exprVAL.BinOpModifier.VectorMatching.Card = CardManyToOne } - case 185: + case 187: exprDollar = exprS[exprpt-5 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier exprVAL.BinOpModifier.VectorMatching.Card = CardManyToOne exprVAL.BinOpModifier.VectorMatching.Include = exprDollar[4].Labels } - case 186: + case 188: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier exprVAL.BinOpModifier.VectorMatching.Card = CardOneToMany } - case 187: + case 189: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier exprVAL.BinOpModifier.VectorMatching.Card = 
CardOneToMany } - case 188: + case 190: exprDollar = exprS[exprpt-5 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier exprVAL.BinOpModifier.VectorMatching.Card = CardOneToMany exprVAL.BinOpModifier.VectorMatching.Include = exprDollar[4].Labels } - case 189: + case 191: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } - case 190: + case 192: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } - case 191: + case 193: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } - case 192: + case 194: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.VectorExpr = NewVectorExpr(exprDollar[3].str) } - case 193: + case 195: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.Vector = OpTypeVector } - case 194: + case 196: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeSum } - case 195: + case 197: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeAvg } - case 196: + case 198: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeCount } - case 197: + case 199: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeMax } - case 198: + case 200: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeMin } - case 199: + case 201: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeStddev } - case 200: + case 202: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeStdvar } - case 201: + case 203: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeBottomK } - case 202: + case 204: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeTopK } - case 203: + case 205: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeSort } - case 204: + case 206: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeSortDesc } - case 205: + case 
207: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeCount } - case 206: + case 208: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeRate } - case 207: + case 209: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeRateCounter } - case 208: + case 210: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeBytes } - case 209: + case 211: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeBytesRate } - case 210: + case 212: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeAvg } - case 211: + case 213: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeSum } - case 212: + case 214: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeMin } - case 213: + case 215: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeMax } - case 214: + case 216: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeStdvar } - case 215: + case 217: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeStddev } - case 216: + case 218: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeQuantile } - case 217: + case 219: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeFirst } - case 218: + case 220: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeLast } - case 219: + case 221: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeAbsent } - case 220: + case 222: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.OffsetExpr = newOffsetExpr(exprDollar[2].duration) } - case 221: + case 223: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.Labels = []string{exprDollar[1].str} } - case 222: + case 224: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } - case 223: + case 225: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.Grouping = 
&Grouping{Without: false, Groups: exprDollar[3].Labels} } - case 224: + case 226: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.Grouping = &Grouping{Without: true, Groups: exprDollar[3].Labels} } - case 225: + case 227: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Grouping = &Grouping{Without: false, Groups: nil} } - case 226: + case 228: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Grouping = &Grouping{Without: true, Groups: nil} diff --git a/pkg/logql/syntax/extractor.go b/pkg/logql/syntax/extractor.go index 922cd25ce9a70..8d79202968da0 100644 --- a/pkg/logql/syntax/extractor.go +++ b/pkg/logql/syntax/extractor.go @@ -4,7 +4,7 @@ import ( "fmt" "sort" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) const UnsupportedErr = "unsupported range vector aggregation operation: %s" diff --git a/pkg/logql/syntax/lex.go b/pkg/logql/syntax/lex.go index 14db797e248f1..dffb05ab0189b 100644 --- a/pkg/logql/syntax/lex.go +++ b/pkg/logql/syntax/lex.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/util/strutil" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) var tokens = map[string]int{ @@ -23,8 +23,10 @@ var tokens = map[string]int{ OpTypeNEQ: NEQ, "=~": RE, "!~": NRE, + "!>": NPA, "|=": PIPE_EXACT, "|~": PIPE_MATCH, + "|>": PIPE_PATTERN, OpPipe: PIPE, OpUnwrap: UNWRAP, "(": OPEN_PARENTHESIS, diff --git a/pkg/logql/syntax/linefilter.go b/pkg/logql/syntax/linefilter.go index f85b210234139..e48c847d79a6f 100644 --- a/pkg/logql/syntax/linefilter.go +++ b/pkg/logql/syntax/linefilter.go @@ -1,9 +1,8 @@ package syntax import ( - "github.com/prometheus/prometheus/model/labels" - - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/util/encoding" ) // Binary encoding of the LineFilter @@ -40,7 +39,7 @@ func (lf LineFilter) MarshalTo(b []byte) (int, error) { func (lf 
*LineFilter) Unmarshal(b []byte) error { buf := encoding.DecWith(b) - lf.Ty = labels.MatchType(buf.Uvarint()) + lf.Ty = log.LineMatchType(buf.Uvarint()) lf.Match = buf.UvarintStr() lf.Op = buf.UvarintStr() return nil diff --git a/pkg/logql/syntax/linefilter_test.go b/pkg/logql/syntax/linefilter_test.go index 6ce5a601c2815..129aaac9d8a8f 100644 --- a/pkg/logql/syntax/linefilter_test.go +++ b/pkg/logql/syntax/linefilter_test.go @@ -4,18 +4,22 @@ import ( "fmt" "testing" - "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" + + "github.com/grafana/loki/v3/pkg/logql/log" ) func TestLineFilterSerialization(t *testing.T) { for i, orig := range []LineFilter{ {}, - {Ty: labels.MatchEqual, Match: "match"}, - {Ty: labels.MatchEqual, Match: "match", Op: "OR"}, - {Ty: labels.MatchNotEqual, Match: "not match"}, - {Ty: labels.MatchNotEqual, Match: "not match", Op: "OR"}, - {Ty: labels.MatchRegexp, Op: "OR"}, + {Ty: log.LineMatchEqual, Match: "match"}, + {Ty: log.LineMatchEqual, Match: "match", Op: "OR"}, + {Ty: log.LineMatchNotEqual, Match: "not match"}, + {Ty: log.LineMatchNotEqual, Match: "not match", Op: "OR"}, + {Ty: log.LineMatchPattern, Match: "match"}, + {Ty: log.LineMatchPattern, Match: "match", Op: "OR"}, + {Ty: log.LineMatchNotPattern, Match: "not match"}, + {Ty: log.LineMatchNotPattern, Match: "not match", Op: "OR"}, } { t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { b := make([]byte, orig.Size()) diff --git a/pkg/logql/syntax/parser.go b/pkg/logql/syntax/parser.go index 79213049f376c..524c86109afb4 100644 --- a/pkg/logql/syntax/parser.go +++ b/pkg/logql/syntax/parser.go @@ -10,8 +10,8 @@ import ( "github.com/prometheus/prometheus/model/labels" promql_parser "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/pkg/logql/syntax/parser_test.go 
b/pkg/logql/syntax/parser_test.go index 7152d78adac12..3851013f4be92 100644 --- a/pkg/logql/syntax/parser_test.go +++ b/pkg/logql/syntax/parser_test.go @@ -9,8 +9,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func NewStringLabelFilter(s string) *string { @@ -30,7 +30,7 @@ var ParseTestCases = []struct { Left: &LogRange{ Left: &PipelineExpr{ MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchRegexp, "", "error\\"), + newLineFilterExpr(log.LineMatchRegexp, "", "error\\"), }, Left: &MatchersExpr{ Mts: []*labels.Matcher{ @@ -60,7 +60,7 @@ var ParseTestCases = []struct { Left: newPipelineExpr( newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}), MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "error"), + newLineFilterExpr(log.LineMatchEqual, "", "error"), }, ), Interval: 12 * time.Hour, @@ -75,7 +75,7 @@ var ParseTestCases = []struct { Left: &LogRange{ Left: newPipelineExpr( newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}), - MultiStageExpr{newLineFilterExpr(labels.MatchEqual, "", "error")}, + MultiStageExpr{newLineFilterExpr(log.LineMatchEqual, "", "error")}, ), Interval: 12 * time.Hour, }, @@ -392,8 +392,8 @@ var ParseTestCases = []struct { newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), MultiStageExpr{ newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "123.123.123.123"), ), }, ), @@ -404,7 +404,7 @@ var ParseTestCases = []struct { newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar"), 
mustNewMatcher(labels.MatchEqual, "ip", "foo")}), MultiStageExpr{ newLogfmtParserExpr(nil), - newLineFilterExpr(labels.MatchEqual, OpFilterIP, "127.0.0.1"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "127.0.0.1"), newLabelFilterExpr(log.NewStringLabelFilter(mustNewMatcher(labels.MatchEqual, "ip", "2.3.4.5"))), newLabelFilterExpr(log.NewStringLabelFilter(mustNewMatcher(labels.MatchEqual, "ip", "abc"))), newLabelFilterExpr(log.NewIPLabelFilter("4.5.6.7", "ipaddr", log.LabelFilterEqual)), @@ -417,7 +417,7 @@ var ParseTestCases = []struct { exp: newPipelineExpr( newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "123.123.123.123"), }, ), }, @@ -427,8 +427,8 @@ var ParseTestCases = []struct { newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), MultiStageExpr{ newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"), - newLineFilterExpr(labels.MatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), ), }, ), @@ -440,10 +440,10 @@ var ParseTestCases = []struct { MultiStageExpr{ newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"), - newLineFilterExpr(labels.MatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), ), - newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "123.123.123.123"), ), }, ), @@ -454,8 +454,8 @@ var ParseTestCases = []struct { newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), MultiStageExpr{ newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", 
"baz"), - newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "123.123.123.123"), ), }, ), @@ -465,7 +465,7 @@ var ParseTestCases = []struct { exp: newPipelineExpr( newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), MultiStageExpr{ - newLineFilterExpr(labels.MatchNotEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchNotEqual, OpFilterIP, "123.123.123.123"), }, ), }, @@ -475,8 +475,8 @@ var ParseTestCases = []struct { newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), MultiStageExpr{ newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchNotEqual, OpFilterIP, "123.123.123.123"), - newLineFilterExpr(labels.MatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchNotEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), ), }, ), @@ -488,10 +488,10 @@ var ParseTestCases = []struct { MultiStageExpr{ newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchNotEqual, OpFilterIP, "123.123.123.123"), - newLineFilterExpr(labels.MatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchNotEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), ), - newLineFilterExpr(labels.MatchNotEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchNotEqual, OpFilterIP, "123.123.123.123"), ), }, ), @@ -662,7 +662,7 @@ var ParseTestCases = []struct { in: `{foo="bar"} |= "baz"`, exp: newPipelineExpr( newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiStageExpr{newLineFilterExpr(labels.MatchEqual, "", "baz")}, + MultiStageExpr{newLineFilterExpr(log.LineMatchEqual, "", "baz")}, ), }, { @@ -673,12 +673,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - 
newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -693,12 +693,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -715,12 +715,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -737,12 +737,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), 
+ newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), newLabelParserExpr(OpParserTypeUnpack, ""), }, @@ -769,12 +769,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -796,12 +796,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -824,12 +824,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -852,12 +852,12 @@ var ParseTestCases = 
[]struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -882,12 +882,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -913,12 +913,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -935,12 +935,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + 
newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -963,12 +963,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -993,12 +993,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -1257,7 +1257,7 @@ var ParseTestCases = []struct { mustNewMatcher(labels.MatchEqual, "namespace", "tns"), }), MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "level=error"), + newLineFilterExpr(log.LineMatchEqual, "", "level=error"), }), Interval: 5 * time.Minute, }, OpRangeTypeCount, nil, nil), @@ -1291,7 +1291,7 @@ var ParseTestCases = []struct { mustNewMatcher(labels.MatchEqual, "namespace", "tns"), }), MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "level=error"), + newLineFilterExpr(log.LineMatchEqual, "", 
"level=error"), }), Interval: 5 * time.Minute, }, OpRangeTypeCount, nil, nil), @@ -1368,7 +1368,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1387,7 +1387,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeUnpack, ""), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ @@ -1407,7 +1407,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1426,7 +1426,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypePattern, " bar "), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1445,7 +1445,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), 
newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1464,7 +1464,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1483,7 +1483,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1503,7 +1503,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1534,7 +1534,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLineFmtExpr("blip{{ .foo }}blop"), }, }, @@ -1545,7 +1545,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ 
LabelFilterer: log.NewOrLabelFilter( @@ -1566,7 +1566,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1592,7 +1592,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1638,7 +1638,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1659,7 +1659,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1690,7 +1690,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), 
&LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1720,7 +1720,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "level=error"), + newLineFilterExpr(log.LineMatchEqual, "", "level=error"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1742,7 +1742,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "level=error"), + newLineFilterExpr(log.LineMatchEqual, "", "level=error"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1764,7 +1764,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "level=error"), + newLineFilterExpr(log.LineMatchEqual, "", "level=error"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1786,7 +1786,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "level=error"), + newLineFilterExpr(log.LineMatchEqual, "", "level=error"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1808,7 +1808,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - 
newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), }, }, 5*time.Minute, @@ -1890,7 +1890,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1921,7 +1921,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1952,7 +1952,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1983,7 +1983,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2018,7 +2018,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + 
newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2057,7 +2057,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2096,7 +2096,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2135,7 +2135,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2174,7 +2174,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2213,7 +2213,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - 
newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2263,7 +2263,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2295,7 +2295,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2344,7 +2344,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2376,7 +2376,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2425,7 +2425,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), 
MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2457,7 +2457,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2506,7 +2506,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2538,7 +2538,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2655,7 +2655,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2687,7 +2687,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, 
Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2932,7 +2932,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), }, }, @@ -2963,7 +2963,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "#"), + newLineFilterExpr(log.LineMatchEqual, "", "#"), }, }, }, @@ -3147,23 +3147,23 @@ var ParseTestCases = []struct { Left: newOrLineFilter( &LineFilterExpr{ LineFilter: LineFilter{ - Ty: labels.MatchEqual, + Ty: log.LineMatchEqual, Match: "foo", }, }, &LineFilterExpr{ LineFilter: LineFilter{ - Ty: labels.MatchEqual, + Ty: log.LineMatchEqual, Match: "bar", }, }), LineFilter: LineFilter{ - Ty: labels.MatchEqual, + Ty: log.LineMatchEqual, Match: "buzz", }, Or: &LineFilterExpr{ LineFilter: LineFilter{ - Ty: labels.MatchEqual, + Ty: log.LineMatchEqual, Match: "fizz", }, IsOrChild: true, diff --git a/pkg/logql/syntax/serialize.go b/pkg/logql/syntax/serialize.go index 84af7e803d0d3..4e4362683543e 100644 --- a/pkg/logql/syntax/serialize.go +++ b/pkg/logql/syntax/serialize.go @@ -8,7 +8,7 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) type JSONSerializer struct { diff --git a/pkg/logql/test_utils.go b/pkg/logql/test_utils.go index 72b8429e11bf9..d141f39bf0778 100644 --- a/pkg/logql/test_utils.go +++ 
b/pkg/logql/test_utils.go @@ -14,11 +14,11 @@ import ( "github.com/prometheus/prometheus/model/labels" promql_parser "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func NewMockQuerier(shards int, streams []logproto.Stream) MockQuerier { @@ -34,6 +34,20 @@ type MockQuerier struct { streams []logproto.Stream } +func (q MockQuerier) extractOldShard(xs []string) (*index.ShardAnnotation, error) { + parsed, version, err := ParseShards(xs) + if err != nil { + return nil, err + } + + if version != PowerOfTwoVersion { + return nil, fmt.Errorf("unsupported shard version: %d", version) + } + + return parsed[0].PowerOfTwo, nil + +} + func (q MockQuerier) SelectLogs(_ context.Context, req SelectLogParams) (iter.EntryIterator, error) { expr, err := req.LogSelector() if err != nil { @@ -46,13 +60,12 @@ func (q MockQuerier) SelectLogs(_ context.Context, req SelectLogParams) (iter.En matchers := expr.Matchers() - var shard *astmapper.ShardAnnotation + var shard *index.ShardAnnotation if len(req.Shards) > 0 { - shards, err := ParseShards(req.Shards) + shard, err = q.extractOldShard(req.Shards) if err != nil { return nil, err } - shard = &shards[0] } var matched []logproto.Stream @@ -172,13 +185,12 @@ func (q MockQuerier) SelectSamples(_ context.Context, req SelectSampleParams) (i matchers := selector.Matchers() - var shard *astmapper.ShardAnnotation + var shard *index.ShardAnnotation if len(req.Shards) > 0 { - shards, err := ParseShards(req.Shards) + shard, err = q.extractOldShard(req.Shards) if err != nil { return 
nil, err } - shard = &shards[0] } var matched []logproto.Stream diff --git a/pkg/logqlanalyzer/analyzer.go b/pkg/logqlanalyzer/analyzer.go index 5d32680744289..ceff726507f1b 100644 --- a/pkg/logqlanalyzer/analyzer.go +++ b/pkg/logqlanalyzer/analyzer.go @@ -8,8 +8,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) type logQLAnalyzer struct { diff --git a/pkg/logqlanalyzer/http.go b/pkg/logqlanalyzer/http.go index 6f1324348892a..c3cff9763a3f8 100644 --- a/pkg/logqlanalyzer/http.go +++ b/pkg/logqlanalyzer/http.go @@ -9,7 +9,7 @@ import ( "github.com/go-kit/log/level" "github.com/gorilla/mux" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func CorsMiddleware() mux.MiddlewareFunc { diff --git a/pkg/logqlmodel/logqlmodel.go b/pkg/logqlmodel/logqlmodel.go index 8ba0e198c403a..8da990f0e74bb 100644 --- a/pkg/logqlmodel/logqlmodel.go +++ b/pkg/logqlmodel/logqlmodel.go @@ -3,10 +3,11 @@ package logqlmodel import ( "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" - "github.com/grafana/loki/pkg/logqlmodel/stats" "github.com/grafana/loki/pkg/push" + + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) // ValueTypeStreams promql.ValueType for log streams diff --git a/pkg/logqlmodel/metadata/context.go b/pkg/logqlmodel/metadata/context.go index b819893679a4f..f4d7dca265da1 100644 --- a/pkg/logqlmodel/metadata/context.go +++ b/pkg/logqlmodel/metadata/context.go @@ -10,7 +10,7 @@ import ( "sort" "sync" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" + 
"github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" ) type ( diff --git a/pkg/logqlmodel/metadata/context_test.go b/pkg/logqlmodel/metadata/context_test.go index 256abdb18ef77..2f4e3316ece13 100644 --- a/pkg/logqlmodel/metadata/context_test.go +++ b/pkg/logqlmodel/metadata/context_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" ) func TestHeaders(t *testing.T) { diff --git a/pkg/logqlmodel/stats/context.go b/pkg/logqlmodel/stats/context.go index 31344b01c585d..f895f7fc6c9b0 100644 --- a/pkg/logqlmodel/stats/context.go +++ b/pkg/logqlmodel/stats/context.go @@ -43,6 +43,7 @@ type Context struct { querier Querier ingester Ingester caches Caches + index Index // store is the store statistics collected across the query path store Store @@ -115,6 +116,11 @@ func (c *Context) Caches() Caches { } } +// Index returns the index statistics accumulated so far. +func (c *Context) Index() Index { + return c.index +} + // Reset clears the statistics. func (c *Context) Reset() { c.mtx.Lock() @@ -125,6 +131,7 @@ func (c *Context) Reset() { c.ingester.Reset() c.result.Reset() c.caches.Reset() + c.index.Reset() } // Result calculates the summary based on store and ingester data. @@ -137,6 +144,7 @@ func (c *Context) Result(execTime time.Duration, queueTime time.Duration, totalE }, Ingester: c.ingester, Caches: c.caches, + Index: c.index, }) r.ComputeSummary(execTime, queueTime, totalEntriesReturned) @@ -162,6 +170,15 @@ func JoinIngesters(ctx context.Context, inc Ingester) { stats.ingester.Merge(inc) } +// JoinIndex joins the index statistics in a concurrency-safe manner. 
+func JoinIndex(ctx context.Context, index Index) { + stats := FromContext(ctx) + stats.mtx.Lock() + defer stats.mtx.Unlock() + + stats.index.Merge(index) +} + // ComputeSummary compute the summary of the statistics. func (r *Result) ComputeSummary(execTime time.Duration, queueTime time.Duration, totalEntriesReturned int) { r.Summary.TotalBytesProcessed = r.Querier.Store.Chunk.DecompressedBytes + r.Querier.Store.Chunk.HeadChunkBytes + @@ -189,6 +206,7 @@ func (s *Store) Merge(m Store) { s.TotalChunksRef += m.TotalChunksRef s.TotalChunksDownloaded += m.TotalChunksDownloaded s.CongestionControlLatency += m.CongestionControlLatency + s.PipelineWrapperFilteredLines += m.PipelineWrapperFilteredLines s.ChunksDownloadTime += m.ChunksDownloadTime s.ChunkRefsFetchTime += m.ChunkRefsFetchTime s.Chunk.HeadChunkBytes += m.Chunk.HeadChunkBytes @@ -226,6 +244,11 @@ func (i *Ingester) Merge(m Ingester) { i.TotalReached += m.TotalReached } +func (i *Index) Merge(m Index) { + i.TotalChunks += m.TotalChunks + i.PostFilterChunks += m.PostFilterChunks +} + func (c *Caches) Merge(m Caches) { c.Chunk.Merge(m.Chunk) c.Index.Merge(m.Index) @@ -267,6 +290,7 @@ func (r *Result) Merge(m Result) { r.Ingester.Merge(m.Ingester) r.Caches.Merge(m.Caches) r.Summary.Merge(m.Summary) + r.Index.Merge(m.Index) r.ComputeSummary(ConvertSecondsToNanoseconds(r.Summary.ExecTime+m.Summary.ExecTime), ConvertSecondsToNanoseconds(r.Summary.QueueTime+m.Summary.QueueTime), int(r.Summary.TotalEntriesReturned)) } @@ -289,6 +313,10 @@ func (r Result) CongestionControlLatency() time.Duration { return time.Duration(r.Querier.Store.CongestionControlLatency) } +func (r Result) PipelineWrapperFilteredLines() int64 { + return r.Querier.Store.PipelineWrapperFilteredLines + r.Ingester.Store.PipelineWrapperFilteredLines +} + func (r Result) TotalDuplicates() int64 { return r.Querier.Store.Chunk.TotalDuplicates + r.Ingester.Store.Chunk.TotalDuplicates } @@ -374,6 +402,10 @@ func (c *Context) AddCongestionControlLatency(i 
time.Duration) { atomic.AddInt64(&c.store.CongestionControlLatency, int64(i)) } +func (c *Context) AddPipelineWrapperFilterdLines(i int64) { + atomic.AddInt64(&c.store.PipelineWrapperFilteredLines, i) +} + func (c *Context) AddChunksDownloaded(i int64) { atomic.AddInt64(&c.store.TotalChunksDownloaded, i) } diff --git a/pkg/logqlmodel/stats/context_test.go b/pkg/logqlmodel/stats/context_test.go index e40a5372a8968..55f5b93c70b05 100644 --- a/pkg/logqlmodel/stats/context_test.go +++ b/pkg/logqlmodel/stats/context_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestResult(t *testing.T) { @@ -26,6 +26,7 @@ func TestResult(t *testing.T) { stats.AddCacheRequest(IndexCache, 4) stats.AddCacheRequest(ResultCache, 1) stats.SetQueryReferencedStructuredMetadata() + stats.AddPipelineWrapperFilterdLines(1) fakeIngesterQuery(ctx) fakeIngesterQuery(ctx) @@ -39,6 +40,7 @@ func TestResult(t *testing.T) { TotalLinesSent: 60, TotalReached: 2, Store: Store{ + PipelineWrapperFilteredLines: 2, Chunk: Chunk{ HeadChunkBytes: 10, HeadChunkLines: 20, @@ -51,10 +53,11 @@ func TestResult(t *testing.T) { }, Querier: Querier{ Store: Store{ - TotalChunksRef: 50, - TotalChunksDownloaded: 60, - ChunksDownloadTime: time.Second.Nanoseconds(), - QueryReferencedStructured: true, + TotalChunksRef: 50, + TotalChunksDownloaded: 60, + ChunksDownloadTime: time.Second.Nanoseconds(), + QueryReferencedStructured: true, + PipelineWrapperFilteredLines: 1, Chunk: Chunk{ HeadChunkBytes: 10, HeadChunkLines: 20, @@ -148,6 +151,7 @@ func fakeIngesterQuery(ctx context.Context) { TotalBatches: 25, TotalLinesSent: 30, Store: Store{ + PipelineWrapperFilteredLines: 1, Chunk: Chunk{ HeadChunkBytes: 5, HeadChunkLines: 10, @@ -173,6 +177,7 @@ func TestResult_Merge(t *testing.T) { TotalLinesSent: 60, TotalReached: 2, Store: Store{ + PipelineWrapperFilteredLines: 4, Chunk: Chunk{ HeadChunkBytes: 
10, HeadChunkLines: 20, @@ -185,10 +190,11 @@ func TestResult_Merge(t *testing.T) { }, Querier: Querier{ Store: Store{ - TotalChunksRef: 50, - TotalChunksDownloaded: 60, - ChunksDownloadTime: time.Second.Nanoseconds(), - QueryReferencedStructured: true, + TotalChunksRef: 50, + TotalChunksDownloaded: 60, + ChunksDownloadTime: time.Second.Nanoseconds(), + QueryReferencedStructured: true, + PipelineWrapperFilteredLines: 2, Chunk: Chunk{ HeadChunkBytes: 10, HeadChunkLines: 20, @@ -235,6 +241,7 @@ func TestResult_Merge(t *testing.T) { TotalBatches: 2 * 50, TotalLinesSent: 2 * 60, Store: Store{ + PipelineWrapperFilteredLines: 8, Chunk: Chunk{ HeadChunkBytes: 2 * 10, HeadChunkLines: 2 * 20, @@ -248,10 +255,11 @@ func TestResult_Merge(t *testing.T) { }, Querier: Querier{ Store: Store{ - TotalChunksRef: 2 * 50, - TotalChunksDownloaded: 2 * 60, - ChunksDownloadTime: 2 * time.Second.Nanoseconds(), - QueryReferencedStructured: true, + TotalChunksRef: 2 * 50, + TotalChunksDownloaded: 2 * 60, + ChunksDownloadTime: 2 * time.Second.Nanoseconds(), + QueryReferencedStructured: true, + PipelineWrapperFilteredLines: 4, Chunk: Chunk{ HeadChunkBytes: 2 * 10, HeadChunkLines: 2 * 20, @@ -306,13 +314,15 @@ func TestIngester(t *testing.T) { statsCtx.AddDuplicates(10) statsCtx.AddHeadChunkBytes(200) statsCtx.SetQueryReferencedStructuredMetadata() + statsCtx.AddPipelineWrapperFilterdLines(1) require.Equal(t, Ingester{ TotalReached: 1, TotalChunksMatched: 100, TotalBatches: 25, TotalLinesSent: 30, Store: Store{ - QueryReferencedStructured: true, + QueryReferencedStructured: true, + PipelineWrapperFilteredLines: 1, Chunk: Chunk{ HeadChunkBytes: 200, CompressedBytes: 100, diff --git a/pkg/logqlmodel/stats/stats.pb.go b/pkg/logqlmodel/stats/stats.pb.go index f25143272599b..9a728c1612671 100644 --- a/pkg/logqlmodel/stats/stats.pb.go +++ b/pkg/logqlmodel/stats/stats.pb.go @@ -32,6 +32,7 @@ type Result struct { Querier Querier `protobuf:"bytes,2,opt,name=querier,proto3" json:"querier"` Ingester 
Ingester `protobuf:"bytes,3,opt,name=ingester,proto3" json:"ingester"` Caches Caches `protobuf:"bytes,4,opt,name=caches,proto3" json:"cache"` + Index Index `protobuf:"bytes,5,opt,name=index,proto3" json:"index"` } func (m *Result) Reset() { *m = Result{} } @@ -94,6 +95,13 @@ func (m *Result) GetCaches() Caches { return Caches{} } +func (m *Result) GetIndex() Index { + if m != nil { + return m.Index + } + return Index{} +} + type Caches struct { Chunk Cache `protobuf:"bytes,1,opt,name=chunk,proto3" json:"chunk"` Index Cache `protobuf:"bytes,2,opt,name=index,proto3" json:"index"` @@ -342,6 +350,62 @@ func (m *Summary) GetTotalStructuredMetadataBytesProcessed() int64 { return 0 } +// Statistics from Index queries +// TODO(owen-d): include bytes. +// Needs some index methods added to return _sized_ chunk refs to know +type Index struct { + // Total chunks + TotalChunks int64 `protobuf:"varint,1,opt,name=totalChunks,proto3" json:"totalChunks"` + // Post-filtered chunks + PostFilterChunks int64 `protobuf:"varint,2,opt,name=postFilterChunks,proto3" json:"postFilterChunks"` +} + +func (m *Index) Reset() { *m = Index{} } +func (*Index) ProtoMessage() {} +func (*Index) Descriptor() ([]byte, []int) { + return fileDescriptor_6cdfe5d2aea33ebb, []int{3} +} +func (m *Index) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *Index) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_Index.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *Index) XXX_Merge(src proto.Message) { + xxx_messageInfo_Index.Merge(m, src) +} +func (m *Index) XXX_Size() int { + return m.Size() +} +func (m *Index) XXX_DiscardUnknown() { + xxx_messageInfo_Index.DiscardUnknown(m) +} + +var xxx_messageInfo_Index proto.InternalMessageInfo + +func (m *Index) GetTotalChunks() int64 { + if m != nil { + return m.TotalChunks + 
} + return 0 +} + +func (m *Index) GetPostFilterChunks() int64 { + if m != nil { + return m.PostFilterChunks + } + return 0 +} + type Querier struct { Store Store `protobuf:"bytes,1,opt,name=store,proto3" json:"store"` } @@ -349,7 +413,7 @@ type Querier struct { func (m *Querier) Reset() { *m = Querier{} } func (*Querier) ProtoMessage() {} func (*Querier) Descriptor() ([]byte, []int) { - return fileDescriptor_6cdfe5d2aea33ebb, []int{3} + return fileDescriptor_6cdfe5d2aea33ebb, []int{4} } func (m *Querier) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -400,7 +464,7 @@ type Ingester struct { func (m *Ingester) Reset() { *m = Ingester{} } func (*Ingester) ProtoMessage() {} func (*Ingester) Descriptor() ([]byte, []int) { - return fileDescriptor_6cdfe5d2aea33ebb, []int{4} + return fileDescriptor_6cdfe5d2aea33ebb, []int{5} } func (m *Ingester) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -478,12 +542,14 @@ type Store struct { ChunkRefsFetchTime int64 `protobuf:"varint,5,opt,name=chunkRefsFetchTime,proto3" json:"chunkRefsFetchTime"` // Time spent being blocked on congestion control. CongestionControlLatency int64 `protobuf:"varint,6,opt,name=congestionControlLatency,proto3" json:"congestionControlLatency"` + // Total number of lines filtered by pipeline wrapper. 
+ PipelineWrapperFilteredLines int64 `protobuf:"varint,7,opt,name=pipelineWrapperFilteredLines,proto3" json:"pipelineWrapperFilteredLines"` } func (m *Store) Reset() { *m = Store{} } func (*Store) ProtoMessage() {} func (*Store) Descriptor() ([]byte, []int) { - return fileDescriptor_6cdfe5d2aea33ebb, []int{5} + return fileDescriptor_6cdfe5d2aea33ebb, []int{6} } func (m *Store) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -561,6 +627,13 @@ func (m *Store) GetCongestionControlLatency() int64 { return 0 } +func (m *Store) GetPipelineWrapperFilteredLines() int64 { + if m != nil { + return m.PipelineWrapperFilteredLines + } + return 0 +} + type Chunk struct { // Total bytes processed but was already in memory (found in the headchunk). Includes structured metadata bytes. HeadChunkBytes int64 `protobuf:"varint,4,opt,name=headChunkBytes,proto3" json:"headChunkBytes"` @@ -585,7 +658,7 @@ type Chunk struct { func (m *Chunk) Reset() { *m = Chunk{} } func (*Chunk) ProtoMessage() {} func (*Chunk) Descriptor() ([]byte, []int) { - return fileDescriptor_6cdfe5d2aea33ebb, []int{6} + return fileDescriptor_6cdfe5d2aea33ebb, []int{7} } func (m *Chunk) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -691,7 +764,7 @@ type Cache struct { func (m *Cache) Reset() { *m = Cache{} } func (*Cache) ProtoMessage() {} func (*Cache) Descriptor() ([]byte, []int) { - return fileDescriptor_6cdfe5d2aea33ebb, []int{7} + return fileDescriptor_6cdfe5d2aea33ebb, []int{8} } func (m *Cache) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -780,6 +853,7 @@ func init() { proto.RegisterType((*Result)(nil), "stats.Result") proto.RegisterType((*Caches)(nil), "stats.Caches") proto.RegisterType((*Summary)(nil), "stats.Summary") + proto.RegisterType((*Index)(nil), "stats.Index") proto.RegisterType((*Querier)(nil), "stats.Querier") proto.RegisterType((*Ingester)(nil), "stats.Ingester") proto.RegisterType((*Store)(nil), "stats.Store") @@ -790,87 +864,92 @@ func init() { func init() { 
proto.RegisterFile("pkg/logqlmodel/stats/stats.proto", fileDescriptor_6cdfe5d2aea33ebb) } var fileDescriptor_6cdfe5d2aea33ebb = []byte{ - // 1274 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x57, 0x4b, 0x6f, 0xe3, 0x54, - 0x14, 0x8e, 0x27, 0x75, 0xd2, 0xde, 0x3e, 0xe7, 0xb6, 0xc3, 0x64, 0x18, 0x64, 0x97, 0xc0, 0x88, - 0x22, 0x50, 0x23, 0x1e, 0x12, 0x02, 0x31, 0x12, 0x72, 0x87, 0x4a, 0x95, 0x5a, 0x51, 0x4e, 0x40, - 0x42, 0xb0, 0x72, 0xec, 0xdb, 0xc4, 0xaa, 0x63, 0xa7, 0xf6, 0x75, 0x99, 0xae, 0xe0, 0x27, 0xb0, - 0x62, 0xc3, 0x1f, 0x60, 0xc3, 0x8a, 0x15, 0x6b, 0x36, 0xb3, 0xec, 0x72, 0x56, 0x16, 0x4d, 0x37, - 0xc8, 0xab, 0x91, 0xf8, 0x03, 0xe8, 0x3e, 0xe2, 0x57, 0xec, 0x99, 0x6c, 0xea, 0x7b, 0xbe, 0xf3, - 0x7d, 0xe7, 0x3e, 0x72, 0xce, 0xb9, 0xb7, 0x68, 0x77, 0x72, 0x3e, 0xec, 0xb9, 0xfe, 0xf0, 0xc2, - 0x1d, 0xfb, 0x36, 0x71, 0x7b, 0x21, 0x35, 0x69, 0x28, 0xfe, 0xee, 0x4f, 0x02, 0x9f, 0xfa, 0x58, - 0xe5, 0xc6, 0xeb, 0x3b, 0x43, 0x7f, 0xe8, 0x73, 0xa4, 0xc7, 0x46, 0xc2, 0xd9, 0xfd, 0x4f, 0x41, - 0x2d, 0x20, 0x61, 0xe4, 0x52, 0xfc, 0x29, 0x6a, 0x87, 0xd1, 0x78, 0x6c, 0x06, 0x57, 0x1d, 0x65, - 0x57, 0xd9, 0x5b, 0xfd, 0x70, 0x63, 0x5f, 0x84, 0xe9, 0x0b, 0xd4, 0xd8, 0x7c, 0x16, 0xeb, 0x8d, - 0x24, 0xd6, 0x67, 0x34, 0x98, 0x0d, 0x98, 0xf4, 0x22, 0x22, 0x81, 0x43, 0x82, 0xce, 0x9d, 0x82, - 0xf4, 0x6b, 0x81, 0x66, 0x52, 0x49, 0x83, 0xd9, 0x00, 0x3f, 0x46, 0xcb, 0x8e, 0x37, 0x24, 0x21, - 0x25, 0x41, 0xa7, 0xc9, 0xb5, 0x9b, 0x52, 0x7b, 0x24, 0x61, 0x63, 0x4b, 0x8a, 0x53, 0x22, 0xa4, - 0x23, 0xfc, 0x31, 0x6a, 0x59, 0xa6, 0x35, 0x22, 0x61, 0x67, 0x89, 0x8b, 0xd7, 0xa5, 0xf8, 0x80, - 0x83, 0xc6, 0xba, 0x94, 0xaa, 0x9c, 0x04, 0x92, 0xdb, 0xfd, 0x6d, 0x09, 0xb5, 0x04, 0x03, 0x7f, - 0x80, 0x54, 0x6b, 0x14, 0x79, 0xe7, 0x72, 0xcf, 0x6b, 0x79, 0x7d, 0x4e, 0xce, 0x28, 0x20, 0x3e, - 0x4c, 0xe2, 0x78, 0x36, 0x79, 0x2a, 0xf7, 0x5a, 0x23, 0xe1, 0x14, 0x10, 0x1f, 0xb6, 0xcc, 0x80, - 0x9f, 0xb2, 0xdc, 0x63, 0x51, 0xb3, 0x21, 0x35, 
0x92, 0x03, 0xf2, 0x8b, 0x0f, 0xd0, 0x2a, 0xa7, - 0x89, 0x1f, 0x48, 0xee, 0xb0, 0x28, 0xdd, 0x96, 0xd2, 0x3c, 0x11, 0xf2, 0x06, 0x3e, 0x44, 0x6b, - 0x97, 0xbe, 0x1b, 0x8d, 0x89, 0x8c, 0xa2, 0x56, 0x44, 0xd9, 0x91, 0x51, 0x0a, 0x4c, 0x28, 0x58, - 0x2c, 0x4e, 0xc8, 0x7e, 0xb2, 0xd9, 0x6a, 0x5a, 0x2f, 0x8b, 0x93, 0x67, 0x42, 0xc1, 0x62, 0x9b, - 0x72, 0xcd, 0x01, 0x71, 0x65, 0x98, 0xf6, 0xcb, 0x36, 0x95, 0x23, 0x42, 0xde, 0xc0, 0x3f, 0xa0, - 0x6d, 0xc7, 0x0b, 0xa9, 0xe9, 0xd1, 0x13, 0x42, 0x03, 0xc7, 0x92, 0xc1, 0x96, 0x2b, 0x82, 0x3d, - 0x94, 0xc1, 0xaa, 0x04, 0x50, 0x05, 0x76, 0xff, 0x6a, 0xa1, 0xb6, 0xcc, 0x79, 0xfc, 0x2d, 0xba, - 0x3f, 0xb8, 0xa2, 0x24, 0x3c, 0x0d, 0x7c, 0x8b, 0x84, 0x21, 0xb1, 0x4f, 0x49, 0xd0, 0x27, 0x96, - 0xef, 0xd9, 0x3c, 0x61, 0x9a, 0xc6, 0xc3, 0x24, 0xd6, 0xeb, 0x28, 0x50, 0xe7, 0x60, 0x61, 0x5d, - 0xc7, 0xab, 0x0c, 0x7b, 0x27, 0x0b, 0x5b, 0x43, 0x81, 0x3a, 0x07, 0x3e, 0x42, 0xdb, 0xd4, 0xa7, - 0xa6, 0x6b, 0x14, 0xa6, 0xe5, 0x39, 0xd7, 0x34, 0xee, 0xb3, 0x43, 0xa8, 0x70, 0x43, 0x15, 0x98, - 0x86, 0x3a, 0x2e, 0x4c, 0xc5, 0x73, 0x30, 0x1f, 0xaa, 0xe8, 0x86, 0x2a, 0x10, 0xef, 0xa1, 0x65, - 0xf2, 0x94, 0x58, 0xdf, 0x38, 0x63, 0xc2, 0xb3, 0x4f, 0x31, 0xd6, 0x58, 0x35, 0xcf, 0x30, 0x48, - 0x47, 0xf8, 0x3d, 0xb4, 0x72, 0x11, 0x91, 0x88, 0x70, 0x6a, 0x8b, 0x53, 0xd7, 0x93, 0x58, 0xcf, - 0x40, 0xc8, 0x86, 0x78, 0x1f, 0xa1, 0x30, 0x1a, 0x88, 0x3e, 0x12, 0xf2, 0x3c, 0x6a, 0x1a, 0x1b, - 0x49, 0xac, 0xe7, 0x50, 0xc8, 0x8d, 0xf1, 0x31, 0xda, 0xe1, 0xab, 0xfb, 0xd2, 0xa3, 0x22, 0x1d, - 0x69, 0x14, 0x78, 0xc4, 0xe6, 0x49, 0xd3, 0x34, 0x3a, 0x49, 0xac, 0x57, 0xfa, 0xa1, 0x12, 0xc5, - 0x5d, 0xd4, 0x0a, 0x27, 0xae, 0x43, 0xc3, 0xce, 0x0a, 0xd7, 0x23, 0x56, 0xbf, 0x02, 0x01, 0xf9, - 0xe5, 0x9c, 0x91, 0x19, 0xd8, 0x61, 0x07, 0xe5, 0x38, 0x1c, 0x01, 0xf9, 0x4d, 0x57, 0x75, 0xea, - 0x87, 0xf4, 0xd0, 0x71, 0x29, 0x09, 0xf8, 0xe9, 0x75, 0x56, 0x4b, 0xab, 0x2a, 0xf9, 0xa1, 0x12, - 0xc5, 0x3f, 0xa1, 0x47, 0x1c, 0xef, 0xd3, 0x20, 0xb2, 0x68, 0x14, 0x10, 0xfb, 0x84, 
0x50, 0xd3, - 0x36, 0xa9, 0x59, 0x4a, 0x89, 0x35, 0x1e, 0xfe, 0xdd, 0x24, 0xd6, 0x17, 0x13, 0xc0, 0x62, 0xb4, - 0xee, 0xe7, 0xa8, 0x2d, 0x7b, 0x3e, 0x6b, 0x93, 0x21, 0xf5, 0x03, 0x52, 0xea, 0xac, 0x7d, 0x86, - 0x65, 0x6d, 0x92, 0x53, 0x40, 0x7c, 0xba, 0x7f, 0xdc, 0x41, 0xcb, 0x47, 0x59, 0x6b, 0x5f, 0xe3, - 0x73, 0x02, 0x61, 0x75, 0x2c, 0xea, 0x4d, 0x35, 0xb6, 0x58, 0x7b, 0xc9, 0xe3, 0x50, 0xb0, 0xf0, - 0x21, 0xc2, 0xdc, 0x3e, 0x60, 0xad, 0x3a, 0x3c, 0x31, 0x29, 0xd7, 0x8a, 0xa2, 0x7a, 0x2d, 0x89, - 0xf5, 0x0a, 0x2f, 0x54, 0x60, 0xe9, 0xec, 0x06, 0xb7, 0x43, 0x59, 0x43, 0xd9, 0xec, 0x12, 0x87, - 0x82, 0x85, 0x3f, 0x43, 0x1b, 0x59, 0x05, 0xf4, 0x89, 0x47, 0x65, 0xc1, 0xe0, 0x24, 0xd6, 0x4b, - 0x1e, 0x28, 0xd9, 0xd9, 0x79, 0xa9, 0x0b, 0x9f, 0xd7, 0xaf, 0x4b, 0x48, 0xe5, 0xfe, 0x74, 0x62, - 0xb1, 0x09, 0x20, 0x67, 0xb2, 0x3d, 0x65, 0x13, 0xa7, 0x1e, 0x28, 0xd9, 0xf8, 0x2b, 0x74, 0x2f, - 0x87, 0x3c, 0xf1, 0x7f, 0xf4, 0x5c, 0xdf, 0xb4, 0xd3, 0x53, 0x7b, 0x90, 0xc4, 0x7a, 0x35, 0x01, - 0xaa, 0x61, 0xf6, 0x1b, 0x58, 0x05, 0x8c, 0xd7, 0x73, 0x33, 0xfb, 0x0d, 0xe6, 0xbd, 0x50, 0x81, - 0x61, 0x0b, 0x3d, 0x60, 0xc5, 0x7b, 0x05, 0xe4, 0x8c, 0x04, 0xc4, 0xb3, 0x88, 0x9d, 0xe5, 0x5f, - 0x67, 0x7d, 0x57, 0xd9, 0x5b, 0x36, 0x1e, 0x25, 0xb1, 0xfe, 0x66, 0x2d, 0x69, 0x96, 0xa4, 0x50, - 0x1f, 0x27, 0x7b, 0x00, 0x94, 0xae, 0x57, 0x86, 0xd5, 0x3c, 0x00, 0x66, 0xfb, 0x03, 0x72, 0x16, - 0x1e, 0x12, 0x6a, 0x8d, 0xd2, 0xd6, 0x96, 0xdf, 0x5f, 0xc1, 0x0b, 0x15, 0x18, 0xfe, 0x0e, 0x75, - 0x2c, 0x9f, 0xa7, 0xbb, 0xe3, 0x7b, 0x07, 0xbe, 0x47, 0x03, 0xdf, 0x3d, 0x36, 0x29, 0xf1, 0xac, - 0x2b, 0xde, 0xfd, 0x9a, 0xc6, 0x1b, 0x49, 0xac, 0xd7, 0x72, 0xa0, 0xd6, 0xd3, 0xfd, 0x53, 0x45, - 0x2a, 0xdf, 0x01, 0x4b, 0x8c, 0x11, 0x31, 0x6d, 0xb1, 0x1d, 0x56, 0xab, 0xf9, 0x8c, 0x2c, 0x7a, - 0xa0, 0x64, 0x17, 0xb4, 0xa2, 0x2b, 0xa9, 0x15, 0x5a, 0xd1, 0x8f, 0x4a, 0x36, 0x3e, 0x40, 0x77, - 0x6d, 0x62, 0xf9, 0xe3, 0x49, 0xc0, 0x1b, 0x83, 0x98, 0x5a, 0x6c, 0xea, 0x5e, 0x12, 0xeb, 0xf3, - 0x4e, 0x98, 0x87, 0xca, 
0x41, 0xc4, 0x1a, 0xda, 0xd5, 0x41, 0xc4, 0x32, 0xe6, 0x21, 0xfc, 0x18, - 0x6d, 0x96, 0xd7, 0x21, 0x5a, 0xfe, 0x76, 0x12, 0xeb, 0x65, 0x17, 0x94, 0x01, 0x26, 0xe7, 0x59, - 0xfe, 0x24, 0x9a, 0xb8, 0x8e, 0x65, 0x32, 0xf9, 0x4a, 0x26, 0x2f, 0xb9, 0xa0, 0x0c, 0x30, 0xf9, - 0xa4, 0xd4, 0xda, 0x51, 0x26, 0x2f, 0xb9, 0xa0, 0x0c, 0xe0, 0x09, 0xda, 0x4d, 0x0f, 0xb6, 0xa6, - 0xf9, 0xca, 0xab, 0xe2, 0xed, 0x24, 0xd6, 0x5f, 0xc9, 0x85, 0x57, 0x32, 0xf0, 0x15, 0x7a, 0x2b, - 0x7f, 0x86, 0x75, 0x93, 0x8a, 0x0b, 0xe4, 0x9d, 0x24, 0xd6, 0x17, 0xa1, 0xc3, 0x22, 0xa4, 0xee, - 0xdf, 0x4d, 0xa4, 0xf2, 0x47, 0x1b, 0xeb, 0xbe, 0x44, 0x5c, 0xb8, 0x87, 0x7e, 0xe4, 0x15, 0x7a, - 0x7f, 0x1e, 0x87, 0x82, 0x85, 0xbf, 0x40, 0x5b, 0x64, 0x76, 0x4d, 0x5f, 0x44, 0xec, 0x16, 0x11, - 0x3d, 0x4c, 0x35, 0x76, 0x92, 0x58, 0x9f, 0xf3, 0xc1, 0x1c, 0x82, 0x3f, 0x41, 0xeb, 0x12, 0xe3, - 0x6d, 0x55, 0x3c, 0x9d, 0x54, 0xe3, 0x6e, 0x12, 0xeb, 0x45, 0x07, 0x14, 0x4d, 0x26, 0xe4, 0x6f, - 0x3d, 0x20, 0x16, 0x71, 0x2e, 0xd3, 0x87, 0x12, 0x17, 0x16, 0x1c, 0x50, 0x34, 0xd9, 0x93, 0x87, - 0x03, 0xfc, 0xb2, 0x10, 0xe5, 0xc5, 0x9f, 0x3c, 0x29, 0x08, 0xd9, 0x90, 0xbd, 0xa4, 0x02, 0xb1, - 0x56, 0x51, 0x4b, 0xaa, 0x78, 0x49, 0xcd, 0x30, 0x48, 0x47, 0xec, 0x00, 0xed, 0x7c, 0xf3, 0x6d, - 0x67, 0xd7, 0x57, 0x1e, 0x87, 0x82, 0xc5, 0xea, 0x8d, 0x37, 0xca, 0x63, 0xe2, 0x0d, 0xe9, 0xa8, - 0x4f, 0x82, 0xcb, 0xf4, 0x7d, 0xc4, 0xeb, 0x6d, 0xce, 0x09, 0xf3, 0x90, 0x31, 0xb8, 0xbe, 0xd1, - 0x1a, 0xcf, 0x6f, 0xb4, 0xc6, 0x8b, 0x1b, 0x4d, 0xf9, 0x79, 0xaa, 0x29, 0xbf, 0x4f, 0x35, 0xe5, - 0xd9, 0x54, 0x53, 0xae, 0xa7, 0x9a, 0xf2, 0xcf, 0x54, 0x53, 0xfe, 0x9d, 0x6a, 0x8d, 0x17, 0x53, - 0x4d, 0xf9, 0xe5, 0x56, 0x6b, 0x5c, 0xdf, 0x6a, 0x8d, 0xe7, 0xb7, 0x5a, 0xe3, 0xfb, 0xf7, 0x87, - 0x0e, 0x1d, 0x45, 0x83, 0x7d, 0xcb, 0x1f, 0xf7, 0x86, 0x81, 0x79, 0x66, 0x7a, 0x66, 0xcf, 0xf5, - 0xcf, 0x9d, 0x5e, 0xd5, 0xff, 0xb7, 0x83, 0x16, 0xff, 0xef, 0xf5, 0xa3, 0xff, 0x03, 0x00, 0x00, - 0xff, 0xff, 0x39, 0xf0, 0xb8, 0xbf, 0xfe, 0x0e, 0x00, 0x00, + 
// 1358 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x57, 0xcd, 0x6f, 0xdc, 0x44, + 0x14, 0xdf, 0xcd, 0xd6, 0x9b, 0x74, 0xf2, 0xd5, 0x4e, 0x52, 0xba, 0xa5, 0x95, 0x1d, 0x16, 0x2a, + 0x8a, 0x90, 0xb2, 0x2a, 0x45, 0x42, 0x20, 0x2a, 0x21, 0xa7, 0x44, 0xaa, 0x94, 0x8a, 0xf2, 0x16, + 0x04, 0x82, 0x93, 0x63, 0xbf, 0xec, 0x5a, 0xf5, 0xda, 0x8e, 0x3d, 0x0e, 0x8d, 0x84, 0x04, 0x7f, + 0x02, 0x77, 0xee, 0x88, 0x0b, 0x27, 0x4e, 0x9c, 0xb9, 0xf4, 0xd8, 0x63, 0x4f, 0x16, 0xdd, 0x5c, + 0x90, 0x4f, 0xfd, 0x03, 0x38, 0xa0, 0xf9, 0x58, 0x7f, 0xad, 0x37, 0xcd, 0x65, 0x3d, 0xef, 0xf7, + 0xde, 0xef, 0xcd, 0xcc, 0x9b, 0x79, 0xef, 0xcd, 0x92, 0x9d, 0xf0, 0xc9, 0x68, 0xe0, 0x05, 0xa3, + 0x63, 0x6f, 0x12, 0x38, 0xe8, 0x0d, 0x62, 0x66, 0xb1, 0x58, 0xfe, 0xee, 0x86, 0x51, 0xc0, 0x02, + 0xaa, 0x09, 0xe1, 0xcd, 0xed, 0x51, 0x30, 0x0a, 0x04, 0x32, 0xe0, 0x23, 0xa9, 0xec, 0xff, 0xb6, + 0x44, 0xba, 0x80, 0x71, 0xe2, 0x31, 0xfa, 0x31, 0x59, 0x8e, 0x93, 0xc9, 0xc4, 0x8a, 0x4e, 0x7b, + 0xed, 0x9d, 0xf6, 0x9d, 0xd5, 0x0f, 0x36, 0x76, 0xa5, 0x9b, 0xa1, 0x44, 0xcd, 0xcd, 0x67, 0xa9, + 0xd1, 0xca, 0x52, 0x63, 0x66, 0x06, 0xb3, 0x01, 0xa7, 0x1e, 0x27, 0x18, 0xb9, 0x18, 0xf5, 0x96, + 0x2a, 0xd4, 0x2f, 0x25, 0x5a, 0x50, 0x95, 0x19, 0xcc, 0x06, 0xf4, 0x3e, 0x59, 0x71, 0xfd, 0x11, + 0xc6, 0x0c, 0xa3, 0x5e, 0x47, 0x70, 0x37, 0x15, 0xf7, 0xa1, 0x82, 0xcd, 0x2b, 0x8a, 0x9c, 0x1b, + 0x42, 0x3e, 0xa2, 0x1f, 0x92, 0xae, 0x6d, 0xd9, 0x63, 0x8c, 0x7b, 0x97, 0x04, 0x79, 0x5d, 0x91, + 0xf7, 0x04, 0x68, 0xae, 0x2b, 0xaa, 0x26, 0x8c, 0x40, 0xd9, 0xd2, 0xbb, 0x44, 0x73, 0x7d, 0x07, + 0x9f, 0xf6, 0x34, 0x41, 0x5a, 0xcb, 0x67, 0x74, 0xf0, 0x69, 0xc1, 0x11, 0x26, 0x20, 0x3f, 0xfd, + 0x5f, 0x2f, 0x91, 0xee, 0x5e, 0xce, 0xb6, 0xc7, 0x89, 0xff, 0x44, 0x85, 0x69, 0xad, 0x3c, 0x65, + 0x69, 0x46, 0x6e, 0x02, 0xf2, 0x53, 0x4c, 0xb8, 0x74, 0x1e, 0xa5, 0x3c, 0x21, 0xdf, 0x59, 0x24, + 0x0e, 0x46, 0x85, 0xa5, 0xca, 0xd9, 0x50, 0x1c, 0x65, 0x03, 0xea, 0x4b, 0xf7, 0xc8, 
0xaa, 0x30, + 0x93, 0x67, 0xaa, 0x82, 0x52, 0xa5, 0x6e, 0x29, 0x6a, 0xd9, 0x10, 0xca, 0x02, 0xdd, 0x27, 0x6b, + 0x27, 0x81, 0x97, 0x4c, 0x50, 0x79, 0xd1, 0x1a, 0xbc, 0x6c, 0x2b, 0x2f, 0x15, 0x4b, 0xa8, 0x48, + 0xdc, 0x4f, 0xcc, 0x4f, 0x79, 0xb6, 0x9a, 0xee, 0x79, 0x7e, 0xca, 0x96, 0x50, 0x91, 0xf8, 0xa6, + 0x3c, 0xeb, 0x10, 0x3d, 0xe5, 0x66, 0xf9, 0xbc, 0x4d, 0x95, 0x0c, 0xa1, 0x2c, 0xd0, 0xef, 0xc9, + 0x96, 0xeb, 0xc7, 0xcc, 0xf2, 0xd9, 0x23, 0x64, 0x91, 0x6b, 0x2b, 0x67, 0x2b, 0x0d, 0xce, 0x6e, + 0x2a, 0x67, 0x4d, 0x04, 0x68, 0x02, 0xfb, 0x7f, 0x75, 0xc9, 0xb2, 0x4a, 0x13, 0xfa, 0x35, 0xb9, + 0x7e, 0x78, 0xca, 0x30, 0x7e, 0x1c, 0x05, 0x36, 0xc6, 0x31, 0x3a, 0x8f, 0x31, 0x1a, 0xa2, 0x1d, + 0xf8, 0x8e, 0xb8, 0x30, 0x1d, 0xf3, 0x66, 0x96, 0x1a, 0x8b, 0x4c, 0x60, 0x91, 0x82, 0xbb, 0xf5, + 0x5c, 0xbf, 0xd1, 0xed, 0x52, 0xe1, 0x76, 0x81, 0x09, 0x2c, 0x52, 0xd0, 0x87, 0x64, 0x8b, 0x05, + 0xcc, 0xf2, 0xcc, 0xca, 0xb4, 0xe2, 0xce, 0x75, 0xcc, 0xeb, 0x3c, 0x08, 0x0d, 0x6a, 0x68, 0x02, + 0x73, 0x57, 0x07, 0x95, 0xa9, 0xc4, 0x1d, 0x2c, 0xbb, 0xaa, 0xaa, 0xa1, 0x09, 0xa4, 0x77, 0xc8, + 0x0a, 0x3e, 0x45, 0xfb, 0x2b, 0x77, 0x82, 0xe2, 0xf6, 0xb5, 0xcd, 0x35, 0x5e, 0x00, 0x66, 0x18, + 0xe4, 0x23, 0xfa, 0x3e, 0xb9, 0x7c, 0x9c, 0x60, 0x82, 0xc2, 0xb4, 0x2b, 0x4c, 0xd7, 0xb3, 0xd4, + 0x28, 0x40, 0x28, 0x86, 0x74, 0x97, 0x90, 0x38, 0x39, 0x94, 0xa5, 0x27, 0x16, 0xf7, 0xa8, 0x63, + 0x6e, 0x64, 0xa9, 0x51, 0x42, 0xa1, 0x34, 0xa6, 0x07, 0x64, 0x5b, 0xac, 0xee, 0x73, 0x9f, 0xc9, + 0xeb, 0xc8, 0x92, 0xc8, 0x47, 0x47, 0x5c, 0x9a, 0x8e, 0xd9, 0xcb, 0x52, 0xa3, 0x51, 0x0f, 0x8d, + 0x28, 0xed, 0x93, 0x6e, 0x1c, 0x7a, 0x2e, 0x8b, 0x7b, 0x97, 0x05, 0x9f, 0xf0, 0xfc, 0x95, 0x08, + 0xa8, 0xaf, 0xb0, 0x19, 0x5b, 0x91, 0x13, 0xf7, 0x48, 0xc9, 0x46, 0x20, 0xa0, 0xbe, 0xf9, 0xaa, + 0x1e, 0x07, 0x31, 0xdb, 0x77, 0x3d, 0x86, 0x91, 0x88, 0x5e, 0x6f, 0xb5, 0xb6, 0xaa, 0x9a, 0x1e, + 0x1a, 0x51, 0xfa, 0x13, 0xb9, 0x2d, 0xf0, 0x21, 0x8b, 0x12, 0x9b, 0x25, 0x11, 0x3a, 0x8f, 0x90, + 0x59, 0x8e, 0xc5, 0xac, 
0xda, 0x95, 0x58, 0x13, 0xee, 0xdf, 0xcb, 0x52, 0xe3, 0x62, 0x04, 0xb8, + 0x98, 0x59, 0xff, 0x47, 0xa2, 0x89, 0xc2, 0x4b, 0xef, 0x92, 0x55, 0xc1, 0xd8, 0xe3, 0x25, 0x33, + 0x56, 0xc9, 0xb2, 0xc9, 0x93, 0xba, 0x04, 0x43, 0x59, 0xa0, 0x9f, 0x91, 0x2b, 0x61, 0xbe, 0x1f, + 0xc5, 0x93, 0xd9, 0xb0, 0x9d, 0xa5, 0xc6, 0x9c, 0x0e, 0xe6, 0x90, 0xfe, 0xa7, 0x64, 0x59, 0x35, + 0x29, 0x5e, 0xa4, 0x63, 0x16, 0x44, 0x58, 0xab, 0xeb, 0x43, 0x8e, 0x15, 0x45, 0x5a, 0x98, 0x80, + 0xfc, 0xf4, 0xff, 0x58, 0x22, 0x2b, 0x0f, 0x8b, 0x5e, 0xb4, 0x26, 0xd6, 0x06, 0xc8, 0xab, 0x88, + 0xcc, 0x76, 0xcd, 0xbc, 0xc2, 0x8b, 0x5b, 0x19, 0x87, 0x8a, 0x44, 0xf7, 0x09, 0x2d, 0xed, 0xe8, + 0x91, 0xc5, 0x04, 0x57, 0x6e, 0xe2, 0x8d, 0x2c, 0x35, 0x1a, 0xb4, 0xd0, 0x80, 0xe5, 0xb3, 0x9b, + 0x42, 0x8e, 0x55, 0x06, 0x17, 0xb3, 0x2b, 0x1c, 0x2a, 0x12, 0xfd, 0x84, 0x6c, 0x14, 0xf9, 0x37, + 0x44, 0x9f, 0xa9, 0x74, 0xa5, 0x59, 0x6a, 0xd4, 0x34, 0x50, 0x93, 0x8b, 0x78, 0x69, 0x17, 0x8e, + 0xd7, 0x7f, 0x97, 0x88, 0x26, 0xf4, 0xf9, 0xc4, 0xea, 0x60, 0xf0, 0x48, 0x9d, 0x77, 0x31, 0x71, + 0xae, 0x81, 0x9a, 0x4c, 0xbf, 0x20, 0xd7, 0x4a, 0xc8, 0x83, 0xe0, 0x07, 0xdf, 0x0b, 0x2c, 0x27, + 0x8f, 0xda, 0x8d, 0x2c, 0x35, 0x9a, 0x0d, 0xa0, 0x19, 0xe6, 0x67, 0x60, 0x57, 0x30, 0x51, 0x4d, + 0x3a, 0xc5, 0x19, 0xcc, 0x6b, 0xa1, 0x01, 0xa3, 0x36, 0xb9, 0xc1, 0x4b, 0xc7, 0x29, 0xe0, 0x11, + 0x46, 0xe8, 0xdb, 0xe8, 0x14, 0xb7, 0xbf, 0xb7, 0xbe, 0xd3, 0xbe, 0xb3, 0x62, 0xde, 0xce, 0x52, + 0xe3, 0xad, 0x85, 0x46, 0xb3, 0x14, 0x81, 0xc5, 0x7e, 0x8a, 0xe7, 0x47, 0xad, 0xb9, 0x73, 0x6c, + 0xc1, 0xf3, 0x63, 0xb6, 0x3f, 0xc0, 0xa3, 0x78, 0x1f, 0x99, 0x3d, 0xce, 0x0b, 0x6b, 0x79, 0x7f, + 0x15, 0x2d, 0x34, 0x60, 0xf4, 0x5b, 0xd2, 0xb3, 0x03, 0x71, 0xdd, 0xdd, 0xc0, 0xdf, 0x0b, 0x7c, + 0x16, 0x05, 0xde, 0x81, 0xc5, 0xd0, 0xb7, 0x4f, 0x45, 0xed, 0xed, 0x98, 0xb7, 0xb2, 0xd4, 0x58, + 0x68, 0x03, 0x0b, 0x35, 0xd4, 0x21, 0xb7, 0x42, 0x37, 0x44, 0xde, 0xa5, 0xbe, 0x89, 0xac, 0x30, + 0xc4, 0x48, 0x66, 0x29, 0x3a, 0xb2, 0xb6, 0xc9, 0x5a, 0xbd, 
0x93, 0xa5, 0xc6, 0xb9, 0x76, 0x70, + 0xae, 0xb6, 0xff, 0xa7, 0x46, 0x34, 0x11, 0x27, 0x7e, 0xfd, 0xc6, 0x68, 0x39, 0x32, 0x68, 0xbc, + 0x1e, 0x95, 0xef, 0x7d, 0x55, 0x03, 0x35, 0xb9, 0xc2, 0x95, 0xab, 0xd3, 0x1a, 0xb8, 0x72, 0x3d, + 0x35, 0x99, 0xee, 0x91, 0xab, 0x0e, 0xda, 0xc1, 0x24, 0x8c, 0x44, 0xf1, 0x93, 0x53, 0xcb, 0xd0, + 0x5d, 0xcb, 0x52, 0x63, 0x5e, 0x09, 0xf3, 0x50, 0xdd, 0x49, 0x39, 0x42, 0x73, 0x4e, 0xe4, 0x32, + 0xe6, 0x21, 0x7a, 0x9f, 0x6c, 0xd6, 0xd7, 0x21, 0xdb, 0xda, 0x56, 0x96, 0x1a, 0x75, 0x15, 0xd4, + 0x01, 0x4e, 0x17, 0xb9, 0xf4, 0x20, 0x09, 0x3d, 0xd7, 0xb6, 0x38, 0xfd, 0x72, 0x41, 0xaf, 0xa9, + 0xa0, 0x0e, 0x70, 0x7a, 0x58, 0x6b, 0x5f, 0xa4, 0xa0, 0xd7, 0x54, 0x50, 0x07, 0x68, 0x48, 0x76, + 0xf2, 0xc0, 0x2e, 0x68, 0x30, 0xaa, 0x1d, 0xbe, 0x93, 0xa5, 0xc6, 0x6b, 0x6d, 0xe1, 0xb5, 0x16, + 0xf4, 0x94, 0xbc, 0x5d, 0x8e, 0xe1, 0xa2, 0x49, 0x65, 0x93, 0x7c, 0x37, 0x4b, 0x8d, 0x8b, 0x98, + 0xc3, 0x45, 0x8c, 0xfa, 0x7f, 0x77, 0x88, 0x26, 0x1e, 0xa6, 0xbc, 0xc6, 0xa3, 0x7c, 0x54, 0xec, + 0x07, 0x89, 0x5f, 0xe9, 0x30, 0x65, 0x1c, 0x2a, 0x12, 0x6f, 0x92, 0x38, 0x7b, 0x8a, 0x1c, 0x27, + 0xbc, 0x57, 0xc9, 0x4a, 0xa9, 0xc9, 0x26, 0x59, 0xd7, 0xc1, 0x1c, 0x42, 0x3f, 0x22, 0xeb, 0x0a, + 0x13, 0xc5, 0x5b, 0x3e, 0x0f, 0x35, 0xf3, 0x6a, 0x96, 0x1a, 0x55, 0x05, 0x54, 0x45, 0x4e, 0x14, + 0xef, 0x59, 0x40, 0x1b, 0xdd, 0x93, 0xfc, 0x31, 0x28, 0x88, 0x15, 0x05, 0x54, 0x45, 0xfe, 0xac, + 0x13, 0x80, 0x68, 0x49, 0x32, 0xbd, 0xc4, 0xb3, 0x2e, 0x07, 0xa1, 0x18, 0xf2, 0xd7, 0x62, 0x24, + 0xd7, 0x2a, 0x73, 0x49, 0x93, 0xaf, 0xc5, 0x19, 0x06, 0xf9, 0x88, 0x07, 0xd0, 0x29, 0x97, 0xf8, + 0xe5, 0xa2, 0x49, 0x96, 0x71, 0xa8, 0x48, 0x3c, 0xdf, 0x44, 0x39, 0x3e, 0x40, 0x7f, 0xc4, 0xc6, + 0x43, 0x8c, 0x4e, 0xf2, 0x37, 0xa0, 0xc8, 0xb7, 0x39, 0x25, 0xcc, 0x43, 0x26, 0x3e, 0x7f, 0xa9, + 0xb7, 0x5e, 0xbc, 0xd4, 0x5b, 0xaf, 0x5e, 0xea, 0xed, 0x9f, 0xa7, 0x7a, 0xfb, 0xf7, 0xa9, 0xde, + 0x7e, 0x36, 0xd5, 0xdb, 0xcf, 0xa7, 0x7a, 0xfb, 0x9f, 0xa9, 0xde, 0xfe, 0x77, 0xaa, 0xb7, 0x5e, + 
0x4d, 0xf5, 0xf6, 0x2f, 0x67, 0x7a, 0xeb, 0xf9, 0x99, 0xde, 0x7a, 0x71, 0xa6, 0xb7, 0xbe, 0x1b, + 0x8c, 0x5c, 0x36, 0x4e, 0x0e, 0x77, 0xed, 0x60, 0x32, 0x18, 0x45, 0xd6, 0x91, 0xe5, 0x5b, 0x03, + 0x2f, 0x78, 0xe2, 0x0e, 0x4e, 0xee, 0x0d, 0x9a, 0xfe, 0xf9, 0x1f, 0x76, 0xc5, 0xff, 0xfa, 0x7b, + 0xff, 0x07, 0x00, 0x00, 0xff, 0xff, 0x19, 0x80, 0x75, 0xde, 0x18, 0x10, 0x00, 0x00, } func (this *Result) Equal(that interface{}) bool { @@ -904,6 +983,9 @@ func (this *Result) Equal(that interface{}) bool { if !this.Caches.Equal(&that1.Caches) { return false } + if !this.Index.Equal(&that1.Index) { + return false + } return true } func (this *Caches) Equal(that interface{}) bool { @@ -1008,6 +1090,33 @@ func (this *Summary) Equal(that interface{}) bool { } return true } +func (this *Index) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*Index) + if !ok { + that2, ok := that.(Index) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if this.TotalChunks != that1.TotalChunks { + return false + } + if this.PostFilterChunks != that1.PostFilterChunks { + return false + } + return true +} func (this *Querier) Equal(that interface{}) bool { if that == nil { return this == nil @@ -1108,6 +1217,9 @@ func (this *Store) Equal(that interface{}) bool { if this.CongestionControlLatency != that1.CongestionControlLatency { return false } + if this.PipelineWrapperFilteredLines != that1.PipelineWrapperFilteredLines { + return false + } return true } func (this *Chunk) Equal(that interface{}) bool { @@ -1207,12 +1319,13 @@ func (this *Result) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 8) + s := make([]string, 0, 9) s = append(s, "&stats.Result{") s = append(s, "Summary: "+strings.Replace(this.Summary.GoString(), `&`, ``, 1)+",\n") s = append(s, "Querier: "+strings.Replace(this.Querier.GoString(), `&`, ``, 1)+",\n") 
s = append(s, "Ingester: "+strings.Replace(this.Ingester.GoString(), `&`, ``, 1)+",\n") s = append(s, "Caches: "+strings.Replace(this.Caches.GoString(), `&`, ``, 1)+",\n") + s = append(s, "Index: "+strings.Replace(this.Index.GoString(), `&`, ``, 1)+",\n") s = append(s, "}") return strings.Join(s, "") } @@ -1254,6 +1367,17 @@ func (this *Summary) GoString() string { s = append(s, "}") return strings.Join(s, "") } +func (this *Index) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 6) + s = append(s, "&stats.Index{") + s = append(s, "TotalChunks: "+fmt.Sprintf("%#v", this.TotalChunks)+",\n") + s = append(s, "PostFilterChunks: "+fmt.Sprintf("%#v", this.PostFilterChunks)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} func (this *Querier) GoString() string { if this == nil { return "nil" @@ -1282,7 +1406,7 @@ func (this *Store) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 11) + s := make([]string, 0, 12) s = append(s, "&stats.Store{") s = append(s, "TotalChunksRef: "+fmt.Sprintf("%#v", this.TotalChunksRef)+",\n") s = append(s, "TotalChunksDownloaded: "+fmt.Sprintf("%#v", this.TotalChunksDownloaded)+",\n") @@ -1291,6 +1415,7 @@ func (this *Store) GoString() string { s = append(s, "Chunk: "+strings.Replace(this.Chunk.GoString(), `&`, ``, 1)+",\n") s = append(s, "ChunkRefsFetchTime: "+fmt.Sprintf("%#v", this.ChunkRefsFetchTime)+",\n") s = append(s, "CongestionControlLatency: "+fmt.Sprintf("%#v", this.CongestionControlLatency)+",\n") + s = append(s, "PipelineWrapperFilteredLines: "+fmt.Sprintf("%#v", this.PipelineWrapperFilteredLines)+",\n") s = append(s, "}") return strings.Join(s, "") } @@ -1357,6 +1482,16 @@ func (m *Result) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + { + size, err := m.Index.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintStats(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x2a { size, err := 
m.Caches.MarshalToSizedBuffer(dAtA[:i]) if err != nil { @@ -1588,6 +1723,39 @@ func (m *Summary) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *Index) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *Index) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *Index) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.PostFilterChunks != 0 { + i = encodeVarintStats(dAtA, i, uint64(m.PostFilterChunks)) + i-- + dAtA[i] = 0x10 + } + if m.TotalChunks != 0 { + i = encodeVarintStats(dAtA, i, uint64(m.TotalChunks)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + func (m *Querier) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -1704,6 +1872,11 @@ func (m *Store) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x68 } + if m.PipelineWrapperFilteredLines != 0 { + i = encodeVarintStats(dAtA, i, uint64(m.PipelineWrapperFilteredLines)) + i-- + dAtA[i] = 0x38 + } if m.CongestionControlLatency != 0 { i = encodeVarintStats(dAtA, i, uint64(m.CongestionControlLatency)) i-- @@ -1898,6 +2071,8 @@ func (m *Result) Size() (n int) { n += 1 + l + sovStats(uint64(l)) l = m.Caches.Size() n += 1 + l + sovStats(uint64(l)) + l = m.Index.Size() + n += 1 + l + sovStats(uint64(l)) return n } @@ -1971,6 +2146,21 @@ func (m *Summary) Size() (n int) { return n } +func (m *Index) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.TotalChunks != 0 { + n += 1 + sovStats(uint64(m.TotalChunks)) + } + if m.PostFilterChunks != 0 { + n += 1 + sovStats(uint64(m.PostFilterChunks)) + } + return n +} + func (m *Querier) Size() (n int) { if m == nil { return 0 @@ -2028,6 +2218,9 @@ func (m *Store) Size() (n int) { if 
m.CongestionControlLatency != 0 { n += 1 + sovStats(uint64(m.CongestionControlLatency)) } + if m.PipelineWrapperFilteredLines != 0 { + n += 1 + sovStats(uint64(m.PipelineWrapperFilteredLines)) + } if m.QueryReferencedStructured { n += 2 } @@ -2118,6 +2311,7 @@ func (this *Result) String() string { `Querier:` + strings.Replace(strings.Replace(this.Querier.String(), "Querier", "Querier", 1), `&`, ``, 1) + `,`, `Ingester:` + strings.Replace(strings.Replace(this.Ingester.String(), "Ingester", "Ingester", 1), `&`, ``, 1) + `,`, `Caches:` + strings.Replace(strings.Replace(this.Caches.String(), "Caches", "Caches", 1), `&`, ``, 1) + `,`, + `Index:` + strings.Replace(strings.Replace(this.Index.String(), "Index", "Index", 1), `&`, ``, 1) + `,`, `}`, }, "") return s @@ -2160,6 +2354,17 @@ func (this *Summary) String() string { }, "") return s } +func (this *Index) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&Index{`, + `TotalChunks:` + fmt.Sprintf("%v", this.TotalChunks) + `,`, + `PostFilterChunks:` + fmt.Sprintf("%v", this.PostFilterChunks) + `,`, + `}`, + }, "") + return s +} func (this *Querier) String() string { if this == nil { return "nil" @@ -2195,6 +2400,7 @@ func (this *Store) String() string { `Chunk:` + strings.Replace(strings.Replace(this.Chunk.String(), "Chunk", "Chunk", 1), `&`, ``, 1) + `,`, `ChunkRefsFetchTime:` + fmt.Sprintf("%v", this.ChunkRefsFetchTime) + `,`, `CongestionControlLatency:` + fmt.Sprintf("%v", this.CongestionControlLatency) + `,`, + `PipelineWrapperFilteredLines:` + fmt.Sprintf("%v", this.PipelineWrapperFilteredLines) + `,`, `QueryReferencedStructured:` + fmt.Sprintf("%v", this.QueryReferencedStructured) + `,`, `}`, }, "") @@ -2404,6 +2610,39 @@ func (m *Result) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Index", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 
64 { + return ErrIntOverflowStats + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthStats + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthStats + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.Index.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipStats(dAtA[iNdEx:]) @@ -3010,6 +3249,97 @@ func (m *Summary) Unmarshal(dAtA []byte) error { } return nil } +func (m *Index) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStats + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Index: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Index: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field TotalChunks", wireType) + } + m.TotalChunks = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStats + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.TotalChunks |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field PostFilterChunks", wireType) + } + m.PostFilterChunks = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStats + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + 
m.PostFilterChunks |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipStats(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthStats + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthStats + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func (m *Querier) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 @@ -3415,6 +3745,25 @@ func (m *Store) Unmarshal(dAtA []byte) error { break } } + case 7: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field PipelineWrapperFilteredLines", wireType) + } + m.PipelineWrapperFilteredLines = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStats + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.PipelineWrapperFilteredLines |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } case 13: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field QueryReferencedStructured", wireType) diff --git a/pkg/logqlmodel/stats/stats.proto b/pkg/logqlmodel/stats/stats.proto index 20ad6077392b6..b53747b7941fd 100644 --- a/pkg/logqlmodel/stats/stats.proto +++ b/pkg/logqlmodel/stats/stats.proto @@ -4,7 +4,7 @@ package stats; import "gogoproto/gogo.proto"; -option go_package = "github.com/grafana/loki/pkg/logqlmodel/stats"; +option go_package = "github.com/grafana/loki/v3/pkg/logqlmodel/stats"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; @@ -26,6 +26,10 @@ message Result { (gogoproto.nullable) = false, (gogoproto.jsontag) = "cache" ]; + Index index = 5 [ + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "index" + ]; } message Caches { @@ -96,6 +100,16 @@ message Summary { int64 totalStructuredMetadataBytesProcessed = 12 [(gogoproto.jsontag) = 
"totalStructuredMetadataBytesProcessed"]; } +// Statistics from Index queries +// TODO(owen-d): include bytes. +// Needs some index methods added to return _sized_ chunk refs to know +message Index { + // Total chunks + int64 totalChunks = 1 [(gogoproto.jsontag) = "totalChunks"]; + // Post-filtered chunks + int64 postFilterChunks = 2 [(gogoproto.jsontag) = "postFilterChunks"]; +} + message Querier { Store store = 1 [ (gogoproto.nullable) = false, @@ -139,6 +153,9 @@ message Store { // Time spent being blocked on congestion control. int64 congestionControlLatency = 6 [(gogoproto.jsontag) = "congestionControlLatency"]; + + // Total number of lines filtered by pipeline wrapper. + int64 pipelineWrapperFilteredLines = 7 [(gogoproto.jsontag) = "pipelineWrapperFilteredLines"]; } message Chunk { diff --git a/pkg/loki/common/common.go b/pkg/loki/common/common.go index 6f7fd1c768edf..b7bb08e2cd46e 100644 --- a/pkg/loki/common/common.go +++ b/pkg/loki/common/common.go @@ -6,17 +6,17 @@ import ( "github.com/grafana/dskit/flagext" "github.com/grafana/dskit/netutil" - "github.com/grafana/loki/pkg/storage/chunk/client/alibaba" - "github.com/grafana/loki/pkg/storage/chunk/client/aws" - "github.com/grafana/loki/pkg/storage/chunk/client/azure" - "github.com/grafana/loki/pkg/storage/chunk/client/baidubce" - "github.com/grafana/loki/pkg/storage/chunk/client/congestion" - "github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/storage/chunk/client/ibmcloud" - "github.com/grafana/loki/pkg/storage/chunk/client/openstack" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/alibaba" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/aws" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/azure" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/baidubce" + 
"github.com/grafana/loki/v3/pkg/storage/chunk/client/congestion" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/ibmcloud" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/openstack" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/ring" ) // Config holds common config that can be shared between multiple other config sections. diff --git a/pkg/loki/config_compat.go b/pkg/loki/config_compat.go index 1e4f800c46476..fff357453be6c 100644 --- a/pkg/loki/config_compat.go +++ b/pkg/loki/config_compat.go @@ -4,9 +4,9 @@ import ( "errors" "fmt" - "github.com/grafana/loki/pkg/ingester/index" - frontend "github.com/grafana/loki/pkg/lokifrontend/frontend/v2" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/ingester/index" + frontend "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2" + "github.com/grafana/loki/v3/pkg/storage/config" ) func ValidateConfigCompatibility(c Config) error { diff --git a/pkg/loki/config_test.go b/pkg/loki/config_test.go index 73fc2cbb46aba..7a29f80bf02b9 100644 --- a/pkg/loki/config_test.go +++ b/pkg/loki/config_test.go @@ -8,8 +8,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/ingester" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/ingester" + "github.com/grafana/loki/v3/pkg/storage/config" ) func TestCrossComponentValidation(t *testing.T) { diff --git a/pkg/loki/config_wrapper.go b/pkg/loki/config_wrapper.go index 8a5f6c6811250..e10618e88c4ff 100644 --- a/pkg/loki/config_wrapper.go +++ b/pkg/loki/config_wrapper.go @@ -11,14 +11,14 @@ import ( "github.com/grafana/dskit/flagext" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/loki/common" - "github.com/grafana/loki/pkg/storage/chunk/cache" - 
"github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/cfg" - lokiring "github.com/grafana/loki/pkg/util/ring" - - "github.com/grafana/loki/pkg/ruler/rulestore/local" - loki_net "github.com/grafana/loki/pkg/util/net" + "github.com/grafana/loki/v3/pkg/loki/common" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/cfg" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + + "github.com/grafana/loki/v3/pkg/ruler/rulestore/local" + loki_net "github.com/grafana/loki/v3/pkg/util/net" ) const versionFlag = "version" @@ -409,8 +409,11 @@ func applyPathPrefixDefaults(r, defaults *ConfigWrapper) { if r.CompactorConfig.WorkingDirectory == defaults.CompactorConfig.WorkingDirectory { r.CompactorConfig.WorkingDirectory = fmt.Sprintf("%s/compactor", prefix) } - if r.StorageConfig.BloomShipperConfig.WorkingDirectory == defaults.StorageConfig.BloomShipperConfig.WorkingDirectory { - r.StorageConfig.BloomShipperConfig.WorkingDirectory = fmt.Sprintf("%s/blooms", prefix) + if len(r.StorageConfig.BloomShipperConfig.WorkingDirectory) == 1 && + len(r.StorageConfig.BloomShipperConfig.WorkingDirectory) == len(defaults.StorageConfig.BloomShipperConfig.WorkingDirectory) && + + r.StorageConfig.BloomShipperConfig.WorkingDirectory[0] == defaults.StorageConfig.BloomShipperConfig.WorkingDirectory[0] { + _ = r.StorageConfig.BloomShipperConfig.WorkingDirectory.Set(fmt.Sprintf("%s/blooms", prefix)) } } } diff --git a/pkg/loki/config_wrapper_test.go b/pkg/loki/config_wrapper_test.go index 41705f012f020..1852846aa2998 100644 --- a/pkg/loki/config_wrapper_test.go +++ b/pkg/loki/config_wrapper_test.go @@ -9,25 +9,26 @@ import ( "testing" "time" + "github.com/grafana/dskit/flagext" "github.com/grafana/dskit/netutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/distributor" - "github.com/grafana/loki/pkg/loki/common" - 
"github.com/grafana/loki/pkg/storage/bucket/swift" - "github.com/grafana/loki/pkg/storage/chunk/client/alibaba" - "github.com/grafana/loki/pkg/storage/chunk/client/aws" - "github.com/grafana/loki/pkg/storage/chunk/client/azure" - "github.com/grafana/loki/pkg/storage/chunk/client/baidubce" - "github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/ibmcloud" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/openstack" - "github.com/grafana/loki/pkg/util/cfg" - util_log "github.com/grafana/loki/pkg/util/log" - loki_net "github.com/grafana/loki/pkg/util/net" - lokiring "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/distributor" + "github.com/grafana/loki/v3/pkg/loki/common" + "github.com/grafana/loki/v3/pkg/storage/bucket/swift" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/alibaba" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/aws" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/azure" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/baidubce" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/ibmcloud" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/openstack" + "github.com/grafana/loki/v3/pkg/util/cfg" + util_log "github.com/grafana/loki/v3/pkg/util/log" + loki_net "github.com/grafana/loki/v3/pkg/util/net" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" ) // Can't use a totally empty yaml file or it causes weird behavior in the unmarshalling. 
@@ -100,7 +101,7 @@ common: assert.EqualValues(t, "/opt/loki/rules-temp", config.Ruler.RulePath) assert.EqualValues(t, "/opt/loki/wal", config.Ingester.WAL.Dir) assert.EqualValues(t, "/opt/loki/compactor", config.CompactorConfig.WorkingDirectory) - assert.EqualValues(t, "/opt/loki/blooms", config.StorageConfig.BloomShipperConfig.WorkingDirectory) + assert.EqualValues(t, flagext.StringSliceCSV{"/opt/loki/blooms"}, config.StorageConfig.BloomShipperConfig.WorkingDirectory) }) t.Run("accepts paths both with and without trailing slash", func(t *testing.T) { @@ -112,7 +113,7 @@ common: assert.EqualValues(t, "/opt/loki/rules-temp", config.Ruler.RulePath) assert.EqualValues(t, "/opt/loki/wal", config.Ingester.WAL.Dir) assert.EqualValues(t, "/opt/loki/compactor", config.CompactorConfig.WorkingDirectory) - assert.EqualValues(t, "/opt/loki/blooms", config.StorageConfig.BloomShipperConfig.WorkingDirectory) + assert.EqualValues(t, flagext.StringSliceCSV{"/opt/loki/blooms"}, config.StorageConfig.BloomShipperConfig.WorkingDirectory) }) t.Run("does not rewrite custom (non-default) paths passed via config file", func(t *testing.T) { diff --git a/pkg/loki/delete_store_listener.go b/pkg/loki/delete_store_listener.go index 10fbc88a9899f..ec2d9978bab87 100644 --- a/pkg/loki/delete_store_listener.go +++ b/pkg/loki/delete_store_listener.go @@ -3,7 +3,7 @@ package loki import ( "github.com/grafana/dskit/services" - "github.com/grafana/loki/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/compactor/deletion" ) func deleteRequestsStoreListener(d deletion.DeleteRequestsClient) *listener { diff --git a/pkg/loki/format_query_handler.go b/pkg/loki/format_query_handler.go index 4e65999ebbb38..4b715215a283c 100644 --- a/pkg/loki/format_query_handler.go +++ b/pkg/loki/format_query_handler.go @@ -4,8 +4,8 @@ import ( "encoding/json" "net/http" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/logql/syntax" + 
"github.com/grafana/loki/v3/pkg/util/server" ) func formatQueryHandler() http.HandlerFunc { diff --git a/pkg/loki/loki.go b/pkg/loki/loki.go index eb513910f1707..c77ef07892931 100644 --- a/pkg/loki/loki.go +++ b/pkg/loki/loki.go @@ -29,43 +29,43 @@ import ( "github.com/prometheus/client_golang/prometheus" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/bloomcompactor" - "github.com/grafana/loki/pkg/bloomgateway" - "github.com/grafana/loki/pkg/compactor" - compactorclient "github.com/grafana/loki/pkg/compactor/client" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/distributor" - "github.com/grafana/loki/pkg/ingester" - ingester_client "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/loki/common" - "github.com/grafana/loki/pkg/lokifrontend" - "github.com/grafana/loki/pkg/lokifrontend/frontend/transport" - "github.com/grafana/loki/pkg/querier" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/querier/worker" - "github.com/grafana/loki/pkg/ruler" - base_ruler "github.com/grafana/loki/pkg/ruler/base" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/scheduler" - internalserver "github.com/grafana/loki/pkg/server" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/tracing" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/fakeauth" - "github.com/grafana/loki/pkg/util/limiter" - util_log 
"github.com/grafana/loki/pkg/util/log" - lokiring "github.com/grafana/loki/pkg/util/ring" - serverutil "github.com/grafana/loki/pkg/util/server" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/bloomcompactor" + "github.com/grafana/loki/v3/pkg/bloomgateway" + "github.com/grafana/loki/v3/pkg/compactor" + compactorclient "github.com/grafana/loki/v3/pkg/compactor/client" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/distributor" + "github.com/grafana/loki/v3/pkg/ingester" + ingester_client "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/loki/common" + "github.com/grafana/loki/v3/pkg/lokifrontend" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/transport" + "github.com/grafana/loki/v3/pkg/querier" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/worker" + "github.com/grafana/loki/v3/pkg/ruler" + base_ruler "github.com/grafana/loki/v3/pkg/ruler/base" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/scheduler" + internalserver "github.com/grafana/loki/v3/pkg/server" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/tracing" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/fakeauth" + "github.com/grafana/loki/v3/pkg/util/limiter" + util_log "github.com/grafana/loki/v3/pkg/util/log" + lokiring 
"github.com/grafana/loki/v3/pkg/util/ring" + serverutil "github.com/grafana/loki/v3/pkg/util/server" + "github.com/grafana/loki/v3/pkg/validation" ) // Config is the root config for Loki. @@ -312,7 +312,7 @@ type Loki struct { querierAPI *querier.QuerierAPI ingesterQuerier *querier.IngesterQuerier Store storage.Store - BloomStore bloomshipper.Store + BloomStore bloomshipper.StoreWithMetrics tableManager *index.TableManager frontend Frontend ruler *base_ruler.Ruler @@ -665,10 +665,7 @@ func (t *Loki) setupModuleManager() error { Write: {Ingester, Distributor}, Backend: {QueryScheduler, Ruler, Compactor, IndexGateway, BloomGateway, BloomCompactor}, - // TODO(salvacorts): We added the BloomCompactor component to the `all` target to ease testing. - // We should remove it before releasing the feature since we don’t think any user running - // the single binary will benefit from the blooms given their scale in terms of ingested data - All: {QueryScheduler, QueryFrontend, Querier, Ingester, Distributor, Ruler, Compactor, BloomCompactor}, + All: {QueryScheduler, QueryFrontend, Querier, Ingester, Distributor, Ruler, Compactor}, } if t.Cfg.Querier.PerRequestLimitsEnabled { diff --git a/pkg/loki/loki_test.go b/pkg/loki/loki_test.go index 81d7c0384d7dc..a4e6ff73ca565 100644 --- a/pkg/loki/loki_test.go +++ b/pkg/loki/loki_test.go @@ -16,7 +16,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - internalserver "github.com/grafana/loki/pkg/server" + internalserver "github.com/grafana/loki/v3/pkg/server" ) func TestFlagDefaults(t *testing.T) { diff --git a/pkg/loki/modules.go b/pkg/loki/modules.go index d3a9a4842adc7..79c86836331aa 100644 --- a/pkg/loki/modules.go +++ b/pkg/loki/modules.go @@ -33,53 +33,52 @@ import ( "github.com/prometheus/client_golang/prometheus/collectors/version" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/bloomcompactor" - "github.com/grafana/loki/pkg/logqlmodel/stats" - - 
"github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/bloomgateway" - "github.com/grafana/loki/pkg/compactor" - compactorclient "github.com/grafana/loki/pkg/compactor/client" - "github.com/grafana/loki/pkg/compactor/client/grpc" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/compactor/generationnumber" - "github.com/grafana/loki/pkg/distributor" - "github.com/grafana/loki/pkg/ingester" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/lokifrontend/frontend" - "github.com/grafana/loki/pkg/lokifrontend/frontend/transport" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v1/frontendv1pb" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v2/frontendv2pb" - "github.com/grafana/loki/pkg/querier" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/ruler" - base_ruler "github.com/grafana/loki/pkg/ruler/base" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/scheduler" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - boltdbcompactor "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb/compactor" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/util/constants" - 
"github.com/grafana/loki/pkg/util/httpreq" - "github.com/grafana/loki/pkg/util/limiter" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/querylimits" - lokiring "github.com/grafana/loki/pkg/util/ring" - util_ring "github.com/grafana/loki/pkg/util/ring" - serverutil "github.com/grafana/loki/pkg/util/server" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/bloomcompactor" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/bloomgateway" + "github.com/grafana/loki/v3/pkg/compactor" + compactorclient "github.com/grafana/loki/v3/pkg/compactor/client" + "github.com/grafana/loki/v3/pkg/compactor/client/grpc" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/compactor/generationnumber" + "github.com/grafana/loki/v3/pkg/distributor" + "github.com/grafana/loki/v3/pkg/ingester" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/transport" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1/frontendv1pb" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2/frontendv2pb" + "github.com/grafana/loki/v3/pkg/querier" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/ruler" + base_ruler "github.com/grafana/loki/v3/pkg/ruler/base" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/scheduler" + "github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + 
"github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + boltdbcompactor "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb/compactor" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/limiter" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/querylimits" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + serverutil "github.com/grafana/loki/v3/pkg/util/server" + "github.com/grafana/loki/v3/pkg/validation" ) const maxChunkAgeForTableManager = 12 * time.Hour @@ -414,6 +413,7 @@ func (t *Loki) initQuerier() (services.Service, error) { t.querierAPI = querier.NewQuerierAPI(t.Cfg.Querier, t.Querier, t.Overrides, logger) indexStatsHTTPMiddleware := querier.WrapQuerySpanAndTimeout("query.IndexStats", t.Overrides) + indexShardsHTTPMiddleware := querier.WrapQuerySpanAndTimeout("query.IndexShards", t.Overrides) volumeHTTPMiddleware := querier.WrapQuerySpanAndTimeout("query.VolumeInstant", t.Overrides) volumeRangeHTTPMiddleware := querier.WrapQuerySpanAndTimeout("query.VolumeRange", t.Overrides) seriesHTTPMiddleware := querier.WrapQuerySpanAndTimeout("query.Series", t.Overrides) @@ -465,6 +465,7 @@ func (t *Loki) initQuerier() (services.Service, error) { if querierWorkerServiceConfig.QuerierRunningStandalone() { labelsHTTPMiddleware = middleware.Merge(httpMiddleware, labelsHTTPMiddleware) indexStatsHTTPMiddleware = middleware.Merge(httpMiddleware, indexStatsHTTPMiddleware) + 
indexShardsHTTPMiddleware = middleware.Merge(httpMiddleware, indexShardsHTTPMiddleware) volumeHTTPMiddleware = middleware.Merge(httpMiddleware, volumeHTTPMiddleware) volumeRangeHTTPMiddleware = middleware.Merge(httpMiddleware, volumeRangeHTTPMiddleware) seriesHTTPMiddleware = middleware.Merge(httpMiddleware, seriesHTTPMiddleware) @@ -495,6 +496,7 @@ func (t *Loki) initQuerier() (services.Service, error) { router.Path("/loki/api/v1/series").Methods("GET", "POST").Handler(seriesHTTPMiddleware.Wrap(httpHandler)) router.Path("/loki/api/v1/index/stats").Methods("GET", "POST").Handler(indexStatsHTTPMiddleware.Wrap(httpHandler)) + router.Path("/loki/api/v1/index/shards").Methods("GET", "POST").Handler(indexShardsHTTPMiddleware.Wrap(httpHandler)) router.Path("/loki/api/v1/index/volume").Methods("GET", "POST").Handler(volumeHTTPMiddleware.Wrap(httpHandler)) router.Path("/loki/api/v1/index/volume_range").Methods("GET", "POST").Handler(volumeRangeHTTPMiddleware.Wrap(httpHandler)) @@ -1037,7 +1039,10 @@ func (t *Loki) initQueryFrontend() (_ services.Service, err error) { t.Server.HTTP.Path("/loki/api/v1/labels").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/label/{name}/values").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/series").Methods("GET", "POST").Handler(frontendHandler) + t.Server.HTTP.Path("/loki/api/v1/detected_fields").Methods("GET", "POST").Handler(frontendHandler) + t.Server.HTTP.Path("/loki/api/v1/detected_labels").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/index/stats").Methods("GET", "POST").Handler(frontendHandler) + t.Server.HTTP.Path("/loki/api/v1/index/shards").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/index/volume").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/index/volume_range").Methods("GET", "POST").Handler(frontendHandler) 
t.Server.HTTP.Path("/api/prom/query").Methods("GET", "POST").Handler(frontendHandler) @@ -1475,15 +1480,14 @@ func (t *Loki) initBloomCompactor() (services.Service, error) { } logger := log.With(util_log.Logger, "component", "bloom-compactor") - shuffleSharding := util_ring.NewTenantShuffleSharding(t.bloomCompactorRingManager.Ring, t.bloomCompactorRingManager.RingLifecycler, t.Overrides.BloomCompactorShardSize) - return bloomcompactor.New( t.Cfg.BloomCompactor, t.Cfg.SchemaConfig, t.Cfg.StorageConfig, t.ClientMetrics, t.Store, - shuffleSharding, + t.bloomCompactorRingManager.Ring, + t.bloomCompactorRingManager.RingLifecycler, t.Overrides, t.BloomStore, logger, diff --git a/pkg/loki/modules_test.go b/pkg/loki/modules_test.go index 61cc9198bbf28..989d8e588c0de 100644 --- a/pkg/loki/modules_test.go +++ b/pkg/loki/modules_test.go @@ -13,13 +13,13 @@ import ( "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - bloomshipperconfig "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + bloomshipperconfig "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" ) func Test_calculateMaxLookBack(t *testing.T) { @@ -367,10 +367,8 @@ func minimalWorkingConfig(t *testing.T, dir, target string, 
cfgTransformers ...f cfg.StorageConfig = storage.Config{ FSConfig: local.FSConfig{Directory: dir}, BloomShipperConfig: bloomshipperconfig.Config{ - WorkingDirectory: filepath.Join(dir, "blooms"), - BlocksDownloadingQueue: bloomshipperconfig.DownloadingQueueConfig{ - WorkersCount: 1, - }, + WorkingDirectory: []string{filepath.Join(dir, "blooms")}, + DownloadParallelism: 1, }, BoltDBShipperConfig: boltdb.IndexCfg{ Config: indexshipper.Config{ diff --git a/pkg/loki/runtime_config.go b/pkg/loki/runtime_config.go index 3432ee1b68b80..e8e3c7e315870 100644 --- a/pkg/loki/runtime_config.go +++ b/pkg/loki/runtime_config.go @@ -9,9 +9,9 @@ import ( "github.com/grafana/dskit/runtimeconfig" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/runtime" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/runtime" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) // runtimeConfigValues are values that can be reloaded from configuration file while Loki is running. 
diff --git a/pkg/loki/runtime_config_test.go b/pkg/loki/runtime_config_test.go index d0fd2ffa41038..cf604455929c6 100644 --- a/pkg/loki/runtime_config_test.go +++ b/pkg/loki/runtime_config_test.go @@ -16,8 +16,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/validation" ) func Test_LoadRetentionRules(t *testing.T) { diff --git a/pkg/loki/version_handler.go b/pkg/loki/version_handler.go index 316d4825f7005..ef49d1b0f7de7 100644 --- a/pkg/loki/version_handler.go +++ b/pkg/loki/version_handler.go @@ -6,7 +6,7 @@ import ( prom "github.com/prometheus/prometheus/web/api/v1" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/build" ) func versionHandler() http.HandlerFunc { diff --git a/pkg/loki/version_handler_test.go b/pkg/loki/version_handler_test.go index c7b9094b4ae28..fb39b63f37568 100644 --- a/pkg/loki/version_handler_test.go +++ b/pkg/loki/version_handler_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/build" ) func TestVersionHandler(t *testing.T) { diff --git a/pkg/lokifrontend/config.go b/pkg/lokifrontend/config.go index 30ab5cd29fecc..f53f17085c70a 100644 --- a/pkg/lokifrontend/config.go +++ b/pkg/lokifrontend/config.go @@ -5,9 +5,9 @@ import ( "github.com/grafana/dskit/crypto/tls" - "github.com/grafana/loki/pkg/lokifrontend/frontend/transport" - v1 "github.com/grafana/loki/pkg/lokifrontend/frontend/v1" - v2 "github.com/grafana/loki/pkg/lokifrontend/frontend/v2" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/transport" + v1 "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1" + v2 "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2" ) type Config struct { diff --git 
a/pkg/lokifrontend/frontend/config.go b/pkg/lokifrontend/frontend/config.go index 54eaa264d98f0..fb61a482563fd 100644 --- a/pkg/lokifrontend/frontend/config.go +++ b/pkg/lokifrontend/frontend/config.go @@ -9,11 +9,11 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/lokifrontend/frontend/transport" - v1 "github.com/grafana/loki/pkg/lokifrontend/frontend/v1" - v2 "github.com/grafana/loki/pkg/lokifrontend/frontend/v2" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/transport" + v1 "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1" + v2 "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util" ) // This struct combines several configuration options together to preserve backwards compatibility. diff --git a/pkg/lokifrontend/frontend/downstream_roundtripper.go b/pkg/lokifrontend/frontend/downstream_roundtripper.go index 90f330900c32b..86010e127621f 100644 --- a/pkg/lokifrontend/frontend/downstream_roundtripper.go +++ b/pkg/lokifrontend/frontend/downstream_roundtripper.go @@ -10,7 +10,7 @@ import ( "github.com/grafana/dskit/user" "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) // RoundTripper that forwards requests to downstream URL. 
diff --git a/pkg/lokifrontend/frontend/transport/handler.go b/pkg/lokifrontend/frontend/transport/handler.go index 1c271805bbdab..7c9e50daf8b59 100644 --- a/pkg/lokifrontend/frontend/transport/handler.go +++ b/pkg/lokifrontend/frontend/transport/handler.go @@ -23,11 +23,11 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - querier_stats "github.com/grafana/loki/pkg/querier/stats" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + querier_stats "github.com/grafana/loki/v3/pkg/querier/stats" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/server" ) const ( diff --git a/pkg/lokifrontend/frontend/transport/roundtripper.go b/pkg/lokifrontend/frontend/transport/roundtripper.go index c6e38315930a2..d76512f5b00a6 100644 --- a/pkg/lokifrontend/frontend/transport/roundtripper.go +++ b/pkg/lokifrontend/frontend/transport/roundtripper.go @@ -5,8 +5,8 @@ import ( "github.com/grafana/dskit/httpgrpc" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) // GrpcRoundTripper is similar to http.RoundTripper, but works with HTTP requests converted to protobuf messages. 
diff --git a/pkg/lokifrontend/frontend/v1/frontend.go b/pkg/lokifrontend/frontend/v1/frontend.go index cf17b62b03186..3caae56955ca4 100644 --- a/pkg/lokifrontend/frontend/v1/frontend.go +++ b/pkg/lokifrontend/frontend/v1/frontend.go @@ -18,12 +18,12 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v1/frontendv1pb" - "github.com/grafana/loki/pkg/querier/stats" - "github.com/grafana/loki/pkg/queue" - "github.com/grafana/loki/pkg/scheduler/limits" - "github.com/grafana/loki/pkg/util" - lokigrpc "github.com/grafana/loki/pkg/util/httpgrpc" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1/frontendv1pb" + "github.com/grafana/loki/v3/pkg/querier/stats" + "github.com/grafana/loki/v3/pkg/queue" + "github.com/grafana/loki/v3/pkg/scheduler/limits" + "github.com/grafana/loki/v3/pkg/util" + lokigrpc "github.com/grafana/loki/v3/pkg/util/httpgrpc" ) var errTooManyRequest = httpgrpc.Errorf(http.StatusTooManyRequests, "too many outstanding requests") diff --git a/pkg/lokifrontend/frontend/v1/frontend_test.go b/pkg/lokifrontend/frontend/v1/frontend_test.go index a10a55b37984f..2d26e9f188a3b 100644 --- a/pkg/lokifrontend/frontend/v1/frontend_test.go +++ b/pkg/lokifrontend/frontend/v1/frontend_test.go @@ -28,15 +28,15 @@ import ( "go.uber.org/atomic" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/lokifrontend/frontend/transport" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v1/frontendv1pb" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - querier_worker "github.com/grafana/loki/pkg/querier/worker" - "github.com/grafana/loki/pkg/queue" - "github.com/grafana/loki/pkg/scheduler/limits" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/transport" + 
"github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1/frontendv1pb" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + querier_worker "github.com/grafana/loki/v3/pkg/querier/worker" + "github.com/grafana/loki/v3/pkg/queue" + "github.com/grafana/loki/v3/pkg/scheduler/limits" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/lokifrontend/frontend/v1/frontendv1pb/frontend.pb.go b/pkg/lokifrontend/frontend/v1/frontendv1pb/frontend.pb.go index 10d525a0a829a..e31c88efa9e41 100644 --- a/pkg/lokifrontend/frontend/v1/frontendv1pb/frontend.pb.go +++ b/pkg/lokifrontend/frontend/v1/frontendv1pb/frontend.pb.go @@ -12,7 +12,7 @@ import ( _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" httpgrpc "github.com/grafana/dskit/httpgrpc" - stats "github.com/grafana/loki/pkg/querier/stats" + stats "github.com/grafana/loki/v3/pkg/querier/stats" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" diff --git a/pkg/lokifrontend/frontend/v1/queue_test.go b/pkg/lokifrontend/frontend/v1/queue_test.go index a6f380afd492d..bd429e11bccf3 100644 --- a/pkg/lokifrontend/frontend/v1/queue_test.go +++ b/pkg/lokifrontend/frontend/v1/queue_test.go @@ -17,8 +17,8 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v1/frontendv1pb" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1/frontendv1pb" + "github.com/grafana/loki/v3/pkg/util/constants" ) func setupFrontend(t *testing.T, config Config) *Frontend { diff --git a/pkg/lokifrontend/frontend/v2/frontend.go b/pkg/lokifrontend/frontend/v2/frontend.go index 99e3e05ad83c9..5311573020735 100644 --- a/pkg/lokifrontend/frontend/v2/frontend.go +++ b/pkg/lokifrontend/frontend/v2/frontend.go @@ -27,14 +27,14 @@ import ( 
"github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/lokifrontend/frontend/transport" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v2/frontendv2pb" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/querier/stats" - lokigrpc "github.com/grafana/loki/pkg/util/httpgrpc" - "github.com/grafana/loki/pkg/util/httpreq" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/transport" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2/frontendv2pb" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/stats" + lokigrpc "github.com/grafana/loki/v3/pkg/util/httpgrpc" + "github.com/grafana/loki/v3/pkg/util/httpreq" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/lokifrontend/frontend/v2/frontend_scheduler_worker.go b/pkg/lokifrontend/frontend/v2/frontend_scheduler_worker.go index b58c573b29136..b5cdf56f2d9a4 100644 --- a/pkg/lokifrontend/frontend/v2/frontend_scheduler_worker.go +++ b/pkg/lokifrontend/frontend/v2/frontend_scheduler_worker.go @@ -15,9 +15,9 @@ import ( "github.com/pkg/errors" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" - "github.com/grafana/loki/pkg/util" - lokiutil "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" + "github.com/grafana/loki/v3/pkg/util" + lokiutil "github.com/grafana/loki/v3/pkg/util" ) type frontendSchedulerWorkers struct { diff --git a/pkg/lokifrontend/frontend/v2/frontend_test.go b/pkg/lokifrontend/frontend/v2/frontend_test.go index 9a87c5ff1c7cc..41fa9653f6949 100644 --- a/pkg/lokifrontend/frontend/v2/frontend_test.go +++ b/pkg/lokifrontend/frontend/v2/frontend_test.go @@ -19,14 +19,14 @@ import ( "go.uber.org/atomic" "google.golang.org/grpc" - 
"github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v2/frontendv2pb" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/stats" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/test" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2/frontendv2pb" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/stats" + "github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/test" ) const testFrontendWorkerConcurrency = 5 diff --git a/pkg/lokifrontend/frontend/v2/frontendv2pb/frontend.pb.go b/pkg/lokifrontend/frontend/v2/frontendv2pb/frontend.pb.go index 3773159c0cc37..8fdae39bf525e 100644 --- a/pkg/lokifrontend/frontend/v2/frontendv2pb/frontend.pb.go +++ b/pkg/lokifrontend/frontend/v2/frontendv2pb/frontend.pb.go @@ -9,8 +9,8 @@ import ( _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" httpgrpc "github.com/grafana/dskit/httpgrpc" - queryrange "github.com/grafana/loki/pkg/querier/queryrange" - stats "github.com/grafana/loki/pkg/querier/stats" + queryrange "github.com/grafana/loki/v3/pkg/querier/queryrange" + stats "github.com/grafana/loki/v3/pkg/querier/stats" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" diff --git a/pkg/push/go.mod b/pkg/push/go.mod index 9cddcb5f2e342..067ca8f02c80c 100644 --- a/pkg/push/go.mod +++ b/pkg/push/go.mod @@ -18,7 +18,7 @@ require ( golang.org/x/sys v0.13.0 // indirect golang.org/x/text v0.13.0 // indirect google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect - google.golang.org/protobuf v1.30.0 // 
indirect + google.golang.org/protobuf v1.33.0 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/pkg/push/go.sum b/pkg/push/go.sum index e7e0e8118dbff..3ed0ecc1d4b45 100644 --- a/pkg/push/go.sum +++ b/pkg/push/go.sum @@ -67,8 +67,8 @@ google.golang.org/grpc v1.56.3 h1:8I4C0Yq1EjstUzUJzpcRVbuYA2mODtEmpWiQoN/b2nc= google.golang.org/grpc v1.56.3/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= diff --git a/pkg/querier/astmapper/parallel.go b/pkg/querier/astmapper/parallel.go index 4ae5a5b5c7fbd..e935f14204521 100644 --- a/pkg/querier/astmapper/parallel.go +++ b/pkg/querier/astmapper/parallel.go @@ -6,7 +6,7 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/prometheus/promql/parser" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var summableAggregates = map[parser.ItemType]struct{}{ diff --git a/pkg/querier/astmapper/shard_summer.go b/pkg/querier/astmapper/shard_summer.go index 8226f35804eef..12f7cf616f160 100644 --- a/pkg/querier/astmapper/shard_summer.go +++ 
b/pkg/querier/astmapper/shard_summer.go @@ -12,7 +12,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) const ( diff --git a/pkg/querier/handler.go b/pkg/querier/handler.go index 47a4c15e07511..0f3feacc0087b 100644 --- a/pkg/querier/handler.go +++ b/pkg/querier/handler.go @@ -7,10 +7,10 @@ import ( "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) type Handler struct { @@ -93,12 +93,40 @@ func (h *Handler) Do(ctx context.Context, req queryrangebase.Request) (queryrang return nil, err } return &queryrange.IndexStatsResponse{Response: result}, nil + case *logproto.ShardsRequest: + request := loghttp.NewRangeQueryWithDefaults() + request.Start = concrete.From.Time() + request.End = concrete.Through.Time() + request.Query = concrete.GetQuery() + request.UpdateStep() + result, err := h.api.IndexShardsHandler(ctx, request, concrete.TargetBytesPerShard) + if err != nil { + return nil, err + } + return &queryrange.ShardsResponse{Response: result}, nil + case *logproto.VolumeRequest: result, err := h.api.VolumeHandler(ctx, concrete) if err != nil { return nil, err } return &queryrange.VolumeResponse{Response: result}, nil + case *queryrange.DetectedFieldsRequest: + result, err := h.api.DetectedFieldsHandler(ctx, &concrete.DetectedFieldsRequest) + if err != nil { + return nil, err + } + + return &queryrange.DetectedFieldsResponse{ + Response: result, + }, nil + 
case *queryrange.DetectedLabelsRequest: + result, err := h.api.DetectedLabelsHandler(ctx, &concrete.DetectedLabelsRequest) + if err != nil { + return nil, err + } + + return &queryrange.DetectedLabelsResponse{Response: result}, nil default: return nil, fmt.Errorf("unsupported query type %T", req) } diff --git a/pkg/querier/http.go b/pkg/querier/http.go index a508bf9f7286b..614fc5e46104d 100644 --- a/pkg/querier/http.go +++ b/pkg/querier/http.go @@ -17,22 +17,22 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/loghttp" - loghttp_legacy "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange" - index_stats "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util/httpreq" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/marshal" - marshal_legacy "github.com/grafana/loki/pkg/util/marshal/legacy" - serverutil "github.com/grafana/loki/pkg/util/server" - "github.com/grafana/loki/pkg/util/spanlogger" - util_validation "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + loghttp_legacy "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + index_stats "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/util/httpreq" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/marshal" + marshal_legacy 
"github.com/grafana/loki/v3/pkg/util/marshal/legacy" + serverutil "github.com/grafana/loki/v3/pkg/util/server" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + util_validation "github.com/grafana/loki/v3/pkg/util/validation" ) const ( @@ -309,6 +309,39 @@ func (q *QuerierAPI) IndexStatsHandler(ctx context.Context, req *loghttp.RangeQu return resp, err } +func (q *QuerierAPI) IndexShardsHandler(ctx context.Context, req *loghttp.RangeQuery, targetBytesPerShard uint64) (*logproto.ShardsResponse, error) { + timer := prometheus.NewTimer(logql.QueryTime.WithLabelValues(logql.QueryTypeShards)) + defer timer.ObserveDuration() + + start := time.Now() + statsCtx, ctx := stats.NewContext(ctx) + + resp, err := q.querier.IndexShards(ctx, req, targetBytesPerShard) + queueTime, _ := ctx.Value(httpreq.QueryQueueTimeHTTPHeader).(time.Duration) + + resLength := 0 + if resp != nil { + resLength = len(resp.Shards) + stats.JoinResults(ctx, resp.Statistics) + } + + statResult := statsCtx.Result(time.Since(start), queueTime, resLength) + + log := spanlogger.FromContext(ctx) + statResult.Log(level.Debug(log)) + + status := 200 + if err != nil { + status, _ = serverutil.ClientHTTPStatusAndError(err) + } + + logql.RecordShardsQueryMetrics( + ctx, log, req.Start, req.End, req.Query, targetBytesPerShard, strconv.Itoa(status), resLength, statResult, + ) + + return resp, err +} + // TODO(trevorwhitney): add test for the handler split // VolumeHandler queries the index label volumes related to the passed matchers and given time range. 
@@ -343,6 +376,22 @@ func (q *QuerierAPI) VolumeHandler(ctx context.Context, req *logproto.VolumeRequ return resp, nil } +func (q *QuerierAPI) DetectedFieldsHandler(ctx context.Context, req *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) { + resp, err := q.querier.DetectedFields(ctx, req) + if err != nil { + return nil, err + } + if resp == nil { // Some stores don't implement this + level.Debug(spanlogger.FromContext(ctx)).Log( + "msg", "queried store for detected fields that does not support it, no response from querier.DetectedFields", + ) + return &logproto.DetectedFieldsResponse{ + Fields: []*logproto.DetectedField{}, + }, nil + } + return resp, nil +} + func (q *QuerierAPI) validateMaxEntriesLimits(ctx context.Context, expr syntax.Expr, limit uint32) error { tenantIDs, err := tenant.TenantIDs(ctx) if err != nil { @@ -363,6 +412,16 @@ func (q *QuerierAPI) validateMaxEntriesLimits(ctx context.Context, expr syntax.E return nil } +// DetectedLabelsHandler returns a response for detected labels +func (q *QuerierAPI) DetectedLabelsHandler(ctx context.Context, req *logproto.DetectedLabelsRequest) (*logproto.DetectedLabelsResponse, error) { + resp, err := q.querier.DetectedLabels(ctx, req) + + if err != nil { + return nil, err + } + return resp, nil +} + // WrapQuerySpanAndTimeout applies a context deadline and a span logger to a query call. // // The timeout is based on the per-tenant query timeout configuration. 
diff --git a/pkg/querier/http_test.go b/pkg/querier/http_test.go index 180e82c6b07d4..a97e55f882bab 100644 --- a/pkg/querier/http_test.go +++ b/pkg/querier/http_test.go @@ -11,9 +11,9 @@ import ( "github.com/pkg/errors" "github.com/stretchr/testify/mock" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/validation" "github.com/go-kit/log" "github.com/grafana/dskit/user" diff --git a/pkg/querier/ingester_querier.go b/pkg/querier/ingester_querier.go index fb57a415ba7f6..386bcfb4be788 100644 --- a/pkg/querier/ingester_querier.go +++ b/pkg/querier/ingester_querier.go @@ -6,7 +6,7 @@ import ( "strings" "time" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" "github.com/gogo/status" "github.com/grafana/dskit/httpgrpc" @@ -18,15 +18,15 @@ import ( "github.com/prometheus/prometheus/model/labels" "google.golang.org/grpc/codes" - "github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - index_stats "github.com/grafana/loki/pkg/storage/stores/index/stats" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + index_stats "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + util_log 
"github.com/grafana/loki/v3/pkg/util/log" ) type responseFromIngesters struct { diff --git a/pkg/querier/ingester_querier_test.go b/pkg/querier/ingester_querier_test.go index d5f4d872c5084..d2cb00d82ec59 100644 --- a/pkg/querier/ingester_querier_test.go +++ b/pkg/querier/ingester_querier_test.go @@ -19,9 +19,9 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/util/constants" ) func TestIngesterQuerier_earlyExitOnQuorum(t *testing.T) { diff --git a/pkg/querier/limits/definitions.go b/pkg/querier/limits/definitions.go index cda30b116976d..dec518a7fc7da 100644 --- a/pkg/querier/limits/definitions.go +++ b/pkg/querier/limits/definitions.go @@ -4,7 +4,7 @@ import ( "context" "time" - "github.com/grafana/loki/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql" ) type TimeRangeLimits interface { diff --git a/pkg/querier/multi_tenant_querier.go b/pkg/querier/multi_tenant_querier.go index 2849830141167..fb90cb0ad4472 100644 --- a/pkg/querier/multi_tenant_querier.go +++ b/pkg/querier/multi_tenant_querier.go @@ -3,22 +3,24 @@ package querier import ( "context" "fmt" + "strings" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" "github.com/go-kit/log" + "github.com/go-kit/log/level" "github.com/grafana/dskit/user" "github.com/prometheus/prometheus/model/labels" "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - 
"github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" ) const ( @@ -29,12 +31,14 @@ const ( // MultiTenantQuerier is able to query across different tenants. type MultiTenantQuerier struct { Querier + logger log.Logger } // NewMultiTenantQuerier returns a new querier able to query across different tenants. -func NewMultiTenantQuerier(querier Querier, _ log.Logger) *MultiTenantQuerier { +func NewMultiTenantQuerier(querier Querier, logger log.Logger) *MultiTenantQuerier { return &MultiTenantQuerier{ Querier: querier, + logger: logger, } } @@ -199,6 +203,44 @@ func (q *MultiTenantQuerier) IndexStats(ctx context.Context, req *loghttp.RangeQ return &merged, nil } +func (q *MultiTenantQuerier) IndexShards( + ctx context.Context, + req *loghttp.RangeQuery, + targetBytesPerShard uint64, +) (*logproto.ShardsResponse, error) { + tenantIDs, err := tenant.TenantIDs(ctx) + if err != nil { + return nil, err + } + + if len(tenantIDs) == 1 { + return q.Querier.IndexShards(ctx, req, targetBytesPerShard) + } + + responses := make([]*logproto.ShardsResponse, len(tenantIDs)) + for i, id := range tenantIDs { + singleContext := user.InjectOrgID(ctx, id) + resp, err := q.Querier.IndexShards(singleContext, req, targetBytesPerShard) + if err != nil { + return nil, err + } + + responses[i] = resp + } + + // TODO(owen-d): better merging + var highestIdx int + var highestVal int + for i, resp := range responses { + if len(resp.Shards) > highestVal { + highestIdx = i + highestVal = len(resp.Shards) + } + } + + return responses[highestIdx], nil +} + func (q *MultiTenantQuerier) Volume(ctx context.Context, req *logproto.VolumeRequest) (*logproto.VolumeResponse, error) { tenantIDs, err := tenant.TenantIDs(ctx) if err != 
nil { @@ -220,6 +262,48 @@ func (q *MultiTenantQuerier) Volume(ctx context.Context, req *logproto.VolumeReq return merged, nil } +func (q *MultiTenantQuerier) DetectedFields(ctx context.Context, req *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) { + tenantIDs, err := tenant.TenantIDs(ctx) + if err != nil { + return nil, err + } + + if len(tenantIDs) == 1 { + return q.Querier.DetectedFields(ctx, req) + } + + level.Debug(q.logger).Log( + "msg", "detected fields requested for multiple tenants, but not yet supported", + "tenantIDs", strings.Join(tenantIDs, ","), + ) + + return &logproto.DetectedFieldsResponse{ + Fields: []*logproto.DetectedField{}, + }, nil +} + +func (q *MultiTenantQuerier) DetectedLabels(ctx context.Context, req *logproto.DetectedLabelsRequest) (*logproto.DetectedLabelsResponse, error) { + // TODO(shantanu) + tenantIDs, err := tenant.TenantID(ctx) + if err != nil { + return nil, err + } + + if len(tenantIDs) == 1 { + return q.Querier.DetectedLabels(ctx, req) + } + + //resp := make([]*logproto.DetectedLabels, len(tenantIDs)) + + return &logproto.DetectedLabelsResponse{ + DetectedLabels: []*logproto.DetectedLabel{ + {Label: "cluster"}, + {Label: "namespace"}, + {Label: "instance"}, + }, + }, nil +} + // removeTenantSelector filters the given tenant IDs based on any tenant ID filter the in passed selector. func removeTenantSelector(params logql.SelectSampleParams, tenantIDs []string) (map[string]struct{}, syntax.Expr, error) { expr, err := params.Expr() @@ -248,7 +332,7 @@ func replaceMatchers(expr syntax.Expr, matchers []*labels.Matcher) syntax.Expr { } // See https://github.com/grafana/mimir/blob/114ab88b50638a2047e2ca2a60640f6ca6fe8c17/pkg/querier/tenantfederation/tenant_federation.go#L29-L69 -// filterValuesByMatchers applies matchers to inputed `idLabelName` and +// filterValuesByMatchers applies matchers to inputted `idLabelName` and // `ids`. 
A set of matched IDs is returned and also all label matchers not // targeting the `idLabelName` label. // diff --git a/pkg/querier/multi_tenant_querier_test.go b/pkg/querier/multi_tenant_querier_test.go index 0d17bcc9adffa..38f190562ea1a 100644 --- a/pkg/querier/multi_tenant_querier_test.go +++ b/pkg/querier/multi_tenant_querier_test.go @@ -15,11 +15,11 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" ) func TestMultiTenantQuerier_SelectLogs(t *testing.T) { diff --git a/pkg/querier/plan/plan.go b/pkg/querier/plan/plan.go index d6548537a394c..ea872ac11cc58 100644 --- a/pkg/querier/plan/plan.go +++ b/pkg/querier/plan/plan.go @@ -3,8 +3,8 @@ package plan import ( "bytes" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util" ) type QueryPlan struct { diff --git a/pkg/querier/plan/plan_test.go b/pkg/querier/plan/plan_test.go index 60f7d3fad1806..5998448cfa56a 100644 --- a/pkg/querier/plan/plan_test.go +++ b/pkg/querier/plan/plan_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func TestMarshalTo(t *testing.T) { diff --git a/pkg/querier/querier.go b/pkg/querier/querier.go index a91293c977968..b0109ae8986ca 100644 --- a/pkg/querier/querier.go +++ b/pkg/querier/querier.go @@ -3,14 +3,22 @@ package querier import ( "context" "flag" + "fmt" "net/http" + "sort" + "strconv" "time" + 
"github.com/axiomhq/hyperloglog" + "github.com/dustin/go-humanize" "github.com/go-kit/log" "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + logql_log "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" "github.com/go-kit/log/level" "github.com/grafana/dskit/httpgrpc" @@ -22,19 +30,19 @@ import ( "golang.org/x/sync/errgroup" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - querier_limits "github.com/grafana/loki/pkg/querier/limits" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - listutil "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/spanlogger" - util_validation "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + querier_limits "github.com/grafana/loki/v3/pkg/querier/limits" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + listutil "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + util_validation 
"github.com/grafana/loki/v3/pkg/util/validation" ) const ( @@ -92,7 +100,10 @@ type Querier interface { Series(ctx context.Context, req *logproto.SeriesRequest) (*logproto.SeriesResponse, error) Tail(ctx context.Context, req *logproto.TailRequest, categorizedLabels bool) (*Tailer, error) IndexStats(ctx context.Context, req *loghttp.RangeQuery) (*stats.Stats, error) + IndexShards(ctx context.Context, req *loghttp.RangeQuery, targetBytesPerShard uint64) (*logproto.ShardsResponse, error) Volume(ctx context.Context, req *logproto.VolumeRequest) (*logproto.VolumeResponse, error) + DetectedFields(ctx context.Context, req *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) + DetectedLabels(ctx context.Context, req *logproto.DetectedLabelsRequest) (*logproto.DetectedLabelsResponse, error) } type Limits querier_limits.Limits @@ -772,6 +783,45 @@ func (q *SingleTenantQuerier) IndexStats(ctx context.Context, req *loghttp.Range ) } +func (q *SingleTenantQuerier) IndexShards( + ctx context.Context, + req *loghttp.RangeQuery, + targetBytesPerShard uint64, +) (*logproto.ShardsResponse, error) { + userID, err := tenant.TenantID(ctx) + if err != nil { + return nil, err + } + + start, end, err := validateQueryTimeRangeLimits(ctx, userID, q.limits, req.Start, req.End) + if err != nil { + return nil, err + } + + // Enforce the query timeout while querying backends + queryTimeout := q.limits.QueryTimeout(ctx, userID) + ctx, cancel := context.WithDeadline(ctx, time.Now().Add(queryTimeout)) + defer cancel() + + p, err := indexgateway.ExtractShardRequestMatchersAndAST(req.Query) + if err != nil { + return nil, err + } + + shards, err := q.store.GetShards( + ctx, + userID, + model.TimeFromUnixNano(start.UnixNano()), + model.TimeFromUnixNano(end.UnixNano()), + targetBytesPerShard, + p, + ) + if err != nil { + return nil, err + } + return shards, nil +} + func (q *SingleTenantQuerier) Volume(ctx context.Context, req *logproto.VolumeRequest) (*logproto.VolumeResponse, 
error) { sp, ctx := opentracing.StartSpanFromContext(ctx, "Querier.Volume") defer sp.Finish() @@ -856,3 +906,233 @@ func (q *SingleTenantQuerier) Volume(ctx context.Context, req *logproto.VolumeRe return seriesvolume.Merge(responses, req.Limit), nil } + +func (q *SingleTenantQuerier) DetectedLabels(_ context.Context, _ *logproto.DetectedLabelsRequest) (*logproto.DetectedLabelsResponse, error) { + return &logproto.DetectedLabelsResponse{ + DetectedLabels: []*logproto.DetectedLabel{ + {Label: "namespace"}, + {Label: "cluster"}, + {Label: "instance"}, + {Label: "pod"}, + }, + }, nil +} + +func (q *SingleTenantQuerier) DetectedFields(ctx context.Context, req *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) { + expr, err := syntax.ParseLogSelector(req.Query, true) + if err != nil { + return nil, err + } + params := logql.SelectLogParams{ + QueryRequest: &logproto.QueryRequest{ + Start: req.Start, + End: req.End, + Limit: req.LineLimit, + Direction: logproto.BACKWARD, + Selector: expr.String(), + Plan: &plan.QueryPlan{ + AST: expr, + }, + }, + } + + iters, err := q.SelectLogs(ctx, params) + if err != nil { + return nil, err + } + + //TODO(twhitney): converting from a step to a duration should be abstracted and reused, + // doing this in a few places now. 
+ streams, err := streamsForFieldDetection(iters, req.LineLimit, time.Duration(req.Step*1e6)) + if err != nil { + return nil, err + } + + detectedFields := parseDetectedFields(ctx, req.FieldLimit, streams) + + fields := make([]*logproto.DetectedField, len(detectedFields)) + fieldCount := 0 + for k, v := range detectedFields { + fields[fieldCount] = &logproto.DetectedField{ + Label: k, + Type: v.fieldType, + Cardinality: v.Estimate(), + } + + fieldCount++ + } + + return &logproto.DetectedFieldsResponse{ + Fields: fields, + }, nil +} + +type parsedFields struct { + sketch *hyperloglog.Sketch + isTypeDetected bool + fieldType logproto.DetectedFieldType +} + +func newParsedFields() *parsedFields { + return &parsedFields{ + sketch: hyperloglog.New(), + isTypeDetected: false, + fieldType: logproto.DetectedFieldString, + } +} + +func (p *parsedFields) Insert(value string) { + p.sketch.Insert([]byte(value)) +} + +func (p *parsedFields) Estimate() uint64 { + return p.sketch.Estimate() +} + +func (p *parsedFields) DetermineType(value string) { + p.fieldType = determineType(value) + p.isTypeDetected = true +} + +func determineType(value string) logproto.DetectedFieldType { + if _, err := strconv.ParseInt(value, 10, 64); err == nil { + return logproto.DetectedFieldInt + } + + if _, err := strconv.ParseFloat(value, 64); err == nil { + return logproto.DetectedFieldFloat + } + + if _, err := strconv.ParseBool(value); err == nil { + return logproto.DetectedFieldBoolean + } + + if _, err := time.ParseDuration(value); err == nil { + return logproto.DetectedFieldDuration + } + + if _, err := humanize.ParseBytes(value); err == nil { + return logproto.DetectedFieldBytes + } + + return logproto.DetectedFieldString +} + +func parseDetectedFields(ctx context.Context, limit uint32, streams logqlmodel.Streams) map[string]*parsedFields { + detectedFields := make(map[string]*parsedFields, limit) + fieldCount := uint32(0) + + for _, stream := range streams { + + 
level.Debug(spanlogger.FromContext(ctx)).Log( + "detected_fields", "true", + "msg", fmt.Sprintf("looking for detected fields in stream %d with %d lines", stream.Hash, len(stream.Entries))) + + for _, entry := range stream.Entries { + detected := parseLine(entry.Line) + for k, vals := range detected { + if fieldCount >= limit { + return detectedFields + } + + if _, ok := detectedFields[k]; !ok { + detectedFields[k] = newParsedFields() + } + + for _, v := range vals { + parsedFields := detectedFields[k] + if !parsedFields.isTypeDetected { + parsedFields.DetermineType(v) + } + + parsedFields.Insert(v) + } + + level.Debug(spanlogger.FromContext(ctx)).Log( + "detected_fields", "true", + "msg", fmt.Sprintf("detected field %s with %d values", k, len(vals))) + + fieldCount++ + } + } + } + + return detectedFields +} + +func parseLine(line string) map[string][]string { + logFmtParser := logql_log.NewLogfmtParser(true, false) + jsonParser := logql_log.NewJSONParser() + + lbls := logql_log.NewBaseLabelsBuilder().ForLabels(labels.EmptyLabels(), 0) + _, logfmtSuccess := logFmtParser.Process(0, []byte(line), lbls) + if !logfmtSuccess || lbls.HasErr() { + lbls.Reset() + _, jsonSuccess := jsonParser.Process(0, []byte(line), lbls) + if !jsonSuccess || lbls.HasErr() { + return map[string][]string{} + } + } + + parsedLabels := map[string]map[string]struct{}{} + for _, lbl := range lbls.LabelsResult().Labels() { + if values, ok := parsedLabels[lbl.Name]; ok { + values[lbl.Value] = struct{}{} + } else { + parsedLabels[lbl.Name] = map[string]struct{}{lbl.Value: {}} + } + } + + result := make(map[string][]string, len(parsedLabels)) + for lbl, values := range parsedLabels { + vals := make([]string, 0, len(values)) + for v := range values { + vals = append(vals, v) + } + result[lbl] = vals + } + + return result +} + +// readStreams reads the streams from the iterator and returns them sorted. 
+// If categorizeLabels is true, the stream labels contains just the stream labels and entries inside each stream have their +// structuredMetadata and parsed fields populated with structured metadata labels plus the parsed labels respectively. +// Otherwise, the stream labels are the whole series labels including the stream labels, structured metadata labels and parsed labels. +func streamsForFieldDetection(i iter.EntryIterator, size uint32, interval time.Duration) (logqlmodel.Streams, error) { + streams := map[string]*logproto.Stream{} + respSize := uint32(0) + // lastEntry should be a really old time so that the first comparison is always true, we use a negative + // value here because many unit tests start at time.Unix(0,0) + lastEntry := time.Unix(-100, 0) + for respSize < size && i.Next() { + streamLabels, entry := i.Labels(), i.Entry() + + // Always going backward + shouldOutput := entry.Timestamp.Equal(lastEntry.Add(-interval)) || + entry.Timestamp.Before(lastEntry.Add(-interval)) + + // If step == 0 output every line. + // If lastEntry.Unix < 0 this is the first pass through the loop and we should output the line. 
+ // Then check to see if the entry is equal to, or past a forward step + if interval == 0 || lastEntry.Unix() < 0 || shouldOutput { + stream, ok := streams[streamLabels] + if !ok { + stream = &logproto.Stream{ + Labels: streamLabels, + } + streams[streamLabels] = stream + } + stream.Entries = append(stream.Entries, entry) + lastEntry = i.Entry().Timestamp + respSize++ + } + } + + result := make(logqlmodel.Streams, 0, len(streams)) + for _, stream := range streams { + result = append(result, *stream) + } + sort.Sort(result) + return result, i.Error() +} diff --git a/pkg/querier/querier_mock_test.go b/pkg/querier/querier_mock_test.go index 268e05528f781..83b1b6e6a8a4e 100644 --- a/pkg/querier/querier_mock_test.go +++ b/pkg/querier/querier_mock_test.go @@ -7,9 +7,9 @@ import ( "math" "time" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" "github.com/grafana/dskit/grpcclient" "github.com/grafana/dskit/ring" @@ -21,18 +21,19 @@ import ( "google.golang.org/grpc/health/grpc_health_v1" grpc_metadata "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel" + 
"github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/validation" ) // querierClientMock is a mockable version of QuerierClient, used in querier @@ -371,6 +372,14 @@ func (s *storeMock) Stats(_ context.Context, _ string, _, _ model.Time, _ ...*la return nil, nil } +func (s *storeMock) GetShards(_ context.Context, _ string, _, _ model.Time, _ uint64, _ chunk.Predicate) (*logproto.ShardsResponse, error) { + return nil, nil +} + +func (s *storeMock) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} + func (s *storeMock) Volume(ctx context.Context, userID string, from, through model.Time, _ int32, targetLabels []string, _ string, matchers ...*labels.Matcher) (*logproto.VolumeResponse, error) { args := s.Called(ctx, userID, from, through, targetLabels, matchers) return args.Get(0).(*logproto.VolumeResponse), args.Error(1) @@ -547,6 +556,18 @@ func (q *querierMock) IndexStats(_ context.Context, _ *loghttp.RangeQuery) (*sta return nil, nil } +func (q *querierMock) GetShards(_ context.Context, _ string, _, _ model.Time, _ uint64, _ chunk.Predicate) ([]logproto.Shard, error) { + return nil, nil +} + +func (q *querierMock) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} + +func (q *querierMock) IndexShards(_ context.Context, _ *loghttp.RangeQuery, _ uint64) (*logproto.ShardsResponse, error) { + return nil, errors.New("unimplemented") +} + func (q *querierMock) Volume(ctx context.Context, req *logproto.VolumeRequest) (*logproto.VolumeResponse, error) { args := q.MethodCalled("Volume", ctx, req) @@ -559,6 +580,30 @@ func (q *querierMock) Volume(ctx context.Context, req *logproto.VolumeRequest) ( 
return resp.(*logproto.VolumeResponse), err } +func (q *querierMock) DetectedFields(ctx context.Context, req *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) { + args := q.MethodCalled("DetectedFields", ctx, req) + + resp := args.Get(0) + err := args.Error(1) + if resp == nil { + return nil, err + } + + return resp.(*logproto.DetectedFieldsResponse), err +} + +func (q *querierMock) DetectedLabels(ctx context.Context, req *logproto.DetectedLabelsRequest) (*logproto.DetectedLabelsResponse, error) { + args := q.MethodCalled("DetectedFields", ctx, req) + + resp := args.Get(0) + err := args.Error(1) + if resp == nil { + return nil, err + } + + return resp.(*logproto.DetectedLabelsResponse), err +} + type engineMock struct { util.ExtendedMock } diff --git a/pkg/querier/querier_test.go b/pkg/querier/querier_test.go index e9c36f7ae91e8..3848fc1746fc0 100644 --- a/pkg/querier/querier_test.go +++ b/pkg/querier/querier_test.go @@ -19,15 +19,15 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/querier/queryrange/benchmarkutils_test.go b/pkg/querier/queryrange/benchmarkutils_test.go 
index 80552f552d20a..afe5f2866c047 100644 --- a/pkg/querier/queryrange/benchmarkutils_test.go +++ b/pkg/querier/queryrange/benchmarkutils_test.go @@ -3,7 +3,7 @@ package queryrange import ( "sort" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type entry struct { diff --git a/pkg/querier/queryrange/codec.go b/pkg/querier/queryrange/codec.go index 44de02408b4df..b9fe39c3a7145 100644 --- a/pkg/querier/queryrange/codec.go +++ b/pkg/querier/queryrange/codec.go @@ -6,38 +6,39 @@ import ( "context" "errors" "fmt" - io "io" + "io" "net/http" "net/url" "regexp" "sort" - strings "strings" + "strings" "time" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" "github.com/grafana/dskit/httpgrpc" "github.com/grafana/dskit/user" json "github.com/json-iterator/go" "github.com/opentracing/opentracing-go" otlog "github.com/opentracing/opentracing-go/log" + "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/timestamp" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - indexStats "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/httpreq" - "github.com/grafana/loki/pkg/util/marshal" - marshal_legacy "github.com/grafana/loki/pkg/util/marshal/legacy" - "github.com/grafana/loki/pkg/util/querylimits" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + 
"github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + indexStats "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/marshal" + marshal_legacy "github.com/grafana/loki/v3/pkg/util/marshal/legacy" + "github.com/grafana/loki/v3/pkg/util/querylimits" ) var DefaultCodec = &Codec{} @@ -83,8 +84,8 @@ func (r *LokiRequest) WithShards(shards logql.Shards) *LokiRequest { func (r *LokiRequest) LogToSpan(sp opentracing.Span) { sp.LogFields( otlog.String("query", r.GetQuery()), - otlog.String("start", timestamp.Time(r.GetStart().UnixNano()).String()), - otlog.String("end", timestamp.Time(r.GetEnd().UnixNano()).String()), + otlog.String("start", timestamp.Time(r.GetStart().UnixMilli()).String()), + otlog.String("end", timestamp.Time(r.GetEnd().UnixMilli()).String()), otlog.Int64("step (ms)", r.GetStep()), otlog.Int64("interval (ms)", r.GetInterval()), otlog.Int64("limit", int64(r.GetLimit())), @@ -178,8 +179,8 @@ func (r *LokiSeriesRequest) GetStep() int64 { func (r *LokiSeriesRequest) LogToSpan(sp opentracing.Span) { sp.LogFields( otlog.String("matchers", strings.Join(r.GetMatch(), ",")), - otlog.String("start", timestamp.Time(r.GetStart().UnixNano()).String()), - otlog.String("end", timestamp.Time(r.GetEnd().UnixNano()).String()), + otlog.String("start", timestamp.Time(r.GetStart().UnixMilli()).String()), + otlog.String("end", timestamp.Time(r.GetEnd().UnixMilli()).String()), otlog.String("shards", strings.Join(r.GetShards(), ",")), ) } @@ -249,8 +250,8 @@ func (r *LabelRequest) WithQuery(query string) queryrangebase.Request { func (r *LabelRequest) LogToSpan(sp opentracing.Span) { sp.LogFields( - 
otlog.String("start", timestamp.Time(r.GetStart().UnixNano()).String()), - otlog.String("end", timestamp.Time(r.GetEnd().UnixNano()).String()), + otlog.String("start", timestamp.Time(r.GetStart().UnixMilli()).String()), + otlog.String("end", timestamp.Time(r.GetEnd().UnixMilli()).String()), ) } @@ -260,6 +261,80 @@ func (r *LabelRequest) Path() string { func (*LabelRequest) GetCachingOptions() (res queryrangebase.CachingOptions) { return } +type DetectedLabelsRequest struct { + path string + logproto.DetectedLabelsRequest +} + +// NewDetectedLabelsRequest creates a new request for detected labels +func NewDetectedLabelsRequest(start, end time.Time, query, path string) *DetectedLabelsRequest { + return &DetectedLabelsRequest{ + DetectedLabelsRequest: logproto.DetectedLabelsRequest{ + Start: &start, + End: &end, + Query: query, + }, + path: path, + } +} + +func (r *DetectedLabelsRequest) AsProto() *logproto.DetectedLabelsRequest { + return &r.DetectedLabelsRequest +} + +func (r *DetectedLabelsRequest) GetEnd() time.Time { + return *r.End +} + +func (r *DetectedLabelsRequest) GetEndTs() time.Time { + return *r.End +} + +func (r *DetectedLabelsRequest) GetStart() time.Time { + return *r.Start +} + +func (r *DetectedLabelsRequest) GetStartTs() time.Time { + return *r.Start +} + +func (r *DetectedLabelsRequest) GetStep() int64 { + return 0 +} + +func (r *DetectedLabelsRequest) WithStartEnd(s, e time.Time) queryrangebase.Request { + clone := *r + clone.Start = &s + clone.End = &e + return &clone +} + +// WithStartEndForCache implements resultscache.Request. 
+func (r *DetectedLabelsRequest) WithStartEndForCache(s time.Time, e time.Time) resultscache.Request { + return r.WithStartEnd(s, e).(resultscache.Request) +} + +func (r *DetectedLabelsRequest) WithQuery(query string) queryrangebase.Request { + clone := *r + clone.Query = query + return &clone +} + +func (r *DetectedLabelsRequest) LogToSpan(sp opentracing.Span) { + sp.LogFields( + otlog.String("start", timestamp.Time(r.GetStart().UnixNano()).String()), + otlog.String("end", timestamp.Time(r.GetEnd().UnixNano()).String()), + ) +} + +func (r *DetectedLabelsRequest) Path() string { + return r.path +} + +func (*DetectedLabelsRequest) GetCachingOptions() (res queryrangebase.CachingOptions) { + return +} + func (Codec) DecodeRequest(_ context.Context, r *http.Request, _ []string) (queryrangebase.Request, error) { if err := r.ParseForm(); err != nil { return nil, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) @@ -346,6 +421,18 @@ func (Codec) DecodeRequest(_ context.Context, r *http.Request, _ []string) (quer Through: through, Matchers: req.Query, }, err + case IndexShardsOp: + req, targetBytes, err := loghttp.ParseIndexShardsQuery(r) + if err != nil { + return nil, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) + } + from, through := util.RoundToMilliseconds(req.Start, req.End) + return &logproto.ShardsRequest{ + From: from, + Through: through, + Query: req.Query, + TargetBytesPerShard: targetBytes.Bytes(), + }, err case VolumeOp: req, err := loghttp.ParseVolumeInstantQuery(r) if err != nil { @@ -376,6 +463,31 @@ func (Codec) DecodeRequest(_ context.Context, r *http.Request, _ []string) (quer TargetLabels: req.TargetLabels, AggregateBy: req.AggregateBy, }, err + case DetectedFieldsOp: + req, err := loghttp.ParseDetectedFieldsQuery(r) + if err != nil { + return nil, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) + } + + _, err = syntax.ParseExpr(req.Query) + if err != nil { + return nil, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) + } + + return 
&DetectedFieldsRequest{ + DetectedFieldsRequest: *req, + path: r.URL.Path, + }, nil + case DetectedLabelsOp: + req, err := loghttp.ParseDetectedLabelsQuery(r) + if err != nil { + return nil, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) + } + + return &DetectedLabelsRequest{ + DetectedLabelsRequest: *req, + path: r.URL.Path, + }, nil default: return nil, httpgrpc.Errorf(http.StatusNotFound, fmt.Sprintf("unknown request path: %s", r.URL.Path)) } @@ -413,6 +525,11 @@ func (Codec) DecodeHTTPGrpcRequest(ctx context.Context, r *httpgrpc.HTTPRequest) ctx = httpreq.InjectQueryTags(ctx, queryTags) } + // Add disable pipeline wrappers + if disableWrappers := httpReq.Header.Get(httpreq.LokiDisablePipelineWrappersHeader); disableWrappers != "" { + httpreq.InjectHeader(ctx, httpreq.LokiDisablePipelineWrappersHeader, disableWrappers) + } + // Add query metrics if queueTimeHeader := httpReq.Header.Get(string(httpreq.QueryQueueTimeHTTPHeader)); queueTimeHeader != "" { queueTime, err := time.ParseDuration(queueTimeHeader) @@ -521,6 +638,19 @@ func (Codec) DecodeHTTPGrpcRequest(ctx context.Context, r *httpgrpc.HTTPRequest) Through: through, Matchers: req.Query, }, ctx, err + case IndexShardsOp: + req, targetBytes, err := loghttp.ParseIndexShardsQuery(httpReq) + if err != nil { + return nil, ctx, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) + } + from, through := util.RoundToMilliseconds(req.Start, req.End) + return &logproto.ShardsRequest{ + From: from, + Through: through, + Query: req.Query, + TargetBytesPerShard: targetBytes.Bytes(), + }, ctx, nil + case VolumeOp: req, err := loghttp.ParseVolumeInstantQuery(httpReq) if err != nil { @@ -551,6 +681,25 @@ func (Codec) DecodeHTTPGrpcRequest(ctx context.Context, r *httpgrpc.HTTPRequest) TargetLabels: req.TargetLabels, AggregateBy: req.AggregateBy, }, ctx, err + case DetectedFieldsOp: + req, err := loghttp.ParseDetectedFieldsQuery(httpReq) + if err != nil { + return nil, ctx, httpgrpc.Errorf(http.StatusBadRequest, 
err.Error()) + } + + return &DetectedFieldsRequest{ + DetectedFieldsRequest: *req, + path: httpReq.URL.Path, + }, ctx, nil + case DetectedLabelsOp: + req, err := loghttp.ParseDetectedLabelsQuery(httpReq) + if err != nil { + return nil, ctx, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) + } + return &DetectedLabelsRequest{ + DetectedLabelsRequest: *req, + path: httpReq.URL.Path, + }, ctx, err default: return nil, ctx, httpgrpc.Errorf(http.StatusBadRequest, fmt.Sprintf("unknown request path in HTTP gRPC decode: %s", r.Url)) } @@ -788,6 +937,65 @@ func (c Codec) EncodeRequest(ctx context.Context, r queryrangebase.Request) (*ht Body: http.NoBody, Header: header, } + return req.WithContext(ctx), nil + case *logproto.ShardsRequest: + params := url.Values{ + "start": []string{fmt.Sprintf("%d", request.From.Time().UnixNano())}, + "end": []string{fmt.Sprintf("%d", request.Through.Time().UnixNano())}, + "query": []string{request.GetQuery()}, + "targetBytesPerShard": []string{fmt.Sprintf("%d", request.TargetBytesPerShard)}, + } + u := &url.URL{ + Path: "/loki/api/v1/index/shards", + RawQuery: params.Encode(), + } + req := &http.Request{ + Method: "GET", + RequestURI: u.String(), // This is what the httpgrpc code looks at. + URL: u, + Body: http.NoBody, + Header: header, + } + return req.WithContext(ctx), nil + case *DetectedFieldsRequest: + params := url.Values{ + "start": []string{fmt.Sprintf("%d", request.Start.UnixNano())}, + "end": []string{fmt.Sprintf("%d", request.End.UnixNano())}, + "query": []string{request.GetQuery()}, + } + + u := &url.URL{ + Path: "/loki/api/v1/detected_fields", + RawQuery: params.Encode(), + } + req := &http.Request{ + Method: "GET", + RequestURI: u.String(), // This is what the httpgrpc code looks at. 
+ URL: u, + Body: http.NoBody, + Header: header, + } + + return req.WithContext(ctx), nil + case *DetectedLabelsRequest: + params := url.Values{ + "start": []string{fmt.Sprintf("%d", request.Start.UnixNano())}, + "end": []string{fmt.Sprintf("%d", request.End.UnixNano())}, + "query": []string{request.GetQuery()}, + } + + u := &url.URL{ + Path: "/loki/api/v1/detected_labels", + RawQuery: params.Encode(), + } + req := &http.Request{ + Method: "GET", + RequestURI: u.String(), // This is what the httpgrpc code looks at. + URL: u, + Body: http.NoBody, + Header: header, + } + return req.WithContext(ctx), nil default: return nil, httpgrpc.Errorf(http.StatusInternalServerError, fmt.Sprintf("invalid request format, got (%T)", r)) @@ -814,6 +1022,10 @@ func (c Codec) Path(r queryrangebase.Request) string { return "/loki/api/v1/index/stats" case *logproto.VolumeRequest: return "/loki/api/v1/index/volume_range" + case *DetectedFieldsRequest: + return "/loki/api/v1/detected_fields" + case *DetectedLabelsRequest: + return "/loki/api/v1/detected_labels" } return "other" @@ -897,6 +1109,15 @@ func decodeResponseJSONFrom(buf []byte, req queryrangebase.Request, headers http Response: &resp, Headers: httpResponseHeadersToPromResponseHeaders(headers), }, nil + case *logproto.ShardsRequest: + var resp logproto.ShardsResponse + if err := json.Unmarshal(buf, &resp); err != nil { + return nil, httpgrpc.Errorf(http.StatusInternalServerError, "error decoding response: %v", err) + } + return &ShardsResponse{ + Response: &resp, + Headers: httpResponseHeadersToPromResponseHeaders(headers), + }, nil case *logproto.VolumeRequest: var resp logproto.VolumeResponse if err := json.Unmarshal(buf, &resp); err != nil { @@ -906,6 +1127,15 @@ func decodeResponseJSONFrom(buf []byte, req queryrangebase.Request, headers http Response: &resp, Headers: httpResponseHeadersToPromResponseHeaders(headers), }, nil + case *DetectedFieldsRequest: + var resp logproto.DetectedFieldsResponse + if err := 
json.Unmarshal(buf, &resp); err != nil { + return nil, httpgrpc.Errorf(http.StatusInternalServerError, "error decoding response: %v", err) + } + return &DetectedFieldsResponse{ + Response: &resp, + Headers: httpResponseHeadersToPromResponseHeaders(headers), + }, nil default: var resp loghttp.QueryResponse if err := resp.UnmarshalJSON(buf); err != nil { @@ -1013,6 +1243,8 @@ func decodeResponseProtobuf(r *http.Response, req queryrangebase.Request) (query return resp.GetLabels().WithHeaders(headers), nil case *logproto.IndexStatsRequest: return resp.GetStats().WithHeaders(headers), nil + case *logproto.ShardsRequest: + return resp.GetShardsResponse().WithHeaders(headers), nil default: switch concrete := resp.Response.(type) { case *QueryResponse_Prom: @@ -1110,10 +1342,22 @@ func encodeResponseJSONTo(version loghttp.Version, res queryrangebase.Response, if err := marshal.WriteIndexStatsResponseJSON(response.Response, w); err != nil { return err } + case *ShardsResponse: + if err := marshal.WriteIndexShardsResponseJSON(response.Response, w); err != nil { + return err + } case *VolumeResponse: if err := marshal.WriteVolumeResponseJSON(response.Response, w); err != nil { return err } + case *DetectedFieldsResponse: + if err := marshal.WriteDetectedFieldsResponseJSON(response.Response, w); err != nil { + return err + } + case *DetectedLabelsResponse: + if err := marshal.WriteDetectedLabelsResponseJSON(response.Response, w); err != nil { + return err + } default: return httpgrpc.Errorf(http.StatusInternalServerError, fmt.Sprintf("invalid response format, got (%T)", res)) } @@ -1152,15 +1396,22 @@ func (Codec) MergeResponse(responses ...queryrangebase.Response) (queryrangebase return nil, errors.New("merging responses requires at least one response") } var mergedStats stats.Result - switch responses[0].(type) { + switch res := responses[0].(type) { + // LokiPromResponse type is used for both instant and range queries. 
+ // Meaning, values that are merged can be either vector or matrix types. case *LokiPromResponse: + codec := queryrangebase.PrometheusCodecForRangeQueries + if res.Response.Data.ResultType == model.ValVector.String() { + codec = queryrangebase.PrometheusCodecForInstantQueries + } + promResponses := make([]queryrangebase.Response, 0, len(responses)) for _, res := range responses { mergedStats.MergeSplit(res.(*LokiPromResponse).Statistics) promResponses = append(promResponses, res.(*LokiPromResponse).Response) } - promRes, err := queryrangebase.PrometheusCodec.MergeResponse(promResponses...) + promRes, err := codec.MergeResponse(promResponses...) if err != nil { return nil, err } @@ -1681,7 +1932,7 @@ func NewEmptyResponse(r queryrangebase.Request) (queryrangebase.Response, error) } if _, ok := expr.(syntax.SampleExpr); ok { return &LokiPromResponse{ - Response: queryrangebase.NewEmptyPrometheusResponse(), + Response: queryrangebase.NewEmptyPrometheusResponse(model.ValMatrix), // range metric query }, nil } return &LokiResponse{ @@ -1736,3 +1987,75 @@ func mergeLokiResponse(responses ...queryrangebase.Response) *LokiResponse { }, } } + +// In some other world LabelRequest could implement queryrangebase.Request. 
+type DetectedFieldsRequest struct { + logproto.DetectedFieldsRequest + path string +} + +func NewDetectedFieldsRequest(start, end time.Time, query, path string) *DetectedFieldsRequest { + return &DetectedFieldsRequest{ + DetectedFieldsRequest: logproto.DetectedFieldsRequest{ + Start: start, + End: end, + Query: query, + }, + path: path, + } +} + +func (r *DetectedFieldsRequest) AsProto() *logproto.DetectedFieldsRequest { + return &r.DetectedFieldsRequest +} + +func (r *DetectedFieldsRequest) GetEnd() time.Time { + return r.End +} + +func (r *DetectedFieldsRequest) GetEndTs() time.Time { + return r.End +} + +func (r *DetectedFieldsRequest) GetStart() time.Time { + return r.Start +} + +func (r *DetectedFieldsRequest) GetStartTs() time.Time { + return r.Start +} + +func (r *DetectedFieldsRequest) GetStep() int64 { + return 0 +} + +func (r *DetectedFieldsRequest) Path() string { + return r.path +} + +func (r *DetectedFieldsRequest) WithStartEnd(s, e time.Time) queryrangebase.Request { + clone := *r + clone.Start = s + clone.End = e + return &clone +} + +// WithStartEndForCache implements resultscache.Request. 
+func (r *DetectedFieldsRequest) WithStartEndForCache(s time.Time, e time.Time) resultscache.Request { + return r.WithStartEnd(s, e).(resultscache.Request) +} + +func (r *DetectedFieldsRequest) WithQuery(query string) queryrangebase.Request { + clone := *r + clone.Query = query + return &clone +} + +func (r *DetectedFieldsRequest) LogToSpan(sp opentracing.Span) { + sp.LogFields( + otlog.String("start", timestamp.Time(r.GetStart().UnixNano()).String()), + otlog.String("end", timestamp.Time(r.GetEnd().UnixNano()).String()), + ) +} + +func (*DetectedFieldsRequest) GetCachingOptions() (res queryrangebase.CachingOptions) { return } diff --git a/pkg/querier/queryrange/codec_test.go b/pkg/querier/queryrange/codec_test.go index fa6fa9e036711..35a101cb590aa 100644 --- a/pkg/querier/queryrange/codec_test.go +++ b/pkg/querier/queryrange/codec_test.go @@ -16,22 +16,24 @@ import ( "github.com/gorilla/mux" "github.com/grafana/dskit/user" + "github.com/opentracing/opentracing-go/mocktracer" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/model/timestamp" "github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql/parser" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + 
"github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) func init() { @@ -424,6 +426,96 @@ func Test_codec_DecodeResponse(t *testing.T) { } } +func TestLokiRequestSpanLogging(t *testing.T) { + now := time.Now() + end := now.Add(1000 * time.Second) + req := LokiRequest{ + StartTs: now, + EndTs: end, + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for _, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "start" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + if field.Key == "end" { + require.Equal(t, timestamp.Time(end.UnixMilli()).String(), field.ValueString) + } + } + } +} + +func TestLokiInstantRequestSpanLogging(t *testing.T) { + now := time.Now() + req := LokiInstantRequest{ + TimeTs: now, + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for _, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "ts" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + } + } +} + +func TestLokiSeriesRequestSpanLogging(t *testing.T) { + now := time.Now() + end := now.Add(1000 * time.Second) + req := LokiSeriesRequest{ + StartTs: now, + EndTs: end, + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for _, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "start" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + if field.Key == "end" { + require.Equal(t, timestamp.Time(end.UnixMilli()).String(), field.ValueString) + + } + } + } +} + +func TestLabelRequestSpanLogging(t *testing.T) { + now := time.Now() + end := now.Add(1000 * time.Second) + req := LabelRequest{ + LabelRequest: logproto.LabelRequest{ + Start: &now, + End: &end, + }, + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for 
_, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "start" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + if field.Key == "end" { + require.Equal(t, timestamp.Time(end.UnixMilli()).String(), field.ValueString) + } + } + } +} + func Test_codec_DecodeProtobufResponseParity(t *testing.T) { // test fixtures from pkg/util/marshal_test var queryTests = []struct { @@ -1565,7 +1657,8 @@ var ( "totalChunksRef": 0, "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, - "queryReferencedStructuredMetadata": false + "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 2 }, "totalBatches": 6, "totalChunksMatched": 7, @@ -1590,9 +1683,14 @@ var ( "totalChunksRef": 17, "totalChunksDownloaded": 18, "chunkRefsFetchTime": 19, - "queryReferencedStructuredMetadata": true + "queryReferencedStructuredMetadata": true, + "pipelineWrapperFilteredLines": 4 } }, + "index": { + "postFilterChunks": 0, + "totalChunks": 0 + }, "cache": { "chunk": { "entriesFound": 0, @@ -2019,17 +2117,19 @@ var ( PostFilterLines: 0, TotalDuplicates: 19, }, - ChunksDownloadTime: 16, - CongestionControlLatency: 0, - TotalChunksRef: 17, - TotalChunksDownloaded: 18, - ChunkRefsFetchTime: 19, - QueryReferencedStructured: true, + ChunksDownloadTime: 16, + CongestionControlLatency: 0, + TotalChunksRef: 17, + TotalChunksDownloaded: 18, + ChunkRefsFetchTime: 19, + QueryReferencedStructured: true, + PipelineWrapperFilteredLines: 4, }, }, Ingester: stats.Ingester{ Store: stats.Store{ + PipelineWrapperFilteredLines: 2, Chunk: stats.Chunk{ CompressedBytes: 1, DecompressedBytes: 2, diff --git a/pkg/querier/queryrange/downstreamer.go b/pkg/querier/queryrange/downstreamer.go index 4db8034291f64..3d1485d5a77e0 100644 --- a/pkg/querier/queryrange/downstreamer.go +++ b/pkg/querier/queryrange/downstreamer.go @@ -14,12 +14,12 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql/parser" - 
"github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/querier/queryrange/downstreamer_test.go b/pkg/querier/queryrange/downstreamer_test.go index cadfceeee20e3..0dddddab9b004 100644 --- a/pkg/querier/queryrange/downstreamer_test.go +++ b/pkg/querier/queryrange/downstreamer_test.go @@ -17,12 +17,13 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func testSampleStreams() []queryrangebase.SampleStream { @@ -290,7 +291,7 @@ func TestInstanceFor(t *testing.T) { Params: logql.ParamsWithShardsOverride{ Params: newParams(), ShardsOverride: logql.Shards{ - {Shard: 0, Of: 2}, + logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 0, Of: 2}), }.Encode(), }, }, @@ -298,7 +299,7 @@ func TestInstanceFor(t *testing.T) { Params: 
logql.ParamsWithShardsOverride{ Params: newParams(), ShardsOverride: logql.Shards{ - {Shard: 1, Of: 2}, + logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 1, Of: 2}), }.Encode(), }, }, @@ -363,8 +364,10 @@ func TestInstanceDownstream(t *testing.T) { queries := []logql.DownstreamQuery{ { Params: logql.ParamsWithShardsOverride{ - Params: logql.ParamsWithExpressionOverride{Params: params, ExpressionOverride: expr}, - ShardsOverride: logql.Shards{{Shard: 0, Of: 2}}.Encode(), + Params: logql.ParamsWithExpressionOverride{Params: params, ExpressionOverride: expr}, + ShardsOverride: logql.Shards{ + logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 0, Of: 2}), + }.Encode(), }, }, } diff --git a/pkg/querier/queryrange/extensions.go b/pkg/querier/queryrange/extensions.go index 75d4ce2cb4edd..6e377295283f5 100644 --- a/pkg/querier/queryrange/extensions.go +++ b/pkg/querier/queryrange/extensions.go @@ -5,8 +5,8 @@ import ( "github.com/grafana/jsonparser" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) // To satisfy queryrange.Response interface(https://github.com/cortexproject/cortex/blob/21bad57b346c730d684d6d0205efef133422ab28/pkg/querier/queryrange/query_range.go#L88) @@ -220,3 +220,53 @@ func (m *QuantileSketchResponse) WithHeaders(h []queryrangebase.PrometheusRespon m.Headers = h return m } + +func (m *ShardsResponse) GetHeaders() []*queryrangebase.PrometheusResponseHeader { + if m != nil { + return convertPrometheusResponseHeadersToPointers(m.Headers) + } + return nil +} + +func (m *ShardsResponse) SetHeader(name, value string) { + m.Headers = setHeader(m.Headers, name, value) +} + +func (m *ShardsResponse) WithHeaders(h []queryrangebase.PrometheusResponseHeader) queryrangebase.Response { + m.Headers = h + return m +} + +// GetHeaders returns the HTTP headers in the response. 
+func (m *DetectedFieldsResponse) GetHeaders() []*queryrangebase.PrometheusResponseHeader { + if m != nil { + return convertPrometheusResponseHeadersToPointers(m.Headers) + } + return nil +} + +func (m *DetectedFieldsResponse) SetHeader(name, value string) { + m.Headers = setHeader(m.Headers, name, value) +} + +func (m *DetectedFieldsResponse) WithHeaders(h []queryrangebase.PrometheusResponseHeader) queryrangebase.Response { + m.Headers = h + return m +} + +// GetHeaders returns the HTTP headers in the response. +func (m *DetectedLabelsResponse) GetHeaders() []*queryrangebase.PrometheusResponseHeader { + if m != nil { + return convertPrometheusResponseHeadersToPointers(m.Headers) + } + return nil +} + +func (m *DetectedLabelsResponse) SetHeader(name, value string) { + m.Headers = setHeader(m.Headers, name, value) +} + +func (m *DetectedLabelsResponse) WithHeaders(h []queryrangebase.PrometheusResponseHeader) queryrangebase.Response { + m.Headers = h + return m +} diff --git a/pkg/querier/queryrange/extensions_test.go b/pkg/querier/queryrange/extensions_test.go index a7354d57eb054..727931bed4586 100644 --- a/pkg/querier/queryrange/extensions_test.go +++ b/pkg/querier/queryrange/extensions_test.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) func Test_setHeader(t *testing.T) { diff --git a/pkg/querier/queryrange/index_stats_cache.go b/pkg/querier/queryrange/index_stats_cache.go index a91721bf36873..b536fe7963efd 100644 --- a/pkg/querier/queryrange/index_stats_cache.go +++ b/pkg/querier/queryrange/index_stats_cache.go @@ -11,12 +11,12 @@ import ( "github.com/grafana/dskit/tenant" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - 
"github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/validation" ) type IndexStatsSplitter struct { diff --git a/pkg/querier/queryrange/index_stats_cache_test.go b/pkg/querier/queryrange/index_stats_cache_test.go index 1127b88576e11..4d0f4124788a4 100644 --- a/pkg/querier/queryrange/index_stats_cache_test.go +++ b/pkg/querier/queryrange/index_stats_cache_test.go @@ -11,13 +11,13 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) func TestIndexStatsCache(t *testing.T) { diff --git a/pkg/querier/queryrange/ingester_query_window.go b/pkg/querier/queryrange/ingester_query_window.go index 7a161f40c0072..d2bae2233c38e 100644 --- a/pkg/querier/queryrange/ingester_query_window.go +++ b/pkg/querier/queryrange/ingester_query_window.go @@ -3,8 +3,8 @@ package queryrange import ( "time" - 
"github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/validation" ) // SplitIntervalForTimeRange returns the correct split interval to use. It accounts for the given upperBound value being diff --git a/pkg/querier/queryrange/instant_metric_cache.go b/pkg/querier/queryrange/instant_metric_cache.go index ef1083e6cd229..37f97a3d032b4 100644 --- a/pkg/querier/queryrange/instant_metric_cache.go +++ b/pkg/querier/queryrange/instant_metric_cache.go @@ -8,9 +8,9 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) type InstantMetricSplitter struct { @@ -49,8 +49,6 @@ func (cfg *InstantMetricCacheConfig) Validate() error { return cfg.ResultsCacheConfig.Validate() } -type instantMetricExtractor struct{} - func NewInstantMetricCacheMiddleware( log log.Logger, limits Limits, diff --git a/pkg/querier/queryrange/instrument.go b/pkg/querier/queryrange/instrument.go index 497cfb2dd8a1a..a2c3be1a733c6 100644 --- a/pkg/querier/queryrange/instrument.go +++ b/pkg/querier/queryrange/instrument.go @@ -12,7 +12,7 @@ import ( "github.com/grafana/dskit/server" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) const ( diff --git a/pkg/querier/queryrange/labels_cache.go b/pkg/querier/queryrange/labels_cache.go index 3a940e34fa034..5979a0c2f91e1 100644 --- a/pkg/querier/queryrange/labels_cache.go +++ b/pkg/querier/queryrange/labels_cache.go @@ -8,10 +8,10 @@ import ( "github.com/go-kit/log" - 
"github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/validation" ) type cacheKeyLabels struct { diff --git a/pkg/querier/queryrange/labels_cache_test.go b/pkg/querier/queryrange/labels_cache_test.go index 90b85cb1faf82..22e967a113762 100644 --- a/pkg/querier/queryrange/labels_cache_test.go +++ b/pkg/querier/queryrange/labels_cache_test.go @@ -11,12 +11,12 @@ import ( "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util" ) func TestCacheKeyLabels_GenerateCacheKey(t *testing.T) { diff --git a/pkg/querier/queryrange/limits.go b/pkg/querier/queryrange/limits.go index ab7818460738f..e1b11fde3449d 100644 --- a/pkg/querier/queryrange/limits.go +++ b/pkg/querier/queryrange/limits.go @@ -22,18 +22,18 @@ import ( "github.com/prometheus/prometheus/model/timestamp" "golang.org/x/sync/semaphore" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - queryrange_limits 
"github.com/grafana/loki/pkg/querier/queryrange/limits" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + queryrange_limits "github.com/grafana/loki/v3/pkg/querier/queryrange/limits" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/util/validation" ) const ( diff --git a/pkg/querier/queryrange/limits/definitions.go b/pkg/querier/queryrange/limits/definitions.go index 9e1232b750797..be366fdc10a44 100644 --- a/pkg/querier/queryrange/limits/definitions.go +++ b/pkg/querier/queryrange/limits/definitions.go @@ -4,8 +4,8 @@ import ( "context" "time" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) // Limits extends the cortex limits interface with support for per tenant splitby parameters @@ -27,6 +27,7 @@ type Limits interface { TSDBMaxQueryParallelism(context.Context, string) int // TSDBMaxBytesPerShard returns the limit to the number of bytes a single shard TSDBMaxBytesPerShard(string) int + TSDBShardingStrategy(userID string) string 
RequiredLabels(context.Context, string) []string RequiredNumberLabels(context.Context, string) int diff --git a/pkg/querier/queryrange/limits_test.go b/pkg/querier/queryrange/limits_test.go index a80cf96dde805..7591d5d170f76 100644 --- a/pkg/querier/queryrange/limits_test.go +++ b/pkg/querier/queryrange/limits_test.go @@ -17,16 +17,16 @@ import ( "go.uber.org/atomic" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/plan" - base "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/querier/plan" + base "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/math" ) func TestLimits(t *testing.T) { @@ -240,7 +240,7 @@ func Test_MaxQueryParallelism(t *testing.T) { defer count.Dec() // simulate some work time.Sleep(20 * time.Millisecond) - return base.NewEmptyPrometheusResponse(), nil + return base.NewEmptyPrometheusResponse(model.ValMatrix), nil }) ctx := user.InjectOrgID(context.Background(), "foo") @@ -271,7 +271,7 @@ func Test_MaxQueryParallelismLateScheduling(t *testing.T) { h := base.HandlerFunc(func(_ context.Context, _ base.Request) (base.Response, error) { // simulate some work time.Sleep(20 * time.Millisecond) - return base.NewEmptyPrometheusResponse(), nil + return 
base.NewEmptyPrometheusResponse(model.ValMatrix), nil }) ctx := user.InjectOrgID(context.Background(), "foo") @@ -298,7 +298,7 @@ func Test_MaxQueryParallelismDisable(t *testing.T) { h := base.HandlerFunc(func(_ context.Context, _ base.Request) (base.Response, error) { // simulate some work time.Sleep(20 * time.Millisecond) - return base.NewEmptyPrometheusResponse(), nil + return base.NewEmptyPrometheusResponse(model.ValMatrix), nil }) ctx := user.InjectOrgID(context.Background(), "foo") diff --git a/pkg/querier/queryrange/log_result_cache.go b/pkg/querier/queryrange/log_result_cache.go index fd26b67412a6b..4a74b71d8d760 100644 --- a/pkg/querier/queryrange/log_result_cache.go +++ b/pkg/querier/queryrange/log_result_cache.go @@ -17,13 +17,13 @@ import ( "github.com/prometheus/common/model" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/validation" ) // LogResultCacheMetrics is the metrics wrapper used in log result cache. 
diff --git a/pkg/querier/queryrange/log_result_cache_test.go b/pkg/querier/queryrange/log_result_cache_test.go index 5da4aee7c4be3..608820d08f8a6 100644 --- a/pkg/querier/queryrange/log_result_cache_test.go +++ b/pkg/querier/queryrange/log_result_cache_test.go @@ -13,11 +13,11 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) const ( diff --git a/pkg/querier/queryrange/marshal.go b/pkg/querier/queryrange/marshal.go index 4480b06adcfac..cc8602d1ce87a 100644 --- a/pkg/querier/queryrange/marshal.go +++ b/pkg/querier/queryrange/marshal.go @@ -17,17 +17,17 @@ import ( "github.com/prometheus/prometheus/promql" "google.golang.org/grpc/codes" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/sketch" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util/httpreq" - "github.com/grafana/loki/pkg/util/querylimits" - "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/sketch" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + 
"github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/querylimits" + "github.com/grafana/loki/v3/pkg/util/server" ) const ( @@ -200,6 +200,8 @@ func QueryResponseUnwrap(res *QueryResponse) (queryrangebase.Response, error) { return concrete.Labels, nil case *QueryResponse_Stats: return concrete.Stats, nil + case *QueryResponse_ShardsResponse: + return concrete.ShardsResponse, nil case *QueryResponse_Prom: return concrete.Prom, nil case *QueryResponse_Streams: @@ -210,6 +212,10 @@ func QueryResponseUnwrap(res *QueryResponse) (queryrangebase.Response, error) { return concrete.TopkSketches, nil case *QueryResponse_QuantileSketches: return concrete.QuantileSketches, nil + case *QueryResponse_DetectedLabels: + return concrete.DetectedLabels, nil + case *QueryResponse_DetectedFields: + return concrete.DetectedFields, nil default: return nil, fmt.Errorf("unsupported QueryResponse response type, got (%T)", res.Response) } @@ -243,6 +249,12 @@ func QueryResponseWrap(res queryrangebase.Response) (*QueryResponse, error) { p.Response = &QueryResponse_TopkSketches{response} case *QuantileSketchResponse: p.Response = &QueryResponse_QuantileSketches{response} + case *ShardsResponse: + p.Response = &QueryResponse_ShardsResponse{response} + case *DetectedLabelsResponse: + p.Response = &QueryResponse_DetectedLabels{response} + case *DetectedFieldsResponse: + p.Response = &QueryResponse_DetectedFields{response} default: return nil, fmt.Errorf("invalid response format, got (%T)", res) } @@ -311,6 +323,8 @@ func (Codec) QueryRequestUnwrap(ctx context.Context, req *QueryRequest) (queryra return concrete.Instant, ctx, nil case *QueryRequest_Stats: return concrete.Stats, ctx, nil + case *QueryRequest_ShardsRequest: + return concrete.ShardsRequest, ctx, nil case *QueryRequest_Volume: return concrete.Volume, ctx, nil case *QueryRequest_Streams: 
@@ -329,8 +343,16 @@ func (Codec) QueryRequestUnwrap(ctx context.Context, req *QueryRequest) (queryra return &LabelRequest{ LabelRequest: *concrete.Labels, }, ctx, nil + case *QueryRequest_DetectedLabels: + return &DetectedLabelsRequest{ + DetectedLabelsRequest: *concrete.DetectedLabels, + }, ctx, nil + case *QueryRequest_DetectedFields: + return &DetectedFieldsRequest{ + DetectedFieldsRequest: *concrete.DetectedFields, + }, ctx, nil default: - return nil, ctx, fmt.Errorf("unsupported request type, got (%T)", req.Request) + return nil, ctx, fmt.Errorf("unsupported request type while unwrapping, got (%T)", req.Request) } } @@ -353,8 +375,14 @@ func (Codec) QueryRequestWrap(ctx context.Context, r queryrangebase.Request) (*Q result.Request = &QueryRequest_Instant{Instant: req} case *LokiRequest: result.Request = &QueryRequest_Streams{Streams: req} + case *logproto.ShardsRequest: + result.Request = &QueryRequest_ShardsRequest{ShardsRequest: req} + case *DetectedLabelsRequest: + result.Request = &QueryRequest_DetectedLabels{DetectedLabels: &req.DetectedLabelsRequest} + case *DetectedFieldsRequest: + result.Request = &QueryRequest_DetectedFields{DetectedFields: &req.DetectedFieldsRequest} default: - return nil, fmt.Errorf("unsupported request type, got (%T)", r) + return nil, fmt.Errorf("unsupported request type while wrapping, got (%T)", r) } // Add query tags diff --git a/pkg/querier/queryrange/marshal_test.go b/pkg/querier/queryrange/marshal_test.go index 6fa9bbe23897c..43d02f1071b6f 100644 --- a/pkg/querier/queryrange/marshal_test.go +++ b/pkg/querier/queryrange/marshal_test.go @@ -6,12 +6,12 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + 
"github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) func TestResultToResponse(t *testing.T) { diff --git a/pkg/querier/queryrange/metrics.go b/pkg/querier/queryrange/metrics.go index 9482becf98817..bd9ce6fa79bac 100644 --- a/pkg/querier/queryrange/metrics.go +++ b/pkg/querier/queryrange/metrics.go @@ -6,9 +6,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) type Metrics struct { diff --git a/pkg/querier/queryrange/ordering.go b/pkg/querier/queryrange/ordering.go index 761ec9cc3fafe..7eb61a604ef03 100644 --- a/pkg/querier/queryrange/ordering.go +++ b/pkg/querier/queryrange/ordering.go @@ -3,7 +3,7 @@ package queryrange import ( "sort" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) /* diff --git a/pkg/querier/queryrange/prometheus.go b/pkg/querier/queryrange/prometheus.go index 2a8ff78c164e5..22ee8b3c1d4dd 100644 --- a/pkg/querier/queryrange/prometheus.go +++ b/pkg/querier/queryrange/prometheus.go @@ -11,10 +11,10 @@ import ( otlog "github.com/opentracing/opentracing-go/log" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/loghttp" + 
"github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) var ( diff --git a/pkg/querier/queryrange/prometheus_test.go b/pkg/querier/queryrange/prometheus_test.go index 80e4f5367afb0..6e3a4ac3e4d4e 100644 --- a/pkg/querier/queryrange/prometheus_test.go +++ b/pkg/querier/queryrange/prometheus_test.go @@ -7,12 +7,16 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) var emptyStats = `"stats": { + "index": { + "postFilterChunks": 0, + "totalChunks": 0 + }, "ingester" : { "store": { "chunksDownloadTime": 0, @@ -21,6 +25,7 @@ var emptyStats = `"stats": { "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 0, "chunk" :{ "compressedBytes": 0, "decompressedBytes": 0, @@ -46,6 +51,7 @@ var emptyStats = `"stats": { "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 0, "chunk" :{ "compressedBytes": 0, "decompressedBytes": 0, diff --git a/pkg/querier/queryrange/queryrange.pb.go b/pkg/querier/queryrange/queryrange.pb.go index cbc541a044044..a602dcafcd802 100644 --- a/pkg/querier/queryrange/queryrange.pb.go +++ b/pkg/querier/queryrange/queryrange.pb.go @@ -12,15 +12,15 @@ import ( github_com_gogo_protobuf_sortkeys "github.com/gogo/protobuf/sortkeys" _ "github.com/gogo/protobuf/types" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" - github_com_grafana_loki_pkg_logproto "github.com/grafana/loki/pkg/logproto" - logproto "github.com/grafana/loki/pkg/logproto" - stats 
"github.com/grafana/loki/pkg/logqlmodel/stats" _ "github.com/grafana/loki/pkg/push" github_com_grafana_loki_pkg_push "github.com/grafana/loki/pkg/push" - github_com_grafana_loki_pkg_querier_plan "github.com/grafana/loki/pkg/querier/plan" - queryrangebase "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - _ "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" + github_com_grafana_loki_v3_pkg_logproto "github.com/grafana/loki/v3/pkg/logproto" + logproto "github.com/grafana/loki/v3/pkg/logproto" + stats "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + github_com_grafana_loki_v3_pkg_querier_plan "github.com/grafana/loki/v3/pkg/querier/plan" + queryrangebase "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + _ "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" io "io" math "math" math_bits "math/bits" @@ -42,16 +42,16 @@ var _ = time.Kitchen const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package type LokiRequest struct { - Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` - Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` - Step int64 `protobuf:"varint,3,opt,name=step,proto3" json:"step,omitempty"` - Interval int64 `protobuf:"varint,9,opt,name=interval,proto3" json:"interval,omitempty"` - StartTs time.Time `protobuf:"bytes,4,opt,name=startTs,proto3,stdtime" json:"startTs"` - EndTs time.Time `protobuf:"bytes,5,opt,name=endTs,proto3,stdtime" json:"endTs"` - Direction logproto.Direction `protobuf:"varint,6,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` - Path string 
`protobuf:"bytes,7,opt,name=path,proto3" json:"path,omitempty"` - Shards []string `protobuf:"bytes,8,rep,name=shards,proto3" json:"shards"` - Plan *github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,10,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` + Step int64 `protobuf:"varint,3,opt,name=step,proto3" json:"step,omitempty"` + Interval int64 `protobuf:"varint,9,opt,name=interval,proto3" json:"interval,omitempty"` + StartTs time.Time `protobuf:"bytes,4,opt,name=startTs,proto3,stdtime" json:"startTs"` + EndTs time.Time `protobuf:"bytes,5,opt,name=endTs,proto3,stdtime" json:"endTs"` + Direction logproto.Direction `protobuf:"varint,6,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` + Path string `protobuf:"bytes,7,opt,name=path,proto3" json:"path,omitempty"` + Shards []string `protobuf:"bytes,8,rep,name=shards,proto3" json:"shards"` + Plan *github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,10,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` } func (m *LokiRequest) Reset() { *m = LokiRequest{} } @@ -150,13 +150,13 @@ func (m *LokiRequest) GetShards() []string { } type LokiInstantRequest struct { - Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` - Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` - TimeTs time.Time `protobuf:"bytes,3,opt,name=timeTs,proto3,stdtime" json:"timeTs"` - Direction logproto.Direction `protobuf:"varint,4,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` - Path string `protobuf:"bytes,5,opt,name=path,proto3" json:"path,omitempty"` - Shards []string `protobuf:"bytes,6,rep,name=shards,proto3" json:"shards"` - Plan 
*github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,7,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` + TimeTs time.Time `protobuf:"bytes,3,opt,name=timeTs,proto3,stdtime" json:"timeTs"` + Direction logproto.Direction `protobuf:"varint,4,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` + Path string `protobuf:"bytes,5,opt,name=path,proto3" json:"path,omitempty"` + Shards []string `protobuf:"bytes,6,rep,name=shards,proto3" json:"shards"` + Plan *github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,7,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` } func (m *LokiInstantRequest) Reset() { *m = LokiInstantRequest{} } @@ -277,15 +277,15 @@ func (m *Plan) GetRaw() []byte { } type LokiResponse struct { - Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` - Data LokiData `protobuf:"bytes,2,opt,name=Data,proto3" json:"data,omitempty"` - ErrorType string `protobuf:"bytes,3,opt,name=ErrorType,proto3" json:"errorType,omitempty"` - Error string `protobuf:"bytes,4,opt,name=Error,proto3" json:"error,omitempty"` - Direction logproto.Direction `protobuf:"varint,5,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` - Limit uint32 `protobuf:"varint,6,opt,name=limit,proto3" json:"limit,omitempty"` - Version uint32 `protobuf:"varint,7,opt,name=version,proto3" json:"version,omitempty"` - Statistics stats.Result `protobuf:"bytes,8,opt,name=statistics,proto3" json:"statistics"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader 
`protobuf:"bytes,9,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` + Data LokiData `protobuf:"bytes,2,opt,name=Data,proto3" json:"data,omitempty"` + ErrorType string `protobuf:"bytes,3,opt,name=ErrorType,proto3" json:"errorType,omitempty"` + Error string `protobuf:"bytes,4,opt,name=Error,proto3" json:"error,omitempty"` + Direction logproto.Direction `protobuf:"varint,5,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` + Limit uint32 `protobuf:"varint,6,opt,name=limit,proto3" json:"limit,omitempty"` + Version uint32 `protobuf:"varint,7,opt,name=version,proto3" json:"version,omitempty"` + Statistics stats.Result `protobuf:"bytes,8,opt,name=statistics,proto3" json:"statistics"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,9,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` } func (m *LokiResponse) Reset() { *m = LokiResponse{} } @@ -452,11 +452,11 @@ func (m *LokiSeriesRequest) GetShards() []string { } type LokiSeriesResponse struct { - Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` - Data []logproto.SeriesIdentifier `protobuf:"bytes,2,rep,name=Data,proto3" json:"data,omitempty"` - Version uint32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,4,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` - Statistics stats.Result `protobuf:"bytes,5,opt,name=statistics,proto3" json:"statistics"` + Status string 
`protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` + Data []logproto.SeriesIdentifier `protobuf:"bytes,2,rep,name=Data,proto3" json:"data,omitempty"` + Version uint32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,4,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Statistics stats.Result `protobuf:"bytes,5,opt,name=statistics,proto3" json:"statistics"` } func (m *LokiSeriesResponse) Reset() { *m = LokiSeriesResponse{} } @@ -520,11 +520,11 @@ func (m *LokiSeriesResponse) GetStatistics() stats.Result { } type LokiLabelNamesResponse struct { - Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` - Data []string `protobuf:"bytes,2,rep,name=Data,proto3" json:"data,omitempty"` - Version uint32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,4,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` - Statistics stats.Result `protobuf:"bytes,5,opt,name=statistics,proto3" json:"statistics"` + Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` + Data []string `protobuf:"bytes,2,rep,name=Data,proto3" json:"data,omitempty"` + Version uint32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,4,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Statistics stats.Result 
`protobuf:"bytes,5,opt,name=statistics,proto3" json:"statistics"` } func (m *LokiLabelNamesResponse) Reset() { *m = LokiLabelNamesResponse{} } @@ -684,8 +684,8 @@ func (m *LokiPromResponse) GetStatistics() stats.Result { } type IndexStatsResponse struct { - Response *github_com_grafana_loki_pkg_logproto.IndexStatsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.IndexStatsResponse" json:"response,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Response *github_com_grafana_loki_v3_pkg_logproto.IndexStatsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.IndexStatsResponse" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` } func (m *IndexStatsResponse) Reset() { *m = IndexStatsResponse{} } @@ -721,8 +721,8 @@ func (m *IndexStatsResponse) XXX_DiscardUnknown() { var xxx_messageInfo_IndexStatsResponse proto.InternalMessageInfo type VolumeResponse struct { - Response *github_com_grafana_loki_pkg_logproto.VolumeResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.VolumeResponse" json:"response,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Response 
*github_com_grafana_loki_v3_pkg_logproto.VolumeResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.VolumeResponse" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` } func (m *VolumeResponse) Reset() { *m = VolumeResponse{} } @@ -758,8 +758,8 @@ func (m *VolumeResponse) XXX_DiscardUnknown() { var xxx_messageInfo_VolumeResponse proto.InternalMessageInfo type TopKSketchesResponse struct { - Response *github_com_grafana_loki_pkg_logproto.TopKMatrix `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.TopKMatrix" json:"response,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Response *github_com_grafana_loki_v3_pkg_logproto.TopKMatrix `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.TopKMatrix" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` } func (m *TopKSketchesResponse) Reset() { *m = TopKSketchesResponse{} } @@ -795,8 +795,8 @@ func (m *TopKSketchesResponse) XXX_DiscardUnknown() { var xxx_messageInfo_TopKSketchesResponse proto.InternalMessageInfo type QuantileSketchResponse struct { - Response *github_com_grafana_loki_pkg_logproto.QuantileSketchMatrix 
`protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.QuantileSketchMatrix" json:"response,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Response *github_com_grafana_loki_v3_pkg_logproto.QuantileSketchMatrix `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.QuantileSketchMatrix" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` } func (m *QuantileSketchResponse) Reset() { *m = QuantileSketchResponse{} } @@ -831,6 +831,117 @@ func (m *QuantileSketchResponse) XXX_DiscardUnknown() { var xxx_messageInfo_QuantileSketchResponse proto.InternalMessageInfo +type ShardsResponse struct { + Response *github_com_grafana_loki_v3_pkg_logproto.ShardsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.ShardsResponse" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` +} + +func (m *ShardsResponse) Reset() { *m = ShardsResponse{} } +func (*ShardsResponse) ProtoMessage() {} +func (*ShardsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_51b9d53b40d11902, []int{13} +} +func (m *ShardsResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ShardsResponse) XXX_Marshal(b 
[]byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_ShardsResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *ShardsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ShardsResponse.Merge(m, src) +} +func (m *ShardsResponse) XXX_Size() int { + return m.Size() +} +func (m *ShardsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ShardsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ShardsResponse proto.InternalMessageInfo + +type DetectedFieldsResponse struct { + Response *github_com_grafana_loki_v3_pkg_logproto.DetectedFieldsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.DetectedFieldsResponse" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` +} + +func (m *DetectedFieldsResponse) Reset() { *m = DetectedFieldsResponse{} } +func (*DetectedFieldsResponse) ProtoMessage() {} +func (*DetectedFieldsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_51b9d53b40d11902, []int{14} +} +func (m *DetectedFieldsResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedFieldsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedFieldsResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedFieldsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedFieldsResponse.Merge(m, src) +} +func (m *DetectedFieldsResponse) XXX_Size() int { + return 
m.Size() +} +func (m *DetectedFieldsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedFieldsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedFieldsResponse proto.InternalMessageInfo + +type DetectedLabelsResponse struct { + Response *github_com_grafana_loki_v3_pkg_logproto.DetectedLabelsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.DetectedLabelsResponse" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` +} + +func (m *DetectedLabelsResponse) Reset() { *m = DetectedLabelsResponse{} } +func (*DetectedLabelsResponse) ProtoMessage() {} +func (*DetectedLabelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_51b9d53b40d11902, []int{15} +} +func (m *DetectedLabelsResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedLabelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedLabelsResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedLabelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedLabelsResponse.Merge(m, src) +} +func (m *DetectedLabelsResponse) XXX_Size() int { + return m.Size() +} +func (m *DetectedLabelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedLabelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedLabelsResponse proto.InternalMessageInfo + type QueryResponse struct { Status *rpc.Status `protobuf:"bytes,1,opt,name=status,proto3" json:"status,omitempty"` // Types that are valid to be assigned to Response: @@ -842,13 +953,16 @@ type 
QueryResponse struct { // *QueryResponse_Volume // *QueryResponse_TopkSketches // *QueryResponse_QuantileSketches + // *QueryResponse_ShardsResponse + // *QueryResponse_DetectedFields + // *QueryResponse_DetectedLabels Response isQueryResponse_Response `protobuf_oneof:"response"` } func (m *QueryResponse) Reset() { *m = QueryResponse{} } func (*QueryResponse) ProtoMessage() {} func (*QueryResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_51b9d53b40d11902, []int{13} + return fileDescriptor_51b9d53b40d11902, []int{16} } func (m *QueryResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -908,6 +1022,15 @@ type QueryResponse_TopkSketches struct { type QueryResponse_QuantileSketches struct { QuantileSketches *QuantileSketchResponse `protobuf:"bytes,9,opt,name=quantileSketches,proto3,oneof"` } +type QueryResponse_ShardsResponse struct { + ShardsResponse *ShardsResponse `protobuf:"bytes,10,opt,name=shardsResponse,proto3,oneof"` +} +type QueryResponse_DetectedFields struct { + DetectedFields *DetectedFieldsResponse `protobuf:"bytes,11,opt,name=detectedFields,proto3,oneof"` +} +type QueryResponse_DetectedLabels struct { + DetectedLabels *DetectedLabelsResponse `protobuf:"bytes,12,opt,name=detectedLabels,proto3,oneof"` +} func (*QueryResponse_Series) isQueryResponse_Response() {} func (*QueryResponse_Labels) isQueryResponse_Response() {} @@ -917,6 +1040,9 @@ func (*QueryResponse_Streams) isQueryResponse_Response() {} func (*QueryResponse_Volume) isQueryResponse_Response() {} func (*QueryResponse_TopkSketches) isQueryResponse_Response() {} func (*QueryResponse_QuantileSketches) isQueryResponse_Response() {} +func (*QueryResponse_ShardsResponse) isQueryResponse_Response() {} +func (*QueryResponse_DetectedFields) isQueryResponse_Response() {} +func (*QueryResponse_DetectedLabels) isQueryResponse_Response() {} func (m *QueryResponse) GetResponse() isQueryResponse_Response { if m != nil { @@ -988,6 +1114,27 @@ func (m *QueryResponse) 
GetQuantileSketches() *QuantileSketchResponse { return nil } +func (m *QueryResponse) GetShardsResponse() *ShardsResponse { + if x, ok := m.GetResponse().(*QueryResponse_ShardsResponse); ok { + return x.ShardsResponse + } + return nil +} + +func (m *QueryResponse) GetDetectedFields() *DetectedFieldsResponse { + if x, ok := m.GetResponse().(*QueryResponse_DetectedFields); ok { + return x.DetectedFields + } + return nil +} + +func (m *QueryResponse) GetDetectedLabels() *DetectedLabelsResponse { + if x, ok := m.GetResponse().(*QueryResponse_DetectedLabels); ok { + return x.DetectedLabels + } + return nil +} + // XXX_OneofWrappers is for the internal use of the proto package. func (*QueryResponse) XXX_OneofWrappers() []interface{} { return []interface{}{ @@ -999,6 +1146,9 @@ func (*QueryResponse) XXX_OneofWrappers() []interface{} { (*QueryResponse_Volume)(nil), (*QueryResponse_TopkSketches)(nil), (*QueryResponse_QuantileSketches)(nil), + (*QueryResponse_ShardsResponse)(nil), + (*QueryResponse_DetectedFields)(nil), + (*QueryResponse_DetectedLabels)(nil), } } @@ -1010,6 +1160,9 @@ type QueryRequest struct { // *QueryRequest_Instant // *QueryRequest_Streams // *QueryRequest_Volume + // *QueryRequest_ShardsRequest + // *QueryRequest_DetectedFields + // *QueryRequest_DetectedLabels Request isQueryRequest_Request `protobuf_oneof:"request"` Metadata map[string]string `protobuf:"bytes,7,rep,name=metadata,proto3" json:"metadata" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } @@ -1017,7 +1170,7 @@ type QueryRequest struct { func (m *QueryRequest) Reset() { *m = QueryRequest{} } func (*QueryRequest) ProtoMessage() {} func (*QueryRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_51b9d53b40d11902, []int{14} + return fileDescriptor_51b9d53b40d11902, []int{17} } func (m *QueryRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1071,13 +1224,25 @@ type QueryRequest_Streams struct { type QueryRequest_Volume struct 
{ Volume *logproto.VolumeRequest `protobuf:"bytes,6,opt,name=volume,proto3,oneof"` } +type QueryRequest_ShardsRequest struct { + ShardsRequest *logproto.ShardsRequest `protobuf:"bytes,8,opt,name=shardsRequest,proto3,oneof"` +} +type QueryRequest_DetectedFields struct { + DetectedFields *logproto.DetectedFieldsRequest `protobuf:"bytes,9,opt,name=detectedFields,proto3,oneof"` +} +type QueryRequest_DetectedLabels struct { + DetectedLabels *logproto.DetectedLabelsRequest `protobuf:"bytes,10,opt,name=detectedLabels,proto3,oneof"` +} -func (*QueryRequest_Series) isQueryRequest_Request() {} -func (*QueryRequest_Labels) isQueryRequest_Request() {} -func (*QueryRequest_Stats) isQueryRequest_Request() {} -func (*QueryRequest_Instant) isQueryRequest_Request() {} -func (*QueryRequest_Streams) isQueryRequest_Request() {} -func (*QueryRequest_Volume) isQueryRequest_Request() {} +func (*QueryRequest_Series) isQueryRequest_Request() {} +func (*QueryRequest_Labels) isQueryRequest_Request() {} +func (*QueryRequest_Stats) isQueryRequest_Request() {} +func (*QueryRequest_Instant) isQueryRequest_Request() {} +func (*QueryRequest_Streams) isQueryRequest_Request() {} +func (*QueryRequest_Volume) isQueryRequest_Request() {} +func (*QueryRequest_ShardsRequest) isQueryRequest_Request() {} +func (*QueryRequest_DetectedFields) isQueryRequest_Request() {} +func (*QueryRequest_DetectedLabels) isQueryRequest_Request() {} func (m *QueryRequest) GetRequest() isQueryRequest_Request { if m != nil { @@ -1128,6 +1293,27 @@ func (m *QueryRequest) GetVolume() *logproto.VolumeRequest { return nil } +func (m *QueryRequest) GetShardsRequest() *logproto.ShardsRequest { + if x, ok := m.GetRequest().(*QueryRequest_ShardsRequest); ok { + return x.ShardsRequest + } + return nil +} + +func (m *QueryRequest) GetDetectedFields() *logproto.DetectedFieldsRequest { + if x, ok := m.GetRequest().(*QueryRequest_DetectedFields); ok { + return x.DetectedFields + } + return nil +} + +func (m *QueryRequest) 
GetDetectedLabels() *logproto.DetectedLabelsRequest { + if x, ok := m.GetRequest().(*QueryRequest_DetectedLabels); ok { + return x.DetectedLabels + } + return nil +} + func (m *QueryRequest) GetMetadata() map[string]string { if m != nil { return m.Metadata @@ -1144,6 +1330,9 @@ func (*QueryRequest) XXX_OneofWrappers() []interface{} { (*QueryRequest_Instant)(nil), (*QueryRequest_Streams)(nil), (*QueryRequest_Volume)(nil), + (*QueryRequest_ShardsRequest)(nil), + (*QueryRequest_DetectedFields)(nil), + (*QueryRequest_DetectedLabels)(nil), } } @@ -1161,6 +1350,9 @@ func init() { proto.RegisterType((*VolumeResponse)(nil), "queryrange.VolumeResponse") proto.RegisterType((*TopKSketchesResponse)(nil), "queryrange.TopKSketchesResponse") proto.RegisterType((*QuantileSketchResponse)(nil), "queryrange.QuantileSketchResponse") + proto.RegisterType((*ShardsResponse)(nil), "queryrange.ShardsResponse") + proto.RegisterType((*DetectedFieldsResponse)(nil), "queryrange.DetectedFieldsResponse") + proto.RegisterType((*DetectedLabelsResponse)(nil), "queryrange.DetectedLabelsResponse") proto.RegisterType((*QueryResponse)(nil), "queryrange.QueryResponse") proto.RegisterType((*QueryRequest)(nil), "queryrange.QueryRequest") proto.RegisterMapType((map[string]string)(nil), "queryrange.QueryRequest.MetadataEntry") @@ -1171,102 +1363,115 @@ func init() { } var fileDescriptor_51b9d53b40d11902 = []byte{ - // 1514 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x58, 0x5b, 0x6f, 0x1b, 0x45, - 0x1b, 0xf6, 0xfa, 0x18, 0x4f, 0x0e, 0x5f, 0xbe, 0x49, 0x94, 0xee, 0x97, 0xf6, 0xdb, 0xb5, 0x2c, - 0xd1, 0x06, 0x04, 0x6b, 0x9a, 0x94, 0x1e, 0x01, 0xd1, 0xa5, 0xad, 0x52, 0xd1, 0xa2, 0x76, 0x13, - 0x71, 0x81, 0xb8, 0x99, 0xd8, 0x13, 0x7b, 0xf1, 0x9e, 0xb2, 0x33, 0x0e, 0xcd, 0x1d, 0x3f, 0x00, - 0xa4, 0xfe, 0x0a, 0x84, 0x44, 0x55, 0x89, 0x5b, 0x2e, 0xb9, 0xa1, 0x97, 0xbd, 0xac, 0x2c, 0xb1, - 0x50, 0x97, 0x0b, 0x94, 0xab, 0xfe, 0x04, 0x34, 0x87, 0x5d, 
0xef, 0xda, 0x6e, 0xeb, 0x14, 0x21, - 0xb5, 0x12, 0x37, 0xf6, 0x1c, 0xde, 0x67, 0xf6, 0xdd, 0xe7, 0x79, 0xdf, 0x77, 0x66, 0x16, 0x9c, - 0x0a, 0xba, 0xed, 0xc6, 0x5e, 0x0f, 0x87, 0x36, 0x0e, 0xf9, 0xff, 0x41, 0x88, 0xbc, 0x36, 0x4e, - 0x35, 0x8d, 0x20, 0xf4, 0xa9, 0x0f, 0xc1, 0x70, 0x64, 0x75, 0xbd, 0x6d, 0xd3, 0x4e, 0x6f, 0xc7, - 0x68, 0xfa, 0x6e, 0xa3, 0xed, 0xb7, 0xfd, 0x46, 0xdb, 0xf7, 0xdb, 0x0e, 0x46, 0x81, 0x4d, 0x64, - 0xb3, 0x11, 0x06, 0xcd, 0x06, 0xa1, 0x88, 0xf6, 0x88, 0xc0, 0xaf, 0x2e, 0x33, 0x43, 0xde, 0xe4, - 0x10, 0x39, 0xaa, 0x4b, 0x73, 0xde, 0xdb, 0xe9, 0xed, 0x36, 0xa8, 0xed, 0x62, 0x42, 0x91, 0x1b, - 0x48, 0x83, 0xe3, 0xcc, 0x3f, 0xc7, 0x6f, 0x0b, 0x64, 0xdc, 0x90, 0x93, 0xff, 0xcb, 0x4c, 0x92, - 0x2e, 0xa6, 0xcd, 0x8e, 0x9c, 0xaa, 0xc9, 0xa9, 0x3d, 0xc7, 0xf5, 0x5b, 0xd8, 0xe1, 0xbe, 0x10, - 0xf1, 0x2b, 0x2d, 0x96, 0x98, 0x45, 0xd0, 0x23, 0x1d, 0xfe, 0x23, 0x07, 0x3f, 0x7e, 0x21, 0x1d, - 0x3b, 0x88, 0xe0, 0x46, 0x0b, 0xef, 0xda, 0x9e, 0x4d, 0x6d, 0xdf, 0x23, 0xe9, 0xb6, 0x5c, 0xe4, - 0xec, 0x74, 0x8b, 0x8c, 0x52, 0x5c, 0xbf, 0x5f, 0x00, 0xb3, 0x37, 0xfc, 0xae, 0x6d, 0xe1, 0xbd, - 0x1e, 0x26, 0x14, 0x2e, 0x83, 0x12, 0xb7, 0x51, 0x95, 0x9a, 0xb2, 0x56, 0xb5, 0x44, 0x87, 0x8d, - 0x3a, 0xb6, 0x6b, 0x53, 0x35, 0x5f, 0x53, 0xd6, 0xe6, 0x2d, 0xd1, 0x81, 0x10, 0x14, 0x09, 0xc5, - 0x81, 0x5a, 0xa8, 0x29, 0x6b, 0x05, 0x8b, 0xb7, 0xe1, 0x2a, 0x98, 0xb1, 0x3d, 0x8a, 0xc3, 0x7d, - 0xe4, 0xa8, 0x55, 0x3e, 0x9e, 0xf4, 0xe1, 0x87, 0xa0, 0x42, 0x28, 0x0a, 0xe9, 0x36, 0x51, 0x8b, - 0x35, 0x65, 0x6d, 0x76, 0x7d, 0xd5, 0x10, 0x52, 0x18, 0xb1, 0x14, 0xc6, 0x76, 0x2c, 0x85, 0x39, - 0xf3, 0x20, 0xd2, 0x73, 0x77, 0x7f, 0xd3, 0x15, 0x2b, 0x06, 0xc1, 0x8b, 0xa0, 0x84, 0xbd, 0xd6, - 0x36, 0x51, 0x4b, 0x47, 0x40, 0x0b, 0x08, 0x3c, 0x0d, 0xaa, 0x2d, 0x3b, 0xc4, 0x4d, 0xc6, 0x99, - 0x5a, 0xae, 0x29, 0x6b, 0x0b, 0xeb, 0x4b, 0x46, 0x22, 0xed, 0x95, 0x78, 0xca, 0x1a, 0x5a, 0xb1, - 0xd7, 0x0b, 0x10, 0xed, 0xa8, 0x15, 0xce, 0x04, 0x6f, 0xc3, 0x3a, 0x28, 0x93, 0x0e, 0x0a, 0x5b, - 
0x44, 0x9d, 0xa9, 0x15, 0xd6, 0xaa, 0x26, 0x38, 0x8c, 0x74, 0x39, 0x62, 0xc9, 0x7f, 0xf8, 0x05, - 0x28, 0x06, 0x0e, 0xf2, 0x54, 0xc0, 0xbd, 0x5c, 0x34, 0x52, 0x9c, 0xdf, 0x72, 0x90, 0x67, 0x9e, - 0xed, 0x47, 0x7a, 0x26, 0x9a, 0x43, 0xb4, 0x8b, 0x3c, 0xd4, 0x70, 0xfc, 0xae, 0xdd, 0x48, 0xcb, - 0xc8, 0x56, 0x31, 0x6e, 0x33, 0x34, 0xc3, 0x59, 0x7c, 0xd5, 0xfa, 0x2f, 0x79, 0x00, 0x99, 0x60, - 0xd7, 0x3d, 0x42, 0x91, 0x47, 0x5f, 0x46, 0xb7, 0xf7, 0x41, 0x99, 0x85, 0xfc, 0x36, 0xe1, 0xca, - 0x4d, 0x4b, 0xa4, 0xc4, 0x64, 0x99, 0x2c, 0x1e, 0x89, 0xc9, 0xd2, 0x44, 0x26, 0xcb, 0x2f, 0x64, - 0xb2, 0xf2, 0x8f, 0x30, 0xa9, 0x82, 0x22, 0xeb, 0xc1, 0x45, 0x50, 0x08, 0xd1, 0x57, 0x9c, 0xb8, - 0x39, 0x8b, 0x35, 0xeb, 0x3f, 0x14, 0xc1, 0x9c, 0x48, 0x0a, 0x12, 0xf8, 0x1e, 0xc1, 0xcc, 0xd9, - 0x2d, 0x5e, 0x58, 0x04, 0xbd, 0xd2, 0x59, 0x3e, 0x62, 0xc9, 0x19, 0xf8, 0x11, 0x28, 0x5e, 0x41, - 0x14, 0x71, 0xaa, 0x67, 0xd7, 0x97, 0xd3, 0xce, 0xb2, 0xb5, 0xd8, 0x9c, 0xb9, 0xc2, 0xd8, 0x3c, - 0x8c, 0xf4, 0x85, 0x16, 0xa2, 0xe8, 0x6d, 0xdf, 0xb5, 0x29, 0x76, 0x03, 0x7a, 0x60, 0x71, 0x24, - 0x7c, 0x0f, 0x54, 0xaf, 0x86, 0xa1, 0x1f, 0x6e, 0x1f, 0x04, 0x98, 0x4b, 0x53, 0x35, 0x8f, 0x1d, - 0x46, 0xfa, 0x12, 0x8e, 0x07, 0x53, 0x88, 0xa1, 0x25, 0x7c, 0x13, 0x94, 0x78, 0x87, 0x8b, 0x51, - 0x35, 0x97, 0x0e, 0x23, 0xfd, 0x3f, 0x1c, 0x92, 0x32, 0x17, 0x16, 0x59, 0xed, 0x4a, 0x53, 0x69, - 0x97, 0x84, 0x50, 0x39, 0x1d, 0x42, 0x2a, 0xa8, 0xec, 0xe3, 0x90, 0xb0, 0x65, 0x2a, 0x7c, 0x3c, - 0xee, 0xc2, 0xcb, 0x00, 0x30, 0x62, 0x6c, 0x42, 0xed, 0x26, 0xcb, 0x12, 0x46, 0xc6, 0xbc, 0x21, - 0x8a, 0xa0, 0x85, 0x49, 0xcf, 0xa1, 0x26, 0x94, 0x2c, 0xa4, 0x0c, 0xad, 0x54, 0x1b, 0xde, 0x53, - 0x40, 0x65, 0x13, 0xa3, 0x16, 0x0e, 0x89, 0x5a, 0xad, 0x15, 0xd6, 0x66, 0xd7, 0xdf, 0x30, 0xd2, - 0x15, 0xef, 0x56, 0xe8, 0xbb, 0x98, 0x76, 0x70, 0x8f, 0xc4, 0x02, 0x09, 0x6b, 0xb3, 0xdb, 0x8f, - 0xf4, 0x9d, 0x69, 0xe2, 0x61, 0xaa, 0x2a, 0xfb, 0xcc, 0xe7, 0x1c, 0x46, 0xba, 0xf2, 0x8e, 0x15, - 0xbb, 0x58, 0xff, 0x55, 0x01, 0xff, 
0x65, 0x0a, 0x6f, 0xb1, 0xb5, 0x49, 0x2a, 0x21, 0x5d, 0x44, - 0x9b, 0x1d, 0x55, 0x61, 0xe1, 0x6d, 0x89, 0x4e, 0xba, 0x04, 0xe6, 0xff, 0x56, 0x09, 0x2c, 0x1c, - 0xbd, 0x04, 0xc6, 0x59, 0x58, 0x9c, 0x98, 0x85, 0xa5, 0x67, 0x65, 0x61, 0xfd, 0x9b, 0x82, 0xa8, - 0x38, 0xf1, 0xfb, 0x1d, 0x21, 0x27, 0xae, 0x25, 0x39, 0x51, 0xe0, 0xde, 0x26, 0xa1, 0x26, 0xd6, - 0xba, 0xde, 0xc2, 0x1e, 0xb5, 0x77, 0x6d, 0x1c, 0xbe, 0x20, 0x33, 0x52, 0xe1, 0x56, 0xc8, 0x86, - 0x5b, 0x3a, 0x56, 0x8a, 0xaf, 0x7c, 0xac, 0x8c, 0x64, 0x47, 0xe9, 0x25, 0xb2, 0xa3, 0xfe, 0x34, - 0x0f, 0x56, 0x98, 0x1c, 0x37, 0xd0, 0x0e, 0x76, 0x3e, 0x45, 0xee, 0x11, 0x25, 0x39, 0x99, 0x92, - 0xa4, 0x6a, 0xc2, 0x7f, 0x29, 0x9f, 0x82, 0xf2, 0xef, 0x14, 0x30, 0x13, 0xd7, 0x70, 0x68, 0x00, - 0x20, 0x60, 0xbc, 0x4c, 0x0b, 0xa2, 0x17, 0x18, 0x38, 0x4c, 0x46, 0xad, 0x94, 0x05, 0xfc, 0x12, - 0x94, 0x45, 0x4f, 0x66, 0xc1, 0xb1, 0x54, 0x16, 0xd0, 0x10, 0x23, 0xf7, 0x72, 0x0b, 0x05, 0x14, - 0x87, 0xe6, 0x05, 0xe6, 0x45, 0x3f, 0xd2, 0x4f, 0x3d, 0x8f, 0x22, 0x7e, 0x6e, 0x14, 0x38, 0x26, - 0xae, 0x78, 0xa6, 0x25, 0x9f, 0x50, 0xff, 0x56, 0x01, 0x8b, 0xcc, 0x51, 0x46, 0x4d, 0x12, 0x15, - 0x57, 0xc0, 0x4c, 0x28, 0xdb, 0xdc, 0xdd, 0xd9, 0xf5, 0xba, 0x91, 0xa5, 0x75, 0x02, 0x95, 0x66, - 0xf1, 0x41, 0xa4, 0x2b, 0x56, 0x82, 0x84, 0x1b, 0x19, 0x1a, 0xf3, 0x93, 0x68, 0x64, 0x90, 0x5c, - 0x86, 0xb8, 0x9f, 0xf2, 0x00, 0x5e, 0xf7, 0x5a, 0xf8, 0x0e, 0x0b, 0xbe, 0x61, 0x9c, 0xf6, 0xc6, - 0x3c, 0x3a, 0x31, 0x24, 0x65, 0xdc, 0xde, 0xbc, 0xd4, 0x8f, 0xf4, 0x73, 0xcf, 0x63, 0xe5, 0x39, - 0xe0, 0xd4, 0x2b, 0xa4, 0x03, 0x37, 0xff, 0xea, 0xef, 0x2b, 0xf7, 0xf3, 0x60, 0xe1, 0x33, 0xdf, - 0xe9, 0xb9, 0x38, 0x21, 0xce, 0x1d, 0x23, 0x4e, 0x1d, 0x12, 0x97, 0xb5, 0x35, 0xcf, 0xf5, 0x23, - 0x7d, 0x63, 0x2a, 0xd2, 0xb2, 0xc0, 0xd7, 0x97, 0xb0, 0x7b, 0x79, 0xb0, 0xbc, 0xed, 0x07, 0x9f, - 0x6c, 0xf1, 0x4b, 0x59, 0xaa, 0x2e, 0xe2, 0x31, 0xda, 0x96, 0x87, 0xb4, 0x31, 0xc4, 0x4d, 0x44, - 0x43, 0xfb, 0x8e, 0xb9, 0xd1, 0x8f, 0xf4, 0xc6, 0x54, 0x94, 0x0d, 0x41, 
0xaf, 0x2f, 0x5d, 0x3f, - 0xe7, 0xc1, 0xca, 0xed, 0x1e, 0xf2, 0xa8, 0xed, 0x60, 0x41, 0x59, 0x42, 0xd8, 0xc1, 0x18, 0x61, - 0xda, 0x90, 0xb0, 0x2c, 0x46, 0x52, 0xf7, 0x41, 0x3f, 0xd2, 0x2f, 0x4c, 0x45, 0xdd, 0x24, 0xf8, - 0xeb, 0x4b, 0xe2, 0x8f, 0x45, 0x30, 0xcf, 0x2f, 0x16, 0x09, 0x77, 0x6f, 0x01, 0xb9, 0xe5, 0x4a, - 0xe6, 0x60, 0x7c, 0x46, 0x0b, 0x83, 0xa6, 0xb1, 0x25, 0x37, 0x63, 0x61, 0x01, 0xcf, 0x83, 0x32, - 0xe1, 0x27, 0x21, 0x59, 0x50, 0xb5, 0xd1, 0x5b, 0x43, 0xf6, 0xcc, 0xb5, 0x99, 0xb3, 0xa4, 0x3d, - 0xbb, 0xc3, 0x39, 0xec, 0x00, 0x10, 0x9f, 0x04, 0xeb, 0xa3, 0xc8, 0xf1, 0xe3, 0x01, 0x43, 0x0b, - 0x0c, 0x3c, 0x0b, 0x4a, 0xbc, 0x72, 0xcb, 0x7b, 0x78, 0xe6, 0xb1, 0xe3, 0x25, 0x74, 0x33, 0x67, - 0x09, 0x73, 0xb8, 0x0e, 0x8a, 0x41, 0xe8, 0xbb, 0x72, 0x17, 0x3d, 0x31, 0xfa, 0xcc, 0xf4, 0xb6, - 0xb3, 0x99, 0xb3, 0xb8, 0x2d, 0x3c, 0xc3, 0x8e, 0xbc, 0x6c, 0xbf, 0x22, 0xfc, 0x0a, 0xc1, 0x4a, - 0xd6, 0x08, 0x2c, 0x05, 0x89, 0x4d, 0xe1, 0x19, 0x50, 0xde, 0xe7, 0x65, 0x49, 0x5e, 0xfe, 0x56, - 0xd3, 0xa0, 0x6c, 0xc1, 0x62, 0xef, 0x25, 0x6c, 0xe1, 0x35, 0x30, 0x47, 0xfd, 0xa0, 0x1b, 0x17, - 0x00, 0x79, 0xfd, 0xa8, 0xa5, 0xb1, 0x93, 0x0a, 0xc4, 0x66, 0xce, 0xca, 0xe0, 0xe0, 0x2d, 0xb0, - 0xb8, 0x97, 0x09, 0x53, 0x4c, 0xf8, 0xd7, 0x8c, 0x11, 0x9e, 0x27, 0x67, 0xcf, 0x66, 0xce, 0x1a, - 0x43, 0x9b, 0x60, 0x98, 0x51, 0xf5, 0x3f, 0x0a, 0x60, 0x4e, 0xc6, 0x8c, 0xb8, 0x2b, 0x9c, 0x4b, - 0xc2, 0x40, 0x84, 0xcc, 0xff, 0x9f, 0x15, 0x06, 0xdc, 0x3c, 0x15, 0x05, 0xef, 0x26, 0x51, 0x20, - 0xe2, 0x67, 0x65, 0x98, 0xa5, 0x5c, 0xff, 0x14, 0x42, 0x2a, 0xbf, 0x11, 0x2b, 0x2f, 0xc2, 0xe6, - 0xf8, 0xe4, 0x7d, 0x37, 0x46, 0x49, 0xd9, 0x2f, 0x82, 0x8a, 0x2d, 0x3e, 0x37, 0x4c, 0x0a, 0x98, - 0xf1, 0xaf, 0x11, 0x4c, 0x48, 0x09, 0x80, 0x1b, 0x43, 0xf9, 0x45, 0xd4, 0x1c, 0x1b, 0x97, 0x3f, - 0x01, 0xc5, 0xea, 0x9f, 0x4e, 0xd4, 0x2f, 0x4b, 0xcc, 0xd8, 0x66, 0x95, 0xbc, 0x98, 0x94, 0x7e, - 0x13, 0xcc, 0xb8, 0x98, 0x22, 0x76, 0x96, 0x55, 0x2b, 0xbc, 0x6e, 0x9c, 0xcc, 0x4a, 0x35, 0xe4, - 0xdb, 0xb8, 
0x29, 0x0d, 0xaf, 0x7a, 0x34, 0x3c, 0x90, 0xc7, 0x96, 0x04, 0xbd, 0x7a, 0x09, 0xcc, - 0x67, 0x0c, 0xe0, 0x22, 0x28, 0x74, 0x71, 0xfc, 0x65, 0x85, 0x35, 0xd9, 0xe5, 0x6e, 0x1f, 0x39, - 0x3d, 0xcc, 0x69, 0xaf, 0x5a, 0xa2, 0x73, 0x31, 0x7f, 0x5e, 0x31, 0xab, 0xa0, 0x12, 0x8a, 0xa7, - 0x98, 0xad, 0x87, 0x8f, 0xb5, 0xdc, 0xa3, 0xc7, 0x5a, 0xee, 0xe9, 0x63, 0x4d, 0xf9, 0x7a, 0xa0, - 0x29, 0xdf, 0x0f, 0x34, 0xe5, 0xc1, 0x40, 0x53, 0x1e, 0x0e, 0x34, 0xe5, 0xf7, 0x81, 0xa6, 0xfc, - 0x39, 0xd0, 0x72, 0x4f, 0x07, 0x9a, 0x72, 0xf7, 0x89, 0x96, 0x7b, 0xf8, 0x44, 0xcb, 0x3d, 0x7a, - 0xa2, 0xe5, 0x3e, 0x37, 0x8e, 0x56, 0xc2, 0x76, 0xca, 0x9c, 0x96, 0x8d, 0xbf, 0x02, 0x00, 0x00, - 0xff, 0xff, 0xe6, 0x4a, 0x9a, 0x06, 0x55, 0x15, 0x00, 0x00, + // 1720 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x58, 0x4d, 0x6f, 0xdb, 0x46, + 0x1a, 0x16, 0xf5, 0x69, 0x8d, 0x3f, 0xd6, 0x3b, 0x36, 0x1c, 0xae, 0x93, 0x88, 0x82, 0x80, 0x4d, + 0xbc, 0x8b, 0x5d, 0x69, 0x23, 0x27, 0xde, 0xc4, 0x1b, 0x04, 0x09, 0xd7, 0x09, 0x64, 0x6c, 0xb2, + 0x48, 0x68, 0xa3, 0x87, 0x5e, 0x8a, 0xb1, 0x34, 0x96, 0x58, 0x53, 0x24, 0x4d, 0x8e, 0x9c, 0x18, + 0xe8, 0x21, 0x7f, 0xa0, 0x68, 0x80, 0xfe, 0x87, 0xa2, 0xa7, 0x16, 0x2d, 0x7a, 0xea, 0xa9, 0xbd, + 0x19, 0x05, 0x0a, 0xe4, 0x18, 0x08, 0xa8, 0xda, 0x38, 0x97, 0xc2, 0xa7, 0x00, 0xfd, 0x03, 0xc5, + 0x7c, 0x90, 0x1a, 0x8a, 0x52, 0x2d, 0xa5, 0xe8, 0xc1, 0x45, 0x2f, 0xd2, 0x7c, 0xbc, 0xcf, 0xcb, + 0xe1, 0xf3, 0x3e, 0xef, 0xf0, 0x9d, 0x01, 0x97, 0xdd, 0xbd, 0x66, 0x65, 0xbf, 0x83, 0x3d, 0x13, + 0x7b, 0xec, 0xff, 0xd0, 0x43, 0x76, 0x13, 0x4b, 0xcd, 0xb2, 0xeb, 0x39, 0xc4, 0x81, 0xa0, 0x3f, + 0xb2, 0x5c, 0x6d, 0x9a, 0xa4, 0xd5, 0xd9, 0x29, 0xd7, 0x9d, 0x76, 0xa5, 0xe9, 0x34, 0x9d, 0x4a, + 0xd3, 0x71, 0x9a, 0x16, 0x46, 0xae, 0xe9, 0x8b, 0x66, 0xc5, 0x73, 0xeb, 0x15, 0x9f, 0x20, 0xd2, + 0xf1, 0x39, 0x7e, 0x79, 0x91, 0x1a, 0xb2, 0x26, 0x83, 0x88, 0x51, 0x4d, 0x98, 0xb3, 0xde, 0x4e, + 0x67, 0xb7, 0x42, 0xcc, 0x36, 0xf6, 
0x09, 0x6a, 0xbb, 0x81, 0x01, 0x5d, 0x9f, 0xe5, 0x34, 0x39, + 0xd2, 0xb4, 0x1b, 0xf8, 0x49, 0x13, 0x11, 0xfc, 0x18, 0x1d, 0x0a, 0x83, 0xf3, 0x11, 0x83, 0xa0, + 0x21, 0x26, 0xff, 0x12, 0x99, 0xf4, 0xf7, 0x30, 0xa9, 0xb7, 0xc4, 0x54, 0x51, 0x4c, 0xed, 0x5b, + 0x6d, 0xa7, 0x81, 0x2d, 0xb6, 0x58, 0x9f, 0xff, 0x0a, 0x8b, 0x05, 0x6a, 0xe1, 0x76, 0xfc, 0x16, + 0xfb, 0x11, 0x83, 0xff, 0x3d, 0x95, 0xaf, 0x1d, 0xe4, 0xe3, 0x4a, 0x03, 0xef, 0x9a, 0xb6, 0x49, + 0x4c, 0xc7, 0xf6, 0xe5, 0xb6, 0x70, 0xb2, 0x36, 0x9e, 0x93, 0xc1, 0x18, 0x94, 0x3e, 0x4b, 0x81, + 0xe9, 0xfb, 0xce, 0x9e, 0x69, 0xe0, 0xfd, 0x0e, 0xf6, 0x09, 0x5c, 0x04, 0x19, 0x66, 0xa3, 0x2a, + 0x45, 0x65, 0x25, 0x6f, 0xf0, 0x0e, 0x1d, 0xb5, 0xcc, 0xb6, 0x49, 0xd4, 0x64, 0x51, 0x59, 0x99, + 0x35, 0x78, 0x07, 0x42, 0x90, 0xf6, 0x09, 0x76, 0xd5, 0x54, 0x51, 0x59, 0x49, 0x19, 0xac, 0x0d, + 0x97, 0xc1, 0x94, 0x69, 0x13, 0xec, 0x1d, 0x20, 0x4b, 0xcd, 0xb3, 0xf1, 0xb0, 0x0f, 0x6f, 0x81, + 0x9c, 0x4f, 0x90, 0x47, 0xb6, 0x7d, 0x35, 0x5d, 0x54, 0x56, 0xa6, 0xab, 0xcb, 0x65, 0x1e, 0xab, + 0x72, 0x10, 0xab, 0xf2, 0x76, 0x10, 0x2b, 0x7d, 0xea, 0xa8, 0xa7, 0x25, 0x9e, 0x7d, 0xaf, 0x29, + 0x46, 0x00, 0x82, 0xeb, 0x20, 0x83, 0xed, 0xc6, 0xb6, 0xaf, 0x66, 0x26, 0x40, 0x73, 0x08, 0xbc, + 0x02, 0xf2, 0x0d, 0xd3, 0xc3, 0x75, 0xca, 0x99, 0x9a, 0x2d, 0x2a, 0x2b, 0x73, 0xd5, 0x85, 0x72, + 0x18, 0xda, 0x8d, 0x60, 0xca, 0xe8, 0x5b, 0xd1, 0xd7, 0x73, 0x11, 0x69, 0xa9, 0x39, 0xc6, 0x04, + 0x6b, 0xc3, 0x12, 0xc8, 0xfa, 0x2d, 0xe4, 0x35, 0x7c, 0x75, 0xaa, 0x98, 0x5a, 0xc9, 0xeb, 0xe0, + 0xa4, 0xa7, 0x89, 0x11, 0x43, 0xfc, 0xc3, 0x77, 0x40, 0xda, 0xb5, 0x90, 0xad, 0x02, 0xb6, 0xca, + 0xf9, 0xb2, 0xc4, 0xf9, 0x43, 0x0b, 0xd9, 0xfa, 0x8d, 0x6e, 0x4f, 0xbb, 0x26, 0xcb, 0xdd, 0x43, + 0xbb, 0xc8, 0x46, 0x15, 0xcb, 0xd9, 0x33, 0x2b, 0x07, 0xab, 0x15, 0x39, 0x92, 0xd4, 0x51, 0xf9, + 0x11, 0x75, 0x40, 0xa1, 0x06, 0x73, 0x5c, 0xfa, 0x26, 0x09, 0x20, 0x8d, 0xd9, 0xa6, 0xed, 0x13, + 0x64, 0x93, 0x37, 0x09, 0xdd, 0x4d, 0x90, 0xa5, 0x69, 0xb1, 0xed, 0xb3, 
0xe0, 0x8d, 0xcb, 0xa5, + 0xc0, 0x44, 0xc9, 0x4c, 0x4f, 0x44, 0x66, 0x66, 0x28, 0x99, 0xd9, 0x53, 0xc9, 0xcc, 0xfd, 0x56, + 0x64, 0xaa, 0x20, 0x4d, 0x7b, 0x70, 0x1e, 0xa4, 0x3c, 0xf4, 0x98, 0x71, 0x37, 0x63, 0xd0, 0x66, + 0xe9, 0x93, 0x34, 0x98, 0xe1, 0xa9, 0xe1, 0xbb, 0x8e, 0xed, 0x63, 0xba, 0xde, 0x2d, 0xb6, 0xff, + 0x70, 0x86, 0xc5, 0x7a, 0xd9, 0x88, 0x21, 0x66, 0xe0, 0x6d, 0x90, 0xde, 0x40, 0x04, 0x31, 0xb6, + 0xa7, 0xab, 0x8b, 0xf2, 0x7a, 0xa9, 0x2f, 0x3a, 0xa7, 0x2f, 0x51, 0x42, 0x4f, 0x7a, 0xda, 0x5c, + 0x03, 0x11, 0xf4, 0x0f, 0xa7, 0x6d, 0x12, 0xdc, 0x76, 0xc9, 0xa1, 0xc1, 0x90, 0xf0, 0x1a, 0xc8, + 0xdf, 0xf5, 0x3c, 0xc7, 0xdb, 0x3e, 0x74, 0x31, 0x8b, 0x4e, 0x5e, 0x3f, 0x77, 0xd2, 0xd3, 0x16, + 0x70, 0x30, 0x28, 0x21, 0xfa, 0x96, 0xf0, 0x6f, 0x20, 0xc3, 0x3a, 0x2c, 0x1e, 0x79, 0x7d, 0xe1, + 0xa4, 0xa7, 0xfd, 0x89, 0x41, 0x24, 0x73, 0x6e, 0x11, 0x0d, 0x5f, 0x66, 0xac, 0xf0, 0x85, 0x2a, + 0xca, 0xca, 0x2a, 0x52, 0x41, 0xee, 0x00, 0x7b, 0x3e, 0x75, 0x93, 0x63, 0xe3, 0x41, 0x17, 0xde, + 0x01, 0x80, 0x12, 0x63, 0xfa, 0xc4, 0xac, 0xd3, 0x5c, 0xa1, 0x64, 0xcc, 0x96, 0xf9, 0x56, 0x68, + 0x60, 0xbf, 0x63, 0x11, 0x1d, 0x0a, 0x16, 0x24, 0x43, 0x43, 0x6a, 0xc3, 0x4f, 0x15, 0x90, 0xab, + 0x61, 0xd4, 0xc0, 0x9e, 0xaf, 0xe6, 0x8b, 0xa9, 0x95, 0xe9, 0xea, 0x5f, 0xcb, 0xf2, 0xbe, 0xf7, + 0xd0, 0x73, 0xda, 0x98, 0xb4, 0x70, 0xc7, 0x0f, 0x02, 0xc4, 0xad, 0x75, 0xbb, 0xdb, 0xd3, 0xf0, + 0x98, 0x92, 0x18, 0x6b, 0xbb, 0x1d, 0xf9, 0xa8, 0x93, 0x9e, 0xa6, 0xfc, 0xd3, 0x08, 0x56, 0x59, + 0xfa, 0x4e, 0x01, 0x7f, 0xa6, 0x41, 0xde, 0xa2, 0xbe, 0x7d, 0x29, 0x2d, 0xdb, 0x88, 0xd4, 0x5b, + 0xaa, 0x42, 0x45, 0x6e, 0xf0, 0x8e, 0xbc, 0x17, 0x26, 0x7f, 0xd5, 0x5e, 0x98, 0x9a, 0x7c, 0x2f, + 0x0c, 0x72, 0x31, 0x3d, 0x34, 0x17, 0x33, 0xa3, 0x72, 0xb1, 0xf4, 0x41, 0x8a, 0xef, 0x3b, 0xc1, + 0xfb, 0x4d, 0x90, 0x16, 0xf7, 0xc2, 0xb4, 0x48, 0xb1, 0xd5, 0x86, 0x6a, 0xe3, 0xbe, 0x36, 0x1b, + 0xd8, 0x26, 0xe6, 0xae, 0x89, 0xbd, 0x53, 0x92, 0x43, 0x52, 0x5c, 0x2a, 0xaa, 0x38, 0x59, 0x2e, + 0xe9, 0xb3, 
0x20, 0x97, 0x81, 0x1c, 0xc9, 0xbc, 0x41, 0x8e, 0x94, 0x7e, 0x4a, 0x82, 0x25, 0x1a, + 0x91, 0xfb, 0x68, 0x07, 0x5b, 0xff, 0x47, 0xed, 0x09, 0xa3, 0x72, 0x49, 0x8a, 0x4a, 0x5e, 0x87, + 0x7f, 0xb0, 0x3e, 0x1e, 0xeb, 0x1f, 0x29, 0x60, 0x2a, 0xd8, 0xcc, 0x61, 0x19, 0x00, 0x0e, 0x63, + 0xfb, 0x35, 0xe7, 0x7a, 0x8e, 0x82, 0xbd, 0x70, 0xd4, 0x90, 0x2c, 0xe0, 0xbb, 0x20, 0xcb, 0x7b, + 0x22, 0x17, 0xce, 0x49, 0xb9, 0x40, 0x3c, 0x8c, 0xda, 0x77, 0x1a, 0xc8, 0x25, 0xd8, 0xd3, 0x6f, + 0xd0, 0x55, 0x74, 0x7b, 0xda, 0xe5, 0x51, 0x2c, 0x05, 0xb5, 0xa4, 0xc0, 0xd1, 0xf8, 0xf2, 0x67, + 0x1a, 0xe2, 0x09, 0xa5, 0xf7, 0x15, 0x30, 0x4f, 0x17, 0x4a, 0xa9, 0x09, 0x85, 0xb1, 0x01, 0xa6, + 0x3c, 0xd1, 0x66, 0xcb, 0x9d, 0xae, 0x96, 0xca, 0x51, 0x5a, 0x87, 0x50, 0xa9, 0xa7, 0x8f, 0x7a, + 0x9a, 0x62, 0x84, 0x48, 0xb8, 0x1a, 0xa1, 0x31, 0x39, 0x8c, 0x46, 0x0a, 0x49, 0x44, 0x88, 0xfb, + 0x2a, 0x09, 0xe0, 0x26, 0xad, 0xb7, 0xa9, 0xfe, 0xfa, 0x52, 0x7d, 0x12, 0x5b, 0xd1, 0x85, 0x3e, + 0x29, 0x71, 0x7b, 0xfd, 0x56, 0xb7, 0xa7, 0xad, 0x9f, 0xa2, 0x9d, 0x5f, 0xc0, 0x4b, 0x6f, 0x21, + 0xcb, 0x37, 0x79, 0x26, 0xbe, 0x31, 0x5f, 0x24, 0xc1, 0xdc, 0x5b, 0x8e, 0xd5, 0x69, 0xe3, 0x90, + 0x3e, 0x37, 0x46, 0x9f, 0xda, 0xa7, 0x2f, 0x6a, 0xab, 0xaf, 0x77, 0x7b, 0xda, 0xda, 0xb8, 0xd4, + 0x45, 0xb1, 0x67, 0x9a, 0xb6, 0xcf, 0x93, 0x60, 0x71, 0xdb, 0x71, 0xff, 0xb7, 0xc5, 0xce, 0x6b, + 0xd2, 0x36, 0xd9, 0x8a, 0x91, 0xb7, 0xd8, 0x27, 0x8f, 0x22, 0x1e, 0x20, 0xe2, 0x99, 0x4f, 0xf4, + 0xb5, 0x6e, 0x4f, 0xab, 0x8e, 0x4b, 0x5c, 0x1f, 0x77, 0xa6, 0x49, 0x3b, 0x4a, 0x82, 0xa5, 0x47, + 0x1d, 0x64, 0x13, 0xd3, 0xc2, 0x9c, 0xb8, 0x90, 0xb6, 0xf7, 0x62, 0xb4, 0x15, 0xfa, 0xb4, 0x45, + 0x31, 0x82, 0xc0, 0xdb, 0xdd, 0x9e, 0x76, 0x73, 0x5c, 0x02, 0x87, 0x79, 0x38, 0xd3, 0x54, 0x7e, + 0x99, 0x04, 0x73, 0x5b, 0xbc, 0x9a, 0x0a, 0x5e, 0xe2, 0x60, 0x08, 0x85, 0xf2, 0x65, 0x84, 0xbb, + 0x53, 0x8e, 0x22, 0x26, 0x4b, 0xde, 0x28, 0xf6, 0x4c, 0x93, 0xf7, 0x6d, 0x12, 0x2c, 0x6d, 0x60, + 0x82, 0xeb, 0x04, 0x37, 0xee, 0x99, 0xd8, 0x92, 
0x48, 0x7c, 0xaa, 0xc4, 0x58, 0x2c, 0x4a, 0x47, + 0x99, 0xa1, 0x20, 0x5d, 0xef, 0xf6, 0xb4, 0x5b, 0xe3, 0xf2, 0x38, 0xdc, 0xc7, 0xef, 0x86, 0x4f, + 0x56, 0x39, 0x4e, 0xca, 0x67, 0x14, 0xf4, 0x66, 0x7c, 0x46, 0x7d, 0x9c, 0x69, 0x3e, 0x3f, 0xcc, + 0x82, 0x59, 0x76, 0xad, 0x10, 0xd2, 0xf8, 0x77, 0x20, 0x4a, 0x6d, 0xc1, 0x21, 0x0c, 0x8e, 0x67, + 0x9e, 0x5b, 0x2f, 0x6f, 0x89, 0x22, 0x9c, 0x5b, 0xc0, 0xeb, 0x20, 0xeb, 0xb3, 0x43, 0x90, 0xa8, + 0xa2, 0x0a, 0x83, 0x77, 0x06, 0xd1, 0xe3, 0x56, 0x2d, 0x61, 0x08, 0x7b, 0x78, 0x13, 0x64, 0x2d, + 0xc6, 0xa2, 0x38, 0x04, 0x96, 0x06, 0x91, 0xf1, 0x63, 0x01, 0x45, 0x73, 0x0c, 0x5c, 0x03, 0x19, + 0x56, 0xae, 0x89, 0xbb, 0xb8, 0xc8, 0x63, 0xe3, 0x45, 0x53, 0x2d, 0x61, 0x70, 0x73, 0x58, 0x05, + 0x69, 0xd7, 0x73, 0xda, 0xa2, 0x74, 0xbe, 0x30, 0xf8, 0x4c, 0xb9, 0xd6, 0xac, 0x25, 0x0c, 0x66, + 0x0b, 0xaf, 0xd2, 0xd3, 0x2e, 0x2d, 0x52, 0x7d, 0x76, 0x81, 0x40, 0x2b, 0x94, 0x01, 0x98, 0x04, + 0x09, 0x4c, 0xe1, 0x55, 0x90, 0x3d, 0x60, 0x25, 0x88, 0xb8, 0xfd, 0x59, 0x96, 0x41, 0xd1, 0xe2, + 0x84, 0xbe, 0x17, 0xb7, 0x85, 0xf7, 0xc0, 0x0c, 0x71, 0xdc, 0xbd, 0xe0, 0x4b, 0x2f, 0x2e, 0x1f, + 0x8a, 0x32, 0x76, 0x58, 0x25, 0x50, 0x4b, 0x18, 0x11, 0x1c, 0x7c, 0x08, 0xe6, 0xf7, 0x23, 0x9f, + 0x21, 0xec, 0xb3, 0x1b, 0xcd, 0x01, 0x9e, 0x87, 0x7f, 0x20, 0x6b, 0x09, 0x23, 0x86, 0x86, 0x1b, + 0x60, 0xce, 0x8f, 0xec, 0xca, 0xe2, 0x8a, 0x30, 0xf2, 0x5e, 0xd1, 0x7d, 0xbb, 0x96, 0x30, 0x06, + 0x30, 0xf0, 0x3e, 0x98, 0x6b, 0x44, 0xf6, 0x24, 0x75, 0x3a, 0xbe, 0xaa, 0xe1, 0xbb, 0x16, 0xf5, + 0x16, 0xc5, 0xca, 0xde, 0x78, 0x46, 0xaa, 0x33, 0xa3, 0xbd, 0x45, 0x73, 0x56, 0xf6, 0xc6, 0x67, + 0x74, 0xd0, 0xdf, 0x3d, 0x4a, 0x5f, 0x67, 0xc0, 0x8c, 0xc8, 0x0a, 0x7e, 0x11, 0xf2, 0xef, 0x50, + 0xe8, 0x3c, 0x29, 0x2e, 0x8e, 0x12, 0x3a, 0x33, 0x97, 0x74, 0xfe, 0xaf, 0x50, 0xe7, 0x3c, 0x43, + 0x96, 0xfa, 0x3b, 0x12, 0x7b, 0xae, 0x84, 0x10, 0xda, 0x5e, 0x0d, 0xb4, 0xcd, 0x13, 0xe3, 0xfc, + 0xf0, 0xe3, 0x44, 0x80, 0x12, 0xc2, 0x5e, 0x07, 0x39, 0x93, 0xdf, 0xa8, 0x0e, 0x4b, 
0x89, 0xf8, + 0x85, 0x2b, 0x95, 0xaa, 0x00, 0xc0, 0xd5, 0xbe, 0xc0, 0x79, 0x5e, 0x9c, 0x8b, 0x0b, 0x3c, 0x04, + 0x05, 0xfa, 0xbe, 0x12, 0xea, 0x3b, 0x2b, 0x30, 0xb1, 0xd2, 0x3b, 0x7c, 0x31, 0x21, 0xee, 0xbb, + 0x60, 0x36, 0x90, 0x03, 0x9b, 0x12, 0xea, 0xbe, 0x38, 0xaa, 0x72, 0x08, 0xf0, 0x51, 0x14, 0xdc, + 0x8c, 0x69, 0x88, 0x2b, 0x5b, 0x1b, 0xfd, 0xed, 0x0c, 0x3c, 0x0d, 0x0a, 0x68, 0x33, 0x26, 0x20, + 0x30, 0xca, 0x55, 0x20, 0x9f, 0x98, 0x2b, 0x3e, 0x01, 0x6b, 0x60, 0xaa, 0x8d, 0x09, 0x6a, 0x20, + 0x82, 0xd4, 0x1c, 0xdb, 0xf9, 0x2f, 0x45, 0x33, 0xad, 0x2f, 0xa6, 0xf2, 0x03, 0x61, 0x78, 0xd7, + 0x26, 0xde, 0xa1, 0x38, 0x6a, 0x86, 0xe8, 0xe5, 0xff, 0x80, 0xd9, 0x88, 0x01, 0x9c, 0x07, 0xa9, + 0x3d, 0x1c, 0xdc, 0x8c, 0xd3, 0x26, 0x5c, 0x04, 0x99, 0x03, 0x64, 0x75, 0x30, 0xd3, 0x54, 0xde, + 0xe0, 0x9d, 0xf5, 0xe4, 0x75, 0x45, 0xcf, 0x83, 0x9c, 0xc7, 0x9f, 0xa2, 0x37, 0x9f, 0xbf, 0x2c, + 0x24, 0x5e, 0xbc, 0x2c, 0x24, 0x5e, 0xbf, 0x2c, 0x28, 0x4f, 0x8f, 0x0b, 0xca, 0xc7, 0xc7, 0x05, + 0xe5, 0xe8, 0xb8, 0xa0, 0x3c, 0x3f, 0x2e, 0x28, 0x3f, 0x1c, 0x17, 0x94, 0x1f, 0x8f, 0x0b, 0x89, + 0xd7, 0xc7, 0x05, 0xe5, 0xd9, 0xab, 0x42, 0xe2, 0xf9, 0xab, 0x42, 0xe2, 0xc5, 0xab, 0x42, 0xe2, + 0xed, 0x2b, 0x13, 0x7f, 0x84, 0x76, 0xb2, 0x8c, 0xa9, 0xd5, 0x9f, 0x03, 0x00, 0x00, 0xff, 0xff, + 0x84, 0x24, 0xe9, 0xad, 0x3c, 0x1b, 0x00, 0x00, } func (this *LokiRequest) Equal(that interface{}) bool { @@ -1798,6 +2003,114 @@ func (this *QuantileSketchResponse) Equal(that interface{}) bool { } return true } +func (this *ShardsResponse) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*ShardsResponse) + if !ok { + that2, ok := that.(ShardsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if that1.Response == nil { + if this.Response != nil { + return false + } + } else if !this.Response.Equal(*that1.Response) { + return false + } + if 
len(this.Headers) != len(that1.Headers) { + return false + } + for i := range this.Headers { + if !this.Headers[i].Equal(that1.Headers[i]) { + return false + } + } + return true +} +func (this *DetectedFieldsResponse) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedFieldsResponse) + if !ok { + that2, ok := that.(DetectedFieldsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if that1.Response == nil { + if this.Response != nil { + return false + } + } else if !this.Response.Equal(*that1.Response) { + return false + } + if len(this.Headers) != len(that1.Headers) { + return false + } + for i := range this.Headers { + if !this.Headers[i].Equal(that1.Headers[i]) { + return false + } + } + return true +} +func (this *DetectedLabelsResponse) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedLabelsResponse) + if !ok { + that2, ok := that.(DetectedLabelsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if that1.Response == nil { + if this.Response != nil { + return false + } + } else if !this.Response.Equal(*that1.Response) { + return false + } + if len(this.Headers) != len(that1.Headers) { + return false + } + for i := range this.Headers { + if !this.Headers[i].Equal(that1.Headers[i]) { + return false + } + } + return true +} func (this *QueryResponse) Equal(that interface{}) bool { if that == nil { return this == nil @@ -2023,6 +2336,78 @@ func (this *QueryResponse_QuantileSketches) Equal(that interface{}) bool { } return true } +func (this *QueryResponse_ShardsResponse) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*QueryResponse_ShardsResponse) + if !ok { + that2, ok := 
that.(QueryResponse_ShardsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.ShardsResponse.Equal(that1.ShardsResponse) { + return false + } + return true +} +func (this *QueryResponse_DetectedFields) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*QueryResponse_DetectedFields) + if !ok { + that2, ok := that.(QueryResponse_DetectedFields) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.DetectedFields.Equal(that1.DetectedFields) { + return false + } + return true +} +func (this *QueryResponse_DetectedLabels) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*QueryResponse_DetectedLabels) + if !ok { + that2, ok := that.(QueryResponse_DetectedLabels) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.DetectedLabels.Equal(that1.DetectedLabels) { + return false + } + return true +} func (this *QueryRequest) Equal(that interface{}) bool { if that == nil { return this == nil @@ -2205,6 +2590,78 @@ func (this *QueryRequest_Volume) Equal(that interface{}) bool { } return true } +func (this *QueryRequest_ShardsRequest) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*QueryRequest_ShardsRequest) + if !ok { + that2, ok := that.(QueryRequest_ShardsRequest) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.ShardsRequest.Equal(that1.ShardsRequest) { + return false + } + return true +} +func (this *QueryRequest_DetectedFields) Equal(that interface{}) bool { + if that == nil { + return this == nil + } 
+ + that1, ok := that.(*QueryRequest_DetectedFields) + if !ok { + that2, ok := that.(QueryRequest_DetectedFields) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.DetectedFields.Equal(that1.DetectedFields) { + return false + } + return true +} +func (this *QueryRequest_DetectedLabels) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*QueryRequest_DetectedLabels) + if !ok { + that2, ok := that.(QueryRequest_DetectedLabels) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.DetectedLabels.Equal(that1.DetectedLabels) { + return false + } + return true +} func (this *LokiRequest) GoString() string { if this == nil { return "nil" @@ -2384,11 +2841,44 @@ func (this *QuantileSketchResponse) GoString() string { s = append(s, "}") return strings.Join(s, "") } +func (this *ShardsResponse) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 6) + s = append(s, "&queryrange.ShardsResponse{") + s = append(s, "Response: "+fmt.Sprintf("%#v", this.Response)+",\n") + s = append(s, "Headers: "+fmt.Sprintf("%#v", this.Headers)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *DetectedFieldsResponse) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 6) + s = append(s, "&queryrange.DetectedFieldsResponse{") + s = append(s, "Response: "+fmt.Sprintf("%#v", this.Response)+",\n") + s = append(s, "Headers: "+fmt.Sprintf("%#v", this.Headers)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *DetectedLabelsResponse) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 6) + s = append(s, "&queryrange.DetectedLabelsResponse{") + s = append(s, "Response: "+fmt.Sprintf("%#v", this.Response)+",\n") + s 
= append(s, "Headers: "+fmt.Sprintf("%#v", this.Headers)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} func (this *QueryResponse) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 13) + s := make([]string, 0, 16) s = append(s, "&queryrange.QueryResponse{") if this.Status != nil { s = append(s, "Status: "+fmt.Sprintf("%#v", this.Status)+",\n") @@ -2463,11 +2953,35 @@ func (this *QueryResponse_QuantileSketches) GoString() string { `QuantileSketches:` + fmt.Sprintf("%#v", this.QuantileSketches) + `}`}, ", ") return s } +func (this *QueryResponse_ShardsResponse) GoString() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&queryrange.QueryResponse_ShardsResponse{` + + `ShardsResponse:` + fmt.Sprintf("%#v", this.ShardsResponse) + `}`}, ", ") + return s +} +func (this *QueryResponse_DetectedFields) GoString() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&queryrange.QueryResponse_DetectedFields{` + + `DetectedFields:` + fmt.Sprintf("%#v", this.DetectedFields) + `}`}, ", ") + return s +} +func (this *QueryResponse_DetectedLabels) GoString() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&queryrange.QueryResponse_DetectedLabels{` + + `DetectedLabels:` + fmt.Sprintf("%#v", this.DetectedLabels) + `}`}, ", ") + return s +} func (this *QueryRequest) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 11) + s := make([]string, 0, 14) s = append(s, "&queryrange.QueryRequest{") if this.Request != nil { s = append(s, "Request: "+fmt.Sprintf("%#v", this.Request)+",\n") @@ -2536,6 +3050,30 @@ func (this *QueryRequest_Volume) GoString() string { `Volume:` + fmt.Sprintf("%#v", this.Volume) + `}`}, ", ") return s } +func (this *QueryRequest_ShardsRequest) GoString() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&queryrange.QueryRequest_ShardsRequest{` + + `ShardsRequest:` + fmt.Sprintf("%#v", 
this.ShardsRequest) + `}`}, ", ") + return s +} +func (this *QueryRequest_DetectedFields) GoString() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&queryrange.QueryRequest_DetectedFields{` + + `DetectedFields:` + fmt.Sprintf("%#v", this.DetectedFields) + `}`}, ", ") + return s +} +func (this *QueryRequest_DetectedLabels) GoString() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&queryrange.QueryRequest_DetectedLabels{` + + `DetectedLabels:` + fmt.Sprintf("%#v", this.DetectedLabels) + `}`}, ", ") + return s +} func valueToGoStringQueryrange(v interface{}, typ string) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -3327,7 +3865,7 @@ func (m *QuantileSketchResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) return len(dAtA) - i, nil } -func (m *QueryResponse) Marshal() (dAtA []byte, err error) { +func (m *ShardsResponse) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -3337,16 +3875,30 @@ func (m *QueryResponse) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *QueryResponse) MarshalTo(dAtA []byte) (int, error) { +func (m *ShardsResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *QueryResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *ShardsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l + if len(m.Headers) > 0 { + for iNdEx := len(m.Headers) - 1; iNdEx >= 0; iNdEx-- { + { + size := m.Headers[iNdEx].Size() + i -= size + if _, err := m.Headers[iNdEx].MarshalTo(dAtA[i:]); err != nil { + return 0, err + } + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + } if m.Response != nil { { size := m.Response.Size() @@ -3354,15 +3906,6 @@ func (m *QueryResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { if _, err := 
m.Response.MarshalTo(dAtA[i:]); err != nil { return 0, err } - } - } - if m.Status != nil { - { - size, err := m.Status.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size i = encodeVarintQueryrange(dAtA, i, uint64(size)) } i-- @@ -3371,72 +3914,214 @@ func (m *QueryResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } -func (m *QueryResponse_Series) MarshalTo(dAtA []byte) (int, error) { - return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +func (m *DetectedFieldsResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil } -func (m *QueryResponse_Series) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *DetectedFieldsResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedFieldsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) - if m.Series != nil { - { - size, err := m.Series.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err + _ = i + var l int + _ = l + if len(m.Headers) > 0 { + for iNdEx := len(m.Headers) - 1; iNdEx >= 0; iNdEx-- { + { + size := m.Headers[iNdEx].Size() + i -= size + if _, err := m.Headers[iNdEx].MarshalTo(dAtA[i:]); err != nil { + return 0, err + } + i = encodeVarintQueryrange(dAtA, i, uint64(size)) } - i -= size - i = encodeVarintQueryrange(dAtA, i, uint64(size)) + i-- + dAtA[i] = 0x12 } - i-- - dAtA[i] = 0x12 } - return len(dAtA) - i, nil -} -func (m *QueryResponse_Labels) MarshalTo(dAtA []byte) (int, error) { - return m.MarshalToSizedBuffer(dAtA[:m.Size()]) -} - -func (m *QueryResponse_Labels) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - if m.Labels != nil { + if m.Response != nil { { - size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { + size := m.Response.Size() + i -= size 
+ if _, err := m.Response.MarshalTo(dAtA[i:]); err != nil { return 0, err } - i -= size i = encodeVarintQueryrange(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0x1a + dAtA[i] = 0xa } return len(dAtA) - i, nil } -func (m *QueryResponse_Stats) MarshalTo(dAtA []byte) (int, error) { - return m.MarshalToSizedBuffer(dAtA[:m.Size()]) + +func (m *DetectedLabelsResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil } -func (m *QueryResponse_Stats) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *DetectedLabelsResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedLabelsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) - if m.Stats != nil { + _ = i + var l int + _ = l + if len(m.Headers) > 0 { + for iNdEx := len(m.Headers) - 1; iNdEx >= 0; iNdEx-- { + { + size := m.Headers[iNdEx].Size() + i -= size + if _, err := m.Headers[iNdEx].MarshalTo(dAtA[i:]); err != nil { + return 0, err + } + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + } + if m.Response != nil { { - size, err := m.Stats.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { + size := m.Response.Size() + i -= size + if _, err := m.Response.MarshalTo(dAtA[i:]); err != nil { return 0, err } - i -= size i = encodeVarintQueryrange(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0x22 + dAtA[i] = 0xa } return len(dAtA) - i, nil } -func (m *QueryResponse_Prom) MarshalTo(dAtA []byte) (int, error) { - return m.MarshalToSizedBuffer(dAtA[:m.Size()]) + +func (m *QueryResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil } -func (m *QueryResponse_Prom) MarshalToSizedBuffer(dAtA []byte) (int, error) { 
- i := len(dAtA) +func (m *QueryResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *QueryResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Response != nil { + { + size := m.Response.Size() + i -= size + if _, err := m.Response.MarshalTo(dAtA[i:]); err != nil { + return 0, err + } + } + } + if m.Status != nil { + { + size, err := m.Status.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *QueryResponse_Series) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryResponse_Series) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.Series != nil { + { + size, err := m.Series.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + return len(dAtA) - i, nil +} +func (m *QueryResponse_Labels) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryResponse_Labels) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.Labels != nil { + { + size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x1a + } + return len(dAtA) - i, nil +} +func (m *QueryResponse_Stats) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryResponse_Stats) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.Stats != nil { + { + size, err := m.Stats.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, 
uint64(size)) + } + i-- + dAtA[i] = 0x22 + } + return len(dAtA) - i, nil +} +func (m *QueryResponse_Prom) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryResponse_Prom) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) if m.Prom != nil { { size, err := m.Prom.MarshalToSizedBuffer(dAtA[:i]) @@ -3531,6 +4216,66 @@ func (m *QueryResponse_QuantileSketches) MarshalToSizedBuffer(dAtA []byte) (int, } return len(dAtA) - i, nil } +func (m *QueryResponse_ShardsResponse) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryResponse_ShardsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.ShardsResponse != nil { + { + size, err := m.ShardsResponse.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x52 + } + return len(dAtA) - i, nil +} +func (m *QueryResponse_DetectedFields) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryResponse_DetectedFields) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.DetectedFields != nil { + { + size, err := m.DetectedFields.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x5a + } + return len(dAtA) - i, nil +} +func (m *QueryResponse_DetectedLabels) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryResponse_DetectedLabels) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.DetectedLabels != nil { + { + size, err := m.DetectedLabels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x62 + } + return len(dAtA) - i, nil +} 
func (m *QueryRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -3551,6 +4296,15 @@ func (m *QueryRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.Request != nil { + { + size := m.Request.Size() + i -= size + if _, err := m.Request.MarshalTo(dAtA[i:]); err != nil { + return 0, err + } + } + } if len(m.Metadata) > 0 { for k := range m.Metadata { v := m.Metadata[k] @@ -3570,15 +4324,6 @@ func (m *QueryRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { dAtA[i] = 0x3a } } - if m.Request != nil { - { - size := m.Request.Size() - i -= size - if _, err := m.Request.MarshalTo(dAtA[i:]); err != nil { - return 0, err - } - } - } return len(dAtA) - i, nil } @@ -3702,6 +4447,66 @@ func (m *QueryRequest_Volume) MarshalToSizedBuffer(dAtA []byte) (int, error) { } return len(dAtA) - i, nil } +func (m *QueryRequest_ShardsRequest) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryRequest_ShardsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.ShardsRequest != nil { + { + size, err := m.ShardsRequest.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x42 + } + return len(dAtA) - i, nil +} +func (m *QueryRequest_DetectedFields) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryRequest_DetectedFields) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.DetectedFields != nil { + { + size, err := m.DetectedFields.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x4a + } + return len(dAtA) - i, nil +} +func (m *QueryRequest_DetectedLabels) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + 
+func (m *QueryRequest_DetectedLabels) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.DetectedLabels != nil { + { + size, err := m.DetectedLabels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x52 + } + return len(dAtA) - i, nil +} func encodeVarintQueryrange(dAtA []byte, offset int, v uint64) int { offset -= sovQueryrange(v) base := offset @@ -4043,58 +4848,115 @@ func (m *QuantileSketchResponse) Size() (n int) { return n } -func (m *QueryResponse) Size() (n int) { +func (m *ShardsResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l - if m.Status != nil { - l = m.Status.Size() + if m.Response != nil { + l = m.Response.Size() n += 1 + l + sovQueryrange(uint64(l)) } - if m.Response != nil { - n += m.Response.Size() + if len(m.Headers) > 0 { + for _, e := range m.Headers { + l = e.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } } return n } -func (m *QueryResponse_Series) Size() (n int) { +func (m *DetectedFieldsResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l - if m.Series != nil { - l = m.Series.Size() + if m.Response != nil { + l = m.Response.Size() n += 1 + l + sovQueryrange(uint64(l)) } + if len(m.Headers) > 0 { + for _, e := range m.Headers { + l = e.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + } return n } -func (m *QueryResponse_Labels) Size() (n int) { + +func (m *DetectedLabelsResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l - if m.Labels != nil { - l = m.Labels.Size() + if m.Response != nil { + l = m.Response.Size() n += 1 + l + sovQueryrange(uint64(l)) } + if len(m.Headers) > 0 { + for _, e := range m.Headers { + l = e.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + } return n } -func (m *QueryResponse_Stats) Size() (n int) { + +func (m *QueryResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l - if m.Stats != nil { - l = m.Stats.Size() + 
if m.Status != nil { + l = m.Status.Size() n += 1 + l + sovQueryrange(uint64(l)) } - return n -} + if m.Response != nil { + n += m.Response.Size() + } + return n +} + +func (m *QueryResponse_Series) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Series != nil { + l = m.Series.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} +func (m *QueryResponse_Labels) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Labels != nil { + l = m.Labels.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} +func (m *QueryResponse_Stats) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Stats != nil { + l = m.Stats.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} func (m *QueryResponse_Prom) Size() (n int) { if m == nil { return 0 @@ -4155,6 +5017,42 @@ func (m *QueryResponse_QuantileSketches) Size() (n int) { } return n } +func (m *QueryResponse_ShardsResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.ShardsResponse != nil { + l = m.ShardsResponse.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} +func (m *QueryResponse_DetectedFields) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.DetectedFields != nil { + l = m.DetectedFields.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} +func (m *QueryResponse_DetectedLabels) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.DetectedLabels != nil { + l = m.DetectedLabels.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} func (m *QueryRequest) Size() (n int) { if m == nil { return 0 @@ -4247,6 +5145,42 @@ func (m *QueryRequest_Volume) Size() (n int) { } return n } +func (m *QueryRequest_ShardsRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.ShardsRequest != nil { + l = m.ShardsRequest.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} +func (m 
*QueryRequest_DetectedFields) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.DetectedFields != nil { + l = m.DetectedFields.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} +func (m *QueryRequest_DetectedLabels) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.DetectedLabels != nil { + l = m.DetectedLabels.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} func sovQueryrange(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 @@ -4430,6 +5364,39 @@ func (this *QuantileSketchResponse) String() string { }, "") return s } +func (this *ShardsResponse) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&ShardsResponse{`, + `Response:` + fmt.Sprintf("%v", this.Response) + `,`, + `Headers:` + fmt.Sprintf("%v", this.Headers) + `,`, + `}`, + }, "") + return s +} +func (this *DetectedFieldsResponse) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&DetectedFieldsResponse{`, + `Response:` + fmt.Sprintf("%v", this.Response) + `,`, + `Headers:` + fmt.Sprintf("%v", this.Headers) + `,`, + `}`, + }, "") + return s +} +func (this *DetectedLabelsResponse) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&DetectedLabelsResponse{`, + `Response:` + fmt.Sprintf("%v", this.Response) + `,`, + `Headers:` + fmt.Sprintf("%v", this.Headers) + `,`, + `}`, + }, "") + return s +} func (this *QueryResponse) String() string { if this == nil { return "nil" @@ -4521,6 +5488,36 @@ func (this *QueryResponse_QuantileSketches) String() string { }, "") return s } +func (this *QueryResponse_ShardsResponse) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&QueryResponse_ShardsResponse{`, + `ShardsResponse:` + strings.Replace(fmt.Sprintf("%v", this.ShardsResponse), "ShardsResponse", "ShardsResponse", 1) + `,`, + `}`, + }, "") + return s +} +func (this 
*QueryResponse_DetectedFields) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&QueryResponse_DetectedFields{`, + `DetectedFields:` + strings.Replace(fmt.Sprintf("%v", this.DetectedFields), "DetectedFieldsResponse", "DetectedFieldsResponse", 1) + `,`, + `}`, + }, "") + return s +} +func (this *QueryResponse_DetectedLabels) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&QueryResponse_DetectedLabels{`, + `DetectedLabels:` + strings.Replace(fmt.Sprintf("%v", this.DetectedLabels), "DetectedLabelsResponse", "DetectedLabelsResponse", 1) + `,`, + `}`, + }, "") + return s +} func (this *QueryRequest) String() string { if this == nil { return "nil" @@ -4602,6 +5599,36 @@ func (this *QueryRequest_Volume) String() string { }, "") return s } +func (this *QueryRequest_ShardsRequest) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&QueryRequest_ShardsRequest{`, + `ShardsRequest:` + strings.Replace(fmt.Sprintf("%v", this.ShardsRequest), "ShardsRequest", "logproto.ShardsRequest", 1) + `,`, + `}`, + }, "") + return s +} +func (this *QueryRequest_DetectedFields) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&QueryRequest_DetectedFields{`, + `DetectedFields:` + strings.Replace(fmt.Sprintf("%v", this.DetectedFields), "DetectedFieldsRequest", "logproto.DetectedFieldsRequest", 1) + `,`, + `}`, + }, "") + return s +} +func (this *QueryRequest_DetectedLabels) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&QueryRequest_DetectedLabels{`, + `DetectedLabels:` + strings.Replace(fmt.Sprintf("%v", this.DetectedLabels), "DetectedLabelsRequest", "logproto.DetectedLabelsRequest", 1) + `,`, + `}`, + }, "") + return s +} func valueToStringQueryrange(v interface{}) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -4907,7 +5934,7 @@ func (m *LokiRequest) Unmarshal(dAtA []byte) error { return 
io.ErrUnexpectedEOF } if m.Plan == nil { - m.Plan = &github_com_grafana_loki_pkg_querier_plan.QueryPlan{} + m.Plan = &github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan{} } if err := m.Plan.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -5163,7 +6190,7 @@ func (m *LokiInstantRequest) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Plan == nil { - m.Plan = &github_com_grafana_loki_pkg_querier_plan.QueryPlan{} + m.Plan = &github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan{} } if err := m.Plan.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -5557,7 +6584,7 @@ func (m *LokiResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -5944,7 +6971,7 @@ func (m *LokiSeriesResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -6120,7 +7147,368 @@ func (m *LokiLabelNamesResponse) Unmarshal(dAtA []byte) error { } case 4: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return 
io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Statistics", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.Statistics.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipQueryrange(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthQueryrange + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthQueryrange + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *LokiData) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) 
<< shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: LokiData: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: LokiData: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ResultType", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ResultType = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Result", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Result = append(m.Result, github_com_grafana_loki_pkg_push.Stream{}) + if err := m.Result[len(m.Result)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipQueryrange(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthQueryrange + } + if (iNdEx + skippy) < 0 { + 
return ErrInvalidLengthQueryrange + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *LokiPromResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: LokiPromResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: LokiPromResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Response", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Response == nil { + m.Response = &queryrangebase.PrometheusResponse{} + } + if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Statistics", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 
0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.Statistics.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipQueryrange(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthQueryrange + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthQueryrange + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *IndexStatsResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: IndexStatsResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: IndexStatsResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Response", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -6147,14 +7535,16 @@ func (m *LokiLabelNamesResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) - if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + if m.Response == nil { + m.Response = 
&github_com_grafana_loki_v3_pkg_logproto.IndexStatsResponse{} + } + if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex - case 5: + case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Statistics", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -6181,7 +7571,8 @@ func (m *LokiLabelNamesResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if err := m.Statistics.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -6209,7 +7600,7 @@ func (m *LokiLabelNamesResponse) Unmarshal(dAtA []byte) error { } return nil } -func (m *LokiData) Unmarshal(dAtA []byte) error { +func (m *VolumeResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -6232,17 +7623,17 @@ func (m *LokiData) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: LokiData: wiretype end group for non-group") + return fmt.Errorf("proto: VolumeResponse: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: LokiData: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: VolumeResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ResultType", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Response", wireType) } - var stringLen uint64 + var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowQueryrange @@ -6252,27 +7643,31 @@ func (m 
*LokiData) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - stringLen |= uint64(b&0x7F) << shift + msglen |= int(b&0x7F) << shift if b < 0x80 { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { + if msglen < 0 { return ErrInvalidLengthQueryrange } - postIndex := iNdEx + intStringLen + postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthQueryrange } if postIndex > l { return io.ErrUnexpectedEOF } - m.ResultType = string(dAtA[iNdEx:postIndex]) + if m.Response == nil { + m.Response = &github_com_grafana_loki_v3_pkg_logproto.VolumeResponse{} + } + if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Result", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -6299,8 +7694,8 @@ func (m *LokiData) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Result = append(m.Result, github_com_grafana_loki_pkg_push.Stream{}) - if err := m.Result[len(m.Result)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -6328,7 +7723,7 @@ func (m *LokiData) Unmarshal(dAtA []byte) error { } return nil } -func (m *LokiPromResponse) Unmarshal(dAtA []byte) error { +func (m *TopKSketchesResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -6351,10 +7746,10 @@ func (m *LokiPromResponse) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: LokiPromResponse: wiretype end group for non-group") + return fmt.Errorf("proto: 
TopKSketchesResponse: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: LokiPromResponse: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: TopKSketchesResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: @@ -6387,7 +7782,7 @@ func (m *LokiPromResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Response == nil { - m.Response = &queryrangebase.PrometheusResponse{} + m.Response = &github_com_grafana_loki_v3_pkg_logproto.TopKMatrix{} } if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -6395,7 +7790,7 @@ func (m *LokiPromResponse) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 2: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Statistics", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -6422,7 +7817,8 @@ func (m *LokiPromResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if err := m.Statistics.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -6450,7 +7846,7 @@ func (m *LokiPromResponse) Unmarshal(dAtA []byte) error { } return nil } -func (m *IndexStatsResponse) Unmarshal(dAtA []byte) error { +func (m *QuantileSketchResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -6473,10 +7869,10 @@ func (m *IndexStatsResponse) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: IndexStatsResponse: wiretype end group for non-group") + return fmt.Errorf("proto: QuantileSketchResponse: wiretype end 
group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: IndexStatsResponse: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: QuantileSketchResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: @@ -6509,7 +7905,7 @@ func (m *IndexStatsResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Response == nil { - m.Response = &github_com_grafana_loki_pkg_logproto.IndexStatsResponse{} + m.Response = &github_com_grafana_loki_v3_pkg_logproto.QuantileSketchMatrix{} } if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -6544,7 +7940,7 @@ func (m *IndexStatsResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -6573,7 +7969,7 @@ func (m *IndexStatsResponse) Unmarshal(dAtA []byte) error { } return nil } -func (m *VolumeResponse) Unmarshal(dAtA []byte) error { +func (m *ShardsResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -6596,10 +7992,10 @@ func (m *VolumeResponse) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: VolumeResponse: wiretype end group for non-group") + return fmt.Errorf("proto: ShardsResponse: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: VolumeResponse: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: ShardsResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: @@ -6632,7 +8028,7 @@ func (m *VolumeResponse) Unmarshal(dAtA 
[]byte) error { return io.ErrUnexpectedEOF } if m.Response == nil { - m.Response = &github_com_grafana_loki_pkg_logproto.VolumeResponse{} + m.Response = &github_com_grafana_loki_v3_pkg_logproto.ShardsResponse{} } if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -6667,7 +8063,7 @@ func (m *VolumeResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -6696,7 +8092,7 @@ func (m *VolumeResponse) Unmarshal(dAtA []byte) error { } return nil } -func (m *TopKSketchesResponse) Unmarshal(dAtA []byte) error { +func (m *DetectedFieldsResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -6719,10 +8115,10 @@ func (m *TopKSketchesResponse) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: TopKSketchesResponse: wiretype end group for non-group") + return fmt.Errorf("proto: DetectedFieldsResponse: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: TopKSketchesResponse: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: DetectedFieldsResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: @@ -6755,7 +8151,7 @@ func (m *TopKSketchesResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Response == nil { - m.Response = &github_com_grafana_loki_pkg_logproto.TopKMatrix{} + m.Response = &github_com_grafana_loki_v3_pkg_logproto.DetectedFieldsResponse{} } if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ 
-6790,7 +8186,7 @@ func (m *TopKSketchesResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -6819,7 +8215,7 @@ func (m *TopKSketchesResponse) Unmarshal(dAtA []byte) error { } return nil } -func (m *QuantileSketchResponse) Unmarshal(dAtA []byte) error { +func (m *DetectedLabelsResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -6842,10 +8238,10 @@ func (m *QuantileSketchResponse) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: QuantileSketchResponse: wiretype end group for non-group") + return fmt.Errorf("proto: DetectedLabelsResponse: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: QuantileSketchResponse: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: DetectedLabelsResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: @@ -6878,7 +8274,7 @@ func (m *QuantileSketchResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Response == nil { - m.Response = &github_com_grafana_loki_pkg_logproto.QuantileSketchMatrix{} + m.Response = &github_com_grafana_loki_v3_pkg_logproto.DetectedLabelsResponse{} } if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -6913,7 +8309,7 @@ func (m *QuantileSketchResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) 
+ m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -7287,6 +8683,111 @@ func (m *QueryResponse) Unmarshal(dAtA []byte) error { } m.Response = &QueryResponse_QuantileSketches{v} iNdEx = postIndex + case 10: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ShardsResponse", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + v := &ShardsResponse{} + if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + m.Response = &QueryResponse_ShardsResponse{v} + iNdEx = postIndex + case 11: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field DetectedFields", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + v := &DetectedFieldsResponse{} + if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + m.Response = &QueryResponse_DetectedFields{v} + iNdEx = postIndex + case 12: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field 
DetectedLabels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + v := &DetectedLabelsResponse{} + if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + m.Response = &QueryResponse_DetectedLabels{v} + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipQueryrange(dAtA[iNdEx:]) @@ -7677,6 +9178,111 @@ func (m *QueryRequest) Unmarshal(dAtA []byte) error { } m.Metadata[mapkey] = mapvalue iNdEx = postIndex + case 8: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ShardsRequest", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + v := &logproto.ShardsRequest{} + if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + m.Request = &QueryRequest_ShardsRequest{v} + iNdEx = postIndex + case 9: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field DetectedFields", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } 
+ } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + v := &logproto.DetectedFieldsRequest{} + if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + m.Request = &QueryRequest_DetectedFields{v} + iNdEx = postIndex + case 10: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field DetectedLabels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + v := &logproto.DetectedLabelsRequest{} + if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + m.Request = &QueryRequest_DetectedLabels{v} + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipQueryrange(dAtA[iNdEx:]) diff --git a/pkg/querier/queryrange/queryrange.proto b/pkg/querier/queryrange/queryrange.proto index f673464acfc0b..01f48298521c1 100644 --- a/pkg/querier/queryrange/queryrange.proto +++ b/pkg/querier/queryrange/queryrange.proto @@ -5,6 +5,7 @@ package queryrange; import "github.com/gogo/googleapis/google/rpc/status.proto"; import "gogoproto/gogo.proto"; import "google/protobuf/timestamp.proto"; +import "pkg/logproto/indexgateway.proto"; import "pkg/logproto/logproto.proto"; import "pkg/logproto/sketch.proto"; import "pkg/logqlmodel/stats/stats.proto"; @@ -12,7 +13,7 @@ import "pkg/push/push.proto"; import "pkg/querier/queryrange/queryrangebase/definitions/definitions.proto"; import "pkg/querier/queryrange/queryrangebase/queryrange.proto"; -option go_package = 
"github.com/grafana/loki/pkg/querier/queryrange"; +option go_package = "github.com/grafana/loki/v3/pkg/querier/queryrange"; option (gogoproto.marshaler_all) = true; option (gogoproto.sizer_all) = true; option (gogoproto.unmarshaler_all) = true; @@ -33,7 +34,7 @@ message LokiRequest { logproto.Direction direction = 6; string path = 7; repeated string shards = 8 [(gogoproto.jsontag) = "shards"]; - Plan plan = 10 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan"]; + Plan plan = 10 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan"]; } message LokiInstantRequest { @@ -46,7 +47,7 @@ message LokiInstantRequest { logproto.Direction direction = 4; string path = 5; repeated string shards = 6 [(gogoproto.jsontag) = "shards"]; - Plan plan = 7 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan"]; + Plan plan = 7 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan"]; } message Plan { @@ -70,7 +71,7 @@ message LokiResponse { ]; repeated definitions.PrometheusResponseHeader Headers = 9 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; } @@ -97,7 +98,7 @@ message LokiSeriesResponse { uint32 version = 3; repeated definitions.PrometheusResponseHeader Headers = 4 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; stats.Result statistics = 5 [ (gogoproto.nullable) = false, @@ -111,7 +112,7 @@ message LokiLabelNamesResponse { uint32 version = 3; repeated definitions.PrometheusResponseHeader Headers = 
4 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; stats.Result statistics = 5 [ (gogoproto.nullable) = false, @@ -135,34 +136,58 @@ message LokiPromResponse { } message IndexStatsResponse { - logproto.IndexStatsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.IndexStatsResponse"]; + logproto.IndexStatsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.IndexStatsResponse"]; repeated definitions.PrometheusResponseHeader Headers = 2 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; } message VolumeResponse { - logproto.VolumeResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.VolumeResponse"]; + logproto.VolumeResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.VolumeResponse"]; repeated definitions.PrometheusResponseHeader Headers = 2 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; } message TopKSketchesResponse { - logproto.TopKMatrix response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.TopKMatrix"]; + logproto.TopKMatrix response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.TopKMatrix"]; repeated definitions.PrometheusResponseHeader Headers = 2 [ 
(gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; } message QuantileSketchResponse { - logproto.QuantileSketchMatrix response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.QuantileSketchMatrix"]; + logproto.QuantileSketchMatrix response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.QuantileSketchMatrix"]; repeated definitions.PrometheusResponseHeader Headers = 2 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + ]; +} + +message ShardsResponse { + indexgatewaypb.ShardsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.ShardsResponse"]; + repeated definitions.PrometheusResponseHeader Headers = 2 [ + (gogoproto.jsontag) = "-", + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + ]; +} + +message DetectedFieldsResponse { + logproto.DetectedFieldsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.DetectedFieldsResponse"]; + repeated definitions.PrometheusResponseHeader Headers = 2 [ + (gogoproto.jsontag) = "-", + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + ]; +} + +message DetectedLabelsResponse { + logproto.DetectedLabelsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.DetectedLabelsResponse"]; + repeated definitions.PrometheusResponseHeader Headers = 2 [ + 
(gogoproto.jsontag) = "-", + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; } @@ -177,6 +202,9 @@ message QueryResponse { VolumeResponse volume = 7; TopKSketchesResponse topkSketches = 8; QuantileSketchResponse quantileSketches = 9; + ShardsResponse shardsResponse = 10; + DetectedFieldsResponse detectedFields = 11; + DetectedLabelsResponse detectedLabels = 12; } } @@ -188,6 +216,9 @@ message QueryRequest { LokiInstantRequest instant = 4; LokiRequest streams = 5; logproto.VolumeRequest volume = 6; + indexgatewaypb.ShardsRequest shardsRequest = 8; + logproto.DetectedFieldsRequest detectedFields = 9; + logproto.DetectedLabelsRequest detectedLabels = 10; } map metadata = 7 [(gogoproto.nullable) = false]; } diff --git a/pkg/querier/queryrange/queryrangebase/alias.go b/pkg/querier/queryrange/queryrangebase/alias.go index 4b4e219202f0b..7aab7a200eb05 100644 --- a/pkg/querier/queryrange/queryrangebase/alias.go +++ b/pkg/querier/queryrange/queryrangebase/alias.go @@ -1,8 +1,8 @@ package queryrangebase import ( - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) // Helpful aliases for refactoring circular imports diff --git a/pkg/querier/queryrange/queryrangebase/definitions/definitions.pb.go b/pkg/querier/queryrange/queryrangebase/definitions/definitions.pb.go index d4eb4fb83b25f..1f2b484f640a4 100644 --- a/pkg/querier/queryrange/queryrangebase/definitions/definitions.pb.go +++ b/pkg/querier/queryrange/queryrangebase/definitions/definitions.pb.go @@ -137,7 +137,7 @@ func init() { } var fileDescriptor_d1a37772b6ae2c5c = []byte{ - // 262 bytes of a gzipped FileDescriptorProto + // 266 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0x2e, 0xc8, 0x4e, 0xd7, 0x2f, 0x2c, 0x4d, 0x2d, 0xca, 0x4c, 0x2d, 0x02, 0xd3, 0x95, 0x45, 0x89, 0x79, 0xe9, 0xa9, 0x48, 0xcc, 0xa4, 0xc4, 0xe2, 0x54, 0xfd, 0x94, 0xd4, 0xb4, 0xcc, 0xbc, 0xcc, 0x92, 0xcc, 0xfc, 0xbc, @@ -147,14 +147,14 @@ var fileDescriptor_d1a37772b6ae2c5c = []byte{ 0x4d, 0x4c, 0x49, 0x2d, 0x12, 0x92, 0xe4, 0x62, 0xf1, 0x4b, 0xcc, 0x4d, 0x95, 0x60, 0x54, 0x60, 0xd4, 0xe0, 0x74, 0x62, 0x7d, 0x75, 0x4f, 0x9e, 0x51, 0x37, 0x08, 0x2c, 0x24, 0x24, 0xcb, 0xc5, 0x16, 0x96, 0x98, 0x53, 0x9a, 0x5a, 0x2c, 0xc1, 0xa4, 0xc0, 0x8c, 0x90, 0x84, 0x0a, 0x2a, 0x85, - 0x70, 0x49, 0x20, 0x1b, 0x5a, 0x5c, 0x90, 0x9f, 0x57, 0x9c, 0x4a, 0xa9, 0xa9, 0x4e, 0xf5, 0x17, - 0x1e, 0xca, 0x31, 0xdc, 0x78, 0x28, 0xc7, 0xf0, 0xe1, 0xa1, 0x1c, 0x63, 0xc3, 0x23, 0x39, 0xc6, - 0x15, 0x8f, 0xe4, 0x18, 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, - 0xc6, 0x17, 0x8f, 0xe4, 0x18, 0x3e, 0x3c, 0x92, 0x63, 0x9c, 0xf0, 0x58, 0x8e, 0xe1, 0xc2, 0x63, - 0x39, 0x86, 0x1b, 0x8f, 0xe5, 0x18, 0xa2, 0x3c, 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, - 0xf3, 0x73, 0xf5, 0xd3, 0x8b, 0x12, 0xd3, 0x12, 0xf3, 0x12, 0xf5, 0x73, 0xf2, 0xb3, 0x33, 0xf5, - 0x49, 0x0e, 0xe0, 0x24, 0x36, 0x70, 0x90, 0x19, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0x51, 0x1b, - 0x61, 0xc9, 0x9c, 0x01, 0x00, 0x00, + 0x70, 0x49, 0x20, 0x1b, 0x5a, 0x5c, 0x90, 0x9f, 0x57, 0x9c, 0x4a, 0xa9, 0xa9, 0x4e, 0x4d, 0x8c, + 0x17, 0x1e, 0xca, 0x31, 0xdc, 0x78, 0x28, 0xc7, 0xf0, 0xe1, 0xa1, 0x1c, 0x63, 0xc3, 0x23, 0x39, + 0xc6, 0x15, 0x8f, 0xe4, 0x18, 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, + 0x39, 0xc6, 0x17, 0x8f, 0xe4, 0x18, 0x3e, 0x3c, 0x92, 0x63, 0x9c, 0xf0, 0x58, 0x8e, 0xe1, 0xc2, + 0x63, 0x39, 0x86, 0x1b, 0x8f, 0xe5, 0x18, 0xa2, 0x7c, 0xd2, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, + 0x92, 0xf3, 0x73, 0xf5, 0xd3, 0x8b, 0x12, 0xd3, 0x12, 0xf3, 0x12, 0xf5, 0x73, 0xf2, 0xb3, 0x33, + 0xf5, 0xcb, 0x8c, 0xf5, 0x49, 0x0e, 0xe4, 0x24, 0x36, 0x70, 0xb0, 0x19, 
0x03, 0x02, 0x00, 0x00, + 0xff, 0xff, 0xf5, 0x9e, 0x05, 0x86, 0xa0, 0x01, 0x00, 0x00, } func (this *PrometheusRequestHeader) Equal(that interface{}) bool { diff --git a/pkg/querier/queryrange/queryrangebase/definitions/definitions.proto b/pkg/querier/queryrange/queryrangebase/definitions/definitions.proto index 9f5e7967c8524..d5064e3aaad32 100644 --- a/pkg/querier/queryrange/queryrangebase/definitions/definitions.proto +++ b/pkg/querier/queryrange/queryrangebase/definitions/definitions.proto @@ -4,7 +4,7 @@ package definitions; import "gogoproto/gogo.proto"; -option go_package = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions"; +option go_package = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; diff --git a/pkg/querier/queryrange/queryrangebase/definitions/interface.go b/pkg/querier/queryrange/queryrangebase/definitions/interface.go index f8c9a0f5531fb..6f303f387dfa6 100644 --- a/pkg/querier/queryrange/queryrangebase/definitions/interface.go +++ b/pkg/querier/queryrange/queryrangebase/definitions/interface.go @@ -8,7 +8,7 @@ import ( "github.com/gogo/protobuf/proto" "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) // Codec is used to encode/decode query range requests and responses so they can be passed down to middlewares. 
diff --git a/pkg/querier/queryrange/queryrangebase/marshaling_test.go b/pkg/querier/queryrange/queryrangebase/marshaling_test.go index ee0cdd0f217ca..c2d9f432a4510 100644 --- a/pkg/querier/queryrange/queryrangebase/marshaling_test.go +++ b/pkg/querier/queryrange/queryrangebase/marshaling_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func BenchmarkPrometheusCodec_DecodeResponse(b *testing.B) { @@ -29,7 +29,7 @@ func BenchmarkPrometheusCodec_DecodeResponse(b *testing.B) { b.ReportAllocs() for n := 0; n < b.N; n++ { - _, err := PrometheusCodec.DecodeResponse(context.Background(), &http.Response{ + _, err := PrometheusCodecForRangeQueries.DecodeResponse(context.Background(), &http.Response{ StatusCode: 200, Body: io.NopCloser(bytes.NewReader(encodedRes)), ContentLength: int64(len(encodedRes)), @@ -51,7 +51,7 @@ func BenchmarkPrometheusCodec_EncodeResponse(b *testing.B) { b.ReportAllocs() for n := 0; n < b.N; n++ { - _, err := PrometheusCodec.EncodeResponse(context.Background(), nil, res) + _, err := PrometheusCodecForRangeQueries.EncodeResponse(context.Background(), nil, res) require.NoError(b, err) } } diff --git a/pkg/querier/queryrange/queryrangebase/middleware.go b/pkg/querier/queryrange/queryrangebase/middleware.go index 10e80ddf8a2ec..44d0ad65582e9 100644 --- a/pkg/querier/queryrange/queryrangebase/middleware.go +++ b/pkg/querier/queryrange/queryrangebase/middleware.go @@ -7,7 +7,7 @@ import ( "github.com/grafana/dskit/middleware" "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) const ( diff --git a/pkg/querier/queryrange/queryrangebase/promql_test.go b/pkg/querier/queryrange/queryrangebase/promql_test.go index 8e84304e64117..6ab7f460a99cb 100644 --- a/pkg/querier/queryrange/queryrangebase/promql_test.go +++ 
b/pkg/querier/queryrange/queryrangebase/promql_test.go @@ -18,7 +18,7 @@ import ( "github.com/prometheus/prometheus/util/annotations" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/astmapper" ) var ( diff --git a/pkg/querier/queryrange/queryrangebase/query_range.go b/pkg/querier/queryrange/queryrangebase/query_range.go index ed2bf48c6757f..6b33097c0033b 100644 --- a/pkg/querier/queryrange/queryrangebase/query_range.go +++ b/pkg/querier/queryrange/queryrangebase/query_range.go @@ -19,9 +19,9 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/timestamp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) // StatusSuccess Prometheus success result. @@ -38,14 +38,26 @@ var ( errNegativeStep = httpgrpc.Errorf(http.StatusBadRequest, "zero or negative query resolution step widths are not accepted. Try a positive integer") errStepTooSmall = httpgrpc.Errorf(http.StatusBadRequest, "exceeded maximum resolution of 11,000 points per time series. Try increasing the value of the step parameter") - // PrometheusCodec is a codec to encode and decode Prometheus query range requests and responses. - PrometheusCodec = &prometheusCodec{} + // PrometheusCodecForRangeQueries is a codec to encode and decode Loki range metric query requests and responses. + PrometheusCodecForRangeQueries = &prometheusCodec{ + resultType: model.ValMatrix, + } + + // PrometheusCodecForInstantQueries is a codec to encode and decode Loki range metric query requests and responses. + PrometheusCodecForInstantQueries = &prometheusCodec{ + resultType: model.ValVector, + } // Name of the cache control header. 
cacheControlHeader = "Cache-Control" ) -type prometheusCodec struct{} +type prometheusCodec struct { + // prometheusCodec is used to merge multiple response of either range (matrix) or instant queries(vector). + // when creating empty responses during merge, it need to be aware what kind of valueType it should create with. + // helps other middlewares to filter the correct result type. + resultType model.ValueType +} // WithStartEnd clones the current `PrometheusRequest` with a new `start` and `end` timestamp. func (q *PrometheusRequest) WithStartEnd(start, end time.Time) Request { @@ -125,19 +137,19 @@ func (resp *PrometheusResponse) SetHeader(name, value string) { } // NewEmptyPrometheusResponse returns an empty successful Prometheus query range response. -func NewEmptyPrometheusResponse() *PrometheusResponse { +func NewEmptyPrometheusResponse(v model.ValueType) *PrometheusResponse { return &PrometheusResponse{ Status: StatusSuccess, Data: PrometheusData{ - ResultType: model.ValMatrix.String(), + ResultType: v.String(), Result: []SampleStream{}, }, } } -func (prometheusCodec) MergeResponse(responses ...Response) (Response, error) { +func (p prometheusCodec) MergeResponse(responses ...Response) (Response, error) { if len(responses) == 0 { - return NewEmptyPrometheusResponse(), nil + return NewEmptyPrometheusResponse(p.resultType), nil } promResponses := make([]*PrometheusResponse, 0, len(responses)) @@ -155,7 +167,7 @@ func (prometheusCodec) MergeResponse(responses ...Response) (Response, error) { response := PrometheusResponse{ Status: StatusSuccess, Data: PrometheusData{ - ResultType: model.ValMatrix.String(), + ResultType: p.resultType.String(), Result: matrixMerge(promResponses), }, } diff --git a/pkg/querier/queryrange/queryrangebase/query_range_test.go b/pkg/querier/queryrange/queryrangebase/query_range_test.go index 21c115eec5892..4a723403fe1c4 100644 --- a/pkg/querier/queryrange/queryrangebase/query_range_test.go +++ 
b/pkg/querier/queryrange/queryrangebase/query_range_test.go @@ -7,12 +7,15 @@ import ( "net/http" "strconv" "testing" + "time" jsoniter "github.com/json-iterator/go" + "github.com/opentracing/opentracing-go/mocktracer" + "github.com/prometheus/prometheus/model/timestamp" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestResponse(t *testing.T) { @@ -33,7 +36,7 @@ func TestResponse(t *testing.T) { Header: http.Header{"Content-Type": []string{"application/json"}}, Body: io.NopCloser(bytes.NewBuffer([]byte(tc.body))), } - resp, err := PrometheusCodec.DecodeResponse(context.Background(), response, nil) + resp, err := PrometheusCodecForRangeQueries.DecodeResponse(context.Background(), response, nil) require.NoError(t, err) assert.Equal(t, tc.expected, resp) @@ -44,7 +47,7 @@ func TestResponse(t *testing.T) { Body: io.NopCloser(bytes.NewBuffer([]byte(tc.body))), ContentLength: int64(len(tc.body)), } - resp2, err := PrometheusCodec.EncodeResponse(context.Background(), nil, resp) + resp2, err := PrometheusCodecForRangeQueries.EncodeResponse(context.Background(), nil, resp) require.NoError(t, err) assert.Equal(t, response, resp2) }) @@ -262,13 +265,36 @@ func TestMergeAPIResponses(t *testing.T) { }, }} { t.Run(tc.name, func(t *testing.T) { - output, err := PrometheusCodec.MergeResponse(tc.input...) + output, err := PrometheusCodecForRangeQueries.MergeResponse(tc.input...) 
require.NoError(t, err) require.Equal(t, tc.expected, output) }) } } +func TestPrometheusRequestSpanLogging(t *testing.T) { + now := time.Now() + end := now.Add(1000 * time.Second) + req := PrometheusRequest{ + Start: now, + End: end, + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for _, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "start" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + if field.Key == "end" { + require.Equal(t, timestamp.Time(end.UnixMilli()).String(), field.ValueString) + } + } + } +} + func mustParse(t *testing.T, response string) Response { var resp PrometheusResponse // Needed as goimports automatically add a json import otherwise. diff --git a/pkg/querier/queryrange/queryrangebase/queryrange.pb.go b/pkg/querier/queryrange/queryrangebase/queryrange.pb.go index f908b3621dcf6..caf71d1f83206 100644 --- a/pkg/querier/queryrange/queryrangebase/queryrange.pb.go +++ b/pkg/querier/queryrange/queryrangebase/queryrange.pb.go @@ -10,10 +10,10 @@ import ( _ "github.com/gogo/protobuf/types" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" _ "github.com/golang/protobuf/ptypes/duration" - github_com_grafana_loki_pkg_logproto "github.com/grafana/loki/pkg/logproto" - logproto "github.com/grafana/loki/pkg/logproto" - definitions "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - resultscache "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + github_com_grafana_loki_v3_pkg_logproto "github.com/grafana/loki/v3/pkg/logproto" + logproto "github.com/grafana/loki/v3/pkg/logproto" + definitions "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + resultscache "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" io "io" math "math" math_bits "math/bits" @@ -260,8 +260,8 @@ func (m *PrometheusData) GetResult() []SampleStream { } type SampleStream struct { - Labels 
[]github_com_grafana_loki_pkg_logproto.LabelAdapter `protobuf:"bytes,1,rep,name=labels,proto3,customtype=github.com/grafana/loki/pkg/logproto.LabelAdapter" json:"metric"` - Samples []logproto.LegacySample `protobuf:"bytes,2,rep,name=samples,proto3" json:"values"` + Labels []github_com_grafana_loki_v3_pkg_logproto.LabelAdapter `protobuf:"bytes,1,rep,name=labels,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" json:"metric"` + Samples []logproto.LegacySample `protobuf:"bytes,2,rep,name=samples,proto3" json:"values"` } func (m *SampleStream) Reset() { *m = SampleStream{} } @@ -315,54 +315,54 @@ func init() { } var fileDescriptor_4cc6a0c1d6b614c4 = []byte{ - // 739 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xcf, 0x4f, 0xdb, 0x48, - 0x18, 0x8d, 0xc9, 0x2f, 0x32, 0xac, 0xb2, 0xda, 0x01, 0xb1, 0x5e, 0x16, 0x8d, 0xa3, 0x68, 0x57, - 0xca, 0x4a, 0x5b, 0xbb, 0xa5, 0x2a, 0x87, 0x4a, 0x48, 0xad, 0x81, 0xaa, 0x42, 0x48, 0x45, 0x86, - 0x53, 0x6f, 0x93, 0x64, 0x70, 0x2c, 0x6c, 0x8f, 0x99, 0x19, 0x23, 0xe5, 0xd6, 0x53, 0xcf, 0xdc, - 0xda, 0x3f, 0xa1, 0xa7, 0xfe, 0x1d, 0x1c, 0x39, 0xa2, 0x1e, 0xdc, 0x12, 0x2e, 0x55, 0x4e, 0xfc, - 0x09, 0xd5, 0xcc, 0x38, 0xe0, 0x04, 0xd1, 0x1f, 0xa7, 0xcc, 0xe7, 0xef, 0xbd, 0x37, 0xef, 0x7b, - 0x9f, 0x63, 0xb0, 0x9e, 0x1c, 0xf9, 0xce, 0x71, 0x4a, 0x58, 0x40, 0x98, 0xfa, 0x1d, 0x32, 0x1c, - 0xfb, 0xa4, 0x70, 0xec, 0x62, 0x5e, 0x2c, 0xed, 0x84, 0x51, 0x41, 0x61, 0x73, 0x1a, 0xb0, 0xb2, - 0xe4, 0x53, 0x9f, 0xaa, 0x96, 0x23, 0x4f, 0x1a, 0xb5, 0x82, 0x7c, 0x4a, 0xfd, 0x90, 0x38, 0xaa, - 0xea, 0xa6, 0x87, 0x4e, 0x3f, 0x65, 0x58, 0x04, 0x34, 0xce, 0xfb, 0xd6, 0x6c, 0x5f, 0x04, 0x11, - 0xe1, 0x02, 0x47, 0x49, 0x0e, 0xf8, 0x5b, 0xda, 0x0b, 0xa9, 0xaf, 0x95, 0x27, 0x87, 0xbc, 0xb9, - 0xf9, 0x73, 0xde, 0xfb, 0xe4, 0x30, 0x88, 0x03, 0x79, 0x2b, 0x2f, 0x9e, 0x73, 0x91, 0x87, 0x52, - 0x84, 0x0b, 0xca, 0xb0, 0x4f, 0x9c, 0xde, 0x20, 0x8d, 0x8f, 0x9c, 0x1e, 0xee, 0x0d, 0x88, 
0xc3, - 0x08, 0x4f, 0x43, 0xc1, 0x75, 0x21, 0x86, 0x09, 0xc9, 0x19, 0xed, 0x77, 0x65, 0xf0, 0xc7, 0x1e, - 0xa3, 0x11, 0x11, 0x03, 0x92, 0x72, 0x8f, 0x1c, 0xa7, 0x84, 0x0b, 0x08, 0x41, 0x25, 0xc1, 0x62, - 0x60, 0x1a, 0x2d, 0xa3, 0xd3, 0xf0, 0xd4, 0x19, 0x3e, 0x05, 0x55, 0x2e, 0x30, 0x13, 0xe6, 0x5c, - 0xcb, 0xe8, 0x2c, 0xac, 0xad, 0xd8, 0x7a, 0x5c, 0x7b, 0x32, 0xae, 0x7d, 0x30, 0x19, 0xd7, 0x9d, - 0x3f, 0xcb, 0xac, 0xd2, 0xe9, 0x67, 0xcb, 0xf0, 0x34, 0x05, 0xae, 0x83, 0x32, 0x89, 0xfb, 0x66, - 0xf9, 0x17, 0x98, 0x92, 0x20, 0x7d, 0x70, 0x41, 0x12, 0xb3, 0xd2, 0x32, 0x3a, 0x65, 0x4f, 0x9d, - 0xe1, 0x06, 0xa8, 0xcb, 0x60, 0x69, 0x2a, 0xcc, 0xaa, 0xd2, 0xfb, 0xeb, 0x8e, 0xde, 0x56, 0xbe, - 0x18, 0x2d, 0xf7, 0x5e, 0xca, 0x4d, 0x38, 0x70, 0x09, 0x54, 0x55, 0xa4, 0x66, 0x4d, 0xcd, 0xa6, - 0x0b, 0xb8, 0x03, 0x9a, 0x32, 0x9b, 0x20, 0xf6, 0x5f, 0x25, 0x2a, 0x50, 0xb3, 0xae, 0xb4, 0x57, - 0xed, 0x62, 0x72, 0xf6, 0xe6, 0x14, 0xc6, 0xad, 0x48, 0x79, 0x6f, 0x86, 0x09, 0xb7, 0x41, 0xfd, - 0x25, 0xc1, 0x7d, 0xc2, 0xb8, 0x39, 0xdf, 0x2a, 0x77, 0x16, 0xd6, 0xfe, 0xb1, 0x8b, 0x9b, 0xba, - 0x93, 0xb6, 0x06, 0xbb, 0xd5, 0x71, 0x66, 0x19, 0x0f, 0xbc, 0x09, 0xb7, 0xfd, 0x71, 0x0e, 0xc0, - 0x22, 0x96, 0x27, 0x34, 0xe6, 0x04, 0xb6, 0x41, 0x6d, 0x5f, 0x60, 0x91, 0x72, 0xbd, 0x1c, 0x17, - 0x8c, 0x33, 0xab, 0xc6, 0xd5, 0x13, 0x2f, 0xef, 0xc0, 0x1d, 0x50, 0xd9, 0xc2, 0x02, 0xe7, 0x9b, - 0x42, 0xf6, 0xf4, 0x3b, 0x54, 0x70, 0x20, 0x51, 0xee, 0xb2, 0x9c, 0x62, 0x9c, 0x59, 0xcd, 0x3e, - 0x16, 0xf8, 0x7f, 0x1a, 0x05, 0x82, 0x44, 0x89, 0x18, 0x7a, 0x4a, 0x03, 0x3e, 0x01, 0x8d, 0x6d, - 0xc6, 0x28, 0x3b, 0x18, 0x26, 0x44, 0x2d, 0xb0, 0xe1, 0xfe, 0x39, 0xce, 0xac, 0x45, 0x32, 0x79, - 0x58, 0x60, 0xdc, 0x22, 0xe1, 0x7f, 0xa0, 0xaa, 0x0a, 0xb5, 0xba, 0x86, 0xbb, 0x38, 0xce, 0xac, - 0xdf, 0x15, 0xa5, 0x00, 0xd7, 0x08, 0xf8, 0xe2, 0x36, 0xaf, 0xaa, 0xca, 0xeb, 0xdf, 0x7b, 0xf3, - 0xd2, 0x19, 0xdc, 0x13, 0xd8, 0x5b, 0x03, 0x34, 0xa7, 0x47, 0x83, 0x36, 0x00, 0x9e, 0xda, 0x9f, - 0x72, 0xaf, 0x03, 0x6b, 0x8e, 
0x33, 0x0b, 0xb0, 0x9b, 0xa7, 0x5e, 0x01, 0x01, 0xb7, 0x40, 0x4d, - 0x57, 0xe6, 0x9c, 0x72, 0xb2, 0x3a, 0x1b, 0xdd, 0x3e, 0x8e, 0x92, 0x90, 0xec, 0x0b, 0x46, 0x70, - 0xe4, 0x36, 0xf3, 0xe0, 0x6a, 0x5a, 0xcd, 0xcb, 0xb9, 0xed, 0x33, 0x03, 0xfc, 0x56, 0x04, 0xc2, - 0x13, 0x50, 0x0b, 0x71, 0x97, 0x84, 0x72, 0x67, 0x65, 0xf5, 0xc6, 0xde, 0xfc, 0xf9, 0x77, 0x89, - 0x8f, 0x7b, 0xc3, 0x5d, 0xd9, 0xdd, 0xc3, 0x01, 0x73, 0x37, 0xa5, 0xe6, 0xa7, 0xcc, 0x7a, 0xe4, - 0x07, 0x62, 0x90, 0x76, 0xed, 0x1e, 0x8d, 0x1c, 0x9f, 0xe1, 0x43, 0x1c, 0x63, 0x27, 0xa4, 0x47, - 0x81, 0x53, 0xfc, 0x86, 0xd8, 0x8a, 0xf7, 0xbc, 0x8f, 0x13, 0x41, 0x98, 0x34, 0x12, 0x11, 0xc1, - 0x82, 0x9e, 0x97, 0xdf, 0x06, 0x9f, 0x81, 0x3a, 0x57, 0x3e, 0x78, 0x3e, 0xcf, 0xf2, 0xec, 0xc5, - 0xda, 0xe6, 0xed, 0x24, 0x27, 0x38, 0x4c, 0x09, 0xf7, 0x26, 0x34, 0x97, 0x9f, 0x5f, 0xa2, 0xd2, - 0xc5, 0x25, 0x2a, 0x5d, 0x5f, 0x22, 0xe3, 0xcd, 0x08, 0x19, 0x1f, 0x46, 0xc8, 0x38, 0x1b, 0x21, - 0xe3, 0x7c, 0x84, 0x8c, 0x2f, 0x23, 0x64, 0x7c, 0x1d, 0xa1, 0xd2, 0xf5, 0x08, 0x19, 0xa7, 0x57, - 0xa8, 0x74, 0x7e, 0x85, 0x4a, 0x17, 0x57, 0xa8, 0xf4, 0x7a, 0xe3, 0x7b, 0xe6, 0x7f, 0xf8, 0x8d, - 0xeb, 0xd6, 0x94, 0xc3, 0xc7, 0xdf, 0x02, 0x00, 0x00, 0xff, 0xff, 0xfe, 0xcd, 0xe4, 0x4f, 0xcf, - 0x05, 0x00, 0x00, + // 740 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xcf, 0x4f, 0xd4, 0x40, + 0x18, 0xdd, 0xb2, 0xbf, 0xd8, 0xc1, 0xac, 0x71, 0x20, 0x58, 0x91, 0xb4, 0x9b, 0x8d, 0x26, 0x6b, + 0xa2, 0xad, 0x01, 0xe5, 0x60, 0x62, 0x82, 0xe5, 0x47, 0x0c, 0x21, 0x91, 0x14, 0x4e, 0xde, 0x66, + 0x77, 0x87, 0x6e, 0x43, 0xdb, 0x29, 0x33, 0x53, 0x92, 0xbd, 0x79, 0xf2, 0xcc, 0x4d, 0xff, 0x04, + 0x4f, 0xfe, 0x1d, 0x24, 0x5e, 0x38, 0x12, 0x0f, 0x55, 0x96, 0x8b, 0xd9, 0x13, 0x7f, 0x82, 0x99, + 0x99, 0x2e, 0x74, 0x97, 0x10, 0xf4, 0xb4, 0xf3, 0xf5, 0x7b, 0xef, 0xcd, 0xfb, 0xde, 0xd7, 0x2d, + 0x58, 0x89, 0x0f, 0x3c, 0xfb, 0x30, 0xc1, 0xd4, 0xc7, 0x54, 0xfe, 0xf6, 0x29, 0x8a, 0x3c, 0x9c, + 
0x3b, 0xb6, 0x11, 0xcb, 0x97, 0x56, 0x4c, 0x09, 0x27, 0xb0, 0x3e, 0x0e, 0x58, 0x98, 0xf3, 0x88, + 0x47, 0x64, 0xcb, 0x16, 0x27, 0x85, 0x5a, 0x30, 0x3c, 0x42, 0xbc, 0x00, 0xdb, 0xb2, 0x6a, 0x27, + 0xfb, 0x76, 0x37, 0xa1, 0x88, 0xfb, 0x24, 0xca, 0xfa, 0xe6, 0x64, 0x9f, 0xfb, 0x21, 0x66, 0x1c, + 0x85, 0x71, 0x06, 0x78, 0x2c, 0xec, 0x05, 0xc4, 0x53, 0xca, 0xa3, 0x43, 0xd6, 0x5c, 0xfb, 0x37, + 0xef, 0x5d, 0xbc, 0xef, 0x47, 0xbe, 0xb8, 0x95, 0xe5, 0xcf, 0x99, 0xc8, 0x4b, 0x21, 0xc2, 0x38, + 0xa1, 0xc8, 0xc3, 0x76, 0xa7, 0x97, 0x44, 0x07, 0x76, 0x07, 0x75, 0x7a, 0xd8, 0xa6, 0x98, 0x25, + 0x01, 0x67, 0xaa, 0xe0, 0xfd, 0x18, 0x67, 0x8c, 0xe6, 0x97, 0x22, 0x78, 0xb0, 0x43, 0x49, 0x88, + 0x79, 0x0f, 0x27, 0xcc, 0xc5, 0x87, 0x09, 0x66, 0x1c, 0x42, 0x50, 0x8a, 0x11, 0xef, 0xe9, 0x5a, + 0x43, 0x6b, 0xd5, 0x5c, 0x79, 0x86, 0x6f, 0x40, 0x99, 0x71, 0x44, 0xb9, 0x3e, 0xd5, 0xd0, 0x5a, + 0x33, 0x4b, 0x0b, 0x96, 0x1a, 0xd7, 0x1a, 0x8d, 0x6b, 0xed, 0x8d, 0xc6, 0x75, 0xa6, 0x4f, 0x52, + 0xb3, 0x70, 0xfc, 0xcb, 0xd4, 0x5c, 0x45, 0x81, 0x2b, 0xa0, 0x88, 0xa3, 0xae, 0x5e, 0xfc, 0x0f, + 0xa6, 0x20, 0x08, 0x1f, 0x8c, 0xe3, 0x58, 0x2f, 0x35, 0xb4, 0x56, 0xd1, 0x95, 0x67, 0xf8, 0x16, + 0x54, 0x45, 0xb0, 0x24, 0xe1, 0x7a, 0x59, 0xea, 0x3d, 0xba, 0xa1, 0xb7, 0x9e, 0x2d, 0x46, 0xc9, + 0x7d, 0x15, 0x72, 0x23, 0x0e, 0x9c, 0x03, 0x65, 0x19, 0xa9, 0x5e, 0x91, 0xb3, 0xa9, 0x02, 0x6e, + 0x81, 0xba, 0xc8, 0xc6, 0x8f, 0xbc, 0x0f, 0xb1, 0x0c, 0x54, 0xaf, 0x4a, 0xed, 0x45, 0x2b, 0x9f, + 0x9c, 0xb5, 0x36, 0x86, 0x71, 0x4a, 0x42, 0xde, 0x9d, 0x60, 0xc2, 0x0d, 0x50, 0x7d, 0x8f, 0x51, + 0x17, 0x53, 0xa6, 0x4f, 0x37, 0x8a, 0xad, 0x99, 0xa5, 0x27, 0x56, 0x7e, 0x53, 0x37, 0xd2, 0x56, + 0x60, 0xa7, 0x3c, 0x4c, 0x4d, 0xed, 0x85, 0x3b, 0xe2, 0x36, 0xbf, 0x4f, 0x01, 0x98, 0xc7, 0xb2, + 0x98, 0x44, 0x0c, 0xc3, 0x26, 0xa8, 0xec, 0x72, 0xc4, 0x13, 0xa6, 0x96, 0xe3, 0x80, 0x61, 0x6a, + 0x56, 0x98, 0x7c, 0xe2, 0x66, 0x1d, 0xb8, 0x05, 0x4a, 0xeb, 0x88, 0xa3, 0x6c, 0x53, 0x86, 0x35, + 0xfe, 0x0e, 0xe5, 0x1c, 0x08, 0x94, 
0x33, 0x2f, 0xa6, 0x18, 0xa6, 0x66, 0xbd, 0x8b, 0x38, 0x7a, + 0x4e, 0x42, 0x9f, 0xe3, 0x30, 0xe6, 0x7d, 0x57, 0x6a, 0xc0, 0xd7, 0xa0, 0xb6, 0x41, 0x29, 0xa1, + 0x7b, 0xfd, 0x18, 0xcb, 0x05, 0xd6, 0x9c, 0x87, 0xc3, 0xd4, 0x9c, 0xc5, 0xa3, 0x87, 0x39, 0xc6, + 0x35, 0x12, 0x3e, 0x03, 0x65, 0x59, 0xc8, 0xd5, 0xd5, 0x9c, 0xd9, 0x61, 0x6a, 0xde, 0x97, 0x94, + 0x1c, 0x5c, 0x21, 0xe0, 0xe6, 0x75, 0x5e, 0x65, 0x99, 0xd7, 0xd3, 0x5b, 0xf3, 0x52, 0x19, 0xdc, + 0x12, 0xd8, 0x67, 0x0d, 0xd4, 0xc7, 0x47, 0x83, 0x16, 0x00, 0xae, 0xdc, 0x9f, 0x74, 0xaf, 0x02, + 0xab, 0x0f, 0x53, 0x13, 0xd0, 0xab, 0xa7, 0x6e, 0x0e, 0x01, 0xd7, 0x41, 0x45, 0x55, 0xfa, 0x94, + 0x74, 0xb2, 0x38, 0x19, 0xdd, 0x2e, 0x0a, 0xe3, 0x00, 0xef, 0x72, 0x8a, 0x51, 0xe8, 0xd4, 0xb3, + 0xe0, 0x2a, 0x4a, 0xcd, 0xcd, 0xb8, 0xcd, 0x1f, 0x1a, 0xb8, 0x97, 0x07, 0xc2, 0x3e, 0xa8, 0x04, + 0xa8, 0x8d, 0x03, 0xb1, 0xb3, 0xa2, 0x7c, 0x63, 0xaf, 0xfe, 0xfc, 0xdb, 0xd8, 0x43, 0x9d, 0xfe, + 0xb6, 0xe8, 0xee, 0x20, 0x9f, 0x3a, 0x9b, 0x42, 0xf3, 0x67, 0x6a, 0xbe, 0xf2, 0x7c, 0xde, 0x4b, + 0xda, 0x56, 0x87, 0x84, 0xb6, 0x47, 0xd1, 0x3e, 0x8a, 0x90, 0x1d, 0x90, 0x03, 0xdf, 0x3e, 0x5a, + 0xb6, 0xf3, 0x9f, 0x11, 0x4b, 0x52, 0xdf, 0x75, 0x51, 0xcc, 0x31, 0x15, 0x5e, 0x42, 0xcc, 0xa9, + 0xdf, 0x71, 0xb3, 0x0b, 0xe1, 0x2a, 0xa8, 0x32, 0x69, 0x85, 0x65, 0x23, 0xcd, 0x4f, 0xde, 0xad, + 0x9c, 0x5e, 0x0f, 0x73, 0x84, 0x82, 0x04, 0x33, 0x77, 0x44, 0x73, 0x8e, 0x4e, 0xcf, 0x8d, 0xc2, + 0xd9, 0xb9, 0x51, 0xb8, 0x3c, 0x37, 0xb4, 0x4f, 0x03, 0x43, 0xfb, 0x36, 0x30, 0xb4, 0x93, 0x81, + 0xa1, 0x9d, 0x0e, 0x0c, 0xed, 0xf7, 0xc0, 0xd0, 0xfe, 0x0c, 0x8c, 0xc2, 0xe5, 0xc0, 0xd0, 0x8e, + 0x2f, 0x8c, 0xc2, 0xe9, 0x85, 0x51, 0x38, 0xbb, 0x30, 0x0a, 0x1f, 0x57, 0xef, 0xf0, 0x7f, 0xe7, + 0x97, 0xae, 0x5d, 0x91, 0x26, 0x97, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xae, 0xb0, 0xf3, 0xf9, + 0xd5, 0x05, 0x00, 0x00, } func (this *PrometheusRequest) Equal(that interface{}) bool { @@ -1751,7 +1751,7 @@ func (m *SampleStream) Unmarshal(dAtA []byte) error { if 
postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, github_com_grafana_loki_pkg_logproto.LabelAdapter{}) + m.Labels = append(m.Labels, github_com_grafana_loki_v3_pkg_logproto.LabelAdapter{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/querier/queryrange/queryrangebase/queryrange.proto b/pkg/querier/queryrange/queryrangebase/queryrange.proto index 98ddaa2b7d2db..4d8e42016cc22 100644 --- a/pkg/querier/queryrange/queryrangebase/queryrange.proto +++ b/pkg/querier/queryrange/queryrangebase/queryrange.proto @@ -9,7 +9,7 @@ import "pkg/logproto/logproto.proto"; import "pkg/querier/queryrange/queryrangebase/definitions/definitions.proto"; import "pkg/storage/chunk/cache/resultscache/types.proto"; -option go_package = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase"; +option go_package = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; @@ -56,7 +56,7 @@ message SampleStream { repeated logproto.LegacyLabelPair labels = 1 [ (gogoproto.nullable) = false, (gogoproto.jsontag) = "metric", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.LabelAdapter" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" ]; repeated logproto.LegacySample samples = 2 [ (gogoproto.nullable) = false, diff --git a/pkg/querier/queryrange/queryrangebase/results_cache.go b/pkg/querier/queryrange/queryrangebase/results_cache.go index 3511fe0b7dd30..e519ae74f075d 100644 --- a/pkg/querier/queryrange/queryrangebase/results_cache.go +++ b/pkg/querier/queryrange/queryrangebase/results_cache.go @@ -15,10 +15,10 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk/cache" - 
"github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/constants" ) var ( diff --git a/pkg/querier/queryrange/queryrangebase/results_cache_test.go b/pkg/querier/queryrange/queryrangebase/results_cache_test.go index 6706e6a2d9fa7..fa1d9c81ef4f1 100644 --- a/pkg/querier/queryrange/queryrangebase/results_cache_test.go +++ b/pkg/querier/queryrange/queryrangebase/results_cache_test.go @@ -14,11 +14,11 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( @@ -414,7 +414,7 @@ func TestResultsCache(t *testing.T) { c, resultscache.ConstSplitter(day), mockLimits{}, - PrometheusCodec, + PrometheusCodecForRangeQueries, PrometheusResponseExtractor{}, nil, nil, @@ -461,7 +461,7 @@ func TestResultsCacheRecent(t *testing.T) { c, resultscache.ConstSplitter(day), mockLimits{maxCacheFreshness: 10 * time.Minute}, - PrometheusCodec, + PrometheusCodecForRangeQueries, PrometheusResponseExtractor{}, nil, nil, @@ -572,7 +572,7 @@ func TestResultsCacheShouldCacheFunc(t *testing.T) { c, resultscache.ConstSplitter(day), mockLimits{maxCacheFreshness: 10 * time.Minute}, - PrometheusCodec, + PrometheusCodecForRangeQueries, PrometheusResponseExtractor{}, nil, tc.shouldCache, diff --git 
a/pkg/querier/queryrange/queryrangebase/retry.go b/pkg/querier/queryrange/queryrangebase/retry.go index d051363771bb9..f02b5d73cd6c4 100644 --- a/pkg/querier/queryrange/queryrangebase/retry.go +++ b/pkg/querier/queryrange/queryrangebase/retry.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type RetryMiddlewareMetrics struct { diff --git a/pkg/querier/queryrange/queryrangebase/retry_test.go b/pkg/querier/queryrange/queryrangebase/retry_test.go index 2c4a15bb9f480..7476fa21a06b2 100644 --- a/pkg/querier/queryrange/queryrangebase/retry_test.go +++ b/pkg/querier/queryrange/queryrangebase/retry_test.go @@ -12,7 +12,7 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) func TestRetry(t *testing.T) { diff --git a/pkg/querier/queryrange/queryrangebase/series_test.go b/pkg/querier/queryrange/queryrangebase/series_test.go index 0bfdfaefcced4..db574403fffb4 100644 --- a/pkg/querier/queryrange/queryrangebase/series_test.go +++ b/pkg/querier/queryrange/queryrangebase/series_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func Test_ResponseToSamples(t *testing.T) { diff --git a/pkg/querier/queryrange/queryrangebase/test_utils.go b/pkg/querier/queryrange/queryrangebase/test_utils.go index fdc02d87a9091..64be6cc0b48e3 100644 --- a/pkg/querier/queryrange/queryrangebase/test_utils.go +++ b/pkg/querier/queryrange/queryrangebase/test_utils.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/prometheus/storage" 
"github.com/prometheus/prometheus/util/annotations" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/querier/series" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/series" ) // genLabels will create a slice of labels where each label has an equal chance to occupy a value from [0,labelBuckets]. It returns a slice of length labelBuckets^len(labelSet) diff --git a/pkg/querier/queryrange/queryrangebase/test_utils_test.go b/pkg/querier/queryrange/queryrangebase/test_utils_test.go index 0eacb00199375..5cf748dd7dbc3 100644 --- a/pkg/querier/queryrange/queryrangebase/test_utils_test.go +++ b/pkg/querier/queryrange/queryrangebase/test_utils_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/astmapper" ) func TestGenLabelsCorrectness(t *testing.T) { diff --git a/pkg/querier/queryrange/queryrangebase/util.go b/pkg/querier/queryrange/queryrangebase/util.go index 5073b715bc269..a47753013c607 100644 --- a/pkg/querier/queryrange/queryrangebase/util.go +++ b/pkg/querier/queryrange/queryrangebase/util.go @@ -3,7 +3,7 @@ package queryrangebase import ( "context" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) // RequestResponse contains a request response and the respective request that was used. 
diff --git a/pkg/querier/queryrange/queryrangebase/value.go b/pkg/querier/queryrange/queryrangebase/value.go index b0d96f100eac1..e798233dcea38 100644 --- a/pkg/querier/queryrange/queryrangebase/value.go +++ b/pkg/querier/queryrange/queryrangebase/value.go @@ -8,8 +8,8 @@ import ( "github.com/prometheus/prometheus/promql/parser" "github.com/prometheus/prometheus/storage" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/series" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/series" ) // FromResult transforms a promql query result into a samplestream diff --git a/pkg/querier/queryrange/queryrangebase/value_test.go b/pkg/querier/queryrange/queryrangebase/value_test.go index 249fe5eaeb812..965effa30d7ae 100644 --- a/pkg/querier/queryrange/queryrangebase/value_test.go +++ b/pkg/querier/queryrange/queryrangebase/value_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestFromValue(t *testing.T) { diff --git a/pkg/querier/queryrange/querysharding.go b/pkg/querier/queryrange/querysharding.go index a6c32b1525862..c67cd753a693a 100644 --- a/pkg/querier/queryrange/querysharding.go +++ b/pkg/querier/queryrange/querysharding.go @@ -15,18 +15,18 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/marshal" - "github.com/grafana/loki/pkg/util/spanlogger" - 
"github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/util/validation" ) var errInvalidShardingRange = errors.New("Query does not fit in a single sharding configuration") @@ -189,13 +189,27 @@ func (ast *astMapperware) Do(ctx context.Context, r queryrangebase.Request) (que ast.maxShards, r, ast.statsHandler, + ast.next, ast.limits, ) if !ok { return ast.next.Do(ctx, r) } - mapper := logql.NewShardMapper(resolver, ast.metrics, ast.shardAggregation) + v := ast.limits.TSDBShardingStrategy(tenants[0]) + version, err := logql.ParseShardVersion(v) + if err != nil { + level.Warn(logger).Log( + "msg", "failed to parse shard version", + "fallback", version.String(), + "err", err.Error(), + "user", tenants[0], + "query", r.GetQuery(), + ) + } + strategy := version.Strategy(resolver, uint64(ast.limits.TSDBMaxBytesPerShard(tenants[0]))) + + mapper := logql.NewShardMapper(strategy, ast.metrics, ast.shardAggregation) noop, bytesPerShard, parsed, err := mapper.Parse(params.GetExpression()) if err != nil { @@ -232,9 +246,7 @@ func (ast *astMapperware) Do(ctx context.Context, r queryrangebase.Request) (que } // Merge index and volume stats result cache stats from shard resolver into the query stats. 
- res.Statistics.Caches.StatsResult.Merge(resolverStats.Caches().StatsResult) - res.Statistics.Caches.VolumeResult.Merge(resolverStats.Caches().VolumeResult) - + res.Statistics.Merge(resolverStats.Result(0, 0, 0)) value, err := marshal.NewResultValue(res.Data) if err != nil { return nil, err diff --git a/pkg/querier/queryrange/querysharding_test.go b/pkg/querier/queryrange/querysharding_test.go index b17dfc4d3678a..cb080c279a4d1 100644 --- a/pkg/querier/queryrange/querysharding_test.go +++ b/pkg/querier/queryrange/querysharding_test.go @@ -16,17 +16,17 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) var ( diff --git a/pkg/querier/queryrange/roundtrip.go b/pkg/querier/queryrange/roundtrip.go index 5184ef62bb13c..3d64c50231d02 100644 --- a/pkg/querier/queryrange/roundtrip.go +++ b/pkg/querier/queryrange/roundtrip.go @@ -15,18 +15,18 @@ import ( 
"github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/prometheus/prometheus/model/labels" - - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - base "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - logutil "github.com/grafana/loki/pkg/util/log" + + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + logqllog "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + base "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + logutil "github.com/grafana/loki/v3/pkg/util/log" ) const ( @@ -237,42 +237,46 @@ func NewMiddleware( return base.MiddlewareFunc(func(next base.Handler) base.Handler { var ( - metricRT = metricsTripperware.Wrap(next) - limitedRT = limitedTripperware.Wrap(next) - logFilterRT = logFilterTripperware.Wrap(next) - seriesRT = seriesTripperware.Wrap(next) - labelsRT = labelsTripperware.Wrap(next) - instantRT = instantMetricTripperware.Wrap(next) - statsRT = indexStatsTripperware.Wrap(next) - seriesVolumeRT = seriesVolumeTripperware.Wrap(next) + metricRT = metricsTripperware.Wrap(next) + limitedRT = limitedTripperware.Wrap(next) + logFilterRT = logFilterTripperware.Wrap(next) + seriesRT = seriesTripperware.Wrap(next) + labelsRT = labelsTripperware.Wrap(next) + instantRT = instantMetricTripperware.Wrap(next) + statsRT = 
indexStatsTripperware.Wrap(next) + seriesVolumeRT = seriesVolumeTripperware.Wrap(next) + detectedFieldsRT = next //TODO(twhitney): add middlewares for detected fields + detectedLabelsRT = next // TODO(shantanu): add middlewares ) - return newRoundTripper(log, next, limitedRT, logFilterRT, metricRT, seriesRT, labelsRT, instantRT, statsRT, seriesVolumeRT, limits) + return newRoundTripper(log, next, limitedRT, logFilterRT, metricRT, seriesRT, labelsRT, instantRT, statsRT, seriesVolumeRT, detectedFieldsRT, detectedLabelsRT, limits) }), StopperWrapper{resultsCache, statsCache, volumeCache}, nil } type roundTripper struct { logger log.Logger - next, limited, log, metric, series, labels, instantMetric, indexStats, seriesVolume base.Handler + next, limited, log, metric, series, labels, instantMetric, indexStats, seriesVolume, detectedFields, detectedLabels base.Handler limits Limits } // newRoundTripper creates a new queryrange roundtripper -func newRoundTripper(logger log.Logger, next, limited, log, metric, series, labels, instantMetric, indexStats, seriesVolume base.Handler, limits Limits) roundTripper { +func newRoundTripper(logger log.Logger, next, limited, log, metric, series, labels, instantMetric, indexStats, seriesVolume, detectedFields, detectedLabels base.Handler, limits Limits) roundTripper { return roundTripper{ - logger: logger, - limited: limited, - log: log, - limits: limits, - metric: metric, - series: series, - labels: labels, - instantMetric: instantMetric, - indexStats: indexStats, - seriesVolume: seriesVolume, - next: next, + logger: logger, + limited: limited, + log: log, + limits: limits, + metric: metric, + series: series, + labels: labels, + instantMetric: instantMetric, + indexStats: indexStats, + seriesVolume: seriesVolume, + detectedFields: detectedFields, + detectedLabels: detectedLabels, + next: next, } } @@ -365,6 +369,18 @@ func (r roundTripper) Do(ctx context.Context, req base.Request) (base.Response, ) return r.seriesVolume.Do(ctx, req) + 
case *DetectedFieldsRequest: + level.Info(logger).Log( + "msg", "executing query", + "type", "detected fields", + "query", op.Query, + "length", op.End.Sub(op.Start), + "start", op.Start, + "end", op.End, + ) + + return r.detectedFields.Do(ctx, req) + // TODO(shantanu): Add DetectedLabels default: return r.next.Do(ctx, req) } @@ -374,7 +390,7 @@ func (r roundTripper) Do(ctx context.Context, req base.Request) (base.Response, func transformRegexQuery(req *http.Request, expr syntax.LogSelectorExpr) (syntax.LogSelectorExpr, error) { regexp := req.Form.Get("regexp") if regexp != "" { - filterExpr, err := syntax.AddFilterExpr(expr, labels.MatchRegexp, "", regexp) + filterExpr, err := syntax.AddFilterExpr(expr, logqllog.LineMatchRegexp, "", regexp) if err != nil { return nil, err } @@ -390,13 +406,16 @@ func transformRegexQuery(req *http.Request, expr syntax.LogSelectorExpr) (syntax } const ( - InstantQueryOp = "instant_query" - QueryRangeOp = "query_range" - SeriesOp = "series" - LabelNamesOp = "labels" - IndexStatsOp = "index_stats" - VolumeOp = "volume" - VolumeRangeOp = "volume_range" + InstantQueryOp = "instant_query" + QueryRangeOp = "query_range" + SeriesOp = "series" + LabelNamesOp = "labels" + IndexStatsOp = "index_stats" + VolumeOp = "volume" + VolumeRangeOp = "volume_range" + IndexShardsOp = "index_shards" + DetectedFieldsOp = "detected_fields" + DetectedLabelsOp = "detected_labels" ) func getOperation(path string) string { @@ -415,6 +434,12 @@ func getOperation(path string) string { return VolumeOp case path == "/loki/api/v1/index/volume_range": return VolumeRangeOp + case path == "/loki/api/v1/index/shards": + return IndexShardsOp + case path == "/loki/api/v1/detected_fields": + return DetectedFieldsOp + case path == "/loki/api/v1/detected_labels": + return DetectedLabelsOp default: return "" } diff --git a/pkg/querier/queryrange/roundtrip_test.go b/pkg/querier/queryrange/roundtrip_test.go index 206822a50f6e8..27d3ff781b0b5 100644 --- 
a/pkg/querier/queryrange/roundtrip_test.go +++ b/pkg/querier/queryrange/roundtrip_test.go @@ -20,23 +20,23 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - base "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/validation" - valid "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + base "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/validation" + valid "github.com/grafana/loki/v3/pkg/validation" ) var ( @@ -86,6 +86,21 @@ var ( }, }, }, + CacheInstantMetricResults: true, + 
InstantMetricQuerySplitAlign: true, + InstantMetricCacheConfig: InstantMetricCacheConfig{ + ResultsCacheConfig: base.ResultsCacheConfig{ + Config: resultscache.Config{ + CacheConfig: cache.Config{ + EmbeddedCache: cache.EmbeddedCacheConfig{ + Enabled: true, + MaxSizeMB: 1024, + TTL: 24 * time.Hour, + }, + }, + }, + }, + }, } testEngineOpts = logql.EngineOpts{ MaxLookBackPeriod: 30 * time.Second, @@ -334,6 +349,127 @@ func TestLogFilterTripperware(t *testing.T) { require.Equal(t, 0, *queryCount) } +func TestInstantQueryTripperwareResultCaching(t *testing.T) { + // Goal is to make sure the instant query tripperware returns same results with and without cache middleware. + // 1. Get result without cache middleware. + // 2. Get result with middelware (with splitting). Result should be same. + // 3. Make same query with middleware (this time hitting the cache). Result should be same. + + testLocal := testConfig + testLocal.ShardedQueries = true + testLocal.CacheResults = false + testLocal.CacheIndexStatsResults = false + testLocal.CacheInstantMetricResults = false + var l = fakeLimits{ + maxQueryParallelism: 1, + tsdbMaxQueryParallelism: 1, + maxQueryBytesRead: 1000, + maxQuerierBytesRead: 100, + queryTimeout: 1 * time.Minute, + maxSeries: 1, + } + tpw, stopper, err := NewMiddleware(testLocal, testEngineOpts, nil, util_log.Logger, l, config.SchemaConfig{Configs: testSchemasTSDB}, nil, false, nil, constants.Loki) + if stopper != nil { + defer stopper.Stop() + } + require.NoError(t, err) + + q := `sum by (job) (bytes_rate({cluster="dev-us-central-0"}[15m]))` + lreq := &LokiInstantRequest{ + Query: q, + Limit: 1000, + TimeTs: testTime.Add(-4 * time.Hour), + Direction: logproto.FORWARD, + Path: "/loki/api/v1/query", + Plan: &plan.QueryPlan{ + AST: syntax.MustParseExpr(q), + }, + } + + ctx := user.InjectOrgID(context.Background(), "1") + + // Test MaxQueryBytesRead limit + statsCount, statsHandler := indexStatsResult(logproto.IndexStatsResponse{Bytes: 2000}) + queryCount, 
queryHandler := counter() + h := getQueryAndStatsHandler(queryHandler, statsHandler) + _, err = tpw.Wrap(h).Do(ctx, lreq) + require.Error(t, err) + require.Equal(t, 1, *statsCount) + require.Equal(t, 0, *queryCount) + + // Test MaxQuerierBytesRead limit + statsCount, statsHandler = indexStatsResult(logproto.IndexStatsResponse{Bytes: 200}) + queryCount, queryHandler = counter() + h = getQueryAndStatsHandler(queryHandler, statsHandler) + _, err = tpw.Wrap(h).Do(ctx, lreq) + require.Error(t, err) + require.Equal(t, 2, *statsCount) + require.Equal(t, 0, *queryCount) + + // 1. Without cache middleware. + count, queryHandler := promqlResult(vector) + _, statsHandler = indexStatsResult(logproto.IndexStatsResponse{Bytes: 10}) + h = getQueryAndStatsHandler(queryHandler, statsHandler) + h = tpw.Wrap(h) + lokiResponse, err := h.Do(ctx, lreq) + require.Equal(t, 1, *count) + require.NoError(t, err) + + exp, err := ResultToResponse(logqlmodel.Result{ + Data: vector, + }, nil) + require.NoError(t, err) + expected := exp.(*LokiPromResponse) + + require.IsType(t, &LokiPromResponse{}, lokiResponse) + concrete := lokiResponse.(*LokiPromResponse) + require.Equal(t, loghttp.ResultTypeVector, concrete.Response.Data.ResultType) + assertInstantSampleValues(t, expected, concrete) // assert actual sample values + + // 2. First time with caching enabled (no cache hit). + testLocal.CacheInstantMetricResults = true + l.instantMetricSplitDuration = map[string]time.Duration{ + // so making request [15] range, will have 2 subqueries aligned with [5m] giving total of [10m]. And 2 more subqueries for remaining [5m] aligning depending on exec time of the query. 
+ "1": 5 * time.Minute, + } + tpw, stopper, err = NewMiddleware(testLocal, testEngineOpts, nil, util_log.Logger, l, config.SchemaConfig{Configs: testSchemasTSDB}, nil, false, nil, constants.Loki) + if stopper != nil { + defer stopper.Stop() + } + require.NoError(t, err) + + count, queryHandler = promqlResult(vector) + _, statsHandler = indexStatsResult(logproto.IndexStatsResponse{Bytes: 10}) + h = getQueryAndStatsHandler(queryHandler, statsHandler) + lokiResponse, err = tpw.Wrap(h).Do(ctx, lreq) + require.Equal(t, 4, *count) // split into 4 subqueries. like explained in `instantMetricSplitDuration` limits config. + require.NoError(t, err) + + exp, err = ResultToResponse(logqlmodel.Result{ + Data: instantQueryResultWithCache(*count, 15*time.Minute, vector[0]), + }, nil) + require.NoError(t, err) + expected = exp.(*LokiPromResponse) + + require.IsType(t, &LokiPromResponse{}, lokiResponse) + concrete = lokiResponse.(*LokiPromResponse) + require.Equal(t, loghttp.ResultTypeVector, concrete.Response.Data.ResultType) + assertInstantSampleValues(t, expected, concrete) // assert actual sample values + + // 3. Second time with caching enabled (cache hit). + count, queryHandler = promqlResult(vector) + _, statsHandler = indexStatsResult(logproto.IndexStatsResponse{Bytes: 10}) + h = getQueryAndStatsHandler(queryHandler, statsHandler) + lokiResponse, err = tpw.Wrap(h).Do(ctx, lreq) + require.Equal(t, 0, *count) // no queries hit base handler, because all queries hit from cache. 
+ require.NoError(t, err) + + require.IsType(t, &LokiPromResponse{}, lokiResponse) + concrete = lokiResponse.(*LokiPromResponse) + require.Equal(t, loghttp.ResultTypeVector, concrete.Response.Data.ResultType) + assertInstantSampleValues(t, expected, concrete) // assert actual sample values +} + func TestInstantQueryTripperware(t *testing.T) { testShardingConfigNoCache := testConfig testShardingConfigNoCache.ShardedQueries = true @@ -357,7 +493,7 @@ func TestInstantQueryTripperware(t *testing.T) { lreq := &LokiInstantRequest{ Query: q, Limit: 1000, - TimeTs: testTime, + TimeTs: testTime.Add(-4 * time.Hour), // because vector data we return from mock handler has that time. Direction: logproto.FORWARD, Path: "/loki/api/v1/query", Plan: &plan.QueryPlan{ @@ -393,6 +529,8 @@ func TestInstantQueryTripperware(t *testing.T) { require.NoError(t, err) require.IsType(t, &LokiPromResponse{}, lokiResponse) + concrete := lokiResponse.(*LokiPromResponse) + require.Equal(t, loghttp.ResultTypeVector, concrete.Response.Data.ResultType) } func TestSeriesTripperware(t *testing.T) { @@ -865,6 +1003,8 @@ func TestPostQueries(t *testing.T) { handler, handler, handler, + handler, + handler, fakeLimits{}, ).Do(ctx, lreq) require.NoError(t, err) @@ -1164,8 +1304,11 @@ func TestMetricsTripperware_SplitShardStats(t *testing.T) { AST: syntax.MustParseExpr(`sum by (app) (rate({app="foo"} |= "foo"[2h]))`), }, }, - expectedSplitStats: 2, // [2h] interval split by 1h configured split interval - expectedShardStats: 8, // 2 time splits * 4 row shards + // [2h] interval split by 1h configured split interval. + // Also since we split align(testConfig.InstantQuerySplitAlign=true) with split interval (1h). + // 1 subquery will be exactly [1h] and 2 more subqueries to align with `testTime` used in query's TimeTs. 
+ expectedSplitStats: 3, + expectedShardStats: 12, // 3 time splits * 4 row shards }, { name: "instant query split not split", @@ -1380,6 +1523,9 @@ func (f fakeLimits) VolumeEnabled(_ string) bool { func (f fakeLimits) TSDBMaxBytesPerShard(_ string) int { return valid.DefaultTSDBMaxBytesPerShard } +func (f fakeLimits) TSDBShardingStrategy(string) string { + return logql.PowerOfTwoVersion.String() +} type ingesterQueryOpts struct { queryStoreOnly bool @@ -1401,7 +1547,7 @@ func counter() (*int, base.Handler) { lock.Lock() defer lock.Unlock() count++ - return base.NewEmptyPrometheusResponse(), nil + return base.NewEmptyPrometheusResponse(model.ValMatrix), nil }) } @@ -1469,6 +1615,37 @@ func seriesVolumeResult(v logproto.VolumeResponse) (*int, base.Handler) { }) } +// instantQueryResultWithCache used when instant query tripperware is created with split align and cache middleware. +// Assuming each subquery handler returns `val` sample, then this function returns overal result by combining all the subqueries sample values. +func instantQueryResultWithCache(split int, ts time.Duration, val promql.Sample) promql.Vector { + v := (val.F * float64(split)) / ts.Seconds() + return promql.Vector{ + promql.Sample{ + T: val.T, + F: v, + H: val.H, + Metric: val.Metric, + }, + } +} + +func assertInstantSampleValues(t *testing.T, exp *LokiPromResponse, got *LokiPromResponse) { + expR := exp.Response.Data.Result + gotR := got.Response.Data.Result + + expSamples := make([]logproto.LegacySample, 0) + for _, v := range expR { + expSamples = append(expSamples, v.Samples...) + } + + gotSamples := make([]logproto.LegacySample, 0) + for _, v := range gotR { + gotSamples = append(gotSamples, v.Samples...) 
+ } + + require.Equal(t, expSamples, gotSamples) +} + type fakeHandler struct { count int lock sync.Mutex diff --git a/pkg/querier/queryrange/serialize.go b/pkg/querier/queryrange/serialize.go index b85f707a692b7..d7a5eb125d30c 100644 --- a/pkg/querier/queryrange/serialize.go +++ b/pkg/querier/queryrange/serialize.go @@ -5,10 +5,10 @@ import ( "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util/httpreq" - serverutil "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util/httpreq" + serverutil "github.com/grafana/loki/v3/pkg/util/server" ) type serializeRoundTripper struct { diff --git a/pkg/querier/queryrange/serialize_test.go b/pkg/querier/queryrange/serialize_test.go index f926da9f19b55..0bd6c36aa4bd6 100644 --- a/pkg/querier/queryrange/serialize_test.go +++ b/pkg/querier/queryrange/serialize_test.go @@ -11,10 +11,10 @@ import ( "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) func TestResponseFormat(t *testing.T) { diff --git a/pkg/querier/queryrange/series_cache.go b/pkg/querier/queryrange/series_cache.go index 5120d61fb0b4f..c7bf2e165117b 100644 --- a/pkg/querier/queryrange/series_cache.go +++ b/pkg/querier/queryrange/series_cache.go @@ -14,10 +14,10 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - 
"github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/validation" ) type cacheKeySeries struct { diff --git a/pkg/querier/queryrange/series_cache_test.go b/pkg/querier/queryrange/series_cache_test.go index 6ba869a69411a..97a5eabb9ae2b 100644 --- a/pkg/querier/queryrange/series_cache_test.go +++ b/pkg/querier/queryrange/series_cache_test.go @@ -11,13 +11,13 @@ import ( "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util" ) var ( diff --git a/pkg/querier/queryrange/shard_resolver.go b/pkg/querier/queryrange/shard_resolver.go index 652637a724655..f6bed294eaae0 100644 --- a/pkg/querier/queryrange/shard_resolver.go +++ b/pkg/querier/queryrange/shard_resolver.go @@ -3,28 +3,30 @@ package queryrange import ( "context" "fmt" - "math" + "net/http" strings "strings" "time" "github.com/dustin/go-humanize" + "github.com/efficientgo/core/errors" "github.com/go-kit/log" "github.com/go-kit/log/level" "github.com/grafana/dskit/concurrency" + "github.com/grafana/dskit/httpgrpc" "github.com/grafana/dskit/tenant" "github.com/opentracing/opentracing-go" 
"github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - utilMath "github.com/grafana/loki/pkg/util/math" - "github.com/grafana/loki/pkg/util/spanlogger" - "github.com/grafana/loki/pkg/util/validation" - valid "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + logqlstats "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/util/validation" ) func shardResolverForConf( @@ -35,14 +37,15 @@ func shardResolverForConf( maxParallelism int, maxShards int, r queryrangebase.Request, - handler queryrangebase.Handler, + statsHandler, next queryrangebase.Handler, limits Limits, ) (logql.ShardResolver, bool) { if conf.IndexType == config.TSDBType { return &dynamicShardResolver{ ctx: ctx, logger: logger, - handler: handler, + statsHandler: statsHandler, + next: next, limits: limits, from: model.Time(r.GetStart().UnixMilli()), through: model.Time(r.GetEnd().UnixMilli()), @@ -58,10 +61,13 @@ func shardResolverForConf( } type dynamicShardResolver struct { - ctx context.Context - handler queryrangebase.Handler - logger log.Logger - limits Limits + ctx context.Context + // TODO(owen-d): shouldn't have to fork handlers here -- one should just transparently handle the right logic + // depending on the underlying type? 
+ statsHandler queryrangebase.Handler // index stats handler (hooked up to results cache, etc) + next queryrangebase.Handler // next handler in the chain (used for non-stats reqs) + logger log.Logger + limits Limits from, through model.Time maxParallelism int @@ -154,7 +160,7 @@ func (r *dynamicShardResolver) GetStats(e syntax.Expr) (stats.Stats, error) { grps = append(grps, syntax.MatcherRange{}) } - results, err := getStatsForMatchers(ctx, log, r.handler, r.from, r.through, grps, r.maxParallelism, r.defaultLookback) + results, err := getStatsForMatchers(ctx, log, r.statsHandler, r.from, r.through, grps, r.maxParallelism, r.defaultLookback) if err != nil { return stats.Stats{}, err } @@ -192,7 +198,7 @@ func (r *dynamicShardResolver) Shards(e syntax.Expr) (int, uint64, error) { } maxBytesPerShard := validation.SmallestPositiveIntPerTenant(tenantIDs, r.limits.TSDBMaxBytesPerShard) - factor := guessShardFactor(combined, maxBytesPerShard, r.maxShards) + factor := sharding.GuessShardFactor(combined.Bytes, uint64(maxBytesPerShard), r.maxShards) var bytesPerShard = combined.Bytes if factor > 0 { @@ -211,34 +217,84 @@ func (r *dynamicShardResolver) Shards(e syntax.Expr) (int, uint64, error) { return factor, bytesPerShard, nil } -// Since we shard by powers of two and we increase shard factor -// once each shard surpasses maxBytesPerShard, if the shard factor -// is at least two, the range of data per shard is (maxBytesPerShard/2, maxBytesPerShard] -// For instance, for a maxBytesPerShard of 500MB and a query touching 1000MB, we split into two shards of 500MB. -// If there are 1004MB, we split into four shards of 251MB. 
-func guessShardFactor(stats stats.Stats, maxBytesPerShard, maxShards int) int { - // If maxBytesPerShard is 0, we use the default value - // to avoid division by zero - if maxBytesPerShard < 1 { - maxBytesPerShard = valid.DefaultTSDBMaxBytesPerShard +func (r *dynamicShardResolver) ShardingRanges(expr syntax.Expr, targetBytesPerShard uint64) ([]logproto.Shard, error) { + sp, ctx := opentracing.StartSpanFromContext(r.ctx, "dynamicShardResolver.ShardingRanges") + defer sp.Finish() + log := spanlogger.FromContext(ctx) + defer log.Finish() + + adjustedFrom := r.from + + // NB(owen-d): there should only ever be 1 matcher group passed + // to this call as we call it separately for different legs + // of binary ops, but I'm putting in the loop for completion + grps, err := syntax.MatcherGroups(expr) + if err != nil { + return nil, err } - minShards := float64(stats.Bytes) / float64(maxBytesPerShard) + for _, grp := range grps { + diff := grp.Interval + grp.Offset - // round up to nearest power of 2 - power := math.Ceil(math.Log2(minShards)) + // For instant queries, when start == end, + // we have a default lookback which we add here + if grp.Interval == 0 { + diff = diff + r.defaultLookback + } - // Since x^0 == 1 and we only support factors of 2 - // reset this edge case manually - factor := int(math.Pow(2, power)) - if maxShards > 0 { - factor = utilMath.Min(factor, maxShards) + // use the oldest adjustedFrom + if r.from.Add(-diff).Before(adjustedFrom) { + adjustedFrom = r.from.Add(-diff) + } } - // shortcut: no need to run any sharding logic when factor=1 - // as it's the same as no sharding - if factor == 1 { - factor = 0 + exprStr := expr.String() + // try to get shards for the given expression + // if it fails, fallback to linearshards based on stats + resp, err := r.next.Do(ctx, &logproto.ShardsRequest{ + From: adjustedFrom, + Through: r.through, + Query: expr.String(), + TargetBytesPerShard: targetBytesPerShard, + }) + + if err != nil { + // check unimplemented 
to fallback + // TODO(owen-d): fix if this isn't right + if resp, ok := httpgrpc.HTTPResponseFromError(err); ok && (resp.Code == http.StatusNotFound) { + n, bytesPerShard, err := r.Shards(expr) + if err != nil { + return nil, errors.Wrap(err, "falling back to building linear shards from stats") + } + level.Debug(log).Log( + "msg", "falling back to building linear shards from stats", + "bytes_per_shard", bytesPerShard, + "shards", n, + "query", exprStr, + ) + return sharding.LinearShards(n, uint64(n)*bytesPerShard), nil + } + + return nil, errors.Wrapf(err, "failed to get shards for expression, got %T: %+v", err, err) + + } + + casted, ok := resp.(*ShardsResponse) + if !ok { + return nil, fmt.Errorf("expected *ShardsResponse while querying index, got %T", resp) } - return factor + + // accumulate stats + logqlstats.JoinResults(ctx, casted.Response.Statistics) + + level.Debug(log).Log( + "msg", "retrieved sharding ranges", + "target_bytes_per_shard", targetBytesPerShard, + "shards", len(casted.Response.Shards), + "query", exprStr, + "total_chunks", casted.Response.Statistics.Index.TotalChunks, + "post_filter_chunks:", casted.Response.Statistics.Index.PostFilterChunks, + ) + + return casted.Response.Shards, err } diff --git a/pkg/querier/queryrange/split_by_interval.go b/pkg/querier/queryrange/split_by_interval.go index ef05aa969ec1e..92c956bbfed82 100644 --- a/pkg/querier/queryrange/split_by_interval.go +++ b/pkg/querier/queryrange/split_by_interval.go @@ -12,16 +12,16 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/math" "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - 
"github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/validation" ) type lokiResult struct { @@ -223,7 +223,7 @@ func (h *splitByInterval) Do(ctx context.Context, r queryrangebase.Request) (que intervals[i], intervals[j] = intervals[j], intervals[i] } } - case *LokiSeriesRequest, *LabelRequest, *logproto.IndexStatsRequest, *logproto.VolumeRequest: + case *LokiSeriesRequest, *LabelRequest, *logproto.IndexStatsRequest, *logproto.VolumeRequest, *logproto.ShardsRequest: // Set this to 0 since this is not used in Series/Labels/Index Request. limit = 0 default: diff --git a/pkg/querier/queryrange/split_by_interval_test.go b/pkg/querier/queryrange/split_by_interval_test.go index 6c6d66e3bb662..c74ec05c252c7 100644 --- a/pkg/querier/queryrange/split_by_interval_test.go +++ b/pkg/querier/queryrange/split_by_interval_test.go @@ -15,15 +15,15 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/config" + 
"github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" ) var nilMetrics = NewSplitByMetrics(nil) diff --git a/pkg/querier/queryrange/split_by_range.go b/pkg/querier/queryrange/split_by_range.go index 16076cd948596..380466d04408b 100644 --- a/pkg/querier/queryrange/split_by_range.go +++ b/pkg/querier/queryrange/split_by_range.go @@ -11,13 +11,13 @@ import ( "github.com/grafana/dskit/tenant" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/marshal" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/util/validation" ) type splitByRange struct { diff --git a/pkg/querier/queryrange/split_by_range_test.go b/pkg/querier/queryrange/split_by_range_test.go index af66c10a2f08a..0f61c3c276b1f 100644 --- a/pkg/querier/queryrange/split_by_range_test.go +++ b/pkg/querier/queryrange/split_by_range_test.go @@ -11,11 +11,11 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" + 
"github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) func Test_RangeVectorSplitAlign(t *testing.T) { diff --git a/pkg/querier/queryrange/splitters.go b/pkg/querier/queryrange/splitters.go index eddcc10edf491..30bc3da18392d 100644 --- a/pkg/querier/queryrange/splitters.go +++ b/pkg/querier/queryrange/splitters.go @@ -5,10 +5,10 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/validation" ) type splitter interface { @@ -75,6 +75,15 @@ func (s *defaultSplitter) split(execTime time.Time, tenantIDs []string, req quer Matchers: r.GetMatchers(), }) } + case *logproto.ShardsRequest: + factory = func(start, end time.Time) { + reqs = append(reqs, &logproto.ShardsRequest{ + From: model.TimeFromUnix(start.Unix()), + Through: model.TimeFromUnix(end.Unix()), + Query: r.Query, + TargetBytesPerShard: r.TargetBytesPerShard, + }) + } case *logproto.VolumeRequest: factory = func(start, end time.Time) { reqs = append(reqs, &logproto.VolumeRequest{ diff --git a/pkg/querier/queryrange/stats.go b/pkg/querier/queryrange/stats.go index 71f93959c3b69..4e5b646e7429b 100644 --- a/pkg/querier/queryrange/stats.go +++ b/pkg/querier/queryrange/stats.go @@ -14,14 +14,14 @@ import ( "github.com/grafana/dskit/middleware" promql_parser "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/logproto" - - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - util_log "github.com/grafana/loki/pkg/util/log" - 
"github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logproto" + + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) type ctxKeyType string @@ -29,12 +29,15 @@ type ctxKeyType string const ctxKey ctxKeyType = "stats" const ( - queryTypeLog = "log" - queryTypeMetric = "metric" - queryTypeSeries = "series" - queryTypeLabel = "label" - queryTypeStats = "stats" - queryTypeVolume = "volume" + queryTypeLog = "log" + queryTypeMetric = "metric" + queryTypeSeries = "series" + queryTypeLabel = "label" + queryTypeStats = "stats" + queryTypeVolume = "volume" + queryTypeShards = "shards" + queryTypeDetectedFields = "detected_fields" + queryTypeDetectedLabels = "detected_labels" ) var ( @@ -60,6 +63,10 @@ func recordQueryMetrics(data *queryData) { logql.RecordStatsQueryMetrics(data.ctx, logger, data.params.Start(), data.params.End(), data.params.QueryString(), data.status, *data.statistics) case queryTypeVolume: logql.RecordVolumeQueryMetrics(data.ctx, logger, data.params.Start(), data.params.End(), data.params.QueryString(), data.params.Limit(), data.params.Step(), data.status, *data.statistics) + case queryTypeDetectedFields: + logql.RecordDetectedFieldsQueryMetrics(data.ctx, logger, data.params.Start(), data.params.End(), data.params.QueryString(), data.status, *data.statistics) + case queryTypeDetectedLabels: + logql.RecordDetectedLabelsQueryMetrics(data.ctx, logger, data.params.Start(), data.params.End(), data.params.QueryString(), data.status, *data.statistics) default: level.Error(logger).Log("msg", "failed to record query metrics", "err", fmt.Errorf("expected one of the *LokiRequest, *LokiInstantRequest, *LokiSeriesRequest, *LokiLabelNamesRequest, got %s", data.queryType)) } @@ -160,6 
+167,13 @@ func StatsCollectorMiddleware() queryrangebase.Middleware { responseStats = &stats.Result{} // TODO: support stats in proto totalEntries = 1 queryType = queryTypeStats + case *ShardsResponse: + responseStats = &r.Response.Statistics + queryType = queryTypeShards + case *DetectedFieldsResponse: + responseStats = &stats.Result{} // TODO: support stats in detected fields + totalEntries = 1 + queryType = queryTypeDetectedFields default: level.Warn(logger).Log("msg", fmt.Sprintf("cannot compute stats, unexpected type: %T", resp)) } diff --git a/pkg/querier/queryrange/stats_test.go b/pkg/querier/queryrange/stats_test.go index 28f8d12de7f6d..8c48a9ece8538 100644 --- a/pkg/querier/queryrange/stats_test.go +++ b/pkg/querier/queryrange/stats_test.go @@ -12,9 +12,9 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) func TestStatsCollectorMiddleware(t *testing.T) { diff --git a/pkg/querier/queryrange/views.go b/pkg/querier/queryrange/views.go index be9eee016b4b5..b34020934c1c5 100644 --- a/pkg/querier/queryrange/views.go +++ b/pkg/querier/queryrange/views.go @@ -11,8 +11,8 @@ import ( "github.com/richardartoul/molecule" "github.com/richardartoul/molecule/src/codec" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) // Pull fiel numbers from protobuf message descriptions. 
diff --git a/pkg/querier/queryrange/views_test.go b/pkg/querier/queryrange/views_test.go index c4c28fe462c0d..7d1938dacb775 100644 --- a/pkg/querier/queryrange/views_test.go +++ b/pkg/querier/queryrange/views_test.go @@ -12,11 +12,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util/marshal" ) func TestGetLokiSeriesResponse(t *testing.T) { diff --git a/pkg/querier/queryrange/volume.go b/pkg/querier/queryrange/volume.go index b12fbd48d2459..d4c40964d1423 100644 --- a/pkg/querier/queryrange/volume.go +++ b/pkg/querier/queryrange/volume.go @@ -9,14 +9,14 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" ) func 
NewVolumeMiddleware() queryrangebase.Middleware { diff --git a/pkg/querier/queryrange/volume_cache.go b/pkg/querier/queryrange/volume_cache.go index 5ae2af4111150..6f4f73de568ed 100644 --- a/pkg/querier/queryrange/volume_cache.go +++ b/pkg/querier/queryrange/volume_cache.go @@ -12,12 +12,12 @@ import ( "github.com/grafana/dskit/tenant" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/validation" ) type VolumeSplitter struct { diff --git a/pkg/querier/queryrange/volume_cache_test.go b/pkg/querier/queryrange/volume_cache_test.go index 038d8fa925f57..cdf492ee3b1bf 100644 --- a/pkg/querier/queryrange/volume_cache_test.go +++ b/pkg/querier/queryrange/volume_cache_test.go @@ -10,15 +10,15 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + 
"github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) func TestVolumeCache(t *testing.T) { diff --git a/pkg/querier/queryrange/volume_test.go b/pkg/querier/queryrange/volume_test.go index 8d8b8d48a3f23..7327a58e15d9e 100644 --- a/pkg/querier/queryrange/volume_test.go +++ b/pkg/querier/queryrange/volume_test.go @@ -9,11 +9,12 @@ import ( "github.com/grafana/dskit/user" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/push" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" ) const forRangeQuery = false diff --git a/pkg/querier/stats/stats.pb.go b/pkg/querier/stats/stats.pb.go index bb56bb26c49c0..bae01dd0eeed5 100644 --- a/pkg/querier/stats/stats.pb.go +++ b/pkg/querier/stats/stats.pb.go @@ -98,26 +98,27 @@ func init() { func init() { proto.RegisterFile("pkg/querier/stats/stats.proto", fileDescriptor_8ca2404f80bab2e8) } var fileDescriptor_8ca2404f80bab2e8 = []byte{ - // 302 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0xd0, 0xb1, 0x4e, 0x3a, 0x31, - 0x1c, 0xc0, 0xf1, 0xfe, 0xfe, 0x7f, 0x31, 0x78, 0x4e, 0x9e, 0x0c, 0x48, 0xe2, 0x0f, 0xe2, 0x84, - 0xcb, 0xd5, 0xe8, 0x0b, 0x18, 0xf0, 0x09, 0xc0, 0xc9, 0xe5, 0xd2, 0x3b, 0x4a, 0x69, 0x38, 0xae, - 0x78, 0xd7, 0xc6, 0xb8, 0xf9, 0x08, 0x8e, 0x3e, 0x82, 0x89, 0x2f, 0xc2, 0xc8, 0xc8, 0xa4, 0x52, - 0x16, 0x47, 0x1e, 0xc1, 0xb4, 0x07, 0x93, 0x4b, 0xd3, 0x5f, 0x3f, 0xf9, 0x26, 
0x6d, 0x83, 0xf3, - 0xf9, 0x54, 0xd0, 0x47, 0xc3, 0x0b, 0xc9, 0x0b, 0x5a, 0x6a, 0xa6, 0xcb, 0x6a, 0x8d, 0xe6, 0x85, - 0xd2, 0x2a, 0xac, 0xf9, 0xa1, 0xd5, 0x10, 0x4a, 0x28, 0x7f, 0x42, 0xdd, 0xae, 0xc2, 0x16, 0x0a, - 0xa5, 0x44, 0xc6, 0xa9, 0x9f, 0x12, 0x33, 0xa6, 0x23, 0x53, 0x30, 0x2d, 0x55, 0x5e, 0xf9, 0xc5, - 0x07, 0x04, 0xb5, 0xa1, 0xeb, 0xc3, 0xdb, 0xe0, 0xe8, 0x89, 0x65, 0x59, 0xac, 0xe5, 0x8c, 0x37, - 0xa1, 0x03, 0xdd, 0xe3, 0xeb, 0xb3, 0xa8, 0xaa, 0xa3, 0x7d, 0x1d, 0xdd, 0xed, 0xea, 0x5e, 0x7d, - 0xf1, 0xd9, 0x26, 0x6f, 0x5f, 0x6d, 0x18, 0xd4, 0x5d, 0x75, 0x2f, 0x67, 0x3c, 0xbc, 0x0a, 0x1a, - 0x63, 0xae, 0xd3, 0x09, 0x1f, 0xc5, 0xa5, 0xbb, 0x6c, 0x19, 0xa7, 0xca, 0xe4, 0xba, 0xf9, 0xaf, - 0x03, 0xdd, 0x83, 0x41, 0xb8, 0xb3, 0xa1, 0xa7, 0xbe, 0x93, 0x30, 0x0a, 0x4e, 0xf7, 0x45, 0x3a, - 0x31, 0xf9, 0x34, 0x4e, 0x9e, 0x35, 0x2f, 0x9b, 0xff, 0x7d, 0x70, 0xb2, 0xa3, 0xbe, 0x93, 0x9e, - 0x83, 0x5e, 0xbc, 0x5c, 0x23, 0x59, 0xad, 0x91, 0x6c, 0xd7, 0x08, 0x2f, 0x16, 0xe1, 0xdd, 0x22, - 0x2c, 0x2c, 0xc2, 0xd2, 0x22, 0x7c, 0x5b, 0x84, 0x1f, 0x8b, 0x64, 0x6b, 0x11, 0x5e, 0x37, 0x48, - 0x96, 0x1b, 0x24, 0xab, 0x0d, 0x92, 0x87, 0x4b, 0x21, 0xf5, 0xc4, 0x24, 0x51, 0xaa, 0x66, 0x54, - 0x14, 0x6c, 0xcc, 0x72, 0x46, 0x33, 0x35, 0x95, 0xf4, 0xcf, 0xbf, 0x26, 0x87, 0xfe, 0xa5, 0x37, - 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xb4, 0xe6, 0x22, 0x2b, 0x73, 0x01, 0x00, 0x00, + // 305 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0xd0, 0x31, 0x4e, 0xf3, 0x30, + 0x14, 0xc0, 0x71, 0xbf, 0xef, 0xa3, 0xa8, 0x84, 0x89, 0xd0, 0xa1, 0x54, 0xe2, 0xb5, 0x62, 0xea, + 0x80, 0x62, 0x44, 0x2f, 0x80, 0x5a, 0x4e, 0xd0, 0x32, 0xb1, 0x44, 0x49, 0xea, 0xba, 0x56, 0xd3, + 0xb8, 0x24, 0x36, 0x88, 0x8d, 0x23, 0x30, 0x72, 0x04, 0x24, 0x2e, 0xd2, 0xb1, 0x63, 0x27, 0xa0, + 0xee, 0xc2, 0xd8, 0x23, 0x20, 0x3b, 0xe9, 0xc4, 0x62, 0xf9, 0xf9, 0xa7, 0xbf, 0x64, 0xdb, 0x3b, + 0x5f, 0xcc, 0x38, 0x7d, 0xd0, 0x2c, 0x17, 0x2c, 0xa7, 0x85, 0x8a, 0x54, 0x51, 
0xae, 0xc1, 0x22, + 0x97, 0x4a, 0xfa, 0x35, 0x37, 0xb4, 0x1a, 0x5c, 0x72, 0xe9, 0x4e, 0xa8, 0xdd, 0x95, 0xd8, 0x42, + 0x2e, 0x25, 0x4f, 0x19, 0x75, 0x53, 0xac, 0x27, 0x74, 0xac, 0xf3, 0x48, 0x09, 0x99, 0x95, 0x7e, + 0xf1, 0x01, 0x5e, 0x6d, 0x64, 0x7b, 0xff, 0xc6, 0x3b, 0x7a, 0x8a, 0xd2, 0x34, 0x54, 0x62, 0xce, + 0x9a, 0xd0, 0x81, 0xee, 0xf1, 0xf5, 0x59, 0x50, 0xd6, 0xc1, 0xbe, 0x0e, 0x6e, 0xab, 0xba, 0x5f, + 0x5f, 0x7e, 0xb6, 0xc9, 0xdb, 0x57, 0x1b, 0x86, 0x75, 0x5b, 0xdd, 0x89, 0x39, 0xf3, 0xaf, 0xbc, + 0xc6, 0x84, 0xa9, 0x64, 0xca, 0xc6, 0x61, 0x61, 0x2f, 0x5b, 0x84, 0x89, 0xd4, 0x99, 0x6a, 0xfe, + 0xeb, 0x40, 0xf7, 0x60, 0xe8, 0x57, 0x36, 0x72, 0x34, 0xb0, 0xe2, 0x07, 0xde, 0xe9, 0xbe, 0x48, + 0xa6, 0x3a, 0x9b, 0x85, 0xf1, 0xb3, 0x62, 0x45, 0xf3, 0xbf, 0x0b, 0x4e, 0x2a, 0x1a, 0x58, 0xe9, + 0x5b, 0xe8, 0xc7, 0xab, 0x0d, 0x92, 0xf5, 0x06, 0xc9, 0x6e, 0x83, 0xf0, 0x62, 0x10, 0xde, 0x0d, + 0xc2, 0xd2, 0x20, 0xac, 0x0c, 0xc2, 0xb7, 0x41, 0xf8, 0x31, 0x48, 0x76, 0x06, 0xe1, 0x75, 0x8b, + 0x64, 0xb5, 0x45, 0xb2, 0xde, 0x22, 0xb9, 0xbf, 0xe4, 0x42, 0x4d, 0x75, 0x1c, 0x24, 0x72, 0x4e, + 0x79, 0x1e, 0x4d, 0xa2, 0x2c, 0xa2, 0xa9, 0x9c, 0x09, 0xfa, 0xd8, 0xa3, 0x7f, 0xbe, 0x36, 0x3e, + 0x74, 0x8f, 0xed, 0xfd, 0x06, 0x00, 0x00, 0xff, 0xff, 0x67, 0x51, 0xd3, 0x06, 0x76, 0x01, 0x00, + 0x00, } func (this *Stats) Equal(that interface{}) bool { diff --git a/pkg/querier/stats/stats.proto b/pkg/querier/stats/stats.proto index 1315ef7774ce3..2aa02df7351d7 100644 --- a/pkg/querier/stats/stats.proto +++ b/pkg/querier/stats/stats.proto @@ -5,7 +5,7 @@ package stats; import "gogoproto/gogo.proto"; import "google/protobuf/duration.proto"; -option go_package = "github.com/grafana/loki/pkg/querier/stats"; +option go_package = "github.com/grafana/loki/v3/pkg/querier/stats"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; diff --git a/pkg/querier/tail.go b/pkg/querier/tail.go index 35cb4bc18e7a7..1bdc01159ed65 100644 --- a/pkg/querier/tail.go +++ 
b/pkg/querier/tail.go @@ -12,10 +12,10 @@ import ( "github.com/go-kit/log/level" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/iter" - loghttp "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/iter" + loghttp "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/querier/tail_mock_test.go b/pkg/querier/tail_mock_test.go index 94f135e77648e..a1d161d2f2c95 100644 --- a/pkg/querier/tail_mock_test.go +++ b/pkg/querier/tail_mock_test.go @@ -1,6 +1,6 @@ package querier -import "github.com/grafana/loki/pkg/logproto" +import "github.com/grafana/loki/v3/pkg/logproto" func mockTailResponse(stream logproto.Stream) *logproto.TailResponse { return &logproto.TailResponse{ diff --git a/pkg/querier/tail_test.go b/pkg/querier/tail_test.go index 07d3743af03c5..4867574e5792c 100644 --- a/pkg/querier/tail_test.go +++ b/pkg/querier/tail_test.go @@ -12,9 +12,9 @@ import ( gokitlog "github.com/go-kit/log" - "github.com/grafana/loki/pkg/iter" - loghttp "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/iter" + loghttp "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git a/pkg/querier/testutils.go b/pkg/querier/testutils.go index 5b38ad11eba5f..34cae5f70580d 100644 --- a/pkg/querier/testutils.go +++ b/pkg/querier/testutils.go @@ -3,7 +3,7 @@ package querier import ( "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) func DefaultLimitsConfig() validation.Limits { diff --git a/pkg/querier/worker/frontend_processor.go b/pkg/querier/worker/frontend_processor.go index 45c61862d0598..a0e3569359bfa 100644 --- a/pkg/querier/worker/frontend_processor.go +++ 
b/pkg/querier/worker/frontend_processor.go @@ -13,9 +13,9 @@ import ( "github.com/opentracing/opentracing-go" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v1/frontendv1pb" - querier_stats "github.com/grafana/loki/pkg/querier/stats" - httpgrpcutil "github.com/grafana/loki/pkg/util/httpgrpc" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1/frontendv1pb" + querier_stats "github.com/grafana/loki/v3/pkg/querier/stats" + httpgrpcutil "github.com/grafana/loki/v3/pkg/util/httpgrpc" ) var ( diff --git a/pkg/querier/worker/frontend_processor_test.go b/pkg/querier/worker/frontend_processor_test.go index cecdb7bfe27d3..85eac4338a373 100644 --- a/pkg/querier/worker/frontend_processor_test.go +++ b/pkg/querier/worker/frontend_processor_test.go @@ -14,8 +14,8 @@ import ( "google.golang.org/grpc/credentials/insecure" "google.golang.org/grpc/test/bufconn" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/util/test" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/util/test" ) const bufConnSize = 1024 * 1024 diff --git a/pkg/querier/worker/scheduler_processor.go b/pkg/querier/worker/scheduler_processor.go index 16d0e59d1ed14..00b08219e5dbe 100644 --- a/pkg/querier/worker/scheduler_processor.go +++ b/pkg/querier/worker/scheduler_processor.go @@ -25,12 +25,12 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v2/frontendv2pb" - "github.com/grafana/loki/pkg/querier/queryrange" - querier_stats "github.com/grafana/loki/pkg/querier/stats" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" - httpgrpcutil "github.com/grafana/loki/pkg/util/httpgrpc" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2/frontendv2pb" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + querier_stats "github.com/grafana/loki/v3/pkg/querier/stats" + 
"github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" + httpgrpcutil "github.com/grafana/loki/v3/pkg/util/httpgrpc" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func newSchedulerProcessor(cfg Config, handler RequestHandler, log log.Logger, metrics *Metrics, codec RequestCodec) (*schedulerProcessor, []services.Service) { diff --git a/pkg/querier/worker/scheduler_processor_test.go b/pkg/querier/worker/scheduler_processor_test.go index 154ba1ae4fa73..264d5a1769fd1 100644 --- a/pkg/querier/worker/scheduler_processor_test.go +++ b/pkg/querier/worker/scheduler_processor_test.go @@ -19,9 +19,9 @@ import ( "google.golang.org/grpc/codes" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" ) func TestSchedulerProcessor_processQueriesOnSingleStream(t *testing.T) { diff --git a/pkg/querier/worker/util.go b/pkg/querier/worker/util.go index 812236809a097..3ebb0029bfe76 100644 --- a/pkg/querier/worker/util.go +++ b/pkg/querier/worker/util.go @@ -14,8 +14,8 @@ import ( "go.uber.org/atomic" "google.golang.org/grpc/codes" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/util/server" ) // newExecutionContext returns a new execution context (execCtx) that wraps the input workerCtx and diff --git a/pkg/querier/worker/util_test.go b/pkg/querier/worker/util_test.go index 25dd8127a0da4..96b9be9891cf4 100644 --- a/pkg/querier/worker/util_test.go +++ b/pkg/querier/worker/util_test.go @@ -10,10 +10,10 @@ import ( "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" - 
"github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util/server" ) type HandlerFunc func(context.Context, queryrangebase.Request) (queryrangebase.Response, error) diff --git a/pkg/querier/worker/worker.go b/pkg/querier/worker/worker.go index b2e50b205d143..bc41a49d9075d 100644 --- a/pkg/querier/worker/worker.go +++ b/pkg/querier/worker/worker.go @@ -17,9 +17,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util" ) type Config struct { diff --git a/pkg/querier/worker/worker_test.go b/pkg/querier/worker/worker_test.go index 68791b214f178..fb311925fb207 100644 --- a/pkg/querier/worker/worker_test.go +++ b/pkg/querier/worker/worker_test.go @@ -12,7 +12,7 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/util/test" + "github.com/grafana/loki/v3/pkg/util/test" ) func TestResetConcurrency(t *testing.T) { diff --git a/pkg/querier/worker_service.go b/pkg/querier/worker_service.go index f95da0eba16d4..40415678122f1 100644 --- a/pkg/querier/worker_service.go +++ b/pkg/querier/worker_service.go @@ -10,9 +10,9 @@ import ( "github.com/grafana/dskit/services" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - querier_worker "github.com/grafana/loki/pkg/querier/worker" - 
util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + querier_worker "github.com/grafana/loki/v3/pkg/querier/worker" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type WorkerServiceConfig struct { diff --git a/pkg/queue/dequeue_qos_test.go b/pkg/queue/dequeue_qos_test.go index c889cbe8f4c60..db876e1d738fd 100644 --- a/pkg/queue/dequeue_qos_test.go +++ b/pkg/queue/dequeue_qos_test.go @@ -12,7 +12,7 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/queue/queue_test.go b/pkg/queue/queue_test.go index a21ce8af622cf..b51ccf7cc2a06 100644 --- a/pkg/queue/queue_test.go +++ b/pkg/queue/queue_test.go @@ -15,7 +15,7 @@ import ( "go.uber.org/atomic" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) func BenchmarkGetNextRequest(b *testing.B) { diff --git a/pkg/queue/tenant_queues.go b/pkg/queue/tenant_queues.go index 69fac6ed60a01..46f9f7adccd43 100644 --- a/pkg/queue/tenant_queues.go +++ b/pkg/queue/tenant_queues.go @@ -14,9 +14,9 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/validation" ) type intPointerMap map[string]*int diff --git a/pkg/queue/tenant_queues_test.go b/pkg/queue/tenant_queues_test.go index 4f49b8233304d..d1b52480e5458 100644 --- a/pkg/queue/tenant_queues_test.go +++ b/pkg/queue/tenant_queues_test.go @@ -16,7 +16,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/scheduler/limits" + 
"github.com/grafana/loki/v3/pkg/scheduler/limits" ) var noQueueLimits = limits.NewQueueLimits(nil) diff --git a/pkg/ruler/base/api.go b/pkg/ruler/base/api.go index 04a303993228b..4e4d71d5691db 100644 --- a/pkg/ruler/base/api.go +++ b/pkg/ruler/base/api.go @@ -23,10 +23,10 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // In order to reimplement the prometheus rules API, a large amount of code was copied over diff --git a/pkg/ruler/base/api_test.go b/pkg/ruler/base/api_test.go index c14f5de8d4614..f61f2fe3f0237 100644 --- a/pkg/ruler/base/api_test.go +++ b/pkg/ruler/base/api_test.go @@ -24,7 +24,7 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v3" - "github.com/grafana/loki/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" ) func TestRuler_PrometheusRules(t *testing.T) { diff --git a/pkg/ruler/base/client_pool_test.go b/pkg/ruler/base/client_pool_test.go index 05fc23290033c..fa068c2580bdf 100644 --- a/pkg/ruler/base/client_pool_test.go +++ b/pkg/ruler/base/client_pool_test.go @@ -14,7 +14,7 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) func Test_newRulerClientFactory(t *testing.T) { diff --git a/pkg/ruler/base/compat.go b/pkg/ruler/base/compat.go index 822537e419bc7..cfe18fcebd087 100644 --- a/pkg/ruler/base/compat.go +++ b/pkg/ruler/base/compat.go @@ -20,9 +20,9 @@ import ( "github.com/prometheus/prometheus/rules" "github.com/prometheus/prometheus/storage" - "github.com/grafana/loki/pkg/logproto" - 
"github.com/grafana/loki/pkg/ruler/config" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ruler/config" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // Pusher is an ingester server that accepts pushes. diff --git a/pkg/ruler/base/compat_test.go b/pkg/ruler/base/compat_test.go index d4cdf4f298a34..e37ef6646811a 100644 --- a/pkg/ruler/base/compat_test.go +++ b/pkg/ruler/base/compat_test.go @@ -18,7 +18,7 @@ import ( "github.com/prometheus/prometheus/promql/parser" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type fakePusher struct { diff --git a/pkg/ruler/base/error_translate_queryable.go b/pkg/ruler/base/error_translate_queryable.go index 0785a1421ac6e..6e65ed1aaafa2 100644 --- a/pkg/ruler/base/error_translate_queryable.go +++ b/pkg/ruler/base/error_translate_queryable.go @@ -10,8 +10,8 @@ import ( "github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/util/annotations" - storage_errors "github.com/grafana/loki/pkg/storage/errors" - "github.com/grafana/loki/pkg/validation" + storage_errors "github.com/grafana/loki/v3/pkg/storage/errors" + "github.com/grafana/loki/v3/pkg/validation" ) // TranslateToPromqlAPIError converts error to one of promql.Errors for consumption in PromQL API. 
diff --git a/pkg/ruler/base/lifecycle_test.go b/pkg/ruler/base/lifecycle_test.go index ea1496712839e..2fefc62bf6cbb 100644 --- a/pkg/ruler/base/lifecycle_test.go +++ b/pkg/ruler/base/lifecycle_test.go @@ -14,8 +14,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/util/test" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/util/test" ) // TestRulerShutdown tests shutting down ruler unregisters correctly diff --git a/pkg/ruler/base/manager.go b/pkg/ruler/base/manager.go index 371eb712508e4..93b975895846c 100644 --- a/pkg/ruler/base/manager.go +++ b/pkg/ruler/base/manager.go @@ -19,7 +19,7 @@ import ( promRules "github.com/prometheus/prometheus/rules" "golang.org/x/net/context/ctxhttp" - "github.com/grafana/loki/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" ) type DefaultMultiTenantManager struct { diff --git a/pkg/ruler/base/manager_metrics.go b/pkg/ruler/base/manager_metrics.go index d5caab8c2ef87..25d5aa13fcae1 100644 --- a/pkg/ruler/base/manager_metrics.go +++ b/pkg/ruler/base/manager_metrics.go @@ -3,7 +3,7 @@ package base import ( "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) // ManagerMetrics aggregates metrics exported by the Prometheus diff --git a/pkg/ruler/base/manager_metrics_test.go b/pkg/ruler/base/manager_metrics_test.go index 421133bd4f060..49f34405bff84 100644 --- a/pkg/ruler/base/manager_metrics_test.go +++ b/pkg/ruler/base/manager_metrics_test.go @@ -12,9 +12,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + 
"github.com/grafana/loki/v3/pkg/util/constants" ) func TestManagerMetricsWithRuleGroupLabel(t *testing.T) { diff --git a/pkg/ruler/base/manager_test.go b/pkg/ruler/base/manager_test.go index c2cdc58854090..a93dca6f7977a 100644 --- a/pkg/ruler/base/manager_test.go +++ b/pkg/ruler/base/manager_test.go @@ -13,9 +13,9 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/test" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/test" ) func TestSyncRuleGroups(t *testing.T) { diff --git a/pkg/ruler/base/notifier.go b/pkg/ruler/base/notifier.go index 403383f2eefc1..8fea76be13963 100644 --- a/pkg/ruler/base/notifier.go +++ b/pkg/ruler/base/notifier.go @@ -19,8 +19,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/notifier" - ruler_config "github.com/grafana/loki/pkg/ruler/config" - "github.com/grafana/loki/pkg/util" + ruler_config "github.com/grafana/loki/v3/pkg/ruler/config" + "github.com/grafana/loki/v3/pkg/util" ) // TODO: Instead of using the same metrics for all notifiers, diff --git a/pkg/ruler/base/notifier_test.go b/pkg/ruler/base/notifier_test.go index 9db716623c41e..8166193d84575 100644 --- a/pkg/ruler/base/notifier_test.go +++ b/pkg/ruler/base/notifier_test.go @@ -14,8 +14,8 @@ import ( "github.com/prometheus/prometheus/model/relabel" "github.com/stretchr/testify/require" - ruler_config "github.com/grafana/loki/pkg/ruler/config" - "github.com/grafana/loki/pkg/util" + ruler_config "github.com/grafana/loki/v3/pkg/ruler/config" + "github.com/grafana/loki/v3/pkg/util" ) func TestBuildNotifierConfig(t *testing.T) { diff --git a/pkg/ruler/base/pusher_mock_test.go b/pkg/ruler/base/pusher_mock_test.go index 0fb571c70b653..cde9f43b56fbe 100644 --- a/pkg/ruler/base/pusher_mock_test.go +++ 
b/pkg/ruler/base/pusher_mock_test.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/mock" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type pusherMock struct { diff --git a/pkg/ruler/base/ruler.go b/pkg/ruler/base/ruler.go index a0dc0df08f2d7..b8e9f3899e417 100644 --- a/pkg/ruler/base/ruler.go +++ b/pkg/ruler/base/ruler.go @@ -34,12 +34,12 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/ruler/config" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ruler/config" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/ruler/base/ruler.pb.go b/pkg/ruler/base/ruler.pb.go index 63904a26e3927..5b3b1f1b4d5d8 100644 --- a/pkg/ruler/base/ruler.pb.go +++ b/pkg/ruler/base/ruler.pb.go @@ -12,9 +12,9 @@ import ( _ "github.com/gogo/protobuf/types" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" _ "github.com/golang/protobuf/ptypes/duration" - _ "github.com/grafana/loki/pkg/logproto" - github_com_grafana_loki_pkg_logproto "github.com/grafana/loki/pkg/logproto" - rulespb "github.com/grafana/loki/pkg/ruler/rulespb" + _ "github.com/grafana/loki/v3/pkg/logproto" + github_com_grafana_loki_v3_pkg_logproto "github.com/grafana/loki/v3/pkg/logproto" + rulespb "github.com/grafana/loki/v3/pkg/ruler/rulespb" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" @@ -334,15 +334,15 @@ func (m *RuleStateDesc) GetEvaluationDuration() time.Duration { } type AlertStateDesc struct { - State string `protobuf:"bytes,1,opt,name=state,proto3" 
json:"state,omitempty"` - Labels []github_com_grafana_loki_pkg_logproto.LabelAdapter `protobuf:"bytes,2,rep,name=labels,proto3,customtype=github.com/grafana/loki/pkg/logproto.LabelAdapter" json:"labels"` - Annotations []github_com_grafana_loki_pkg_logproto.LabelAdapter `protobuf:"bytes,3,rep,name=annotations,proto3,customtype=github.com/grafana/loki/pkg/logproto.LabelAdapter" json:"annotations"` - Value float64 `protobuf:"fixed64,4,opt,name=value,proto3" json:"value,omitempty"` - ActiveAt time.Time `protobuf:"bytes,5,opt,name=active_at,json=activeAt,proto3,stdtime" json:"active_at"` - FiredAt time.Time `protobuf:"bytes,6,opt,name=fired_at,json=firedAt,proto3,stdtime" json:"fired_at"` - ResolvedAt time.Time `protobuf:"bytes,7,opt,name=resolved_at,json=resolvedAt,proto3,stdtime" json:"resolved_at"` - LastSentAt time.Time `protobuf:"bytes,8,opt,name=last_sent_at,json=lastSentAt,proto3,stdtime" json:"last_sent_at"` - ValidUntil time.Time `protobuf:"bytes,9,opt,name=valid_until,json=validUntil,proto3,stdtime" json:"valid_until"` + State string `protobuf:"bytes,1,opt,name=state,proto3" json:"state,omitempty"` + Labels []github_com_grafana_loki_v3_pkg_logproto.LabelAdapter `protobuf:"bytes,2,rep,name=labels,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" json:"labels"` + Annotations []github_com_grafana_loki_v3_pkg_logproto.LabelAdapter `protobuf:"bytes,3,rep,name=annotations,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" json:"annotations"` + Value float64 `protobuf:"fixed64,4,opt,name=value,proto3" json:"value,omitempty"` + ActiveAt time.Time `protobuf:"bytes,5,opt,name=active_at,json=activeAt,proto3,stdtime" json:"active_at"` + FiredAt time.Time `protobuf:"bytes,6,opt,name=fired_at,json=firedAt,proto3,stdtime" json:"fired_at"` + ResolvedAt time.Time `protobuf:"bytes,7,opt,name=resolved_at,json=resolvedAt,proto3,stdtime" json:"resolved_at"` + LastSentAt time.Time 
`protobuf:"bytes,8,opt,name=last_sent_at,json=lastSentAt,proto3,stdtime" json:"last_sent_at"` + ValidUntil time.Time `protobuf:"bytes,9,opt,name=valid_until,json=validUntil,proto3,stdtime" json:"valid_until"` } func (m *AlertStateDesc) Reset() { *m = AlertStateDesc{} } @@ -438,57 +438,57 @@ func init() { func init() { proto.RegisterFile("pkg/ruler/base/ruler.proto", fileDescriptor_ca810a0fd7057a73) } var fileDescriptor_ca810a0fd7057a73 = []byte{ - // 791 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0x4f, 0x4f, 0xdb, 0x48, - 0x14, 0xf7, 0xe4, 0xbf, 0x27, 0x81, 0x65, 0x07, 0xb4, 0x32, 0x61, 0x71, 0xa2, 0xec, 0x25, 0x5a, - 0xad, 0x6c, 0x6d, 0x58, 0xad, 0xb4, 0xda, 0x5d, 0x55, 0x41, 0xd0, 0x5e, 0x50, 0x55, 0x19, 0xda, - 0x6b, 0x34, 0x49, 0x26, 0xc6, 0x62, 0xe2, 0x71, 0xc7, 0xe3, 0x48, 0xdc, 0xfa, 0x11, 0x38, 0xf6, - 0xda, 0x5b, 0x3f, 0x0a, 0x47, 0x7a, 0x43, 0xad, 0x44, 0x4b, 0xb8, 0xf4, 0xc8, 0x07, 0xe8, 0xa1, - 0x9a, 0x19, 0x9b, 0x84, 0x42, 0x0f, 0x51, 0xc5, 0x05, 0xe6, 0xfd, 0xf9, 0xfd, 0xde, 0x7b, 0xbf, - 0x79, 0xe3, 0xc0, 0x7a, 0x74, 0xe4, 0xbb, 0x3c, 0xa1, 0x84, 0xbb, 0x7d, 0x1c, 0x13, 0x7d, 0x74, - 0x22, 0xce, 0x04, 0x43, 0x05, 0xe9, 0xa9, 0xaf, 0xf9, 0xcc, 0x67, 0xca, 0xe1, 0xca, 0x93, 0x8e, - 0xd5, 0x6d, 0x9f, 0x31, 0x9f, 0x12, 0x57, 0x59, 0xfd, 0x64, 0xe4, 0x0e, 0x13, 0x8e, 0x45, 0xc0, - 0xc2, 0x34, 0xde, 0xf8, 0x36, 0x2e, 0x82, 0x31, 0x89, 0x05, 0x1e, 0x47, 0x69, 0xc2, 0x86, 0x2c, - 0x4c, 0x99, 0xaf, 0x99, 0xb3, 0x43, 0x1a, 0xdc, 0x9c, 0x75, 0x25, 0xff, 0xc6, 0x51, 0x5f, 0xff, - 0xd7, 0xe1, 0xd6, 0x3b, 0x00, 0x6b, 0x9e, 0xb4, 0x3d, 0xf2, 0x32, 0x21, 0xb1, 0x40, 0x5b, 0xb0, - 0x34, 0x0a, 0xa8, 0x20, 0xdc, 0x02, 0x4d, 0xd0, 0x5e, 0xee, 0x6c, 0x38, 0xb2, 0x75, 0x67, 0x3e, - 0x47, 0x19, 0x07, 0xc7, 0x11, 0xf1, 0xd2, 0x54, 0xb4, 0x01, 0x4d, 0x49, 0xda, 0x0b, 0xf1, 0x98, - 0x58, 0xb9, 0x66, 0xbe, 0x6d, 0x7a, 0x15, 0xe9, 0x78, 0x8a, 0xc7, 0x04, 0x6d, 0x42, 0xa8, 0x82, - 0x3e, 0x67, 0x49, 0x64, 0xe5, 
0x55, 0x54, 0xa5, 0x3f, 0x91, 0x0e, 0x84, 0x60, 0x61, 0x14, 0x50, - 0x62, 0x15, 0x54, 0x40, 0x9d, 0x5b, 0xff, 0xc1, 0x4a, 0x56, 0x03, 0x55, 0x61, 0xb9, 0x1b, 0x1e, - 0x4b, 0x73, 0xc5, 0x40, 0x2b, 0xb0, 0xd6, 0xa5, 0x84, 0x8b, 0x20, 0xf4, 0x95, 0x07, 0xa0, 0x9f, - 0xe1, 0x92, 0x47, 0x06, 0x8c, 0x0f, 0x33, 0x57, 0xae, 0xf5, 0x3f, 0x5c, 0x4a, 0xdb, 0x8d, 0x23, - 0x16, 0xc6, 0x04, 0xfd, 0x01, 0x4b, 0xaa, 0x78, 0x6c, 0x81, 0x66, 0xbe, 0x5d, 0xed, 0xac, 0xe9, - 0x99, 0x54, 0xfd, 0x7d, 0x81, 0x05, 0xd9, 0x21, 0xf1, 0xc0, 0x4b, 0x73, 0x5a, 0x6f, 0x72, 0x70, - 0xf9, 0x76, 0x08, 0xfd, 0x0e, 0x8b, 0xba, 0x7b, 0xa9, 0x89, 0xc4, 0x6b, 0x09, 0xbd, 0x6c, 0x08, - 0x85, 0xd7, 0x29, 0xe8, 0x6f, 0x58, 0xc3, 0x03, 0x11, 0x4c, 0x48, 0x4f, 0x25, 0x29, 0x39, 0xaa, - 0x9d, 0xd5, 0x99, 0x8c, 0xb3, 0x8a, 0x55, 0x9d, 0xa8, 0x9a, 0x45, 0x2f, 0xe0, 0x2a, 0x99, 0x60, - 0x9a, 0xa8, 0xab, 0x3f, 0xc8, 0xae, 0xd8, 0xca, 0xab, 0x8a, 0x75, 0x47, 0x2f, 0x81, 0x93, 0x2d, - 0x81, 0x73, 0x93, 0xb1, 0x5d, 0x39, 0xbd, 0x68, 0x18, 0x27, 0x1f, 0x1b, 0xc0, 0xbb, 0x8f, 0x00, - 0xed, 0x43, 0x34, 0x73, 0xef, 0xa4, 0xab, 0x65, 0x15, 0x14, 0xed, 0xfa, 0x1d, 0xda, 0x2c, 0x41, - 0xb3, 0xbe, 0x96, 0xac, 0xf7, 0xc0, 0x5b, 0x1f, 0x72, 0x5a, 0xe3, 0x99, 0x44, 0xbf, 0xc1, 0x82, - 0x9c, 0x37, 0x55, 0xe8, 0xa7, 0x39, 0x85, 0xd4, 0xa8, 0x2a, 0x88, 0xd6, 0x60, 0x31, 0x96, 0x08, - 0x2b, 0xd7, 0x04, 0x6d, 0xd3, 0xd3, 0x06, 0xfa, 0x05, 0x96, 0x0e, 0x09, 0xa6, 0xe2, 0x50, 0x0d, - 0x6b, 0x7a, 0xa9, 0x85, 0x7e, 0x85, 0x26, 0xc5, 0xb1, 0xd8, 0xe5, 0x9c, 0x71, 0xd5, 0xb0, 0xe9, - 0xcd, 0x1c, 0xf2, 0x52, 0xb1, 0x5c, 0x85, 0xd8, 0x2a, 0xce, 0x5f, 0xaa, 0x5a, 0x8f, 0xb9, 0x4b, - 0xd5, 0x39, 0xdf, 0x53, 0xb7, 0xf4, 0x30, 0xea, 0x96, 0x7f, 0x4c, 0xdd, 0x2f, 0x05, 0xb8, 0x7c, - 0x7b, 0x8e, 0x99, 0x72, 0x60, 0x5e, 0x39, 0x0a, 0x4b, 0x14, 0xf7, 0x09, 0xcd, 0xb6, 0x6c, 0xdd, - 0xb9, 0x79, 0xfd, 0x7b, 0xc4, 0xc7, 0x83, 0xe3, 0x3d, 0x19, 0x7d, 0x86, 0x03, 0xbe, 0xfd, 0x8f, - 0xac, 0xf8, 0xfe, 0xa2, 0xf1, 0xa7, 0x1f, 0x88, 0xc3, 0xa4, 0xef, 
0x0c, 0xd8, 0xd8, 0xf5, 0x39, - 0x1e, 0xe1, 0x10, 0xbb, 0x94, 0x1d, 0x05, 0xee, 0xfc, 0x47, 0xc4, 0x51, 0xb8, 0xee, 0x10, 0x47, - 0x82, 0x70, 0x2f, 0xad, 0x81, 0x26, 0xb0, 0x8a, 0xc3, 0x90, 0x09, 0xd5, 0x64, 0xac, 0x5e, 0xf2, - 0x43, 0x95, 0x9c, 0x2f, 0x24, 0x67, 0x97, 0x1a, 0x11, 0xb5, 0x03, 0xc0, 0xd3, 0x06, 0xea, 0x42, - 0x33, 0x7d, 0x67, 0x58, 0x58, 0xc5, 0x05, 0xee, 0xb1, 0xa2, 0x61, 0x5d, 0x81, 0x1e, 0xc1, 0xca, - 0x28, 0xe0, 0x64, 0x28, 0x19, 0x16, 0xd9, 0x84, 0xb2, 0x42, 0x75, 0x05, 0xda, 0x85, 0x55, 0x4e, - 0x62, 0x46, 0x27, 0x9a, 0xa3, 0xbc, 0x00, 0x07, 0xcc, 0x80, 0x5d, 0x81, 0x1e, 0xc3, 0x9a, 0xdc, - 0xeb, 0x5e, 0x4c, 0x42, 0x21, 0x79, 0x2a, 0x8b, 0xf0, 0x48, 0xe4, 0x3e, 0x09, 0x85, 0x6e, 0x67, - 0x82, 0x69, 0x30, 0xec, 0x25, 0xa1, 0x08, 0xa8, 0x65, 0x2e, 0x42, 0xa3, 0x80, 0xcf, 0x25, 0xae, - 0xf3, 0x2f, 0x2c, 0xca, 0x77, 0xcb, 0x51, 0x47, 0x1f, 0x62, 0x84, 0xee, 0xfe, 0x08, 0xd4, 0x57, - 0x6f, 0xf9, 0xf4, 0x97, 0xb6, 0x65, 0x6c, 0xff, 0x75, 0x76, 0x69, 0x1b, 0xe7, 0x97, 0xb6, 0x71, - 0x7d, 0x69, 0x83, 0x57, 0x53, 0x1b, 0xbc, 0x9d, 0xda, 0xe0, 0x74, 0x6a, 0x83, 0xb3, 0xa9, 0x0d, - 0x3e, 0x4d, 0x6d, 0xf0, 0x79, 0x6a, 0x1b, 0xd7, 0x53, 0x1b, 0x9c, 0x5c, 0xd9, 0xc6, 0xd9, 0x95, - 0x6d, 0x9c, 0x5f, 0xd9, 0x46, 0xbf, 0xa4, 0x9a, 0xdb, 0xfa, 0x1a, 0x00, 0x00, 0xff, 0xff, 0xf0, - 0x4e, 0x48, 0x39, 0x44, 0x07, 0x00, 0x00, + // 790 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0xcd, 0x4e, 0xdb, 0x4c, + 0x14, 0xf5, 0xe4, 0xdf, 0x93, 0xc0, 0xc7, 0x37, 0xa0, 0xca, 0x84, 0xe2, 0x44, 0xe9, 0x26, 0xaa, + 0x2a, 0x5b, 0x0a, 0xa8, 0x9b, 0x52, 0x55, 0x41, 0xd0, 0x6e, 0x50, 0x55, 0x19, 0xda, 0x6d, 0x34, + 0x49, 0x26, 0xc6, 0xc2, 0xf1, 0xb8, 0xe3, 0x71, 0xa4, 0xec, 0xaa, 0x3e, 0x01, 0xcb, 0x6e, 0xbb, + 0xeb, 0xa3, 0xb0, 0xa4, 0x3b, 0xd4, 0x4a, 0xb4, 0x84, 0x4d, 0x97, 0x3c, 0x42, 0x35, 0x33, 0x36, + 0x09, 0x85, 0x2e, 0xa2, 0x8a, 0x0d, 0xcc, 0xfd, 0x39, 0xe7, 0xde, 0x39, 0xf7, 0x8e, 0x03, 0xab, + 0xe1, 0x91, 
0x6b, 0xb3, 0xd8, 0x27, 0xcc, 0xee, 0xe2, 0x88, 0xa8, 0xa3, 0x15, 0x32, 0xca, 0x29, + 0xca, 0x09, 0x4f, 0x75, 0xc5, 0xa5, 0x2e, 0x95, 0x0e, 0x5b, 0x9c, 0x54, 0xac, 0x6a, 0xba, 0x94, + 0xba, 0x3e, 0xb1, 0xa5, 0xd5, 0x8d, 0x07, 0x76, 0x3f, 0x66, 0x98, 0x7b, 0x34, 0x48, 0xe2, 0xb5, + 0x3f, 0xe3, 0xdc, 0x1b, 0x92, 0x88, 0xe3, 0x61, 0x98, 0x24, 0xac, 0x89, 0xc2, 0x3e, 0x75, 0x15, + 0x73, 0x7a, 0x48, 0x82, 0xeb, 0xd3, 0xae, 0xc4, 0xdf, 0x28, 0xec, 0xaa, 0xff, 0x2a, 0xdc, 0xf8, + 0x0a, 0x60, 0xc5, 0x11, 0xb6, 0x43, 0xde, 0xc7, 0x24, 0xe2, 0x68, 0x03, 0x16, 0x06, 0x9e, 0xcf, + 0x09, 0x33, 0x40, 0x1d, 0x34, 0x17, 0x5b, 0x6b, 0x96, 0x68, 0xdd, 0x9a, 0xcd, 0x91, 0xc6, 0xc1, + 0x38, 0x24, 0x4e, 0x92, 0x8a, 0xd6, 0xa0, 0x2e, 0x48, 0x3b, 0x01, 0x1e, 0x12, 0x23, 0x53, 0xcf, + 0x36, 0x75, 0xa7, 0x24, 0x1c, 0xaf, 0xf1, 0x90, 0xa0, 0x75, 0x08, 0x65, 0xd0, 0x65, 0x34, 0x0e, + 0x8d, 0xac, 0x8c, 0xca, 0xf4, 0x57, 0xc2, 0x81, 0x10, 0xcc, 0x0d, 0x3c, 0x9f, 0x18, 0x39, 0x19, + 0x90, 0xe7, 0xc6, 0x16, 0x2c, 0xa5, 0x35, 0x50, 0x19, 0x16, 0xdb, 0xc1, 0x58, 0x98, 0x4b, 0x1a, + 0x5a, 0x82, 0x95, 0xb6, 0x4f, 0x18, 0xf7, 0x02, 0x57, 0x7a, 0x00, 0xfa, 0x1f, 0x2e, 0x38, 0xa4, + 0x47, 0x59, 0x3f, 0x75, 0x65, 0x1a, 0xcf, 0xe1, 0x42, 0xd2, 0x6e, 0x14, 0xd2, 0x20, 0x22, 0xe8, + 0x09, 0x2c, 0xc8, 0xe2, 0x91, 0x01, 0xea, 0xd9, 0x66, 0xb9, 0xb5, 0xa2, 0xee, 0x24, 0xeb, 0xef, + 0x73, 0xcc, 0xc9, 0x0e, 0x89, 0x7a, 0x4e, 0x92, 0xd3, 0xf8, 0x9c, 0x81, 0x8b, 0x37, 0x43, 0xe8, + 0x31, 0xcc, 0xab, 0xee, 0x85, 0x26, 0x02, 0xaf, 0x24, 0x74, 0xd2, 0x4b, 0x48, 0xbc, 0x4a, 0x41, + 0x4f, 0x61, 0x05, 0xf7, 0xb8, 0x37, 0x22, 0x1d, 0x99, 0x24, 0xe5, 0x28, 0xb7, 0x96, 0xa7, 0x32, + 0x4e, 0x2b, 0x96, 0x55, 0xa2, 0x6c, 0x16, 0xbd, 0x83, 0xcb, 0x64, 0x84, 0xfd, 0x58, 0x8e, 0xfe, + 0x20, 0x1d, 0xb1, 0x91, 0x95, 0x15, 0xab, 0x96, 0x5a, 0x02, 0x2b, 0x5d, 0x02, 0xeb, 0x3a, 0x63, + 0xbb, 0x74, 0x72, 0x5e, 0xd3, 0x8e, 0x7f, 0xd4, 0x80, 0x73, 0x17, 0x01, 0xda, 0x87, 0x68, 0xea, + 0xde, 0x49, 0x56, 0xcb, 0xc8, 0x49, 0xda, 0xd5, 
0x5b, 0xb4, 0x69, 0x82, 0x62, 0xfd, 0x24, 0x58, + 0xef, 0x80, 0x37, 0xbe, 0x67, 0x94, 0xc6, 0x53, 0x89, 0x1e, 0xc1, 0x9c, 0xb8, 0x6f, 0xa2, 0xd0, + 0x7f, 0x33, 0x0a, 0xc9, 0xab, 0xca, 0x20, 0x5a, 0x81, 0xf9, 0x48, 0x20, 0x8c, 0x4c, 0x1d, 0x34, + 0x75, 0x47, 0x19, 0xe8, 0x01, 0x2c, 0x1c, 0x12, 0xec, 0xf3, 0x43, 0x79, 0x59, 0xdd, 0x49, 0x2c, + 0xf4, 0x10, 0xea, 0x3e, 0x8e, 0xf8, 0x2e, 0x63, 0x94, 0xc9, 0x86, 0x75, 0x67, 0xea, 0x10, 0x43, + 0xc5, 0x62, 0x15, 0x22, 0x23, 0x3f, 0x3b, 0x54, 0xb9, 0x1e, 0x33, 0x43, 0x55, 0x39, 0x7f, 0x53, + 0xb7, 0x70, 0x3f, 0xea, 0x16, 0xff, 0x4d, 0xdd, 0x8f, 0x79, 0xb8, 0x78, 0xf3, 0x1e, 0x53, 0xe5, + 0xc0, 0xac, 0x72, 0x14, 0x16, 0x7c, 0xdc, 0x25, 0x7e, 0xba, 0x65, 0xab, 0xd6, 0xf5, 0xeb, 0xdf, + 0x23, 0x2e, 0xee, 0x8d, 0xf7, 0x44, 0xf4, 0x0d, 0xf6, 0xd8, 0xf6, 0x96, 0xa8, 0xf8, 0xed, 0xbc, + 0xb6, 0xe9, 0x7a, 0xfc, 0x30, 0xee, 0x5a, 0x3d, 0x3a, 0xb4, 0x5d, 0x86, 0x07, 0x38, 0xc0, 0xb6, + 0x4f, 0x8f, 0x3c, 0x7b, 0xb4, 0x61, 0xcf, 0x7e, 0x47, 0x2c, 0x09, 0x6d, 0xf7, 0x71, 0xc8, 0x09, + 0x73, 0x92, 0x32, 0x68, 0x0c, 0xcb, 0x38, 0x08, 0x28, 0x97, 0x7d, 0x46, 0xf2, 0x31, 0xdf, 0x63, + 0xd5, 0xd9, 0x5a, 0x42, 0x01, 0xa1, 0x14, 0x91, 0x9b, 0x00, 0x1c, 0x65, 0xa0, 0x36, 0xd4, 0x93, + 0xd7, 0x86, 0xb9, 0x91, 0x9f, 0x63, 0x9a, 0x25, 0x05, 0x6b, 0x73, 0xf4, 0x02, 0x96, 0x06, 0x1e, + 0x23, 0x7d, 0xc1, 0x30, 0xcf, 0x3e, 0x14, 0x25, 0xaa, 0xcd, 0xd1, 0x2e, 0x2c, 0x33, 0x12, 0x51, + 0x7f, 0xa4, 0x38, 0x8a, 0x73, 0x70, 0xc0, 0x14, 0xd8, 0xe6, 0xe8, 0x25, 0xac, 0x88, 0xed, 0xee, + 0x44, 0x24, 0xe0, 0x82, 0xa7, 0x34, 0x0f, 0x8f, 0x40, 0xee, 0x93, 0x80, 0xab, 0x76, 0x46, 0xd8, + 0xf7, 0xfa, 0x9d, 0x38, 0xe0, 0x9e, 0x6f, 0xe8, 0xf3, 0xd0, 0x48, 0xe0, 0x5b, 0x81, 0x6b, 0x3d, + 0x83, 0x79, 0xf1, 0x7a, 0x19, 0x6a, 0xa9, 0x43, 0x84, 0xd0, 0xed, 0x9f, 0x82, 0xea, 0xf2, 0x0d, + 0x9f, 0xfa, 0xde, 0x36, 0xb4, 0xed, 0xcd, 0xd3, 0x0b, 0x53, 0x3b, 0xbb, 0x30, 0xb5, 0xab, 0x0b, + 0x13, 0x7c, 0x98, 0x98, 0xe0, 0xcb, 0xc4, 0x04, 0x27, 0x13, 0x13, 0x9c, 0x4e, 0x4c, 
0xf0, 0x73, + 0x62, 0x82, 0x5f, 0x13, 0x53, 0xbb, 0x9a, 0x98, 0xe0, 0xf8, 0xd2, 0xd4, 0x4e, 0x2f, 0x4d, 0xed, + 0xec, 0xd2, 0xd4, 0xba, 0x05, 0xd9, 0xdc, 0xc6, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe9, 0x34, + 0xd7, 0x09, 0x4a, 0x07, 0x00, 0x00, } func (x RulesRequest_RuleType) String() string { @@ -2304,7 +2304,7 @@ func (m *AlertStateDesc) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, github_com_grafana_loki_pkg_logproto.LabelAdapter{}) + m.Labels = append(m.Labels, github_com_grafana_loki_v3_pkg_logproto.LabelAdapter{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -2338,7 +2338,7 @@ func (m *AlertStateDesc) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Annotations = append(m.Annotations, github_com_grafana_loki_pkg_logproto.LabelAdapter{}) + m.Annotations = append(m.Annotations, github_com_grafana_loki_v3_pkg_logproto.LabelAdapter{}) if err := m.Annotations[len(m.Annotations)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/ruler/base/ruler.proto b/pkg/ruler/base/ruler.proto index 0b143728efe61..e47c497def902 100644 --- a/pkg/ruler/base/ruler.proto +++ b/pkg/ruler/base/ruler.proto @@ -69,11 +69,11 @@ message AlertStateDesc { string state = 1; repeated logproto.LegacyLabelPair labels = 2 [ (gogoproto.nullable) = false, - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.LabelAdapter" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" ]; repeated logproto.LegacyLabelPair annotations = 3 [ (gogoproto.nullable) = false, - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.LabelAdapter" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" ]; double value = 4; google.protobuf.Timestamp active_at = 5 [ diff --git a/pkg/ruler/base/ruler_ring.go b/pkg/ruler/base/ruler_ring.go index 
697b1d8b9ac59..4d50dd2934c51 100644 --- a/pkg/ruler/base/ruler_ring.go +++ b/pkg/ruler/base/ruler_ring.go @@ -14,7 +14,7 @@ import ( "github.com/grafana/dskit/netutil" "github.com/grafana/dskit/ring" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/ruler/base/ruler_test.go b/pkg/ruler/base/ruler_test.go index d17691e1bb6a0..931f8288be4d6 100644 --- a/pkg/ruler/base/ruler_test.go +++ b/pkg/ruler/base/ruler_test.go @@ -45,17 +45,17 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/series" - "github.com/grafana/loki/pkg/ruler/config" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/ruler/rulestore/objectclient" - loki_storage "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/series" + "github.com/grafana/loki/v3/pkg/ruler/config" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/objectclient" + loki_storage "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) func defaultRulerConfig(t testing.TB, store rulestore.RuleStore) Config { diff --git a/pkg/ruler/base/storage.go b/pkg/ruler/base/storage.go index 4a79fd5691221..c2548f0e46830 100644 --- a/pkg/ruler/base/storage.go +++ b/pkg/ruler/base/storage.go @@ -10,23 +10,23 @@ import ( 
"github.com/prometheus/client_golang/prometheus" promRules "github.com/prometheus/prometheus/rules" - configClient "github.com/grafana/loki/pkg/configs/client" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/ruler/rulestore/bucketclient" - "github.com/grafana/loki/pkg/ruler/rulestore/configdb" - "github.com/grafana/loki/pkg/ruler/rulestore/local" - "github.com/grafana/loki/pkg/ruler/rulestore/objectclient" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/bucket" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/alibaba" - "github.com/grafana/loki/pkg/storage/chunk/client/aws" - "github.com/grafana/loki/pkg/storage/chunk/client/azure" - "github.com/grafana/loki/pkg/storage/chunk/client/baidubce" - "github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/storage/chunk/client/ibmcloud" - "github.com/grafana/loki/pkg/storage/chunk/client/openstack" + configClient "github.com/grafana/loki/v3/pkg/configs/client" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/bucketclient" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/configdb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/local" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/objectclient" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/bucket" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/alibaba" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/aws" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/azure" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/baidubce" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + 
"github.com/grafana/loki/v3/pkg/storage/chunk/client/ibmcloud" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/openstack" ) // RuleStoreConfig configures a rule store. diff --git a/pkg/ruler/base/store_mock_test.go b/pkg/ruler/base/store_mock_test.go index 2d92afdfbb1d8..b20a76571df65 100644 --- a/pkg/ruler/base/store_mock_test.go +++ b/pkg/ruler/base/store_mock_test.go @@ -7,9 +7,9 @@ import ( "sync" "time" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" ) type mockRuleStore struct { diff --git a/pkg/ruler/compat.go b/pkg/ruler/compat.go index 8f70d314da884..6d3a6b68334d3 100644 --- a/pkg/ruler/compat.go +++ b/pkg/ruler/compat.go @@ -24,11 +24,11 @@ import ( "github.com/prometheus/prometheus/rules" "github.com/prometheus/prometheus/template" - "github.com/grafana/loki/pkg/logql/syntax" - ruler "github.com/grafana/loki/pkg/ruler/base" - "github.com/grafana/loki/pkg/ruler/rulespb" - rulerutil "github.com/grafana/loki/pkg/ruler/util" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logql/syntax" + ruler "github.com/grafana/loki/v3/pkg/ruler/base" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + rulerutil "github.com/grafana/loki/v3/pkg/ruler/util" + "github.com/grafana/loki/v3/pkg/util" ) // RulesLimits is the one function we need from limits.Overrides, and @@ -152,18 +152,19 @@ func MultiTenantRuleManager(cfg Config, evaluator Evaluator, overrides RulesLimi groupLoader := NewCachingGroupLoader(GroupLoader{}) mgr := rules.NewManager(&rules.ManagerOptions{ - Appendable: registry, - Queryable: memStore, - QueryFunc: queryFn, - Context: user.InjectOrgID(ctx, userID), - ExternalURL: cfg.ExternalURL.URL, - NotifyFunc: ruler.SendAlerts(notifier, cfg.ExternalURL.URL.String(), cfg.DatasourceUID), - Logger: logger, - 
Registerer: reg, - OutageTolerance: cfg.OutageTolerance, - ForGracePeriod: cfg.ForGracePeriod, - ResendDelay: cfg.ResendDelay, - GroupLoader: groupLoader, + Appendable: registry, + Queryable: memStore, + QueryFunc: queryFn, + Context: user.InjectOrgID(ctx, userID), + ExternalURL: cfg.ExternalURL.URL, + NotifyFunc: ruler.SendAlerts(notifier, cfg.ExternalURL.URL.String(), cfg.DatasourceUID), + Logger: logger, + Registerer: reg, + OutageTolerance: cfg.OutageTolerance, + ForGracePeriod: cfg.ForGracePeriod, + ResendDelay: cfg.ResendDelay, + GroupLoader: groupLoader, + RuleDependencyController: &noopRuleDependencyController{}, }) cachingManager := &CachingRulesManager{ @@ -347,3 +348,12 @@ func (exprAdapter) PositionRange() posrange.PositionRange { return posrange.Posi func (exprAdapter) PromQLExpr() {} func (exprAdapter) Type() parser.ValueType { return parser.ValueType("unimplemented") } func (exprAdapter) Pretty(_ int) string { return "" } + +type noopRuleDependencyController struct{} + +// Prometheus rules manager calls AnalyseRules to determine the dependents and dependencies of a rule +// which it then uses to decide if a rule within a group is eligible for concurrent execution. +// AnalyseRules is a noop for Loki since there is no dependency relation between rules. 
+func (*noopRuleDependencyController) AnalyseRules([]rules.Rule) { + // Do nothing +} diff --git a/pkg/ruler/compat_test.go b/pkg/ruler/compat_test.go index 55e77c2f18a2a..6855368aefdee 100644 --- a/pkg/ruler/compat_test.go +++ b/pkg/ruler/compat_test.go @@ -12,11 +12,11 @@ import ( "gopkg.in/yaml.v3" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logql" - rulerbase "github.com/grafana/loki/pkg/ruler/base" - "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logql" + rulerbase "github.com/grafana/loki/v3/pkg/ruler/base" + "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) // TestInvalidRuleGroup tests that a validation error is raised when rule group is invalid diff --git a/pkg/ruler/config.go b/pkg/ruler/config.go index 7d948baa0c30d..69293b91324a0 100644 --- a/pkg/ruler/config.go +++ b/pkg/ruler/config.go @@ -9,9 +9,9 @@ import ( "github.com/prometheus/prometheus/config" "gopkg.in/yaml.v2" - ruler "github.com/grafana/loki/pkg/ruler/base" - "github.com/grafana/loki/pkg/ruler/storage/cleaner" - "github.com/grafana/loki/pkg/ruler/storage/instance" + ruler "github.com/grafana/loki/v3/pkg/ruler/base" + "github.com/grafana/loki/v3/pkg/ruler/storage/cleaner" + "github.com/grafana/loki/v3/pkg/ruler/storage/instance" ) type Config struct { diff --git a/pkg/ruler/config/alertmanager.go b/pkg/ruler/config/alertmanager.go index d30eec8df2be3..8282a39326f0f 100644 --- a/pkg/ruler/config/alertmanager.go +++ b/pkg/ruler/config/alertmanager.go @@ -7,7 +7,7 @@ import ( "github.com/grafana/dskit/crypto/tls" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) type AlertManagerConfig struct { diff --git a/pkg/ruler/evaluator.go b/pkg/ruler/evaluator.go index 639bd102502cd..ce93e8310be77 100644 --- a/pkg/ruler/evaluator.go +++ 
b/pkg/ruler/evaluator.go @@ -7,7 +7,7 @@ import ( "strings" "time" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) // Evaluator is the interface that must be satisfied in order to accept rule evaluations from the Ruler. diff --git a/pkg/ruler/evaluator_jitter.go b/pkg/ruler/evaluator_jitter.go index 449ca0e18011c..ba5b0998c6a19 100644 --- a/pkg/ruler/evaluator_jitter.go +++ b/pkg/ruler/evaluator_jitter.go @@ -10,8 +10,8 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/util" ) // EvaluatorWithJitter wraps a given Evaluator. It applies a consistent jitter based on a rule's query string by hashing diff --git a/pkg/ruler/evaluator_jitter_test.go b/pkg/ruler/evaluator_jitter_test.go index 7a1636c55939a..f6e35a390e99a 100644 --- a/pkg/ruler/evaluator_jitter_test.go +++ b/pkg/ruler/evaluator_jitter_test.go @@ -11,7 +11,7 @@ import ( "github.com/go-kit/log" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) type mockEval struct{} diff --git a/pkg/ruler/evaluator_local.go b/pkg/ruler/evaluator_local.go index 91efd5a14d995..dbbd8c813e1f3 100644 --- a/pkg/ruler/evaluator_local.go +++ b/pkg/ruler/evaluator_local.go @@ -7,9 +7,9 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) const EvalModeLocal = "local" diff --git a/pkg/ruler/evaluator_remote.go b/pkg/ruler/evaluator_remote.go index 8cfc63efca912..a409e814d87c2 100644 --- a/pkg/ruler/evaluator_remote.go +++ b/pkg/ruler/evaluator_remote.go @@ -35,13 +35,13 @@ import ( "google.golang.org/grpc" 
"google.golang.org/grpc/keepalive" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/build" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/httpreq" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/ruler/evaluator_remote_test.go b/pkg/ruler/evaluator_remote_test.go index 6c38fe6ca1e5e..515b8ea306528 100644 --- a/pkg/ruler/evaluator_remote_test.go +++ b/pkg/ruler/evaluator_remote_test.go @@ -18,9 +18,9 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) type mockClient struct { diff --git a/pkg/ruler/grouploader.go b/pkg/ruler/grouploader.go index 628e5a1f873a6..2f22e0a680b16 100644 --- a/pkg/ruler/grouploader.go +++ b/pkg/ruler/grouploader.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/prometheus/rules" "gopkg.in/yaml.v3" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) type GroupLoader struct{} diff --git a/pkg/ruler/memstore.go b/pkg/ruler/memstore.go index b70d17a954b38..69d37ddfeed29 100644 --- a/pkg/ruler/memstore.go +++ b/pkg/ruler/memstore.go @@ -19,9 +19,9 @@ import ( "github.com/prometheus/prometheus/rules" "github.com/prometheus/prometheus/storage" - "github.com/grafana/loki/pkg/querier/series" - "github.com/grafana/loki/pkg/util" - 
"github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/querier/series" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/ruler/memstore_test.go b/pkg/ruler/memstore_test.go index 43c13be42c6c1..3c26a0f71506a 100644 --- a/pkg/ruler/memstore_test.go +++ b/pkg/ruler/memstore_test.go @@ -15,7 +15,7 @@ import ( "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) const ruleName = "testrule" diff --git a/pkg/ruler/registry.go b/pkg/ruler/registry.go index 5874eb7536e8d..868b7f29a6f94 100644 --- a/pkg/ruler/registry.go +++ b/pkg/ruler/registry.go @@ -25,9 +25,9 @@ import ( "github.com/prometheus/prometheus/storage" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/ruler/storage/cleaner" - "github.com/grafana/loki/pkg/ruler/storage/instance" - "github.com/grafana/loki/pkg/ruler/storage/wal" + "github.com/grafana/loki/v3/pkg/ruler/storage/cleaner" + "github.com/grafana/loki/v3/pkg/ruler/storage/instance" + "github.com/grafana/loki/v3/pkg/ruler/storage/wal" ) type walRegistry struct { diff --git a/pkg/ruler/registry_test.go b/pkg/ruler/registry_test.go index 46ab9a7084576..261b6d3836763 100644 --- a/pkg/ruler/registry_test.go +++ b/pkg/ruler/registry_test.go @@ -19,10 +19,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/ruler/storage/instance" - "github.com/grafana/loki/pkg/ruler/util" - "github.com/grafana/loki/pkg/util/test" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/ruler/storage/instance" + "github.com/grafana/loki/v3/pkg/ruler/util" + "github.com/grafana/loki/v3/pkg/util/test" + "github.com/grafana/loki/v3/pkg/validation" ) const enabledRWTenant = "enabled" diff --git a/pkg/ruler/ruler.go b/pkg/ruler/ruler.go index dd90ccb153390..5ef091badeb7c 100644 --- 
a/pkg/ruler/ruler.go +++ b/pkg/ruler/ruler.go @@ -6,8 +6,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/prometheus/config" - ruler "github.com/grafana/loki/pkg/ruler/base" - "github.com/grafana/loki/pkg/ruler/rulestore" + ruler "github.com/grafana/loki/v3/pkg/ruler/base" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" ) func NewRuler(cfg Config, evaluator Evaluator, reg prometheus.Registerer, logger log.Logger, ruleStore rulestore.RuleStore, limits RulesLimits, metricsNamespace string) (*ruler.Ruler, error) { diff --git a/pkg/ruler/rulespb/compat.go b/pkg/ruler/rulespb/compat.go index a0da3dc014f46..0c9de4185a101 100644 --- a/pkg/ruler/rulespb/compat.go +++ b/pkg/ruler/rulespb/compat.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/prometheus/model/rulefmt" "gopkg.in/yaml.v3" - "github.com/grafana/loki/pkg/logproto" //lint:ignore faillint allowed to import other protobuf + "github.com/grafana/loki/v3/pkg/logproto" //lint:ignore faillint allowed to import other protobuf ) // ToProto transforms a formatted prometheus rulegroup to a rule group protobuf diff --git a/pkg/ruler/rulespb/rules.pb.go b/pkg/ruler/rulespb/rules.pb.go index ead0d482791be..91afa25a655ef 100644 --- a/pkg/ruler/rulespb/rules.pb.go +++ b/pkg/ruler/rulespb/rules.pb.go @@ -10,8 +10,8 @@ import ( github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" types "github.com/gogo/protobuf/types" _ "github.com/golang/protobuf/ptypes/duration" - _ "github.com/grafana/loki/pkg/logproto" - github_com_grafana_loki_pkg_logproto "github.com/grafana/loki/pkg/logproto" + _ "github.com/grafana/loki/v3/pkg/logproto" + github_com_grafana_loki_v3_pkg_logproto "github.com/grafana/loki/v3/pkg/logproto" io "io" math "math" math_bits "math/bits" @@ -130,12 +130,12 @@ func (m *RuleGroupDesc) GetLimit() int64 { // RuleDesc is a proto representation of a Prometheus Rule type RuleDesc struct { - Expr string `protobuf:"bytes,1,opt,name=expr,proto3" json:"expr,omitempty"` - 
Record string `protobuf:"bytes,2,opt,name=record,proto3" json:"record,omitempty"` - Alert string `protobuf:"bytes,3,opt,name=alert,proto3" json:"alert,omitempty"` - For time.Duration `protobuf:"bytes,4,opt,name=for,proto3,stdduration" json:"for"` - Labels []github_com_grafana_loki_pkg_logproto.LabelAdapter `protobuf:"bytes,5,rep,name=labels,proto3,customtype=github.com/grafana/loki/pkg/logproto.LabelAdapter" json:"labels"` - Annotations []github_com_grafana_loki_pkg_logproto.LabelAdapter `protobuf:"bytes,6,rep,name=annotations,proto3,customtype=github.com/grafana/loki/pkg/logproto.LabelAdapter" json:"annotations"` + Expr string `protobuf:"bytes,1,opt,name=expr,proto3" json:"expr,omitempty"` + Record string `protobuf:"bytes,2,opt,name=record,proto3" json:"record,omitempty"` + Alert string `protobuf:"bytes,3,opt,name=alert,proto3" json:"alert,omitempty"` + For time.Duration `protobuf:"bytes,4,opt,name=for,proto3,stdduration" json:"for"` + Labels []github_com_grafana_loki_v3_pkg_logproto.LabelAdapter `protobuf:"bytes,5,rep,name=labels,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" json:"labels"` + Annotations []github_com_grafana_loki_v3_pkg_logproto.LabelAdapter `protobuf:"bytes,6,rep,name=annotations,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" json:"annotations"` } func (m *RuleDesc) Reset() { *m = RuleDesc{} } @@ -206,39 +206,39 @@ func init() { func init() { proto.RegisterFile("pkg/ruler/rulespb/rules.proto", fileDescriptor_dd3ef3757f506fba) } var fileDescriptor_dd3ef3757f506fba = []byte{ - // 501 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x53, 0x41, 0x6f, 0xd3, 0x30, - 0x18, 0x8d, 0xdb, 0x34, 0x4b, 0x5c, 0x4d, 0x54, 0x56, 0x85, 0xd2, 0x01, 0x6e, 0x35, 0x09, 0xa9, - 0x5c, 0x12, 0x31, 0xc4, 0x81, 0x13, 0x5a, 0x35, 0x09, 0xa9, 0xda, 0x01, 0xe5, 0xc8, 0x05, 0x39, - 0xa9, 0x6b, 0xa2, 0xb9, 0x71, 0xe4, 0x24, 0x13, 0xbd, 0xf1, 0x13, 0xb8, 0x20, 0xf1, 
0x13, 0xf8, - 0x29, 0x3b, 0xf6, 0x38, 0x71, 0x18, 0x34, 0xbd, 0x70, 0x63, 0x3f, 0x01, 0xd9, 0x4e, 0xc6, 0x00, - 0x09, 0x71, 0xe1, 0x12, 0x7f, 0xcf, 0xcf, 0x9f, 0xdf, 0xfb, 0x5e, 0x12, 0xf8, 0x20, 0x3f, 0x63, - 0xa1, 0xac, 0x38, 0x95, 0xfa, 0x59, 0xe4, 0xb1, 0x59, 0x83, 0x5c, 0x8a, 0x52, 0xa0, 0x9e, 0x06, - 0x07, 0x43, 0x26, 0x98, 0xd0, 0x3b, 0xa1, 0xaa, 0x0c, 0x79, 0x30, 0x62, 0x42, 0x30, 0x4e, 0x43, - 0x8d, 0xe2, 0x6a, 0x19, 0x92, 0x6c, 0xdd, 0x50, 0xf8, 0x77, 0x6a, 0x51, 0x49, 0x52, 0xa6, 0x22, - 0x6b, 0xf8, 0x7b, 0x4a, 0x96, 0x0b, 0x66, 0xee, 0x6c, 0x0b, 0x43, 0x1e, 0x7e, 0xe8, 0xc0, 0xfd, - 0xa8, 0xe2, 0xf4, 0x85, 0x14, 0x55, 0x7e, 0x42, 0x8b, 0x04, 0x21, 0x68, 0x67, 0x64, 0x45, 0x7d, - 0x30, 0x01, 0x53, 0x2f, 0xd2, 0x35, 0xba, 0x0f, 0x3d, 0xb5, 0x16, 0x39, 0x49, 0xa8, 0xdf, 0xd1, - 0xc4, 0xcf, 0x0d, 0xf4, 0x1c, 0xba, 0x69, 0x56, 0x52, 0x79, 0x4e, 0xb8, 0xdf, 0x9d, 0x80, 0x69, - 0xff, 0x68, 0x14, 0x18, 0x4f, 0x41, 0xeb, 0x29, 0x38, 0x69, 0x3c, 0xcd, 0xdc, 0x8b, 0xab, 0xb1, - 0xf5, 0xf1, 0xcb, 0x18, 0x44, 0x37, 0x4d, 0xe8, 0x21, 0x34, 0xb3, 0xfb, 0xf6, 0xa4, 0x3b, 0xed, - 0x1f, 0xdd, 0x09, 0x4c, 0x2c, 0xca, 0x97, 0xb2, 0x14, 0x19, 0x56, 0x39, 0xab, 0x0a, 0x2a, 0x7d, - 0xc7, 0x38, 0x53, 0x35, 0x0a, 0xe0, 0x9e, 0xc8, 0xd5, 0xc5, 0x85, 0xef, 0xe9, 0xe6, 0xe1, 0x1f, - 0xd2, 0xc7, 0xd9, 0x3a, 0x6a, 0x0f, 0xa1, 0x21, 0xec, 0xf1, 0x74, 0x95, 0x96, 0x3e, 0x9c, 0x80, - 0x69, 0x37, 0x32, 0x60, 0x6e, 0xbb, 0xbd, 0x81, 0x33, 0xb7, 0xdd, 0xbd, 0x81, 0x3b, 0xb7, 0x5d, - 0x77, 0xe0, 0x1d, 0x7e, 0xef, 0x40, 0xb7, 0xd5, 0x57, 0xc2, 0xf4, 0x6d, 0x2e, 0xdb, 0x48, 0x54, - 0x8d, 0xee, 0x42, 0x47, 0xd2, 0x44, 0xc8, 0x45, 0x93, 0x47, 0x83, 0x94, 0x00, 0xe1, 0x54, 0x96, - 0x3a, 0x09, 0x2f, 0x32, 0x00, 0x3d, 0x85, 0xdd, 0xa5, 0x90, 0xbe, 0xfd, 0xef, 0xe9, 0xa8, 0xf3, - 0x88, 0x43, 0x87, 0x93, 0x98, 0xf2, 0xc2, 0xef, 0xe9, 0xe1, 0x46, 0xc1, 0xcd, 0xeb, 0x3b, 0xa5, - 0x8c, 0x24, 0xeb, 0x53, 0xc5, 0xbe, 0x24, 0xa9, 0x9c, 0x3d, 0x53, 0x9d, 0x9f, 0xaf, 0xc6, 0x8f, - 0x59, 0x5a, 0xbe, 0xa9, 
0xe2, 0x20, 0x11, 0xab, 0x90, 0x49, 0xb2, 0x24, 0x19, 0x09, 0xb9, 0x38, - 0x4b, 0xc3, 0xdb, 0x5f, 0x41, 0xa0, 0xfb, 0x8e, 0x17, 0x24, 0x2f, 0xa9, 0x8c, 0x1a, 0x0d, 0x74, - 0x0e, 0xfb, 0x24, 0xcb, 0x44, 0x49, 0x4c, 0x9e, 0xce, 0x7f, 0x94, 0xbc, 0x2d, 0xa4, 0x73, 0xdf, - 0x9f, 0xbd, 0xde, 0x6c, 0xb1, 0x75, 0xb9, 0xc5, 0xd6, 0xf5, 0x16, 0x83, 0x77, 0x35, 0x06, 0x9f, - 0x6a, 0x0c, 0x2e, 0x6a, 0x0c, 0x36, 0x35, 0x06, 0x5f, 0x6b, 0x0c, 0xbe, 0xd5, 0xd8, 0xba, 0xae, - 0x31, 0x78, 0xbf, 0xc3, 0xd6, 0x66, 0x87, 0xad, 0xcb, 0x1d, 0xb6, 0x5e, 0x3d, 0xfa, 0x9b, 0xf6, - 0x2f, 0xff, 0x5a, 0xec, 0x68, 0x1f, 0x4f, 0x7e, 0x04, 0x00, 0x00, 0xff, 0xff, 0x6f, 0xf3, 0x96, - 0x82, 0x87, 0x03, 0x00, 0x00, + // 503 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x53, 0x31, 0x6f, 0xd3, 0x40, + 0x18, 0xf5, 0x25, 0x8e, 0x6b, 0x5f, 0x54, 0x11, 0x9d, 0x22, 0xe4, 0x14, 0xb8, 0x44, 0x95, 0x90, + 0x32, 0x20, 0x5b, 0x6a, 0x61, 0x43, 0x42, 0x8d, 0x2a, 0x21, 0x45, 0x1d, 0x90, 0x47, 0xb6, 0xb3, + 0x73, 0x39, 0xac, 0x5e, 0x7c, 0xd6, 0xd9, 0xae, 0xc8, 0xc6, 0x4f, 0x60, 0x41, 0xe2, 0x27, 0xf0, + 0x53, 0x3a, 0x66, 0xac, 0x18, 0x0a, 0x71, 0x16, 0xc6, 0x2e, 0xec, 0xe8, 0xee, 0xec, 0x52, 0x60, + 0x80, 0x85, 0xc5, 0xf7, 0xbd, 0x7b, 0xf7, 0xdd, 0x7b, 0xdf, 0xb3, 0x0d, 0x1f, 0xe5, 0xe7, 0x2c, + 0x94, 0x15, 0xa7, 0x52, 0x3f, 0x8b, 0x3c, 0x36, 0x6b, 0x90, 0x4b, 0x51, 0x0a, 0xd4, 0xd3, 0xe0, + 0x60, 0xc8, 0x04, 0x13, 0x7a, 0x27, 0x54, 0x95, 0x21, 0x0f, 0x46, 0x4c, 0x08, 0xc6, 0x69, 0xa8, + 0x51, 0x5c, 0x2d, 0x43, 0x92, 0xad, 0x1b, 0x0a, 0xff, 0x4e, 0x2d, 0x2a, 0x49, 0xca, 0x54, 0x64, + 0x0d, 0xff, 0x40, 0xc9, 0x72, 0xc1, 0xcc, 0x9d, 0x6d, 0x61, 0xc8, 0xc3, 0x0f, 0x1d, 0xb8, 0x1f, + 0x55, 0x9c, 0xbe, 0x94, 0xa2, 0xca, 0x4f, 0x69, 0x91, 0x20, 0x04, 0xed, 0x8c, 0xac, 0xa8, 0x0f, + 0x26, 0x60, 0xea, 0x45, 0xba, 0x46, 0x0f, 0xa1, 0xa7, 0xd6, 0x22, 0x27, 0x09, 0xf5, 0x3b, 0x9a, + 0xf8, 0xb9, 0x81, 0x5e, 0x40, 0x37, 0xcd, 0x4a, 0x2a, 0x2f, 0x08, 0xf7, 0xbb, 
0x13, 0x30, 0xed, + 0x1f, 0x8d, 0x02, 0xe3, 0x29, 0x68, 0x3d, 0x05, 0xa7, 0x8d, 0xa7, 0x99, 0x7b, 0x79, 0x3d, 0xb6, + 0x3e, 0x7e, 0x19, 0x83, 0xe8, 0xb6, 0x09, 0x3d, 0x86, 0x66, 0x76, 0xdf, 0x9e, 0x74, 0xa7, 0xfd, + 0xa3, 0x7b, 0x81, 0x89, 0x45, 0xf9, 0x52, 0x96, 0x22, 0xc3, 0x2a, 0x67, 0x55, 0x41, 0xa5, 0xef, + 0x18, 0x67, 0xaa, 0x46, 0x01, 0xdc, 0x13, 0xb9, 0xba, 0xb8, 0xf0, 0x3d, 0xdd, 0x3c, 0xfc, 0x43, + 0xfa, 0x24, 0x5b, 0x47, 0xed, 0x21, 0x34, 0x84, 0x3d, 0x9e, 0xae, 0xd2, 0xd2, 0x87, 0x13, 0x30, + 0xed, 0x46, 0x06, 0xcc, 0x6d, 0xb7, 0x37, 0x70, 0xe6, 0xb6, 0xbb, 0x37, 0x70, 0xe7, 0xb6, 0xeb, + 0x0e, 0xbc, 0xc3, 0xef, 0x1d, 0xe8, 0xb6, 0xfa, 0x4a, 0x98, 0xbe, 0xcd, 0x65, 0x1b, 0x89, 0xaa, + 0xd1, 0x7d, 0xe8, 0x48, 0x9a, 0x08, 0xb9, 0x68, 0xf2, 0x68, 0x90, 0x12, 0x20, 0x9c, 0xca, 0x52, + 0x27, 0xe1, 0x45, 0x06, 0xa0, 0x67, 0xb0, 0xbb, 0x14, 0xd2, 0xb7, 0xff, 0x3d, 0x1d, 0x75, 0x1e, + 0x09, 0xe8, 0x70, 0x12, 0x53, 0x5e, 0xf8, 0x3d, 0x3d, 0xdc, 0x28, 0xb8, 0x7d, 0x7d, 0x67, 0x94, + 0x91, 0x64, 0x7d, 0xa6, 0xd8, 0x57, 0x24, 0x95, 0xb3, 0xe7, 0xaa, 0xf3, 0xf3, 0xf5, 0xf8, 0x29, + 0x4b, 0xcb, 0x37, 0x55, 0x1c, 0x24, 0x62, 0x15, 0x32, 0x49, 0x96, 0x24, 0x23, 0x21, 0x17, 0xe7, + 0x69, 0x78, 0x71, 0x1c, 0xde, 0xfd, 0x10, 0x02, 0xdd, 0x7a, 0xb2, 0x20, 0x79, 0x49, 0x65, 0xd4, + 0xc8, 0xa0, 0x35, 0xec, 0x93, 0x2c, 0x13, 0x25, 0x31, 0x91, 0x3a, 0xff, 0x57, 0xf5, 0xae, 0x96, + 0x4e, 0x7f, 0x7f, 0x16, 0x6f, 0xb6, 0xd8, 0xba, 0xda, 0x62, 0xeb, 0x66, 0x8b, 0xc1, 0xbb, 0x1a, + 0x83, 0x4f, 0x35, 0x06, 0x97, 0x35, 0x06, 0x9b, 0x1a, 0x83, 0xaf, 0x35, 0x06, 0xdf, 0x6a, 0x6c, + 0xdd, 0xd4, 0x18, 0xbc, 0xdf, 0x61, 0x6b, 0xb3, 0xc3, 0xd6, 0xd5, 0x0e, 0x5b, 0xaf, 0x9f, 0xfc, + 0x45, 0xfe, 0x97, 0x9f, 0x2e, 0x76, 0xb4, 0x95, 0xe3, 0x1f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xab, + 0x30, 0xeb, 0xc9, 0x90, 0x03, 0x00, 0x00, } func (this *RuleGroupDesc) Equal(that interface{}) bool { @@ -1145,7 +1145,7 @@ func (m *RuleDesc) Unmarshal(dAtA []byte) error { if postIndex > l { return 
io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, github_com_grafana_loki_pkg_logproto.LabelAdapter{}) + m.Labels = append(m.Labels, github_com_grafana_loki_v3_pkg_logproto.LabelAdapter{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1179,7 +1179,7 @@ func (m *RuleDesc) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Annotations = append(m.Annotations, github_com_grafana_loki_pkg_logproto.LabelAdapter{}) + m.Annotations = append(m.Annotations, github_com_grafana_loki_v3_pkg_logproto.LabelAdapter{}) if err := m.Annotations[len(m.Annotations)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/ruler/rulespb/rules.proto b/pkg/ruler/rulespb/rules.proto index be001d238d1d0..82dc7daabd56a 100644 --- a/pkg/ruler/rulespb/rules.proto +++ b/pkg/ruler/rulespb/rules.proto @@ -7,7 +7,7 @@ import "google/protobuf/any.proto"; import "google/protobuf/duration.proto"; import "pkg/logproto/logproto.proto"; -option go_package = "github.com/grafana/loki/pkg/ruler/rulespb"; +option go_package = "github.com/grafana/loki/v3/pkg/ruler/rulespb"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; @@ -42,10 +42,10 @@ message RuleDesc { ]; repeated logproto.LegacyLabelPair labels = 5 [ (gogoproto.nullable) = false, - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.LabelAdapter" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" ]; repeated logproto.LegacyLabelPair annotations = 6 [ (gogoproto.nullable) = false, - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.LabelAdapter" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" ]; } diff --git a/pkg/ruler/rulestore/bucketclient/bucket_client.go b/pkg/ruler/rulestore/bucketclient/bucket_client.go index ddd90cb57fdfa..a39a8b03532da 100644 --- a/pkg/ruler/rulestore/bucketclient/bucket_client.go +++ 
b/pkg/ruler/rulestore/bucketclient/bucket_client.go @@ -15,9 +15,9 @@ import ( "github.com/thanos-io/objstore" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/storage/bucket" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/storage/bucket" ) const ( diff --git a/pkg/ruler/rulestore/bucketclient/bucket_client_test.go b/pkg/ruler/rulestore/bucketclient/bucket_client_test.go index 360b7aedb221b..0644238b21685 100644 --- a/pkg/ruler/rulestore/bucketclient/bucket_client_test.go +++ b/pkg/ruler/rulestore/bucketclient/bucket_client_test.go @@ -16,11 +16,11 @@ import ( "github.com/stretchr/testify/require" "github.com/thanos-io/objstore" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/ruler/rulestore/objectclient" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/objectclient" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" ) type testGroup struct { diff --git a/pkg/ruler/rulestore/config.go b/pkg/ruler/rulestore/config.go index f43464d463df2..1f0602424cdb9 100644 --- a/pkg/ruler/rulestore/config.go +++ b/pkg/ruler/rulestore/config.go @@ -6,10 +6,10 @@ import ( "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/configs/client" - "github.com/grafana/loki/pkg/ruler/rulestore/configdb" - "github.com/grafana/loki/pkg/ruler/rulestore/local" - "github.com/grafana/loki/pkg/storage/bucket" + "github.com/grafana/loki/v3/pkg/configs/client" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/configdb" + 
"github.com/grafana/loki/v3/pkg/ruler/rulestore/local" + "github.com/grafana/loki/v3/pkg/storage/bucket" ) // Config configures a rule store. diff --git a/pkg/ruler/rulestore/configdb/store.go b/pkg/ruler/rulestore/configdb/store.go index a3efa3fc30155..e4a0526386fe4 100644 --- a/pkg/ruler/rulestore/configdb/store.go +++ b/pkg/ruler/rulestore/configdb/store.go @@ -4,9 +4,9 @@ import ( "context" "errors" - "github.com/grafana/loki/pkg/configs/client" - "github.com/grafana/loki/pkg/configs/userconfig" - "github.com/grafana/loki/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/configs/client" + "github.com/grafana/loki/v3/pkg/configs/userconfig" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" ) const ( diff --git a/pkg/ruler/rulestore/configdb/store_test.go b/pkg/ruler/rulestore/configdb/store_test.go index 3542b7bf7bc4a..4d39581cb6492 100644 --- a/pkg/ruler/rulestore/configdb/store_test.go +++ b/pkg/ruler/rulestore/configdb/store_test.go @@ -8,8 +8,8 @@ import ( "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/configs/client" - "github.com/grafana/loki/pkg/configs/userconfig" + "github.com/grafana/loki/v3/pkg/configs/client" + "github.com/grafana/loki/v3/pkg/configs/userconfig" ) var zeroTime time.Time diff --git a/pkg/ruler/rulestore/local/local.go b/pkg/ruler/rulestore/local/local.go index d798c9831510d..0eb3cda68175d 100644 --- a/pkg/ruler/rulestore/local/local.go +++ b/pkg/ruler/rulestore/local/local.go @@ -9,7 +9,7 @@ import ( "github.com/pkg/errors" promRules "github.com/prometheus/prometheus/rules" - "github.com/grafana/loki/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" ) const ( diff --git a/pkg/ruler/rulestore/local/local_test.go b/pkg/ruler/rulestore/local/local_test.go index 8922781a207ae..ee6abc5b8ee3d 100644 --- a/pkg/ruler/rulestore/local/local_test.go +++ b/pkg/ruler/rulestore/local/local_test.go @@ -13,7 +13,7 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v3" - 
"github.com/grafana/loki/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" ) func TestClient_LoadAllRuleGroups(t *testing.T) { diff --git a/pkg/ruler/rulestore/objectclient/rule_store.go b/pkg/ruler/rulestore/objectclient/rule_store.go index 8e7effbcaae01..3471bd88db49a 100644 --- a/pkg/ruler/rulestore/objectclient/rule_store.go +++ b/pkg/ruler/rulestore/objectclient/rule_store.go @@ -14,9 +14,9 @@ import ( "github.com/pkg/errors" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" ) // Object Rule Storage Schema diff --git a/pkg/ruler/rulestore/store.go b/pkg/ruler/rulestore/store.go index 82f7d4f6ec097..67bd5580dd592 100644 --- a/pkg/ruler/rulestore/store.go +++ b/pkg/ruler/rulestore/store.go @@ -4,7 +4,7 @@ import ( "context" "errors" - "github.com/grafana/loki/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" ) var ( diff --git a/pkg/ruler/storage/cleaner/cleaner.go b/pkg/ruler/storage/cleaner/cleaner.go index b1ad8e76caa0b..e923815c0726a 100644 --- a/pkg/ruler/storage/cleaner/cleaner.go +++ b/pkg/ruler/storage/cleaner/cleaner.go @@ -14,8 +14,8 @@ import ( "github.com/go-kit/log/level" promwal "github.com/prometheus/prometheus/tsdb/wlog" - "github.com/grafana/loki/pkg/ruler/storage/instance" - "github.com/grafana/loki/pkg/ruler/storage/wal" + "github.com/grafana/loki/v3/pkg/ruler/storage/instance" + "github.com/grafana/loki/v3/pkg/ruler/storage/wal" ) // Default settings for the WAL cleaner. 
diff --git a/pkg/ruler/storage/cleaner/cleaner_test.go b/pkg/ruler/storage/cleaner/cleaner_test.go index 448333165857e..5d5147eb0ada7 100644 --- a/pkg/ruler/storage/cleaner/cleaner_test.go +++ b/pkg/ruler/storage/cleaner/cleaner_test.go @@ -12,7 +12,7 @@ import ( "github.com/go-kit/log" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/ruler/storage/instance" + "github.com/grafana/loki/v3/pkg/ruler/storage/instance" ) func TestWALCleaner_getAllStorageNoRoot(t *testing.T) { diff --git a/pkg/ruler/storage/instance/instance.go b/pkg/ruler/storage/instance/instance.go index b6cd1fc2ed25f..9bcfcea1319d4 100644 --- a/pkg/ruler/storage/instance/instance.go +++ b/pkg/ruler/storage/instance/instance.go @@ -27,9 +27,9 @@ import ( "github.com/prometheus/prometheus/tsdb/wlog" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/ruler/storage/util" - "github.com/grafana/loki/pkg/ruler/storage/wal" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/ruler/storage/util" + "github.com/grafana/loki/v3/pkg/ruler/storage/wal" + "github.com/grafana/loki/v3/pkg/util/build" ) func init() { diff --git a/pkg/ruler/storage/instance/instance_test.go b/pkg/ruler/storage/instance/instance_test.go index b017664e33f76..03a469ed187c6 100644 --- a/pkg/ruler/storage/instance/instance_test.go +++ b/pkg/ruler/storage/instance/instance_test.go @@ -29,7 +29,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/util/test" + "github.com/grafana/loki/v3/pkg/util/test" ) func TestConfig_Unmarshal_Defaults(t *testing.T) { diff --git a/pkg/ruler/storage/instance/manager.go b/pkg/ruler/storage/instance/manager.go index c65b087acae11..765f3dfc91657 100644 --- a/pkg/ruler/storage/instance/manager.go +++ b/pkg/ruler/storage/instance/manager.go @@ -14,7 +14,7 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/prometheus/storage" - util_log "github.com/grafana/loki/pkg/util/log" + util_log 
"github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/scheduler/scheduler.go b/pkg/scheduler/scheduler.go index 4c26becce7a63..ead2f1799b636 100644 --- a/pkg/scheduler/scheduler.go +++ b/pkg/scheduler/scheduler.go @@ -28,15 +28,15 @@ import ( "go.uber.org/atomic" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v2/frontendv2pb" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/queue" - "github.com/grafana/loki/pkg/scheduler/limits" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" - "github.com/grafana/loki/pkg/util" - lokigrpc "github.com/grafana/loki/pkg/util/httpgrpc" - lokihttpreq "github.com/grafana/loki/pkg/util/httpreq" - lokiring "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2/frontendv2pb" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/queue" + "github.com/grafana/loki/v3/pkg/scheduler/limits" + "github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" + "github.com/grafana/loki/v3/pkg/util" + lokigrpc "github.com/grafana/loki/v3/pkg/util/httpgrpc" + lokihttpreq "github.com/grafana/loki/v3/pkg/util/httpreq" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" ) const ( diff --git a/pkg/scheduler/scheduler_test.go b/pkg/scheduler/scheduler_test.go index 5be9ed7ed6c8d..7f8d88e4d679e 100644 --- a/pkg/scheduler/scheduler_test.go +++ b/pkg/scheduler/scheduler_test.go @@ -12,8 +12,8 @@ import ( "github.com/stretchr/testify/assert" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestScheduler_setRunState(t *testing.T) { diff --git a/pkg/scheduler/schedulerpb/scheduler.pb.go b/pkg/scheduler/schedulerpb/scheduler.pb.go index c2f95e59cdae4..fa4df89363c5b 100644 --- 
a/pkg/scheduler/schedulerpb/scheduler.pb.go +++ b/pkg/scheduler/schedulerpb/scheduler.pb.go @@ -9,7 +9,7 @@ import ( _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" httpgrpc "github.com/grafana/dskit/httpgrpc" - queryrange "github.com/grafana/loki/pkg/querier/queryrange" + queryrange "github.com/grafana/loki/v3/pkg/querier/queryrange" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" diff --git a/pkg/storage/async_store.go b/pkg/storage/async_store.go index e3794a63733ad..31fea0ced0881 100644 --- a/pkg/storage/async_store.go +++ b/pkg/storage/async_store.go @@ -7,22 +7,22 @@ import ( "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" "github.com/go-kit/log/level" "github.com/grafana/dskit/concurrency" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) type IngesterQuerier interface { diff --git a/pkg/storage/async_store_test.go 
b/pkg/storage/async_store_test.go index 9b2e440c1dc7d..366a7d6f1f1d5 100644 --- a/pkg/storage/async_store_test.go +++ b/pkg/storage/async_store_test.go @@ -5,17 +5,17 @@ import ( "testing" "time" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util" ) // storeMock is a mockable version of Loki's storage, used in querier unit tests diff --git a/pkg/storage/batch.go b/pkg/storage/batch.go index 20cc45b69590b..21f321a6f8a54 100644 --- a/pkg/storage/batch.go +++ b/pkg/storage/batch.go @@ -13,18 +13,18 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/promql" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + 
"github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type ChunkMetrics struct { diff --git a/pkg/storage/batch_test.go b/pkg/storage/batch_test.go index 1df906f7dcf2b..e041ad186fe89 100644 --- a/pkg/storage/batch_test.go +++ b/pkg/storage/batch_test.go @@ -13,13 +13,13 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/config" ) var NilMetrics = NewChunkMetrics(nil, 0) diff --git a/pkg/storage/bloom/v1/archive.go b/pkg/storage/bloom/v1/archive.go index 07ed9cd76d7f3..fcc3294eba977 100644 --- a/pkg/storage/bloom/v1/archive.go +++ b/pkg/storage/bloom/v1/archive.go @@ -8,7 +8,7 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/chunkenc" ) type TarEntry struct { diff --git a/pkg/storage/bloom/v1/archive_test.go b/pkg/storage/bloom/v1/archive_test.go index c77fbc69f122b..d6131c166f674 100644 --- a/pkg/storage/bloom/v1/archive_test.go +++ b/pkg/storage/bloom/v1/archive_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/chunkenc" ) 
func TestArchive(t *testing.T) { diff --git a/pkg/storage/bloom/v1/block.go b/pkg/storage/bloom/v1/block.go index c9eef5fa33027..91ba171b272c0 100644 --- a/pkg/storage/bloom/v1/block.go +++ b/pkg/storage/bloom/v1/block.go @@ -14,6 +14,7 @@ type BlockMetadata struct { } type Block struct { + metrics *Metrics // covers series pages index BlockIndex // covers bloom pages @@ -26,9 +27,10 @@ type Block struct { initialized bool } -func NewBlock(reader BlockReader) *Block { +func NewBlock(reader BlockReader, metrics *Metrics) *Block { return &Block{ - reader: reader, + reader: reader, + metrics: metrics, } } @@ -87,19 +89,6 @@ func combineChecksums(index, blooms uint32) uint32 { return index ^ blooms } -// convenience method -func (b *Block) Querier() *BlockQuerier { - return NewBlockQuerier(b) -} - -func (b *Block) Series() *LazySeriesIter { - return NewLazySeriesIter(b) -} - -func (b *Block) Blooms() *LazyBloomIter { - return NewLazyBloomIter(b) -} - func (b *Block) Metadata() (BlockMetadata, error) { if err := b.LoadHeaders(); err != nil { return BlockMetadata{}, err @@ -123,11 +112,16 @@ type BlockQuerier struct { cur *SeriesWithBloom } -func NewBlockQuerier(b *Block) *BlockQuerier { +// NewBlockQuerier returns a new BlockQuerier for the given block. +// WARNING: If noCapture is true, the underlying byte slice of the bloom page +// will be returned to the pool for efficiency. This can only safely be used +// when the underlying bloom bytes don't escape the decoder, i.e. +// when loading blooms for querying (bloom-gw) but not for writing (bloom-compactor). 
+func NewBlockQuerier(b *Block, noCapture bool, maxPageSize int) *BlockQuerier { return &BlockQuerier{ block: b, series: NewLazySeriesIter(b), - blooms: NewLazyBloomIter(b), + blooms: NewLazyBloomIter(b, noCapture, maxPageSize), } } diff --git a/pkg/storage/bloom/v1/block_writer.go b/pkg/storage/bloom/v1/block_writer.go index b7954264ae8ba..1bdc38f32fca9 100644 --- a/pkg/storage/bloom/v1/block_writer.go +++ b/pkg/storage/bloom/v1/block_writer.go @@ -8,7 +8,7 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" ) const ( diff --git a/pkg/storage/bloom/v1/bloom.go b/pkg/storage/bloom/v1/bloom.go index 661c60970ab8e..1554b6828f246 100644 --- a/pkg/storage/bloom/v1/bloom.go +++ b/pkg/storage/bloom/v1/bloom.go @@ -7,18 +7,17 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/util/encoding" ) // NB(chaudum): Some block pages are way bigger than others (400MiB and // bigger), and loading multiple pages into memory in parallel can cause the // gateways to OOM. -// Figure out a decent maximum page size that we can process. -// TODO(chaudum): Make max page size configurable -var maxPageSize = 32 << 20 // 32MB -var ErrPageTooLarge = errors.Errorf("bloom page too large: size limit is %.1fMiB", float64(maxPageSize)/float64(1<<20)) +// Figure out a decent default maximum page size that we can process. 
+var DefaultMaxPageSize = 64 << 20 // 64MB +var ErrPageTooLarge = errors.Errorf("bloom page too large") type Bloom struct { filter.ScalableBloomFilter @@ -86,7 +85,7 @@ func LazyDecodeBloomPage(r io.Reader, pool chunkenc.ReaderPool, page BloomPageHe } defer pool.PutReader(decompressor) - b := make([]byte, page.DecompressedLen) + b := BlockPool.Get(page.DecompressedLen)[:page.DecompressedLen] if _, err = io.ReadFull(decompressor, b); err != nil { return nil, errors.Wrap(err, "decompressing bloom page") @@ -97,11 +96,13 @@ func LazyDecodeBloomPage(r io.Reader, pool chunkenc.ReaderPool, page BloomPageHe return decoder, nil } +// shortcut to skip allocations when we know the page is not compressed func LazyDecodeBloomPageNoCompression(r io.Reader, page BloomPageHeader) (*BloomPageDecoder, error) { + // data + checksum if page.Len != page.DecompressedLen+4 { return nil, errors.New("the Len and DecompressedLen of the page do not match") } - data := make([]byte, page.Len) + data := BlockPool.Get(page.Len)[:page.Len] _, err := io.ReadFull(r, data) if err != nil { @@ -152,6 +153,21 @@ type BloomPageDecoder struct { err error } +// Relinquish returns the underlying byte slice to the pool +// for efficiency. It's intended to be used as a +// perf optimization. 
+// This can only safely be used when the underlying bloom +// bytes don't escape the decoder: +// on reads in the bloom-gw but not in the bloom-compactor +func (d *BloomPageDecoder) Relinquish() { + data := d.data + d.data = nil + + if cap(data) > 0 { + BlockPool.Put(data) + } +} + func (d *BloomPageDecoder) Reset() { d.err = nil d.cur = nil @@ -259,8 +275,10 @@ func (b *BloomBlock) DecodeHeaders(r io.ReadSeeker) (uint32, error) { return checksum, nil } -func (b *BloomBlock) BloomPageDecoder(r io.ReadSeeker, pageIdx int) (*BloomPageDecoder, error) { +func (b *BloomBlock) BloomPageDecoder(r io.ReadSeeker, pageIdx int, maxPageSize int, metrics *Metrics) (res *BloomPageDecoder, err error) { if pageIdx < 0 || pageIdx >= len(b.pageHeaders) { + metrics.pagesSkipped.WithLabelValues(pageTypeBloom, skipReasonOOB).Inc() + metrics.bytesSkipped.WithLabelValues(pageTypeBloom, skipReasonOOB).Add(float64(b.pageHeaders[pageIdx].DecompressedLen)) return nil, fmt.Errorf("invalid page (%d) for bloom page decoding", pageIdx) } @@ -268,16 +286,30 @@ func (b *BloomBlock) BloomPageDecoder(r io.ReadSeeker, pageIdx int) (*BloomPageD // fmt.Printf("pageIdx=%d page=%+v size=%.2fMiB\n", pageIdx, page, float64(page.Len)/float64(1<<20)) if page.Len > maxPageSize { + metrics.pagesSkipped.WithLabelValues(pageTypeBloom, skipReasonTooLarge).Inc() + metrics.bytesSkipped.WithLabelValues(pageTypeBloom, skipReasonTooLarge).Add(float64(page.DecompressedLen)) return nil, ErrPageTooLarge } - if _, err := r.Seek(int64(page.Offset), io.SeekStart); err != nil { + if _, err = r.Seek(int64(page.Offset), io.SeekStart); err != nil { + metrics.pagesSkipped.WithLabelValues(pageTypeBloom, skipReasonErr).Inc() + metrics.bytesSkipped.WithLabelValues(pageTypeBloom, skipReasonErr).Add(float64(page.DecompressedLen)) return nil, errors.Wrap(err, "seeking to bloom page") } if b.schema.encoding == chunkenc.EncNone { - return LazyDecodeBloomPageNoCompression(r, page) + res, err = LazyDecodeBloomPageNoCompression(r, page) + 
} else { + res, err = LazyDecodeBloomPage(r, b.schema.DecompressorPool(), page) + } + + if err != nil { + metrics.pagesSkipped.WithLabelValues(pageTypeBloom, skipReasonErr).Inc() + metrics.bytesSkipped.WithLabelValues(pageTypeBloom, skipReasonErr).Add(float64(page.DecompressedLen)) + return nil, errors.Wrap(err, "decoding bloom page") } - return LazyDecodeBloomPage(r, b.schema.DecompressorPool(), page) + metrics.pagesRead.WithLabelValues(pageTypeBloom).Inc() + metrics.bytesRead.WithLabelValues(pageTypeBloom).Add(float64(page.DecompressedLen)) + return res, nil } diff --git a/pkg/storage/bloom/v1/bloom_querier.go b/pkg/storage/bloom/v1/bloom_querier.go index d0dbdc1b3b550..01c0216c36f0a 100644 --- a/pkg/storage/bloom/v1/bloom_querier.go +++ b/pkg/storage/bloom/v1/bloom_querier.go @@ -7,7 +7,10 @@ type BloomQuerier interface { } type LazyBloomIter struct { + usePool bool + b *Block + m int // max page size in bytes // state initialized bool @@ -16,9 +19,16 @@ type LazyBloomIter struct { curPage *BloomPageDecoder } -func NewLazyBloomIter(b *Block) *LazyBloomIter { +// NewLazyBloomIter returns a new lazy bloom iterator. +// If pool is true, the underlying byte slice of the bloom page +// will be returned to the pool for efficiency. +// This can only safely be used when the underlying bloom +// bytes don't escape the decoder. 
+func NewLazyBloomIter(b *Block, pool bool, maxSize int) *LazyBloomIter { return &LazyBloomIter{ - b: b, + usePool: pool, + b: b, + m: maxSize, } } @@ -39,12 +49,18 @@ func (it *LazyBloomIter) Seek(offset BloomOffset) { // load the desired page if it.curPageIndex != offset.Page || it.curPage == nil { + // drop the current page if it exists and + // we're using the pool + if it.curPage != nil && it.usePool { + it.curPage.Relinquish() + } + r, err := it.b.reader.Blooms() if err != nil { it.err = errors.Wrap(err, "getting blooms reader") return } - decoder, err := it.b.blooms.BloomPageDecoder(r, offset.Page) + decoder, err := it.b.blooms.BloomPageDecoder(r, offset.Page, it.m, it.b.metrics) if err != nil { it.err = errors.Wrap(err, "loading bloom page") return @@ -83,6 +99,8 @@ func (it *LazyBloomIter) next() bool { it.curPage, err = it.b.blooms.BloomPageDecoder( r, it.curPageIndex, + it.m, + it.b.metrics, ) if err != nil { it.err = err @@ -96,8 +114,14 @@ func (it *LazyBloomIter) next() bool { if it.curPage.Err() != nil { return false } + // we've exhausted the current page, progress to next it.curPageIndex++ + // drop the current page if it exists and + // we're using the pool + if it.usePool { + it.curPage.Relinquish() + } it.curPage = nil continue } diff --git a/pkg/storage/bloom/v1/bloom_tester.go b/pkg/storage/bloom/v1/bloom_tester.go index ab9cbcc64a653..dbb0f7a12aaff 100644 --- a/pkg/storage/bloom/v1/bloom_tester.go +++ b/pkg/storage/bloom/v1/bloom_tester.go @@ -3,11 +3,11 @@ package v1 import ( "github.com/grafana/regexp" regexpsyntax "github.com/grafana/regexp/syntax" - "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log/pattern" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" 
) type BloomTest interface { @@ -90,13 +90,16 @@ func FiltersToBloomTest(b NGramBuilder, filters ...syntax.LineFilterExpr) BloomT func simpleFilterToBloomTest(b NGramBuilder, filter syntax.LineFilter) BloomTest { switch filter.Ty { - case labels.MatchEqual, labels.MatchNotEqual: - var test BloomTest = newStringTest(b, filter.Match) - if filter.Ty == labels.MatchNotEqual { - test = newNotTest(test) - } - return test - case labels.MatchRegexp, labels.MatchNotRegexp: + case log.LineMatchNotEqual, log.LineMatchNotRegexp, log.LineMatchNotPattern: + // We cannot test _negated_ filters with a bloom filter since blooms are probabilistic + // filters that can only tell us if a string _might_ exist. + // For example, for `!= "foo"`, the bloom filter might tell us that the string "foo" might exist + // but because we are not sure, we cannot discard that chunk because it might actually not be there. + // Therefore, we return a test that always returns true. + return MatchAll + case log.LineMatchEqual: + return newStringTest(b, filter.Match) + case log.LineMatchRegexp: reg, err := regexpsyntax.Parse(filter.Match, regexpsyntax.Perl) if err != nil { // TODO: log error @@ -111,11 +114,9 @@ func simpleFilterToBloomTest(b NGramBuilder, filter syntax.LineFilter) BloomTest return MatchAll } - var test BloomTest = matcherFilterWrapper{filter: matcher} - if filter.Ty == labels.MatchNotRegexp { - test = newNotTest(test) - } - return test + return matcherFilterWrapper{filter: matcher} + case log.LineMatchPattern: + return newPatternTest(b, filter.Match) default: return MatchAll } @@ -277,3 +278,20 @@ func (o orTest) Matches(bloom filter.Checker) bool { func (o orTest) MatchesWithPrefixBuf(bloom filter.Checker, buf []byte, prefixLen int) bool { return o.left.MatchesWithPrefixBuf(bloom, buf, prefixLen) || o.right.MatchesWithPrefixBuf(bloom, buf, prefixLen) } + +func newPatternTest(b NGramBuilder, match string) BloomTest { + lit, err := pattern.ParseLiterals(match) + if err != nil { + return 
MatchAll + } + var test stringTest + for _, l := range lit { + it := b.Tokens(string(l)) + for it.Next() { + ngram := make([]byte, len(it.At())) + copy(ngram, it.At()) + test.ngrams = append(test.ngrams, ngram) + } + } + return test +} diff --git a/pkg/storage/bloom/v1/bloom_tester_test.go b/pkg/storage/bloom/v1/bloom_tester_test.go index 46884140ad59e..00db00340340f 100644 --- a/pkg/storage/bloom/v1/bloom_tester_test.go +++ b/pkg/storage/bloom/v1/bloom_tester_test.go @@ -5,8 +5,8 @@ import ( "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" ) func TestFiltersToBloomTests(t *testing.T) { @@ -47,16 +47,16 @@ func TestFiltersToBloomTests(t *testing.T) { expectMatch: false, }, { - name: "notEq match", + name: "notEq doesnt exist", query: `{app="fake"} != "nope"`, bloom: fakeBloom{"foo", "bar"}, expectMatch: true, }, { - name: "notEq no match", + name: "notEq exists", query: `{app="fake"} != "foo"`, bloom: fakeBloom{"foo", "bar"}, - expectMatch: false, + expectMatch: true, // Still should match because it's NotEQ }, { name: "or filter both match", @@ -89,22 +89,22 @@ func TestFiltersToBloomTests(t *testing.T) { expectMatch: true, }, { - name: "Not or filter right no match", + name: "NotEq OR filter right exists", query: `{app="fake"} != "nope" or "bar"`, bloom: fakeBloom{"foo", "bar"}, - expectMatch: false, + expectMatch: true, // Still should match because it's NotEQ }, { - name: "Not or filter left no match", + name: "Not OR filter left exists", query: `{app="fake"} != "foo" or "nope"`, bloom: fakeBloom{"foo", "bar"}, - expectMatch: false, + expectMatch: true, // Still should match because it's NotEQ }, { - name: "Not or filter no match", + name: "NotEq OR filter both exists", query: `{app="fake"} != "foo" or "bar"`, bloom: fakeBloom{"foo", "bar"}, - expectMatch: false, + 
expectMatch: true, // Still should match because it's NotEQ }, { name: "complex filter match", @@ -125,10 +125,10 @@ func TestFiltersToBloomTests(t *testing.T) { expectMatch: true, }, { - name: "regex match none", + name: "regex match all notEq", query: `{app="fake"} !~ ".*"`, bloom: fakeBloom{"foo", "bar"}, - expectMatch: false, + expectMatch: true, // Still should match, }, { name: "regex match", @@ -138,10 +138,16 @@ func TestFiltersToBloomTests(t *testing.T) { }, { name: "regex no match", - query: `{app="fake"} !~ "nope|.*foo.*"`, + query: `{app="fake"} |~ ".*not.*"`, bloom: fakeBloom{"foo", "bar"}, expectMatch: false, }, + { + name: "regex notEq right exists", + query: `{app="fake"} !~ "nope|.*foo.*"`, + bloom: fakeBloom{"foo", "bar"}, + expectMatch: true, // Still should match because it's NotEQ + }, { name: "complex regex match", query: `{app="fake"} |~ "(nope|.*not.*|.*foo.*)" or "(no|ba)" !~ "noz.*" or "(nope|not)"`, @@ -149,10 +155,10 @@ func TestFiltersToBloomTests(t *testing.T) { expectMatch: true, }, { - name: "complex regex no match", + name: "complex regex with notEq exists", query: `{app="fake"} |~ "(nope|.*not.*|.*foo.*)" or "(no|ba)" !~ "noz.*"`, bloom: fakeBloom{"foo", "bar", "baz", "fuzz", "noz"}, - expectMatch: false, + expectMatch: true, // Still should match because it's NotEQ }, { name: "line filter after line format", @@ -160,6 +166,42 @@ func TestFiltersToBloomTests(t *testing.T) { bloom: fakeBloom{"foo"}, expectMatch: true, }, + { + name: "pattern match exists", + query: `{app="fake"} |> "<_>foo"`, + bloom: fakeBloom{"foo", "bar"}, + expectMatch: true, + }, + { + name: "pattern match does not exist", + query: `{app="fake"} |> "<_>foo"`, + bloom: fakeBloom{"bar", "baz"}, + expectMatch: false, + }, + { + name: "pattern not match exists", + query: `{app="fake"} !> "<_>foo"`, + bloom: fakeBloom{"foo", "bar"}, + expectMatch: true, + }, + { + name: "pattern not match does not exist", + query: `{app="fake"} !> "<_>foo"`, + bloom: 
fakeBloom{"bar", "baz"}, + expectMatch: true, + }, + { + name: "pattern all", + query: `{app="fake"} |> "<_>"`, + bloom: fakeBloom{"bar", "baz"}, + expectMatch: true, + }, + { + name: "pattern empty", + query: `{app="fake"} |> ""`, + bloom: fakeBloom{"bar", "baz"}, + expectMatch: true, + }, } { t.Run(tc.name, func(t *testing.T) { expr, err := syntax.ParseExpr(tc.query) diff --git a/pkg/storage/bloom/v1/bloom_tokenizer.go b/pkg/storage/bloom/v1/bloom_tokenizer.go index 16f6f0bc68165..f529f22717639 100644 --- a/pkg/storage/bloom/v1/bloom_tokenizer.go +++ b/pkg/storage/bloom/v1/bloom_tokenizer.go @@ -10,10 +10,10 @@ import ( "github.com/grafana/dskit/multierror" - "github.com/grafana/loki/pkg/iter" + "github.com/grafana/loki/v3/pkg/iter" - "github.com/grafana/loki/pkg/util/encoding" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/encoding" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) /* diff --git a/pkg/storage/bloom/v1/bloom_tokenizer_test.go b/pkg/storage/bloom/v1/bloom_tokenizer_test.go index d80c175713454..048c972d44c68 100644 --- a/pkg/storage/bloom/v1/bloom_tokenizer_test.go +++ b/pkg/storage/bloom/v1/bloom_tokenizer_test.go @@ -9,15 +9,16 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" "github.com/prometheus/client_golang/prometheus" ) diff --git a/pkg/storage/bloom/v1/bounds.go b/pkg/storage/bloom/v1/bounds.go index fd538ed161c09..1b482e46665bf 100644 --- a/pkg/storage/bloom/v1/bounds.go +++ b/pkg/storage/bloom/v1/bounds.go 
@@ -3,13 +3,16 @@ package v1 import ( "fmt" "hash" + "math" "strings" + "unsafe" "github.com/pkg/errors" "github.com/prometheus/common/model" "golang.org/x/exp/slices" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util/encoding" ) type BoundsCheck uint8 @@ -20,6 +23,30 @@ const ( After ) +var ( + // FullBounds is the bounds that covers the entire fingerprint space + FullBounds = NewBounds(0, model.Fingerprint(math.MaxUint64)) +) + +type FingerprintBounds struct { + Min, Max model.Fingerprint +} + +// Proto compat +// compiler check ensuring equal repr of underlying types +var _ FingerprintBounds = FingerprintBounds(logproto.FPBounds{}) + +func BoundsFromProto(pb logproto.FPBounds) FingerprintBounds { + return FingerprintBounds(pb) +} + +// Unsafe cast to avoid allocation. This _requires_ that the underlying types are the same +// which is checked by the compiler above +func MultiBoundsFromProto(pb []logproto.FPBounds) MultiFingerprintBounds { + //nolint:unconvert + return MultiFingerprintBounds(*(*MultiFingerprintBounds)(unsafe.Pointer(&pb))) +} + // ParseBoundsFromAddr parses a fingerprint bounds from a string func ParseBoundsFromAddr(s string) (FingerprintBounds, error) { parts := strings.Split(s, "-") @@ -40,10 +67,6 @@ func ParseBoundsFromParts(a, b string) (FingerprintBounds, error) { return NewBounds(minFingerprint, maxFingerprint), nil } -type FingerprintBounds struct { - Min, Max model.Fingerprint -} - func NewBounds(min, max model.Fingerprint) FingerprintBounds { return FingerprintBounds{Min: min, Max: max} } @@ -91,8 +114,16 @@ func (b FingerprintBounds) Match(fp model.Fingerprint) bool { return b.Cmp(fp) == Overlap } -// GetFromThrough implements TSDBs FingerprintFilter interface +// GetFromThrough implements TSDBs FingerprintFilter interface, +// NB(owen-d): adjusts to return `[from,through)` instead of `[from,through]` which the +// fingerprint bounds struct tracks. 
func (b FingerprintBounds) GetFromThrough() (model.Fingerprint, model.Fingerprint) { + from, through := b.Bounds() + return from, max(through+1, through) +} + +// Bounds returns the inclusive bounds [from,through] +func (b FingerprintBounds) Bounds() (model.Fingerprint, model.Fingerprint) { return b.Min, b.Max } diff --git a/pkg/storage/bloom/v1/bounds_test.go b/pkg/storage/bloom/v1/bounds_test.go index 4dd01e60c1238..5baaf07e900df 100644 --- a/pkg/storage/bloom/v1/bounds_test.go +++ b/pkg/storage/bloom/v1/bounds_test.go @@ -5,8 +5,29 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" + + "github.com/grafana/loki/v3/pkg/logproto" ) +func TestBoundsFromProto(t *testing.T) { + bounds := BoundsFromProto(logproto.FPBounds{ + Min: 10, + Max: 2000, + }) + assert.Equal(t, NewBounds(10, 2000), bounds) +} + +func TestMultiBoundsFromProto(t *testing.T) { + bounds := MultiBoundsFromProto([]logproto.FPBounds{ + {Min: 10, Max: 2000}, + {Min: 2001, Max: 4000}, + }) + assert.Equal(t, MultiFingerprintBounds{ + NewBounds(10, 2000), + NewBounds(2001, 4000), + }, bounds) +} + func Test_ParseFingerprint(t *testing.T) { t.Parallel() fp, err := model.ParseFingerprint("7d0") diff --git a/pkg/storage/bloom/v1/builder.go b/pkg/storage/bloom/v1/builder.go index aa00b58cf6705..0d129aa3def25 100644 --- a/pkg/storage/bloom/v1/builder.go +++ b/pkg/storage/bloom/v1/builder.go @@ -9,9 +9,9 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/util/encoding" ) var ( diff --git a/pkg/storage/bloom/v1/builder_test.go b/pkg/storage/bloom/v1/builder_test.go index 8324475662c58..2046144032495 100644 --- a/pkg/storage/bloom/v1/builder_test.go +++ 
b/pkg/storage/bloom/v1/builder_test.go @@ -10,8 +10,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/util/encoding" ) var blockEncodings = []chunkenc.Encoding{ @@ -116,8 +116,8 @@ func TestBlockBuilder_RoundTrip(t *testing.T) { processedData = data[:lastProcessedIdx] } - block := NewBlock(tc.reader) - querier := NewBlockQuerier(block) + block := NewBlock(tc.reader, NewMetrics(nil)) + querier := NewBlockQuerier(block, false, DefaultMaxPageSize) err = block.LoadHeaders() require.Nil(t, err) @@ -218,7 +218,7 @@ func TestMergeBuilder(t *testing.T) { itr := NewSliceIter[SeriesWithBloom](data[min:max]) _, err = builder.BuildFrom(itr) require.Nil(t, err) - blocks = append(blocks, NewPeekingIter[*SeriesWithBloom](NewBlockQuerier(NewBlock(reader)))) + blocks = append(blocks, NewPeekingIter[*SeriesWithBloom](NewBlockQuerier(NewBlock(reader, NewMetrics(nil)), false, DefaultMaxPageSize))) } // We're not testing the ability to extend a bloom in this test @@ -251,8 +251,8 @@ func TestMergeBuilder(t *testing.T) { _, _, err = mergeBuilder.Build(builder) require.Nil(t, err) - block := NewBlock(reader) - querier := NewBlockQuerier(block) + block := NewBlock(reader, NewMetrics(nil)) + querier := NewBlockQuerier(block, false, DefaultMaxPageSize) EqualIterators[*SeriesWithBloom]( t, @@ -295,8 +295,8 @@ func TestBlockReset(t *testing.T) { itr := NewSliceIter[SeriesWithBloom](data) _, err = builder.BuildFrom(itr) require.Nil(t, err) - block := NewBlock(reader) - querier := NewBlockQuerier(block) + block := NewBlock(reader, NewMetrics(nil)) + querier := NewBlockQuerier(block, false, DefaultMaxPageSize) rounds := make([][]model.Fingerprint, 2) @@ -361,8 +361,8 @@ func TestMergeBuilder_Roundtrip(t *testing.T) { itr := NewSliceIter[SeriesWithBloom](xs[minIdx:maxIdx]) _, err = 
builder.BuildFrom(itr) require.Nil(t, err) - block := NewBlock(reader) - querier := NewBlockQuerier(block) + block := NewBlock(reader, NewMetrics(nil)) + querier := NewBlockQuerier(block, false, DefaultMaxPageSize) // rather than use the block querier directly, collect it's data // so we can use it in a few places later @@ -423,7 +423,7 @@ func TestMergeBuilder_Roundtrip(t *testing.T) { // ensure the new block contains one copy of all the data // by comparing it against an iterator over the source data - mergedBlockQuerier := NewBlockQuerier(NewBlock(reader)) + mergedBlockQuerier := NewBlockQuerier(NewBlock(reader, NewMetrics(nil)), false, DefaultMaxPageSize) sourceItr := NewSliceIter[*SeriesWithBloom](PointerSlice[SeriesWithBloom](xs)) EqualIterators[*SeriesWithBloom]( diff --git a/pkg/storage/bloom/v1/fuse_test.go b/pkg/storage/bloom/v1/fuse_test.go index b5a5c22984559..f1cd77d9ebd4a 100644 --- a/pkg/storage/bloom/v1/fuse_test.go +++ b/pkg/storage/bloom/v1/fuse_test.go @@ -10,7 +10,7 @@ import ( "github.com/grafana/dskit/concurrency" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/chunkenc" ) func keysToBloomTest(keys [][]byte) BloomTest { @@ -48,8 +48,8 @@ func TestFusedQuerier(t *testing.T) { _, err = builder.BuildFrom(itr) require.NoError(t, err) require.False(t, itr.Next()) - block := NewBlock(reader) - querier := NewBlockQuerier(block) + block := NewBlock(reader, NewMetrics(nil)) + querier := NewBlockQuerier(block, true, DefaultMaxPageSize) n := 2 nReqs := numSeries / n @@ -142,8 +142,8 @@ func setupBlockForBenchmark(b *testing.B) (*BlockQuerier, [][]Request, []chan Ou itr := NewSliceIter[SeriesWithBloom](data) _, err = builder.BuildFrom(itr) require.Nil(b, err) - block := NewBlock(reader) - querier := NewBlockQuerier(block) + block := NewBlock(reader, NewMetrics(nil)) + querier := NewBlockQuerier(block, true, DefaultMaxPageSize) numRequestChains := 100 seriesPerRequest := 100 diff --git 
a/pkg/storage/bloom/v1/index.go b/pkg/storage/bloom/v1/index.go index 838ba891cb2f6..ff9ecaffbac37 100644 --- a/pkg/storage/bloom/v1/index.go +++ b/pkg/storage/bloom/v1/index.go @@ -9,9 +9,9 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util/encoding" ) type Schema struct { @@ -151,7 +151,16 @@ func (b *BlockIndex) DecodeHeaders(r io.ReadSeeker) (uint32, error) { } // decompress page and return an iterator over the bytes -func (b *BlockIndex) NewSeriesPageDecoder(r io.ReadSeeker, header SeriesPageHeaderWithOffset) (*SeriesPageDecoder, error) { +func (b *BlockIndex) NewSeriesPageDecoder(r io.ReadSeeker, header SeriesPageHeaderWithOffset, metrics *Metrics) (res *SeriesPageDecoder, err error) { + defer func() { + if err != nil { + metrics.pagesSkipped.WithLabelValues(pageTypeSeries, skipReasonErr).Inc() + metrics.bytesSkipped.WithLabelValues(pageTypeSeries).Add(float64(header.DecompressedLen)) + } else { + metrics.pagesRead.WithLabelValues(pageTypeSeries).Inc() + metrics.bytesRead.WithLabelValues(pageTypeSeries).Add(float64(header.DecompressedLen)) + } + }() if _, err := r.Seek(int64(header.Offset), io.SeekStart); err != nil { return nil, errors.Wrap(err, "seeking to series page") @@ -159,7 +168,7 @@ func (b *BlockIndex) NewSeriesPageDecoder(r io.ReadSeeker, header SeriesPageHead data := BlockPool.Get(header.Len)[:header.Len] defer BlockPool.Put(data) - _, err := io.ReadFull(r, data) + _, err = io.ReadFull(r, data) if err != nil { return nil, errors.Wrap(err, "reading series page") } @@ -180,7 +189,7 @@ func (b *BlockIndex) NewSeriesPageDecoder(r io.ReadSeeker, header SeriesPageHead return nil, errors.Wrap(err, "decompressing series page") } - res := &SeriesPageDecoder{ + res = 
&SeriesPageDecoder{ data: decompressed, header: header.SeriesHeader, } @@ -221,8 +230,8 @@ func aggregateHeaders(xs []SeriesHeader) SeriesHeader { return SeriesHeader{} } - fromFp, _ := xs[0].Bounds.GetFromThrough() - _, throughFP := xs[len(xs)-1].Bounds.GetFromThrough() + fromFp, _ := xs[0].Bounds.Bounds() + _, throughFP := xs[len(xs)-1].Bounds.Bounds() res := SeriesHeader{ Bounds: NewBounds(fromFp, throughFP), } diff --git a/pkg/storage/bloom/v1/index_querier.go b/pkg/storage/bloom/v1/index_querier.go index 142b6423185b6..8ba984d3df31c 100644 --- a/pkg/storage/bloom/v1/index_querier.go +++ b/pkg/storage/bloom/v1/index_querier.go @@ -74,6 +74,7 @@ func (it *LazySeriesIter) Seek(fp model.Fingerprint) error { it.curPage, err = it.b.index.NewSeriesPageDecoder( r, page, + it.b.metrics, ) if err != nil { return err @@ -107,6 +108,7 @@ func (it *LazySeriesIter) next() bool { it.curPage, err = it.b.index.NewSeriesPageDecoder( r, curHeader, + it.b.metrics, ) if err != nil { it.err = err diff --git a/pkg/storage/bloom/v1/index_test.go b/pkg/storage/bloom/v1/index_test.go index 477d266af7a75..eb61b1e2a2abc 100644 --- a/pkg/storage/bloom/v1/index_test.go +++ b/pkg/storage/bloom/v1/index_test.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/util/encoding" ) func TestBloomOffsetEncoding(t *testing.T) { diff --git a/pkg/storage/bloom/v1/metrics.go b/pkg/storage/bloom/v1/metrics.go index 138580a435054..c45b2235dccdb 100644 --- a/pkg/storage/bloom/v1/metrics.go +++ b/pkg/storage/bloom/v1/metrics.go @@ -3,6 +3,8 @@ package v1 import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" + + "github.com/grafana/loki/v3/pkg/util/constants" ) type Metrics struct { @@ -15,6 +17,11 @@ type Metrics struct { blockSeriesIterated prometheus.Counter tokensTotal prometheus.Counter insertsTotal 
*prometheus.CounterVec + + pagesRead *prometheus.CounterVec + pagesSkipped *prometheus.CounterVec + bytesRead *prometheus.CounterVec + bytesSkipped *prometheus.CounterVec } const ( @@ -26,49 +33,86 @@ const ( collisionTypeFalse = "false" collisionTypeTrue = "true" collisionTypeCache = "cache" + + pageTypeBloom = "bloom" + pageTypeSeries = "series" + + skipReasonTooLarge = "too_large" + skipReasonErr = "err" + skipReasonOOB = "out_of_bounds" ) func NewMetrics(r prometheus.Registerer) *Metrics { return &Metrics{ sbfCreationTime: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Name: "bloom_creation_time_total", - Help: "Time spent creating scalable bloom filters", + Namespace: constants.Loki, + Name: "bloom_creation_time_total", + Help: "Time spent creating scalable bloom filters", }), bloomSize: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Name: "bloom_size", - Help: "Size of the bloom filter in bytes", - Buckets: prometheus.ExponentialBucketsRange(128, 16777216, 8), + Namespace: constants.Loki, + Name: "bloom_size", + Help: "Size of the bloom filter in bytes", + Buckets: prometheus.ExponentialBucketsRange(1<<10, 512<<20, 10), }), hammingWeightRatio: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Name: "bloom_hamming_weight_ratio", - Help: "Ratio of the hamming weight of the bloom filter to the number of bits in the bloom filter", - Buckets: prometheus.ExponentialBucketsRange(0.001, 1, 12), + Namespace: constants.Loki, + Name: "bloom_hamming_weight_ratio", + Help: "Ratio of the hamming weight of the bloom filter to the number of bits in the bloom filter", + Buckets: prometheus.ExponentialBucketsRange(0.001, 1, 12), }), estimatedCount: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Name: "bloom_estimated_count", - Help: "Estimated number of elements in the bloom filter", - Buckets: prometheus.ExponentialBucketsRange(1, 33554432, 10), + Namespace: constants.Loki, + Name: "bloom_estimated_count", + Help: "Estimated number of 
elements in the bloom filter", + Buckets: prometheus.ExponentialBucketsRange(1, 33554432, 10), }), chunksIndexed: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ - Name: "bloom_chunks_indexed_total", - Help: "Number of chunks indexed in bloom filters, partitioned by type. Type can be iterated or copied, where iterated indicates the chunk data was fetched and ngrams for it's contents generated whereas copied indicates the chunk already existed in another source block and was copied to the new block", + Namespace: constants.Loki, + Name: "bloom_chunks_indexed_total", + Help: "Number of chunks indexed in bloom filters, partitioned by type. Type can be iterated or copied, where iterated indicates the chunk data was fetched and ngrams for it's contents generated whereas copied indicates the chunk already existed in another source block and was copied to the new block", }, []string{"type"}), chunksPerSeries: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Name: "bloom_chunks_per_series", - Help: "Number of chunks per series. Can be copied from an existing bloom or iterated", - Buckets: prometheus.ExponentialBucketsRange(1, 4096, 10), + Namespace: constants.Loki, + Name: "bloom_chunks_per_series", + Help: "Number of chunks per series. 
Can be copied from an existing bloom or iterated", + Buckets: prometheus.ExponentialBucketsRange(1, 4096, 10), }), blockSeriesIterated: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Name: "bloom_block_series_iterated_total", - Help: "Number of series iterated in existing blocks while generating new blocks", + Namespace: constants.Loki, + Name: "bloom_block_series_iterated_total", + Help: "Number of series iterated in existing blocks while generating new blocks", }), tokensTotal: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Name: "bloom_tokens_total", - Help: "Number of tokens processed", + Namespace: constants.Loki, + Name: "bloom_tokens_total", + Help: "Number of tokens processed", }), insertsTotal: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ - Name: "bloom_inserts_total", - Help: "Number of inserts into the bloom filter. collision type may be `false` (no collision), `cache` (found in token cache) or true (found in bloom filter). token_type may be either `raw` (the original ngram) or `chunk_prefixed` (the ngram with the chunk prefix)", + Namespace: constants.Loki, + Name: "bloom_inserts_total", + Help: "Number of inserts into the bloom filter. collision type may be `false` (no collision), `cache` (found in token cache) or true (found in bloom filter). 
token_type may be either `raw` (the original ngram) or `chunk_prefixed` (the ngram with the chunk prefix)", }, []string{"token_type", "collision"}), + + pagesRead: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "bloom_pages_read_total", + Help: "Number of bloom pages read", + }, []string{"type"}), + pagesSkipped: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "bloom_pages_skipped_total", + Help: "Number of bloom pages skipped during query iteration", + }, []string{"type", "reason"}), + bytesRead: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "bloom_bytes_read_total", + Help: "Number of bytes read from bloom pages", + }, []string{"type"}), + bytesSkipped: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "bloom_bytes_skipped_total", + Help: "Number of bytes skipped during query iteration", + }, []string{"type", "reason"}), } } diff --git a/pkg/storage/bloom/v1/test_util.go b/pkg/storage/bloom/v1/test_util.go index 3cd021de5f5ee..d3ac7e427ec51 100644 --- a/pkg/storage/bloom/v1/test_util.go +++ b/pkg/storage/bloom/v1/test_util.go @@ -9,8 +9,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" ) // TODO(owen-d): this should probably be in it's own testing-util package @@ -41,7 +41,7 @@ func MakeBlock(t testing.TB, nth int, fromFp, throughFp model.Fingerprint, fromT itr := NewSliceIter[SeriesWithBloom](data) _, err = builder.BuildFrom(itr) require.Nil(t, err) - block := NewBlock(reader) + block := NewBlock(reader, NewMetrics(nil)) return block, data, keys } diff --git a/pkg/storage/bloom/v1/util.go b/pkg/storage/bloom/v1/util.go index e6ad69a248fed..06cf1f6add227 
100644 --- a/pkg/storage/bloom/v1/util.go +++ b/pkg/storage/bloom/v1/util.go @@ -32,10 +32,10 @@ var ( }, } - // 4KB -> 64MB + // 4KB -> 128MB BlockPool = BytePool{ pool: pool.New( - 4<<10, 64<<20, 4, + 4<<10, 128<<20, 2, func(size int) interface{} { return make([]byte, size) }), diff --git a/pkg/storage/bucket/azure/config.go b/pkg/storage/bucket/azure/config.go index 18d0f74fc3e85..928503190d931 100644 --- a/pkg/storage/bucket/azure/config.go +++ b/pkg/storage/bucket/azure/config.go @@ -5,7 +5,7 @@ import ( "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/storage/bucket/http" + "github.com/grafana/loki/v3/pkg/storage/bucket/http" ) // Config holds the config options for an Azure backend diff --git a/pkg/storage/bucket/azure/config_test.go b/pkg/storage/bucket/azure/config_test.go index 7d3c6d9f326de..756ae298b65cb 100644 --- a/pkg/storage/bucket/azure/config_test.go +++ b/pkg/storage/bucket/azure/config_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/require" yaml "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/storage/bucket/http" + "github.com/grafana/loki/v3/pkg/storage/bucket/http" ) // defaultConfig should match the default flag values defined in RegisterFlagsWithPrefix. 
diff --git a/pkg/storage/bucket/client.go b/pkg/storage/bucket/client.go index 57751afe36546..4f81ce7b2934e 100644 --- a/pkg/storage/bucket/client.go +++ b/pkg/storage/bucket/client.go @@ -12,12 +12,12 @@ import ( "github.com/thanos-io/objstore" opentracing "github.com/thanos-io/objstore/tracing/opentracing" - "github.com/grafana/loki/pkg/storage/bucket/azure" - "github.com/grafana/loki/pkg/storage/bucket/filesystem" - "github.com/grafana/loki/pkg/storage/bucket/gcs" - "github.com/grafana/loki/pkg/storage/bucket/s3" - "github.com/grafana/loki/pkg/storage/bucket/swift" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/storage/bucket/azure" + "github.com/grafana/loki/v3/pkg/storage/bucket/filesystem" + "github.com/grafana/loki/v3/pkg/storage/bucket/gcs" + "github.com/grafana/loki/v3/pkg/storage/bucket/s3" + "github.com/grafana/loki/v3/pkg/storage/bucket/swift" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/pkg/storage/bucket/client_test.go b/pkg/storage/bucket/client_test.go index 7d4bee7c9e15a..489f7d2f1f269 100644 --- a/pkg/storage/bucket/client_test.go +++ b/pkg/storage/bucket/client_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/require" yaml "gopkg.in/yaml.v2" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/bucket/s3/config.go b/pkg/storage/bucket/s3/config.go index 791ef7d758656..b01eb1444a743 100644 --- a/pkg/storage/bucket/s3/config.go +++ b/pkg/storage/bucket/s3/config.go @@ -12,9 +12,9 @@ import ( "github.com/pkg/errors" "github.com/thanos-io/objstore/providers/s3" - bucket_http "github.com/grafana/loki/pkg/storage/bucket/http" - "github.com/grafana/loki/pkg/storage/common/aws" - "github.com/grafana/loki/pkg/util" + bucket_http "github.com/grafana/loki/v3/pkg/storage/bucket/http" + "github.com/grafana/loki/v3/pkg/storage/common/aws" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git 
a/pkg/storage/bucket/s3/config_test.go b/pkg/storage/bucket/s3/config_test.go index 5e6b9f9545a27..a6ae54f586ee8 100644 --- a/pkg/storage/bucket/s3/config_test.go +++ b/pkg/storage/bucket/s3/config_test.go @@ -13,8 +13,8 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - bucket_http "github.com/grafana/loki/pkg/storage/bucket/http" - "github.com/grafana/loki/pkg/storage/common/aws" + bucket_http "github.com/grafana/loki/v3/pkg/storage/bucket/http" + "github.com/grafana/loki/v3/pkg/storage/common/aws" ) // defaultConfig should match the default flag values defined in RegisterFlagsWithPrefix. diff --git a/pkg/storage/bucket/sse_bucket_client.go b/pkg/storage/bucket/sse_bucket_client.go index 51cc68a86673a..426522cfcfd1f 100644 --- a/pkg/storage/bucket/sse_bucket_client.go +++ b/pkg/storage/bucket/sse_bucket_client.go @@ -9,7 +9,7 @@ import ( "github.com/thanos-io/objstore" thanos_s3 "github.com/thanos-io/objstore/providers/s3" - "github.com/grafana/loki/pkg/storage/bucket/s3" + "github.com/grafana/loki/v3/pkg/storage/bucket/s3" ) // TenantConfigProvider defines a per-tenant config provider. 
diff --git a/pkg/storage/bucket/sse_bucket_client_test.go b/pkg/storage/bucket/sse_bucket_client_test.go index e347ad039bc1f..697e8837a2f32 100644 --- a/pkg/storage/bucket/sse_bucket_client_test.go +++ b/pkg/storage/bucket/sse_bucket_client_test.go @@ -14,7 +14,7 @@ import ( "github.com/stretchr/testify/require" "github.com/thanos-io/objstore" - "github.com/grafana/loki/pkg/storage/bucket/s3" + "github.com/grafana/loki/v3/pkg/storage/bucket/s3" ) func TestSSEBucketClient_Upload_ShouldInjectCustomSSEConfig(t *testing.T) { diff --git a/pkg/storage/chunk/cache/background.go b/pkg/storage/chunk/cache/background.go index 92995cf08d905..859bdf96f9160 100644 --- a/pkg/storage/chunk/cache/background.go +++ b/pkg/storage/chunk/cache/background.go @@ -12,9 +12,9 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/flagext" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/flagext" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // BackgroundConfig is config for a Background Cache. 
diff --git a/pkg/storage/chunk/cache/background_test.go b/pkg/storage/chunk/cache/background_test.go index a718301957603..38963d4b02495 100644 --- a/pkg/storage/chunk/cache/background_test.go +++ b/pkg/storage/chunk/cache/background_test.go @@ -8,9 +8,9 @@ import ( "github.com/dustin/go-humanize" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/flagext" ) func TestBackground(t *testing.T) { diff --git a/pkg/storage/chunk/cache/cache.go b/pkg/storage/chunk/cache/cache.go index 870d7c19e5c7c..6e1565fcaa3e8 100644 --- a/pkg/storage/chunk/cache/cache.go +++ b/pkg/storage/chunk/cache/cache.go @@ -11,7 +11,7 @@ import ( "github.com/go-kit/log" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) // Cache byte arrays by key. 
diff --git a/pkg/storage/chunk/cache/cache_gen.go b/pkg/storage/chunk/cache/cache_gen.go index 689e165e75d01..1e1bcf18062bc 100644 --- a/pkg/storage/chunk/cache/cache_gen.go +++ b/pkg/storage/chunk/cache/cache_gen.go @@ -3,7 +3,7 @@ package cache import ( "context" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) type contextKey int diff --git a/pkg/storage/chunk/cache/cache_test.go b/pkg/storage/chunk/cache/cache_test.go index e65339066ad44..5595b2df0a6cf 100644 --- a/pkg/storage/chunk/cache/cache_test.go +++ b/pkg/storage/chunk/cache/cache_test.go @@ -14,12 +14,12 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" ) const userID = "1" diff --git a/pkg/storage/chunk/cache/embeddedcache.go b/pkg/storage/chunk/cache/embeddedcache.go index 46eb204125b0a..8996283158570 100644 --- a/pkg/storage/chunk/cache/embeddedcache.go +++ b/pkg/storage/chunk/cache/embeddedcache.go @@ -13,8 +13,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/storage/chunk/cache/instrumented.go b/pkg/storage/chunk/cache/instrumented.go 
index c1f515debf689..81c56c7512e4a 100644 --- a/pkg/storage/chunk/cache/instrumented.go +++ b/pkg/storage/chunk/cache/instrumented.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) // Instrument returns an instrumented cache. diff --git a/pkg/storage/chunk/cache/memcached.go b/pkg/storage/chunk/cache/memcached.go index 9b6150839cd29..ca8e2e2f92da2 100644 --- a/pkg/storage/chunk/cache/memcached.go +++ b/pkg/storage/chunk/cache/memcached.go @@ -14,9 +14,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/math" ) // MemcachedConfig is config to make a Memcached diff --git a/pkg/storage/chunk/cache/memcached_client.go b/pkg/storage/chunk/cache/memcached_client.go index ca355e8f4e244..d6df538342faf 100644 --- a/pkg/storage/chunk/cache/memcached_client.go +++ b/pkg/storage/chunk/cache/memcached_client.go @@ -21,8 +21,7 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/sony/gobreaker" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/constants" ) // MemcachedClient interface exists for mocking memcacheClient. @@ -75,7 +74,7 @@ type memcachedClient struct { type MemcachedClientConfig struct { Host string `yaml:"host"` Service string `yaml:"service"` - Addresses string `yaml:"addresses"` // EXPERIMENTAL. 
+ Addresses string `yaml:"addresses"` Timeout time.Duration `yaml:"timeout"` MaxIdleConns int `yaml:"max_idle_conns"` MaxItemSize int `yaml:"max_item_size"` @@ -96,7 +95,7 @@ type MemcachedClientConfig struct { func (cfg *MemcachedClientConfig) RegisterFlagsWithPrefix(prefix, description string, f *flag.FlagSet) { f.StringVar(&cfg.Host, prefix+"memcached.hostname", "", description+"Hostname for memcached service to use. If empty and if addresses is unset, no memcached will be used.") f.StringVar(&cfg.Service, prefix+"memcached.service", "memcached", description+"SRV service used to discover memcache servers.") - f.StringVar(&cfg.Addresses, prefix+"memcached.addresses", "", description+"EXPERIMENTAL: Comma separated addresses list in DNS Service Discovery format: https://cortexmetrics.io/docs/configuration/arguments/#dns-service-discovery") + f.StringVar(&cfg.Addresses, prefix+"memcached.addresses", "", description+"Comma separated addresses list in DNS Service Discovery format: https://grafana.com/docs/mimir/latest/configure/about-dns-service-discovery/#supported-discovery-modes") f.IntVar(&cfg.MaxIdleConns, prefix+"memcached.max-idle-conns", 16, description+"Maximum number of idle connections in pool.") f.DurationVar(&cfg.Timeout, prefix+"memcached.timeout", 100*time.Millisecond, description+"Maximum time to wait before giving up on memcached requests.") f.DurationVar(&cfg.UpdateInterval, prefix+"memcached.update-interval", 1*time.Minute, description+"Period with which to poll DNS for memcache servers.") @@ -180,7 +179,6 @@ func NewMemcachedClient(cfg MemcachedClientConfig, name string, r prometheus.Reg } if len(cfg.Addresses) > 0 { - util_log.WarnExperimentalUse("DNS-based memcached service discovery", logger) newClient.addresses = strings.Split(cfg.Addresses, ",") } diff --git a/pkg/storage/chunk/cache/memcached_client_selector.go b/pkg/storage/chunk/cache/memcached_client_selector.go index c9604ce36a2af..8c8d49e2ba3af 100644 --- 
a/pkg/storage/chunk/cache/memcached_client_selector.go +++ b/pkg/storage/chunk/cache/memcached_client_selector.go @@ -10,7 +10,7 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/gomemcache/memcache" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // MemcachedJumpHashSelector implements the memcache.ServerSelector diff --git a/pkg/storage/chunk/cache/memcached_client_selector_test.go b/pkg/storage/chunk/cache/memcached_client_selector_test.go index 2a3f28709549c..cec908876b1bb 100644 --- a/pkg/storage/chunk/cache/memcached_client_selector_test.go +++ b/pkg/storage/chunk/cache/memcached_client_selector_test.go @@ -9,7 +9,7 @@ import ( "github.com/grafana/gomemcache/memcache" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) func TestNatSort(t *testing.T) { diff --git a/pkg/storage/chunk/cache/memcached_test.go b/pkg/storage/chunk/cache/memcached_test.go index 4082c331a10e0..e79a0f9130939 100644 --- a/pkg/storage/chunk/cache/memcached_test.go +++ b/pkg/storage/chunk/cache/memcached_test.go @@ -13,7 +13,7 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) func TestMemcached_fetchKeysBatched(t *testing.T) { diff --git a/pkg/storage/chunk/cache/mock.go b/pkg/storage/chunk/cache/mock.go index 55db7f32a5558..1b0f60da3dec3 100644 --- a/pkg/storage/chunk/cache/mock.go +++ b/pkg/storage/chunk/cache/mock.go @@ -4,7 +4,7 @@ import ( "context" "sync" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) type MockCache interface { diff --git a/pkg/storage/chunk/cache/redis_cache.go b/pkg/storage/chunk/cache/redis_cache.go index 5a4f9f73b87a7..f167b6c4ae225 100644 --- a/pkg/storage/chunk/cache/redis_cache.go +++ 
b/pkg/storage/chunk/cache/redis_cache.go @@ -7,8 +7,8 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/logqlmodel/stats" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // RedisCache type caches chunks in redis diff --git a/pkg/storage/chunk/cache/resultscache/cache.go b/pkg/storage/chunk/cache/resultscache/cache.go index 3ea3e727b502d..549e0b72983cb 100644 --- a/pkg/storage/chunk/cache/resultscache/cache.go +++ b/pkg/storage/chunk/cache/resultscache/cache.go @@ -19,11 +19,11 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util/math" - "github.com/grafana/loki/pkg/util/spanlogger" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util/math" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/util/validation" ) // ConstSplitter is a utility for using a constant split interval when determining cache keys diff --git a/pkg/storage/chunk/cache/resultscache/cache_test.go b/pkg/storage/chunk/cache/resultscache/cache_test.go index cff371097a681..964a310f5951f 100644 --- a/pkg/storage/chunk/cache/resultscache/cache_test.go +++ b/pkg/storage/chunk/cache/resultscache/cache_test.go @@ -14,9 +14,9 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/exp/slices" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util/constants" ) const day = 24 * time.Hour diff --git 
a/pkg/storage/chunk/cache/resultscache/config.go b/pkg/storage/chunk/cache/resultscache/config.go index 5a329168e8372..93c032a91ef69 100644 --- a/pkg/storage/chunk/cache/resultscache/config.go +++ b/pkg/storage/chunk/cache/resultscache/config.go @@ -7,7 +7,7 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) // Config is the config for the results cache. diff --git a/pkg/storage/chunk/cache/resultscache/test_types.pb.go b/pkg/storage/chunk/cache/resultscache/test_types.pb.go index 7d3a54864e3df..b53ce50a16024 100644 --- a/pkg/storage/chunk/cache/resultscache/test_types.pb.go +++ b/pkg/storage/chunk/cache/resultscache/test_types.pb.go @@ -278,36 +278,37 @@ func init() { } var fileDescriptor_5b2c489557407809 = []byte{ - // 462 bytes of a gzipped FileDescriptorProto + // 465 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x3f, 0x6f, 0x13, 0x31, - 0x14, 0x3f, 0xe7, 0xd2, 0x94, 0x3a, 0x51, 0x07, 0xab, 0xc3, 0x29, 0x42, 0x4e, 0xc8, 0x94, 0xe9, - 0x2c, 0x95, 0x3f, 0x43, 0xc5, 0x14, 0xc4, 0x82, 0xa8, 0x40, 0x86, 0x89, 0xa5, 0x72, 0x0e, 0xd7, - 0x39, 0xe5, 0xee, 0xec, 0xde, 0xf3, 0x21, 0xba, 0xb1, 0xb3, 0xf4, 0x63, 0xf0, 0x51, 0x3a, 0x66, - 0xec, 0x04, 0xe4, 0xb2, 0x30, 0xf6, 0x23, 0x20, 0xfb, 0x92, 0x36, 0xa5, 0x0b, 0xdd, 0xde, 0xf3, - 0xfb, 0xfd, 0xb1, 0x7e, 0xef, 0xe1, 0xe7, 0x66, 0xae, 0x18, 0x58, 0x5d, 0x0a, 0x25, 0x59, 0x32, - 0xab, 0x8a, 0x39, 0x4b, 0x44, 0x32, 0x93, 0xac, 0x94, 0x50, 0x65, 0x16, 0x9a, 0xc6, 0x4a, 0xb0, - 0x27, 0xf6, 0xdc, 0x48, 0x88, 0x4d, 0xa9, 0xad, 0x26, 0xbd, 0xed, 0x71, 0xff, 0x40, 0x69, 0xa5, - 0xfd, 0x80, 0xb9, 0xaa, 0xc1, 0xf4, 0x07, 0x4a, 0x6b, 0x95, 0x49, 0xe6, 0xbb, 0x69, 0x75, 0xca, - 0x6c, 0x9a, 0x4b, 0xb0, 0x22, 0x37, 0x6b, 0x40, 0x77, 0x4b, 0x71, 0xf4, 0xbd, 0x85, 0xbb, 0xc7, - 0x3a, 0x99, 0x73, 0x79, 0x56, 0x49, 0xb0, 0x84, 0xe0, 0xb6, 0x11, 0x76, 0x16, 0xa1, 0x21, 0x1a, - 0xef, 
0x71, 0x5f, 0x93, 0x23, 0xbc, 0x03, 0x56, 0x94, 0x36, 0x6a, 0x0d, 0xd1, 0xb8, 0x7b, 0xd8, - 0x8f, 0x1b, 0x87, 0x78, 0xe3, 0x10, 0x7f, 0xdc, 0x38, 0x4c, 0x1e, 0x5d, 0xfe, 0x1c, 0x04, 0x17, - 0xbf, 0x06, 0x88, 0x37, 0x14, 0xf2, 0x02, 0x87, 0xb2, 0xf8, 0x1c, 0x85, 0x0f, 0x60, 0x3a, 0x82, - 0xfb, 0x07, 0x58, 0x69, 0xa2, 0xf6, 0x10, 0x8d, 0x43, 0xee, 0x6b, 0x72, 0x80, 0x77, 0xce, 0x2a, - 0x59, 0x9e, 0x47, 0x1d, 0xff, 0xb9, 0xa6, 0x21, 0x6f, 0xf0, 0xbe, 0x8b, 0x23, 0x2d, 0xd4, 0x3b, - 0x63, 0x53, 0x5d, 0x40, 0xb4, 0xeb, 0xcd, 0x1e, 0xc7, 0xdb, 0x61, 0xc5, 0xaf, 0xee, 0x60, 0x26, - 0x6d, 0x67, 0xc7, 0xff, 0x61, 0x8e, 0xbe, 0xe2, 0x5e, 0x13, 0x06, 0x18, 0x5d, 0x80, 0x24, 0xcf, - 0x70, 0x27, 0x13, 0x53, 0x99, 0x41, 0x84, 0x86, 0xe1, 0x7d, 0x4d, 0x87, 0x7d, 0xeb, 0xe7, 0xef, - 0x45, 0x5a, 0xf2, 0x35, 0x96, 0x1c, 0xe2, 0x5d, 0x10, 0xb9, 0xc9, 0x24, 0x44, 0x2d, 0x4f, 0x8b, - 0xee, 0xd3, 0x3e, 0x78, 0x00, 0xdf, 0x00, 0x47, 0x47, 0x78, 0xff, 0xae, 0x9a, 0x4b, 0xa0, 0x10, - 0xb9, 0xdc, 0x6c, 0xc2, 0xd5, 0x2e, 0x81, 0x2f, 0x22, 0xab, 0xa4, 0xdf, 0xc4, 0x1e, 0x6f, 0x9a, - 0xd1, 0x6b, 0x8c, 0x6f, 0x25, 0x6f, 0x31, 0x8e, 0x88, 0xd6, 0x18, 0xf2, 0x04, 0xf7, 0x6e, 0xee, - 0xe0, 0x24, 0x07, 0x2f, 0x10, 0xf2, 0xee, 0xcd, 0xdb, 0x31, 0x4c, 0xca, 0xc5, 0x92, 0x06, 0x57, - 0x4b, 0x1a, 0x5c, 0x2f, 0x29, 0xfa, 0x56, 0x53, 0xf4, 0xa3, 0xa6, 0xe8, 0xb2, 0xa6, 0x68, 0x51, - 0x53, 0xf4, 0xbb, 0xa6, 0xe8, 0x4f, 0x4d, 0x83, 0xeb, 0x9a, 0xa2, 0x8b, 0x15, 0x0d, 0x16, 0x2b, - 0x1a, 0x5c, 0xad, 0x68, 0xf0, 0xe9, 0xa5, 0x4a, 0xed, 0xac, 0x9a, 0xc6, 0x89, 0xce, 0x99, 0x2a, - 0xc5, 0xa9, 0x28, 0x04, 0xcb, 0xf4, 0x3c, 0x65, 0xff, 0x73, 0xe1, 0xd3, 0x8e, 0xbf, 0x84, 0xa7, - 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x20, 0x73, 0x6a, 0xfb, 0x10, 0x03, 0x00, 0x00, + 0x14, 0x3f, 0xe7, 0xd2, 0x94, 0x3a, 0x51, 0x07, 0xab, 0x83, 0x15, 0x21, 0x27, 0x64, 0xca, 0x74, + 0x96, 0x5a, 0x60, 0xe8, 0x82, 0x14, 0xc4, 0x82, 0xa8, 0x40, 0x86, 0x89, 0xa5, 0x72, 0x0e, 0xd7, + 0x39, 0xe5, 0xee, 0xec, 0x9e, 0x7d, 0x15, 0xdd, 0xd8, 
0x59, 0xfa, 0x31, 0xf8, 0x28, 0x1d, 0x33, + 0x76, 0x02, 0x72, 0x59, 0x18, 0xfb, 0x11, 0x90, 0x7d, 0xb9, 0x36, 0x25, 0x0b, 0xdd, 0xde, 0xf3, + 0xfb, 0xfd, 0xb1, 0x7e, 0xef, 0xc1, 0x17, 0x7a, 0x2e, 0xa9, 0xb1, 0xaa, 0xe0, 0x52, 0xd0, 0x78, + 0x56, 0xe6, 0x73, 0x1a, 0xf3, 0x78, 0x26, 0x68, 0x21, 0x4c, 0x99, 0x5a, 0x53, 0x37, 0x56, 0x18, + 0x7b, 0x6a, 0x2f, 0xb5, 0x30, 0x91, 0x2e, 0x94, 0x55, 0xa8, 0xb7, 0x39, 0xee, 0x1f, 0x48, 0x25, + 0x95, 0x1f, 0x50, 0x57, 0xd5, 0x98, 0xfe, 0x40, 0x2a, 0x25, 0x53, 0x41, 0x7d, 0x37, 0x2d, 0xcf, + 0xa8, 0x4d, 0x32, 0x61, 0x2c, 0xcf, 0xf4, 0x1a, 0xd0, 0xdd, 0x50, 0x1c, 0x7d, 0x6f, 0xc1, 0xee, + 0x89, 0x8a, 0xe7, 0x4c, 0x9c, 0x97, 0xc2, 0x58, 0x84, 0x60, 0x5b, 0x73, 0x3b, 0xc3, 0x60, 0x08, + 0xc6, 0x7b, 0xcc, 0xd7, 0xe8, 0x18, 0xee, 0x18, 0xcb, 0x0b, 0x8b, 0x5b, 0x43, 0x30, 0xee, 0x1e, + 0xf6, 0xa3, 0xda, 0x21, 0x6a, 0x1c, 0xa2, 0x4f, 0x8d, 0xc3, 0xe4, 0xc9, 0xf5, 0xcf, 0x41, 0x70, + 0xf5, 0x6b, 0x00, 0x58, 0x4d, 0x41, 0x2f, 0x61, 0x28, 0xf2, 0x2f, 0x38, 0x7c, 0x04, 0xd3, 0x11, + 0xdc, 0x3f, 0x8c, 0x15, 0x1a, 0xb7, 0x87, 0x60, 0x1c, 0x32, 0x5f, 0xa3, 0x03, 0xb8, 0x73, 0x5e, + 0x8a, 0xe2, 0x12, 0x77, 0xfc, 0xe7, 0xea, 0x06, 0xbd, 0x85, 0xfb, 0x2e, 0x8e, 0x24, 0x97, 0xef, + 0xb5, 0x4d, 0x54, 0x6e, 0xf0, 0xae, 0x37, 0x7b, 0x1a, 0x6d, 0x86, 0x15, 0xbd, 0x7e, 0x80, 0x99, + 0xb4, 0x9d, 0x1d, 0xfb, 0x87, 0x39, 0xfa, 0x0a, 0x7b, 0x75, 0x18, 0x46, 0xab, 0xdc, 0x08, 0xf4, + 0x1c, 0x76, 0x52, 0x3e, 0x15, 0xa9, 0xc1, 0x60, 0x18, 0x6e, 0x6b, 0x3a, 0xec, 0x3b, 0x3f, 0xff, + 0xc0, 0x93, 0x82, 0xad, 0xb1, 0xe8, 0x10, 0xee, 0x1a, 0x9e, 0xe9, 0x54, 0x18, 0xdc, 0xf2, 0x34, + 0xbc, 0x4d, 0xfb, 0xe8, 0x01, 0xac, 0x01, 0x8e, 0x8e, 0xe1, 0xfe, 0x43, 0x35, 0x97, 0x40, 0xce, + 0x33, 0xd1, 0x6c, 0xc2, 0xd5, 0x2e, 0x81, 0x0b, 0x9e, 0x96, 0xc2, 0x6f, 0x62, 0x8f, 0xd5, 0xcd, + 0xe8, 0x0d, 0x84, 0xf7, 0x92, 0xf7, 0x18, 0x47, 0x04, 0x6b, 0x0c, 0x7a, 0x06, 0x7b, 0x77, 0x77, + 0x70, 0x9a, 0x19, 0x2f, 0x10, 0xb2, 0xee, 0xdd, 0xdb, 0x89, 0x99, 0x94, 0x8b, 0x25, 0x09, 
0x6e, + 0x96, 0x24, 0xb8, 0x5d, 0x12, 0xf0, 0xad, 0x22, 0xe0, 0x47, 0x45, 0xc0, 0x75, 0x45, 0xc0, 0xa2, + 0x22, 0xe0, 0x77, 0x45, 0xc0, 0x9f, 0x8a, 0x04, 0xb7, 0x15, 0x01, 0x57, 0x2b, 0x12, 0x2c, 0x56, + 0x24, 0xb8, 0x59, 0x91, 0xe0, 0xf3, 0x2b, 0x99, 0xd8, 0x59, 0x39, 0x8d, 0x62, 0x95, 0x51, 0x59, + 0xf0, 0x33, 0x9e, 0x73, 0x9a, 0xaa, 0x79, 0x42, 0x2f, 0x8e, 0xe8, 0xff, 0x1c, 0xf9, 0xb4, 0xe3, + 0x8f, 0xe1, 0xe8, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x51, 0xf5, 0xc5, 0x0d, 0x13, 0x03, 0x00, + 0x00, } func (this *MockRequest) Equal(that interface{}) bool { diff --git a/pkg/storage/chunk/cache/resultscache/test_types.proto b/pkg/storage/chunk/cache/resultscache/test_types.proto index 920db66314de4..bd313a9b85a0b 100644 --- a/pkg/storage/chunk/cache/resultscache/test_types.proto +++ b/pkg/storage/chunk/cache/resultscache/test_types.proto @@ -6,7 +6,7 @@ import "gogoproto/gogo.proto"; import "google/protobuf/timestamp.proto"; import "types.proto"; -option go_package = "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache"; +option go_package = "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; diff --git a/pkg/storage/chunk/cache/resultscache/types.pb.go b/pkg/storage/chunk/cache/resultscache/types.pb.go index 7c63abdda4bf6..6459262f46840 100644 --- a/pkg/storage/chunk/cache/resultscache/types.pb.go +++ b/pkg/storage/chunk/cache/resultscache/types.pb.go @@ -200,33 +200,33 @@ func init() { } var fileDescriptor_6b13efd4ce8649ef = []byte{ - // 404 bytes of a gzipped FileDescriptorProto + // 406 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xb1, 0x6e, 0xd4, 0x30, - 0x18, 0xc7, 0xe3, 0xde, 0x5d, 0x2f, 0x35, 0x15, 0xa0, 0xa8, 0x43, 0x7a, 0x83, 0x73, 0xba, 0xa9, - 0x03, 0xc4, 0x08, 0x56, 0x04, 0x22, 0x88, 0x01, 0x16, 0x24, 0x8f, 0x2c, 0xc8, 0x49, 0x5c, 0x27, - 0x4a, 0xb0, 0x23, 0xdb, 0x91, 0xc8, 0xc6, 
0x23, 0xf0, 0x18, 0x0c, 0x3c, 0x48, 0xc7, 0x1b, 0x3b, - 0x45, 0x5c, 0x6e, 0x41, 0x99, 0xfa, 0x08, 0x28, 0x36, 0x77, 0xea, 0xd8, 0xe5, 0xf3, 0xf7, 0xff, - 0xfe, 0x7f, 0xc9, 0x3f, 0x7f, 0x32, 0x7c, 0xd1, 0x54, 0x1c, 0x6b, 0x23, 0x15, 0xe5, 0x0c, 0x67, - 0x45, 0x2b, 0x2a, 0x9c, 0xd1, 0xac, 0x60, 0x58, 0x31, 0xdd, 0xd6, 0x46, 0x3b, 0x61, 0xba, 0x86, - 0xe9, 0xb8, 0x51, 0xd2, 0xc8, 0xe0, 0xfc, 0xbe, 0xb3, 0xba, 0xe0, 0x92, 0x4b, 0x6b, 0xe0, 0xa9, - 0x73, 0x99, 0xd5, 0x25, 0x97, 0x92, 0xd7, 0x0c, 0x5b, 0x95, 0xb6, 0xd7, 0x98, 0x8a, 0xce, 0x59, - 0x9b, 0x67, 0xf0, 0xf1, 0x7b, 0x9a, 0x15, 0xa5, 0xe0, 0x9f, 0x1b, 0x53, 0x4a, 0xa1, 0x83, 0x15, - 0xf4, 0xf3, 0x52, 0xd3, 0xb4, 0x66, 0x79, 0x08, 0xd6, 0xe0, 0xca, 0x27, 0x47, 0xbd, 0xa9, 0x5d, - 0x9a, 0xe5, 0x84, 0xe9, 0x46, 0x0a, 0xcd, 0x82, 0x4b, 0x38, 0xab, 0x58, 0x67, 0x83, 0x67, 0xc9, - 0x72, 0xec, 0xa3, 0x49, 0x92, 0xa9, 0x04, 0x6f, 0xe1, 0x92, 0x7d, 0x37, 0x4c, 0x18, 0x1d, 0x9e, - 0xac, 0x67, 0x57, 0x8f, 0x5e, 0x5e, 0xc4, 0xf7, 0x59, 0xe3, 0x0f, 0xd6, 0x4c, 0x9e, 0xdc, 0xf4, - 0x91, 0x37, 0xf6, 0xd1, 0x21, 0x4c, 0x0e, 0xcd, 0xe6, 0x37, 0x80, 0xa7, 0x2e, 0x14, 0x44, 0x70, - 0xa1, 0x0d, 0x55, 0xc6, 0x5e, 0x34, 0x4b, 0xce, 0xc6, 0x3e, 0x72, 0x03, 0xe2, 0x8e, 0x89, 0x83, - 0x89, 0x3c, 0x3c, 0xb1, 0xb6, 0xe5, 0x60, 0x22, 0x27, 0x53, 0x09, 0xd6, 0xd0, 0x37, 0x8a, 0x66, - 0xec, 0x6b, 0x99, 0x87, 0x73, 0xcb, 0xb9, 0x18, 0xfb, 0x08, 0x3c, 0x27, 0x4b, 0x3b, 0xfe, 0x98, - 0x07, 0x6f, 0xa0, 0xaf, 0xfe, 0x3f, 0x28, 0x5c, 0xac, 0x81, 0x45, 0x75, 0x2b, 0x8b, 0x0f, 0x2b, - 0x8b, 0xdf, 0x89, 0x2e, 0x39, 0x1f, 0xfb, 0xe8, 0x98, 0x24, 0xc7, 0xee, 0xd3, 0xdc, 0x9f, 0x3d, - 0x9d, 0x27, 0x6a, 0xbb, 0x43, 0xde, 0xed, 0x0e, 0x79, 0x77, 0x3b, 0x04, 0x7e, 0x0c, 0x08, 0xfc, - 0x1a, 0x10, 0xb8, 0x19, 0x10, 0xd8, 0x0e, 0x08, 0xfc, 0x19, 0x10, 0xf8, 0x3b, 0x20, 0xef, 0x6e, - 0x40, 0xe0, 0xe7, 0x1e, 0x79, 0xdb, 0x3d, 0xf2, 0x6e, 0xf7, 0xc8, 0xfb, 0xf2, 0x9a, 0x97, 0xa6, - 0x68, 0xd3, 0x38, 0x93, 0xdf, 0x30, 0x57, 0xf4, 0x9a, 0x0a, 0x8a, 0x6b, 0x59, 
0x95, 0xf8, 0x21, - 0x3f, 0x21, 0x3d, 0xb5, 0x7c, 0xaf, 0xfe, 0x05, 0x00, 0x00, 0xff, 0xff, 0xa9, 0xc6, 0x6f, 0x43, - 0x38, 0x02, 0x00, 0x00, + 0x18, 0xc7, 0xe3, 0xde, 0x5d, 0x2f, 0x35, 0x15, 0xa0, 0xa8, 0x43, 0x7a, 0x83, 0x13, 0xdd, 0xd4, + 0x01, 0x62, 0x44, 0x77, 0x2a, 0x82, 0x18, 0x60, 0x41, 0xf2, 0xc8, 0x82, 0x9c, 0xc4, 0x75, 0xa2, + 0x04, 0x3b, 0x8a, 0x1d, 0x44, 0x36, 0x1e, 0x81, 0xc7, 0x60, 0xe0, 0x41, 0x3a, 0xde, 0xd8, 0x29, + 0xe2, 0x72, 0x0b, 0xca, 0xd4, 0x47, 0x40, 0xb1, 0xb9, 0xd3, 0x8d, 0x5d, 0x3e, 0x7f, 0xff, 0xef, + 0xff, 0x97, 0xfc, 0xf3, 0x27, 0xc3, 0x57, 0x75, 0xc9, 0xb1, 0xd2, 0xb2, 0xa1, 0x9c, 0xe1, 0x34, + 0x6f, 0x45, 0x89, 0x53, 0x9a, 0xe6, 0x0c, 0x37, 0x4c, 0xb5, 0x95, 0x56, 0x56, 0xe8, 0xae, 0x66, + 0x2a, 0xaa, 0x1b, 0xa9, 0xa5, 0x77, 0x7e, 0xec, 0xac, 0x2e, 0xb8, 0xe4, 0xd2, 0x18, 0x78, 0xea, + 0x6c, 0x66, 0x75, 0xc9, 0xa5, 0xe4, 0x15, 0xc3, 0x46, 0x25, 0xed, 0x2d, 0xa6, 0xa2, 0xb3, 0xd6, + 0xfa, 0x05, 0x7c, 0xfa, 0x8e, 0xa6, 0x79, 0x21, 0xf8, 0xa7, 0x5a, 0x17, 0x52, 0x28, 0x6f, 0x05, + 0xdd, 0xac, 0x50, 0x34, 0xa9, 0x58, 0xe6, 0x83, 0x10, 0x5c, 0xb9, 0xe4, 0xa0, 0xd7, 0x95, 0x4d, + 0xb3, 0x8c, 0x30, 0x55, 0x4b, 0xa1, 0x98, 0x77, 0x09, 0x67, 0x25, 0xeb, 0x4c, 0xf0, 0x2c, 0x5e, + 0x8e, 0x7d, 0x30, 0x49, 0x32, 0x15, 0xef, 0x06, 0x2e, 0xd9, 0x77, 0xcd, 0x84, 0x56, 0xfe, 0x49, + 0x38, 0xbb, 0x7a, 0xf2, 0xfa, 0x22, 0x3a, 0x66, 0x8d, 0xde, 0x1b, 0x33, 0x7e, 0x76, 0xd7, 0x07, + 0xce, 0xd8, 0x07, 0xfb, 0x30, 0xd9, 0x37, 0xeb, 0xdf, 0x00, 0x9e, 0xda, 0x90, 0x17, 0xc0, 0x85, + 0xd2, 0xb4, 0xd1, 0xe6, 0xa2, 0x59, 0x7c, 0x36, 0xf6, 0x81, 0x1d, 0x10, 0x7b, 0x4c, 0x1c, 0x4c, + 0x64, 0xfe, 0x89, 0xb1, 0x0d, 0x07, 0x13, 0x19, 0x99, 0x8a, 0x17, 0x42, 0x57, 0x37, 0x34, 0x65, + 0x5f, 0x8a, 0xcc, 0x9f, 0x1b, 0xce, 0xc5, 0xd8, 0x07, 0xe0, 0x25, 0x59, 0x9a, 0xf1, 0x87, 0xcc, + 0x7b, 0x03, 0xdd, 0xe6, 0xff, 0x83, 0xfc, 0x45, 0x08, 0x0c, 0xaa, 0x5d, 0x59, 0xb4, 0x5f, 0x59, + 0xf4, 0x56, 0x74, 0xf1, 0xf9, 0xd8, 0x07, 0x87, 0x24, 0x39, 0x74, 0x1f, 0xe7, 0xee, 0xec, 
0xf9, + 0x3c, 0x6e, 0x37, 0x5b, 0xe4, 0xdc, 0x6f, 0x91, 0xf3, 0xb0, 0x45, 0xe0, 0xc7, 0x80, 0xc0, 0xaf, + 0x01, 0x81, 0xbb, 0x01, 0x81, 0xcd, 0x80, 0xc0, 0x9f, 0x01, 0x81, 0xbf, 0x03, 0x72, 0x1e, 0x06, + 0x04, 0x7e, 0xee, 0x90, 0xb3, 0xd9, 0x21, 0xe7, 0x7e, 0x87, 0x9c, 0xcf, 0x37, 0xbc, 0xd0, 0x79, + 0x9b, 0x44, 0xa9, 0xfc, 0x8a, 0x79, 0x43, 0x6f, 0xa9, 0xa0, 0xb8, 0x92, 0x65, 0x81, 0xbf, 0x5d, + 0xe3, 0xc7, 0x7c, 0x86, 0xe4, 0xd4, 0x20, 0x5e, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x94, 0x7b, + 0xf9, 0x0a, 0x3b, 0x02, 0x00, 0x00, } func (this *CachingOptions) Equal(that interface{}) bool { diff --git a/pkg/storage/chunk/cache/resultscache/types.proto b/pkg/storage/chunk/cache/resultscache/types.proto index 835950a0581e7..33c78831a03e6 100644 --- a/pkg/storage/chunk/cache/resultscache/types.proto +++ b/pkg/storage/chunk/cache/resultscache/types.proto @@ -5,7 +5,7 @@ package resultscache; import "gogoproto/gogo.proto"; import "google/protobuf/any.proto"; -option go_package = "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache"; +option go_package = "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; diff --git a/pkg/storage/chunk/cache/snappy.go b/pkg/storage/chunk/cache/snappy.go index 000827d1bcff5..8f05efc5b1f37 100644 --- a/pkg/storage/chunk/cache/snappy.go +++ b/pkg/storage/chunk/cache/snappy.go @@ -7,7 +7,7 @@ import ( "github.com/go-kit/log/level" "github.com/golang/snappy" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) type snappyCache struct { diff --git a/pkg/storage/chunk/cache/stats.go b/pkg/storage/chunk/cache/stats.go index 3bdf0b017ef3e..3ce127e9e8b1a 100644 --- a/pkg/storage/chunk/cache/stats.go +++ b/pkg/storage/chunk/cache/stats.go @@ -4,7 +4,7 @@ import ( "context" "time" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) type 
statsCollector struct { diff --git a/pkg/storage/chunk/cache/tiered.go b/pkg/storage/chunk/cache/tiered.go index 5ff128d34d34e..b6cdef38261fc 100644 --- a/pkg/storage/chunk/cache/tiered.go +++ b/pkg/storage/chunk/cache/tiered.go @@ -3,7 +3,7 @@ package cache import ( "context" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) type tiered []Cache diff --git a/pkg/storage/chunk/cache/tiered_test.go b/pkg/storage/chunk/cache/tiered_test.go index e024fe9ab096f..662e57a51b61f 100644 --- a/pkg/storage/chunk/cache/tiered_test.go +++ b/pkg/storage/chunk/cache/tiered_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) func TestTieredSimple(t *testing.T) { diff --git a/pkg/storage/chunk/chunk.go b/pkg/storage/chunk/chunk.go index d316553d72fa6..e807b5fb87798 100644 --- a/pkg/storage/chunk/chunk.go +++ b/pkg/storage/chunk/chunk.go @@ -18,7 +18,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var ( diff --git a/pkg/storage/chunk/chunk_test.go b/pkg/storage/chunk/chunk_test.go index f3f9a6f78eef2..aa7334a67f702 100644 --- a/pkg/storage/chunk/chunk_test.go +++ b/pkg/storage/chunk/chunk_test.go @@ -10,8 +10,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" ) const userID = "userID" diff --git a/pkg/storage/chunk/client/alibaba/oss_object_client.go b/pkg/storage/chunk/client/alibaba/oss_object_client.go index b14b4d5a0c8e5..3e7674467ae30 100644 --- a/pkg/storage/chunk/client/alibaba/oss_object_client.go +++ 
b/pkg/storage/chunk/client/alibaba/oss_object_client.go @@ -12,8 +12,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/util/constants" ) const NoSuchKeyErr = "NoSuchKey" diff --git a/pkg/storage/chunk/client/aws/dynamodb_index_reader.go b/pkg/storage/chunk/client/aws/dynamodb_index_reader.go index 0498655f15921..4b1c4cd8a9e2d 100644 --- a/pkg/storage/chunk/client/aws/dynamodb_index_reader.go +++ b/pkg/storage/chunk/client/aws/dynamodb_index_reader.go @@ -17,8 +17,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type dynamodbIndexReader struct { diff --git a/pkg/storage/chunk/client/aws/dynamodb_metrics.go b/pkg/storage/chunk/client/aws/dynamodb_metrics.go index f1fedfb20bffc..9d19f0a77ff5b 100644 --- a/pkg/storage/chunk/client/aws/dynamodb_metrics.go +++ b/pkg/storage/chunk/client/aws/dynamodb_metrics.go @@ -5,7 +5,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type dynamoDBMetrics struct { diff --git a/pkg/storage/chunk/client/aws/dynamodb_storage_client.go b/pkg/storage/chunk/client/aws/dynamodb_storage_client.go index c48bf518cc23a..87fd24e127db0 100644 --- a/pkg/storage/chunk/client/aws/dynamodb_storage_client.go +++ b/pkg/storage/chunk/client/aws/dynamodb_storage_client.go @@ -28,15 +28,15 @@ import ( "github.com/prometheus/client_golang/prometheus" "golang.org/x/time/rate" - 
"github.com/grafana/loki/pkg/storage/chunk" - chunkclient "github.com/grafana/loki/pkg/storage/chunk/client" - client_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/math" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk" + chunkclient "github.com/grafana/loki/v3/pkg/storage/chunk/client" + client_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/math" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/storage/chunk/client/aws/dynamodb_storage_client_test.go b/pkg/storage/chunk/client/aws/dynamodb_storage_client_test.go index 6c77f56d78bcc..6a4cb1238c5ff 100644 --- a/pkg/storage/chunk/client/aws/dynamodb_storage_client_test.go +++ b/pkg/storage/chunk/client/aws/dynamodb_storage_client_test.go @@ -9,8 +9,8 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" ) const ( diff --git a/pkg/storage/chunk/client/aws/dynamodb_table_client.go b/pkg/storage/chunk/client/aws/dynamodb_table_client.go index 330624a098ced..f4d9a1f7c1cd9 100644 --- a/pkg/storage/chunk/client/aws/dynamodb_table_client.go +++ b/pkg/storage/chunk/client/aws/dynamodb_table_client.go @@ -15,9 +15,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "golang.org/x/time/rate" - "github.com/grafana/loki/pkg/storage/config" - 
"github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/util/log" ) // Pluggable auto-scaler implementation diff --git a/pkg/storage/chunk/client/aws/fixtures.go b/pkg/storage/chunk/client/aws/fixtures.go index d8cc8642a8d9e..b2ab65497b45d 100644 --- a/pkg/storage/chunk/client/aws/fixtures.go +++ b/pkg/storage/chunk/client/aws/fixtures.go @@ -8,10 +8,10 @@ import ( "github.com/grafana/dskit/backoff" "golang.org/x/time/rate" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type fixture struct { diff --git a/pkg/storage/chunk/client/aws/metrics_autoscaling.go b/pkg/storage/chunk/client/aws/metrics_autoscaling.go index 7aee4df91a47b..a55be707bd51e 100644 --- a/pkg/storage/chunk/client/aws/metrics_autoscaling.go +++ b/pkg/storage/chunk/client/aws/metrics_autoscaling.go @@ -13,8 +13,8 @@ import ( promV1 "github.com/prometheus/client_golang/api/prometheus/v1" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/config" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/config" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/chunk/client/aws/metrics_autoscaling_test.go b/pkg/storage/chunk/client/aws/metrics_autoscaling_test.go index 6ea2fd90044b0..5e4d1a46a7b27 100644 --- a/pkg/storage/chunk/client/aws/metrics_autoscaling_test.go +++ 
b/pkg/storage/chunk/client/aws/metrics_autoscaling_test.go @@ -13,8 +13,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) const ( diff --git a/pkg/storage/chunk/client/aws/mock.go b/pkg/storage/chunk/client/aws/mock.go index 2dec6c94addb3..b2c6ac8556689 100644 --- a/pkg/storage/chunk/client/aws/mock.go +++ b/pkg/storage/chunk/client/aws/mock.go @@ -18,7 +18,7 @@ import ( "github.com/aws/aws-sdk-go/service/s3/s3iface" "github.com/go-kit/log/level" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const arnPrefix = "arn:" diff --git a/pkg/storage/chunk/client/aws/s3_storage_client.go b/pkg/storage/chunk/client/aws/s3_storage_client.go index 0c2136801f812..bae0fce22df7f 100644 --- a/pkg/storage/chunk/client/aws/s3_storage_client.go +++ b/pkg/storage/chunk/client/aws/s3_storage_client.go @@ -27,13 +27,13 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - bucket_s3 "github.com/grafana/loki/pkg/storage/bucket/s3" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - storageawscommon "github.com/grafana/loki/pkg/storage/common/aws" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - loki_instrument "github.com/grafana/loki/pkg/util/instrument" + bucket_s3 "github.com/grafana/loki/v3/pkg/storage/bucket/s3" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + storageawscommon "github.com/grafana/loki/v3/pkg/storage/common/aws" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + loki_instrument 
"github.com/grafana/loki/v3/pkg/util/instrument" ) const ( diff --git a/pkg/storage/chunk/client/aws/s3_storage_client_test.go b/pkg/storage/chunk/client/aws/s3_storage_client_test.go index 769f8cf00665c..db9ba83c61831 100644 --- a/pkg/storage/chunk/client/aws/s3_storage_client_test.go +++ b/pkg/storage/chunk/client/aws/s3_storage_client_test.go @@ -20,7 +20,7 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/request" diff --git a/pkg/storage/chunk/client/aws/sse_config.go b/pkg/storage/chunk/client/aws/sse_config.go index 2ff3c4b4b7995..f39385cbe0151 100644 --- a/pkg/storage/chunk/client/aws/sse_config.go +++ b/pkg/storage/chunk/client/aws/sse_config.go @@ -6,7 +6,7 @@ import ( "github.com/pkg/errors" - bucket_s3 "github.com/grafana/loki/pkg/storage/bucket/s3" + bucket_s3 "github.com/grafana/loki/v3/pkg/storage/bucket/s3" ) const ( diff --git a/pkg/storage/chunk/client/aws/sse_config_test.go b/pkg/storage/chunk/client/aws/sse_config_test.go index b7c9aea6212f8..6f56f93b0d937 100644 --- a/pkg/storage/chunk/client/aws/sse_config_test.go +++ b/pkg/storage/chunk/client/aws/sse_config_test.go @@ -6,7 +6,7 @@ import ( "github.com/pkg/errors" "github.com/stretchr/testify/assert" - s3 "github.com/grafana/loki/pkg/storage/bucket/s3" + s3 "github.com/grafana/loki/v3/pkg/storage/bucket/s3" ) func TestNewSSEParsedConfig(t *testing.T) { diff --git a/pkg/storage/chunk/client/azure/blob_storage_client.go b/pkg/storage/chunk/client/azure/blob_storage_client.go index 0126c048e9b34..7c5f5bb496ca0 100644 --- a/pkg/storage/chunk/client/azure/blob_storage_client.go +++ b/pkg/storage/chunk/client/azure/blob_storage_client.go @@ -25,13 +25,13 @@ import ( "github.com/mattn/go-ieproxy" "github.com/prometheus/client_golang/prometheus" - 
"github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - client_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - loki_instrument "github.com/grafana/loki/pkg/util/instrument" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + client_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + loki_instrument "github.com/grafana/loki/v3/pkg/util/instrument" + "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/chunk/client/azure/blob_storage_client_test.go b/pkg/storage/chunk/client/azure/blob_storage_client_test.go index 2cff02212841f..2f59934aabf20 100644 --- a/pkg/storage/chunk/client/azure/blob_storage_client_test.go +++ b/pkg/storage/chunk/client/azure/blob_storage_client_test.go @@ -17,7 +17,7 @@ import ( "github.com/stretchr/testify/suite" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) var metrics = NewBlobStorageMetrics() diff --git a/pkg/storage/chunk/client/baidubce/bos_storage_client.go b/pkg/storage/chunk/client/baidubce/bos_storage_client.go index 7b3fe633d66b4..30a9e97f4955f 100644 --- a/pkg/storage/chunk/client/baidubce/bos_storage_client.go +++ b/pkg/storage/chunk/client/baidubce/bos_storage_client.go @@ -14,8 +14,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/util/constants" ) // NoSuchKeyErr The resource you requested does not exist. 
diff --git a/pkg/storage/chunk/client/cassandra/fixtures.go b/pkg/storage/chunk/client/cassandra/fixtures.go index f1ddb1de65f42..e837dba110a31 100644 --- a/pkg/storage/chunk/client/cassandra/fixtures.go +++ b/pkg/storage/chunk/client/cassandra/fixtures.go @@ -7,10 +7,10 @@ import ( "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) // GOCQL doesn't provide nice mocks, so we use a real Cassandra instance. diff --git a/pkg/storage/chunk/client/cassandra/instrumentation.go b/pkg/storage/chunk/client/cassandra/instrumentation.go index 31db2b1f542be..756f18065b579 100644 --- a/pkg/storage/chunk/client/cassandra/instrumentation.go +++ b/pkg/storage/chunk/client/cassandra/instrumentation.go @@ -7,7 +7,7 @@ import ( "github.com/gocql/gocql" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) var requestDuration = prometheus.NewHistogramVec(prometheus.HistogramOpts{ diff --git a/pkg/storage/chunk/client/cassandra/storage_client.go b/pkg/storage/chunk/client/cassandra/storage_client.go index e58de9b19e698..d847f9d6b7e2d 100644 --- a/pkg/storage/chunk/client/cassandra/storage_client.go +++ b/pkg/storage/chunk/client/cassandra/storage_client.go @@ -19,11 +19,11 @@ import ( "github.com/prometheus/client_golang/prometheus" "golang.org/x/sync/semaphore" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - 
"github.com/grafana/loki/pkg/storage/stores/series/index" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // Config for a StorageClient diff --git a/pkg/storage/chunk/client/cassandra/table_client.go b/pkg/storage/chunk/client/cassandra/table_client.go index fe8d7549d7467..f46b886b3d61d 100644 --- a/pkg/storage/chunk/client/cassandra/table_client.go +++ b/pkg/storage/chunk/client/cassandra/table_client.go @@ -9,8 +9,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type tableClient struct { diff --git a/pkg/storage/chunk/client/client.go b/pkg/storage/chunk/client/client.go index 76efb022f73e8..36b65d40b6c2e 100644 --- a/pkg/storage/chunk/client/client.go +++ b/pkg/storage/chunk/client/client.go @@ -4,8 +4,8 @@ import ( "context" "errors" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) var ( diff --git a/pkg/storage/chunk/client/congestion/config.go b/pkg/storage/chunk/client/congestion/config.go index 47d86646ad25b..61211d6487ccc 100644 --- a/pkg/storage/chunk/client/congestion/config.go +++ b/pkg/storage/chunk/client/congestion/config.go @@ -4,7 +4,7 @@ import ( "flag" "fmt" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) type Config struct { diff --git 
a/pkg/storage/chunk/client/congestion/controller.go b/pkg/storage/chunk/client/congestion/controller.go index f1f69a29621e6..e7f29fab47638 100644 --- a/pkg/storage/chunk/client/congestion/controller.go +++ b/pkg/storage/chunk/client/congestion/controller.go @@ -10,8 +10,8 @@ import ( "github.com/go-kit/log" "golang.org/x/time/rate" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" ) // AIMDController implements the Additive-Increase/Multiplicative-Decrease algorithm which is used in TCP congestion avoidance. diff --git a/pkg/storage/chunk/client/congestion/controller_test.go b/pkg/storage/chunk/client/congestion/controller_test.go index 6ecc208c1a3f9..74620d334ff9f 100644 --- a/pkg/storage/chunk/client/congestion/controller_test.go +++ b/pkg/storage/chunk/client/congestion/controller_test.go @@ -14,8 +14,8 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" ) var errFakeFailure = errors.New("fake failure") diff --git a/pkg/storage/chunk/client/congestion/hedge.go b/pkg/storage/chunk/client/congestion/hedge.go index 3f7d99b971117..4a7cc265ebf13 100644 --- a/pkg/storage/chunk/client/congestion/hedge.go +++ b/pkg/storage/chunk/client/congestion/hedge.go @@ -5,7 +5,7 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) type NoopHedger struct{} diff --git a/pkg/storage/chunk/client/congestion/interfaces.go b/pkg/storage/chunk/client/congestion/interfaces.go index 7266eea67bfe3..ba7e8b40d5745 100644 --- a/pkg/storage/chunk/client/congestion/interfaces.go +++ 
b/pkg/storage/chunk/client/congestion/interfaces.go @@ -6,8 +6,8 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) // Controller handles congestion by: diff --git a/pkg/storage/chunk/client/congestion/metrics.go b/pkg/storage/chunk/client/congestion/metrics.go index 83c035c806dc5..78684a4e40893 100644 --- a/pkg/storage/chunk/client/congestion/metrics.go +++ b/pkg/storage/chunk/client/congestion/metrics.go @@ -1,7 +1,7 @@ package congestion import ( - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" "github.com/prometheus/client_golang/prometheus" ) diff --git a/pkg/storage/chunk/client/gcp/bigtable_index_client.go b/pkg/storage/chunk/client/gcp/bigtable_index_client.go index 6385b1c2f3cd7..28bb8276c386c 100644 --- a/pkg/storage/chunk/client/gcp/bigtable_index_client.go +++ b/pkg/storage/chunk/client/gcp/bigtable_index_client.go @@ -15,11 +15,11 @@ import ( ot "github.com/opentracing/opentracing-go" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/util/math" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/util/math" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/storage/chunk/client/gcp/bigtable_object_client.go b/pkg/storage/chunk/client/gcp/bigtable_object_client.go index b9b4b71e2f3ed..d878bc19bccf0 100644 --- a/pkg/storage/chunk/client/gcp/bigtable_object_client.go +++ 
b/pkg/storage/chunk/client/gcp/bigtable_object_client.go @@ -9,10 +9,10 @@ import ( otlog "github.com/opentracing/opentracing-go/log" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/math" ) type bigtableObjectClient struct { diff --git a/pkg/storage/chunk/client/gcp/fixtures.go b/pkg/storage/chunk/client/gcp/fixtures.go index 06debee43d089..fc0d04d115597 100644 --- a/pkg/storage/chunk/client/gcp/fixtures.go +++ b/pkg/storage/chunk/client/gcp/fixtures.go @@ -13,11 +13,11 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/credentials/insecure" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) const ( diff --git a/pkg/storage/chunk/client/gcp/gcs_object_client.go b/pkg/storage/chunk/client/gcp/gcs_object_client.go index d4b35e48d9df5..2f724e159ae2b 100644 --- a/pkg/storage/chunk/client/gcp/gcs_object_client.go +++ b/pkg/storage/chunk/client/gcp/gcs_object_client.go @@ -19,9 +19,9 @@ import ( google_http "google.golang.org/api/transport/http" amnet "k8s.io/apimachinery/pkg/util/net" - "github.com/grafana/loki/pkg/storage/chunk/client" - 
"github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" ) type ClientFactory func(ctx context.Context, opts ...option.ClientOption) (*storage.Client, error) @@ -319,8 +319,7 @@ func gcsTransport(ctx context.Context, scope string, insecure bool, http2 bool, transportOptions := []option.ClientOption{option.WithScopes(scope)} if insecure { customTransport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true} - // When using `insecure` (testing only), we add a fake API key as well to skip credential chain lookups. - transportOptions = append(transportOptions, option.WithAPIKey("insecure")) + transportOptions = append(transportOptions, option.WithoutAuthentication()) } if serviceAccount.String() != "" { transportOptions = append(transportOptions, option.WithCredentialsJSON([]byte(serviceAccount.String()))) diff --git a/pkg/storage/chunk/client/gcp/gcs_object_client_test.go b/pkg/storage/chunk/client/gcp/gcs_object_client_test.go index ac3e7a77dcd61..230067f9e9508 100644 --- a/pkg/storage/chunk/client/gcp/gcs_object_client_test.go +++ b/pkg/storage/chunk/client/gcp/gcs_object_client_test.go @@ -14,7 +14,7 @@ import ( "go.uber.org/atomic" "google.golang.org/api/option" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) func Test_Hedging(t *testing.T) { diff --git a/pkg/storage/chunk/client/gcp/instrumentation.go b/pkg/storage/chunk/client/gcp/instrumentation.go index 5f6a6cb066f07..bd35a2e9f59c6 100644 --- a/pkg/storage/chunk/client/gcp/instrumentation.go +++ b/pkg/storage/chunk/client/gcp/instrumentation.go @@ -13,7 +13,7 @@ import ( "google.golang.org/api/option" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/util/constants" + 
"github.com/grafana/loki/v3/pkg/util/constants" ) var ( diff --git a/pkg/storage/chunk/client/gcp/table_client.go b/pkg/storage/chunk/client/gcp/table_client.go index 5e1819746f1af..24be25b76fda4 100644 --- a/pkg/storage/chunk/client/gcp/table_client.go +++ b/pkg/storage/chunk/client/gcp/table_client.go @@ -11,8 +11,8 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type tableClient struct { diff --git a/pkg/storage/chunk/client/grpc/grpc_client_test.go b/pkg/storage/chunk/client/grpc/grpc_client_test.go index a9cf2c89af271..dc040cb5aecab 100644 --- a/pkg/storage/chunk/client/grpc/grpc_client_test.go +++ b/pkg/storage/chunk/client/grpc/grpc_client_test.go @@ -7,11 +7,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) // This includes test for all RPCs in diff --git a/pkg/storage/chunk/client/grpc/grpc_server_mock_test.go b/pkg/storage/chunk/client/grpc/grpc_server_mock_test.go index 420990766ff3b..e5ed3456fd081 100644 --- a/pkg/storage/chunk/client/grpc/grpc_server_mock_test.go +++ b/pkg/storage/chunk/client/grpc/grpc_server_mock_test.go @@ -11,7 +11,7 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/config" ) type 
server struct { diff --git a/pkg/storage/chunk/client/grpc/index_client.go b/pkg/storage/chunk/client/grpc/index_client.go index 6e2c5bd3a644a..b40576519c061 100644 --- a/pkg/storage/chunk/client/grpc/index_client.go +++ b/pkg/storage/chunk/client/grpc/index_client.go @@ -6,8 +6,8 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) func (w *WriteBatch) Add(tableName, hashValue string, rangeValue []byte, value []byte) { diff --git a/pkg/storage/chunk/client/grpc/storage_client.go b/pkg/storage/chunk/client/grpc/storage_client.go index 0a2b983f32fcd..42ee00507e412 100644 --- a/pkg/storage/chunk/client/grpc/storage_client.go +++ b/pkg/storage/chunk/client/grpc/storage_client.go @@ -7,8 +7,8 @@ import ( "github.com/pkg/errors" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" ) type StorageClient struct { diff --git a/pkg/storage/chunk/client/grpc/table_client.go b/pkg/storage/chunk/client/grpc/table_client.go index ab68d354d2cce..2abdefc4e98f1 100644 --- a/pkg/storage/chunk/client/grpc/table_client.go +++ b/pkg/storage/chunk/client/grpc/table_client.go @@ -7,7 +7,7 @@ import ( "github.com/pkg/errors" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/config" ) type TableClient struct { diff --git a/pkg/storage/chunk/client/ibmcloud/cos_object_client.go b/pkg/storage/chunk/client/ibmcloud/cos_object_client.go index c576dd2da4751..c9d534ae4163f 100644 --- a/pkg/storage/chunk/client/ibmcloud/cos_object_client.go +++ b/pkg/storage/chunk/client/ibmcloud/cos_object_client.go @@ -27,10 +27,10 @@ import ( 
"github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/log" ) const defaultCOSAuthEndpoint = "https://iam.cloud.ibm.com/identity/token" diff --git a/pkg/storage/chunk/client/ibmcloud/cos_object_client_test.go b/pkg/storage/chunk/client/ibmcloud/cos_object_client_test.go index 4b6eb7faece38..f6959b3f31d81 100644 --- a/pkg/storage/chunk/client/ibmcloud/cos_object_client_test.go +++ b/pkg/storage/chunk/client/ibmcloud/cos_object_client_test.go @@ -24,8 +24,8 @@ import ( "github.com/pkg/errors" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) var ( diff --git a/pkg/storage/chunk/client/ibmcloud/trusted_profile_authentication_provider.go b/pkg/storage/chunk/client/ibmcloud/trusted_profile_authentication_provider.go index 69f4424f72616..b49c9ead51715 100644 --- a/pkg/storage/chunk/client/ibmcloud/trusted_profile_authentication_provider.go +++ b/pkg/storage/chunk/client/ibmcloud/trusted_profile_authentication_provider.go @@ -10,7 +10,7 @@ import ( "github.com/go-kit/log/level" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/chunk/client/local/boltdb_index_client.go b/pkg/storage/chunk/client/local/boltdb_index_client.go index 42e2dddb784e4..de79d1eb945aa 100644 --- a/pkg/storage/chunk/client/local/boltdb_index_client.go +++ 
b/pkg/storage/chunk/client/local/boltdb_index_client.go @@ -15,9 +15,9 @@ import ( "github.com/pkg/errors" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/series/index" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/storage/chunk/client/local/boltdb_index_client_test.go b/pkg/storage/chunk/client/local/boltdb_index_client_test.go index 2b26b5cc32cf9..76dcd7afd54a7 100644 --- a/pkg/storage/chunk/client/local/boltdb_index_client_test.go +++ b/pkg/storage/chunk/client/local/boltdb_index_client_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) var ( diff --git a/pkg/storage/chunk/client/local/boltdb_table_client.go b/pkg/storage/chunk/client/local/boltdb_table_client.go index cad8790b836d1..df30db04d29ac 100644 --- a/pkg/storage/chunk/client/local/boltdb_table_client.go +++ b/pkg/storage/chunk/client/local/boltdb_table_client.go @@ -5,8 +5,8 @@ import ( "os" "path/filepath" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type TableClient struct { diff --git a/pkg/storage/chunk/client/local/fixtures.go b/pkg/storage/chunk/client/local/fixtures.go index 6f86734d7e670..fc344185c5020 100644 --- a/pkg/storage/chunk/client/local/fixtures.go +++ b/pkg/storage/chunk/client/local/fixtures.go @@ -7,10 +7,10 @@ import ( "github.com/prometheus/common/model" - 
"github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type fixture struct { diff --git a/pkg/storage/chunk/client/local/fs_object_client.go b/pkg/storage/chunk/client/local/fs_object_client.go index deee987a27abe..41e911cb28c03 100644 --- a/pkg/storage/chunk/client/local/fs_object_client.go +++ b/pkg/storage/chunk/client/local/fs_object_client.go @@ -13,10 +13,10 @@ import ( "github.com/grafana/dskit/runutil" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/ruler/rulestore/local" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // FSConfig is the config for a FSObjectClient. 
diff --git a/pkg/storage/chunk/client/local/fs_object_client_test.go b/pkg/storage/chunk/client/local/fs_object_client_test.go index 09bf69530befb..2dc059b3f5f1a 100644 --- a/pkg/storage/chunk/client/local/fs_object_client_test.go +++ b/pkg/storage/chunk/client/local/fs_object_client_test.go @@ -12,7 +12,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" ) func TestFSObjectClient_DeleteChunksBefore(t *testing.T) { diff --git a/pkg/storage/chunk/client/metrics.go b/pkg/storage/chunk/client/metrics.go index 4f507621a3a4c..76ca20a1bac5f 100644 --- a/pkg/storage/chunk/client/metrics.go +++ b/pkg/storage/chunk/client/metrics.go @@ -6,8 +6,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/constants" ) // takes a chunk client and exposes metrics for its operations. diff --git a/pkg/storage/chunk/client/object_client.go b/pkg/storage/chunk/client/object_client.go index a61fc501fec13..7a3b2e40c1663 100644 --- a/pkg/storage/chunk/client/object_client.go +++ b/pkg/storage/chunk/client/object_client.go @@ -10,9 +10,9 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" ) // ObjectClient is used to store arbitrary data in Object Store (S3/GCS/Azure/...) 
diff --git a/pkg/storage/chunk/client/object_client_test.go b/pkg/storage/chunk/client/object_client_test.go index 0b9e659b91944..f27f5d964150d 100644 --- a/pkg/storage/chunk/client/object_client_test.go +++ b/pkg/storage/chunk/client/object_client_test.go @@ -7,9 +7,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" ) func MustParseDayTime(s string) config.DayTime { diff --git a/pkg/storage/chunk/client/openstack/swift_object_client.go b/pkg/storage/chunk/client/openstack/swift_object_client.go index ee29ac4a6ca71..96b836b0a909f 100644 --- a/pkg/storage/chunk/client/openstack/swift_object_client.go +++ b/pkg/storage/chunk/client/openstack/swift_object_client.go @@ -13,10 +13,10 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - bucket_swift "github.com/grafana/loki/pkg/storage/bucket/swift" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/util/log" + bucket_swift "github.com/grafana/loki/v3/pkg/storage/bucket/swift" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/util/log" ) var defaultTransport http.RoundTripper = &http.Transport{ diff --git a/pkg/storage/chunk/client/openstack/swift_object_client_test.go b/pkg/storage/chunk/client/openstack/swift_object_client_test.go index ad2bb173dc91a..ce2f130f1bfca 100644 --- a/pkg/storage/chunk/client/openstack/swift_object_client_test.go +++ b/pkg/storage/chunk/client/openstack/swift_object_client_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/require" 
"go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/bucket/swift" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/bucket/swift" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) type RoundTripperFunc func(*http.Request) (*http.Response, error) diff --git a/pkg/storage/chunk/client/testutils/inmemory_storage_client.go b/pkg/storage/chunk/client/testutils/inmemory_storage_client.go index 15e2ddb2564bd..5f2a95da76fdf 100644 --- a/pkg/storage/chunk/client/testutils/inmemory_storage_client.go +++ b/pkg/storage/chunk/client/testutils/inmemory_storage_client.go @@ -12,10 +12,10 @@ import ( "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/util/log" ) type MockStorageMode int diff --git a/pkg/storage/chunk/client/testutils/testutils.go b/pkg/storage/chunk/client/testutils/testutils.go index 1ee5e95fd878c..2b35b612badca 100644 --- a/pkg/storage/chunk/client/testutils/testutils.go +++ b/pkg/storage/chunk/client/testutils/testutils.go @@ -12,13 +12,13 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - chunkclient "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/client" + 
"github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + chunkclient "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) const ( diff --git a/pkg/storage/chunk/client/util/parallel_chunk_fetch.go b/pkg/storage/chunk/client/util/parallel_chunk_fetch.go index be77e2d556d12..c61fdcf1bd522 100644 --- a/pkg/storage/chunk/client/util/parallel_chunk_fetch.go +++ b/pkg/storage/chunk/client/util/parallel_chunk_fetch.go @@ -7,8 +7,8 @@ import ( "github.com/opentracing/opentracing-go" otlog "github.com/opentracing/opentracing-go/log" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) var decodeContextPool = sync.Pool{ diff --git a/pkg/storage/chunk/client/util/parallel_chunk_fetch_test.go b/pkg/storage/chunk/client/util/parallel_chunk_fetch_test.go index 7fc48ca5366c8..98b654d9df074 100644 --- a/pkg/storage/chunk/client/util/parallel_chunk_fetch_test.go +++ b/pkg/storage/chunk/client/util/parallel_chunk_fetch_test.go @@ -4,7 +4,7 @@ import ( "context" "testing" - "github.com/grafana/loki/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk" ) func BenchmarkGetParallelChunks(b *testing.B) { diff --git a/pkg/storage/chunk/client/util/util.go b/pkg/storage/chunk/client/util/util.go index e49fad20136fb..3485552c220fd 100644 --- a/pkg/storage/chunk/client/util/util.go +++ b/pkg/storage/chunk/client/util/util.go @@ -8,8 +8,8 @@ import ( ot "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/util/math" ) // DoSingleQuery is the interface for indexes that don't support batching yet. 
diff --git a/pkg/storage/chunk/dummy.go b/pkg/storage/chunk/dummy.go index 7951ad2f80efb..a957d42e654dd 100644 --- a/pkg/storage/chunk/dummy.go +++ b/pkg/storage/chunk/dummy.go @@ -5,7 +5,7 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/util/filter" ) func newDummyChunk() *dummyChunk { diff --git a/pkg/storage/chunk/fetcher/fetcher.go b/pkg/storage/chunk/fetcher/fetcher.go index fd90f685e981e..7801143932842 100644 --- a/pkg/storage/chunk/fetcher/fetcher.go +++ b/pkg/storage/chunk/fetcher/fetcher.go @@ -12,14 +12,14 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/prometheus/promql" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) var ( diff --git a/pkg/storage/chunk/fetcher/fetcher_test.go b/pkg/storage/chunk/fetcher/fetcher_test.go index d73974506d4a1..c6215bde5b980 100644 --- a/pkg/storage/chunk/fetcher/fetcher_test.go +++ b/pkg/storage/chunk/fetcher/fetcher_test.go @@ -12,13 +12,13 @@ import ( "github.com/stretchr/testify/assert" "golang.org/x/exp/slices" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - 
"github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" ) func Test(t *testing.T) { diff --git a/pkg/storage/chunk/interface.go b/pkg/storage/chunk/interface.go index cf3f619fa8d87..8da4312c60398 100644 --- a/pkg/storage/chunk/interface.go +++ b/pkg/storage/chunk/interface.go @@ -24,7 +24,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/util/filter" ) // ChunkLen is the length of a chunk in bytes. diff --git a/pkg/storage/chunk/predicate.go b/pkg/storage/chunk/predicate.go index 62a91c7a46437..9c6abe226719a 100644 --- a/pkg/storage/chunk/predicate.go +++ b/pkg/storage/chunk/predicate.go @@ -3,9 +3,10 @@ package chunk import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/plan" ) +// TODO(owen-d): rename. This is not a predicate and is confusing. 
type Predicate struct { Matchers []*labels.Matcher plan *plan.QueryPlan diff --git a/pkg/storage/chunk/tests/by_key_test.go b/pkg/storage/chunk/tests/by_key_test.go index 1fada6bb632e9..e5ab23f8726a2 100644 --- a/pkg/storage/chunk/tests/by_key_test.go +++ b/pkg/storage/chunk/tests/by_key_test.go @@ -1,8 +1,8 @@ package tests import ( - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" ) // ByKey allow you to sort chunks by ID diff --git a/pkg/storage/chunk/tests/caching_fixtures_test.go b/pkg/storage/chunk/tests/caching_fixtures_test.go index 194f2b1276e0a..fbec1ac4a4fc8 100644 --- a/pkg/storage/chunk/tests/caching_fixtures_test.go +++ b/pkg/storage/chunk/tests/caching_fixtures_test.go @@ -8,14 +8,14 @@ import ( "github.com/grafana/dskit/flagext" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/validation" ) type fixture struct { diff --git a/pkg/storage/chunk/tests/chunk_client_test.go b/pkg/storage/chunk/tests/chunk_client_test.go index a584d7a74f9b2..c995225524c0f 100644 --- 
a/pkg/storage/chunk/tests/chunk_client_test.go +++ b/pkg/storage/chunk/tests/chunk_client_test.go @@ -12,11 +12,11 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) func TestChunksBasic(t *testing.T) { diff --git a/pkg/storage/chunk/tests/index_client_test.go b/pkg/storage/chunk/tests/index_client_test.go index a4751f1fb1d31..0a2545206cdae 100644 --- a/pkg/storage/chunk/tests/index_client_test.go +++ b/pkg/storage/chunk/tests/index_client_test.go @@ -11,9 +11,9 @@ import ( "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) var ctx = user.InjectOrgID(context.Background(), "1") diff --git a/pkg/storage/chunk/tests/utils_test.go b/pkg/storage/chunk/tests/utils_test.go index e885e1d53bcdb..80e6e1b659f02 100644 --- a/pkg/storage/chunk/tests/utils_test.go +++ b/pkg/storage/chunk/tests/utils_test.go @@ -5,13 +5,13 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/aws" - "github.com/grafana/loki/pkg/storage/chunk/client/cassandra" - 
"github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/aws" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/cassandra" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) const ( diff --git a/pkg/storage/common/aws/storage_class.go b/pkg/storage/common/aws/storage_class.go index b284c7326fbb0..ab3950e9d8c3c 100644 --- a/pkg/storage/common/aws/storage_class.go +++ b/pkg/storage/common/aws/storage_class.go @@ -4,7 +4,7 @@ import ( "fmt" "strings" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/pkg/storage/config/bench_test.go b/pkg/storage/config/bench_test.go index 8296f229bde1c..df5fb913b1f43 100644 --- a/pkg/storage/config/bench_test.go +++ b/pkg/storage/config/bench_test.go @@ -6,8 +6,8 @@ import ( "github.com/stretchr/testify/require" yaml "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" ) func BenchmarkExternalKey(b *testing.B) { diff --git a/pkg/storage/config/schema_config.go b/pkg/storage/config/schema_config.go index 968ca87e609b7..c7e72886b738e 100644 --- a/pkg/storage/config/schema_config.go +++ b/pkg/storage/config/schema_config.go @@ -17,10 +17,10 @@ import ( "github.com/prometheus/common/model" yaml "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - 
"github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/util/log" ) const ( @@ -201,7 +201,8 @@ func (cfg *PeriodConfig) GetIndexTableNumberRange(schemaEndDate DayTime) TableRa } func NewDayTime(d model.Time) DayTime { - return DayTime{d} + beginningOfDay := model.TimeFromUnix(d.Time().Truncate(24 * time.Hour).Unix()) + return DayTime{beginningOfDay} } // DayTime is a model.Time what holds day-aligned values, and marshals to/from diff --git a/pkg/storage/config/schema_config_test.go b/pkg/storage/config/schema_config_test.go index 06fd191b7092a..a4ed59933150c 100644 --- a/pkg/storage/config/schema_config_test.go +++ b/pkg/storage/config/schema_config_test.go @@ -12,8 +12,8 @@ import ( "github.com/stretchr/testify/require" yaml "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" ) func TestChunkTableFor(t *testing.T) { diff --git a/pkg/storage/config/store.go b/pkg/storage/config/store.go index 75bdaa2ace8dc..8dbd57cdc2503 100644 --- a/pkg/storage/config/store.go +++ b/pkg/storage/config/store.go @@ -6,15 +6,15 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) type ChunkStoreConfig struct { ChunkCacheConfig cache.Config `yaml:"chunk_cache_config"` - ChunkCacheConfigL2 cache.Config `yaml:"chunk_cache_config_l2" doc:"hidden"` + ChunkCacheConfigL2 cache.Config `yaml:"chunk_cache_config_l2"` WriteDedupeCacheConfig cache.Config `yaml:"write_dedupe_cache_config" doc:"description=Write dedupe cache is deprecated along with legacy index types (aws, aws-dynamo, 
bigtable, bigtable-hashed, cassandra, gcp, gcp-columnkey, grpc-store).\nConsider using TSDB index which does not require a write dedupe cache."` - L2ChunkCacheHandoff time.Duration `yaml:"l2_chunk_cache_handoff" doc:"hidden"` + L2ChunkCacheHandoff time.Duration `yaml:"l2_chunk_cache_handoff"` CacheLookupsOlderThan model.Duration `yaml:"cache_lookups_older_than"` // Not visible in yaml because the setting shouldn't be common between ingesters and queriers. @@ -34,8 +34,8 @@ func (cfg *ChunkStoreConfig) ChunkCacheStubs() bool { // RegisterFlags adds the flags required to configure this flag set. func (cfg *ChunkStoreConfig) RegisterFlags(f *flag.FlagSet) { cfg.ChunkCacheConfig.RegisterFlagsWithPrefix("store.chunks-cache.", "", f) - cfg.ChunkCacheConfigL2.RegisterFlagsWithPrefix("experimental.store.chunks-cache-l2.", "", f) - f.DurationVar(&cfg.L2ChunkCacheHandoff, "experimental.store.chunks-cache-l2.handoff", 0, "Experimental, subject to change or removal. Chunks will be handed off to the L2 cache after this duration. 0 to disable L2 cache.") + cfg.ChunkCacheConfigL2.RegisterFlagsWithPrefix("store.chunks-cache-l2.", "", f) + f.DurationVar(&cfg.L2ChunkCacheHandoff, "store.chunks-cache-l2.handoff", 0, "Chunks will be handed off to the L2 cache after this duration. 
0 to disable L2 cache.") f.BoolVar(&cfg.chunkCacheStubs, "store.chunks-cache.cache-stubs", false, "If true, don't write the full chunk to cache, just a stub entry.") cfg.WriteDedupeCacheConfig.RegisterFlagsWithPrefix("store.index-cache-write.", "", f) diff --git a/pkg/storage/factory.go b/pkg/storage/factory.go index da687c5ea9c7b..3660bfb4b2116 100644 --- a/pkg/storage/factory.go +++ b/pkg/storage/factory.go @@ -14,32 +14,32 @@ import ( "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/alibaba" - "github.com/grafana/loki/pkg/storage/chunk/client/aws" - "github.com/grafana/loki/pkg/storage/chunk/client/azure" - "github.com/grafana/loki/pkg/storage/chunk/client/baidubce" - "github.com/grafana/loki/pkg/storage/chunk/client/cassandra" - "github.com/grafana/loki/pkg/storage/chunk/client/congestion" - "github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/grpc" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/storage/chunk/client/ibmcloud" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/openstack" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/series/index" - bloomshipperconfig "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/downloads" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/gatewayclient" - 
"github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/alibaba" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/aws" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/azure" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/baidubce" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/cassandra" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/congestion" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/grpc" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/ibmcloud" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/openstack" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + bloomshipperconfig "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/downloads" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/gatewayclient" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) var ( @@ -336,7 +336,7 @@ type Config struct { MaxChunkBatchSize int `yaml:"max_chunk_batch_size"` BoltDBShipperConfig boltdb.IndexCfg 
`yaml:"boltdb_shipper" doc:"description=Configures storing index in an Object Store (GCS/S3/Azure/Swift/COS/Filesystem) in the form of boltdb files. Required fields only required when boltdb-shipper is defined in config."` TSDBShipperConfig indexshipper.Config `yaml:"tsdb_shipper" doc:"description=Configures storing index in an Object Store (GCS/S3/Azure/Swift/COS/Filesystem) in a prometheus TSDB-like format. Required fields only required when TSDB is defined in config."` - BloomShipperConfig bloomshipperconfig.Config `yaml:"bloom_shipper" doc:"description=Configures Bloom Shipper."` + BloomShipperConfig bloomshipperconfig.Config `yaml:"bloom_shipper" doc:"description=Configures the bloom shipper component, which contains the store abstraction to fetch bloom filters from and put them to object storage."` // Config for using AsyncStore when using async index stores like `boltdb-shipper`. // It is required for getting chunk ids of recently flushed chunks from the ingesters. diff --git a/pkg/storage/factory_test.go b/pkg/storage/factory_test.go index 2588c9dc69dd1..5685424f199cd 100644 --- a/pkg/storage/factory_test.go +++ b/pkg/storage/factory_test.go @@ -12,15 +12,15 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/cassandra" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/cassandra" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + 
"github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) func TestFactoryStop(t *testing.T) { diff --git a/pkg/storage/hack/main.go b/pkg/storage/hack/main.go index 93278b429c9af..f85e44a41ac5f 100644 --- a/pkg/storage/hack/main.go +++ b/pkg/storage/hack/main.go @@ -14,16 +14,16 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/pkg/storage/lazy_chunk.go b/pkg/storage/lazy_chunk.go index 5a6170c6c6fe4..4c741228eee30 100644 --- a/pkg/storage/lazy_chunk.go +++ b/pkg/storage/lazy_chunk.go @@ -7,13 +7,13 @@ import ( "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - 
"github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // LazyChunk loads the chunk when it is accessed. diff --git a/pkg/storage/lazy_chunk_test.go b/pkg/storage/lazy_chunk_test.go index 2244c02c924c6..6757e94e1e958 100644 --- a/pkg/storage/lazy_chunk_test.go +++ b/pkg/storage/lazy_chunk_test.go @@ -9,13 +9,13 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util" ) func TestLazyChunkIterator(t *testing.T) { diff --git a/pkg/storage/store.go b/pkg/storage/store.go index 706f630931cd2..1a4fa386062f7 100644 --- a/pkg/storage/store.go +++ b/pkg/storage/store.go @@ -6,9 +6,9 @@ import ( "math" "time" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" - lokilog "github.com/grafana/loki/pkg/logql/log" + lokilog "github.com/grafana/loki/v3/pkg/logql/log" "github.com/go-kit/log" "github.com/go-kit/log/level" @@ -19,28 +19,28 @@ import ( 
"github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/congestion" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/storage/stores/series" - series_index "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/gatewayclient" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/deletion" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/congestion" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/series" + 
series_index "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/gatewayclient" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/deletion" ) var ( @@ -364,9 +364,11 @@ func decodeReq(req logql.QueryParams) ([]*labels.Matcher, model.Time, model.Time return matchers, from, through, nil } +// TODO(owen-d): refactor this. Injecting shard labels via matchers is a big hack and we shouldn't continue +// doing it, _but_ it requires adding `fingerprintfilter` support to much of our storage interfaces func injectShardLabel(shards []string, matchers []*labels.Matcher) ([]*labels.Matcher, error) { if shards != nil { - parsed, err := logql.ParseShards(shards) + parsed, _, err := logql.ParseShards(shards) if err != nil { return nil, err } diff --git a/pkg/storage/store_test.go b/pkg/storage/store_test.go index 52df29c079acf..3be136d411dea 100644 --- a/pkg/storage/store_test.go +++ b/pkg/storage/store_test.go @@ -13,7 +13,7 @@ import ( "testing" "time" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" "github.com/cespare/xxhash/v2" "github.com/go-kit/log" @@ -23,25 +23,26 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - lokilog "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" "github.com/grafana/loki/pkg/push" - "github.com/grafana/loki/pkg/querier/astmapper" - 
"github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/marshal" - "github.com/grafana/loki/pkg/validation" + + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + lokilog "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/pkg/storage/stores/composite_store.go b/pkg/storage/stores/composite_store.go index d0fb516dcbff4..212cd94082837 100644 --- a/pkg/storage/stores/composite_store.go +++ b/pkg/storage/stores/composite_store.go @@ -7,13 +7,15 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/stores/index" - 
"github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + tsdb_index "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util" ) type ChunkWriter interface { @@ -206,6 +208,83 @@ func (c CompositeStore) Volume(ctx context.Context, userID string, from, through return res, err } +func (c CompositeStore) GetShards( + ctx context.Context, + userID string, + from, through model.Time, + targetBytesPerShard uint64, + predicate chunk.Predicate, +) (*logproto.ShardsResponse, error) { + // TODO(owen-d): improve. Since shards aren't easily merge-able, + // we choose the store which returned the highest shard count. 
+ // This is only used when a query crosses a schema boundary + var groups []*logproto.ShardsResponse + err := c.forStores(ctx, from, through, func(innerCtx context.Context, from, through model.Time, store Store) error { + shards, err := store.GetShards(innerCtx, userID, from, through, targetBytesPerShard, predicate) + if err != nil { + return err + } + groups = append(groups, shards) + return nil + }) + + if err != nil { + return nil, err + } + + switch { + case len(groups) == 1: + return groups[0], nil + case len(groups) == 0: + return nil, nil + default: + sort.Slice(groups, func(i, j int) bool { + return len(groups[i].Shards) > len(groups[j].Shards) + }) + return groups[0], nil + } +} + +func (c CompositeStore) HasForSeries(from, through model.Time) (sharding.ForSeries, bool) { + var impls []sharding.ForSeries + _ = c.forStores(context.Background(), from, through, func(_ context.Context, from, through model.Time, store Store) error { + impl, ok := store.HasForSeries(from, through) + if ok { + impls = append(impls, impl) + } + return nil + }) + + if len(impls) == 0 { + return nil, false + } + + wrapped := sharding.ForSeriesFunc( + func( + ctx context.Context, + userID string, + fpFilter tsdb_index.FingerprintFilter, + from model.Time, + through model.Time, + fn func( + labels.Labels, + model.Fingerprint, + []tsdb_index.ChunkMeta, + ) (stop bool), + matchers ...*labels.Matcher, + ) error { + for _, impl := range impls { + if err := impl.ForSeries(ctx, userID, fpFilter, from, through, fn, matchers...); err != nil { + return err + } + } + return nil + }, + ) + + return wrapped, true +} + func (c CompositeStore) GetChunkFetcher(tm model.Time) *fetcher.Fetcher { // find the schema with the lowest start _after_ tm j := sort.Search(len(c.stores), func(j int) bool { diff --git a/pkg/storage/stores/composite_store_entry.go b/pkg/storage/stores/composite_store_entry.go index 7edbdab404fe6..d4590d199d9ee 100644 --- a/pkg/storage/stores/composite_store_entry.go +++ 
b/pkg/storage/stores/composite_store_entry.go @@ -5,23 +5,24 @@ import ( "fmt" "time" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/go-kit/log/level" "github.com/opentracing/opentracing-go" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/errors" - "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/errors" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/util/validation" ) type StoreLimits interface { @@ -143,6 +144,25 @@ func (c *storeEntry) Volume(ctx context.Context, userID string, from, through mo return c.indexReader.Volume(ctx, userID, from, through, limit, targetLabels, aggregateBy, matchers...) 
} +func (c *storeEntry) GetShards( + ctx context.Context, + userID string, + from, through model.Time, + targetBytesPerShard uint64, + predicate chunk.Predicate, +) (*logproto.ShardsResponse, error) { + _, err := c.validateQueryTimeRange(ctx, userID, &from, &through) + if err != nil { + return nil, err + } + + return c.indexReader.GetShards(ctx, userID, from, through, targetBytesPerShard, predicate) +} + +func (c *storeEntry) HasForSeries(from, through model.Time) (sharding.ForSeries, bool) { + return c.indexReader.HasForSeries(from, through) +} + func (c *storeEntry) validateQueryTimeRange(ctx context.Context, userID string, from *model.Time, through *model.Time) (bool, error) { //nolint:ineffassign,staticcheck //Leaving ctx even though we don't currently use it, we want to make it available for when we might need it and hopefully will ensure us using the correct context at that time diff --git a/pkg/storage/stores/composite_store_test.go b/pkg/storage/stores/composite_store_test.go index b2878487f4606..3836243f38783 100644 --- a/pkg/storage/stores/composite_store_test.go +++ b/pkg/storage/stores/composite_store_test.go @@ -8,16 +8,17 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/grafana/dskit/test" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) type mockStore int @@ -60,6 +61,14 @@ func (m mockStore) Volume(_ context.Context, _ string, _, _ model.Time, _ int32, return nil, nil } +func (m 
mockStore) GetShards(_ context.Context, _ string, _, _ model.Time, _ uint64, _ chunk.Predicate) (*logproto.ShardsResponse, error) { + return nil, nil +} + +func (m mockStore) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} + func (m mockStore) Stop() {} func TestCompositeStore(t *testing.T) { diff --git a/pkg/storage/stores/index/index.go b/pkg/storage/stores/index/index.go index 41746e346f20b..26b2a44880047 100644 --- a/pkg/storage/stores/index/index.go +++ b/pkg/storage/stores/index/index.go @@ -8,10 +8,12 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - loki_instrument "github.com/grafana/loki/pkg/util/instrument" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + loki_instrument "github.com/grafana/loki/v3/pkg/util/instrument" ) type Filterable interface { @@ -28,6 +30,17 @@ type BaseReader interface { type StatsReader interface { Stats(ctx context.Context, userID string, from, through model.Time, matchers ...*labels.Matcher) (*stats.Stats, error) Volume(ctx context.Context, userID string, from, through model.Time, limit int32, targetLabels []string, aggregateBy string, matchers ...*labels.Matcher) (*logproto.VolumeResponse, error) + GetShards( + ctx context.Context, + userID string, + from, through model.Time, + targetBytesPerShard uint64, + predicate chunk.Predicate, + ) (*logproto.ShardsResponse, error) + + // If the underlying index supports it, this will return the ForSeries interface + // which is used in bloom-filter accelerated sharding calculation 
optimization. + HasForSeries(from, through model.Time) (sharding.ForSeries, bool) } type Reader interface { @@ -137,6 +150,24 @@ func (m MonitoredReaderWriter) Volume(ctx context.Context, userID string, from, return vol, nil } +func (m MonitoredReaderWriter) GetShards( + ctx context.Context, + userID string, + from, through model.Time, + targetBytesPerShard uint64, + predicate chunk.Predicate, +) (*logproto.ShardsResponse, error) { + var shards *logproto.ShardsResponse + if err := loki_instrument.TimeRequest(ctx, "shards", instrument.NewHistogramCollector(m.metrics.indexQueryLatency), instrument.ErrorCode, func(ctx context.Context) error { + var err error + shards, err = m.rw.GetShards(ctx, userID, from, through, targetBytesPerShard, predicate) + return err + }); err != nil { + return nil, err + } + return shards, nil +} + func (m MonitoredReaderWriter) SetChunkFilterer(chunkFilter chunk.RequestChunkFilterer) { m.rw.SetChunkFilterer(chunkFilter) } @@ -146,3 +177,29 @@ func (m MonitoredReaderWriter) IndexChunk(ctx context.Context, from, through mod return m.rw.IndexChunk(ctx, from, through, chk) }) } + +func (m MonitoredReaderWriter) HasForSeries(from, through model.Time) (sharding.ForSeries, bool) { + if impl, ok := m.rw.HasForSeries(from, through); ok { + wrapped := sharding.ForSeriesFunc( + func( + ctx context.Context, + userID string, + fpFilter index.FingerprintFilter, + from model.Time, + through model.Time, + fn func( + labels.Labels, + model.Fingerprint, + []index.ChunkMeta, + ) (stop bool), + matchers ...*labels.Matcher, + ) error { + return loki_instrument.TimeRequest(ctx, "for_series", instrument.NewHistogramCollector(m.metrics.indexQueryLatency), instrument.ErrorCode, func(ctx context.Context) error { + return impl.ForSeries(ctx, userID, fpFilter, from, through, fn, matchers...) 
+ }) + }, + ) + return wrapped, true + } + return nil, false +} diff --git a/pkg/storage/stores/index/metrics.go b/pkg/storage/stores/index/metrics.go index 924122f950a80..2474a9ece07c1 100644 --- a/pkg/storage/stores/index/metrics.go +++ b/pkg/storage/stores/index/metrics.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type metrics struct { diff --git a/pkg/storage/stores/index/seriesvolume/volume.go b/pkg/storage/stores/index/seriesvolume/volume.go index b09ccd800e87f..0e079702ccf67 100644 --- a/pkg/storage/stores/index/seriesvolume/volume.go +++ b/pkg/storage/stores/index/seriesvolume/volume.go @@ -5,7 +5,7 @@ import ( "sort" "sync" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git a/pkg/storage/stores/index/seriesvolume/volume_test.go b/pkg/storage/stores/index/seriesvolume/volume_test.go index 8f0ecb6eb266b..6487bb0260b95 100644 --- a/pkg/storage/stores/index/seriesvolume/volume_test.go +++ b/pkg/storage/stores/index/seriesvolume/volume_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func Test_AddVolume(t *testing.T) { diff --git a/pkg/storage/stores/index/stats/stats.go b/pkg/storage/stores/index/stats/stats.go index 82d0791c1dc7c..088c21f76ca1d 100644 --- a/pkg/storage/stores/index/stats/stats.go +++ b/pkg/storage/stores/index/stats/stats.go @@ -7,8 +7,8 @@ import ( "github.com/prometheus/common/model" "github.com/willf/bloom" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) var BloomPool PoolBloom diff --git 
a/pkg/storage/stores/series/index/caching_index_client.go b/pkg/storage/stores/series/index/caching_index_client.go index dd6e7348f8fd4..40181ba794c71 100644 --- a/pkg/storage/stores/series/index/caching_index_client.go +++ b/pkg/storage/stores/series/index/caching_index_client.go @@ -14,9 +14,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/storage/stores/series/index/caching_index_client_test.go b/pkg/storage/stores/series/index/caching_index_client_test.go index dcb7b90a82fa9..99a9264a0a0aa 100644 --- a/pkg/storage/stores/series/index/caching_index_client_test.go +++ b/pkg/storage/stores/series/index/caching_index_client_test.go @@ -14,9 +14,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/validation" ) var ctx = user.InjectOrgID(context.Background(), "1") diff --git a/pkg/storage/stores/series/index/schema.go b/pkg/storage/stores/series/index/schema.go index 24ff305a064b8..5b60c5f9c6a69 100644 --- a/pkg/storage/stores/series/index/schema.go +++ b/pkg/storage/stores/series/index/schema.go @@ -13,8 +13,8 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/querier/astmapper" - util_log "github.com/grafana/loki/pkg/util/log" + 
"github.com/grafana/loki/v3/pkg/querier/astmapper" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/stores/series/index/schema_config.go b/pkg/storage/stores/series/index/schema_config.go index c4b3f2dfe17cf..32a05fb7634b5 100644 --- a/pkg/storage/stores/series/index/schema_config.go +++ b/pkg/storage/stores/series/index/schema_config.go @@ -7,8 +7,8 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/math" ) const ( diff --git a/pkg/storage/stores/series/index/schema_test.go b/pkg/storage/stores/series/index/schema_test.go index b70fd49a56f91..5a74936085268 100644 --- a/pkg/storage/stores/series/index/schema_test.go +++ b/pkg/storage/stores/series/index/schema_test.go @@ -12,8 +12,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/storage/config" ) func TestDailyBuckets(t *testing.T) { diff --git a/pkg/storage/stores/series/index/table_client.go b/pkg/storage/stores/series/index/table_client.go index 8767fcaf3aaac..e32cc8968b192 100644 --- a/pkg/storage/stores/series/index/table_client.go +++ b/pkg/storage/stores/series/index/table_client.go @@ -3,7 +3,7 @@ package index import ( "context" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/config" ) // TableClient is a client for telling Dynamo what to do with tables. 
diff --git a/pkg/storage/stores/series/index/table_manager.go b/pkg/storage/stores/series/index/table_manager.go index c477ecf135105..414e08f494c89 100644 --- a/pkg/storage/stores/series/index/table_manager.go +++ b/pkg/storage/stores/series/index/table_manager.go @@ -20,8 +20,8 @@ import ( "github.com/prometheus/common/model" tsdb_errors "github.com/prometheus/prometheus/tsdb/errors" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/storage/stores/series/index/table_manager_test.go b/pkg/storage/stores/series/index/table_manager_test.go index 09512fd98ccd3..74429b48f9b3a 100644 --- a/pkg/storage/stores/series/index/table_manager_test.go +++ b/pkg/storage/stores/series/index/table_manager_test.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/config" ) const ( diff --git a/pkg/storage/stores/series/series_index_gateway_store.go b/pkg/storage/stores/series/series_index_gateway_store.go index 00059fe16c1a3..b58979bd11a13 100644 --- a/pkg/storage/stores/series/series_index_gateway_store.go +++ b/pkg/storage/stores/series/series_index_gateway_store.go @@ -9,19 +9,33 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) +// NB(owen-d): mostly 
modeled off of the proto-generated `logproto.IndexGatewayClient`, +// but decoupled from explicit GRPC dependencies to work well with streaming grpc methods +type GatewayClient interface { + GetChunkRef(ctx context.Context, in *logproto.GetChunkRefRequest) (*logproto.GetChunkRefResponse, error) + GetSeries(ctx context.Context, in *logproto.GetSeriesRequest) (*logproto.GetSeriesResponse, error) + LabelNamesForMetricName(ctx context.Context, in *logproto.LabelNamesForMetricNameRequest) (*logproto.LabelResponse, error) + LabelValuesForMetricName(ctx context.Context, in *logproto.LabelValuesForMetricNameRequest) (*logproto.LabelResponse, error) + GetStats(ctx context.Context, in *logproto.IndexStatsRequest) (*logproto.IndexStatsResponse, error) + GetVolume(ctx context.Context, in *logproto.VolumeRequest) (*logproto.VolumeResponse, error) + + GetShards(ctx context.Context, in *logproto.ShardsRequest) (*logproto.ShardsResponse, error) +} + // IndexGatewayClientStore implements pkg/storage/stores/index.ReaderWriter type IndexGatewayClientStore struct { - client logproto.IndexGatewayClient + client GatewayClient logger log.Logger } -func NewIndexGatewayClientStore(client logproto.IndexGatewayClient, logger log.Logger) *IndexGatewayClientStore { +func NewIndexGatewayClientStore(client GatewayClient, logger log.Logger) *IndexGatewayClientStore { return &IndexGatewayClientStore{ client: client, logger: logger, @@ -111,6 +125,25 @@ func (c *IndexGatewayClientStore) Volume(ctx context.Context, _ string, from, th }) } +func (c *IndexGatewayClientStore) GetShards( + ctx context.Context, + _ string, + from, through model.Time, + targetBytesPerShard uint64, + predicate chunk.Predicate, +) (*logproto.ShardsResponse, error) { + resp, err := c.client.GetShards(ctx, &logproto.ShardsRequest{ + From: from, + Through: through, + Query: predicate.Plan().AST.String(), + TargetBytesPerShard: targetBytesPerShard, + }) + if err != nil { + return nil, err + } + return resp, nil +} + func (c 
*IndexGatewayClientStore) SetChunkFilterer(_ chunk.RequestChunkFilterer) { level.Warn(c.logger).Log("msg", "SetChunkFilterer called on index gateway client store, but it does not support it") } @@ -118,3 +151,10 @@ func (c *IndexGatewayClientStore) SetChunkFilterer(_ chunk.RequestChunkFilterer) func (c *IndexGatewayClientStore) IndexChunk(_ context.Context, _, _ model.Time, _ chunk.Chunk) error { return fmt.Errorf("index writes not supported on index gateway client") } + +// IndexGatewayClientStore does not implement tsdb.ForSeries; +// that is implemented by the index-gws themselves and will be +// called during the `GetShards() invocation` +func (c *IndexGatewayClientStore) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} diff --git a/pkg/storage/stores/series/series_index_gateway_store_test.go b/pkg/storage/stores/series/series_index_gateway_store_test.go index 125973eb49d42..48256220191e4 100644 --- a/pkg/storage/stores/series/series_index_gateway_store_test.go +++ b/pkg/storage/stores/series/series_index_gateway_store_test.go @@ -7,20 +7,15 @@ import ( "github.com/go-kit/log" "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "google.golang.org/grpc" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type fakeClient struct { - logproto.IndexGatewayClient + GatewayClient } -func (fakeClient) GetChunkRef(_ context.Context, _ *logproto.GetChunkRefRequest, _ ...grpc.CallOption) (*logproto.GetChunkRefResponse, error) { - return &logproto.GetChunkRefResponse{}, nil -} - -func (fakeClient) GetSeries(_ context.Context, _ *logproto.GetSeriesRequest, _ ...grpc.CallOption) (*logproto.GetSeriesResponse, error) { +func (fakeClient) GetSeries(_ context.Context, _ *logproto.GetSeriesRequest) (*logproto.GetSeriesResponse, error) { return &logproto.GetSeriesResponse{}, nil } diff --git a/pkg/storage/stores/series/series_index_store.go b/pkg/storage/stores/series/series_index_store.go 
index 5d17963180556..138fd17a7ab13 100644 --- a/pkg/storage/stores/series/series_index_store.go +++ b/pkg/storage/stores/series/series_index_store.go @@ -3,6 +3,7 @@ package series import ( "context" "fmt" + "math" "sort" "sync" @@ -16,21 +17,22 @@ import ( "github.com/grafana/dskit/concurrency" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - storageerrors "github.com/grafana/loki/pkg/storage/errors" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - series_index "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/extract" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + storageerrors "github.com/grafana/loki/v3/pkg/storage/errors" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + series_index "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/extract" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) var ( @@ -758,3 +760,30 @@ func (c *IndexReaderWriter) 
Stats(_ context.Context, _ string, _, _ model.Time, func (c *IndexReaderWriter) Volume(_ context.Context, _ string, _, _ model.Time, _ int32, _ []string, _ string, _ ...*labels.Matcher) (*logproto.VolumeResponse, error) { return nil, nil } + +// old index stores do not implement dynamic sharidng -- skip +func (c *IndexReaderWriter) GetShards( + _ context.Context, + _ string, + _, _ model.Time, + _ uint64, + _ chunk.Predicate, +) (*logproto.ShardsResponse, error) { + // should not be called for legacy indices at all, so just return a single shard covering everything + // could be improved by reading schema shards + return &logproto.ShardsResponse{ + Shards: []logproto.Shard{ + { + Bounds: logproto.FPBounds{ + Min: 0, + Max: math.MaxUint64, + }, + }, + }, + }, nil +} + +// old index stores do not implement tsdb.ForSeries -- skip +func (c *IndexReaderWriter) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} diff --git a/pkg/storage/stores/series/series_store_test.go b/pkg/storage/stores/series/series_store_test.go index 582bb9a2fea52..2f1146a1d7376 100644 --- a/pkg/storage/stores/series/series_store_test.go +++ b/pkg/storage/stores/series/series_store_test.go @@ -17,18 +17,18 @@ import ( "github.com/prometheus/prometheus/promql/parser" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/client" + 
"github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) type configFactory func() config.ChunkStoreConfig diff --git a/pkg/storage/stores/series/series_store_utils.go b/pkg/storage/stores/series/series_store_utils.go index 1c8430c9d8fbe..9e7bde3daaef2 100644 --- a/pkg/storage/stores/series/series_store_utils.go +++ b/pkg/storage/stores/series/series_store_utils.go @@ -6,9 +6,9 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util" ) func filterChunksByTime(from, through model.Time, chunks []chunk.Chunk) []chunk.Chunk { diff --git a/pkg/storage/stores/series/series_store_utils_test.go b/pkg/storage/stores/series/series_store_utils_test.go index c2bd07ee401b4..35ef774c237ea 100644 --- a/pkg/storage/stores/series/series_store_utils_test.go +++ b/pkg/storage/stores/series/series_store_utils_test.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) // Refer to https://github.com/prometheus/prometheus/issues/2651. 
diff --git a/pkg/storage/stores/series_store_write.go b/pkg/storage/stores/series_store_write.go index db22c5caa1202..a36ae4510b8e3 100644 --- a/pkg/storage/stores/series_store_write.go +++ b/pkg/storage/stores/series_store_write.go @@ -9,12 +9,12 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) var ( diff --git a/pkg/storage/stores/series_store_write_test.go b/pkg/storage/stores/series_store_write_test.go index 9c8c2f4069333..823f5bf11f0a1 100644 --- a/pkg/storage/stores/series_store_write_test.go +++ b/pkg/storage/stores/series_store_write_test.go @@ -8,11 +8,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" ) type mockCache struct { diff --git a/pkg/storage/stores/shipper/bloomshipper/blockscache.go b/pkg/storage/stores/shipper/bloomshipper/blockscache.go index 
767518273059a..b26a4ed5cbda5 100644 --- a/pkg/storage/stores/shipper/bloomshipper/blockscache.go +++ b/pkg/storage/stores/shipper/bloomshipper/blockscache.go @@ -15,9 +15,9 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/storage/stores/shipper/bloomshipper/blockscache_test.go b/pkg/storage/stores/shipper/bloomshipper/blockscache_test.go index 4ec69e6d5a666..1ddc465577fcf 100644 --- a/pkg/storage/stores/shipper/bloomshipper/blockscache_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/blockscache_test.go @@ -11,9 +11,9 @@ import ( "github.com/grafana/dskit/flagext" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" ) var ( diff --git a/pkg/storage/stores/shipper/bloomshipper/cache.go b/pkg/storage/stores/shipper/bloomshipper/cache.go index b4118a55f61fb..3e08b53eac3f3 100644 --- a/pkg/storage/stores/shipper/bloomshipper/cache.go +++ b/pkg/storage/stores/shipper/bloomshipper/cache.go @@ -10,8 +10,9 @@ import ( "github.com/go-kit/log/level" "github.com/pkg/errors" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/cache" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util" ) type 
CloseableBlockQuerier struct { @@ -34,10 +35,14 @@ func (c *CloseableBlockQuerier) SeriesIter() (v1.PeekingIterator[*v1.SeriesWithB return v1.NewPeekingIter[*v1.SeriesWithBloom](c.BlockQuerier), nil } -func LoadBlocksDirIntoCache(path string, c Cache, logger log.Logger) error { - level.Debug(logger).Log("msg", "load bloomshipper working directory into cache", "path", path) - keys, values := loadBlockDirectories(path, logger) - return c.PutMany(context.Background(), keys, values) +func LoadBlocksDirIntoCache(paths []string, c Cache, logger log.Logger) error { + var err util.MultiError + for _, path := range paths { + level.Debug(logger).Log("msg", "load bloomshipper working directory into cache", "path", path) + keys, values := loadBlockDirectories(path, logger) + err.Add(c.PutMany(context.Background(), keys, values)) + } + return err.Err() } func loadBlockDirectories(root string, logger log.Logger) (keys []string, values []BlockDirectory) { @@ -95,8 +100,8 @@ type BlockDirectory struct { size int64 } -func (b BlockDirectory) Block() *v1.Block { - return v1.NewBlock(v1.NewDirectoryBlockReader(b.Path)) +func (b BlockDirectory) Block(metrics *v1.Metrics) *v1.Block { + return v1.NewBlock(v1.NewDirectoryBlockReader(b.Path), metrics) } func (b BlockDirectory) Size() int64 { @@ -120,9 +125,15 @@ func (b *BlockDirectory) resolveSize() error { // BlockQuerier returns a new block querier from the directory. // The passed function `close` is called when the the returned querier is closed. 
-func (b BlockDirectory) BlockQuerier(close func() error) *CloseableBlockQuerier { + +func (b BlockDirectory) BlockQuerier( + usePool bool, + close func() error, + maxPageSize int, + metrics *v1.Metrics, +) *CloseableBlockQuerier { return &CloseableBlockQuerier{ - BlockQuerier: v1.NewBlockQuerier(b.Block()), + BlockQuerier: v1.NewBlockQuerier(b.Block(metrics), usePool, maxPageSize), BlockRef: b.BlockRef, close: close, } diff --git a/pkg/storage/stores/shipper/bloomshipper/cache_test.go b/pkg/storage/stores/shipper/bloomshipper/cache_test.go index ca591efebb993..dd7a44e57cf7d 100644 --- a/pkg/storage/stores/shipper/bloomshipper/cache_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/cache_test.go @@ -11,8 +11,8 @@ import ( "github.com/go-kit/log" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" ) type mockCache[K comparable, V any] struct { @@ -88,7 +88,7 @@ func Test_LoadBlocksDirIntoCache(t *testing.T) { } c := NewFsBlocksCache(cfg, nil, log.NewNopLogger()) - err := LoadBlocksDirIntoCache(wd, c, logger) + err := LoadBlocksDirIntoCache([]string{wd, t.TempDir()}, c, logger) require.NoError(t, err) require.Equal(t, 1, len(c.entries)) diff --git a/pkg/storage/stores/shipper/bloomshipper/client.go b/pkg/storage/stores/shipper/bloomshipper/client.go index f5258570d869c..4ac0c24732b69 100644 --- a/pkg/storage/stores/shipper/bloomshipper/client.go +++ b/pkg/storage/stores/shipper/bloomshipper/client.go @@ -14,12 +14,12 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - 
"github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/util/encoding" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/util/encoding" ) const ( @@ -241,6 +241,7 @@ type Client interface { MetaClient BlockClient IsObjectNotFoundErr(err error) bool + ObjectClient() client.ObjectClient Stop() } @@ -256,15 +257,24 @@ type BloomClient struct { } func NewBloomClient(cfg bloomStoreConfig, client client.ObjectClient, logger log.Logger) (*BloomClient, error) { + fsResolver, err := NewShardedPrefixedResolver(cfg.workingDirs, defaultKeyResolver{}) + if err != nil { + return nil, errors.Wrap(err, "creating fs resolver") + } + return &BloomClient{ KeyResolver: defaultKeyResolver{}, // TODO(owen-d): hook into schema, similar to `{,Parse}ExternalKey` - fsResolver: NewPrefixedResolver(cfg.workingDir, defaultKeyResolver{}), + fsResolver: fsResolver, concurrency: cfg.numWorkers, client: client, logger: logger, }, nil } +func (b *BloomClient) ObjectClient() client.ObjectClient { + return b.client +} + func (b *BloomClient) IsObjectNotFoundErr(err error) bool { return b.client.IsObjectNotFoundErr(err) } diff --git a/pkg/storage/stores/shipper/bloomshipper/client_test.go b/pkg/storage/stores/shipper/bloomshipper/client_test.go index e5bbe3b5b1bf5..9bfd3d1674f66 100644 --- a/pkg/storage/stores/shipper/bloomshipper/client_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/client_test.go @@ -13,9 +13,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" + v1 
"github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" ) func parseTime(s string) model.Time { @@ -41,8 +41,8 @@ func newMockBloomClient(t *testing.T) (*BloomClient, string) { dir := t.TempDir() logger := log.NewLogfmtLogger(os.Stderr) cfg := bloomStoreConfig{ - workingDir: dir, - numWorkers: 3, + workingDirs: []string{dir}, + numWorkers: 3, } client, err := NewBloomClient(cfg, oc, logger) require.NoError(t, err) diff --git a/pkg/storage/stores/shipper/bloomshipper/compress_utils.go b/pkg/storage/stores/shipper/bloomshipper/compress_utils.go index 57025113cea71..52de4a4da5820 100644 --- a/pkg/storage/stores/shipper/bloomshipper/compress_utils.go +++ b/pkg/storage/stores/shipper/bloomshipper/compress_utils.go @@ -6,7 +6,7 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func CompressBloomBlock(ref BlockRef, archivePath, localDst string, logger log.Logger) (Block, error) { diff --git a/pkg/storage/stores/shipper/bloomshipper/compress_utils_test.go b/pkg/storage/stores/shipper/bloomshipper/compress_utils_test.go index 11a6afb21af48..f0b1598dadf9e 100644 --- a/pkg/storage/stores/shipper/bloomshipper/compress_utils_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/compress_utils_test.go @@ -10,7 +10,7 @@ import ( "github.com/google/uuid" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func directoryDoesNotExist(path string) bool { diff --git a/pkg/storage/stores/shipper/bloomshipper/config/config.go b/pkg/storage/stores/shipper/bloomshipper/config/config.go index a37a3028e66e8..de1ad3a12034c 100644 --- a/pkg/storage/stores/shipper/bloomshipper/config/config.go +++ b/pkg/storage/stores/shipper/bloomshipper/config/config.go @@ -4,41 +4,34 
@@ package config import ( "errors" "flag" - "strings" "time" "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) type Config struct { - WorkingDirectory string `yaml:"working_directory"` - BlocksDownloadingQueue DownloadingQueueConfig `yaml:"blocks_downloading_queue"` - BlocksCache BlocksCacheConfig `yaml:"blocks_cache"` - MetasCache cache.Config `yaml:"metas_cache"` -} - -type DownloadingQueueConfig struct { - WorkersCount int `yaml:"workers_count"` - MaxTasksEnqueuedPerTenant int `yaml:"max_tasks_enqueued_per_tenant"` -} - -func (cfg *DownloadingQueueConfig) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet) { - f.IntVar(&cfg.WorkersCount, prefix+"workers-count", 100, "The count of parallel workers that download Bloom Blocks.") - f.IntVar(&cfg.MaxTasksEnqueuedPerTenant, prefix+"max_tasks_enqueued_per_tenant", 10_000, "Maximum number of task in queue per tenant per bloom-gateway. Enqueuing the tasks above this limit will fail an error.") + WorkingDirectory flagext.StringSliceCSV `yaml:"working_directory"` + MaxQueryPageSize flagext.Bytes `yaml:"max_query_page_size"` + DownloadParallelism int `yaml:"download_parallelism"` + BlocksCache BlocksCacheConfig `yaml:"blocks_cache"` + MetasCache cache.Config `yaml:"metas_cache"` } func (c *Config) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet) { - f.StringVar(&c.WorkingDirectory, prefix+"shipper.working-directory", "bloom-shipper", "Working directory to store downloaded Bloom Blocks.") - c.BlocksDownloadingQueue.RegisterFlagsWithPrefix(prefix+"shipper.blocks-downloading-queue.", f) + c.WorkingDirectory = []string{"/data/blooms"} + f.Var(&c.WorkingDirectory, prefix+"shipper.working-directory", "Working directory to store downloaded bloom blocks. 
Supports multiple directories, separated by comma.") + _ = c.MaxQueryPageSize.Set("64MiB") // default should match the one set in pkg/storage/bloom/v1/bloom.go + f.Var(&c.MaxQueryPageSize, prefix+"max-query-page-size", "Maximum size of bloom pages that should be queried. Larger pages than this limit are skipped when querying blooms to limit memory usage.") + f.IntVar(&c.DownloadParallelism, prefix+"download-parallelism", 16, "The amount of maximum concurrent bloom blocks downloads.") c.BlocksCache.RegisterFlagsWithPrefixAndDefaults(prefix+"blocks-cache.", "Cache for bloom blocks. ", f, 24*time.Hour) c.MetasCache.RegisterFlagsWithPrefix(prefix+"metas-cache.", "Cache for bloom metas. ", f) } func (c *Config) Validate() error { - if strings.TrimSpace(c.WorkingDirectory) == "" { - return errors.New("working directory must be specified") + if len(c.WorkingDirectory) == 0 { + return errors.New("at least one working directory must be specified") } return nil } diff --git a/pkg/storage/stores/shipper/bloomshipper/fetcher.go b/pkg/storage/stores/shipper/bloomshipper/fetcher.go index e5779a7294acf..936e120af8501 100644 --- a/pkg/storage/stores/shipper/bloomshipper/fetcher.go +++ b/pkg/storage/stores/shipper/bloomshipper/fetcher.go @@ -16,9 +16,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "k8s.io/utils/keymutex" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util/constants" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util/constants" ) var downloadQueueCapacity = 10000 @@ -26,6 +26,10 @@ var downloadQueueCapacity = 10000 type options struct { ignoreNotFound bool // ignore 404s from object storage; default=true fetchAsync bool // dispatch downloading of block and return immediately; default=false + // return bloom blocks to pool after iteration; default=false + // NB(owen-d): 
this can only be safely used when blooms are not captured outside + // of iteration or it can introduce use-after-free bugs + usePool bool } func (o *options) apply(opts ...FetchOption) { @@ -48,6 +52,12 @@ func WithFetchAsync(v bool) FetchOption { } } +func WithPool(v bool) FetchOption { + return func(opts *options) { + opts.usePool = v + } +} + type fetcher interface { FetchMetas(ctx context.Context, refs []MetaRef) ([]Meta, error) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...FetchOption) ([]*CloseableBlockQuerier, error) @@ -66,19 +76,33 @@ type Fetcher struct { q *downloadQueue[BlockRef, BlockDirectory] - cfg bloomStoreConfig - metrics *fetcherMetrics - logger log.Logger + cfg bloomStoreConfig + metrics *fetcherMetrics + bloomMetrics *v1.Metrics + logger log.Logger } -func NewFetcher(cfg bloomStoreConfig, client Client, metasCache cache.Cache, blocksCache Cache, reg prometheus.Registerer, logger log.Logger) (*Fetcher, error) { +func NewFetcher( + cfg bloomStoreConfig, + client Client, + metasCache cache.Cache, + blocksCache Cache, + reg prometheus.Registerer, + logger log.Logger, + bloomMetrics *v1.Metrics, +) (*Fetcher, error) { + localFSResolver, err := NewShardedPrefixedResolver(cfg.workingDirs, defaultKeyResolver{}) + if err != nil { + return nil, errors.Wrap(err, "creating fs resolver") + } fetcher := &Fetcher{ cfg: cfg, client: client, metasCache: metasCache, blocksCache: blocksCache, - localFSResolver: NewPrefixedResolver(cfg.workingDir, defaultKeyResolver{}), + localFSResolver: localFSResolver, metrics: newFetcherMetrics(reg, constants.Loki, "bloom_store"), + bloomMetrics: bloomMetrics, logger: logger, } q, err := newDownloadQueue[BlockRef, BlockDirectory](downloadQueueCapacity, cfg.numWorkers, fetcher.processTask, logger) @@ -176,7 +200,7 @@ func (f *Fetcher) writeBackMetas(ctx context.Context, metas []Meta) error { // FetchBlocks implements fetcher func (f *Fetcher) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...FetchOption) 
([]*CloseableBlockQuerier, error) { // apply fetch options - cfg := &options{ignoreNotFound: true, fetchAsync: false} + cfg := &options{ignoreNotFound: true, fetchAsync: false, usePool: false} cfg.apply(opts...) // first, resolve blocks from cache and enqueue missing blocks to download queue @@ -218,9 +242,14 @@ func (f *Fetcher) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...Fetc } found++ f.metrics.blocksFound.Inc() - results[i] = dir.BlockQuerier(func() error { - return f.blocksCache.Release(ctx, key) - }) + results[i] = dir.BlockQuerier( + cfg.usePool, + func() error { + return f.blocksCache.Release(ctx, key) + }, + f.cfg.maxBloomPageSize, + f.bloomMetrics, + ) } // fetchAsync defines whether the function may return early or whether it @@ -248,9 +277,14 @@ func (f *Fetcher) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...Fetc case res := <-responses: found++ key := f.client.Block(refs[res.idx]).Addr() - results[res.idx] = res.item.BlockQuerier(func() error { - return f.blocksCache.Release(ctx, key) - }) + results[res.idx] = res.item.BlockQuerier( + cfg.usePool, + func() error { + return f.blocksCache.Release(ctx, key) + }, + f.cfg.maxBloomPageSize, + f.bloomMetrics, + ) } } diff --git a/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go b/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go index 78a681dac5014..43658f9ed2137 100644 --- a/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go @@ -14,10 +14,10 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + 
"github.com/grafana/loki/v3/pkg/storage/config" ) func makeMetas(t *testing.T, schemaCfg config.SchemaConfig, ts model.Time, keyspaces []v1.FingerprintBounds) []Meta { @@ -100,7 +100,7 @@ func TestMetasFetcher(t *testing.T) { t.Run(test.name, func(t *testing.T) { ctx := context.Background() metasCache := cache.NewMockCache() - cfg := bloomStoreConfig{workingDir: t.TempDir(), numWorkers: 1} + cfg := bloomStoreConfig{workingDirs: []string{t.TempDir()}, numWorkers: 1} oc, err := local.NewFSObjectClient(local.FSConfig{Directory: dir}) require.NoError(t, err) @@ -108,7 +108,7 @@ func TestMetasFetcher(t *testing.T) { c, err := NewBloomClient(cfg, oc, logger) require.NoError(t, err) - fetcher, err := NewFetcher(cfg, c, metasCache, nil, nil, logger) + fetcher, err := NewFetcher(cfg, c, metasCache, nil, nil, logger, v1.NewMetrics(nil)) require.NoError(t, err) // prepare metas cache @@ -259,7 +259,7 @@ func TestFetcher_DownloadQueue(t *testing.T) { func TestFetcher_LoadBlocksFromFS(t *testing.T) { base := t.TempDir() - cfg := bloomStoreConfig{workingDir: base, numWorkers: 1} + cfg := bloomStoreConfig{workingDirs: []string{base}, numWorkers: 1} resolver := NewPrefixedResolver(base, defaultKeyResolver{}) refs := []BlockRef{ @@ -286,7 +286,7 @@ func TestFetcher_LoadBlocksFromFS(t *testing.T) { c, err := NewBloomClient(cfg, oc, log.NewNopLogger()) require.NoError(t, err) - fetcher, err := NewFetcher(cfg, c, nil, nil, nil, log.NewNopLogger()) + fetcher, err := NewFetcher(cfg, c, nil, nil, nil, log.NewNopLogger(), v1.NewMetrics(nil)) require.NoError(t, err) found, missing, err := fetcher.loadBlocksFromFS(context.Background(), refs) @@ -312,9 +312,13 @@ func createBlockDir(t *testing.T, path string) { } func TestFetcher_IsBlockDir(t *testing.T) { - cfg := bloomStoreConfig{numWorkers: 1} + cfg := bloomStoreConfig{ + numWorkers: 1, + workingDirs: []string{t.TempDir()}, + } - fetcher, _ := NewFetcher(cfg, nil, nil, nil, nil, log.NewNopLogger()) + fetcher, err := NewFetcher(cfg, nil, 
nil, nil, nil, log.NewNopLogger(), v1.NewMetrics(nil)) + require.NoError(t, err) t.Run("path does not exist", func(t *testing.T) { base := t.TempDir() diff --git a/pkg/storage/stores/shipper/bloomshipper/interval.go b/pkg/storage/stores/shipper/bloomshipper/interval.go index 430bde1a76809..86e0aff919d4b 100644 --- a/pkg/storage/stores/shipper/bloomshipper/interval.go +++ b/pkg/storage/stores/shipper/bloomshipper/interval.go @@ -9,8 +9,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/util/encoding" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/util/encoding" ) // Interval defines a time range with start end end time diff --git a/pkg/storage/stores/shipper/bloomshipper/interval_test.go b/pkg/storage/stores/shipper/bloomshipper/interval_test.go index 2914e18030578..044bffb0b5813 100644 --- a/pkg/storage/stores/shipper/bloomshipper/interval_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/interval_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func Test_Interval_String(t *testing.T) { diff --git a/pkg/storage/stores/shipper/bloomshipper/resolver.go b/pkg/storage/stores/shipper/bloomshipper/resolver.go index b88a48758d63d..8f86ce7cb09ee 100644 --- a/pkg/storage/stores/shipper/bloomshipper/resolver.go +++ b/pkg/storage/stores/shipper/bloomshipper/resolver.go @@ -2,12 +2,14 @@ package bloomshipper import ( "fmt" + "hash" + "hash/fnv" "path" "path/filepath" "strconv" "strings" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) const ( @@ -30,6 +32,8 @@ type KeyResolver interface { ParseMetaKey(Location) (MetaRef, error) Block(BlockRef) Location ParseBlockKey(Location) (BlockRef, error) + 
Tenant(tenant, table string) Location + TenantPrefix(loc Location) (string, error) } type defaultKeyResolver struct{} @@ -124,6 +128,27 @@ func (defaultKeyResolver) ParseBlockKey(loc Location) (BlockRef, error) { }, nil } +func (defaultKeyResolver) Tenant(tenant, table string) Location { + return simpleLocation{ + BloomPrefix, + table, + tenant, + } +} + +func (defaultKeyResolver) TenantPrefix(loc Location) (string, error) { + dir, fn := path.Split(loc.Addr()) + + dirParts := strings.Split(path.Clean(dir), "/") + dirParts = append(dirParts, path.Clean(fn)) + if len(dirParts) < 3 { + return "", fmt.Errorf("directory parts count must be 3 or greater, but was %d : [%s]", len(dirParts), loc) + } + + // The tenant is the third part of the directory. E.g. bloom/schema_b_table_20088/1/metas where 1 is the tenant + return dirParts[2], nil +} + type PrefixedResolver struct { prefix string KeyResolver @@ -150,6 +175,50 @@ func (p PrefixedResolver) Block(ref BlockRef) Location { } } +type hashable interface { + Hash(hash.Hash32) error +} + +type ShardedPrefixedResolver struct { + prefixes []string + KeyResolver +} + +func NewShardedPrefixedResolver(prefixes []string, resolver KeyResolver) (KeyResolver, error) { + n := len(prefixes) + switch n { + case 0: + return nil, fmt.Errorf("requires at least 1 prefix") + case 1: + return NewPrefixedResolver(prefixes[0], resolver), nil + default: + return ShardedPrefixedResolver{ + prefixes: prefixes, + KeyResolver: resolver, + }, nil + } +} + +func (r ShardedPrefixedResolver) prefix(ref hashable) key { + h := fnv.New32() + _ = ref.Hash(h) + return key(r.prefixes[h.Sum32()%uint32(len(r.prefixes))]) +} + +func (r ShardedPrefixedResolver) Meta(ref MetaRef) Location { + return locations{ + r.prefix(ref), + r.KeyResolver.Meta(ref), + } +} + +func (r ShardedPrefixedResolver) Block(ref BlockRef) Location { + return locations{ + r.prefix(ref), + r.KeyResolver.Block(ref), + } +} + type Location interface { Addr() string // object storage 
location LocalPath() string // local path version diff --git a/pkg/storage/stores/shipper/bloomshipper/resolver_test.go b/pkg/storage/stores/shipper/bloomshipper/resolver_test.go index b2aa7e60a4b53..ba45845ea9ba5 100644 --- a/pkg/storage/stores/shipper/bloomshipper/resolver_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/resolver_test.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func TestResolver_ParseMetaKey(t *testing.T) { @@ -53,3 +53,49 @@ func TestResolver_ParseBlockKey(t *testing.T) { require.NoError(t, err) require.Equal(t, ref, parsed) } + +func TestResolver_ShardedPrefixedResolver(t *testing.T) { + + blockRef := BlockRef{ + Ref: Ref{ + TenantID: "tenant", + TableName: "table_1", + Bounds: v1.NewBounds(0x0000, 0xffff), + StartTimestamp: 0, + EndTimestamp: 3600000, + Checksum: 48350, + }, + } + + metaRef := MetaRef{ + Ref: Ref{ + TenantID: "tenant", + TableName: "table_1", + Bounds: v1.NewBounds(0x0000, 0xffff), + Checksum: 43981, + }, + } + + t.Run("empty prefixes cause error", func(t *testing.T) { + _, err := NewShardedPrefixedResolver([]string{}, defaultKeyResolver{}) + require.ErrorContains(t, err, "requires at least 1 prefix") + }) + + t.Run("single prefix", func(t *testing.T) { + r, err := NewShardedPrefixedResolver([]string{"prefix"}, defaultKeyResolver{}) + require.NoError(t, err) + loc := r.Meta(metaRef) + require.Equal(t, "prefix/bloom/table_1/tenant/metas/0000000000000000-000000000000ffff-abcd.json", loc.LocalPath()) + loc = r.Block(blockRef) + require.Equal(t, "prefix/bloom/table_1/tenant/blocks/0000000000000000-000000000000ffff/0-3600000-bcde.tar.gz", loc.LocalPath()) + }) + + t.Run("multiple prefixes", func(t *testing.T) { + r, err := NewShardedPrefixedResolver([]string{"a", "b", "c", "d"}, defaultKeyResolver{}) + require.NoError(t, err) + loc := r.Meta(metaRef) + require.Equal(t, 
"b/bloom/table_1/tenant/metas/0000000000000000-000000000000ffff-abcd.json", loc.LocalPath()) + loc = r.Block(blockRef) + require.Equal(t, "d/bloom/table_1/tenant/blocks/0000000000000000-000000000000ffff/0-3600000-bcde.tar.gz", loc.LocalPath()) + }) +} diff --git a/pkg/storage/stores/shipper/bloomshipper/shipper.go b/pkg/storage/stores/shipper/bloomshipper/shipper.go index 66982bc065f87..09d4652fb9f61 100644 --- a/pkg/storage/stores/shipper/bloomshipper/shipper.go +++ b/pkg/storage/stores/shipper/bloomshipper/shipper.go @@ -5,7 +5,7 @@ import ( "fmt" "sort" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) type ForEachBlockCallback func(bq *v1.BlockQuerier, bounds v1.FingerprintBounds) error diff --git a/pkg/storage/stores/shipper/bloomshipper/shipper_test.go b/pkg/storage/stores/shipper/bloomshipper/shipper_test.go index a85132d379bb6..81e17a84b5279 100644 --- a/pkg/storage/stores/shipper/bloomshipper/shipper_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/shipper_test.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func TestBloomShipper_findBlocks(t *testing.T) { diff --git a/pkg/storage/stores/shipper/bloomshipper/store.go b/pkg/storage/stores/shipper/bloomshipper/store.go index 9f099d683cf4e..42e9b66eae6ad 100644 --- a/pkg/storage/stores/shipper/bloomshipper/store.go +++ b/pkg/storage/stores/shipper/bloomshipper/store.go @@ -13,13 +13,13 @@ import ( "github.com/prometheus/common/model" "golang.org/x/exp/slices" - "github.com/grafana/loki/pkg/storage" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - 
"github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/constants" ) var ( @@ -30,14 +30,24 @@ type Store interface { ResolveMetas(ctx context.Context, params MetaSearchParams) ([][]MetaRef, []*Fetcher, error) FetchMetas(ctx context.Context, params MetaSearchParams) ([]Meta, error) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...FetchOption) ([]*CloseableBlockQuerier, error) + TenantFilesForInterval( + ctx context.Context, interval Interval, + filter func(tenant string, object client.StorageObject) bool, + ) (map[string][]client.StorageObject, error) Fetcher(ts model.Time) (*Fetcher, error) Client(ts model.Time) (Client, error) Stop() } +type StoreWithMetrics interface { + Store + BloomMetrics() *v1.Metrics +} + type bloomStoreConfig struct { - workingDir string - numWorkers int + workingDirs []string + numWorkers int + maxBloomPageSize int } // Compiler check to ensure bloomStoreEntry implements the Store interface @@ -123,8 +133,88 @@ func (b *bloomStoreEntry) FetchMetas(ctx context.Context, params MetaSearchParam } // FetchBlocks implements Store. -func (b *bloomStoreEntry) FetchBlocks(ctx context.Context, refs []BlockRef, _ ...FetchOption) ([]*CloseableBlockQuerier, error) { - return b.fetcher.FetchBlocks(ctx, refs) +func (b *bloomStoreEntry) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...FetchOption) ([]*CloseableBlockQuerier, error) { + return b.fetcher.FetchBlocks(ctx, refs, opts...) 
+} + +func (b *bloomStoreEntry) TenantFilesForInterval( + ctx context.Context, + interval Interval, + filter func(tenant string, object client.StorageObject) bool, +) (map[string][]client.StorageObject, error) { + tables := tablesForRange(b.cfg, interval) + if len(tables) == 0 { + return nil, nil + } + + tenants := make(map[string][]client.StorageObject, 100) + for _, table := range tables { + prefix := path.Join(rootFolder, table) + level.Debug(b.fetcher.logger).Log( + "msg", "listing tenants", + "store", b.cfg.From, + "table", table, + "prefix", prefix, + ) + objects, _, err := b.objectClient.List(ctx, prefix, "") + if err != nil { + if b.objectClient.IsObjectNotFoundErr(err) { + continue + } + + return nil, fmt.Errorf("error listing tenants under prefix [%s]: %w", prefix, err) + } + if len(objects) == 0 { + continue + } + + // Sort objects by the key to ensure keys are sorted by tenant. + cmpObj := func(a, b client.StorageObject) int { + if a.Key < b.Key { + return -1 + } + if a.Key > b.Key { + return 1 + } + return 0 + } + if !slices.IsSortedFunc(objects, cmpObj) { + slices.SortFunc(objects, cmpObj) + } + + for i := 0; i < len(objects); i++ { + tenant, err := b.TenantPrefix(key(objects[i].Key)) + if err != nil { + return nil, fmt.Errorf("error parsing tenant key [%s]: %w", objects[i].Key, err) + } + + // Search next object with different tenant + var j int + for j = i + 1; j < len(objects); j++ { + nextTenant, err := b.TenantPrefix(key(objects[j].Key)) + if err != nil { + return nil, fmt.Errorf("error parsing tenant key [%s]: %w", objects[i].Key, err) + } + if nextTenant != tenant { + break + } + } + + if _, ok := tenants[tenant]; !ok { + tenants[tenant] = nil // Initialize tenant with empty slice + } + + if filter != nil && !filter(tenant, objects[i]) { + continue + } + + // Add all objects for this tenant + tenants[tenant] = append(tenants[tenant], objects[i:j]...) 
+ i = j - 1 // -1 because the loop will increment i by 1 + } + } + + return tenants, nil } // Fetcher implements Store. @@ -144,12 +234,14 @@ func (b bloomStoreEntry) Stop() { } // Compiler check to ensure BloomStore implements the Store interface -var _ Store = &BloomStore{} +var _ StoreWithMetrics = &BloomStore{} type BloomStore struct { - stores []*bloomStoreEntry - storageConfig storage.Config - metrics *storeMetrics + stores []*bloomStoreEntry + storageConfig storage.Config + metrics *storeMetrics + bloomMetrics *v1.Metrics + logger log.Logger defaultKeyResolver // TODO(owen-d): impl schema aware resolvers } @@ -166,6 +258,7 @@ func NewBloomStore( store := &BloomStore{ storageConfig: storageConfig, metrics: newStoreMetrics(reg, constants.Loki, "bloom_store"), + bloomMetrics: v1.NewMetrics(reg), logger: logger, } @@ -184,12 +277,15 @@ func NewBloomStore( // TODO(chaudum): Remove wrapper cfg := bloomStoreConfig{ - workingDir: storageConfig.BloomShipperConfig.WorkingDirectory, - numWorkers: storageConfig.BloomShipperConfig.BlocksDownloadingQueue.WorkersCount, + workingDirs: storageConfig.BloomShipperConfig.WorkingDirectory, + numWorkers: storageConfig.BloomShipperConfig.DownloadParallelism, + maxBloomPageSize: int(storageConfig.BloomShipperConfig.MaxQueryPageSize), } - if err := util.EnsureDirectory(cfg.workingDir); err != nil { - return nil, errors.Wrapf(err, "failed to create working directory for bloom store: '%s'", cfg.workingDir) + for _, wd := range cfg.workingDirs { + if err := util.EnsureDirectory(wd); err != nil { + return nil, errors.Wrapf(err, "failed to create working directory for bloom store: '%s'", wd) + } } for _, periodicConfig := range periodicConfigs { @@ -204,7 +300,7 @@ func NewBloomStore( } regWithLabels := prometheus.WrapRegistererWith(prometheus.Labels{"store": periodicConfig.From.String()}, reg) - fetcher, err := NewFetcher(cfg, bloomClient, metasCache, blocksCache, regWithLabels, logger) + fetcher, err := NewFetcher(cfg, bloomClient, 
metasCache, blocksCache, regWithLabels, logger, store.bloomMetrics) if err != nil { return nil, errors.Wrapf(err, "creating fetcher for period %s", periodicConfig.From) } @@ -221,6 +317,10 @@ func NewBloomStore( return store, nil } +func (b *BloomStore) BloomMetrics() *v1.Metrics { + return b.bloomMetrics +} + // Impements KeyResolver func (b *BloomStore) Meta(ref MetaRef) (loc Location) { _ = b.storeDo(ref.StartTimestamp, func(s *bloomStoreEntry) error { @@ -253,6 +353,34 @@ func (b *BloomStore) Block(ref BlockRef) (loc Location) { return } +func (b *BloomStore) TenantFilesForInterval( + ctx context.Context, + interval Interval, + filter func(tenant string, object client.StorageObject) bool, +) (map[string][]client.StorageObject, error) { + var allTenants map[string][]client.StorageObject + + err := b.forStores(ctx, interval, func(innerCtx context.Context, interval Interval, store Store) error { + tenants, err := store.TenantFilesForInterval(innerCtx, interval, filter) + if err != nil { + return err + } + + if allTenants == nil { + allTenants = tenants + return nil + } + + for tenant, files := range tenants { + allTenants[tenant] = append(allTenants[tenant], files...) + } + + return nil + }) + + return allTenants, err +} + // Fetcher implements Store. 
func (b *BloomStore) Fetcher(ts model.Time) (*Fetcher, error) { if store := b.getStore(ts); store != nil { diff --git a/pkg/storage/stores/shipper/bloomshipper/store_test.go b/pkg/storage/stores/shipper/bloomshipper/store_test.go index 9274bfc620b6e..077a871e71fbb 100644 --- a/pkg/storage/stores/shipper/bloomshipper/store_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/store_test.go @@ -15,22 +15,26 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/cache" - storageconfig "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" + "github.com/grafana/loki/v3/pkg/storage" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + storageconfig "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" ) func newMockBloomStore(t *testing.T) (*BloomStore, string, error) { - workDir := t.TempDir() - return newMockBloomStoreWithWorkDir(t, workDir) + dir := t.TempDir() + workDir := filepath.Join(dir, "bloomshipper") + storeDir := filepath.Join(dir, "fs-storage") + return newMockBloomStoreWithWorkDir(t, workDir, storeDir) } -func newMockBloomStoreWithWorkDir(t *testing.T, workDir string) (*BloomStore, string, error) { +func newMockBloomStoreWithWorkDir(t *testing.T, workDir, storeDir string) (*BloomStore, string, error) { periodicConfigs := []storageconfig.PeriodConfig{ { - ObjectType: storageconfig.StorageTypeInMemory, + ObjectType: storageconfig.StorageTypeFileSystem, From: parseDayTime("2024-01-01"), IndexTables: storageconfig.IndexPeriodicTableConfig{ PeriodicTableConfig: 
storageconfig.PeriodicTableConfig{ @@ -39,7 +43,7 @@ func newMockBloomStoreWithWorkDir(t *testing.T, workDir string) (*BloomStore, st }}, }, { - ObjectType: storageconfig.StorageTypeInMemory, + ObjectType: storageconfig.StorageTypeFileSystem, From: parseDayTime("2024-02-01"), IndexTables: storageconfig.IndexPeriodicTableConfig{ PeriodicTableConfig: storageconfig.PeriodicTableConfig{ @@ -50,11 +54,12 @@ func newMockBloomStoreWithWorkDir(t *testing.T, workDir string) (*BloomStore, st } storageConfig := storage.Config{ + FSConfig: local.FSConfig{ + Directory: storeDir, + }, BloomShipperConfig: config.Config{ - WorkingDirectory: workDir, - BlocksDownloadingQueue: config.DownloadingQueueConfig{ - WorkersCount: 1, - }, + WorkingDirectory: []string{workDir}, + DownloadParallelism: 1, BlocksCache: config.BlocksCacheConfig{ SoftLimit: 1 << 20, HardLimit: 2 << 20, @@ -272,6 +277,118 @@ func TestBloomStore_FetchBlocks(t *testing.T) { ) } +func TestBloomStore_TenantFilesForInterval(t *testing.T) { + ctx := context.Background() + var keyResolver defaultKeyResolver + + store, _, err := newMockBloomStore(t) + require.NoError(t, err) + + // schema 1 + // day 1 - 1 tenant + s1d1t1m1, _ := createMetaInStorage(store, "1", parseTime("2024-01-19 00:00"), 0x00010000, 0x0001ffff) + s1d1t1m2, _ := createMetaInStorage(store, "1", parseTime("2024-01-19 00:00"), 0x00000000, 0x0000ffff) + // day 2 - 2 tenants + s1d2t1m1, _ := createMetaInStorage(store, "1", parseTime("2024-01-20 00:00"), 0x00010000, 0x0001ffff) + s1d2t1m2, _ := createMetaInStorage(store, "1", parseTime("2024-01-20 00:00"), 0x00000000, 0x0000ffff) + s1d2t2m1, _ := createMetaInStorage(store, "2", parseTime("2024-01-20 00:00"), 0x00010000, 0x0001ffff) + s1d2t2m2, _ := createMetaInStorage(store, "2", parseTime("2024-01-20 00:00"), 0x00000000, 0x0000ffff) + + // schema 2 + // day 1 - 2 tenants + s2d1t1m1, _ := createMetaInStorage(store, "1", parseTime("2024-02-07 00:00"), 0x00010000, 0x0001ffff) + s2d1t1m2, _ := 
createMetaInStorage(store, "1", parseTime("2024-02-07 00:00"), 0x00000000, 0x0000ffff) + s2d1t2m1, _ := createMetaInStorage(store, "2", parseTime("2024-02-07 00:00"), 0x00010000, 0x0001ffff) + s2d1t2m2, _ := createMetaInStorage(store, "2", parseTime("2024-02-07 00:00"), 0x00000000, 0x0000ffff) + // day 2 - 1 tenant + s2d2t2m1, _ := createMetaInStorage(store, "2", parseTime("2024-02-10 00:00"), 0x00010000, 0x0001ffff) + s2d2t2m2, _ := createMetaInStorage(store, "2", parseTime("2024-02-10 00:00"), 0x00000000, 0x0000ffff) + + t.Run("no filter", func(t *testing.T) { + tenantFiles, err := store.TenantFilesForInterval( + ctx, + NewInterval(parseTime("2024-01-18 00:00"), parseTime("2024-02-12 00:00")), + nil, + ) + require.NoError(t, err) + + var tenants []string + for tenant := range tenantFiles { + tenants = append(tenants, tenant) + } + require.ElementsMatch(t, []string{"1", "2"}, tenants) + + tenant1Keys := keysFromStorageObjects(tenantFiles["1"]) + expectedTenant1Keys := []string{ + // schema 1 - day 1 + keyResolver.Meta(s1d1t1m1.MetaRef).Addr(), + keyResolver.Meta(s1d1t1m2.MetaRef).Addr(), + // schema 1 - day 2 + keyResolver.Meta(s1d2t1m1.MetaRef).Addr(), + keyResolver.Meta(s1d2t1m2.MetaRef).Addr(), + // schema 2 - day 1 + keyResolver.Meta(s2d1t1m1.MetaRef).Addr(), + keyResolver.Meta(s2d1t1m2.MetaRef).Addr(), + } + require.ElementsMatch(t, expectedTenant1Keys, tenant1Keys) + + tenant2Keys := keysFromStorageObjects(tenantFiles["2"]) + expectedTenant2Keys := []string{ + // schema 1 - day 2 + keyResolver.Meta(s1d2t2m1.MetaRef).Addr(), + keyResolver.Meta(s1d2t2m2.MetaRef).Addr(), + // schema 2 - day 1 + keyResolver.Meta(s2d1t2m1.MetaRef).Addr(), + keyResolver.Meta(s2d1t2m2.MetaRef).Addr(), + // schema 2 - day 2 + keyResolver.Meta(s2d2t2m1.MetaRef).Addr(), + keyResolver.Meta(s2d2t2m2.MetaRef).Addr(), + } + require.ElementsMatch(t, expectedTenant2Keys, tenant2Keys) + }) + + t.Run("filter tenant 1", func(t *testing.T) { + tenantFiles, err := store.TenantFilesForInterval( + 
ctx, + NewInterval(parseTime("2024-01-18 00:00"), parseTime("2024-02-12 00:00")), + func(tenant string, object client.StorageObject) bool { + return tenant == "1" + }, + ) + require.NoError(t, err) + + var tenants []string + for tenant := range tenantFiles { + tenants = append(tenants, tenant) + } + require.ElementsMatch(t, []string{"1", "2"}, tenants) + + tenant1Keys := keysFromStorageObjects(tenantFiles["1"]) + expectedTenant1Keys := []string{ + // schema 1 - day 1 + keyResolver.Meta(s1d1t1m1.MetaRef).Addr(), + keyResolver.Meta(s1d1t1m2.MetaRef).Addr(), + // schema 1 - day 2 + keyResolver.Meta(s1d2t1m1.MetaRef).Addr(), + keyResolver.Meta(s1d2t1m2.MetaRef).Addr(), + // schema 2 - day 1 + keyResolver.Meta(s2d1t1m1.MetaRef).Addr(), + keyResolver.Meta(s2d1t1m2.MetaRef).Addr(), + } + require.ElementsMatch(t, expectedTenant1Keys, tenant1Keys) + + tenant2Keys := keysFromStorageObjects(tenantFiles["2"]) + require.Empty(t, tenant2Keys) + }) +} + +func keysFromStorageObjects(objects []client.StorageObject) (keys []string) { + for _, object := range objects { + keys = append(keys, object.Key) + } + return keys +} + func TestBloomShipper_WorkingDir(t *testing.T) { t.Run("insufficient permissions on directory yields error", func(t *testing.T) { base := t.TempDir() @@ -281,7 +398,7 @@ func TestBloomShipper_WorkingDir(t *testing.T) { fi, _ := os.Stat(wd) t.Log("working directory", wd, fi.Mode()) - _, _, err = newMockBloomStoreWithWorkDir(t, wd) + _, _, err = newMockBloomStoreWithWorkDir(t, wd, base) require.ErrorContains(t, err, "insufficient permissions") }) @@ -291,7 +408,7 @@ func TestBloomShipper_WorkingDir(t *testing.T) { wd := filepath.Join(base, "doesnotexist") t.Log("working directory", wd) - store, _, err := newMockBloomStoreWithWorkDir(t, wd) + store, _, err := newMockBloomStoreWithWorkDir(t, wd, base) require.NoError(t, err) b, err := createBlockInStorage(t, store, "tenant", parseTime("2024-01-20 00:00"), 0x00000000, 0x0000ffff) require.NoError(t, err) diff --git 
a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index.go index 584116b240417..cb73f9aa95bf0 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index.go @@ -12,14 +12,14 @@ import ( "github.com/prometheus/prometheus/model/labels" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - seriesindex "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - shipperutil "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + seriesindex "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + shipperutil "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" ) type CompactedIndex struct { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index_test.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index_test.go index ace66e0f06749..043d36d00401e 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index_test.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index_test.go @@ -12,13 +12,13 @@ import ( 
"github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestCompactedIndex_IndexProcessor(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index.go index 402fb8471cc84..73e87e06e1e76 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/compactor/retention" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index_compactor.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index_compactor.go index b017cb82f7fd4..3a5c4a96f3e5a 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index_compactor.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index_compactor.go @@ -5,8 +5,8 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/compactor" + "github.com/grafana/loki/v3/pkg/storage/config" ) const 
( diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator.go index 450ccfabfc1c6..7b2422fdc1149 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator.go @@ -8,9 +8,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/config" - series_index "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/config" + series_index "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator_test.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator_test.go index 509e5661a4d62..26e9aef596bf4 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator_test.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator_test.go @@ -15,11 +15,11 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" ) func Test_ChunkIterator(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/series.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/series.go index 497153625e214..2e53a37b44984 100644 --- 
a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/series.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/series.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/config" ) type userSeries struct { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor.go index d864d306a2ba7..bdd42afc935d6 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor.go @@ -15,11 +15,11 @@ import ( "github.com/pkg/errors" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - shipper_util "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" + "github.com/grafana/loki/v3/pkg/compactor" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + shipper_util "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" ) //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor_test.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor_test.go index 4a2b6d2371e7d..4fa6d598c8e36 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor_test.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor_test.go @@ -16,14 +16,14 @@ import ( 
"github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/testutil" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/testutil" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util.go index 27897df0dd02e..25ccb52e9b18e 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - ingesterclient "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/chunkenc" + ingesterclient "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" ) // unsafeGetString is like yolostring but with a meaningful name diff --git 
a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util_test.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util_test.go index 7d015316017a6..7e6be7bb1414e 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util_test.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util_test.go @@ -15,18 +15,18 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - shipper_util "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + shipper_util "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) func dayFromTime(t model.Time) config.DayTime { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/index.go 
b/pkg/storage/stores/shipper/indexshipper/boltdb/index.go index 6d548f02c3348..aa0a615c00eb7 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/index.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/index.go @@ -13,11 +13,11 @@ import ( "github.com/go-kit/log/level" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - series_index "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + series_index "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const TempFileSuffix = ".temp" diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/index_client.go b/pkg/storage/stores/shipper/indexshipper/boltdb/index_client.go index c60c7feeebce0..07f67c4f35edb 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/index_client.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/index_client.go @@ -13,12 +13,12 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - series_index "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/downloads" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + series_index 
"github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/downloads" ) type indexClientMetrics struct { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/querier.go b/pkg/storage/stores/shipper/indexshipper/boltdb/querier.go index 54d6034b36be1..e27f4fb20dccc 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/querier.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/querier.go @@ -7,9 +7,9 @@ import ( "github.com/grafana/dskit/tenant" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/stores/series/index" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" ) type Writer interface { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/table.go b/pkg/storage/stores/shipper/indexshipper/boltdb/table.go index 0db3ffa7f0a68..a5ed2ff0009a6 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/table.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/table.go @@ -15,11 +15,11 @@ import ( "github.com/grafana/dskit/tenant" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/series/index" - shipper_util "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + 
shipper_util "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager.go b/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager.go index 29fa04deb326b..4bc00d082d293 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager.go @@ -16,12 +16,12 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" ) type tableManagerMetrics struct { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager_test.go b/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager_test.go index 2f8b9f2d4b05c..9cd73fe3e60c6 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager_test.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager_test.go @@ -12,12 +12,12 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - 
"github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/testutil" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/testutil" ) const indexTablePeriod = 24 * time.Hour diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/table_test.go b/pkg/storage/stores/shipper/indexshipper/boltdb/table_test.go index 44c583e7966cc..4066149c042aa 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/table_test.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/table_test.go @@ -13,11 +13,11 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/series/index" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/testutil" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/testutil" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/downloads/index_set.go b/pkg/storage/stores/shipper/indexshipper/downloads/index_set.go index f9c12edd9c6f3..1be495ed46d99 
100644 --- a/pkg/storage/stores/shipper/indexshipper/downloads/index_set.go +++ b/pkg/storage/stores/shipper/indexshipper/downloads/index_set.go @@ -17,11 +17,11 @@ import ( "github.com/grafana/dskit/concurrency" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/downloads/index_set_test.go b/pkg/storage/stores/shipper/indexshipper/downloads/index_set_test.go index a0cadf1da6cdb..5a2f6522de9f2 100644 --- a/pkg/storage/stores/shipper/indexshipper/downloads/index_set_test.go +++ b/pkg/storage/stores/shipper/indexshipper/downloads/index_set_test.go @@ -9,10 +9,10 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func buildTestIndexSet(t *testing.T, userID, path string) (*indexSet, stopFunc) { diff --git a/pkg/storage/stores/shipper/indexshipper/downloads/table.go 
b/pkg/storage/stores/shipper/indexshipper/downloads/table.go index 47c78924f2f5b..4767861300594 100644 --- a/pkg/storage/stores/shipper/indexshipper/downloads/table.go +++ b/pkg/storage/stores/shipper/indexshipper/downloads/table.go @@ -15,11 +15,11 @@ import ( "github.com/pkg/errors" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) // timeout for downloading initial files for a table to avoid leaking resources by allowing it to take all the time. 
diff --git a/pkg/storage/stores/shipper/indexshipper/downloads/table_manager.go b/pkg/storage/stores/shipper/indexshipper/downloads/table_manager.go index 8d3875afe75cd..12e8a9373ed7d 100644 --- a/pkg/storage/stores/shipper/indexshipper/downloads/table_manager.go +++ b/pkg/storage/stores/shipper/indexshipper/downloads/table_manager.go @@ -14,12 +14,12 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/downloads/table_manager_test.go b/pkg/storage/stores/shipper/indexshipper/downloads/table_manager_test.go index b02912381e9db..e8d9e3efcc8d3 100644 --- a/pkg/storage/stores/shipper/indexshipper/downloads/table_manager_test.go +++ b/pkg/storage/stores/shipper/indexshipper/downloads/table_manager_test.go @@ -11,11 +11,11 @@ import ( "github.com/go-kit/log" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + 
"github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/downloads/table_test.go b/pkg/storage/stores/shipper/indexshipper/downloads/table_test.go index cc88451704b65..a12bfc70cbc69 100644 --- a/pkg/storage/stores/shipper/indexshipper/downloads/table_test.go +++ b/pkg/storage/stores/shipper/indexshipper/downloads/table_test.go @@ -12,9 +12,9 @@ import ( "github.com/pkg/errors" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client.go b/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client.go index 69553fc34b6c3..472f6c019e85e 100644 --- a/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client.go +++ b/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client.go @@ -10,6 +10,7 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" + "github.com/gogo/status" "github.com/grafana/dskit/concurrency" "github.com/grafana/dskit/grpcclient" "github.com/grafana/dskit/instrument" @@ -23,14 +24,17 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "google.golang.org/grpc" - - "github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/stores/series/index" - 
"github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/discovery" - util_math "github.com/grafana/loki/pkg/util/math" + "google.golang.org/grpc/codes" + + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/discovery" + util_math "github.com/grafana/loki/v3/pkg/util/math" ) const ( @@ -237,78 +241,184 @@ func (s *GatewayClient) QueryIndex(_ context.Context, _ *logproto.QueryIndexRequ panic("not implemented") } -func (s *GatewayClient) GetChunkRef(ctx context.Context, in *logproto.GetChunkRefRequest, opts ...grpc.CallOption) (*logproto.GetChunkRefResponse, error) { +func (s *GatewayClient) GetChunkRef(ctx context.Context, in *logproto.GetChunkRefRequest) (*logproto.GetChunkRefResponse, error) { var ( resp *logproto.GetChunkRefResponse err error ) err = s.poolDo(ctx, func(client logproto.IndexGatewayClient) error { - resp, err = client.GetChunkRef(ctx, in, opts...) + resp, err = client.GetChunkRef(ctx, in) return err }) return resp, err } -func (s *GatewayClient) GetSeries(ctx context.Context, in *logproto.GetSeriesRequest, opts ...grpc.CallOption) (*logproto.GetSeriesResponse, error) { +func (s *GatewayClient) GetSeries(ctx context.Context, in *logproto.GetSeriesRequest) (*logproto.GetSeriesResponse, error) { var ( resp *logproto.GetSeriesResponse err error ) err = s.poolDo(ctx, func(client logproto.IndexGatewayClient) error { - resp, err = client.GetSeries(ctx, in, opts...) 
+ resp, err = client.GetSeries(ctx, in) return err }) return resp, err } -func (s *GatewayClient) LabelNamesForMetricName(ctx context.Context, in *logproto.LabelNamesForMetricNameRequest, opts ...grpc.CallOption) (*logproto.LabelResponse, error) { +func (s *GatewayClient) LabelNamesForMetricName(ctx context.Context, in *logproto.LabelNamesForMetricNameRequest) (*logproto.LabelResponse, error) { var ( resp *logproto.LabelResponse err error ) err = s.poolDo(ctx, func(client logproto.IndexGatewayClient) error { - resp, err = client.LabelNamesForMetricName(ctx, in, opts...) + resp, err = client.LabelNamesForMetricName(ctx, in) return err }) return resp, err } -func (s *GatewayClient) LabelValuesForMetricName(ctx context.Context, in *logproto.LabelValuesForMetricNameRequest, opts ...grpc.CallOption) (*logproto.LabelResponse, error) { +func (s *GatewayClient) LabelValuesForMetricName(ctx context.Context, in *logproto.LabelValuesForMetricNameRequest) (*logproto.LabelResponse, error) { var ( resp *logproto.LabelResponse err error ) err = s.poolDo(ctx, func(client logproto.IndexGatewayClient) error { - resp, err = client.LabelValuesForMetricName(ctx, in, opts...) + resp, err = client.LabelValuesForMetricName(ctx, in) return err }) return resp, err } -func (s *GatewayClient) GetStats(ctx context.Context, in *logproto.IndexStatsRequest, opts ...grpc.CallOption) (*logproto.IndexStatsResponse, error) { +func (s *GatewayClient) GetStats(ctx context.Context, in *logproto.IndexStatsRequest) (*logproto.IndexStatsResponse, error) { var ( resp *logproto.IndexStatsResponse err error ) err = s.poolDo(ctx, func(client logproto.IndexGatewayClient) error { - resp, err = client.GetStats(ctx, in, opts...) 
+ resp, err = client.GetStats(ctx, in) return err }) return resp, err } -func (s *GatewayClient) GetVolume(ctx context.Context, in *logproto.VolumeRequest, opts ...grpc.CallOption) (*logproto.VolumeResponse, error) { +func (s *GatewayClient) GetVolume(ctx context.Context, in *logproto.VolumeRequest) (*logproto.VolumeResponse, error) { var ( resp *logproto.VolumeResponse err error ) err = s.poolDo(ctx, func(client logproto.IndexGatewayClient) error { - resp, err = client.GetVolume(ctx, in, opts...) + resp, err = client.GetVolume(ctx, in) return err }) return resp, err } +func (s *GatewayClient) GetShards( + ctx context.Context, + in *logproto.ShardsRequest, +) (res *logproto.ShardsResponse, err error) { + + // We try to get the shards from the index gateway, + // but if it's not implemented, we fall back to the stats. + // We limit the maximum number of errors to 2 to avoid + // cascading all requests to new node(s) when + // the idx-gw replicas start to update to a version + // which supports the new API. + var ( + maxErrs = 2 + errCt int + ) + + if err := s.poolDoWithStrategy( + ctx, + func(client logproto.IndexGatewayClient) error { + perReplicaResult := &logproto.ShardsResponse{} + streamer, err := client.GetShards(ctx, in) + if err != nil { + return errors.Wrap(err, "get shards") + } + + // TODO(owen-d): stream currently unused (buffered) because query planning doesn't expect a streamed response, + // but can be improved easily in the future by using a stream here. + for { + resp, err := streamer.Recv() + if err == io.EOF { + break + } + if err != nil { + return errors.WithStack(err) + } + perReplicaResult.Shards = append(perReplicaResult.Shards, resp.Shards...) + perReplicaResult.Statistics.Merge(resp.Statistics) + } + + // Since `poolDo` retries on error, we only want to set the response if we got a successful response. + // This avoids cases where we add duplicates to the response on retries. 
+ res = perReplicaResult + + return nil + }, + func(err error) bool { + errCt++ + return errCt <= maxErrs + }, + ); err != nil { + if isUnimplementedCallError(err) { + return s.getShardsFromStatsFallback(ctx, in) + } + return nil, err + } + return res, nil +} + +func (s *GatewayClient) getShardsFromStatsFallback( + ctx context.Context, + in *logproto.ShardsRequest, +) (*logproto.ShardsResponse, error) { + userID, err := tenant.TenantID(ctx) + if err != nil { + return nil, errors.Wrap(err, "index gateway client get tenant ID") + } + + p, err := indexgateway.ExtractShardRequestMatchersAndAST(in.Query) + if err != nil { + return nil, errors.Wrap(err, "failure while falling back to stats for shard calculation") + + } + + stats, err := s.GetStats( + ctx, + &logproto.IndexStatsRequest{ + From: in.From, + Through: in.Through, + Matchers: (&syntax.MatchersExpr{Mts: p.Matchers}).String(), + }, + ) + if err != nil { + return nil, err + } + + var strategy sharding.PowerOfTwoSharding + shards := strategy.ShardsFor(stats.Bytes, uint64(s.limits.TSDBMaxBytesPerShard(userID))) + return &logproto.ShardsResponse{ + Shards: shards, + }, nil +} + +// TODO(owen-d): this was copied from ingester_querier.go -- move it to a shared pkg +// isUnimplementedCallError tells if the GRPC error is a gRPC error with code Unimplemented. +func isUnimplementedCallError(err error) bool { + if err == nil { + return false + } + + s, ok := status.FromError(err) + if !ok { + return false + } + return (s.Code() == codes.Unimplemented) +} + func (s *GatewayClient) doQueries(ctx context.Context, queries []index.Query, callback index.QueryPagesCallback) error { queryKeyQueryMap := make(map[string]index.Query, len(queries)) gatewayQueries := make([]*logproto.IndexQuery, 0, len(queries)) @@ -365,6 +475,14 @@ func (s *GatewayClient) clientDoQueries(ctx context.Context, gatewayQueries []*l // poolDo executes the given function for each Index Gateway instance in the ring mapping to the correct tenant in the index. 
// In case of callback failure, we'll try another member of the ring for that tenant ID. func (s *GatewayClient) poolDo(ctx context.Context, callback func(client logproto.IndexGatewayClient) error) error { + return s.poolDoWithStrategy(ctx, callback, func(error) bool { return true }) +} + +func (s *GatewayClient) poolDoWithStrategy( + ctx context.Context, + callback func(client logproto.IndexGatewayClient) error, + shouldRetry func(error) bool, +) error { userID, err := tenant.TenantID(ctx) if err != nil { return errors.Wrap(err, "index gateway client get tenant ID") @@ -395,6 +513,10 @@ func (s *GatewayClient) poolDo(ctx context.Context, callback func(client logprot if err := callback(client); err != nil { lastErr = err level.Error(s.logger).Log("msg", fmt.Sprintf("client do failed for instance %s", addr), "err", err) + + if !shouldRetry(err) { + return err + } continue } diff --git a/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client_test.go b/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client_test.go index 0ec6e81c17754..1dd1bff4abf04 100644 --- a/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client_test.go +++ b/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client_test.go @@ -20,12 +20,12 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/util/constants" + 
"github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/gatewayclient/index_gateway_grpc_pool.go b/pkg/storage/stores/shipper/indexshipper/gatewayclient/index_gateway_grpc_pool.go index a769bb55c11e6..825809a252f22 100644 --- a/pkg/storage/stores/shipper/indexshipper/gatewayclient/index_gateway_grpc_pool.go +++ b/pkg/storage/stores/shipper/indexshipper/gatewayclient/index_gateway_grpc_pool.go @@ -7,7 +7,7 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // IndexGatewayGRPCPool represents a pool of gRPC connections to different index gateway instances. diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/config.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/config.go index eb5c134a5de18..e27af9516de07 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/config.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/config.go @@ -6,7 +6,7 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/util/ring" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go index 161323defd9a1..99b79bd9922a7 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go @@ -3,29 +3,35 @@ package indexgateway import ( "context" "fmt" + "math" "sort" "sync" + "github.com/c2h5oh/datasize" "github.com/go-kit/log" "github.com/go-kit/log/level" "github.com/grafana/dskit/services" "github.com/grafana/dskit/tenant" + "github.com/opentracing/opentracing-go" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - 
"github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - seriesindex "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + seriesindex "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + tsdb_index "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( @@ -59,6 +65,7 @@ type Gateway struct { indexQuerier IndexQuerier indexClients []IndexClientWithRange bloomQuerier BloomQuerier + metrics *Metrics cfg Config log log.Logger @@ -68,13 +75,14 @@ type Gateway struct { // // In case it is configured to be in ring mode, a Basic Service wrapping the ring client is started. // Otherwise, it starts an Idle Service that doesn't have lifecycle hooks. 
-func NewIndexGateway(cfg Config, log log.Logger, _ prometheus.Registerer, indexQuerier IndexQuerier, indexClients []IndexClientWithRange, bloomQuerier BloomQuerier) (*Gateway, error) { +func NewIndexGateway(cfg Config, log log.Logger, r prometheus.Registerer, indexQuerier IndexQuerier, indexClients []IndexClientWithRange, bloomQuerier BloomQuerier) (*Gateway, error) { g := &Gateway{ indexQuerier: indexQuerier, bloomQuerier: bloomQuerier, cfg: cfg, log: log, indexClients: indexClients, + metrics: NewMetrics(r), } // query newer periods first @@ -195,7 +203,7 @@ func buildResponses(query seriesindex.Query, batch seriesindex.ReadBatchResult, return nil } -func (g *Gateway) GetChunkRef(ctx context.Context, req *logproto.GetChunkRefRequest) (*logproto.GetChunkRefResponse, error) { +func (g *Gateway) GetChunkRef(ctx context.Context, req *logproto.GetChunkRefRequest) (result *logproto.GetChunkRefResponse, err error) { instanceID, err := tenant.TenantID(ctx) if err != nil { return nil, err @@ -211,7 +219,7 @@ func (g *Gateway) GetChunkRef(ctx context.Context, req *logproto.GetChunkRefRequ return nil, err } - result := &logproto.GetChunkRefResponse{ + result = &logproto.GetChunkRefResponse{ Refs: make([]*logproto.ChunkRef, 0, len(chunks)), } for _, cs := range chunks { @@ -221,6 +229,12 @@ func (g *Gateway) GetChunkRef(ctx context.Context, req *logproto.GetChunkRefRequ } initialChunkCount := len(result.Refs) + defer func() { + if err == nil { + g.metrics.preFilterChunks.WithLabelValues(routeChunkRefs).Observe(float64(initialChunkCount)) + g.metrics.postFilterChunks.WithLabelValues(routeChunkRefs).Observe(float64(len(result.Refs))) + } + }() // Return unfiltered results if there is no bloom querier (Bloom Gateway disabled) if g.bloomQuerier == nil { @@ -340,6 +354,288 @@ func (g *Gateway) GetVolume(ctx context.Context, req *logproto.VolumeRequest) (* return g.indexQuerier.Volume(ctx, instanceID, req.From, req.Through, req.GetLimit(), req.TargetLabels, req.AggregateBy, 
matchers...) } +func (g *Gateway) GetShards(request *logproto.ShardsRequest, server logproto.IndexGateway_GetShardsServer) error { + ctx := server.Context() + sp, ctx := opentracing.StartSpanFromContext(ctx, "indexgateway.GetShards") + defer sp.Finish() + + instanceID, err := tenant.TenantID(ctx) + if err != nil { + return err + } + + p, err := ExtractShardRequestMatchersAndAST(request.Query) + if err != nil { + return err + } + + // Shards were requested, but blooms are not enabled or cannot be used due to lack of filters. + // That's ok; we can still return shard ranges without filtering + // which will be more effective than guessing power-of-2 shard ranges. + forSeries, ok := g.indexQuerier.HasForSeries(request.From, request.Through) + if g.bloomQuerier == nil || len(syntax.ExtractLineFilters(p.Plan().AST)) == 0 || !ok { + shards, err := g.indexQuerier.GetShards( + ctx, + instanceID, + request.From, request.Through, + request.TargetBytesPerShard, + p, + ) + + if err != nil { + return err + } + + return server.Send(shards) + } + + return g.getShardsWithBlooms(ctx, request, server, instanceID, p, forSeries) +} + +// getShardsWithBlooms is a helper function to get shards with blooms enabled. +func (g *Gateway) getShardsWithBlooms( + ctx context.Context, + req *logproto.ShardsRequest, + server logproto.IndexGateway_GetShardsServer, + instanceID string, + p chunk.Predicate, + forSeries sharding.ForSeries, +) error { + // TODO(owen-d): instead of using GetChunks which buffers _all_ the chunks + // (expensive when looking at the full fingerprint space), we should + // use the `ForSeries` implementation to accumulate batches of chunks to dedupe, + // but I'm leaving this as a future improvement. This may be difficult considering + // fingerprints aren't necessarily iterated in order because multiple underlying TSDBs + // can be queried independently. This could also result in the same chunks being present in + // multiple batches. 
However, this is all OK because we can dedupe them post-blooms and in + // many cases the majority of chunks will only be present in a single post-compacted TSDB, + // making this more of an edge case than a common occurrence (make sure to check this assumption + // as getting it _very_ wrong could harm some cache locality benefits on the bloom-gws by + // sending multiple requests to the entire keyspace). + + logger := log.With(g.log, "tenant", instanceID) + sp, ctx := opentracing.StartSpanFromContext(ctx, "indexgateway.getShardsWithBlooms") + defer sp.Finish() + + // 1) for all bounds, get chunk refs + grps, _, err := g.indexQuerier.GetChunks(ctx, instanceID, req.From, req.Through, p) + if err != nil { + return err + } + + var ct int + for _, g := range grps { + ct += len(g) + } + // TODO(owen-d): pool + refs := make([]*logproto.ChunkRef, 0, ct) + + for _, cs := range grps { + for j := range cs { + refs = append(refs, &cs[j].ChunkRef) + } + } + + // 2) filter via blooms + filtered, err := g.bloomQuerier.FilterChunkRefs(ctx, instanceID, req.From, req.Through, refs, p.Plan()) + if err != nil { + return err + } + g.metrics.preFilterChunks.WithLabelValues(routeShards).Observe(float64(ct)) + g.metrics.postFilterChunks.WithLabelValues(routeShards).Observe(float64(len(filtered))) + + statistics := stats.Result{ + Index: stats.Index{ + TotalChunks: int64(ct), + PostFilterChunks: int64(len(filtered)), + }, + } + + resp := &logproto.ShardsResponse{ + Statistics: statistics, + } + + // Edge case: if there are no chunks after filtering, we still need to return a single shard + if len(filtered) == 0 { + resp.Shards = []logproto.Shard{ + { + Bounds: logproto.FPBounds{Min: 0, Max: math.MaxUint64}, + Stats: &logproto.IndexStatsResponse{}, + }, + } + } else { + shards, err := accumulateChunksToShards(ctx, instanceID, forSeries, req, p, filtered) + if err != nil { + return err + } + resp.Shards = shards + } + + sp.LogKV("msg", "send shards response", "shards", len(resp.Shards)) + 
+ level.Debug(logger).Log( + "msg", "send shards response", + "total_chunks", statistics.Index.TotalChunks, + "post_filter_chunks", statistics.Index.PostFilterChunks, + "shards", len(resp.Shards), + "query", req.Query, + "target_bytes_per_shard", datasize.ByteSize(req.TargetBytesPerShard).HumanReadable(), + ) + + // 3) build shards + return server.Send(resp) +} + +// ExtractShardRequestMatchersAndAST extracts the matchers and AST from a query string. +// It errors if there is more than one matcher group in the AST as this is supposed to be +// split out during query planning before reaching this point. +func ExtractShardRequestMatchersAndAST(query string) (chunk.Predicate, error) { + expr, err := syntax.ParseExpr(query) + if err != nil { + return chunk.Predicate{}, err + } + + ms, err := syntax.MatcherGroups(expr) + if err != nil { + return chunk.Predicate{}, err + } + + var matchers []*labels.Matcher + switch len(ms) { + case 0: + // nothing to do + case 1: + matchers = ms[0].Matchers + default: + return chunk.Predicate{}, fmt.Errorf( + "multiple matcher groups are not supported in GetShards. 
This is likely an internal bug as binary operations should be dispatched separately in planning", + ) + } + + return chunk.NewPredicate(matchers, &plan.QueryPlan{ + AST: expr, + }), nil +} + +// TODO(owen-d): consider extending index impl to support returning chunkrefs _with_ sizing info +// TODO(owen-d): perf, this is expensive :( +func accumulateChunksToShards( + ctx context.Context, + user string, + forSeries sharding.ForSeries, + req *logproto.ShardsRequest, + p chunk.Predicate, + filtered []*logproto.ChunkRef, +) ([]logproto.Shard, error) { + // map for looking up post-filtered chunks in O(n) while iterating the index again for sizing info + filteredM := make(map[model.Fingerprint][]refWithSizingInfo, 1024) + for _, ref := range filtered { + x := refWithSizingInfo{ref: ref} + filteredM[model.Fingerprint(ref.Fingerprint)] = append(filteredM[model.Fingerprint(ref.Fingerprint)], x) + } + + var mtx sync.Mutex + + if err := forSeries.ForSeries( + ctx, + user, + v1.NewBounds(filtered[0].FingerprintModel(), filtered[len(filtered)-1].FingerprintModel()), + req.From, req.Through, + func(l labels.Labels, fp model.Fingerprint, chks []tsdb_index.ChunkMeta) (stop bool) { + // check if this is a fingerprint we need + if _, ok := filteredM[fp]; !ok { + return false + } + mtx.Lock() + defer mtx.Unlock() + + filteredChks := filteredM[fp] + var j int + + outer: + for i := range filteredChks { + for j < len(chks) { + switch filteredChks[i].Cmp(chks[j]) { + case v1.Less: + // this chunk is not in the queried index, continue checking other chunks + continue outer + case v1.Greater: + // next chunk in index but didn't pass filter; continue + j++ + continue + case v1.Eq: + // a match; set the sizing info + filteredChks[i].KB = chks[j].KB + filteredChks[i].Entries = chks[j].Entries + j++ + continue outer + } + } + + // we've finished this index's chunks; no need to keep checking filtered chunks + break + } + + return false + }, + p.Matchers..., + ); err != nil { + return nil, err + } 
+ + collectedSeries := sharding.SizedFPs(sharding.SizedFPsPool.Get(len(filteredM))) + defer sharding.SizedFPsPool.Put(collectedSeries) + + for fp, chks := range filteredM { + x := sharding.SizedFP{Fp: fp} + x.Stats.Chunks = uint64(len(chks)) + + for _, chk := range chks { + x.Stats.Entries += uint64(chk.Entries) + x.Stats.Bytes += uint64(chk.KB << 10) + } + collectedSeries = append(collectedSeries, x) + } + sort.Sort(collectedSeries) + + return collectedSeries.ShardsFor(req.TargetBytesPerShard), nil +} + +type refWithSizingInfo struct { + ref *logproto.ChunkRef + KB uint32 + Entries uint32 +} + +// careful: only checks from,through,checksum +func (r refWithSizingInfo) Cmp(chk tsdb_index.ChunkMeta) v1.Ord { + ref := *r.ref + chkFrom := model.Time(chk.MinTime) + if ref.From != chkFrom { + if ref.From < chkFrom { + return v1.Less + } + return v1.Greater + } + + chkThrough := model.Time(chk.MaxTime) + if ref.Through != chkThrough { + if ref.Through < chkThrough { + return v1.Less + } + return v1.Greater + } + + if ref.Checksum != chk.Checksum { + if ref.Checksum < chk.Checksum { + return v1.Less + } + return v1.Greater + } + + return v1.Eq +} + type failingIndexClient struct{} func (f failingIndexClient) QueryPages(_ context.Context, _ []seriesindex.Query, _ seriesindex.QueryPagesCallback) error { diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway_test.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway_test.go index 48bb4d2c8c383..bf6af7cb5110a 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway_test.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway_test.go @@ -14,12 +14,16 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - util_test "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" - 
util_math "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/logproto" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + tsdb_index "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + util_test "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" + util_math "github.com/grafana/loki/v3/pkg/util/math" ) const ( @@ -284,3 +288,305 @@ func (i *indexQuerierMock) Volume(_ context.Context, userID string, from, throug return args.Get(0).(*logproto.VolumeResponse), args.Error(1) } + +// Tests for various cases of the `refWithSizingInfo.Cmp` function +func TestRefWithSizingInfo(t *testing.T) { + for _, tc := range []struct { + desc string + a refWithSizingInfo + b tsdb_index.ChunkMeta + exp v1.Ord + }{ + { + desc: "less by from", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + From: 1, + }, + }, + b: tsdb_index.ChunkMeta{ + MinTime: 2, + }, + exp: v1.Less, + }, + { + desc: "eq by from", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + From: 1, + }, + }, + b: tsdb_index.ChunkMeta{ + MinTime: 1, + }, + exp: v1.Eq, + }, + { + desc: "gt by from", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + From: 2, + }, + }, + b: tsdb_index.ChunkMeta{ + MinTime: 1, + }, + exp: v1.Greater, + }, + { + desc: "less by through", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + Through: 1, + }, + }, + b: tsdb_index.ChunkMeta{ + MaxTime: 2, + }, + exp: v1.Less, + }, + { + desc: "eq by through", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + Through: 2, + }, + }, + b: tsdb_index.ChunkMeta{ + MaxTime: 2, + }, + exp: v1.Eq, + }, + { + desc: "gt by through", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + Through: 2, + }, + }, + b: 
tsdb_index.ChunkMeta{ + MaxTime: 1, + }, + exp: v1.Greater, + }, + { + desc: "less by checksum", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + Checksum: 1, + }, + }, + b: tsdb_index.ChunkMeta{ + Checksum: 2, + }, + exp: v1.Less, + }, + { + desc: "eq by checksum", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + Checksum: 2, + }, + }, + b: tsdb_index.ChunkMeta{ + Checksum: 2, + }, + exp: v1.Eq, + }, + { + desc: "gt by checksum", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + Checksum: 2, + }, + }, + b: tsdb_index.ChunkMeta{ + Checksum: 1, + }, + exp: v1.Greater, + }, + } { + t.Run(tc.desc, func(t *testing.T) { + require.Equal(t, tc.exp, tc.a.Cmp(tc.b)) + }) + } +} + +// TODO(owen-d): more testing for specific cases +func TestAccumulateChunksToShards(t *testing.T) { + // only check eq by checksum for convenience -- we're not testing the comparison function here + mkRef := func(fp model.Fingerprint, checksum uint32) *logproto.ChunkRef { + return &logproto.ChunkRef{ + Fingerprint: uint64(fp), + Checksum: checksum, + } + } + + sized := func(ref *logproto.ChunkRef, kb, entries uint32) refWithSizingInfo { + return refWithSizingInfo{ + ref: ref, + KB: kb, + Entries: entries, + } + + } + + fsImpl := func(series [][]refWithSizingInfo) sharding.ForSeriesFunc { + return sharding.ForSeriesFunc( + func( + ctx context.Context, + _ string, + _ tsdb_index.FingerprintFilter, + _, _ model.Time, + fn func( + _ labels.Labels, + fp model.Fingerprint, + chks []tsdb_index.ChunkMeta, + ) (stop bool), matchers ...*labels.Matcher) error { + + for _, s := range series { + chks := []tsdb_index.ChunkMeta{} + for _, r := range s { + chks = append(chks, tsdb_index.ChunkMeta{ + Checksum: r.ref.Checksum, + KB: r.KB, + Entries: r.Entries, + }) + } + + if stop := fn(nil, s[0].ref.FingerprintModel(), chks); stop { + return nil + } + } + return nil + }, + ) + } + + filtered := []*logproto.ChunkRef{ + // shard 0 + mkRef(1, 0), + mkRef(1, 1), + mkRef(1, 2), + + // shard 1 + mkRef(2, 10), 
+ mkRef(2, 20), + mkRef(2, 30), + + // shard 2 split across multiple series + mkRef(3, 10), + mkRef(4, 10), + mkRef(4, 20), + + // last shard contains leftovers + skip a few fps in between + mkRef(7, 10), + } + + series := [][]refWithSizingInfo{ + { + // first series creates one shard since a shard can't contain partial series. + // no chunks were filtered out + sized(mkRef(1, 0), 100, 1), + sized(mkRef(1, 1), 100, 1), + sized(mkRef(1, 2), 100, 1), + }, + { + // second shard also contains one series, but this series has chunks filtered out. + sized(mkRef(2, 0), 100, 1), // filtered out + sized(mkRef(2, 10), 100, 1), // included + sized(mkRef(2, 11), 100, 1), // filtered out + sized(mkRef(2, 20), 100, 1), // included + sized(mkRef(2, 21), 100, 1), // filtered out + sized(mkRef(2, 30), 100, 1), // included + sized(mkRef(2, 31), 100, 1), // filtered out + }, + + // third shard contains multiple series. + // combined they have 110kb, which is above the target of 100kb + // but closer than leaving the second series out which would create + // a shard with 50kb + { + // first series, 50kb + sized(mkRef(3, 10), 50, 1), // 50kb + sized(mkRef(3, 11), 50, 1), // 50kb, not included + }, + { + // second series + sized(mkRef(4, 10), 30, 1), // 30kb + sized(mkRef(4, 11), 30, 1), // 30kb, not included + sized(mkRef(4, 20), 30, 1), // 30kb + }, + + // Fourth shard contains a single series with 25kb, + // but iterates over non-included fp(s) before it + { + // register a series in the index which is not included in the filtered list + sized(mkRef(6, 10), 100, 1), // not included + sized(mkRef(6, 11), 100, 1), // not included + }, + { + // last shard contains leftovers + sized(mkRef(7, 10), 25, 1), + sized(mkRef(7, 11), 100, 1), // not included + }, + } + + shards, err := accumulateChunksToShards( + context.Background(), + "", + fsImpl(series), + &logproto.ShardsRequest{ + TargetBytesPerShard: 100 << 10, + }, + chunk.NewPredicate(nil, nil), // we're not checking matcher injection 
here + filtered, + ) + + exp := []logproto.Shard{ + { + Bounds: logproto.FPBounds{Min: 0, Max: 1}, + Stats: &logproto.IndexStatsResponse{ + Streams: 1, + Chunks: 3, + Entries: 3, + Bytes: 300 << 10, + }, + }, + { + Bounds: logproto.FPBounds{Min: 2, Max: 2}, + Stats: &logproto.IndexStatsResponse{ + Streams: 1, + Chunks: 3, + Entries: 3, + Bytes: 300 << 10, + }, + }, + { + Bounds: logproto.FPBounds{Min: 3, Max: 6}, + Stats: &logproto.IndexStatsResponse{ + Streams: 2, + Chunks: 3, + Entries: 3, + Bytes: 110 << 10, + }, + }, + { + Bounds: logproto.FPBounds{Min: 7, Max: math.MaxUint64}, + Stats: &logproto.IndexStatsResponse{ + Streams: 1, + Chunks: 1, + Entries: 1, + Bytes: 25 << 10, + }, + }, + } + + require.NoError(t, err) + + for i := range shards { + require.Equal(t, exp[i], shards[i], "invalid shard at index %d", i) + } + require.Equal(t, len(exp), len(shards)) + +} diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/grpc.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/grpc.go index de8edda70c08b..b2f6c16bda430 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/grpc.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/grpc.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type ServerInterceptors struct { diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/metrics.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/metrics.go new file mode 100644 index 0000000000000..6ea51e5623846 --- /dev/null +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/metrics.go @@ -0,0 +1,37 @@ +package indexgateway + +import ( + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" + + "github.com/grafana/loki/v3/pkg/util/constants" +) + +const ( + routeChunkRefs = "chunk_refs" + routeShards = "shards" 
+) + +type Metrics struct { + preFilterChunks *prometheus.HistogramVec + postFilterChunks *prometheus.HistogramVec +} + +func NewMetrics(r prometheus.Registerer) *Metrics { + return &Metrics{ + preFilterChunks: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Subsystem: "index_gateway", + Name: "prefilter_chunks", + Help: "Number of chunks before filtering", + Buckets: prometheus.ExponentialBuckets(1, 4, 10), + }, []string{"route"}), + postFilterChunks: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Subsystem: "index_gateway", + Name: "postfilter_chunks", + Help: "Number of chunks after filtering", + Buckets: prometheus.ExponentialBuckets(1, 4, 10), + }, []string{"route"}), + } +} diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/shufflesharding.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/shufflesharding.go index 50a1ae33b1390..07bd2e8aa97b7 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/shufflesharding.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/shufflesharding.go @@ -4,7 +4,7 @@ import ( "github.com/grafana/dskit/ring" "github.com/pkg/errors" - lokiring "github.com/grafana/loki/pkg/util/ring" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" ) var ( @@ -21,6 +21,7 @@ var ( type Limits interface { IndexGatewayShardSize(tenantID string) int + TSDBMaxBytesPerShard(string) int } type ShardingStrategy interface { diff --git a/pkg/storage/stores/shipper/indexshipper/shipper.go b/pkg/storage/stores/shipper/indexshipper/shipper.go index abfda3b733108..169f7eeb79fee 100644 --- a/pkg/storage/stores/shipper/indexshipper/shipper.go +++ b/pkg/storage/stores/shipper/indexshipper/shipper.go @@ -14,14 +14,14 @@ import ( "github.com/prometheus/client_golang/prometheus" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - 
"github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/downloads" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/gatewayclient" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/uploads" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/downloads" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/gatewayclient" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/uploads" ) type Mode string diff --git a/pkg/storage/stores/shipper/indexshipper/storage/cached_client.go b/pkg/storage/stores/shipper/indexshipper/storage/cached_client.go index f74c3ea8b4ac5..c7d909bc09844 100644 --- a/pkg/storage/stores/shipper/indexshipper/storage/cached_client.go +++ b/pkg/storage/stores/shipper/indexshipper/storage/cached_client.go @@ -11,9 +11,9 @@ import ( "github.com/go-kit/log/level" "golang.org/x/sync/singleflight" - "github.com/grafana/loki/pkg/storage/chunk/client" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/storage/cached_client_test.go b/pkg/storage/stores/shipper/indexshipper/storage/cached_client_test.go index 6e2c8a5def263..78c04bd4dc3eb 100644 --- 
a/pkg/storage/stores/shipper/indexshipper/storage/cached_client_test.go +++ b/pkg/storage/stores/shipper/indexshipper/storage/cached_client_test.go @@ -11,9 +11,9 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" ) var objectsMtime = time.Now().Local() diff --git a/pkg/storage/stores/shipper/indexshipper/storage/client.go b/pkg/storage/stores/shipper/indexshipper/storage/client.go index e8a3f30a2d825..d63504cd33090 100644 --- a/pkg/storage/stores/shipper/indexshipper/storage/client.go +++ b/pkg/storage/stores/shipper/indexshipper/storage/client.go @@ -7,7 +7,7 @@ import ( "strings" "time" - "github.com/grafana/loki/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" ) const delimiter = "/" diff --git a/pkg/storage/stores/shipper/indexshipper/storage/client_test.go b/pkg/storage/stores/shipper/indexshipper/storage/client_test.go index c9b7da78b3a3f..1f766e0113802 100644 --- a/pkg/storage/stores/shipper/indexshipper/storage/client_test.go +++ b/pkg/storage/stores/shipper/indexshipper/storage/client_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" ) func TestIndexStorageClient(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/storage/util_test.go b/pkg/storage/stores/shipper/indexshipper/storage/util_test.go index 3136d553c8d2c..330602821d78f 100644 --- a/pkg/storage/stores/shipper/indexshipper/storage/util_test.go +++ 
b/pkg/storage/stores/shipper/indexshipper/storage/util_test.go @@ -10,9 +10,9 @@ import ( gzip "github.com/klauspost/pgzip" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_GetFileFromStorage(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/table_client.go b/pkg/storage/stores/shipper/indexshipper/table_client.go index 2d7de63006a61..6492dd94943eb 100644 --- a/pkg/storage/stores/shipper/indexshipper/table_client.go +++ b/pkg/storage/stores/shipper/indexshipper/table_client.go @@ -3,10 +3,10 @@ package indexshipper import ( "context" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" ) type tableClient struct { diff --git a/pkg/storage/stores/shipper/indexshipper/table_client_test.go b/pkg/storage/stores/shipper/indexshipper/table_client_test.go index 0b76ab64f99c5..7b84b74beeef3 100644 --- a/pkg/storage/stores/shipper/indexshipper/table_client_test.go +++ b/pkg/storage/stores/shipper/indexshipper/table_client_test.go @@ -8,8 +8,8 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + 
"github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) func TestBoltDBShipperTableClient(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/testutil/testutil.go b/pkg/storage/stores/shipper/indexshipper/testutil/testutil.go index 6022128e36d49..48f5990dc0790 100644 --- a/pkg/storage/stores/shipper/indexshipper/testutil/testutil.go +++ b/pkg/storage/stores/shipper/indexshipper/testutil/testutil.go @@ -16,9 +16,9 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) func AddRecordsToDB(t testing.TB, path string, start, numRecords int, bucketName []byte) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/builder.go b/pkg/storage/stores/shipper/indexshipper/tsdb/builder.go index ee59debb2e078..0e7af08d4d243 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/builder.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/builder.go @@ -12,8 +12,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/storage" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // Builder is a helper used to create tsdb indices. 
diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/builder_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/builder_test.go index 539cdd57fc5a6..9ccf972151a15 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/builder_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/builder_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func Test_Build(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor.go b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor.go index 15ee7f1a1d675..5c2ae28d89351 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor.go @@ -14,13 +14,13 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - tsdbindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/compactor" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + tsdbindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) const readDBsConcurrency = 50 @@ -53,8 +53,9 @@ func (i indexProcessor) OpenCompactedIndexFile(ctx context.Context, path, tableN } builder := 
NewBuilder(indexFormat) - err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) { + err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, "", nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) (stop bool) { builder.AddSeries(lbls.Copy(), fp, chks) + return false }, labels.MustNewMatcher(labels.MatchEqual, "", "")) if err != nil { return nil, err @@ -212,8 +213,9 @@ func setupBuilder(ctx context.Context, indexType int, userID string, sourceIndex // add users index from multi-tenant indexes to the builder for _, idx := range multiTenantIndexes { - err := idx.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) { + err := idx.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, "", nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) (stop bool) { builder.AddSeries(withoutTenantLabel(lbls.Copy()), fp, chks) + return false }, withTenantLabelMatcher(userID, []*labels.Matcher{})...) 
if err != nil { return nil, err @@ -244,8 +246,9 @@ func setupBuilder(ctx context.Context, indexType int, userID string, sourceIndex } }() - err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) { + err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, "", nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) (stop bool) { builder.AddSeries(lbls.Copy(), fp, chks) + return false }, labels.MustNewMatcher(labels.MatchEqual, "", "")) if err != nil { return nil, err diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go index a2dfaa2271b6a..5f8a5b1e6d9d5 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go @@ -17,17 +17,17 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + 
"github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( @@ -609,8 +609,9 @@ func TestCompactor_Compact(t *testing.T) { require.NoError(t, err) actualChunks = map[string]index.ChunkMetas{} - err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(context.Background(), nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) { + err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(context.Background(), "", nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { actualChunks[lbls.String()] = chks + return false }, labels.MustNewMatcher(labels.MatchEqual, "", "")) require.NoError(t, err) @@ -823,8 +824,9 @@ func TestCompactedIndex(t *testing.T) { require.NoError(t, err) foundChunks := map[string]index.ChunkMetas{} - err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(context.Background(), nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) { + err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(context.Background(), "", nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { foundChunks[lbls.String()] = append(index.ChunkMetas{}, chks...) 
+ return false }, labels.MustNewMatcher(labels.MatchEqual, "", "")) require.NoError(t, err) diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/head.go b/pkg/storage/stores/shipper/indexshipper/tsdb/head.go index 4c841b3741462..00e4f86b9ad90 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/head.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/head.go @@ -22,7 +22,7 @@ import ( "github.com/prometheus/prometheus/storage" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) /* diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager.go b/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager.go index 7342fe851c577..5df88b04071e5 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager.go @@ -21,10 +21,10 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/util/wal" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/util/wal" ) /* @@ -800,6 +800,14 @@ func (t *tenantHeads) Volume(ctx context.Context, userID string, from, through m return idx.Volume(ctx, userID, from, through, acc, fpFilter, shouldIncludeChunk, targetLabels, aggregateBy, matchers...) 
} +func (t *tenantHeads) ForSeries(ctx context.Context, userID string, fpFilter index.FingerprintFilter, from model.Time, through model.Time, fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta) (stop bool), matchers ...*labels.Matcher) error { + idx, ok := t.tenantIndex(userID, from, through) + if !ok { + return nil + } + return idx.ForSeries(ctx, userID, fpFilter, from, through, fn, matchers...) +} + // helper only used in building TSDBs func (t *tenantHeads) forAll(fn func(user string, ls labels.Labels, fp uint64, chks index.ChunkMetas) error) error { for i, shard := range t.tenants { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager_test.go index c58e556457174..daa2e0bafa588 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager_test.go @@ -18,14 +18,14 @@ import ( "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/validation" ) type noopTSDBManager struct { diff --git 
a/pkg/storage/stores/shipper/indexshipper/tsdb/head_read.go b/pkg/storage/stores/shipper/indexshipper/tsdb/head_read.go index 203e951a435d5..3a0cf3cdbfc7d 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/head_read.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/head_read.go @@ -21,7 +21,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // Index returns an IndexReader against the block. diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal.go b/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal.go index 386067ff5dbb3..d1a3dcf2dc046 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal.go @@ -8,8 +8,8 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/prometheus/prometheus/tsdb/wlog" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/util/encoding" ) type WAL interface { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal_test.go index 28255a4a7ec88..5e9b5e7d505f8 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func Test_Encoding_Series(t *testing.T) { diff --git 
a/pkg/storage/stores/shipper/indexshipper/tsdb/index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index.go index bb294fb13f450..a60b86b6a6e00 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index.go @@ -6,8 +6,9 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) type Series struct { @@ -22,13 +23,22 @@ type ChunkRef struct { Checksum uint32 } -// Compares by (Start, End) +// Compares by (Fp, Start, End, checksum) // Assumes User is equivalent func (r ChunkRef) Less(x ChunkRef) bool { + if r.Fingerprint != x.Fingerprint { + return r.Fingerprint < x.Fingerprint + } + if r.Start != x.Start { return r.Start < x.Start } - return r.End <= x.End + + if r.End != x.End { + return r.End < x.End + } + + return r.Checksum < x.Checksum } type shouldIncludeChunk func(index.ChunkMeta) bool @@ -37,6 +47,7 @@ type Index interface { Bounded SetChunkFilterer(chunkFilter chunk.RequestChunkFilterer) Close() error + sharding.ForSeries // GetChunkRefs accepts an optional []ChunkRef argument. // If not nil, it will use that slice to build the result, // allowing us to avoid unnecessary allocations at the caller's discretion. 
@@ -84,3 +95,7 @@ func (NoopIndex) SetChunkFilterer(_ chunk.RequestChunkFilterer) {} func (NoopIndex) Volume(_ context.Context, _ string, _, _ model.Time, _ VolumeAccumulator, _ index.FingerprintFilter, _ shouldIncludeChunk, _ []string, _ string, _ ...*labels.Matcher) error { return nil } + +func (NoopIndex) ForSeries(_ context.Context, _ string, _ index.FingerprintFilter, _ model.Time, _ model.Time, _ func(labels.Labels, model.Fingerprint, []index.ChunkMeta) (stop bool), _ ...*labels.Matcher) error { + return nil +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk.go index c5f107478c538..8094e19af0c5e 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk.go @@ -5,8 +5,8 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/encoding" ) // Meta holds information about a chunk of data. 
diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk_test.go index 1eab509d489a4..a1f98f1ab71ae 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk_test.go @@ -8,7 +8,7 @@ import ( tsdb_enc "github.com/prometheus/prometheus/tsdb/encoding" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/util/encoding" ) // Test all sort variants diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index/index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index/index.go index 7aa429d367718..8d6f316acfa58 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index/index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index/index.go @@ -38,7 +38,7 @@ import ( tsdb_errors "github.com/prometheus/prometheus/tsdb/errors" "github.com/prometheus/prometheus/tsdb/fileutil" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/util/encoding" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index/index_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index/index_test.go index 6ef9ebee01105..2f8576b825649 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index/index_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index/index_test.go @@ -35,7 +35,7 @@ import ( tsdb_enc "github.com/prometheus/prometheus/tsdb/encoding" "github.com/prometheus/prometheus/util/testutil" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/util/encoding" ) func TestMain(m *testing.M) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index/shard.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index/shard.go index b188ebbcb24ed..e32d39f167f9b 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index/shard.go +++ 
b/pkg/storage/stores/shipper/indexshipper/tsdb/index/shard.go @@ -18,7 +18,10 @@ const ( var errDisallowedIdentityShard = errors.New("shard with factor of 1 is explicitly disallowed. It's equivalent to no sharding") type FingerprintFilter interface { + // TODO(owen-d): Match() is redundant and can be inferred from GetFromThrough() + // TODO(owen-d): GetFromThrough should just return FingerprintBounds as it's a better utility struct. Match(model.Fingerprint) bool + // GetFromThrough shows the [minimum, maximum) fingerprints. If there is no maximum, math.MaxUint64 may be used GetFromThrough() (model.Fingerprint, model.Fingerprint) } diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index_client.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index_client.go index d609dc0ed27f7..47d33fe632faf 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index_client.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index_client.go @@ -2,22 +2,27 @@ package tsdb import ( "context" + "sort" + "sync" "time" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/logql" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" "github.com/opentracing/opentracing-go" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/storage/chunk" + 
"github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util" ) // implements stores.Index @@ -33,6 +38,9 @@ type IndexClientOptions struct { // duplicates when chunks are written to multiple // index buckets, which is of use in the (index-gateway|querier) // but not worth the memory costs in the ingesters. + // NB(owen-d): This is NOT the bloom-filter feature developed late 2023 onwards, + // but a smaller bloom filter used internally for probabalistic deduping of series counts + // in the index stats() method across index buckets (which can have the same series) UseBloomFilters bool } @@ -65,6 +73,20 @@ func NewIndexClient(idx Index, opts IndexClientOptions, l Limits) *IndexClient { } } +func shardFromMatchers(matchers []*labels.Matcher) (cleaned []*labels.Matcher, res logql.Shard, found bool, err error) { + for i, matcher := range matchers { + if matcher.Name == astmapper.ShardLabel && matcher.Type == labels.MatchEqual { + shard, _, err := logql.ParseShard(matcher.Value) + if err != nil { + return nil, shard, true, err + } + return append(matchers[:i], matchers[i+1:]...), shard, true, nil + } + } + + return matchers, logql.Shard{}, false, nil +} + // TODO(owen-d): This is a hack for compatibility with how the current query-mapping works. // Historically, Loki will read the index shard factor and the query planner will inject shard // labels accordingly. 
@@ -74,32 +96,21 @@ func NewIndexClient(idx Index, opts IndexClientOptions, l Limits) *IndexClient { func cleanMatchers(matchers ...*labels.Matcher) ([]*labels.Matcher, index.FingerprintFilter, error) { // first use withoutNameLabel to make a copy with the name label removed matchers = withoutNameLabel(matchers) - s, shardLabelIndex, err := astmapper.ShardFromMatchers(matchers) + + matchers, shard, found, err := shardFromMatchers(matchers) if err != nil { return nil, nil, err } - var fpFilter index.FingerprintFilter - if s != nil { - matchers = append(matchers[:shardLabelIndex], matchers[shardLabelIndex+1:]...) - shard := index.ShardAnnotation{ - Shard: uint32(s.Shard), - Of: uint32(s.Of), - } - fpFilter = shard - - if err := shard.Validate(); err != nil { - return nil, nil, err - } - } - if len(matchers) == 0 { // hack to query all data matchers = append(matchers, labels.MustNewMatcher(labels.MatchEqual, "", "")) } - return matchers, fpFilter, err - + if found { + return matchers, &shard, nil + } + return matchers, nil, nil } // TODO(owen-d): synchronize logproto.ChunkRef and tsdb.ChunkRef so we don't have to convert. @@ -269,6 +280,45 @@ func (c *IndexClient) Volume(ctx context.Context, userID string, from, through m return acc.Volumes(), nil } +func (c *IndexClient) GetShards(ctx context.Context, userID string, from, through model.Time, targetBytesPerShard uint64, predicate chunk.Predicate) (*logproto.ShardsResponse, error) { + + // TODO(owen-d): perf, this is expensive :( + var mtx sync.Mutex + + m := make(map[model.Fingerprint]index.ChunkMetas, 1024) + if err := c.idx.ForSeries(ctx, userID, v1.FullBounds, from, through, func(_ labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { + mtx.Lock() + m[fp] = append(m[fp], chks...) 
+ mtx.Unlock() + return false + }, predicate.Matchers...); err != nil { + return nil, err + } + + resp := &logproto.ShardsResponse{} + + series := sharding.SizedFPs(sharding.SizedFPsPool.Get(len(m))) + defer sharding.SizedFPsPool.Put(series) + + for fp, chks := range m { + x := sharding.SizedFP{Fp: fp} + deduped := chks.Finalize() + x.Stats.Chunks = uint64(len(deduped)) + resp.Statistics.Index.TotalChunks += int64(len(deduped)) + + for _, chk := range deduped { + x.Stats.Entries += uint64(chk.Entries) + x.Stats.Bytes += uint64(chk.KB << 10) + } + + series = append(series, x) + } + sort.Sort(series) + resp.Shards = series.ShardsFor(targetBytesPerShard) + + return resp, nil +} + // SetChunkFilterer sets a chunk filter to be used when retrieving chunks. // This is only used for GetSeries implementation. // Todo we might want to pass it as a parameter to GetSeries instead. @@ -293,3 +343,7 @@ func withoutNameLabel(matchers []*labels.Matcher) []*labels.Matcher { return dst } + +func (c *IndexClient) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return c.idx, true +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index_client_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index_client_test.go index 596e53e62009f..c823a61528286 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index_client_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index_client_test.go @@ -11,10 +11,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + shipperindex 
"github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" ) type mockIndexShipperIndexIterator struct { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go index acace60c1e4b2..b0d1824936d59 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - tsdbindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + tsdbindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) type indexShipperIterator interface { @@ -134,31 +134,46 @@ func (i *indexShipperQuerier) Volume(ctx context.Context, userID string, from, t return idx.Volume(ctx, userID, from, through, acc, fpFilter, shouldIncludeChunk, targetLabels, aggregateBy, matchers...) } -type resultAccumulator struct { +func (i *indexShipperQuerier) ForSeries(ctx context.Context, userID string, fpFilter tsdbindex.FingerprintFilter, from, through model.Time, fn func(labels.Labels, model.Fingerprint, []tsdbindex.ChunkMeta) (stop bool), matchers ...*labels.Matcher) error { + idx, err := i.indices(ctx, from, through, userID) + if err != nil { + return err + } + + return idx.ForSeries(ctx, userID, fpFilter, from, through, fn, matchers...) 
+} + +type resultAccumulator[T any] struct { mtx sync.Mutex - items []interface{} - merge func(xs []interface{}) (interface{}, error) + items []T + merge func(xs []T) (T, error) } -func newResultAccumulator(merge func(xs []interface{}) (interface{}, error)) *resultAccumulator { - return &resultAccumulator{ +// TODO(owen-d): make generic to avoid casting at runtime. +func newResultAccumulator[T any](merge func(xs []T) (T, error)) *resultAccumulator[T] { + return &resultAccumulator[T]{ merge: merge, } } -func (acc *resultAccumulator) Add(item interface{}) { +func (acc *resultAccumulator[T]) Add(item T) { acc.mtx.Lock() defer acc.mtx.Unlock() acc.items = append(acc.items, item) } -func (acc *resultAccumulator) Merge() (interface{}, error) { +func (acc *resultAccumulator[T]) Merge() (res T, err error) { acc.mtx.Lock() defer acc.mtx.Unlock() - if len(acc.items) == 0 { - return nil, ErrEmptyAccumulator + ln := len(acc.items) + if ln == 0 { + return res, ErrEmptyAccumulator + } + + if ln == 1 { + return acc.items[0], nil } return acc.merge(acc.items) diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/lazy_index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/lazy_index.go index 327566f1a0ecc..ad3fb3b086200 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/lazy_index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/lazy_index.go @@ -6,8 +6,8 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // Index adapter for a function which returns an index when queried. 
@@ -80,3 +80,11 @@ func (f LazyIndex) Volume(ctx context.Context, userID string, from, through mode } return i.Volume(ctx, userID, from, through, acc, fpFilter, shouldIncludeChunk, targetLabels, aggregateBy, matchers...) } + +func (f LazyIndex) ForSeries(ctx context.Context, userID string, fpFilter index.FingerprintFilter, from model.Time, through model.Time, fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta) (stop bool), matchers ...*labels.Matcher) error { + i, err := f() + if err != nil { + return err + } + return i.ForSeries(ctx, userID, fpFilter, from, through, fn, matchers...) +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/manager.go b/pkg/storage/stores/shipper/indexshipper/tsdb/manager.go index 78ef447169ccd..0fed45d3252ee 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/manager.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/manager.go @@ -15,10 +15,10 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // nolint:revive diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index.go index 08bf6bf4ff01e..0c6044843026b 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index.go @@ -4,14 +4,15 @@ import ( "context" "math" "runtime" + "sort" "sync" "github.com/prometheus/common/model" 
"github.com/prometheus/prometheus/model/labels" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) type MultiIndex struct { @@ -132,7 +133,7 @@ func (i *MultiIndex) forMatchingIndices(ctx context.Context, from, through model } func (i *MultiIndex) GetChunkRefs(ctx context.Context, userID string, from, through model.Time, res []ChunkRef, fpFilter index.FingerprintFilter, matchers ...*labels.Matcher) ([]ChunkRef, error) { - acc := newResultAccumulator(func(xs []interface{}) (interface{}, error) { + acc := newResultAccumulator(func(xs [][]ChunkRef) ([]ChunkRef, error) { if res == nil { res = ChunkRefsPool.Get() } @@ -143,9 +144,12 @@ func (i *MultiIndex) GetChunkRefs(ctx context.Context, userID string, from, thro // TODO(owen-d): Do this more efficiently, // not all indices overlap each other + // TODO(owen-d): loser-tree or some other heap? 
+ for _, group := range xs { - g := group.([]ChunkRef) + g := group for _, ref := range g { + _, ok := seen[ref] if ok { continue @@ -154,9 +158,10 @@ func (i *MultiIndex) GetChunkRefs(ctx context.Context, userID string, from, thro res = append(res, ref) } ChunkRefsPool.Put(g) - } + sort.Slice(res, func(i, j int) bool { return res[i].Less(res[j]) }) + return res, nil }) @@ -183,12 +188,12 @@ func (i *MultiIndex) GetChunkRefs(ctx context.Context, userID string, from, thro } return nil, err } - return merged.([]ChunkRef), nil + return merged, nil } func (i *MultiIndex) Series(ctx context.Context, userID string, from, through model.Time, res []Series, fpFilter index.FingerprintFilter, matchers ...*labels.Matcher) ([]Series, error) { - acc := newResultAccumulator(func(xs []interface{}) (interface{}, error) { + acc := newResultAccumulator(func(xs [][]Series) ([]Series, error) { if res == nil { res = SeriesPool.Get() } @@ -197,7 +202,7 @@ func (i *MultiIndex) Series(ctx context.Context, userID string, from, through mo seen := make(map[model.Fingerprint]struct{}) for _, x := range xs { - seriesSet := x.([]Series) + seriesSet := x for _, s := range seriesSet { _, ok := seen[s.Fingerprint] if ok { @@ -235,17 +240,17 @@ func (i *MultiIndex) Series(ctx context.Context, userID string, from, through mo } return nil, err } - return merged.([]Series), nil + return merged, nil } func (i *MultiIndex) LabelNames(ctx context.Context, userID string, from, through model.Time, matchers ...*labels.Matcher) ([]string, error) { - acc := newResultAccumulator(func(xs []interface{}) (interface{}, error) { + acc := newResultAccumulator(func(xs [][]string) ([]string, error) { var ( maxLn int // maximum number of lNames, assuming no duplicates lists [][]string ) for _, group := range xs { - x := group.([]string) + x := group maxLn += len(x) lists = append(lists, x) } @@ -293,17 +298,17 @@ func (i *MultiIndex) LabelNames(ctx context.Context, userID string, from, throug } return nil, err } - 
return merged.([]string), nil + return merged, nil } func (i *MultiIndex) LabelValues(ctx context.Context, userID string, from, through model.Time, name string, matchers ...*labels.Matcher) ([]string, error) { - acc := newResultAccumulator(func(xs []interface{}) (interface{}, error) { + acc := newResultAccumulator(func(xs [][]string) ([]string, error) { var ( maxLn int // maximum number of lValues, assuming no duplicates lists [][]string ) for _, group := range xs { - x := group.([]string) + x := group maxLn += len(x) lists = append(lists, x) } @@ -351,7 +356,7 @@ func (i *MultiIndex) LabelValues(ctx context.Context, userID string, from, throu } return nil, err } - return merged.([]string), nil + return merged, nil } func (i *MultiIndex) Stats(ctx context.Context, userID string, from, through model.Time, acc IndexStatsAccumulator, fpFilter index.FingerprintFilter, shouldIncludeChunk shouldIncludeChunk, matchers ...*labels.Matcher) error { @@ -365,3 +370,9 @@ func (i *MultiIndex) Volume(ctx context.Context, userID string, from, through mo return idx.Volume(ctx, userID, from, through, acc, fpFilter, shouldIncludeChunk, targetLabels, aggregateBy, matchers...) }) } + +func (i MultiIndex) ForSeries(ctx context.Context, userID string, fpFilter index.FingerprintFilter, from model.Time, through model.Time, fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta) (stop bool), matchers ...*labels.Matcher) error { + return i.forMatchingIndices(ctx, from, through, func(ctx context.Context, idx Index) error { + return idx.ForSeries(ctx, userID, fpFilter, from, through, fn, matchers...) 
+ }) +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index_test.go index 945402f954f5b..8139c52b39fc0 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index_test.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func TestMultiIndex(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/multitenant.go b/pkg/storage/stores/shipper/indexshipper/tsdb/multitenant.go index ec582b6e21489..403443a805c0d 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/multitenant.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/multitenant.go @@ -7,8 +7,8 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // TenantLabel is part of the reserved label namespace (__ prefix) @@ -96,3 +96,7 @@ func (m *MultiTenantIndex) Stats(ctx context.Context, userID string, from, throu func (m *MultiTenantIndex) Volume(ctx context.Context, userID string, from, through model.Time, acc VolumeAccumulator, fpFilter index.FingerprintFilter, shouldIncludeChunk shouldIncludeChunk, targetLabels []string, aggregateBy string, matchers ...*labels.Matcher) error { return m.idx.Volume(ctx, userID, from, through, acc, fpFilter, shouldIncludeChunk, targetLabels, aggregateBy, withTenantLabelMatcher(userID, matchers)...) 
} + +func (m *MultiTenantIndex) ForSeries(ctx context.Context, userID string, fpFilter index.FingerprintFilter, from, through model.Time, fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta) (stop bool), matchers ...*labels.Matcher) error { + return m.idx.ForSeries(ctx, userID, fpFilter, from, through, fn, withTenantLabelMatcher(userID, matchers)...) +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/pool.go b/pkg/storage/stores/shipper/indexshipper/tsdb/pool.go index f5a009d37bc12..fd1ad97587638 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/pool.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/pool.go @@ -3,7 +3,7 @@ package tsdb import ( "sync" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) var ( diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/querier.go b/pkg/storage/stores/shipper/indexshipper/tsdb/querier.go index b1e3306b14d1f..48de47a70c3b4 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/querier.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/querier.go @@ -22,7 +22,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // Bitmap used by func isRegexMetaCharacter to check whether a character needs to be escaped. 
diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/querier_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/querier_test.go index a0873faeb6a20..a3c5caf5b81c4 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/querier_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/querier_test.go @@ -10,8 +10,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func mustParseLabels(s string) labels.Labels { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/for_series.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/for_series.go new file mode 100644 index 0000000000000..bca81214dc9ce --- /dev/null +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/for_series.go @@ -0,0 +1,66 @@ +package sharding + +import ( + "context" + + "github.com/prometheus/common/model" + "github.com/prometheus/prometheus/model/labels" + + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" +) + +// General purpose iteration over series. Makes it easier to build custom functionality on top of indices +// of different types without them all implementing the same feature. +// The passed callback must _not_ capture its arguments. They're reused for each call for performance. +// The passed callback may be executed concurrently, +// so any shared state must be protected by the caller. +// NB: This is a low-level API and should be used with caution. +// NB: It's possible for the callback to be called multiple times for the same series but possibly different chunks, +// such as when the Index is backed by multiple files with the same series present. 
+// NB(owen-d): mainly in this package to avoid circular dependencies elsewhere +type ForSeries interface { + ForSeries( + ctx context.Context, + userID string, + fpFilter index.FingerprintFilter, + from model.Time, + through model.Time, + fn func( + labels.Labels, + model.Fingerprint, + []index.ChunkMeta, + ) (stop bool), + matchers ...*labels.Matcher, + ) error +} + +// function Adapter for ForSeries implementation +type ForSeriesFunc func( + ctx context.Context, + userID string, + fpFilter index.FingerprintFilter, + from model.Time, + through model.Time, + fn func( + labels.Labels, + model.Fingerprint, + []index.ChunkMeta, + ) (stop bool), + matchers ...*labels.Matcher, +) error + +func (f ForSeriesFunc) ForSeries( + ctx context.Context, + userID string, + fpFilter index.FingerprintFilter, + from model.Time, + through model.Time, + fn func( + labels.Labels, + model.Fingerprint, + []index.ChunkMeta, + ) (stop bool), + matchers ...*labels.Matcher, +) error { + return f(ctx, userID, fpFilter, from, through, fn, matchers...) +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power.go new file mode 100644 index 0000000000000..257c198ee2d75 --- /dev/null +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power.go @@ -0,0 +1,117 @@ +package sharding + +import ( + "math" + + "github.com/prometheus/common/model" + + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" +) + +const ( + DefaultTSDBMaxBytesPerShard = 600 << 20 // 600MB +) + +// PowerOfTwoSharding is a slimmed down legacy sharding implementation +// designed for use as a fallback when the newer impls aren't found +// (i.e. 
during a system upgrade to support the new impl) +type PowerOfTwoSharding struct { + MaxShards int +} + +func (p PowerOfTwoSharding) ShardsFor(bytes uint64, maxBytesPerShard uint64) []logproto.Shard { + factor := GuessShardFactor(bytes, maxBytesPerShard, p.MaxShards) + + if factor < 2 { + return []logproto.Shard{{ + Bounds: logproto.FPBounds{ + Min: 0, + Max: math.MaxUint64, + }, + Stats: &stats.Stats{ + Bytes: bytes, + }, + }} + } + + return LinearShards(factor, bytes) + +} + +// LinearShards is a sharding implementation that splits the data into +// equal sized shards covering the entire keyspace. It populates +// the `bytes` of each shard's stats with a proportional estimation +func LinearShards(n int, bytes uint64) []logproto.Shard { + if n < 2 { + return []logproto.Shard{ + { + Bounds: logproto.FPBounds{ + Min: 0, + Max: math.MaxUint64, + }, + Stats: &stats.Stats{ + Bytes: bytes, + }, + }, + } + } + + bytesPerShard := bytes / uint64(n) + fpPerShard := model.Fingerprint(math.MaxUint64) / model.Fingerprint(n) + + shards := make([]logproto.Shard, n) + for i := range shards { + shards[i] = logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: model.Fingerprint(i) * fpPerShard, + Max: model.Fingerprint(i+1) * fpPerShard, + }, + Stats: &stats.Stats{ + Bytes: bytesPerShard, + }, + } + + // The last shard should have the remainder of the bytes + // and the max bound should be math.MaxUint64 + // NB(owen-d): this can only happen when maxShards is used + // and the maxShards isn't a factor of 2 + shards[len(shards)-1].Stats.Bytes += bytes % uint64(n) + shards[len(shards)-1].Bounds.Max = math.MaxUint64 + } + + return shards + +} + +// Since we shard by powers of two and we increase shard factor +// once each shard surpasses maxBytesPerShard, if the shard factor +// is at least two, the range of data per shard is (maxBytesPerShard/2, maxBytesPerShard] +// For instance, for a maxBytesPerShard of 500MB and a query touching 1000MB, we split into two shards of 500MB. 
+// If there are 1004MB, we split into four shards of 251MB. +func GuessShardFactor(bytes, maxBytesPerShard uint64, maxShards int) int { + // If maxBytesPerShard is 0, we use the default value + // to avoid division by zero + if maxBytesPerShard < 1 { + maxBytesPerShard = DefaultTSDBMaxBytesPerShard + } + + minShards := float64(bytes) / float64(maxBytesPerShard) + + // round up to nearest power of 2 + power := math.Ceil(math.Log2(minShards)) + + // Since x^0 == 1 and we only support factors of 2 + // reset this edge case manually + factor := int(math.Pow(2, power)) + if maxShards > 0 { + factor = min(factor, maxShards) + } + + // shortcut: no need to run any sharding logic when factor=1 + // as it's the same as no sharding + if factor == 1 { + factor = 0 + } + return factor +} diff --git a/pkg/querier/queryrange/shard_resolver_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power_test.go similarity index 55% rename from pkg/querier/queryrange/shard_resolver_test.go rename to pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power_test.go index 8e963b065955f..940d6a53b554d 100644 --- a/pkg/querier/queryrange/shard_resolver_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power_test.go @@ -1,4 +1,4 @@ -package queryrange +package sharding import ( "fmt" @@ -6,8 +6,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" ) func TestGuessShardFactor(t *testing.T) { @@ -23,52 +22,52 @@ func TestGuessShardFactor(t *testing.T) { { exp: 4, stats: stats.Stats{ - Bytes: validation.DefaultTSDBMaxBytesPerShard * 4, + Bytes: DefaultTSDBMaxBytesPerShard * 4, }, }, { // round up shard factor exp: 16, stats: stats.Stats{ - Bytes: validation.DefaultTSDBMaxBytesPerShard * 15, + Bytes: DefaultTSDBMaxBytesPerShard * 15, }, }, { exp: 2, stats: stats.Stats{ - Bytes: 
validation.DefaultTSDBMaxBytesPerShard + 1, + Bytes: DefaultTSDBMaxBytesPerShard + 1, }, }, { exp: 0, stats: stats.Stats{ - Bytes: validation.DefaultTSDBMaxBytesPerShard, + Bytes: DefaultTSDBMaxBytesPerShard, }, }, { maxShards: 8, exp: 4, stats: stats.Stats{ - Bytes: validation.DefaultTSDBMaxBytesPerShard * 4, + Bytes: DefaultTSDBMaxBytesPerShard * 4, }, }, { maxShards: 2, exp: 2, stats: stats.Stats{ - Bytes: validation.DefaultTSDBMaxBytesPerShard * 4, + Bytes: DefaultTSDBMaxBytesPerShard * 4, }, }, { maxShards: 1, exp: 0, stats: stats.Stats{ - Bytes: validation.DefaultTSDBMaxBytesPerShard * 4, + Bytes: DefaultTSDBMaxBytesPerShard * 4, }, }, } { t.Run(fmt.Sprintf("%+v", tc.stats), func(t *testing.T) { - require.Equal(t, tc.exp, guessShardFactor(tc.stats, validation.DefaultTSDBMaxBytesPerShard, tc.maxShards)) + require.Equal(t, tc.exp, GuessShardFactor(tc.stats.Bytes, uint64(DefaultTSDBMaxBytesPerShard), tc.maxShards)) }) } } diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding.go new file mode 100644 index 0000000000000..b7f0e8cd46296 --- /dev/null +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding.go @@ -0,0 +1,102 @@ +package sharding + +import ( + "math" + + "github.com/prometheus/common/model" + + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/queue" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" +) + +var ( + SizedFPsPool = queue.NewSlicePool[SizedFP](1<<8, 1<<16, 4) // 256->65536 +) + +type SizedFP struct { + Fp model.Fingerprint + Stats stats.Stats +} + +type SizedFPs []SizedFP + +func (xs SizedFPs) Len() int { + return len(xs) +} + +func (xs SizedFPs) Less(i, j int) bool { + return xs[i].Fp < xs[j].Fp +} + +func (xs SizedFPs) Swap(i, j int) { + xs[i], xs[j] = xs[j], xs[i] +} + +func (xs SizedFPs) newShard(minFP model.Fingerprint) logproto.Shard { + return logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 
minFP, + }, + Stats: &stats.Stats{}, + } +} + +func (xs SizedFPs) ShardsFor(targetShardBytes uint64) (res []logproto.Shard) { + if len(xs) == 0 { + full := xs.newShard(0) + full.Bounds.Max = model.Fingerprint(math.MaxUint64) + return []logproto.Shard{full} + } + + var ( + cur = xs.newShard(0) + ) + + for _, x := range xs { + + // easy path, there's space -- continue + if cur.SpaceFor(&x.Stats, targetShardBytes) { + cur.Stats.Streams++ + cur.Stats.Chunks += x.Stats.Chunks + cur.Stats.Entries += x.Stats.Entries + cur.Stats.Bytes += x.Stats.Bytes + + cur.Bounds.Max = x.Fp + continue + } + + // we've hit a stream larger than the target; + // create a shard with 1 stream + if cur.Stats.Streams == 0 { + cur.Stats = &stats.Stats{ + Streams: 1, + Chunks: x.Stats.Chunks, + Bytes: x.Stats.Bytes, + Entries: x.Stats.Entries, + } + cur.Bounds.Max = x.Fp + res = append(res, cur) + cur = xs.newShard(x.Fp + 1) + continue + } + + // Otherwise we've hit a stream that's too large but the current shard isn't empty; create a new shard + cur.Bounds.Max = x.Fp - 1 + res = append(res, cur) + cur = xs.newShard(x.Fp) + cur.Stats = &stats.Stats{ + Streams: 1, + Chunks: x.Stats.Chunks, + Bytes: x.Stats.Bytes, + Entries: x.Stats.Entries, + } + } + + if cur.Stats.Streams > 0 { + res = append(res, cur) + } + + res[len(res)-1].Bounds.Max = model.Fingerprint(math.MaxUint64) + return res +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding_test.go new file mode 100644 index 0000000000000..fc476223848ae --- /dev/null +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding_test.go @@ -0,0 +1,153 @@ +package sharding + +import ( + "math" + "sort" + "testing" + + "github.com/prometheus/common/model" + "github.com/stretchr/testify/require" + + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" +) + +func TestSizedFPs_Sort(t *testing.T) { + 
xs := SizedFPs{ + {Fp: 3}, + {Fp: 1}, + {Fp: 6}, + {Fp: 10}, + {Fp: 2}, + {Fp: 0}, + {Fp: 4}, + {Fp: 5}, + {Fp: 7}, + {Fp: 9}, + {Fp: 8}, + } + + sort.Sort(xs) + exp := []model.Fingerprint{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10} + + for i, x := range xs { + require.Equal(t, exp[i], x.Fp) + } +} + +func TestSizedFPs_ShardsFor(t *testing.T) { + mkShard := func(min, max model.Fingerprint, streams, chks, entries, bytes uint64) logproto.Shard { + return logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: min, + Max: max, + }, + Stats: &stats.Stats{ + Streams: streams, + Chunks: chks, + Entries: entries, + Bytes: bytes, + }, + } + } + + mkFP := func(fp model.Fingerprint, chks, entries, bytes uint64) SizedFP { + return SizedFP{ + Fp: fp, + Stats: stats.Stats{ + Chunks: chks, + Entries: entries, + Bytes: bytes, + }, + } + } + + for _, tc := range []struct { + desc string + xs SizedFPs + exp []logproto.Shard + targetShardBytes uint64 + }{ + { + desc: "empty", + targetShardBytes: 100, + xs: SizedFPs{}, + exp: []logproto.Shard{ + mkShard(0, math.MaxUint64, 0, 0, 0, 0), + }, + }, + { + desc: "single stream", + targetShardBytes: 100, + xs: SizedFPs{ + mkFP(1, 1, 1, 1), + }, + exp: []logproto.Shard{ + mkShard(0, math.MaxUint64, 1, 1, 1, 1), + }, + }, + { + desc: "single stream too large", + targetShardBytes: 100, + xs: SizedFPs{ + mkFP(1, 1, 1, 201), + }, + exp: []logproto.Shard{ + mkShard(0, math.MaxUint64, 1, 1, 1, 201), + }, + }, + { + desc: "4 streams 2 shards", + targetShardBytes: 100, + xs: SizedFPs{ + // each has 45 bytes; can only fit 2 in a shard + mkFP(1, 1, 1, 45), + mkFP(2, 1, 1, 45), + mkFP(3, 1, 1, 45), + mkFP(4, 1, 1, 45), + }, + exp: []logproto.Shard{ + mkShard(0, 2, 2, 2, 2, 90), + mkShard(3, math.MaxUint64, 2, 2, 2, 90), + }, + }, + { + desc: "5 streams 3 shards (one leftover)", + targetShardBytes: 100, + xs: SizedFPs{ + // each has 45 bytes; can only fit 2 in a shard + mkFP(1, 1, 1, 45), + mkFP(2, 1, 1, 45), + mkFP(3, 1, 1, 45), + mkFP(4, 1, 1, 45), + mkFP(5, 1, 1, 
45), + }, + exp: []logproto.Shard{ + mkShard(0, 2, 2, 2, 2, 90), + mkShard(3, 4, 2, 2, 2, 90), + mkShard(5, math.MaxUint64, 1, 1, 1, 45), + }, + }, + { + desc: "allowed overflow", + targetShardBytes: 100, + xs: SizedFPs{ + // each has 40 bytes; can fit 3 in a shard + // since overflow == underflow + mkFP(1, 1, 1, 40), + mkFP(2, 1, 1, 40), + mkFP(3, 1, 1, 40), + mkFP(4, 1, 1, 40), + mkFP(5, 1, 1, 40), + }, + exp: []logproto.Shard{ + mkShard(0, 3, 3, 3, 3, 120), + mkShard(4, math.MaxUint64, 2, 2, 2, 80), + }, + }, + } { + t.Run(tc.desc, func(t *testing.T) { + require.Equal(t, tc.exp, tc.xs.ShardsFor(tc.targetShardBytes)) + }) + } +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index.go index 63809f6b1356e..7934b952ba88f 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index.go @@ -15,12 +15,12 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ErrAlreadyOnDesiredVersion = errors.New("tsdb file already on desired version") @@ -55,8 +55,9 @@ func RebuildWithVersion(ctx context.Context, path string, 
desiredVer int) (shipp } builder := NewBuilder(desiredVer) - err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) { + err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, "", nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { builder.AddSeries(lbls.Copy(), fp, chks) + return false }, labels.MustNewMatcher(labels.MatchEqual, "", "")) if err != nil { return nil, err @@ -157,9 +158,10 @@ func (i *TSDBIndex) SetChunkFilterer(chunkFilter chunk.RequestChunkFilterer) { // fn must NOT capture it's arguments. They're reused across series iterations and returned to // a pool after completion. -// TODO(owen-d): have callback return a bool whether to continue or not in order to short-circuit -// when applicable -func (i *TSDBIndex) ForSeries(ctx context.Context, fpFilter index.FingerprintFilter, from model.Time, through model.Time, fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta), matchers ...*labels.Matcher) error { +// Iteration will stop if the callback returns true. 
+// Accepts a userID argument in order to implement `Index` interface, but since this is a single tenant index, +// it is ignored (it's enforced elsewhere in index selection) +func (i *TSDBIndex) ForSeries(ctx context.Context, _ string, fpFilter index.FingerprintFilter, from model.Time, through model.Time, fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta) (stop bool), matchers ...*labels.Matcher) error { // TODO(owen-d): use pool var ls labels.Labels @@ -187,7 +189,9 @@ func (i *TSDBIndex) ForSeries(ctx context.Context, fpFilter index.FingerprintFil continue } - fn(ls, model.Fingerprint(hash), chks) + if stop := fn(ls, model.Fingerprint(hash), chks); stop { + break + } } return p.Err() }) @@ -214,7 +218,7 @@ func (i *TSDBIndex) GetChunkRefs(ctx context.Context, userID string, from, throu } res = res[:0] - if err := i.ForSeries(ctx, fpFilter, from, through, func(ls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) { + if err := i.ForSeries(ctx, "", fpFilter, from, through, func(ls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { for _, chk := range chks { res = append(res, ChunkRef{ @@ -225,6 +229,7 @@ func (i *TSDBIndex) GetChunkRefs(ctx context.Context, userID string, from, throu Checksum: chk.Checksum, }) } + return false }, matchers...); err != nil { return nil, err } @@ -238,7 +243,7 @@ func (i *TSDBIndex) Series(ctx context.Context, _ string, from, through model.Ti } res = res[:0] - if err := i.ForSeries(ctx, fpFilter, from, through, func(ls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) { + if err := i.ForSeries(ctx, "", fpFilter, from, through, func(ls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { if len(chks) == 0 { return } @@ -246,6 +251,7 @@ func (i *TSDBIndex) Series(ctx context.Context, _ string, from, through model.Ti Labels: ls.Copy(), Fingerprint: fp, }) + return false }, matchers...); err != nil { return nil, err } diff --git 
a/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index_test.go index 9d7b80ce161f6..068630c553a04 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index_test.go @@ -12,11 +12,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func TestSingleIdx(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/store.go b/pkg/storage/stores/shipper/indexshipper/tsdb/store.go index 8f97997c5d401..1ef58c32a1e56 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/store.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/store.go @@ -13,14 +13,14 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/downloads" - tsdbindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + 
"github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/downloads" + tsdbindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) type IndexWriter interface { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/testutil/objstore.go b/pkg/storage/stores/shipper/indexshipper/tsdb/testutil/objstore.go index a5a3651a7a138..2a3a6ca57eeca 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/testutil/objstore.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/testutil/objstore.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" "github.com/thanos-io/objstore" - "github.com/grafana/loki/pkg/storage/bucket/filesystem" + "github.com/grafana/loki/v3/pkg/storage/bucket/filesystem" ) func PrepareFilesystemBucket(t testing.TB) (objstore.Bucket, string) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/util_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/util_test.go index 64827a926e466..10957a3510b84 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/util_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/util_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) type LoadableSeries struct { diff --git a/pkg/storage/stores/shipper/indexshipper/uploads/index_set.go b/pkg/storage/stores/shipper/indexshipper/uploads/index_set.go index 7cbe4b21b4d86..19bf88842b020 100644 --- 
a/pkg/storage/stores/shipper/indexshipper/uploads/index_set.go +++ b/pkg/storage/stores/shipper/indexshipper/uploads/index_set.go @@ -11,10 +11,10 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type IndexSet interface { diff --git a/pkg/storage/stores/shipper/indexshipper/uploads/index_set_test.go b/pkg/storage/stores/shipper/indexshipper/uploads/index_set_test.go index d6d772734195c..cc9b65e28588b 100644 --- a/pkg/storage/stores/shipper/indexshipper/uploads/index_set_test.go +++ b/pkg/storage/stores/shipper/indexshipper/uploads/index_set_test.go @@ -10,10 +10,10 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/testutil" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/testutil" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const userID = "user-id" diff --git a/pkg/storage/stores/shipper/indexshipper/uploads/table.go b/pkg/storage/stores/shipper/indexshipper/uploads/table.go index 2e092ce3727a2..44698834a344a 100644 --- a/pkg/storage/stores/shipper/indexshipper/uploads/table.go +++ 
b/pkg/storage/stores/shipper/indexshipper/uploads/table.go @@ -7,9 +7,9 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/uploads/table_manager.go b/pkg/storage/stores/shipper/indexshipper/uploads/table_manager.go index 6d9aa9e35d487..9fcdf43206c7e 100644 --- a/pkg/storage/stores/shipper/indexshipper/uploads/table_manager.go +++ b/pkg/storage/stores/shipper/indexshipper/uploads/table_manager.go @@ -9,8 +9,8 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" ) type Config struct { diff --git a/pkg/storage/stores/shipper/indexshipper/uploads/table_manager_test.go b/pkg/storage/stores/shipper/indexshipper/uploads/table_manager_test.go index 7013ff389c782..b307ee18d6b9a 100644 --- a/pkg/storage/stores/shipper/indexshipper/uploads/table_manager_test.go +++ b/pkg/storage/stores/shipper/indexshipper/uploads/table_manager_test.go @@ -10,9 +10,9 @@ import ( "github.com/go-kit/log" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + 
"github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" ) const objectsStorageDirName = "objects" diff --git a/pkg/storage/stores/shipper/indexshipper/uploads/table_test.go b/pkg/storage/stores/shipper/indexshipper/uploads/table_test.go index f67b3d3571c28..b58b05fa6ad9f 100644 --- a/pkg/storage/stores/shipper/indexshipper/uploads/table_test.go +++ b/pkg/storage/stores/shipper/indexshipper/uploads/table_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/util/queries.go b/pkg/storage/stores/shipper/indexshipper/util/queries.go index 8da02c02d9794..46291a58a6f5b 100644 --- a/pkg/storage/stores/shipper/indexshipper/util/queries.go +++ b/pkg/storage/stores/shipper/indexshipper/util/queries.go @@ -6,8 +6,8 @@ import ( "github.com/grafana/dskit/concurrency" - "github.com/grafana/loki/pkg/storage/stores/series/index" - util_math "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + util_math "github.com/grafana/loki/v3/pkg/util/math" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/util/queries_test.go b/pkg/storage/stores/shipper/indexshipper/util/queries_test.go index 1968a2737a758..a33da42c264f0 100644 --- a/pkg/storage/stores/shipper/indexshipper/util/queries_test.go +++ b/pkg/storage/stores/shipper/indexshipper/util/queries_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type mockTableQuerier struct { diff --git a/pkg/storage/stores/shipper/indexshipper/util/util.go b/pkg/storage/stores/shipper/indexshipper/util/util.go index 
9150ab34a0cbc..f47cea40d6d7d 100644 --- a/pkg/storage/stores/shipper/indexshipper/util/util.go +++ b/pkg/storage/stores/shipper/indexshipper/util/util.go @@ -9,7 +9,7 @@ import ( "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" ) const maxStackSize = 8 * 1024 diff --git a/pkg/storage/util_test.go b/pkg/storage/util_test.go index 759b2e336aa29..7743bce2fb0f9 100644 --- a/pkg/storage/util_test.go +++ b/pkg/storage/util_test.go @@ -11,23 +11,24 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/cache" - chunkclient "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores" - index_stats "github.com/grafana/loki/pkg/storage/stores/index/stats" - loki_util "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + chunkclient "github.com/grafana/loki/v3/pkg/storage/chunk/client" + 
"github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores" + index_stats "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + loki_util "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( @@ -270,6 +271,14 @@ func (m *mockChunkStore) Stats(_ context.Context, _ string, _, _ model.Time, _ . return nil, nil } +func (m *mockChunkStore) GetShards(_ context.Context, _ string, _, _ model.Time, _ uint64, _ chunk.Predicate) (*logproto.ShardsResponse, error) { + return nil, nil +} + +func (m *mockChunkStore) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} + func (m *mockChunkStore) Volume(_ context.Context, _ string, _, _ model.Time, _ int32, _ []string, _ string, _ ...*labels.Matcher) (*logproto.VolumeResponse, error) { return nil, nil } diff --git a/pkg/util/config.go b/pkg/util/config.go index f54d469690c98..89d586b37e13e 100644 --- a/pkg/util/config.go +++ b/pkg/util/config.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/common/version" "gopkg.in/yaml.v2" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // LogConfig takes a pointer to a config object, marshalls it to YAML and prints each line in REVERSE order diff --git a/pkg/util/deletion/deletion.go b/pkg/util/deletion/deletion.go index e90b6a4c2f073..fd97205a6bf45 100644 --- a/pkg/util/deletion/deletion.go +++ b/pkg/util/deletion/deletion.go @@ -1,10 +1,10 @@ package deletion import ( - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + 
"github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func SetupPipeline(req logql.SelectLogParams, p log.Pipeline) (log.Pipeline, error) { diff --git a/pkg/util/dns_watcher.go b/pkg/util/dns_watcher.go index 147af45e19e76..9fa698e872c06 100644 --- a/pkg/util/dns_watcher.go +++ b/pkg/util/dns_watcher.go @@ -9,7 +9,7 @@ import ( "github.com/grafana/dskit/services" "github.com/pkg/errors" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // Notifications about address resolution. All notifications are sent on the same goroutine. diff --git a/pkg/util/errors.go b/pkg/util/errors.go index cb994e1b46ffe..f05decc6223cd 100644 --- a/pkg/util/errors.go +++ b/pkg/util/errors.go @@ -10,7 +10,7 @@ import ( "google.golang.org/grpc/codes" "google.golang.org/grpc/status" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/log" ) // LogError logs any error returned by f; useful when deferring Close etc. diff --git a/pkg/util/extract/extract.go b/pkg/util/extract/extract.go index ad0eab10dcfb4..580eba0e2d927 100644 --- a/pkg/util/extract/extract.go +++ b/pkg/util/extract/extract.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var errNoMetricNameLabel = fmt.Errorf("No metric name label") diff --git a/pkg/util/flagext/labelset.go b/pkg/util/flagext/labelset.go index 859e7eb07246e..79a72c07739e0 100644 --- a/pkg/util/flagext/labelset.go +++ b/pkg/util/flagext/labelset.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/common/model" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) // LabelSet is a labelSet that can be used as a flag. 
diff --git a/pkg/util/http_test.go b/pkg/util/http_test.go index ba365f777de8f..d032085db5028 100644 --- a/pkg/util/http_test.go +++ b/pkg/util/http_test.go @@ -15,9 +15,9 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestRenderHTTPResponse(t *testing.T) { diff --git a/pkg/util/httpgrpc/carrier.go b/pkg/util/httpgrpc/carrier.go index ab1753ef6c271..6b160d615e1bc 100644 --- a/pkg/util/httpgrpc/carrier.go +++ b/pkg/util/httpgrpc/carrier.go @@ -4,7 +4,7 @@ import ( weaveworks_httpgrpc "github.com/grafana/dskit/httpgrpc" "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange" ) type Request interface { diff --git a/pkg/util/limiter/combined_limits.go b/pkg/util/limiter/combined_limits.go index ba0bf50e8003e..b1bc467e6cac4 100644 --- a/pkg/util/limiter/combined_limits.go +++ b/pkg/util/limiter/combined_limits.go @@ -1,17 +1,17 @@ package limiter import ( - "github.com/grafana/loki/pkg/bloomcompactor" - "github.com/grafana/loki/pkg/bloomgateway" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/distributor" - "github.com/grafana/loki/pkg/ingester" - querier_limits "github.com/grafana/loki/pkg/querier/limits" - queryrange_limits "github.com/grafana/loki/pkg/querier/queryrange/limits" - "github.com/grafana/loki/pkg/ruler" - scheduler_limits "github.com/grafana/loki/pkg/scheduler/limits" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/bloomcompactor" + "github.com/grafana/loki/v3/pkg/bloomgateway" + "github.com/grafana/loki/v3/pkg/compactor" + 
"github.com/grafana/loki/v3/pkg/distributor" + "github.com/grafana/loki/v3/pkg/ingester" + querier_limits "github.com/grafana/loki/v3/pkg/querier/limits" + queryrange_limits "github.com/grafana/loki/v3/pkg/querier/queryrange/limits" + "github.com/grafana/loki/v3/pkg/ruler" + scheduler_limits "github.com/grafana/loki/v3/pkg/scheduler/limits" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" ) type CombinedLimits interface { diff --git a/pkg/util/limiter/query_limiter.go b/pkg/util/limiter/query_limiter.go index a827ad82227a7..430eee3ebc8be 100644 --- a/pkg/util/limiter/query_limiter.go +++ b/pkg/util/limiter/query_limiter.go @@ -8,8 +8,8 @@ import ( "github.com/prometheus/common/model" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" ) type queryLimiterCtxKey struct{} diff --git a/pkg/util/log/experimental.go b/pkg/util/log/experimental.go index ed26c06af347d..30aa39c33f3a2 100644 --- a/pkg/util/log/experimental.go +++ b/pkg/util/log/experimental.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) var experimentalFeaturesInUse = promauto.NewCounter( diff --git a/pkg/util/log/log.go b/pkg/util/log/log.go index 7453b615118a0..93ccb86abf57a 100644 --- a/pkg/util/log/log.go +++ b/pkg/util/log/log.go @@ -16,7 +16,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) var ( diff --git a/pkg/util/loser/tree_test.go b/pkg/util/loser/tree_test.go index 956b420f129e6..9c6f5f1c2e99a 100644 --- 
a/pkg/util/loser/tree_test.go +++ b/pkg/util/loser/tree_test.go @@ -4,7 +4,7 @@ import ( "math" "testing" - "github.com/grafana/loki/pkg/util/loser" + "github.com/grafana/loki/v3/pkg/util/loser" ) type List struct { diff --git a/pkg/util/marshal/labels.go b/pkg/util/marshal/labels.go index 8998f133b921a..016e4bad44bd5 100644 --- a/pkg/util/marshal/labels.go +++ b/pkg/util/marshal/labels.go @@ -3,7 +3,7 @@ package marshal import ( "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) // NewLabelSet constructs a Labelset from a promql metric list as a string diff --git a/pkg/util/marshal/labels_test.go b/pkg/util/marshal/labels_test.go index 8095ec3a1a908..b877e3ba675c0 100644 --- a/pkg/util/marshal/labels_test.go +++ b/pkg/util/marshal/labels_test.go @@ -4,7 +4,7 @@ import ( "reflect" "testing" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) func TestNewLabelSet(t *testing.T) { diff --git a/pkg/util/marshal/legacy/marshal.go b/pkg/util/marshal/legacy/marshal.go index 867268f8e59ee..82dd100999bb8 100644 --- a/pkg/util/marshal/legacy/marshal.go +++ b/pkg/util/marshal/legacy/marshal.go @@ -9,9 +9,9 @@ import ( "github.com/gorilla/websocket" json "github.com/json-iterator/go" - loghttp "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" + loghttp "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) // Note that the below methods directly marshal the values passed in. 
This is because these objects currently marshal diff --git a/pkg/util/marshal/legacy/marshal_test.go b/pkg/util/marshal/legacy/marshal_test.go index 88375ad842ab1..b535a390479f9 100644 --- a/pkg/util/marshal/legacy/marshal_test.go +++ b/pkg/util/marshal/legacy/marshal_test.go @@ -9,9 +9,9 @@ import ( json "github.com/json-iterator/go" "github.com/stretchr/testify/require" - loghttp "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" + loghttp "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) // covers responses from /api/prom/query @@ -56,6 +56,10 @@ var queryTests = []struct { } ], "stats" : { + "index": { + "postFilterChunks": 0, + "totalChunks": 0 + }, "ingester" : { "store": { "chunksDownloadTime": 0, @@ -64,6 +68,7 @@ var queryTests = []struct { "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 0, "chunk" :{ "compressedBytes": 0, "decompressedBytes": 0, @@ -89,6 +94,7 @@ var queryTests = []struct { "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 0, "chunk" :{ "compressedBytes": 0, "decompressedBytes": 0, diff --git a/pkg/util/marshal/marshal.go b/pkg/util/marshal/marshal.go index bb961039cdd35..b720bfa557bd1 100644 --- a/pkg/util/marshal/marshal.go +++ b/pkg/util/marshal/marshal.go @@ -11,14 +11,14 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/loghttp" - legacy "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - indexStats "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util/httpreq" - 
marshal_legacy "github.com/grafana/loki/pkg/util/marshal/legacy" + "github.com/grafana/loki/v3/pkg/loghttp" + legacy "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + indexStats "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/util/httpreq" + marshal_legacy "github.com/grafana/loki/v3/pkg/util/marshal/legacy" ) func WriteResponseJSON(r *http.Request, v any, w http.ResponseWriter) error { @@ -155,6 +155,16 @@ func WriteIndexStatsResponseJSON(r *indexStats.Stats, w io.Writer) error { return s.Flush() } +// WriteIndexShardsResponseJSON marshals a indexgatewaypb.ShardsResponse to JSON and then +// writes it to the provided io.Writer. +func WriteIndexShardsResponseJSON(r *logproto.ShardsResponse, w io.Writer) error { + s := jsoniter.ConfigFastest.BorrowStream(w) + defer jsoniter.ConfigFastest.ReturnStream(s) + s.WriteVal(r) + s.WriteRaw("\n") + return s.Flush() +} + // WriteVolumeResponseJSON marshals a logproto.VolumeResponse to JSON and then // writes it to the provided io.Writer. func WriteVolumeResponseJSON(r *logproto.VolumeResponse, w io.Writer) error { @@ -164,3 +174,23 @@ func WriteVolumeResponseJSON(r *logproto.VolumeResponse, w io.Writer) error { s.WriteRaw("\n") return s.Flush() } + +// WriteDetectedFieldsResponseJSON marshals a logproto.DetectedFieldsResponse to JSON and then +// writes it to the provided io.Writer. +func WriteDetectedFieldsResponseJSON(r *logproto.DetectedFieldsResponse, w io.Writer) error { + s := jsoniter.ConfigFastest.BorrowStream(w) + defer jsoniter.ConfigFastest.ReturnStream(s) + s.WriteVal(r) + s.WriteRaw("\n") + return s.Flush() +} + +// WriteDetectedLabelsResponseJSON marshals a logproto.DetectedLabelsResponse to JSON and then +// writes it to the provided io.Writer. 
+func WriteDetectedLabelsResponseJSON(r *logproto.DetectedLabelsResponse, w io.Writer) error { + s := jsoniter.ConfigFastest.BorrowStream(w) + defer jsoniter.ConfigFastest.ReturnStream(s) + s.WriteVal(r) + s.WriteRaw("\n") + return s.Flush() +} diff --git a/pkg/util/marshal/marshal_test.go b/pkg/util/marshal/marshal_test.go index ca932064ca6c3..ba2acf5ee85ef 100644 --- a/pkg/util/marshal/marshal_test.go +++ b/pkg/util/marshal/marshal_test.go @@ -15,15 +15,19 @@ import ( "github.com/prometheus/prometheus/promql/parser" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - legacy "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/loghttp" + legacy "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) const emptyStats = `{ + "index": { + "postFilterChunks": 0, + "totalChunks": 0 + }, "ingester" : { "store": { "chunksDownloadTime": 0, @@ -32,6 +36,7 @@ const emptyStats = `{ "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 0, "chunk" :{ "compressedBytes": 0, "decompressedBytes": 0, @@ -57,6 +62,7 @@ const emptyStats = `{ "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 0, "chunk" :{ "compressedBytes": 0, "decompressedBytes": 0, diff --git a/pkg/util/marshal/query.go b/pkg/util/marshal/query.go index 8f41915c720a8..4591d851c553e 100644 --- a/pkg/util/marshal/query.go +++ b/pkg/util/marshal/query.go @@ -12,12 +12,12 @@ import ( "github.com/prometheus/prometheus/promql" 
"github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/loghttp" - legacy "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/loghttp" + legacy "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) // NewResultValue constructs a ResultValue from a promql.Value diff --git a/pkg/util/marshal/tail.go b/pkg/util/marshal/tail.go index 222b76c046b7d..4dbada1d83816 100644 --- a/pkg/util/marshal/tail.go +++ b/pkg/util/marshal/tail.go @@ -1,8 +1,8 @@ package marshal import ( - "github.com/grafana/loki/pkg/loghttp" - legacy "github.com/grafana/loki/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/loghttp" + legacy "github.com/grafana/loki/v3/pkg/loghttp/legacy" ) // NewDroppedStream constructs a DroppedStream from a legacy.DroppedEntry diff --git a/pkg/util/metrics_helper.go b/pkg/util/metrics_helper.go index 58733c1c1e9ea..e4572b4e4a15c 100644 --- a/pkg/util/metrics_helper.go +++ b/pkg/util/metrics_helper.go @@ -13,7 +13,7 @@ import ( "github.com/prometheus/prometheus/model/labels" tsdb_errors "github.com/prometheus/prometheus/tsdb/errors" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/util/querylimits/limiter.go b/pkg/util/querylimits/limiter.go index 051e31270f137..9510a3fdc8250 100644 --- a/pkg/util/querylimits/limiter.go +++ b/pkg/util/querylimits/limiter.go @@ -8,8 +8,8 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/util/limiter" - logutil "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/limiter" + 
logutil "github.com/grafana/loki/v3/pkg/util/log" ) type Limiter struct { diff --git a/pkg/util/querylimits/limiter_test.go b/pkg/util/querylimits/limiter_test.go index ad80fa34ec186..549972d32a2e9 100644 --- a/pkg/util/querylimits/limiter_test.go +++ b/pkg/util/querylimits/limiter_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) type mockTenantLimits struct { diff --git a/pkg/util/querylimits/middleware.go b/pkg/util/querylimits/middleware.go index 58a93ad850f6b..a25d53949ba1d 100644 --- a/pkg/util/querylimits/middleware.go +++ b/pkg/util/querylimits/middleware.go @@ -7,7 +7,7 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/dskit/middleware" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type queryLimitsMiddleware struct { diff --git a/pkg/util/querylimits/propagation.go b/pkg/util/querylimits/propagation.go index f0e5fbc8f6b49..a9cb06e347ca6 100644 --- a/pkg/util/querylimits/propagation.go +++ b/pkg/util/querylimits/propagation.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) // Context key type used to avoid collisions diff --git a/pkg/util/ring/ring.go b/pkg/util/ring/ring.go index bc8b2576984ea..27de64642497e 100644 --- a/pkg/util/ring/ring.go +++ b/pkg/util/ring/ring.go @@ -5,7 +5,7 @@ import ( "github.com/grafana/dskit/ring" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) // TokenFor generates a token used for finding ingesters from ring diff --git a/pkg/util/ring/ring_config.go b/pkg/util/ring/ring_config.go index 779c40f4dad5c..d64bea1759cc9 100644 --- a/pkg/util/ring/ring_config.go +++ b/pkg/util/ring/ring_config.go @@ -15,8 +15,8 @@ import ( "github.com/grafana/dskit/netutil" 
"github.com/grafana/dskit/ring" - util_flagext "github.com/grafana/loki/pkg/util/flagext" - util_log "github.com/grafana/loki/pkg/util/log" + util_flagext "github.com/grafana/loki/v3/pkg/util/flagext" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // RingConfig masks the ring lifecycler config which contains diff --git a/pkg/util/server/error.go b/pkg/util/server/error.go index 65cb430bb3f32..c120a79176f85 100644 --- a/pkg/util/server/error.go +++ b/pkg/util/server/error.go @@ -14,9 +14,9 @@ import ( "github.com/gogo/googleapis/google/rpc" "github.com/gogo/status" - "github.com/grafana/loki/pkg/logqlmodel" - storage_errors "github.com/grafana/loki/pkg/storage/errors" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logqlmodel" + storage_errors "github.com/grafana/loki/v3/pkg/storage/errors" + "github.com/grafana/loki/v3/pkg/util" ) // StatusClientClosedRequest is the status code for when a client request cancellation of an http request diff --git a/pkg/util/server/error_test.go b/pkg/util/server/error_test.go index 47b2453f14925..69f2bff163c6c 100644 --- a/pkg/util/server/error_test.go +++ b/pkg/util/server/error_test.go @@ -16,9 +16,9 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc/codes" - "github.com/grafana/loki/pkg/logqlmodel" - storage_errors "github.com/grafana/loki/pkg/storage/errors" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logqlmodel" + storage_errors "github.com/grafana/loki/v3/pkg/storage/errors" + "github.com/grafana/loki/v3/pkg/util" ) func Test_writeError(t *testing.T) { diff --git a/pkg/util/server/grpc_headers.go b/pkg/util/server/grpc_headers.go index 3b205a73d10d2..27f5bb9a75d62 100644 --- a/pkg/util/server/grpc_headers.go +++ b/pkg/util/server/grpc_headers.go @@ -6,7 +6,7 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) func 
injectHTTPHeadersIntoGRPCRequest(ctx context.Context) context.Context { diff --git a/pkg/util/server/grpc_headers_test.go b/pkg/util/server/grpc_headers_test.go index db222451f4bde..1c0e728659b83 100644 --- a/pkg/util/server/grpc_headers_test.go +++ b/pkg/util/server/grpc_headers_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) func TestInjectHTTPHeaderIntoGRPCRequest(t *testing.T) { diff --git a/pkg/util/server/grpc_query_tags.go b/pkg/util/server/grpc_query_tags.go index d5d6e58d2c938..cfd5e95ff23df 100644 --- a/pkg/util/server/grpc_query_tags.go +++ b/pkg/util/server/grpc_query_tags.go @@ -6,7 +6,7 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) func getQueryTags(ctx context.Context) string { diff --git a/pkg/util/server/grpc_query_tags_test.go b/pkg/util/server/grpc_query_tags_test.go index ae718178caa00..733cd488dce7a 100644 --- a/pkg/util/server/grpc_query_tags_test.go +++ b/pkg/util/server/grpc_query_tags_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) func TestInjectQueryTagsIntoGRPCRequest(t *testing.T) { diff --git a/pkg/util/server/recovery.go b/pkg/util/server/recovery.go index 713d77e44a306..ce3ad109512b7 100644 --- a/pkg/util/server/recovery.go +++ b/pkg/util/server/recovery.go @@ -13,8 +13,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util/constants" ) 
const maxStacksize = 8 * 1024 diff --git a/pkg/util/server/recovery_test.go b/pkg/util/server/recovery_test.go index 3a98b01b1beb4..a8d1d3f1b6b9d 100644 --- a/pkg/util/server/recovery_test.go +++ b/pkg/util/server/recovery_test.go @@ -10,7 +10,7 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) func Test_onPanic(t *testing.T) { diff --git a/pkg/util/spanlogger/spanlogger.go b/pkg/util/spanlogger/spanlogger.go index dbbf7679f7c7c..03ba8ab06e2d5 100644 --- a/pkg/util/spanlogger/spanlogger.go +++ b/pkg/util/spanlogger/spanlogger.go @@ -7,7 +7,7 @@ import ( "github.com/grafana/dskit/spanlogger" "github.com/grafana/dskit/tenant" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/util/time.go b/pkg/util/time.go index b943fea92aad8..9de06f381c88c 100644 --- a/pkg/util/time.go +++ b/pkg/util/time.go @@ -10,7 +10,7 @@ import ( "github.com/grafana/dskit/httpgrpc" "github.com/prometheus/common/model" - utilsMath "github.com/grafana/loki/pkg/util/math" + utilsMath "github.com/grafana/loki/v3/pkg/util/math" ) const ( diff --git a/pkg/util/unmarshal/legacy/unmarshal.go b/pkg/util/unmarshal/legacy/unmarshal.go index 0f62b6df04c5f..5a58ca6bbfed1 100644 --- a/pkg/util/unmarshal/legacy/unmarshal.go +++ b/pkg/util/unmarshal/legacy/unmarshal.go @@ -5,7 +5,7 @@ import ( json "github.com/json-iterator/go" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // DecodePushRequest directly decodes json to a logproto.PushRequest diff --git a/pkg/util/unmarshal/legacy/unmarshal_test.go b/pkg/util/unmarshal/legacy/unmarshal_test.go index 780a5ed268ed7..dc51815890dff 100644 --- a/pkg/util/unmarshal/legacy/unmarshal_test.go +++ b/pkg/util/unmarshal/legacy/unmarshal_test.go @@ -9,7 +9,7 @@ import ( 
"github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // covers requests to /api/prom/push diff --git a/pkg/util/unmarshal/unmarshal.go b/pkg/util/unmarshal/unmarshal.go index 51e7d1108d9d9..4b048d7089c65 100644 --- a/pkg/util/unmarshal/unmarshal.go +++ b/pkg/util/unmarshal/unmarshal.go @@ -6,8 +6,8 @@ import ( jsoniter "github.com/json-iterator/go" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" ) // DecodePushRequest directly decodes json to a logproto.PushRequest diff --git a/pkg/util/unmarshal/unmarshal_test.go b/pkg/util/unmarshal/unmarshal_test.go index 93372f62ebef1..3ae76e1d9fbb0 100644 --- a/pkg/util/unmarshal/unmarshal_test.go +++ b/pkg/util/unmarshal/unmarshal_test.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - legacy_loghttp "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/loghttp" + legacy_loghttp "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util/marshal" ) func Test_DecodePushRequest(t *testing.T) { diff --git a/pkg/util/validation/limits.go b/pkg/util/validation/limits.go index fdbd31fa8bbe9..e5e80177c64d0 100644 --- a/pkg/util/validation/limits.go +++ b/pkg/util/validation/limits.go @@ -69,7 +69,7 @@ func MaxDurationPerTenant(tenantIDs []string, f func(string) time.Duration) time return result } -// MaxDurationOrDisabledPerTenant is returning the maximum duration per tenant or zero if one tenant has time.Duration(0). +// MaxDurationOrZeroPerTenant is returning the maximum duration per tenant or zero if one tenant has time.Duration(0). 
func MaxDurationOrZeroPerTenant(tenantIDs []string, f func(string) time.Duration) time.Duration { var result *time.Duration for _, tenantID := range tenantIDs { diff --git a/pkg/util/validation/notifications_limit_flag.go b/pkg/util/validation/notifications_limit_flag.go index 1b8524c1ab5b1..f05e634e87569 100644 --- a/pkg/util/validation/notifications_limit_flag.go +++ b/pkg/util/validation/notifications_limit_flag.go @@ -6,7 +6,7 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) var allowedIntegrationNames = []string{ diff --git a/pkg/validation/exporter.go b/pkg/validation/exporter.go index ad9dde8574dd0..78cd13a1dad84 100644 --- a/pkg/validation/exporter.go +++ b/pkg/validation/exporter.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) type ExportedLimits interface { diff --git a/pkg/validation/limits.go b/pkg/validation/limits.go index e159fbf018f1b..ed8f508447683 100644 --- a/pkg/validation/limits.go +++ b/pkg/validation/limits.go @@ -19,16 +19,18 @@ import ( "golang.org/x/time/rate" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/compactor/deletionmode" - "github.com/grafana/loki/pkg/distributor/shardstreams" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/logql/syntax" - ruler_config "github.com/grafana/loki/pkg/ruler/config" - "github.com/grafana/loki/pkg/ruler/util" - "github.com/grafana/loki/pkg/util/flagext" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/compactor/deletionmode" + "github.com/grafana/loki/v3/pkg/distributor/shardstreams" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logql" + 
"github.com/grafana/loki/v3/pkg/logql/syntax" + ruler_config "github.com/grafana/loki/v3/pkg/ruler/config" + "github.com/grafana/loki/v3/pkg/ruler/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util/flagext" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/validation" ) const ( @@ -49,8 +51,8 @@ const ( bytesInMB = 1048576 - defaultPerStreamRateLimit = 3 << 20 // 3MB - DefaultTSDBMaxBytesPerShard = 600 << 20 // 600MB + defaultPerStreamRateLimit = 3 << 20 // 3MB + DefaultTSDBMaxBytesPerShard = sharding.DefaultTSDBMaxBytesPerShard defaultPerStreamBurstLimit = 5 * defaultPerStreamRateLimit DefaultPerTenantQueryTimeout = "1m" @@ -78,6 +80,8 @@ type Limits struct { MaxLineSize flagext.ByteSize `yaml:"max_line_size" json:"max_line_size"` MaxLineSizeTruncate bool `yaml:"max_line_size_truncate" json:"max_line_size_truncate"` IncrementDuplicateTimestamp bool `yaml:"increment_duplicate_timestamp" json:"increment_duplicate_timestamp"` + DiscoverServiceName []string `yaml:"discover_service_name" json:"discover_service_name"` + DiscoverLogLevels bool `yaml:"discover_log_levels" json:"discover_log_levels"` // Ingester enforced limits. 
MaxLocalStreamsPerUser int `yaml:"max_streams_per_user" json:"max_streams_per_user"` @@ -95,6 +99,7 @@ type Limits struct { MaxQueryParallelism int `yaml:"max_query_parallelism" json:"max_query_parallelism"` TSDBMaxQueryParallelism int `yaml:"tsdb_max_query_parallelism" json:"tsdb_max_query_parallelism"` TSDBMaxBytesPerShard flagext.ByteSize `yaml:"tsdb_max_bytes_per_shard" json:"tsdb_max_bytes_per_shard"` + TSDBShardingStrategy string `yaml:"tsdb_sharding_strategy" json:"tsdb_sharding_strategy"` CardinalityLimit int `yaml:"cardinality_limit" json:"cardinality_limit"` MaxStreamsMatchersPerQuery int `yaml:"max_streams_matchers_per_query" json:"max_streams_matchers_per_query"` MaxConcurrentTailRequests int `yaml:"max_concurrent_tail_requests" json:"max_concurrent_tail_requests"` @@ -237,6 +242,20 @@ func (l *Limits) RegisterFlags(f *flag.FlagSet) { f.IntVar(&l.MaxLabelNamesPerSeries, "validation.max-label-names-per-series", 15, "Maximum number of label names per series.") f.BoolVar(&l.RejectOldSamples, "validation.reject-old-samples", true, "Whether or not old samples will be rejected.") f.BoolVar(&l.IncrementDuplicateTimestamp, "validation.increment-duplicate-timestamps", false, "Alter the log line timestamp during ingestion when the timestamp is the same as the previous entry for the same stream. When enabled, if a log line in a push request has the same timestamp as the previous line for the same stream, one nanosecond is added to the log line. This will preserve the received order of log lines with the exact same timestamp when they are queried, by slightly altering their stored timestamp. 
NOTE: This is imperfect, because Loki accepts out of order writes, and another push request for the same stream could contain duplicate timestamps to existing entries and they will not be incremented.") + l.DiscoverServiceName = []string{ + "service", + "app", + "application", + "name", + "app_kubernetes_io_name", + "container", + "container_name", + "component", + "workload", + "job", + } + f.Var((*dskit_flagext.StringSlice)(&l.DiscoverServiceName), "validation.discover-service-name", "If no service_name label exists, Loki maps a single label from the configured list to service_name. If none of the configured labels exist in the stream, label is set to unknown_service. Empty list disables setting the label.") + f.BoolVar(&l.DiscoverLogLevels, "validation.discover-log-levels", false, "Discover and add log levels during ingestion, if not present already. Levels would be added to Structured Metadata with name 'level' and one of the values from 'debug', 'info', 'warn', 'error', 'critical', 'fatal'.") _ = l.RejectOldSamplesMaxAge.Set("7d") f.Var(&l.RejectOldSamplesMaxAge, "validation.reject-old-samples.max-age", "Maximum accepted sample age before rejecting.") @@ -270,7 +289,16 @@ func (l *Limits) RegisterFlags(f *flag.FlagSet) { f.IntVar(&l.MaxQueryParallelism, "querier.max-query-parallelism", 32, "Maximum number of queries that will be scheduled in parallel by the frontend.") f.IntVar(&l.TSDBMaxQueryParallelism, "querier.tsdb-max-query-parallelism", 128, "Maximum number of queries will be scheduled in parallel by the frontend for TSDB schemas.") _ = l.TSDBMaxBytesPerShard.Set(strconv.Itoa(DefaultTSDBMaxBytesPerShard)) - f.Var(&l.TSDBMaxBytesPerShard, "querier.tsdb-max-bytes-per-shard", "Maximum number of bytes assigned to a single sharded query. Also expressible in human readable forms (1GB, etc).") + f.Var(&l.TSDBMaxBytesPerShard, "querier.tsdb-max-bytes-per-shard", "Target maximum number of bytes assigned to a single sharded query. 
Also expressible in human readable forms (1GB, etc). Note: This is a _target_ and not an absolute limit. The actual limit can be higher, but the query planner will try to build shards up to this limit.") + f.StringVar( + &l.TSDBShardingStrategy, + "limits.tsdb-sharding-strategy", + logql.PowerOfTwoVersion.String(), + fmt.Sprintf( + "sharding strategy to use in query planning. Suggested to use %s once all nodes can recognize it.", + logql.BoundedVersion.String(), + ), + ) f.IntVar(&l.CardinalityLimit, "store.cardinality-limit", 1e5, "Cardinality limit for index queries.") f.IntVar(&l.MaxStreamsMatchersPerQuery, "querier.max-streams-matcher-per-query", 1000, "Maximum number of stream matchers per query.") f.IntVar(&l.MaxConcurrentTailRequests, "querier.max-concurrent-tail-requests", 10, "Maximum number of concurrent tail requests.") @@ -432,6 +460,10 @@ func (l *Limits) Validate() error { return err } + if _, err := logql.ParseShardVersion(l.TSDBShardingStrategy); err != nil { + return errors.Wrap(err, "invalid tsdb sharding strategy") + } + if _, err := chunkenc.ParseEncoding(l.BloomBlockEncoding); err != nil { return err } @@ -559,7 +591,7 @@ func (o *Overrides) MaxQueryLength(_ context.Context, userID string) time.Durati // so nooping in Loki until then. func (o *Overrides) MaxChunksPerQueryFromStore(_ string) int { return 0 } -// MaxQueryLength returns the limit of the series of metric queries. +// MaxQuerySeries returns the limit of the series of metric queries. func (o *Overrides) MaxQuerySeries(_ context.Context, userID string) int { return o.getOverridesForUser(userID).MaxQuerySeries } @@ -595,6 +627,11 @@ func (o *Overrides) TSDBMaxBytesPerShard(userID string) int { return o.getOverridesForUser(userID).TSDBMaxBytesPerShard.Val() } +// TSDBShardingStrategy returns the sharding strategy to use in query planning. 
+func (o *Overrides) TSDBShardingStrategy(userID string) string { + return o.getOverridesForUser(userID).TSDBShardingStrategy +} + // MaxQueryParallelism returns the limit to the number of sub-queries the // frontend will process in parallel. func (o *Overrides) MaxQueryParallelism(_ context.Context, userID string) int { @@ -667,7 +704,7 @@ func (o *Overrides) MaxLineSize(userID string) int { return o.getOverridesForUser(userID).MaxLineSize.Val() } -// MaxLineSizeShouldTruncate returns whether lines longer than max should be truncated. +// MaxLineSizeTruncate returns whether lines longer than max should be truncated. func (o *Overrides) MaxLineSizeTruncate(userID string) bool { return o.getOverridesForUser(userID).MaxLineSizeTruncate } @@ -876,6 +913,14 @@ func (o *Overrides) IncrementDuplicateTimestamps(userID string) bool { return o.getOverridesForUser(userID).IncrementDuplicateTimestamp } +func (o *Overrides) DiscoverServiceName(userID string) []string { + return o.getOverridesForUser(userID).DiscoverServiceName +} + +func (o *Overrides) DiscoverLogLevels(userID string) bool { + return o.getOverridesForUser(userID).DiscoverLogLevels +} + // VolumeEnabled returns whether volume endpoints are enabled for a user. 
func (o *Overrides) VolumeEnabled(userID string) bool { return o.getOverridesForUser(userID).VolumeEnabled diff --git a/pkg/validation/limits_test.go b/pkg/validation/limits_test.go index 59626aeb8cdbe..598a6f9033cde 100644 --- a/pkg/validation/limits_test.go +++ b/pkg/validation/limits_test.go @@ -12,9 +12,10 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/compactor/deletionmode" - "github.com/grafana/loki/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/compactor/deletionmode" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logql" ) func TestLimitsTagsYamlMatchJson(t *testing.T) { @@ -214,6 +215,7 @@ ruler_remote_write_headers: `, exp: Limits{ RulerRemoteWriteHeaders: OverwriteMarshalingStringMap{map[string]string{"foo": "bar"}}, + DiscoverServiceName: []string{}, // Rest from new defaults StreamRetention: []StreamRetention{ @@ -231,6 +233,7 @@ ruler_remote_write_headers: ruler_remote_write_headers: `, exp: Limits{ + DiscoverServiceName: []string{}, // Rest from new defaults StreamRetention: []StreamRetention{ @@ -250,6 +253,7 @@ retention_stream: selector: '{foo="bar"}' `, exp: Limits{ + DiscoverServiceName: []string{}, StreamRetention: []StreamRetention{ { Period: model.Duration(24 * time.Hour), @@ -268,7 +272,8 @@ retention_stream: reject_old_samples: true `, exp: Limits{ - RejectOldSamples: true, + RejectOldSamples: true, + DiscoverServiceName: []string{}, // Rest from new defaults RulerRemoteWriteHeaders: OverwriteMarshalingStringMap{map[string]string{"a": "b"}}, @@ -287,7 +292,8 @@ reject_old_samples: true query_timeout: 5m `, exp: Limits{ - QueryTimeout: model.Duration(5 * time.Minute), + DiscoverServiceName: []string{}, + QueryTimeout: model.Duration(5 * time.Minute), // Rest from new defaults. 
RulerRemoteWriteHeaders: OverwriteMarshalingStringMap{map[string]string{"a": "b"}}, @@ -338,6 +344,7 @@ func TestLimitsValidation(t *testing.T) { } { desc := fmt.Sprintf("%s/%s", tc.limits.DeletionMode, tc.limits.BloomBlockEncoding) t.Run(desc, func(t *testing.T) { + tc.limits.TSDBShardingStrategy = logql.PowerOfTwoVersion.String() // hacky but needed for test if tc.expected == nil { require.NoError(t, tc.limits.Validate()) } else { diff --git a/pkg/validation/validate.go b/pkg/validation/validate.go index 09c444aa64987..4b02505b98e54 100644 --- a/pkg/validation/validate.go +++ b/pkg/validation/validate.go @@ -6,8 +6,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/flagext" ) const ( diff --git a/production/docker/config/loki.yaml b/production/docker/config/loki.yaml index 0a124e5ccfaae..512badb27f8d2 100644 --- a/production/docker/config/loki.yaml +++ b/production/docker/config/loki.yaml @@ -97,6 +97,13 @@ schema_config: index: prefix: index_ period: 24h + - from: 2024-03-29 + store: tsdb + object_store: s3 + schema: v13 + index: + prefix: index_ + period: 24h limits_config: diff --git a/production/helm/loki/CHANGELOG.md b/production/helm/loki/CHANGELOG.md index ff91ede3445a8..6e84181ced6cb 100644 --- a/production/helm/loki/CHANGELOG.md +++ b/production/helm/loki/CHANGELOG.md @@ -17,6 +17,10 @@ Entries should include a reference to the pull request that introduced the chang - [ENHANCEMENT] Allow the definition of PVC annotations for all volume claim templates in statefulsets +## 5.47.2 + +- [ENHANCEMENT] Allow for additional pipeline stages to be configured on the `selfMonitoring` `Podlogs` resource. 
+ ## 5.47.1 - [BUGFIX] Increase default value of backend minReplicas to 3 diff --git a/production/helm/loki/templates/monitoring/pod-logs.yaml b/production/helm/loki/templates/monitoring/pod-logs.yaml index 3fc95c44e22b6..317339d7668f9 100644 --- a/production/helm/loki/templates/monitoring/pod-logs.yaml +++ b/production/helm/loki/templates/monitoring/pod-logs.yaml @@ -18,6 +18,9 @@ metadata: spec: pipelineStages: - cri: { } + {{- with .additionalPipelineStages }} + {{- toYaml . | nindent 4 }} + {{- end }} relabelings: - action: replace sourceLabels: diff --git a/production/helm/loki/values.yaml b/production/helm/loki/values.yaml index 3907d7df9424a..200dc05c8a8dd 100644 --- a/production/helm/loki/values.yaml +++ b/production/helm/loki/values.yaml @@ -691,6 +691,9 @@ monitoring: # -- PodLogs relabel configs to apply to samples before scraping # https://github.com/prometheus-operator/prometheus-operator/blob/master/Documentation/api.md#relabelconfig relabelings: [] + # -- Additional pipeline stages to process logs after scraping + # https://grafana.com/docs/agent/latest/operator/api/#pipelinestagespec-a-namemonitoringgrafanacomv1alpha1pipelinestagespeca + additionalPipelineStages: [] # LogsInstance configuration logsInstance: # -- LogsInstance annotations diff --git a/production/nomad/loki-distributed/config.yml b/production/nomad/loki-distributed/config.yml index 48fc8e166c688..a6abc8ae76d4e 100644 --- a/production/nomad/loki-distributed/config.yml +++ b/production/nomad/loki-distributed/config.yml @@ -72,9 +72,9 @@ frontend_worker: schema_config: configs: - from: 2022-05-15 - store: boltdb-shipper + store: tsdb object_store: s3 - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/production/nomad/loki-simple/config.yml b/production/nomad/loki-simple/config.yml index 79b1d39d57a92..750c6c483cf97 100644 --- a/production/nomad/loki-simple/config.yml +++ b/production/nomad/loki-simple/config.yml @@ -27,9 +27,9 @@ ingester: schema_config: configs: 
- from: 2022-05-15 - store: boltdb-shipper + store: tsdb object_store: s3 - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/production/nomad/loki/config.yml b/production/nomad/loki/config.yml index ceeda7d2e49ef..492f9d59a319d 100644 --- a/production/nomad/loki/config.yml +++ b/production/nomad/loki/config.yml @@ -27,9 +27,9 @@ ingester: schema_config: configs: - from: 2022-05-15 - store: boltdb-shipper + store: tsdb object_store: s3 - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/tools/bloom/inspector/main.go b/tools/bloom/inspector/main.go index bb81d02b260b1..dfcc7c79cd86d 100644 --- a/tools/bloom/inspector/main.go +++ b/tools/bloom/inspector/main.go @@ -4,7 +4,7 @@ import ( "fmt" "os" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func main() { @@ -17,8 +17,8 @@ func main() { fmt.Printf("Block directory: %s\n", path) r := v1.NewDirectoryBlockReader(path) - b := v1.NewBlock(r) - q := v1.NewBlockQuerier(b) + b := v1.NewBlock(r, v1.NewMetrics(nil)) + q := v1.NewBlockQuerier(b, true, v1.DefaultMaxPageSize) md, err := q.Metadata() if err != nil { diff --git a/tools/deprecated-config-checker/main.go b/tools/deprecated-config-checker/main.go index 6ce387c1eec88..61683333899ad 100644 --- a/tools/deprecated-config-checker/main.go +++ b/tools/deprecated-config-checker/main.go @@ -7,7 +7,7 @@ import ( "github.com/fatih/color" - "github.com/grafana/loki/tools/deprecated-config-checker/checker" + "github.com/grafana/loki/v3/tools/deprecated-config-checker/checker" ) const upgradeGuideURL = "https://grafana.com/docs/loki/latest/setup/upgrade/" diff --git a/tools/doc-generator/main.go b/tools/doc-generator/main.go index 3d28f91eaa29c..c2748cee925e4 100644 --- a/tools/doc-generator/main.go +++ b/tools/doc-generator/main.go @@ -13,8 +13,8 @@ import ( "strings" "text/template" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/tools/doc-generator/parse" 
+ "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/tools/doc-generator/parse" ) const ( diff --git a/tools/doc-generator/parse/parser.go b/tools/doc-generator/parse/parser.go index d5896a0666bf5..f565bf2dc9c90 100644 --- a/tools/doc-generator/parse/parser.go +++ b/tools/doc-generator/parse/parser.go @@ -22,10 +22,10 @@ import ( prometheus_config "github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/pkg/ruler/util" - storage_config "github.com/grafana/loki/pkg/storage/config" - util_validation "github.com/grafana/loki/pkg/util/validation" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/ruler/util" + storage_config "github.com/grafana/loki/v3/pkg/storage/config" + util_validation "github.com/grafana/loki/v3/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/tools/doc-generator/parse/root_blocks.go b/tools/doc-generator/parse/root_blocks.go index 25a4e785ded63..37debb7c41765 100644 --- a/tools/doc-generator/parse/root_blocks.go +++ b/tools/doc-generator/parse/root_blocks.go @@ -13,36 +13,36 @@ import ( "github.com/grafana/dskit/runtimeconfig" "github.com/grafana/dskit/server" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/bloomcompactor" - "github.com/grafana/loki/pkg/bloomgateway" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/distributor" - "github.com/grafana/loki/pkg/ingester" - ingester_client "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/loki/common" - frontend "github.com/grafana/loki/pkg/lokifrontend" - "github.com/grafana/loki/pkg/querier" - "github.com/grafana/loki/pkg/querier/queryrange" - querier_worker "github.com/grafana/loki/pkg/querier/worker" - "github.com/grafana/loki/pkg/ruler" - "github.com/grafana/loki/pkg/scheduler" - "github.com/grafana/loki/pkg/storage" - 
"github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client/alibaba" - "github.com/grafana/loki/pkg/storage/chunk/client/aws" - "github.com/grafana/loki/pkg/storage/chunk/client/azure" - "github.com/grafana/loki/pkg/storage/chunk/client/baidubce" - "github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/ibmcloud" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/openstack" - storage_config "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/tracing" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/bloomcompactor" + "github.com/grafana/loki/v3/pkg/bloomgateway" + "github.com/grafana/loki/v3/pkg/compactor" + "github.com/grafana/loki/v3/pkg/distributor" + "github.com/grafana/loki/v3/pkg/ingester" + ingester_client "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/loki/common" + frontend "github.com/grafana/loki/v3/pkg/lokifrontend" + "github.com/grafana/loki/v3/pkg/querier" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + querier_worker "github.com/grafana/loki/v3/pkg/querier/worker" + "github.com/grafana/loki/v3/pkg/ruler" + "github.com/grafana/loki/v3/pkg/scheduler" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/alibaba" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/aws" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/azure" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/baidubce" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + 
"github.com/grafana/loki/v3/pkg/storage/chunk/client/ibmcloud" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/openstack" + storage_config "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/tracing" + "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/tools/doc-generator/writer.go b/tools/doc-generator/writer.go index f182d0a8600c5..7a04c891ed2bc 100644 --- a/tools/doc-generator/writer.go +++ b/tools/doc-generator/writer.go @@ -15,7 +15,7 @@ import ( "github.com/mitchellh/go-wordwrap" "gopkg.in/yaml.v3" - "github.com/grafana/loki/tools/doc-generator/parse" + "github.com/grafana/loki/v3/tools/doc-generator/parse" ) type specWriter struct { diff --git a/tools/lambda-promtail/go.mod b/tools/lambda-promtail/go.mod index e6a8e27578c21..2a40fa4e71bf5 100644 --- a/tools/lambda-promtail/go.mod +++ b/tools/lambda-promtail/go.mod @@ -111,7 +111,7 @@ require ( google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f // indirect google.golang.org/grpc v1.52.3 // indirect - google.golang.org/protobuf v1.28.1 // indirect + google.golang.org/protobuf v1.33.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/tools/lambda-promtail/go.sum b/tools/lambda-promtail/go.sum index bedf3022d5312..dbc6c52383867 100644 --- a/tools/lambda-promtail/go.sum +++ b/tools/lambda-promtail/go.sum @@ -1066,8 +1066,9 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod 
h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/tools/querytee/response_comparator.go b/tools/querytee/response_comparator.go index ae4db40185aee..04a28fff85c1d 100644 --- a/tools/querytee/response_comparator.go +++ b/tools/querytee/response_comparator.go @@ -12,8 +12,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/loghttp" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/loghttp" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // SamplesComparatorFunc helps with comparing different types of samples coming from /api/v1/query and /api/v1/query_range routes. 
@@ -50,7 +50,7 @@ type SamplesComparator struct { sampleTypesComparator map[string]SamplesComparatorFunc } -// RegisterSamplesComparator helps with registering custom sample types +// RegisterSamplesType helps with registering custom sample types func (s *SamplesComparator) RegisterSamplesType(samplesType string, comparator SamplesComparatorFunc) { s.sampleTypesComparator[samplesType] = comparator } diff --git a/tools/tsdb/bloom-tester/concurrent.go b/tools/tsdb/bloom-tester/concurrent.go index c42d403809aea..a6a2382a2a4ad 100644 --- a/tools/tsdb/bloom-tester/concurrent.go +++ b/tools/tsdb/bloom-tester/concurrent.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) type pool struct { diff --git a/tools/tsdb/bloom-tester/lib.go b/tools/tsdb/bloom-tester/lib.go index 2512a3e66bee4..0d3a505668047 100644 --- a/tools/tsdb/bloom-tester/lib.go +++ b/tools/tsdb/bloom-tester/lib.go @@ -7,8 +7,8 @@ import ( "flag" "fmt" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" - tsdbindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" + tsdbindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" "hash/fnv" "math" @@ -23,17 +23,17 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage" - bt "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - shipperindex 
"github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/tools/tsdb/helpers" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage" + bt "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/tools/tsdb/helpers" ) const ( @@ -281,8 +281,8 @@ func analyze(metrics *Metrics, sampler Sampler, indexShipper indexshipper.IndexS casted := idx.(*tsdb.TSDBFile).Index.(*tsdb.TSDBIndex) _ = casted.ForSeries( context.Background(), - nil, model.Earliest, model.Latest, - func(ls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) { + "", nil, model.Earliest, model.Latest, + func(ls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) (stop bool) { seriesString := ls.String() seriesStringHash := FNV32a(seriesString) pos, _ := strconv.Atoi(seriesStringHash) @@ -399,6 +399,7 @@ func analyze(metrics *Metrics, sampler Sampler, indexShipper indexshipper.IndexS )*/ } // for each series + return false }, labels.MustNewMatcher(labels.MatchEqual, "", ""), ) diff --git a/tools/tsdb/bloom-tester/main.go b/tools/tsdb/bloom-tester/main.go index 916796b917042..ab5b9dfbcf1c1 100644 --- a/tools/tsdb/bloom-tester/main.go +++ b/tools/tsdb/bloom-tester/main.go @@ -7,7 +7,7 @@ import ( "github.com/go-kit/log/level" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // go 
build ./tools/tsdb/bloom-tester && HOSTNAME="bloom-tester-121" NUM_TESTERS="128" BUCKET="19625" DIR=/Users/progers/dev/bloom WRITE_MODE="false" BUCKET_PREFIX="new-experiments" ./tools/tsdb/bloom-tester/bloom-tester --config.file=/Users/progers/dev/bloom/config.yaml diff --git a/tools/tsdb/bloom-tester/metrics.go b/tools/tsdb/bloom-tester/metrics.go index cc0b0b345b7ff..3eea766b95f51 100644 --- a/tools/tsdb/bloom-tester/metrics.go +++ b/tools/tsdb/bloom-tester/metrics.go @@ -4,9 +4,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" ) type Experiment struct { diff --git a/tools/tsdb/bloom-tester/readlib.go b/tools/tsdb/bloom-tester/readlib.go index 6e5c492f120de..5886b13e2a9d2 100644 --- a/tools/tsdb/bloom-tester/readlib.go +++ b/tools/tsdb/bloom-tester/readlib.go @@ -7,13 +7,13 @@ import ( "github.com/grafana/dskit/services" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - bt "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" - "github.com/grafana/loki/pkg/storage/chunk" - tsdbindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + bt "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/storage/chunk" + tsdbindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" "math" "os" @@ -27,18 +27,18 @@ import ( "github.com/prometheus/common/model" 
"github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" //indexshipper_index "github.com/grafana/loki/pkg/storage/stores/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" //"github.com/grafana/loki/pkg/storage/stores/tsdb" //"github.com/grafana/loki/pkg/storage/stores/tsdb/index" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/tools/tsdb/helpers" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/tools/tsdb/helpers" ) var queryExperiments = []QueryExperiment{ @@ -141,8 +141,9 @@ func analyzeRead(metrics *Metrics, sampler Sampler, shipper indexshipper.IndexSh casted := idx.(*tsdb.TSDBFile).Index.(*tsdb.TSDBIndex) _ = casted.ForSeries( context.Background(), - nil, model.Earliest, model.Latest, - func(ls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) { + "", nil, + model.Earliest, model.Latest, + func(ls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) (stop bool) { seriesString := ls.String() seriesStringHash := FNV32a(seriesString) pos, _ := strconv.Atoi(seriesStringHash) @@ -272,6 +273,7 @@ func analyzeRead(metrics *Metrics, sampler Sampler, shipper indexshipper.IndexSh ) */ } // For every series + return false }, labels.MustNewMatcher(labels.MatchEqual, "", ""), ) diff --git a/tools/tsdb/bloom-tester/tokenizer.go b/tools/tsdb/bloom-tester/tokenizer.go index 
3d82c770020ef..c89844240455b 100644 --- a/tools/tsdb/bloom-tester/tokenizer.go +++ b/tools/tsdb/bloom-tester/tokenizer.go @@ -5,22 +5,22 @@ import ( "math" "time" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/encoding" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/encoding" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type metrics struct { diff --git a/tools/tsdb/helpers/setup.go b/tools/tsdb/helpers/setup.go index a0eb490269fc9..807adb4dd9897 100644 --- a/tools/tsdb/helpers/setup.go +++ b/tools/tsdb/helpers/setup.go @@ -12,14 +12,14 @@ import ( "github.com/prometheus/client_golang/prometheus/collectors" "github.com/prometheus/client_golang/prometheus/collectors/version" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/util/cfg" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + 
"github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/util/cfg" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) func Setup() (loki.Config, services.Service, string, error) { diff --git a/tools/tsdb/helpers/util.go b/tools/tsdb/helpers/util.go index b30e9e9b1f5cc..8cc0602045b08 100644 --- a/tools/tsdb/helpers/util.go +++ b/tools/tsdb/helpers/util.go @@ -12,8 +12,8 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" ) const ( diff --git a/tools/tsdb/index-analyzer/analytics.go b/tools/tsdb/index-analyzer/analytics.go index 7558c17fc8030..de01d47d6ec00 100644 --- a/tools/tsdb/index-analyzer/analytics.go +++ b/tools/tsdb/index-analyzer/analytics.go @@ -7,10 +7,10 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - tsdb_index "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + tsdb_index "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func analyze(indexShipper indexshipper.IndexShipper, tableName string, tenants []string) error { @@ -70,16 +70,17 @@ func analyze(indexShipper indexshipper.IndexShipper, tableName string, tenants [ err = casted.Index.(*tsdb.TSDBIndex).ForSeries( context.Background(), - nil, + "", nil, model.Earliest, 
model.Latest, - func(ls labels.Labels, fp model.Fingerprint, chks []tsdb_index.ChunkMeta) { + func(ls labels.Labels, fp model.Fingerprint, chks []tsdb_index.ChunkMeta) (stop bool) { if len(chks) > maxChunksPerSeries { maxChunksPerSeries = len(chks) if len(chks) > 1000 { seriesOver1kChunks++ } } + return false }, labels.MustNewMatcher(labels.MatchEqual, "", ""), ) diff --git a/tools/tsdb/index-analyzer/main.go b/tools/tsdb/index-analyzer/main.go index fd59bd4792fdf..2d19ad9c3c421 100644 --- a/tools/tsdb/index-analyzer/main.go +++ b/tools/tsdb/index-analyzer/main.go @@ -5,11 +5,11 @@ import ( "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/tools/tsdb/helpers" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/tools/tsdb/helpers" ) // go build ./tools/tsdb/index-analyzer && BUCKET=19453 DIR=/tmp/loki-index-analysis ./index-analyzer --config.file=/tmp/loki-config.yaml diff --git a/tools/tsdb/migrate-versions/main.go b/tools/tsdb/migrate-versions/main.go index b458c80d4c1b8..2c49906a56452 100644 --- a/tools/tsdb/migrate-versions/main.go +++ b/tools/tsdb/migrate-versions/main.go @@ -17,17 +17,17 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - shipperstorage 
"github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - tsdbindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/util/cfg" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + shipperstorage "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + tsdbindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/util/cfg" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/tools/tsdb/migrate-versions/main_test.go b/tools/tsdb/migrate-versions/main_test.go index 2f4690fde0a7e..7ac68521545bd 100644 --- a/tools/tsdb/migrate-versions/main_test.go +++ b/tools/tsdb/migrate-versions/main_test.go @@ -14,13 +14,13 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - shipperstorage "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + shipperstorage 
"github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/tools/tsdb/tsdb-map/main.go b/tools/tsdb/tsdb-map/main.go index 7748b48c800c2..0a72ac98db13d 100644 --- a/tools/tsdb/tsdb-map/main.go +++ b/tools/tsdb/tsdb-map/main.go @@ -11,12 +11,12 @@ import ( "go.etcd.io/bbolt" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/config" - boltdbcompactor "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb/compactor" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/config" + boltdbcompactor "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb/compactor" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" ) var ( diff --git a/tools/tsdb/tsdb-map/main_test.go b/tools/tsdb/tsdb-map/main_test.go index bf8c802db8456..56fdcdbc3b255 100644 --- a/tools/tsdb/tsdb-map/main_test.go +++ b/tools/tsdb/tsdb-map/main_test.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + 
"github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func TestExtractChecksum(t *testing.T) { diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go new file mode 100644 index 0000000000000..2ce96d9d3887e --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go @@ -0,0 +1,105 @@ +package mocktracer + +import ( + "fmt" + "reflect" + "time" + + "github.com/opentracing/opentracing-go/log" +) + +// MockLogRecord represents data logged to a Span via Span.LogFields or +// Span.LogKV. +type MockLogRecord struct { + Timestamp time.Time + Fields []MockKeyValue +} + +// MockKeyValue represents a single key:value pair. +type MockKeyValue struct { + Key string + + // All MockLogRecord values are coerced to strings via fmt.Sprint(), though + // we retain their type separately. + ValueKind reflect.Kind + ValueString string +} + +// EmitString belongs to the log.Encoder interface +func (m *MockKeyValue) EmitString(key, value string) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitBool belongs to the log.Encoder interface +func (m *MockKeyValue) EmitBool(key string, value bool) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitInt belongs to the log.Encoder interface +func (m *MockKeyValue) EmitInt(key string, value int) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitInt32 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitInt32(key string, value int32) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitInt64 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitInt64(key string, value int64) { + m.Key = key + m.ValueKind = 
reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitUint32 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitUint32(key string, value uint32) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitUint64 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitUint64(key string, value uint64) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitFloat32 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitFloat32(key string, value float32) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitFloat64 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitFloat64(key string, value float64) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitObject belongs to the log.Encoder interface +func (m *MockKeyValue) EmitObject(key string, value interface{}) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitLazyLogger belongs to the log.Encoder interface +func (m *MockKeyValue) EmitLazyLogger(value log.LazyLogger) { + var meta MockKeyValue + value(&meta) + m.Key = meta.Key + m.ValueKind = meta.ValueKind + m.ValueString = meta.ValueString +} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go new file mode 100644 index 0000000000000..8c7932ce65b30 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go @@ -0,0 +1,284 @@ +package mocktracer + +import ( + "fmt" + "sync" + "sync/atomic" + "time" + + "github.com/opentracing/opentracing-go" + "github.com/opentracing/opentracing-go/ext" + "github.com/opentracing/opentracing-go/log" +) + +// MockSpanContext is an opentracing.SpanContext implementation. 
+// +// It is entirely unsuitable for production use, but appropriate for tests +// that want to verify tracing behavior in other frameworks/applications. +// +// By default all spans have Sampled=true flag, unless {"sampling.priority": 0} +// tag is set. +type MockSpanContext struct { + TraceID int + SpanID int + Sampled bool + Baggage map[string]string +} + +var mockIDSource = uint32(42) + +func nextMockID() int { + return int(atomic.AddUint32(&mockIDSource, 1)) +} + +// ForeachBaggageItem belongs to the SpanContext interface +func (c MockSpanContext) ForeachBaggageItem(handler func(k, v string) bool) { + for k, v := range c.Baggage { + if !handler(k, v) { + break + } + } +} + +// WithBaggageItem creates a new context with an extra baggage item. +func (c MockSpanContext) WithBaggageItem(key, value string) MockSpanContext { + var newBaggage map[string]string + if c.Baggage == nil { + newBaggage = map[string]string{key: value} + } else { + newBaggage = make(map[string]string, len(c.Baggage)+1) + for k, v := range c.Baggage { + newBaggage[k] = v + } + newBaggage[key] = value + } + // Use positional parameters so the compiler will help catch new fields. + return MockSpanContext{c.TraceID, c.SpanID, c.Sampled, newBaggage} +} + +// MockSpan is an opentracing.Span implementation that exports its internal +// state for testing purposes. +type MockSpan struct { + sync.RWMutex + + ParentID int + + OperationName string + StartTime time.Time + FinishTime time.Time + + // All of the below are protected by the embedded RWMutex. 
+ SpanContext MockSpanContext + tags map[string]interface{} + logs []MockLogRecord + tracer *MockTracer +} + +func newMockSpan(t *MockTracer, name string, opts opentracing.StartSpanOptions) *MockSpan { + tags := opts.Tags + if tags == nil { + tags = map[string]interface{}{} + } + traceID := nextMockID() + parentID := int(0) + var baggage map[string]string + sampled := true + if len(opts.References) > 0 { + traceID = opts.References[0].ReferencedContext.(MockSpanContext).TraceID + parentID = opts.References[0].ReferencedContext.(MockSpanContext).SpanID + sampled = opts.References[0].ReferencedContext.(MockSpanContext).Sampled + baggage = opts.References[0].ReferencedContext.(MockSpanContext).Baggage + } + spanContext := MockSpanContext{traceID, nextMockID(), sampled, baggage} + startTime := opts.StartTime + if startTime.IsZero() { + startTime = time.Now() + } + return &MockSpan{ + ParentID: parentID, + OperationName: name, + StartTime: startTime, + tags: tags, + logs: []MockLogRecord{}, + SpanContext: spanContext, + + tracer: t, + } +} + +// Tags returns a copy of tags accumulated by the span so far +func (s *MockSpan) Tags() map[string]interface{} { + s.RLock() + defer s.RUnlock() + tags := make(map[string]interface{}) + for k, v := range s.tags { + tags[k] = v + } + return tags +} + +// Tag returns a single tag +func (s *MockSpan) Tag(k string) interface{} { + s.RLock() + defer s.RUnlock() + return s.tags[k] +} + +// Logs returns a copy of logs accumulated in the span so far +func (s *MockSpan) Logs() []MockLogRecord { + s.RLock() + defer s.RUnlock() + logs := make([]MockLogRecord, len(s.logs)) + copy(logs, s.logs) + return logs +} + +// Context belongs to the Span interface +func (s *MockSpan) Context() opentracing.SpanContext { + s.Lock() + defer s.Unlock() + return s.SpanContext +} + +// SetTag belongs to the Span interface +func (s *MockSpan) SetTag(key string, value interface{}) opentracing.Span { + s.Lock() + defer s.Unlock() + if key == 
string(ext.SamplingPriority) { + if v, ok := value.(uint16); ok { + s.SpanContext.Sampled = v > 0 + return s + } + if v, ok := value.(int); ok { + s.SpanContext.Sampled = v > 0 + return s + } + } + s.tags[key] = value + return s +} + +// SetBaggageItem belongs to the Span interface +func (s *MockSpan) SetBaggageItem(key, val string) opentracing.Span { + s.Lock() + defer s.Unlock() + s.SpanContext = s.SpanContext.WithBaggageItem(key, val) + return s +} + +// BaggageItem belongs to the Span interface +func (s *MockSpan) BaggageItem(key string) string { + s.RLock() + defer s.RUnlock() + return s.SpanContext.Baggage[key] +} + +// Finish belongs to the Span interface +func (s *MockSpan) Finish() { + s.Lock() + s.FinishTime = time.Now() + s.Unlock() + s.tracer.recordSpan(s) +} + +// FinishWithOptions belongs to the Span interface +func (s *MockSpan) FinishWithOptions(opts opentracing.FinishOptions) { + s.Lock() + s.FinishTime = opts.FinishTime + s.Unlock() + + // Handle any late-bound LogRecords. + for _, lr := range opts.LogRecords { + s.logFieldsWithTimestamp(lr.Timestamp, lr.Fields...) + } + // Handle (deprecated) BulkLogData. + for _, ld := range opts.BulkLogData { + if ld.Payload != nil { + s.logFieldsWithTimestamp( + ld.Timestamp, + log.String("event", ld.Event), + log.Object("payload", ld.Payload)) + } else { + s.logFieldsWithTimestamp( + ld.Timestamp, + log.String("event", ld.Event)) + } + } + + s.tracer.recordSpan(s) +} + +// String allows printing span for debugging +func (s *MockSpan) String() string { + return fmt.Sprintf( + "traceId=%d, spanId=%d, parentId=%d, sampled=%t, name=%s", + s.SpanContext.TraceID, s.SpanContext.SpanID, s.ParentID, + s.SpanContext.Sampled, s.OperationName) +} + +// LogFields belongs to the Span interface +func (s *MockSpan) LogFields(fields ...log.Field) { + s.logFieldsWithTimestamp(time.Now(), fields...) 
+} + +// The caller MUST NOT hold s.Lock +func (s *MockSpan) logFieldsWithTimestamp(ts time.Time, fields ...log.Field) { + lr := MockLogRecord{ + Timestamp: ts, + Fields: make([]MockKeyValue, len(fields)), + } + for i, f := range fields { + outField := &(lr.Fields[i]) + f.Marshal(outField) + } + + s.Lock() + defer s.Unlock() + s.logs = append(s.logs, lr) +} + +// LogKV belongs to the Span interface. +// +// This implementations coerces all "values" to strings, though that is not +// something all implementations need to do. Indeed, a motivated person can and +// probably should have this do a typed switch on the values. +func (s *MockSpan) LogKV(keyValues ...interface{}) { + if len(keyValues)%2 != 0 { + s.LogFields(log.Error(fmt.Errorf("Non-even keyValues len: %v", len(keyValues)))) + return + } + fields, err := log.InterleavedKVToFields(keyValues...) + if err != nil { + s.LogFields(log.Error(err), log.String("function", "LogKV")) + return + } + s.LogFields(fields...) +} + +// LogEvent belongs to the Span interface +func (s *MockSpan) LogEvent(event string) { + s.LogFields(log.String("event", event)) +} + +// LogEventWithPayload belongs to the Span interface +func (s *MockSpan) LogEventWithPayload(event string, payload interface{}) { + s.LogFields(log.String("event", event), log.Object("payload", payload)) +} + +// Log belongs to the Span interface +func (s *MockSpan) Log(data opentracing.LogData) { + panic("MockSpan.Log() no longer supported") +} + +// SetOperationName belongs to the Span interface +func (s *MockSpan) SetOperationName(operationName string) opentracing.Span { + s.Lock() + defer s.Unlock() + s.OperationName = operationName + return s +} + +// Tracer belongs to the Span interface +func (s *MockSpan) Tracer() opentracing.Tracer { + return s.tracer +} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go new file mode 100644 index 
0000000000000..4533da7b1f782 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go @@ -0,0 +1,105 @@ +package mocktracer + +import ( + "sync" + + "github.com/opentracing/opentracing-go" +) + +// New returns a MockTracer opentracing.Tracer implementation that's intended +// to facilitate tests of OpenTracing instrumentation. +func New() *MockTracer { + t := &MockTracer{ + finishedSpans: []*MockSpan{}, + injectors: make(map[interface{}]Injector), + extractors: make(map[interface{}]Extractor), + } + + // register default injectors/extractors + textPropagator := new(TextMapPropagator) + t.RegisterInjector(opentracing.TextMap, textPropagator) + t.RegisterExtractor(opentracing.TextMap, textPropagator) + + httpPropagator := &TextMapPropagator{HTTPHeaders: true} + t.RegisterInjector(opentracing.HTTPHeaders, httpPropagator) + t.RegisterExtractor(opentracing.HTTPHeaders, httpPropagator) + + return t +} + +// MockTracer is only intended for testing OpenTracing instrumentation. +// +// It is entirely unsuitable for production use, but appropriate for tests +// that want to verify tracing behavior in other frameworks/applications. +type MockTracer struct { + sync.RWMutex + finishedSpans []*MockSpan + injectors map[interface{}]Injector + extractors map[interface{}]Extractor +} + +// FinishedSpans returns all spans that have been Finish()'ed since the +// MockTracer was constructed or since the last call to its Reset() method. +func (t *MockTracer) FinishedSpans() []*MockSpan { + t.RLock() + defer t.RUnlock() + spans := make([]*MockSpan, len(t.finishedSpans)) + copy(spans, t.finishedSpans) + return spans +} + +// Reset clears the internally accumulated finished spans. Note that any +// extant MockSpans will still append to finishedSpans when they Finish(), +// even after a call to Reset(). +func (t *MockTracer) Reset() { + t.Lock() + defer t.Unlock() + t.finishedSpans = []*MockSpan{} +} + +// StartSpan belongs to the Tracer interface. 
+func (t *MockTracer) StartSpan(operationName string, opts ...opentracing.StartSpanOption) opentracing.Span { + sso := opentracing.StartSpanOptions{} + for _, o := range opts { + o.Apply(&sso) + } + return newMockSpan(t, operationName, sso) +} + +// RegisterInjector registers injector for given format +func (t *MockTracer) RegisterInjector(format interface{}, injector Injector) { + t.injectors[format] = injector +} + +// RegisterExtractor registers extractor for given format +func (t *MockTracer) RegisterExtractor(format interface{}, extractor Extractor) { + t.extractors[format] = extractor +} + +// Inject belongs to the Tracer interface. +func (t *MockTracer) Inject(sm opentracing.SpanContext, format interface{}, carrier interface{}) error { + spanContext, ok := sm.(MockSpanContext) + if !ok { + return opentracing.ErrInvalidSpanContext + } + injector, ok := t.injectors[format] + if !ok { + return opentracing.ErrUnsupportedFormat + } + return injector.Inject(spanContext, carrier) +} + +// Extract belongs to the Tracer interface. 
+func (t *MockTracer) Extract(format interface{}, carrier interface{}) (opentracing.SpanContext, error) { + extractor, ok := t.extractors[format] + if !ok { + return nil, opentracing.ErrUnsupportedFormat + } + return extractor.Extract(carrier) +} + +func (t *MockTracer) recordSpan(span *MockSpan) { + t.Lock() + defer t.Unlock() + t.finishedSpans = append(t.finishedSpans, span) +} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go new file mode 100644 index 0000000000000..8364f1d182528 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go @@ -0,0 +1,120 @@ +package mocktracer + +import ( + "fmt" + "net/url" + "strconv" + "strings" + + "github.com/opentracing/opentracing-go" +) + +const mockTextMapIdsPrefix = "mockpfx-ids-" +const mockTextMapBaggagePrefix = "mockpfx-baggage-" + +var emptyContext = MockSpanContext{} + +// Injector is responsible for injecting SpanContext instances in a manner suitable +// for propagation via a format-specific "carrier" object. Typically the +// injection will take place across an RPC boundary, but message queues and +// other IPC mechanisms are also reasonable places to use an Injector. +type Injector interface { + // Inject takes `SpanContext` and injects it into `carrier`. The actual type + // of `carrier` depends on the `format` passed to `Tracer.Inject()`. + // + // Implementations may return opentracing.ErrInvalidCarrier or any other + // implementation-specific error if injection fails. + Inject(ctx MockSpanContext, carrier interface{}) error +} + +// Extractor is responsible for extracting SpanContext instances from a +// format-specific "carrier" object. Typically the extraction will take place +// on the server side of an RPC boundary, but message queues and other IPC +// mechanisms are also reasonable places to use an Extractor. 
+type Extractor interface { + // Extract decodes a SpanContext instance from the given `carrier`, + // or (nil, opentracing.ErrSpanContextNotFound) if no context could + // be found in the `carrier`. + Extract(carrier interface{}) (MockSpanContext, error) +} + +// TextMapPropagator implements Injector/Extractor for TextMap and HTTPHeaders formats. +type TextMapPropagator struct { + HTTPHeaders bool +} + +// Inject implements the Injector interface +func (t *TextMapPropagator) Inject(spanContext MockSpanContext, carrier interface{}) error { + writer, ok := carrier.(opentracing.TextMapWriter) + if !ok { + return opentracing.ErrInvalidCarrier + } + // Ids: + writer.Set(mockTextMapIdsPrefix+"traceid", strconv.Itoa(spanContext.TraceID)) + writer.Set(mockTextMapIdsPrefix+"spanid", strconv.Itoa(spanContext.SpanID)) + writer.Set(mockTextMapIdsPrefix+"sampled", fmt.Sprint(spanContext.Sampled)) + // Baggage: + for baggageKey, baggageVal := range spanContext.Baggage { + safeVal := baggageVal + if t.HTTPHeaders { + safeVal = url.QueryEscape(baggageVal) + } + writer.Set(mockTextMapBaggagePrefix+baggageKey, safeVal) + } + return nil +} + +// Extract implements the Extractor interface +func (t *TextMapPropagator) Extract(carrier interface{}) (MockSpanContext, error) { + reader, ok := carrier.(opentracing.TextMapReader) + if !ok { + return emptyContext, opentracing.ErrInvalidCarrier + } + rval := MockSpanContext{0, 0, true, nil} + err := reader.ForeachKey(func(key, val string) error { + lowerKey := strings.ToLower(key) + switch { + case lowerKey == mockTextMapIdsPrefix+"traceid": + // Ids: + i, err := strconv.Atoi(val) + if err != nil { + return err + } + rval.TraceID = i + case lowerKey == mockTextMapIdsPrefix+"spanid": + // Ids: + i, err := strconv.Atoi(val) + if err != nil { + return err + } + rval.SpanID = i + case lowerKey == mockTextMapIdsPrefix+"sampled": + b, err := strconv.ParseBool(val) + if err != nil { + return err + } + rval.Sampled = b + case 
strings.HasPrefix(lowerKey, mockTextMapBaggagePrefix): + // Baggage: + if rval.Baggage == nil { + rval.Baggage = make(map[string]string) + } + safeVal := val + if t.HTTPHeaders { + // unescape errors are ignored, nothing can be done + if rawVal, err := url.QueryUnescape(val); err == nil { + safeVal = rawVal + } + } + rval.Baggage[lowerKey[len(mockTextMapBaggagePrefix):]] = safeVal + } + return nil + }) + if rval.TraceID == 0 || rval.SpanID == 0 { + return emptyContext, opentracing.ErrSpanContextNotFound + } + if err != nil { + return emptyContext, err + } + return rval, nil +} diff --git a/vendor/modules.txt b/vendor/modules.txt index 141a17a31996b..9bbf3e0af8662 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -1225,6 +1225,7 @@ github.com/opentracing-contrib/go-stdlib/nethttp github.com/opentracing/opentracing-go github.com/opentracing/opentracing-go/ext github.com/opentracing/opentracing-go/log +github.com/opentracing/opentracing-go/mocktracer # github.com/oschwald/geoip2-golang v1.9.0 ## explicit; go 1.19 github.com/oschwald/geoip2-golang