diff --git a/catalog/CHANGELOG.md b/catalog/CHANGELOG.md
index 5bb0bf9fd17..ab6c3f62946 100644
--- a/catalog/CHANGELOG.md
+++ b/catalog/CHANGELOG.md
@@ -17,6 +17,9 @@ where verb is one of
## Changes
+- [Fixed] Fix some doc URLs in catalog ([#4205](https://github.com/quiltdata/quilt/pull/4205))
+- [Changed] Switch from S3 Select to GraphQL API calls for getting access counts ([#4218](https://github.com/quiltdata/quilt/pull/4218))
+- [Changed] Athena: improve loading state and error visuals; fix minor bugs; alphabetize and persist selection in workgroups, catalog names, and databases ([#4208](https://github.com/quiltdata/quilt/pull/4208))
- [Changed] Show stack release version in footer ([#4200](https://github.com/quiltdata/quilt/pull/4200))
- [Added] Selective package downloading ([#4173](https://github.com/quiltdata/quilt/pull/4173))
- [Added] Qurator Omni: initial public release ([#4032](https://github.com/quiltdata/quilt/pull/4032), [#4181](https://github.com/quiltdata/quilt/pull/4181))
diff --git a/catalog/Dockerfile b/catalog/Dockerfile
index a58b9f30e70..ad288b8fe8a 100644
--- a/catalog/Dockerfile
+++ b/catalog/Dockerfile
@@ -1,4 +1,4 @@
-FROM amazonlinux:2023.6.20241031.0
+FROM amazonlinux:2023.6.20241111.0
MAINTAINER Quilt Data, Inc. contact@quiltdata.io
ENV LC_ALL=C.UTF-8
diff --git a/catalog/app/components/FileEditor/QuiltConfigEditor/BucketPreferences.tsx b/catalog/app/components/FileEditor/QuiltConfigEditor/BucketPreferences.tsx
index 67737f681f5..9abbd53a7a4 100644
--- a/catalog/app/components/FileEditor/QuiltConfigEditor/BucketPreferences.tsx
+++ b/catalog/app/components/FileEditor/QuiltConfigEditor/BucketPreferences.tsx
@@ -12,7 +12,10 @@ function Header() {
return (
Configuration for Catalog UI: show and hide features, set default values. See{' '}
-
+
the docs
diff --git a/catalog/app/components/FileEditor/QuiltConfigEditor/Workflows.tsx b/catalog/app/components/FileEditor/QuiltConfigEditor/Workflows.tsx
index 23175e61171..5288698cdeb 100644
--- a/catalog/app/components/FileEditor/QuiltConfigEditor/Workflows.tsx
+++ b/catalog/app/components/FileEditor/QuiltConfigEditor/Workflows.tsx
@@ -18,7 +18,7 @@ function Header() {
return (
Configuration for data quality workflows. See{' '}
-
+
the docs
diff --git a/catalog/app/containers/Admin/Status/Status.tsx b/catalog/app/containers/Admin/Status/Status.tsx
index 79cd6360655..c91d6b74d2c 100644
--- a/catalog/app/containers/Admin/Status/Status.tsx
+++ b/catalog/app/containers/Admin/Status/Status.tsx
@@ -54,7 +54,10 @@ export default function Status() {
GxP and other compliance regimes.
-
+
Learn more
{' '}
or contact sales.
diff --git a/catalog/app/containers/Admin/UsersAndRoles/SsoConfig.tsx b/catalog/app/containers/Admin/UsersAndRoles/SsoConfig.tsx
index b740034202e..26f292af45f 100644
--- a/catalog/app/containers/Admin/UsersAndRoles/SsoConfig.tsx
+++ b/catalog/app/containers/Admin/UsersAndRoles/SsoConfig.tsx
@@ -121,7 +121,10 @@ function Form({
Learn more about{' '}
-
+
SSO permissions mapping
.
diff --git a/catalog/app/containers/Bucket/CodeSamples/Dir.tsx b/catalog/app/containers/Bucket/CodeSamples/Dir.tsx
index bfd77c042be..5b3491691d1 100644
--- a/catalog/app/containers/Bucket/CodeSamples/Dir.tsx
+++ b/catalog/app/containers/Bucket/CodeSamples/Dir.tsx
@@ -14,9 +14,9 @@ const TEMPLATES = {
dedent`
import quilt3 as q3
b = q3.Bucket("s3://${bucket}")
- # List files [[${docs}/api-reference/bucket#bucket.ls]]
+ # List files [[${docs}/quilt-python-sdk-developers/api-reference/bucket#bucket.ls]]
b.ls("${path}")
- # Download [[${docs}/api-reference/bucket#bucket.fetch]]
+ # Download [[${docs}/quilt-python-sdk-developers/api-reference/bucket#bucket.fetch]]
b.fetch("${path}", "./${dest}")
`,
CLI: (bucket: string, path: string, dest: string) =>
diff --git a/catalog/app/containers/Bucket/CodeSamples/File.tsx b/catalog/app/containers/Bucket/CodeSamples/File.tsx
index f1eea186809..006609ad393 100644
--- a/catalog/app/containers/Bucket/CodeSamples/File.tsx
+++ b/catalog/app/containers/Bucket/CodeSamples/File.tsx
@@ -14,7 +14,7 @@ const TEMPLATES = {
dedent`
import quilt3 as q3
b = q3.Bucket("s3://${bucket}")
- # Download [[${docs}/api-reference/bucket#bucket.fetch]]
+ # Download [[${docs}/quilt-python-sdk-developers/api-reference/bucket#bucket.fetch]]
b.fetch("${path}", "./${basename(path)}")
`,
CLI: (bucket: string, path: string) =>
diff --git a/catalog/app/containers/Bucket/CodeSamples/Package.tsx b/catalog/app/containers/Bucket/CodeSamples/Package.tsx
index 7dbce6e97ee..c2763b3d750 100644
--- a/catalog/app/containers/Bucket/CodeSamples/Package.tsx
+++ b/catalog/app/containers/Bucket/CodeSamples/Package.tsx
@@ -19,16 +19,16 @@ const TEMPLATES = {
const hashPy = hashDisplay && `, top_hash="${hashDisplay}"`
return dedent`
import quilt3 as q3
- # Browse [[${docs}/api-reference/package#package.browse]]
+ # Browse [[${docs}/quilt-python-sdk-developers/api-reference/package#package.browse]]
p = q3.Package.browse("${name}"${hashPy}, registry="s3://${bucket}")
- # make changes to package adding individual files [[${docs}/api-reference/package#package.set]]
+ # make changes to package adding individual files [[${docs}/quilt-python-sdk-developers/api-reference/package#package.set]]
p.set("data.csv", "data.csv")
- # or whole directories [[${docs}/api-reference/package#package.set_dir]]
+ # or whole directories [[${docs}/quilt-python-sdk-developers/api-reference/package#package.set_dir]]
p.set_dir("subdir", "subdir")
- # and push changes [[${docs}/api-reference/package#package.push]]
+ # and push changes [[${docs}/quilt-python-sdk-developers/api-reference/package#package.push]]
p.push("${name}", registry="s3://${bucket}", message="Hello World")
- # Download (be mindful of large packages) [[${docs}/api-reference/package#package.push]]
+ # Download (be mindful of large packages) [[${docs}/quilt-python-sdk-developers/api-reference/package#package.install]]
q3.Package.install("${name}"${pathPy}${hashPy}, registry="s3://${bucket}", dest=".")
`
},
@@ -36,13 +36,13 @@ const TEMPLATES = {
const pathCli = path && ` --path "${s3paths.ensureNoSlash(path)}"`
const hashCli = hashDisplay && ` --top-hash ${hashDisplay}`
return dedent`
- # Download package [[${docs}/api-reference/cli#install]]
+ # Download package [[${docs}/quilt-python-sdk-developers/api-reference/cli#install]]
quilt3 install "${name}"${pathCli}${hashCli} --registry s3://${bucket} --dest .
`
},
CLI_UPLOAD: (bucket: string, name: string) =>
dedent`
- # Upload package [[${docs}/api-reference/cli#push]]
+ # Upload package [[${docs}/quilt-python-sdk-developers/api-reference/cli#push]]
echo "Hello World" > README.md
quilt3 push "${name}" --registry s3://${bucket} --dir .
`,
diff --git a/catalog/app/containers/Bucket/File/Analytics.tsx b/catalog/app/containers/Bucket/File/Analytics.tsx
new file mode 100644
index 00000000000..4152b083953
--- /dev/null
+++ b/catalog/app/containers/Bucket/File/Analytics.tsx
@@ -0,0 +1,92 @@
+import * as dateFns from 'date-fns'
+import * as Eff from 'effect'
+import * as React from 'react'
+import * as M from '@material-ui/core'
+
+import Sparkline from 'components/Sparkline'
+import * as GQL from 'utils/GraphQL'
+import log from 'utils/Logging'
+import * as SVG from 'utils/SVG'
+import { readableQuantity } from 'utils/string'
+
+import Section from '../Section'
+
+import ACCESS_COUNTS_QUERY from './gql/ObjectAccessCounts.generated'
+
+const currentYear = new Date().getFullYear()
+
+const formatDate = (date: Date) =>
+ dateFns.format(date, currentYear === date.getFullYear() ? 'd MMM' : 'd MMM yyyy')
+
+interface AnalyticsProps {
+ bucket: string
+ path: string
+}
+
+export default function Analytics({ bucket, path }: AnalyticsProps) {
+  const [cursor, setCursor] = React.useState<number | null>(null)
+
+ const result = GQL.useQuery(ACCESS_COUNTS_QUERY, { bucket, key: path })
+
+ const data = React.useMemo(() => {
+ if (result.fetching) return Eff.Option.none()
+ if (result.error) log.error('Error fetching object access counts:', result.error)
+ return Eff.Option.some(Eff.Option.fromNullable(result.data?.objectAccessCounts))
+ }, [result.fetching, result.error, result.data])
+
+ const defaultExpanded = Eff.Option.match(data, {
+ onNone: () => false,
+ onSome: Eff.Option.match({
+ onNone: () => false,
+ onSome: ({ total }) => !!total,
+ }),
+ })
+
+ return (
+
+ {Eff.Option.match(data, {
+ onNone: () => ,
+ onSome: Eff.Option.match({
+ onNone: () => No analytics available,
+ onSome: ({ counts, total }) =>
+ total ? (
+
+
+ Downloads
+
+ {readableQuantity(cursor === null ? total : counts[cursor].value)}
+
+
+ {cursor === null
+ ? `${counts.length} days`
+ : formatDate(counts[cursor].date)}
+
+
+              <Sparkline
+                data={counts.map((c) => c.value)}
+ onCursor={setCursor}
+ width={1000}
+ height={60}
+ stroke={SVG.Paint.Server(
+
+
+
+ ,
+ )}
+ />
+
+
+ ) : (
+ No analytics available
+ ),
+ }),
+ })}
+
+ )
+}
diff --git a/catalog/app/containers/Bucket/FileAssistantContext.ts b/catalog/app/containers/Bucket/File/AssistantContext.ts
similarity index 98%
rename from catalog/app/containers/Bucket/FileAssistantContext.ts
rename to catalog/app/containers/Bucket/File/AssistantContext.ts
index 6f5876945f8..46f06c8ccf3 100644
--- a/catalog/app/containers/Bucket/FileAssistantContext.ts
+++ b/catalog/app/containers/Bucket/File/AssistantContext.ts
@@ -4,7 +4,7 @@ import * as React from 'react'
import * as Assistant from 'components/Assistant'
import * as XML from 'utils/XML'
-import { ObjectExistence } from './requests'
+import { ObjectExistence } from '../requests'
interface VersionsContextProps {
data: $TSFixMe
diff --git a/catalog/app/containers/Bucket/File.js b/catalog/app/containers/Bucket/File/File.js
similarity index 85%
rename from catalog/app/containers/Bucket/File.js
rename to catalog/app/containers/Bucket/File/File.js
index e91b4ed9fd1..fda4ceb1d40 100644
--- a/catalog/app/containers/Bucket/File.js
+++ b/catalog/app/containers/Bucket/File/File.js
@@ -1,6 +1,5 @@
import { basename } from 'path'
-import * as dateFns from 'date-fns'
import * as R from 'ramda'
import * as React from 'react'
import { Link, useHistory, useLocation, useParams } from 'react-router-dom'
@@ -11,7 +10,6 @@ import * as Buttons from 'components/Buttons'
import * as FileEditor from 'components/FileEditor'
import Message from 'components/Message'
import * as Preview from 'components/Preview'
-import Sparkline from 'components/Sparkline'
import cfg from 'constants/config'
import * as Bookmarks from 'containers/Bookmarks'
import * as Notifications from 'containers/Notifications'
@@ -21,23 +19,24 @@ import * as BucketPreferences from 'utils/BucketPreferences'
import { useData } from 'utils/Data'
import MetaTitle from 'utils/MetaTitle'
import * as NamedRoutes from 'utils/NamedRoutes'
-import * as SVG from 'utils/SVG'
import { linkStyle } from 'utils/StyledLink'
import copyToClipboard from 'utils/clipboard'
import * as Format from 'utils/format'
import parseSearch from 'utils/parseSearch'
import { up, decode, handleToHttpsUri } from 'utils/s3paths'
-import { readableBytes, readableQuantity } from 'utils/string'
-
-import AssistButton from './AssistButton'
-import FileCodeSamples from './CodeSamples/File'
-import * as AssistantContext from './FileAssistantContext'
-import FileProperties from './FileProperties'
-import * as FileView from './FileView'
-import Section from './Section'
-import renderPreview from './renderPreview'
-import * as requests from './requests'
-import { useViewModes, viewModeToSelectOption } from './viewModes'
+import { readableBytes } from 'utils/string'
+
+import AssistButton from '../AssistButton'
+import FileCodeSamples from '../CodeSamples/File'
+import FileProperties from '../FileProperties'
+import * as FileView from '../FileView'
+import Section from '../Section'
+import renderPreview from '../renderPreview'
+import * as requests from '../requests'
+import { useViewModes, viewModeToSelectOption } from '../viewModes'
+
+import Analytics from './Analytics'
+import * as AssistantContext from './AssistantContext'
const useVersionInfoStyles = M.makeStyles(({ typography }) => ({
version: {
@@ -203,69 +202,6 @@ function VersionInfo({ bucket, path, version }) {
)
}
-function Analytics({ bucket, path }) {
- const [cursor, setCursor] = React.useState(null)
- const s3 = AWS.S3.use()
- const today = React.useMemo(() => new Date(), [])
- const formatDate = (date) =>
- dateFns.format(
- date,
- today.getFullYear() === date.getFullYear() ? 'd MMM' : 'd MMM yyyy',
- )
- const data = useData(requests.objectAccessCounts, { s3, bucket, path, today })
-
- const defaultExpanded = data.case({
- Ok: ({ total }) => !!total,
- _: () => false,
- })
-
- return (
-
- {data.case({
- Ok: ({ counts, total }) =>
- total ? (
-
-
- Downloads
-
- {readableQuantity(cursor === null ? total : counts[cursor].value)}
-
-
- {cursor === null
- ? `${counts.length} days`
- : formatDate(counts[cursor].date)}
-
-
-
-
-
-
- ,
- )}
- />
-
-
- ) : (
- No analytics available
- ),
- Err: () => No analytics available,
- _: () => ,
- })}
-
- )
-}
-
function CenteredProgress() {
return (
diff --git a/catalog/app/containers/Bucket/File/gql/ObjectAccessCounts.generated.ts b/catalog/app/containers/Bucket/File/gql/ObjectAccessCounts.generated.ts
new file mode 100644
index 00000000000..94875020cfe
--- /dev/null
+++ b/catalog/app/containers/Bucket/File/gql/ObjectAccessCounts.generated.ts
@@ -0,0 +1,100 @@
+/* eslint-disable @typescript-eslint/naming-convention */
+import type { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-node/core'
+import * as Types from '../../../../model/graphql/types.generated'
+
+export type containers_Bucket_File_gql_ObjectAccessCountsQueryVariables = Types.Exact<{
+ bucket: Types.Scalars['String']
+ key: Types.Scalars['String']
+}>
+
+export type containers_Bucket_File_gql_ObjectAccessCountsQuery = {
+ readonly __typename: 'Query'
+} & {
+ readonly objectAccessCounts: Types.Maybe<
+    { readonly __typename: 'AccessCounts' } & Pick<Types.AccessCounts, 'total'> & {
+ readonly counts: ReadonlyArray<
+ { readonly __typename: 'AccessCountForDate' } & Pick<
+ Types.AccessCountForDate,
+ 'date' | 'value'
+ >
+ >
+ }
+ >
+}
+
+export const containers_Bucket_File_gql_ObjectAccessCountsDocument = {
+ kind: 'Document',
+ definitions: [
+ {
+ kind: 'OperationDefinition',
+ operation: 'query',
+ name: { kind: 'Name', value: 'containers_Bucket_File_gql_ObjectAccessCounts' },
+ variableDefinitions: [
+ {
+ kind: 'VariableDefinition',
+ variable: { kind: 'Variable', name: { kind: 'Name', value: 'bucket' } },
+ type: {
+ kind: 'NonNullType',
+ type: { kind: 'NamedType', name: { kind: 'Name', value: 'String' } },
+ },
+ },
+ {
+ kind: 'VariableDefinition',
+ variable: { kind: 'Variable', name: { kind: 'Name', value: 'key' } },
+ type: {
+ kind: 'NonNullType',
+ type: { kind: 'NamedType', name: { kind: 'Name', value: 'String' } },
+ },
+ },
+ ],
+ selectionSet: {
+ kind: 'SelectionSet',
+ selections: [
+ {
+ kind: 'Field',
+ name: { kind: 'Name', value: 'objectAccessCounts' },
+ arguments: [
+ {
+ kind: 'Argument',
+ name: { kind: 'Name', value: 'bucket' },
+ value: { kind: 'Variable', name: { kind: 'Name', value: 'bucket' } },
+ },
+ {
+ kind: 'Argument',
+ name: { kind: 'Name', value: 'key' },
+ value: { kind: 'Variable', name: { kind: 'Name', value: 'key' } },
+ },
+ {
+ kind: 'Argument',
+ name: { kind: 'Name', value: 'window' },
+ value: { kind: 'IntValue', value: '365' },
+ },
+ ],
+ selectionSet: {
+ kind: 'SelectionSet',
+ selections: [
+ { kind: 'Field', name: { kind: 'Name', value: 'total' } },
+ {
+ kind: 'Field',
+ name: { kind: 'Name', value: 'counts' },
+ selectionSet: {
+ kind: 'SelectionSet',
+ selections: [
+ { kind: 'Field', name: { kind: 'Name', value: 'date' } },
+ { kind: 'Field', name: { kind: 'Name', value: 'value' } },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ ],
+} as unknown as DocumentNode<
+ containers_Bucket_File_gql_ObjectAccessCountsQuery,
+ containers_Bucket_File_gql_ObjectAccessCountsQueryVariables
+>
+
+export { containers_Bucket_File_gql_ObjectAccessCountsDocument as default }
diff --git a/catalog/app/containers/Bucket/File/gql/ObjectAccessCounts.graphql b/catalog/app/containers/Bucket/File/gql/ObjectAccessCounts.graphql
new file mode 100644
index 00000000000..431f1cb2ee2
--- /dev/null
+++ b/catalog/app/containers/Bucket/File/gql/ObjectAccessCounts.graphql
@@ -0,0 +1,9 @@
+query ($bucket: String!, $key: String!) {
+ objectAccessCounts(bucket: $bucket, key: $key, window: 365) {
+ total
+ counts {
+ date
+ value
+ }
+ }
+}
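
For orientation, a minimal, hypothetical consumer sketched against this query document (it mirrors the new `Analytics` component above; the component name and props here are illustrative, not part of this PR):

```typescript
import * as React from 'react'
import * as GQL from 'utils/GraphQL'
import ACCESS_COUNTS_QUERY from './gql/ObjectAccessCounts.generated'

// Hypothetical consumer; the 365-day window is baked into the query document.
function DownloadsTotal({ bucket, path }: { bucket: string; path: string }) {
  const result = GQL.useQuery(ACCESS_COUNTS_QUERY, { bucket, key: path })
  // objectAccessCounts is nullable: { total, counts: [{ date, value }, ...] }
  return <>{result.data?.objectAccessCounts?.total ?? 'n/a'}</>
}
```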
diff --git a/catalog/app/containers/Bucket/File/index.ts b/catalog/app/containers/Bucket/File/index.ts
new file mode 100644
index 00000000000..d1590f6b882
--- /dev/null
+++ b/catalog/app/containers/Bucket/File/index.ts
@@ -0,0 +1 @@
+export { default } from './File'
diff --git a/catalog/app/containers/Bucket/Overview.js b/catalog/app/containers/Bucket/Overview.js
deleted file mode 100644
index 3acef4e2ccb..00000000000
--- a/catalog/app/containers/Bucket/Overview.js
+++ /dev/null
@@ -1,963 +0,0 @@
-import cx from 'classnames'
-import * as dateFns from 'date-fns'
-import * as R from 'ramda'
-import * as React from 'react'
-import { Link as RRLink, useParams } from 'react-router-dom'
-import * as redux from 'react-redux'
-import * as M from '@material-ui/core'
-import { fade } from '@material-ui/core/styles'
-import useComponentSize from '@rehooks/component-size'
-
-import Skeleton from 'components/Skeleton'
-import StackedAreaChart from 'components/StackedAreaChart'
-import cfg from 'constants/config'
-import * as authSelectors from 'containers/Auth/selectors'
-import * as APIConnector from 'utils/APIConnector'
-import * as AWS from 'utils/AWS'
-import AsyncResult from 'utils/AsyncResult'
-import * as BucketPreferences from 'utils/BucketPreferences'
-import Data, { useData } from 'utils/Data'
-import { useQueryS } from 'utils/GraphQL'
-import * as LinkedData from 'utils/LinkedData'
-import * as NamedRoutes from 'utils/NamedRoutes'
-import * as SVG from 'utils/SVG'
-import { readableBytes, readableQuantity, formatQuantity } from 'utils/string'
-
-import * as Gallery from './Gallery'
-import * as Summarize from './Summarize'
-import * as requests from './requests'
-import BUCKET_CONFIG_QUERY from './OverviewBucketConfig.generated'
-
-import bg from './Overview-bg.jpg'
-
-const RODA_LINK = 'https://registry.opendata.aws'
-const RODA_BUCKET = 'quilt-open-data-bucket'
-const MAX_EXTS = 7
-// must have length >= MAX_EXTS
-const COLOR_MAP = [
- '#8ad3cb',
- '#d7ce69',
- '#bfbadb',
- '#f4806c',
- '#83b0d1',
- '#b2de67',
- '#bc81be',
- '#f0b5d3',
- '#7ba39f',
- '#9894ad',
- '#be7265',
- '#94ad6b',
-]
-
-function mkKeyedPool(pool) {
- const map = {}
- let poolIdx = 0
- const get = (key) => {
- if (!(key in map)) {
- // eslint-disable-next-line no-plusplus
- map[key] = pool[poolIdx++ % pool.length]
- }
- return map[key]
- }
- return { get }
-}
-
-function useConst(cons) {
- const ref = React.useRef(null)
- if (!ref.current) ref.current = { value: cons() }
- return ref.current.value
-}
-
-const useObjectsByExtStyles = M.makeStyles((t) => ({
- root: {
- display: 'grid',
- gridAutoRows: 20,
- gridColumnGap: t.spacing(1),
- gridRowGap: t.spacing(0.25),
- gridTemplateAreas: `
- ". heading heading"
- `,
- gridTemplateColumns: 'minmax(30px, max-content) 1fr minmax(30px, max-content)',
- gridTemplateRows: 'auto',
- [t.breakpoints.down('sm')]: {
- gridTemplateAreas: `
- "heading heading heading"
- `,
- },
- },
- heading: {
- ...t.typography.h6,
- gridArea: 'heading',
- marginBottom: t.spacing(1),
- [t.breakpoints.down('sm')]: {
- textAlign: 'center',
- },
- },
- ext: {
- color: t.palette.text.secondary,
- gridColumn: 1,
- fontSize: t.typography.overline.fontSize,
- fontWeight: t.typography.fontWeightMedium,
- letterSpacing: t.typography.subtitle2.letterSpacing,
- lineHeight: t.typography.pxToRem(20),
- textAlign: 'right',
- },
- count: {
- color: t.palette.text.secondary,
- gridColumn: 3,
- fontSize: t.typography.overline.fontSize,
- fontWeight: t.typography.fontWeightMedium,
- letterSpacing: t.typography.subtitle2.letterSpacing,
- lineHeight: t.typography.pxToRem(20),
- },
- bar: {
- background: t.palette.action.hover,
- gridColumn: 2,
- },
- gauge: {
- height: '100%',
- position: 'relative',
- },
- flip: {},
- size: {
- color: t.palette.common.white,
- fontSize: t.typography.overline.fontSize,
- fontWeight: t.typography.fontWeightMedium,
- letterSpacing: t.typography.subtitle2.letterSpacing,
- lineHeight: t.typography.pxToRem(20),
- position: 'absolute',
- right: t.spacing(1),
- '&$flip': {
- color: t.palette.text.hint,
- left: `calc(100% + ${t.spacing(1)}px)`,
- right: 'auto',
- },
- },
- skeleton: {
- gridColumn: '1 / span 3',
- },
- unavail: {
- ...t.typography.body2,
- alignItems: 'center',
- display: 'flex',
- gridColumn: '1 / span 3',
- gridRow: `2 / span ${MAX_EXTS}`,
- justifyContent: 'center',
- },
-}))
-
-function ObjectsByExt({ data, colorPool, ...props }) {
- const classes = useObjectsByExtStyles()
- return (
-
- Objects by File Extension
- {AsyncResult.case(
- {
- Ok: (exts) => {
- const capped = exts.slice(0, MAX_EXTS)
- const maxBytes = capped.reduce((max, e) => Math.max(max, e.bytes), 0)
- const max = Math.log(maxBytes + 1)
- const scale = (x) => Math.log(x + 1) / max
- return capped.map(({ ext, bytes, objects }, i) => {
- const color = colorPool.get(ext)
- return (
-
-
- {ext || 'other'}
-
-
-
-
- {readableBytes(bytes)}
-
-
-
-
- {readableQuantity(objects)}
-
-
- )
- })
- },
- _: (r) => (
- <>
- {R.times(
- (i) => (
-
- ),
- MAX_EXTS,
- )}
- {AsyncResult.Err.is(r) && (
- Data unavailable
- )}
- >
- ),
- },
- data,
- )}
-
- )
-}
-
-const skelData = R.times(
- R.pipe(
- () => R.times(Math.random, 30),
- R.scan(R.add, 0),
- R.drop(1),
- R.map((v) => Math.log(100 * v + 1)),
- ),
- 8,
-)
-
-const skelColors = [
- [M.colors.grey[300], M.colors.grey[100]],
- [M.colors.grey[400], M.colors.grey[200]],
-]
-
-const mkPulsingGradient = ({ colors: [c1, c2], animate = false }) =>
- SVG.Paint.Server(
-
-
- {animate && (
-
- )}
-
- ,
- )
-
-function ChartSkel({
- height,
- width,
- lines = skelData.length,
- animate = false,
- children,
-}) {
- const data = React.useMemo(
- () => R.times((i) => skelData[i % skelData.length], lines),
- [lines],
- )
- const fills = React.useMemo(
- () =>
- R.times(
- (i) => mkPulsingGradient({ colors: skelColors[i % skelColors.length], animate }),
- lines,
- ),
- [lines, animate],
- )
- return (
-
-
- {children}
-
- )
-}
-
-const ANALYTICS_WINDOW_OPTIONS = [
- { value: 31, label: 'Last 1 month' },
- { value: 91, label: 'Last 3 months' },
- { value: 182, label: 'Last 6 months' },
- { value: 365, label: 'Last 12 months' },
-]
-
-function DownloadsRange({ value, onChange, bucket, rawData }) {
- const [anchor, setAnchor] = React.useState(null)
-
- const open = React.useCallback(
- (e) => {
- setAnchor(e.target)
- },
- [setAnchor],
- )
-
- const close = React.useCallback(() => {
- setAnchor(null)
- }, [setAnchor])
-
- const choose = React.useCallback(
- (e) => {
- onChange(e.target.value)
- close()
- },
- [onChange, close],
- )
-
- const { label } = ANALYTICS_WINDOW_OPTIONS.find((o) => o.value === value) || {}
-
- return (
- <>
-
-
- {label} expand_more
-
-
- {ANALYTICS_WINDOW_OPTIONS.map((o) => (
-
- {o.label}
-
- ))}
-
-
- Download to file
-
-
- >
- )
-}
-
-const useStatsTipStyles = M.makeStyles((t) => ({
- root: {
- background: fade(t.palette.grey[700], 0.9),
- color: t.palette.common.white,
- padding: [[6, 8]],
- },
- head: {
- display: 'flex',
- justifyContent: 'space-between',
- marginBottom: 4,
- },
- date: {},
- total: {},
- extsContainer: {
- alignItems: 'center',
- display: 'grid',
- gridAutoRows: 'auto',
- gridColumnGap: 4,
- gridTemplateColumns: 'max-content max-content 1fr',
- },
- ext: {
- fontSize: 12,
- lineHeight: '16px',
- maxWidth: 70,
- opacity: 0.6,
- overflow: 'hidden',
- textAlign: 'right',
- textOverflow: 'ellipsis',
- },
- color: {
- borderRadius: '50%',
- height: 8,
- opacity: 0.6,
- width: 8,
- },
- number: {
- fontSize: 12,
- lineHeight: '16px',
- opacity: 0.6,
- },
- hl: {
- opacity: 1,
- },
-}))
-
-function StatsTip({ stats, colorPool, className, ...props }) {
- const classes = useStatsTipStyles()
- return (
-
-
-
{dateFns.format(stats.date, 'd MMM')}
-
- {readableQuantity(stats.combined.sum)} (+
- {readableQuantity(stats.combined.value)})
-
-
-
- {stats.byExt.map((s) => {
- const hl = stats.highlighted ? stats.highlighted.ext === s.ext : true
- return (
-
- {s.ext || 'other'}
-
-
- {readableQuantity(s.sum)} (+
- {readableQuantity(s.value)})
-
-
- )
- })}
-
-
- )
-}
-
-const Transition = ({ TransitionComponent = M.Grow, children, ...props }) => {
- const contentsRef = React.useRef(null)
- if (props.in) contentsRef.current = children()
- return (
- contentsRef.current && (
- {contentsRef.current}
- )
- )
-}
-
-// use the same height as the bar chart: 20px per bar with 2px margin
-const CHART_H = 22 * MAX_EXTS - 2
-
-const useDownloadsStyles = M.makeStyles((t) => ({
- root: {
- display: 'grid',
- gridRowGap: t.spacing(0.25),
- gridTemplateAreas: `
- "heading period"
- "chart chart"
- `,
- gridTemplateColumns: 'min-content 1fr',
- gridTemplateRows: 'auto auto',
- [t.breakpoints.down('sm')]: {
- gridTemplateAreas: `
- "heading"
- "chart"
- "period"
- `,
- gridTemplateColumns: '1fr',
- gridTemplateRows: 'auto auto auto',
- },
- },
- heading: {
- ...t.typography.h6,
- gridArea: 'heading',
- marginBottom: t.spacing(1),
- whiteSpace: 'nowrap',
- [t.breakpoints.down('sm')]: {
- marginBottom: 0,
- textAlign: 'center',
- },
- },
- ext: {
- display: 'inline-block',
- maxWidth: 100,
- overflow: 'hidden',
- textOverflow: 'ellipsis',
- verticalAlign: 'bottom',
- },
- period: {
- display: 'flex',
- gridArea: 'period',
- justifyContent: 'center',
- alignItems: 'center',
- [t.breakpoints.down('sm')]: {
- paddingBottom: t.spacing(1),
- paddingTop: t.spacing(2),
- },
- [t.breakpoints.up('md')]: {
- height: 37,
- justifyContent: 'flex-end',
- },
- },
- chart: {
- gridArea: 'chart',
- position: 'relative',
- },
- left: {},
- right: {},
- dateStats: {
- maxWidth: 180,
- position: 'absolute',
- top: 0,
- width: 'calc(50% - 8px)',
- zIndex: 1,
- '&$left': {
- left: 0,
- },
- '&$right': {
- right: 0,
- },
- },
- unavail: {
- ...t.typography.body2,
- alignItems: 'center',
- display: 'flex',
- height: '100%',
- justifyContent: 'center',
- position: 'absolute',
- top: 0,
- width: '100%',
- },
-}))
-
-function Downloads({ bucket, colorPool, ...props }) {
- const s3 = AWS.S3.use()
- const today = React.useMemo(() => new Date(), [])
- const classes = useDownloadsStyles()
- const ref = React.useRef(null)
- const { width } = useComponentSize(ref)
- const [window, setWindow] = React.useState(ANALYTICS_WINDOW_OPTIONS[0].value)
- const [cursor, setCursor] = React.useState(null)
- const cursorStats = (counts) => {
- if (!cursor) return null
- const { date, ...combined } = counts.combined.counts[cursor.j]
- const byExt = counts.byExtCollapsed.map((e) => ({
- ext: e.ext,
- ...e.counts[cursor.j],
- }))
- const highlighted = cursor.i == null ? null : counts.byExtCollapsed[cursor.i]
- const firstHalf = cursor.j < counts.combined.counts.length / 2
- return { date, combined, byExt, highlighted, firstHalf }
- }
-
- const mkRawData = AsyncResult.case({
- Ok: (data) => `data:application/json,${JSON.stringify(data)}`,
- _: () => null,
- })
-
- if (!cfg.analyticsBucket) {
- return (
-
- Requires CloudTrail
-
- )
- }
-
- return (
-
- {(data) => (
-
-
-
-
-
- {AsyncResult.case(
- {
- Ok: (counts) => {
- const stats = cursorStats(counts)
- const hl = stats && stats.highlighted
- const ext = hl ? hl.ext || 'other' : 'total'
- const total = hl ? hl.total : counts.combined.total
- if (!counts.byExtCollapsed.length) return 'Downloads'
- return (
- <>
- Downloads ({ext}):{' '}
- {readableQuantity(total)}
- >
- )
- },
- _: () => 'Downloads',
- },
- data,
- )}
-
-
- {AsyncResult.case(
- {
- Ok: (counts) => {
- if (!counts.byExtCollapsed.length) {
- return (
-
- No Data
-
- )
- }
-
- const stats = cursorStats(counts)
- return (
- <>
-
- e.counts.map((i) => Math.log(i.sum + 1)),
- )}
- onCursor={setCursor}
- height={CHART_H}
- width={width}
- areaFills={counts.byExtCollapsed.map((e) =>
- SVG.Paint.Color(colorPool.get(e.ext)),
- )}
- lineStroke={SVG.Paint.Color(M.colors.grey[500])}
- extendL
- extendR
- px={10}
- />
-
- {() => (
-
- )}
-
-
- {() => (
-
- )}
-
- >
- )
- },
- _: () => ,
- },
- data,
- )}
-
-
- )}
-
- )
-}
-
-const useStatDisplayStyles = M.makeStyles((t) => ({
- root: {
- alignItems: 'baseline',
- display: 'flex',
- '& + &': {
- marginLeft: t.spacing(1.5),
- [t.breakpoints.up('sm')]: {
- marginLeft: t.spacing(4),
- },
- [t.breakpoints.up('md')]: {
- marginLeft: t.spacing(6),
- },
- },
- },
- value: {
- fontSize: t.typography.h6.fontSize,
- fontWeight: t.typography.fontWeightBold,
- letterSpacing: 0,
- lineHeight: '20px',
- [t.breakpoints.up('sm')]: {
- fontSize: t.typography.h4.fontSize,
- lineHeight: '32px',
- },
- },
- label: {
- ...t.typography.body2,
- color: t.palette.grey[300],
- lineHeight: 1,
- marginLeft: t.spacing(0.5),
- [t.breakpoints.up('sm')]: {
- marginLeft: t.spacing(1),
- },
- },
- skeletonContainer: {
- alignItems: 'center',
- height: 20,
- [t.breakpoints.up('sm')]: {
- height: 32,
- },
- },
- skeleton: {
- borderRadius: t.shape.borderRadius,
- height: t.typography.h6.fontSize,
- width: 96,
- [t.breakpoints.up('sm')]: {
- height: t.typography.h4.fontSize,
- width: 120,
- },
- },
-}))
-
-function StatDisplay({ value, label, format, fallback }) {
- const classes = useStatDisplayStyles()
- return R.pipe(
- AsyncResult.case({
- Ok: R.pipe(format || R.identity, AsyncResult.Ok),
- Err: R.pipe(fallback || R.identity, AsyncResult.Ok),
- _: R.identity,
- }),
- AsyncResult.case({
- Ok: (v) =>
- v != null && (
-
- {v}
- {!!label && {label}}
-
- ),
- _: () => (
-
-
-
- ),
- }),
- )(value)
-}
-
-const useHeadStyles = M.makeStyles((t) => ({
- root: {
- position: 'relative',
- [t.breakpoints.down('xs')]: {
- borderRadius: 0,
- },
- [t.breakpoints.up('sm')]: {
- marginTop: t.spacing(2),
- },
- },
- top: {
- background: `center / cover url(${bg}) ${t.palette.grey[700]}`,
- borderTopLeftRadius: t.shape.borderRadius,
- borderTopRightRadius: t.shape.borderRadius,
- color: t.palette.common.white,
- overflow: 'hidden',
- paddingBottom: t.spacing(3),
- paddingLeft: t.spacing(2),
- paddingRight: t.spacing(2),
- paddingTop: t.spacing(4),
- position: 'relative',
- [t.breakpoints.up('sm')]: {
- padding: t.spacing(4),
- },
- [t.breakpoints.down('xs')]: {
- borderRadius: 0,
- },
- },
- settings: {
- color: t.palette.common.white,
- position: 'absolute',
- right: t.spacing(2),
- top: t.spacing(2),
- },
-}))
-
-function Head({ s3, overviewUrl, bucket, description }) {
- const classes = useHeadStyles()
- const req = APIConnector.use()
- const isRODA = !!overviewUrl && overviewUrl.includes(`/${RODA_BUCKET}/`)
- const colorPool = useConst(() => mkKeyedPool(COLOR_MAP))
- const statsData = useData(requests.bucketStats, { req, s3, bucket, overviewUrl })
- const pkgCountData = useData(requests.countPackageRevisions, { req, bucket })
- const { urls } = NamedRoutes.use()
- const isAdmin = redux.useSelector(authSelectors.isAdmin)
- return (
-
-
- {bucket}
- {!!description && (
-
- {description}
-
- )}
- {isRODA && (
-
-
- From the{' '}
-
- Registry of Open Data on AWS
-
-
-
- )}
-
- '? B'}
- />
- '?'}
- />
- null}
- />
-
- {isAdmin && (
-
-
- settings
-
-
- )}
-
-
-
-
-
-
-
-
-
-
-
- )
-}
-
-function Readmes({ s3, overviewUrl, bucket }) {
- return (
-
- {AsyncResult.case({
- Ok: (rs) =>
- (rs.discovered.length > 0 || !!rs.forced) && (
- <>
- {!!rs.forced && (
-
- )}
- {rs.discovered.map((h) => (
-
- ))}
- >
- ),
- _: () => ,
- })}
-
- )
-}
-
-function Imgs({ s3, overviewUrl, inStack, bucket }) {
- const req = APIConnector.use()
- return (
-
- {AsyncResult.case({
- Ok: (images) => (images.length ? : null),
- _: () => ,
- })}
-
- )
-}
-
-function ThumbnailsWrapper({
- s3,
- overviewUrl,
- inStack,
- bucket,
- preferences: galleryPrefs,
-}) {
- if (cfg.noOverviewImages || !galleryPrefs) return null
- if (!galleryPrefs.overview) return null
- return (
-
- {AsyncResult.case({
- Ok: (h) =>
- (!h || galleryPrefs.summarize) && (
-
- ),
- Err: () => ,
- Pending: () => ,
- _: () => null,
- })}
-
- )
-}
-
-export default function Overview() {
- const { bucket } = useParams()
-
- const s3 = AWS.S3.use()
- const { bucketConfig } = useQueryS(BUCKET_CONFIG_QUERY, { bucket })
- const inStack = !!bucketConfig
- const overviewUrl = bucketConfig?.overviewUrl
- const description = bucketConfig?.description
- const prefs = BucketPreferences.use()
- return (
-
- {inStack && (
-
-
-
- )}
- {bucketConfig ? (
-
- ) : (
-
- {bucket}
-
- )}
-
- {BucketPreferences.Result.match(
- {
- Ok: ({ ui: { blocks } }) => (
-
- ),
- Pending: () => ,
- Init: R.F,
- },
- prefs,
- )}
-
-
- )
-}
diff --git a/catalog/app/containers/Bucket/Overview/ColorPool.ts b/catalog/app/containers/Bucket/Overview/ColorPool.ts
new file mode 100644
index 00000000000..6be8be62756
--- /dev/null
+++ b/catalog/app/containers/Bucket/Overview/ColorPool.ts
@@ -0,0 +1,16 @@
+export interface ColorPool {
+ get: (key: string) => string
+}
+
+export function makeColorPool(pool: string[]): ColorPool {
+  const map: Record<string, string> = {}
+ let poolIdx = 0
+ const get = (key: string): string => {
+ if (!(key in map)) {
+ // eslint-disable-next-line no-plusplus
+ map[key] = pool[poolIdx++ % pool.length]
+ }
+ return map[key]
+ }
+ return { get }
+}
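
A brief usage sketch of the extracted color pool (hex values are illustrative; the real `COLOR_MAP` lives in `Header.tsx` below). Keys are assigned a color on first use and keep it, cycling through the pool:

```typescript
import { makeColorPool } from './ColorPool'

const pool = makeColorPool(['#8ad3cb', '#d7ce69', '#bfbadb'])

pool.get('csv')  // '#8ad3cb' — first key takes the first color
pool.get('json') // '#d7ce69'
pool.get('csv')  // '#8ad3cb' — repeated keys reuse their color
pool.get('txt')  // '#bfbadb'
pool.get('bam')  // '#8ad3cb' — pool wraps around once exhausted
```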
diff --git a/catalog/app/containers/Bucket/Overview/Downloads.spec.ts b/catalog/app/containers/Bucket/Overview/Downloads.spec.ts
new file mode 100644
index 00000000000..0611d0b7b54
--- /dev/null
+++ b/catalog/app/containers/Bucket/Overview/Downloads.spec.ts
@@ -0,0 +1,194 @@
+import { processBucketAccessCounts } from './Downloads'
+
+jest.mock(
+ 'constants/config',
+ jest.fn(() => ({})),
+)
+
+describe('containers/Bucket/Overview/Downloads', () => {
+ describe('processBucketAccessCounts', () => {
+ it('should normalize the data received from GQL and compute some missing data', () => {
+ expect(
+ processBucketAccessCounts({
+ __typename: 'BucketAccessCounts',
+ byExt: [
+ {
+ __typename: 'AccessCountsGroup',
+ ext: 'csv',
+ counts: {
+ __typename: 'AccessCounts',
+ total: 10,
+ counts: [
+ {
+ __typename: 'AccessCountForDate',
+ value: 1,
+ date: new Date('2021-08-01'),
+ },
+ {
+ __typename: 'AccessCountForDate',
+ value: 2,
+ date: new Date('2021-08-02'),
+ },
+ {
+ __typename: 'AccessCountForDate',
+ value: 3,
+ date: new Date('2021-08-03'),
+ },
+ {
+ __typename: 'AccessCountForDate',
+ value: 4,
+ date: new Date('2021-08-04'),
+ },
+ ],
+ },
+ },
+ ],
+ byExtCollapsed: [
+ {
+ __typename: 'AccessCountsGroup',
+ ext: 'csv',
+ counts: {
+ __typename: 'AccessCounts',
+ total: 10,
+ counts: [
+ {
+ __typename: 'AccessCountForDate',
+ value: 1,
+ date: new Date('2021-08-01'),
+ },
+ {
+ __typename: 'AccessCountForDate',
+ value: 2,
+ date: new Date('2021-08-02'),
+ },
+ {
+ __typename: 'AccessCountForDate',
+ value: 3,
+ date: new Date('2021-08-03'),
+ },
+ {
+ __typename: 'AccessCountForDate',
+ value: 4,
+ date: new Date('2021-08-04'),
+ },
+ ],
+ },
+ },
+ ],
+ combined: {
+ __typename: 'AccessCounts',
+ total: 10,
+ counts: [
+ {
+ __typename: 'AccessCountForDate',
+ value: 1,
+ date: new Date('2021-08-01'),
+ },
+ {
+ __typename: 'AccessCountForDate',
+ value: 2,
+ date: new Date('2021-08-02'),
+ },
+ {
+ __typename: 'AccessCountForDate',
+ value: 3,
+ date: new Date('2021-08-03'),
+ },
+ {
+ __typename: 'AccessCountForDate',
+ value: 4,
+ date: new Date('2021-08-04'),
+ },
+ ],
+ },
+ }),
+ ).toEqual({
+ byExt: [
+ {
+ ext: '.csv',
+ counts: {
+ total: 10,
+ counts: [
+ {
+ date: new Date('2021-08-01'),
+ value: 1,
+ sum: 1,
+ },
+ {
+ date: new Date('2021-08-02'),
+ value: 2,
+ sum: 3,
+ },
+ {
+ date: new Date('2021-08-03'),
+ value: 3,
+ sum: 6,
+ },
+ {
+ date: new Date('2021-08-04'),
+ value: 4,
+ sum: 10,
+ },
+ ],
+ },
+ },
+ ],
+ byExtCollapsed: [
+ {
+ ext: '.csv',
+ counts: {
+ total: 10,
+ counts: [
+ {
+ date: new Date('2021-08-01'),
+ value: 1,
+ sum: 1,
+ },
+ {
+ date: new Date('2021-08-02'),
+ value: 2,
+ sum: 3,
+ },
+ {
+ date: new Date('2021-08-03'),
+ value: 3,
+ sum: 6,
+ },
+ {
+ date: new Date('2021-08-04'),
+ value: 4,
+ sum: 10,
+ },
+ ],
+ },
+ },
+ ],
+ combined: {
+ total: 10,
+ counts: [
+ {
+ date: new Date('2021-08-01'),
+ value: 1,
+ sum: 1,
+ },
+ {
+ date: new Date('2021-08-02'),
+ value: 2,
+ sum: 3,
+ },
+ {
+ date: new Date('2021-08-03'),
+ value: 3,
+ sum: 6,
+ },
+ {
+ date: new Date('2021-08-04'),
+ value: 4,
+ sum: 10,
+ },
+ ],
+ },
+ })
+ })
+ })
+})
diff --git a/catalog/app/containers/Bucket/Overview/Downloads.tsx b/catalog/app/containers/Bucket/Overview/Downloads.tsx
new file mode 100644
index 00000000000..065519d45b2
--- /dev/null
+++ b/catalog/app/containers/Bucket/Overview/Downloads.tsx
@@ -0,0 +1,593 @@
+import cx from 'classnames'
+import * as dateFns from 'date-fns'
+import * as Eff from 'effect'
+import * as React from 'react'
+import * as M from '@material-ui/core'
+import { fade } from '@material-ui/core/styles'
+import useComponentSize from '@rehooks/component-size'
+
+import StackedAreaChart from 'components/StackedAreaChart'
+import cfg from 'constants/config'
+import * as GQL from 'utils/GraphQL'
+import log from 'utils/Logging'
+import * as SVG from 'utils/SVG'
+import { readableQuantity } from 'utils/string'
+
+import { ColorPool } from './ColorPool'
+
+import BUCKET_ACCESS_COUNTS_QUERY from './gql/BucketAccessCounts.generated'
+
+type GQLBucketAccessCounts = NonNullable<
+  GQL.DataForDoc<typeof BUCKET_ACCESS_COUNTS_QUERY>['bucketAccessCounts']
+>
+type GQLAccessCountsGroup = GQLBucketAccessCounts['byExt'][0]
+type GQLAccessCounts = GQLBucketAccessCounts['combined']
+type GQLAccessCountForDate = GQLAccessCounts['counts'][0]
+
+interface ProcessedAccessCountForDate {
+ date: Date
+ value: number
+ sum: number
+}
+
+interface ProcessedAccessCounts {
+ total: number
+ counts: readonly ProcessedAccessCountForDate[]
+}
+
+interface ProcessedAccessCountsGroup {
+ ext: string
+ counts: ProcessedAccessCounts
+}
+
+interface ProcessedBucketAccessCounts {
+ byExt: readonly ProcessedAccessCountsGroup[]
+ byExtCollapsed: readonly ProcessedAccessCountsGroup[]
+ combined: ProcessedAccessCounts
+}
+
+const processAccessCountForDateArr = (
+ counts: readonly GQLAccessCountForDate[],
+): readonly ProcessedAccessCountForDate[] =>
+ // compute running sum
+ Eff.Array.mapAccum(counts, 0, (acc, { value, date }) => [
+ acc + value,
+ {
+ value,
+ date,
+ sum: acc + value,
+ },
+ ])[1]
+
+const processAccessCounts = (counts: GQLAccessCounts): ProcessedAccessCounts => ({
+ total: counts.total,
+ counts: processAccessCountForDateArr(counts.counts),
+})
+
+const processAccessCountsGroup = (
+ group: GQLAccessCountsGroup,
+): ProcessedAccessCountsGroup => ({
+ ext: group.ext && `.${group.ext}`,
+ counts: processAccessCounts(group.counts),
+})
+
+export const processBucketAccessCounts = (
+ counts: GQLBucketAccessCounts,
+): ProcessedBucketAccessCounts => ({
+ byExt: counts.byExt.map(processAccessCountsGroup),
+ byExtCollapsed: counts.byExtCollapsed.map(processAccessCountsGroup),
+ combined: processAccessCounts(counts.combined),
+})
+
+interface Cursor {
+ i: number | null // ext
+ j: number // date
+}
+
+interface CursorStats {
+ date: Date
+ combined: {
+ sum: number
+ value: number
+ }
+ byExt: {
+ ext: string
+ sum: number
+ value: number
+ date: Date
+ }[]
+ highlighted: {
+ ext: string
+ counts: ProcessedAccessCounts
+ } | null
+ firstHalf: boolean
+}
+
+function getCursorStats(
+ counts: ProcessedBucketAccessCounts,
+ cursor: Cursor | null,
+): CursorStats | null {
+ if (!cursor) return null
+
+ const { date, ...combined } = counts.combined.counts[cursor.j]
+ const byExt = counts.byExtCollapsed.map((e) => ({
+ ext: e.ext,
+ ...e.counts.counts[cursor.j],
+ }))
+ const highlighted = cursor.i == null ? null : counts.byExtCollapsed[cursor.i]
+ const firstHalf = cursor.j < counts.combined.counts.length / 2
+ return { date, combined, byExt, highlighted, firstHalf }
+}
+
+const skelData = Eff.Array.makeBy(
+ 8,
+ Eff.flow(
+ () => Eff.Array.makeBy(30, Math.random),
+ Eff.Array.scan(0, Eff.Number.sum),
+ Eff.Array.drop(1),
+ Eff.Array.map((v) => Math.log(100 * v + 1)),
+ ),
+)
+
+const skelColors = [
+ [M.colors.grey[300], M.colors.grey[100]],
+ [M.colors.grey[400], M.colors.grey[200]],
+] as const
+
+const mkPulsingGradient = ([c1, c2]: readonly [string, string], animate: boolean) =>
+ SVG.Paint.Server(
+
+
+ {animate && (
+
+ )}
+
+ ,
+ )
+
+interface ChartSkelProps {
+ height: number
+ width: number
+ lines?: number
+ animate?: boolean
+ children?: React.ReactNode
+}
+
+function ChartSkel({
+ height,
+ width,
+ lines = skelData.length,
+ animate = false,
+ children,
+}: ChartSkelProps) {
+ const data = React.useMemo(
+ () => Eff.Array.makeBy(lines, (i) => skelData[i % skelData.length]),
+ [lines],
+ )
+ const fills = React.useMemo(
+ () =>
+ Eff.Array.makeBy(lines, (i) =>
+ mkPulsingGradient(skelColors[i % skelColors.length], animate),
+ ),
+ [lines, animate],
+ )
+ return (
+
+ {/* @ts-expect-error */}
+
+ {children}
+
+ )
+}
+
+const ANALYTICS_WINDOW_OPTIONS = [
+ { value: 31, label: 'Last 1 month' },
+ { value: 91, label: 'Last 3 months' },
+ { value: 182, label: 'Last 6 months' },
+ { value: 365, label: 'Last 12 months' },
+]
+
+interface DownloadsRangeProps {
+ value: number
+ onChange: (value: number) => void
+ bucket: string
+ data: Eff.Option.Option
+}
+
+function DownloadsRange({ value, onChange, bucket, data }: DownloadsRangeProps) {
+  const [anchor, setAnchor] = React.useState<HTMLElement | null>(null)
+
+ const open = React.useCallback(
+ (e) => {
+ setAnchor(e.target)
+ },
+ [setAnchor],
+ )
+
+ const close = React.useCallback(() => {
+ setAnchor(null)
+ }, [setAnchor])
+
+ const choose = React.useCallback(
+ (e) => {
+ onChange(e.target.value)
+ close()
+ },
+ [onChange, close],
+ )
+
+ const { label } = ANALYTICS_WINDOW_OPTIONS.find((o) => o.value === value) || {}
+
+ const jsonData = React.useMemo(
+ () =>
+ Eff.Option.match(data, {
+ onNone: () => null,
+ onSome: (d) => `data:application/json,${JSON.stringify(d)}`,
+ }),
+ [data],
+ )
+
+ return (
+ <>
+
+
+ {label} expand_more
+
+
+ {ANALYTICS_WINDOW_OPTIONS.map((o) => (
+
+ {o.label}
+
+ ))}
+
+
+ Download to file
+
+
+ >
+ )
+}
+
+const useStatsTipStyles = M.makeStyles((t) => ({
+ root: {
+ background: fade(t.palette.grey[700], 0.9),
+ color: t.palette.common.white,
+ padding: '6px 8px',
+ },
+ head: {
+ display: 'flex',
+ justifyContent: 'space-between',
+ marginBottom: 4,
+ },
+ date: {},
+ total: {},
+ extsContainer: {
+ alignItems: 'center',
+ display: 'grid',
+ gridAutoRows: 'auto',
+ gridColumnGap: 4,
+ gridTemplateColumns: 'max-content max-content 1fr',
+ },
+ ext: {
+ fontSize: 12,
+ lineHeight: '16px',
+ maxWidth: 70,
+ opacity: 0.6,
+ overflow: 'hidden',
+ textAlign: 'right',
+ textOverflow: 'ellipsis',
+ },
+ color: {
+ borderRadius: '50%',
+ height: 8,
+ opacity: 0.6,
+ width: 8,
+ },
+ number: {
+ fontSize: 12,
+ lineHeight: '16px',
+ opacity: 0.6,
+ },
+ hl: {
+ opacity: 1,
+ },
+}))
+
+interface StatsTipProps {
+ stats: CursorStats | null
+ colorPool: ColorPool
+ className?: string
+}
+
+function StatsTip({ stats, colorPool, className, ...props }: StatsTipProps) {
+ const classes = useStatsTipStyles()
+ if (!stats) return null
+ return (
+
+
+
{dateFns.format(stats.date, 'd MMM')}
+
+ {readableQuantity(stats.combined.sum)} (+
+ {readableQuantity(stats.combined.value)})
+
+
+
+ {stats.byExt.map((s) => {
+ const hl = stats.highlighted ? stats.highlighted.ext === s.ext : true
+ return (
+
+ {s.ext || 'other'}
+
+
+ {readableQuantity(s.sum)} (+
+ {readableQuantity(s.value)})
+
+
+ )
+ })}
+
+
+ )
+}
+
+interface TransitionProps {
+ children: JSX.Element
+ in: boolean
+}
+
+function Transition({ children, ...props }: TransitionProps) {
+  const contentsRef = React.useRef<JSX.Element | null>(null)
+ // when `in` is false, we want to keep the last rendered contents
+ if (props.in) contentsRef.current = children
+  return contentsRef.current && <M.Grow {...props}>{contentsRef.current}</M.Grow>
+}
+
+const useStyles = M.makeStyles((t) => ({
+ root: {
+ display: 'grid',
+ gridRowGap: t.spacing(0.25),
+ gridTemplateAreas: `
+ "heading period"
+ "chart chart"
+ `,
+ gridTemplateColumns: 'min-content 1fr',
+ gridTemplateRows: 'auto auto',
+ [t.breakpoints.down('sm')]: {
+ gridTemplateAreas: `
+ "heading"
+ "chart"
+ "period"
+ `,
+ gridTemplateColumns: '1fr',
+ gridTemplateRows: 'auto auto auto',
+ },
+ },
+ heading: {
+ ...t.typography.h6,
+ gridArea: 'heading',
+ marginBottom: t.spacing(1),
+ whiteSpace: 'nowrap',
+ [t.breakpoints.down('sm')]: {
+ marginBottom: 0,
+ textAlign: 'center',
+ },
+ },
+ ext: {
+ display: 'inline-block',
+ maxWidth: 100,
+ overflow: 'hidden',
+ textOverflow: 'ellipsis',
+ verticalAlign: 'bottom',
+ },
+ period: {
+ display: 'flex',
+ gridArea: 'period',
+ justifyContent: 'center',
+ alignItems: 'center',
+ [t.breakpoints.down('sm')]: {
+ paddingBottom: t.spacing(1),
+ paddingTop: t.spacing(2),
+ },
+ [t.breakpoints.up('md')]: {
+ height: 37,
+ justifyContent: 'flex-end',
+ },
+ },
+ chart: {
+ gridArea: 'chart',
+ position: 'relative',
+ },
+ left: {},
+ right: {},
+ dateStats: {
+ maxWidth: 180,
+ position: 'absolute',
+ top: 0,
+ width: 'calc(50% - 8px)',
+ zIndex: 1,
+ '&$left': {
+ left: 0,
+ },
+ '&$right': {
+ right: 0,
+ },
+ },
+ unavail: {
+ ...t.typography.body2,
+ alignItems: 'center',
+ display: 'flex',
+ height: '100%',
+ justifyContent: 'center',
+ position: 'absolute',
+ top: 0,
+ width: '100%',
+ },
+}))
+
+interface DownloadsProps extends M.BoxProps {
+ bucket: string
+ colorPool: ColorPool
+ chartHeight: number
+}
+
+export default function Downloads({
+ bucket,
+ colorPool,
+ chartHeight,
+ ...props
+}: DownloadsProps) {
+ const classes = useStyles()
+ const ref = React.useRef(null)
+ const { width } = useComponentSize(ref)
+ const [window, setWindow] = React.useState(ANALYTICS_WINDOW_OPTIONS[0].value)
+
+  const [cursor, setCursor] = React.useState<Cursor | null>(null)
+
+ const result = GQL.useQuery(
+ BUCKET_ACCESS_COUNTS_QUERY,
+ { bucket, window },
+ { pause: !cfg.analyticsBucket },
+ )
+
+ const processed = React.useMemo(
+ () =>
+ Eff.pipe(
+ result,
+ ({ fetching, data, error }) => {
+ if (fetching) return Eff.Option.none()
+ if (error) log.error('Error fetching bucket access counts:', error)
+ return Eff.Option.fromNullable(data?.bucketAccessCounts)
+ },
+ Eff.Option.map(processBucketAccessCounts),
+ ),
+ [result],
+ )
+
+ const processedWithCursor = React.useMemo(
+ () =>
+ Eff.pipe(
+ processed,
+ Eff.Option.map((counts) => ({
+ counts,
+ cursorStats: getCursorStats(counts, cursor),
+ })),
+ ),
+ [processed, cursor],
+ )
+
+ if (!cfg.analyticsBucket) {
+ return (
+
+ Requires CloudTrail
+
+ )
+ }
+
+ return (
+
+
+
+
+
+ {Eff.Option.match(processedWithCursor, {
+ onSome: ({ counts, cursorStats: stats }) => {
+ if (!counts?.byExtCollapsed.length) return 'Downloads'
+
+ const hl = stats?.highlighted
+ const ext = hl ? hl.ext || 'other' : 'total'
+ const total = hl ? hl.counts.total : counts.combined.total
+ return (
+ <>
+ Downloads ({ext}):{' '}
+ {readableQuantity(total)}
+ >
+ )
+ },
+ onNone: () => 'Downloads',
+ })}
+
+
+ {Eff.Option.match(processedWithCursor, {
+ onSome: ({ counts, cursorStats: stats }) => {
+ if (!counts.byExtCollapsed.length) {
+ return (
+
+ No Data
+
+ )
+ }
+
+ return (
+ <>
+ {/* @ts-expect-error */}
+                    <StackedAreaChart
+                      data={counts.byExtCollapsed.map((e) =>
+                        e.counts.counts.map((i) => Math.log(i.sum + 1)),
+ )}
+ onCursor={setCursor}
+ height={chartHeight}
+ width={width}
+ areaFills={counts.byExtCollapsed.map((e) =>
+ SVG.Paint.Color(colorPool.get(e.ext)),
+ )}
+ lineStroke={SVG.Paint.Color(M.colors.grey[500])}
+ extendL
+ extendR
+ px={10}
+ />
+
+
+
+
+
+
+ >
+ )
+ },
+ onNone: () => ,
+ })}
+
+
+ )
+}
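
To make the running-sum transform concrete, here is the shape `processAccessCountForDateArr` produces for the daily values exercised in `Downloads.spec.ts` above (dates omitted for brevity; this is a plain-JS restatement, not the `Eff.Array.mapAccum` implementation itself):

```typescript
// Daily values [1, 2, 3, 4] accumulate to sums [1, 3, 6, 10];
// each point keeps both numbers, and the chart plots log(sum + 1).
let sum = 0
const withSums = [1, 2, 3, 4].map((value) => {
  sum += value
  return { value, sum }
})
// withSums: [{ value: 1, sum: 1 }, { value: 2, sum: 3 },
//            { value: 3, sum: 6 }, { value: 4, sum: 10 }]
```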
diff --git a/catalog/app/containers/Bucket/Overview/Header.tsx b/catalog/app/containers/Bucket/Overview/Header.tsx
new file mode 100644
index 00000000000..3c76edd1330
--- /dev/null
+++ b/catalog/app/containers/Bucket/Overview/Header.tsx
@@ -0,0 +1,431 @@
+import type AWSSDK from 'aws-sdk'
+import cx from 'classnames'
+import * as Eff from 'effect'
+import * as React from 'react'
+import { Link as RRLink } from 'react-router-dom'
+import * as redux from 'react-redux'
+import * as M from '@material-ui/core'
+
+import Skeleton from 'components/Skeleton'
+import * as authSelectors from 'containers/Auth/selectors'
+import * as APIConnector from 'utils/APIConnector'
+import AsyncResult from 'utils/AsyncResult'
+import { useData } from 'utils/Data'
+import * as NamedRoutes from 'utils/NamedRoutes'
+import { readableBytes, readableQuantity, formatQuantity } from 'utils/string'
+import useConst from 'utils/useConstant'
+
+import * as requests from '../requests'
+
+import { ColorPool, makeColorPool } from './ColorPool'
+import Downloads from './Downloads'
+
+import bg from './Overview-bg.jpg'
+
+// interface StatsData {
+// exts: ExtData[]
+// totalObjects: number
+// totalBytes: number
+// }
+
+interface ExtData {
+ ext: string
+ bytes: number
+ objects: number
+}
+
+const RODA_LINK = 'https://registry.opendata.aws'
+const RODA_BUCKET = 'quilt-open-data-bucket'
+const MAX_EXTS = 7
+// must have length >= MAX_EXTS
+const COLOR_MAP = [
+ '#8ad3cb',
+ '#d7ce69',
+ '#bfbadb',
+ '#f4806c',
+ '#83b0d1',
+ '#b2de67',
+ '#bc81be',
+ '#f0b5d3',
+ '#7ba39f',
+ '#9894ad',
+ '#be7265',
+ '#94ad6b',
+]
+
+const useObjectsByExtStyles = M.makeStyles((t) => ({
+ root: {
+ display: 'grid',
+ gridAutoRows: 20,
+ gridColumnGap: t.spacing(1),
+ gridRowGap: t.spacing(0.25),
+ gridTemplateAreas: `
+ ". heading heading"
+ `,
+ gridTemplateColumns: 'minmax(30px, max-content) 1fr minmax(30px, max-content)',
+ gridTemplateRows: 'auto',
+ [t.breakpoints.down('sm')]: {
+ gridTemplateAreas: `
+ "heading heading heading"
+ `,
+ },
+ },
+ heading: {
+ ...t.typography.h6,
+ gridArea: 'heading',
+ marginBottom: t.spacing(1),
+ [t.breakpoints.down('sm')]: {
+ textAlign: 'center',
+ },
+ },
+ ext: {
+ color: t.palette.text.secondary,
+ gridColumn: 1,
+ fontSize: t.typography.overline.fontSize,
+ fontWeight: t.typography.fontWeightMedium,
+ letterSpacing: t.typography.subtitle2.letterSpacing,
+ lineHeight: t.typography.pxToRem(20),
+ textAlign: 'right',
+ },
+ count: {
+ color: t.palette.text.secondary,
+ gridColumn: 3,
+ fontSize: t.typography.overline.fontSize,
+ fontWeight: t.typography.fontWeightMedium,
+ letterSpacing: t.typography.subtitle2.letterSpacing,
+ lineHeight: t.typography.pxToRem(20),
+ },
+ bar: {
+ background: t.palette.action.hover,
+ gridColumn: 2,
+ },
+ gauge: {
+ height: '100%',
+ position: 'relative',
+ },
+ flip: {},
+ size: {
+ color: t.palette.common.white,
+ fontSize: t.typography.overline.fontSize,
+ fontWeight: t.typography.fontWeightMedium,
+ letterSpacing: t.typography.subtitle2.letterSpacing,
+ lineHeight: t.typography.pxToRem(20),
+ position: 'absolute',
+ right: t.spacing(1),
+ '&$flip': {
+ color: t.palette.text.hint,
+ left: `calc(100% + ${t.spacing(1)}px)`,
+ right: 'auto',
+ },
+ },
+ skeleton: {
+ gridColumn: '1 / span 3',
+ },
+ unavail: {
+ ...t.typography.body2,
+ alignItems: 'center',
+ display: 'flex',
+ gridColumn: '1 / span 3',
+ gridRow: `2 / span ${MAX_EXTS}`,
+ justifyContent: 'center',
+ },
+}))
+
+interface ObjectsByExtProps extends M.BoxProps {
+ data: $TSFixMe // AsyncResult
+ colorPool: ColorPool
+}
+
+function ObjectsByExt({ data, colorPool, ...props }: ObjectsByExtProps) {
+ const classes = useObjectsByExtStyles()
+ return (
+
+ Objects by File Extension
+ {AsyncResult.case(
+ {
+ Ok: (exts: ExtData[]) => {
+ const capped = exts.slice(0, MAX_EXTS)
+ const maxBytes = capped.reduce((max, e) => Math.max(max, e.bytes), 0)
+ const max = Math.log(maxBytes + 1)
+ const scale = (x: number) => Math.log(x + 1) / max
+ return capped.map(({ ext, bytes, objects }, i) => {
+ const color = colorPool.get(ext)
+ return (
+
+
+ {ext || 'other'}
+
+
+
+
+ {readableBytes(bytes)}
+
+
+
+
+ {readableQuantity(objects)}
+
+
+ )
+ })
+ },
+ _: (r: $TSFixMe) => (
+ <>
+ {Eff.Array.makeBy(MAX_EXTS, (i) => (
+
+ ))}
+ {AsyncResult.Err.is(r) && (
+ Data unavailable
+ )}
+ >
+ ),
+ },
+ data,
+ )}
+
+ )
+}
+
+const useStatDisplayStyles = M.makeStyles((t) => ({
+ root: {
+ alignItems: 'baseline',
+ display: 'flex',
+ '& + &': {
+ marginLeft: t.spacing(1.5),
+ [t.breakpoints.up('sm')]: {
+ marginLeft: t.spacing(4),
+ },
+ [t.breakpoints.up('md')]: {
+ marginLeft: t.spacing(6),
+ },
+ },
+ },
+ value: {
+ fontSize: t.typography.h6.fontSize,
+ fontWeight: t.typography.fontWeightBold,
+ letterSpacing: 0,
+ lineHeight: '20px',
+ [t.breakpoints.up('sm')]: {
+ fontSize: t.typography.h4.fontSize,
+ lineHeight: '32px',
+ },
+ },
+ label: {
+ ...t.typography.body2,
+ color: t.palette.grey[300],
+ lineHeight: 1,
+ marginLeft: t.spacing(0.5),
+ [t.breakpoints.up('sm')]: {
+ marginLeft: t.spacing(1),
+ },
+ },
+ skeletonContainer: {
+ alignItems: 'center',
+ height: 20,
+ [t.breakpoints.up('sm')]: {
+ height: 32,
+ },
+ },
+ skeleton: {
+ borderRadius: t.shape.borderRadius,
+ height: t.typography.h6.fontSize,
+ width: 96,
+ [t.breakpoints.up('sm')]: {
+ height: t.typography.h4.fontSize,
+ width: 120,
+ },
+ },
+}))
+
+interface StatDisplayProps {
+  value: $TSFixMe // AsyncResult
+ label?: string
+ format?: (v: any) => any
+ fallback?: (v: any) => any
+}
+
+function StatDisplay({ value, label, format, fallback }: StatDisplayProps) {
+ const classes = useStatDisplayStyles()
+ return Eff.pipe(
+ value,
+ AsyncResult.case({
+ Ok: Eff.flow(format || Eff.identity, AsyncResult.Ok),
+ Err: Eff.flow(fallback || Eff.identity, AsyncResult.Ok),
+ _: Eff.identity,
+ }),
+ AsyncResult.case({
+ Ok: (v: $TSFixMe) =>
+ v != null && (
+
+ {v}
+ {!!label && {label}}
+
+ ),
+ _: () => (
+
+
+
+ ),
+ }),
+ ) as JSX.Element
+}
+
+// use the same height as the bar chart: 20px per bar with 2px margin
+const DOWNLOADS_CHART_H = 22 * MAX_EXTS - 2
+
+const useStyles = M.makeStyles((t) => ({
+ root: {
+ position: 'relative',
+ [t.breakpoints.down('xs')]: {
+ borderRadius: 0,
+ },
+ [t.breakpoints.up('sm')]: {
+ marginTop: t.spacing(2),
+ },
+ },
+ top: {
+ background: `center / cover url(${bg}) ${t.palette.grey[700]}`,
+ borderTopLeftRadius: t.shape.borderRadius,
+ borderTopRightRadius: t.shape.borderRadius,
+ color: t.palette.common.white,
+ overflow: 'hidden',
+ paddingBottom: t.spacing(3),
+ paddingLeft: t.spacing(2),
+ paddingRight: t.spacing(2),
+ paddingTop: t.spacing(4),
+ position: 'relative',
+ [t.breakpoints.up('sm')]: {
+ padding: t.spacing(4),
+ },
+ [t.breakpoints.down('xs')]: {
+ borderRadius: 0,
+ },
+ },
+ settings: {
+ color: t.palette.common.white,
+ position: 'absolute',
+ right: t.spacing(2),
+ top: t.spacing(2),
+ },
+}))
+
+interface HeaderProps {
+ s3: AWSSDK.S3
+ bucket: string
+ overviewUrl: string | null | undefined
+ description: string | null | undefined
+}
+
+export default function Header({ s3, overviewUrl, bucket, description }: HeaderProps) {
+ const classes = useStyles()
+ const req = APIConnector.use()
+ const isRODA = !!overviewUrl && overviewUrl.includes(`/${RODA_BUCKET}/`)
+ const colorPool = useConst(() => makeColorPool(COLOR_MAP))
+ const statsData = useData(requests.bucketStats, { req, s3, bucket, overviewUrl })
+ const pkgCountData = useData(requests.countPackageRevisions, { req, bucket })
+ const { urls } = NamedRoutes.use()
+ const isAdmin = redux.useSelector(authSelectors.isAdmin)
+ return (
+
+
+ {bucket}
+ {!!description && (
+
+ {description}
+
+ )}
+ {isRODA && (
+
+
+ From the{' '}
+
+ Registry of Open Data on AWS
+
+
+
+ )}
+
+ '? B'}
+ />
+ '?'}
+ />
+ null}
+ />
+
+ {isAdmin && (
+
+
+ settings
+
+
+ )}
+
+
+
+
+
+
+
+
+
+
+
+ )
+}
diff --git a/catalog/app/containers/Bucket/Overview-bg.jpg b/catalog/app/containers/Bucket/Overview/Overview-bg.jpg
similarity index 100%
rename from catalog/app/containers/Bucket/Overview-bg.jpg
rename to catalog/app/containers/Bucket/Overview/Overview-bg.jpg
diff --git a/catalog/app/containers/Bucket/Overview/Overview.tsx b/catalog/app/containers/Bucket/Overview/Overview.tsx
new file mode 100644
index 00000000000..2eee868fd4f
--- /dev/null
+++ b/catalog/app/containers/Bucket/Overview/Overview.tsx
@@ -0,0 +1,163 @@
+import type AWSSDK from 'aws-sdk'
+import * as React from 'react'
+import { useParams } from 'react-router-dom'
+import * as M from '@material-ui/core'
+
+import cfg from 'constants/config'
+import type * as Model from 'model'
+import * as APIConnector from 'utils/APIConnector'
+import * as AWS from 'utils/AWS'
+import AsyncResult from 'utils/AsyncResult'
+import * as BucketPreferences from 'utils/BucketPreferences'
+import Data from 'utils/Data'
+import * as GQL from 'utils/GraphQL'
+import * as LinkedData from 'utils/LinkedData'
+
+import * as Gallery from '../Gallery'
+import * as Summarize from '../Summarize'
+import * as requests from '../requests'
+
+import Header from './Header'
+import BUCKET_CONFIG_QUERY from './gql/BucketConfig.generated'
+
+interface BucketReadmes {
+ forced?: Model.S3.S3ObjectLocation
+ discovered: Model.S3.S3ObjectLocation[]
+}
+
+interface ReadmesProps {
+ s3: AWSSDK.S3
+ bucket: string
+ overviewUrl: string | undefined | null
+}
+
+function Readmes({ s3, overviewUrl, bucket }: ReadmesProps) {
+ return (
+ // @ts-expect-error
+
+ {AsyncResult.case({
+ Ok: (rs: BucketReadmes) =>
+ (rs.discovered.length > 0 || !!rs.forced) && (
+ <>
+ {!!rs.forced && (
+
+ )}
+ {rs.discovered.map((h) => (
+
+ ))}
+ >
+ ),
+ _: () => ,
+ })}
+
+ )
+}
+
+interface ImgsProps {
+ s3: AWSSDK.S3
+ bucket: string
+ overviewUrl: string | undefined | null
+ inStack: boolean
+}
+
+function Imgs({ s3, overviewUrl, inStack, bucket }: ImgsProps) {
+ const req = APIConnector.use()
+ return (
+ // @ts-expect-error
+
+ {AsyncResult.case({
+ Ok: (images: Model.S3.S3ObjectLocation[]) =>
+ images.length ? : null,
+ _: () => ,
+ })}
+
+ )
+}
+
+interface ThumbnailsWrapperProps extends ImgsProps {
+ preferences?:
+ | false
+ | {
+ overview: boolean
+ summarize: boolean
+ }
+}
+
+function ThumbnailsWrapper({
+ s3,
+ overviewUrl,
+ inStack,
+ bucket,
+ preferences: galleryPrefs,
+}: ThumbnailsWrapperProps) {
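+  // Skip the image gallery entirely when overview images are disabled by config or by bucket preferences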
+ if (cfg.noOverviewImages || !galleryPrefs) return null
+ if (!galleryPrefs.overview) return null
+ return (
+ // @ts-expect-error
+
+ {AsyncResult.case({
+ Ok: (h?: Model.S3.S3ObjectLocation) =>
+ (!h || galleryPrefs.summarize) && (
+
+ ),
+ Err: () => ,
+ Pending: () => ,
+ _: () => null,
+ })}
+
+ )
+}
+
+export default function Overview() {
+ const { bucket } = useParams<{ bucket: string }>()
+
+ const s3 = AWS.S3.use()
+ const { bucketConfig } = GQL.useQueryS(BUCKET_CONFIG_QUERY, { bucket })
+ const inStack = !!bucketConfig
+ const overviewUrl = bucketConfig?.overviewUrl
+ const description = bucketConfig?.description
+ const prefs = BucketPreferences.use()
+ return (
+
+ {inStack && (
+
+
+
+ )}
+ {bucketConfig ? (
+
+ ) : (
+
+ {bucket}
+
+ )}
+
+ {BucketPreferences.Result.match(
+ {
+ Ok: ({ ui: { blocks } }) => (
+
+ ),
+ Pending: () => ,
+ Init: () => null,
+ },
+ prefs,
+ )}
+
+
+ )
+}
diff --git a/catalog/app/containers/Bucket/Overview/gql/BucketAccessCounts.generated.ts b/catalog/app/containers/Bucket/Overview/gql/BucketAccessCounts.generated.ts
new file mode 100644
index 00000000000..f7a763654e1
--- /dev/null
+++ b/catalog/app/containers/Bucket/Overview/gql/BucketAccessCounts.generated.ts
@@ -0,0 +1,207 @@
+/* eslint-disable @typescript-eslint/naming-convention */
+import type { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-node/core'
+import * as Types from '../../../../model/graphql/types.generated'
+
+export type AccessCountsSelectionFragment = {
+ readonly __typename: 'AccessCounts'
+} & Pick & {
+ readonly counts: ReadonlyArray<
+ { readonly __typename: 'AccessCountForDate' } & Pick<
+ Types.AccessCountForDate,
+ 'date' | 'value'
+ >
+ >
+ }
+
+export type containers_Bucket_Overview_gql_BucketAccessCountsQueryVariables =
+ Types.Exact<{
+ bucket: Types.Scalars['String']
+ window: Types.Scalars['Int']
+ }>
+
+export type containers_Bucket_Overview_gql_BucketAccessCountsQuery = {
+ readonly __typename: 'Query'
+} & {
+ readonly bucketAccessCounts: Types.Maybe<
+ { readonly __typename: 'BucketAccessCounts' } & {
+ readonly byExt: ReadonlyArray<
+ { readonly __typename: 'AccessCountsGroup' } & Pick<
+ Types.AccessCountsGroup,
+ 'ext'
+ > & {
+ readonly counts: {
+ readonly __typename: 'AccessCounts'
+ } & AccessCountsSelectionFragment
+ }
+ >
+ readonly byExtCollapsed: ReadonlyArray<
+ { readonly __typename: 'AccessCountsGroup' } & Pick<
+ Types.AccessCountsGroup,
+ 'ext'
+ > & {
+ readonly counts: {
+ readonly __typename: 'AccessCounts'
+ } & AccessCountsSelectionFragment
+ }
+ >
+ readonly combined: {
+ readonly __typename: 'AccessCounts'
+ } & AccessCountsSelectionFragment
+ }
+ >
+}
+
+export const AccessCountsSelectionFragmentDoc = {
+ kind: 'Document',
+ definitions: [
+ {
+ kind: 'FragmentDefinition',
+ name: { kind: 'Name', value: 'AccessCountsSelection' },
+ typeCondition: { kind: 'NamedType', name: { kind: 'Name', value: 'AccessCounts' } },
+ selectionSet: {
+ kind: 'SelectionSet',
+ selections: [
+ { kind: 'Field', name: { kind: 'Name', value: 'total' } },
+ {
+ kind: 'Field',
+ name: { kind: 'Name', value: 'counts' },
+ selectionSet: {
+ kind: 'SelectionSet',
+ selections: [
+ { kind: 'Field', name: { kind: 'Name', value: 'date' } },
+ { kind: 'Field', name: { kind: 'Name', value: 'value' } },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ ],
+} as unknown as DocumentNode
+export const containers_Bucket_Overview_gql_BucketAccessCountsDocument = {
+ kind: 'Document',
+ definitions: [
+ {
+ kind: 'OperationDefinition',
+ operation: 'query',
+ name: { kind: 'Name', value: 'containers_Bucket_Overview_gql_BucketAccessCounts' },
+ variableDefinitions: [
+ {
+ kind: 'VariableDefinition',
+ variable: { kind: 'Variable', name: { kind: 'Name', value: 'bucket' } },
+ type: {
+ kind: 'NonNullType',
+ type: { kind: 'NamedType', name: { kind: 'Name', value: 'String' } },
+ },
+ },
+ {
+ kind: 'VariableDefinition',
+ variable: { kind: 'Variable', name: { kind: 'Name', value: 'window' } },
+ type: {
+ kind: 'NonNullType',
+ type: { kind: 'NamedType', name: { kind: 'Name', value: 'Int' } },
+ },
+ },
+ ],
+ selectionSet: {
+ kind: 'SelectionSet',
+ selections: [
+ {
+ kind: 'Field',
+ name: { kind: 'Name', value: 'bucketAccessCounts' },
+ arguments: [
+ {
+ kind: 'Argument',
+ name: { kind: 'Name', value: 'bucket' },
+ value: { kind: 'Variable', name: { kind: 'Name', value: 'bucket' } },
+ },
+ {
+ kind: 'Argument',
+ name: { kind: 'Name', value: 'window' },
+ value: { kind: 'Variable', name: { kind: 'Name', value: 'window' } },
+ },
+ ],
+ selectionSet: {
+ kind: 'SelectionSet',
+ selections: [
+ {
+ kind: 'Field',
+ name: { kind: 'Name', value: 'byExt' },
+ selectionSet: {
+ kind: 'SelectionSet',
+ selections: [
+ { kind: 'Field', name: { kind: 'Name', value: 'ext' } },
+ {
+ kind: 'Field',
+ name: { kind: 'Name', value: 'counts' },
+ selectionSet: {
+ kind: 'SelectionSet',
+ selections: [
+ {
+ kind: 'FragmentSpread',
+ name: { kind: 'Name', value: 'AccessCountsSelection' },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ {
+ kind: 'Field',
+ alias: { kind: 'Name', value: 'byExtCollapsed' },
+ name: { kind: 'Name', value: 'byExt' },
+ arguments: [
+ {
+ kind: 'Argument',
+ name: { kind: 'Name', value: 'groups' },
+ value: { kind: 'IntValue', value: '10' },
+ },
+ ],
+ selectionSet: {
+ kind: 'SelectionSet',
+ selections: [
+ { kind: 'Field', name: { kind: 'Name', value: 'ext' } },
+ {
+ kind: 'Field',
+ name: { kind: 'Name', value: 'counts' },
+ selectionSet: {
+ kind: 'SelectionSet',
+ selections: [
+ {
+ kind: 'FragmentSpread',
+ name: { kind: 'Name', value: 'AccessCountsSelection' },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ {
+ kind: 'Field',
+ name: { kind: 'Name', value: 'combined' },
+ selectionSet: {
+ kind: 'SelectionSet',
+ selections: [
+ {
+ kind: 'FragmentSpread',
+ name: { kind: 'Name', value: 'AccessCountsSelection' },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ ...AccessCountsSelectionFragmentDoc.definitions,
+ ],
+} as unknown as DocumentNode<
+ containers_Bucket_Overview_gql_BucketAccessCountsQuery,
+ containers_Bucket_Overview_gql_BucketAccessCountsQueryVariables
+>
+
+export { containers_Bucket_Overview_gql_BucketAccessCountsDocument as default }
diff --git a/catalog/app/containers/Bucket/Overview/gql/BucketAccessCounts.graphql b/catalog/app/containers/Bucket/Overview/gql/BucketAccessCounts.graphql
new file mode 100644
index 00000000000..c54990cda53
--- /dev/null
+++ b/catalog/app/containers/Bucket/Overview/gql/BucketAccessCounts.graphql
@@ -0,0 +1,27 @@
+fragment AccessCountsSelection on AccessCounts {
+ total
+ counts {
+ date
+ value
+ }
+}
+
+query ($bucket: String!, $window: Int!) {
+ bucketAccessCounts(bucket: $bucket, window: $window) {
+ byExt {
+ ext
+ counts {
+ ...AccessCountsSelection
+ }
+ }
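+    # Collapsed variant of byExt, limited by the groups argument (presumably so the downloads chart shows a bounded number of bars)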
+ byExtCollapsed: byExt(groups: 10) {
+ ext
+ counts {
+ ...AccessCountsSelection
+ }
+ }
+ combined {
+ ...AccessCountsSelection
+ }
+ }
+}
diff --git a/catalog/app/containers/Bucket/OverviewBucketConfig.generated.ts b/catalog/app/containers/Bucket/Overview/gql/BucketConfig.generated.ts
similarity index 75%
rename from catalog/app/containers/Bucket/OverviewBucketConfig.generated.ts
rename to catalog/app/containers/Bucket/Overview/gql/BucketConfig.generated.ts
index 89a16de328f..293100b338b 100644
--- a/catalog/app/containers/Bucket/OverviewBucketConfig.generated.ts
+++ b/catalog/app/containers/Bucket/Overview/gql/BucketConfig.generated.ts
@@ -1,12 +1,12 @@
/* eslint-disable @typescript-eslint/naming-convention */
import type { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-node/core'
-import * as Types from '../../model/graphql/types.generated'
+import * as Types from '../../../../model/graphql/types.generated'
-export type containers_Bucket_OverviewBucketConfigQueryVariables = Types.Exact<{
+export type containers_Bucket_Overview_gql_BucketConfigQueryVariables = Types.Exact<{
bucket: Types.Scalars['String']
}>
-export type containers_Bucket_OverviewBucketConfigQuery = {
+export type containers_Bucket_Overview_gql_BucketConfigQuery = {
readonly __typename: 'Query'
} & {
readonly bucketConfig: Types.Maybe<
@@ -17,13 +17,13 @@ export type containers_Bucket_OverviewBucketConfigQuery = {
>
}
-export const containers_Bucket_OverviewBucketConfigDocument = {
+export const containers_Bucket_Overview_gql_BucketConfigDocument = {
kind: 'Document',
definitions: [
{
kind: 'OperationDefinition',
operation: 'query',
- name: { kind: 'Name', value: 'containers_Bucket_OverviewBucketConfig' },
+ name: { kind: 'Name', value: 'containers_Bucket_Overview_gql_BucketConfig' },
variableDefinitions: [
{
kind: 'VariableDefinition',
@@ -61,8 +61,8 @@ export const containers_Bucket_OverviewBucketConfigDocument = {
},
],
} as unknown as DocumentNode<
- containers_Bucket_OverviewBucketConfigQuery,
- containers_Bucket_OverviewBucketConfigQueryVariables
+ containers_Bucket_Overview_gql_BucketConfigQuery,
+ containers_Bucket_Overview_gql_BucketConfigQueryVariables
>
-export { containers_Bucket_OverviewBucketConfigDocument as default }
+export { containers_Bucket_Overview_gql_BucketConfigDocument as default }
diff --git a/catalog/app/containers/Bucket/OverviewBucketConfig.graphql b/catalog/app/containers/Bucket/Overview/gql/BucketConfig.graphql
similarity index 100%
rename from catalog/app/containers/Bucket/OverviewBucketConfig.graphql
rename to catalog/app/containers/Bucket/Overview/gql/BucketConfig.graphql
diff --git a/catalog/app/containers/Bucket/Overview/index.tsx b/catalog/app/containers/Bucket/Overview/index.tsx
new file mode 100644
index 00000000000..1de667af70e
--- /dev/null
+++ b/catalog/app/containers/Bucket/Overview/index.tsx
@@ -0,0 +1 @@
+export { default } from './Overview'
diff --git a/catalog/app/containers/Bucket/PackageDialog/DialogError.tsx b/catalog/app/containers/Bucket/PackageDialog/DialogError.tsx
index 291cd6c3a75..3e04966ab87 100644
--- a/catalog/app/containers/Bucket/PackageDialog/DialogError.tsx
+++ b/catalog/app/containers/Bucket/PackageDialog/DialogError.tsx
@@ -42,7 +42,7 @@ const errorDisplay = R.cond([
Please fix the{' '}
workflows config{' '}
according to{' '}
-
+
the documentation
.
diff --git a/catalog/app/containers/Bucket/PackageDialog/SelectWorkflow.tsx b/catalog/app/containers/Bucket/PackageDialog/SelectWorkflow.tsx
index 55f14548fcb..d3a1bc8ca4b 100644
--- a/catalog/app/containers/Bucket/PackageDialog/SelectWorkflow.tsx
+++ b/catalog/app/containers/Bucket/PackageDialog/SelectWorkflow.tsx
@@ -71,7 +71,7 @@ export default function SelectWorkflow({
{!!error && {error}}
-
+
Learn about data quality workflows
, or edit{' '}
diff --git a/catalog/app/containers/Bucket/Queries/Athena/Athena.tsx b/catalog/app/containers/Bucket/Queries/Athena/Athena.tsx
index b7c20d6f26c..9093a8159e2 100644
--- a/catalog/app/containers/Bucket/Queries/Athena/Athena.tsx
+++ b/catalog/app/containers/Bucket/Queries/Athena/Athena.tsx
@@ -1,144 +1,156 @@
import cx from 'classnames'
-import invariant from 'invariant'
import * as R from 'ramda'
import * as React from 'react'
import * as RRDom from 'react-router-dom'
import * as M from '@material-ui/core'
import Code from 'components/Code'
+import Placeholder from 'components/Placeholder'
import Skeleton from 'components/Skeleton'
+import * as BucketPreferences from 'utils/BucketPreferences'
import * as NamedRoutes from 'utils/NamedRoutes'
import QuerySelect from '../QuerySelect'
-import * as requests from '../requests'
-import { Alert, Section, makeAsyncDataErrorHandler } from './Components'
-import CreatePackage from './CreatePackage'
+import { Alert, Section } from './Components'
import * as QueryEditor from './QueryEditor'
-import Results from './Results'
import History from './History'
+import Results from './Results'
import Workgroups from './Workgroups'
+import * as Model from './model'
+import { doQueryResultsContainManifestEntries } from './model/createPackage'
-interface QuerySelectSkeletonProps {
- className?: string
-}
+const CreatePackage = React.lazy(() => import('./CreatePackage'))
-function QuerySelectSkeleton({ className }: QuerySelectSkeletonProps) {
+function SeeDocsForCreatingPackage() {
return (
-
-
-
-
+
+
+
+ help_outline
+
+
+
)
}
-const useAthenaQueriesStyles = M.makeStyles((t) => ({
- form: {
- margin: t.spacing(3, 0, 0),
+const useRelieveMessageStyles = M.makeStyles((t) => ({
+ root: {
+ padding: t.spacing(2),
+ },
+ text: {
+ animation: '$show 0.3s ease-out',
+ },
+ '@keyframes show': {
+ from: {
+ opacity: 0,
+ },
+ to: {
+ opacity: 1,
+ },
},
}))
-interface QueryConstructorProps {
- bucket: string
- className?: string
- initialValue?: requests.athena.QueryExecution
- workgroup: requests.athena.Workgroup
+const RELIEVE_INITIAL_TIMEOUT = 1000
+
+interface RelieveMessageProps {
+ className: string
+ messages: string[]
}
-function QueryConstructor({
- bucket,
- className,
- initialValue,
- workgroup,
-}: QueryConstructorProps) {
- const [query, setQuery] = React.useState(null)
- const [prev, setPrev] = React.useState(null)
- const data = requests.athena.useQueries(workgroup, prev)
- const classes = useAthenaQueriesStyles()
- const [value, setValue] = React.useState(
- initialValue || null,
+function RelieveMessage({ className, messages }: RelieveMessageProps) {
+ const classes = useRelieveMessageStyles()
+ const [relieve, setRelieve] = React.useState('')
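+  // Show progressively more reassuring messages on a quadratic schedule (1s, 4s, 9s, ...) while the query is still running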
+ const timersData = React.useMemo(
+ () =>
+ messages.map((message, index) => ({
+ timeout: RELIEVE_INITIAL_TIMEOUT * (index + 1) ** 2,
+ message,
+ })),
+ [messages],
)
- const handleNamedQueryChange = React.useCallback(
- (q: requests.athena.AthenaQuery | null) => {
- setQuery(q)
- setValue((x) => ({
- ...x,
- query: q?.body,
- }))
- },
- [],
+ React.useEffect(() => {
+ const timers = timersData.map(({ timeout, message }) =>
+ setTimeout(() => setRelieve(message), timeout),
+ )
+ return () => {
+ timers.forEach((timer) => clearTimeout(timer))
+ }
+ }, [timersData])
+ if (!relieve) return null
+ return (
+
+
+ {relieve}
+
+
)
+}
- const handleChange = React.useCallback((x: requests.athena.QueryExecution) => {
- setValue(x)
- setQuery(null)
- }, [])
+interface QuerySelectSkeletonProps {
+ className?: string
+}
+function QuerySelectSkeleton({ className }: QuerySelectSkeletonProps) {
return (
- {data.case({
- Ok: (queries) => (
-
- {!!queries.list.length && (
-
- onChange={handleNamedQueryChange}
- onLoadMore={queries.next ? () => setPrev(queries) : undefined}
- queries={queries.list}
- value={query}
- />
- )}
-
- ),
- Err: makeAsyncDataErrorHandler('Select query'),
- _: () => ,
- })}
-
+
+
)
}
-function QueryConstructorSkeleton() {
- const classes = useStyles()
+interface QueryConstructorProps {
+ className?: string
+}
+
+function QueryConstructor({ className }: QueryConstructorProps) {
+ const { query, queries, queryRun } = Model.use()
+
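+  // Surface saved-query errors first, show a skeleton while loading, then an empty state when there are no saved queries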
+ if (Model.isError(queries.data)) {
+ return
+ }
+
+ if (!Model.hasData(queries.data) || !Model.isReady(query.value)) {
+ return
+ }
+
+ if (!queries.data.list.length && !Model.isError(query.value)) {
+ return No saved queries.
+ }
+
return (
<>
-
-
+
+ label="Select a query"
+ className={className}
+ disabled={Model.isLoading(queryRun)}
+ onChange={query.setValue}
+ onLoadMore={queries.data.next ? queries.loadMore : undefined}
+ queries={queries.data.list}
+ value={Model.isError(query.value) ? null : query.value}
+ />
+ {Model.isError(query.value) && (
+ {query.value.message}
+ )}
>
)
}
-interface HistoryContainerProps {
- bucket: string
- className: string
- workgroup: requests.athena.Workgroup
-}
-
-function HistoryContainer({ bucket, className, workgroup }: HistoryContainerProps) {
- const [prev, setPrev] = React.useState(
- null,
- )
- const data = requests.athena.useQueryExecutions(workgroup, prev)
+function HistoryContainer() {
+ const { bucket, executions } = Model.use()
+ if (Model.isError(executions.data)) {
+ return
+ }
+ if (!Model.hasData(executions.data)) {
+ return
+ }
return (
-
- {data.case({
- Ok: (executions) => (
- setPrev(executions) : undefined}
- workgroup={workgroup}
- />
- ),
- Err: makeAsyncDataErrorHandler('Executions Data'),
- _: () => ,
- })}
-
+
)
}
@@ -146,89 +158,90 @@ const useResultsContainerStyles = M.makeStyles((t) => ({
breadcrumbs: {
margin: t.spacing(0, 0, 1),
},
+ relieve: {
+ left: '50%',
+ position: 'absolute',
+ top: t.spacing(7),
+ transform: 'translateX(-50%)',
+ },
+ table: {
+ position: 'relative',
+ },
}))
interface ResultsContainerSkeletonProps {
bucket: string
className: string
- queryExecutionId: string
- workgroup: requests.athena.Workgroup
}
-function ResultsContainerSkeleton({
- bucket,
- className,
- queryExecutionId,
- workgroup,
-}: ResultsContainerSkeletonProps) {
+const relieveMessages = [
+ 'Still loading…',
+ 'This is taking a moment. Thanks for your patience!',
+ 'Looks like a heavy task! We’re still working on it.',
+ 'Hang in there, we haven’t forgotten about you! Your request is still being processed.',
+]
+
+function ResultsContainerSkeleton({ bucket, className }: ResultsContainerSkeletonProps) {
const classes = useResultsContainerStyles()
return (
)
}
interface ResultsContainerProps {
- bucket: string
className: string
- queryExecutionId: string
- queryResults: requests.athena.QueryResultsResponse
- workgroup: requests.athena.Workgroup
- onLoadMore?: () => void
}
-function ResultsContainer({
- bucket,
- className,
- queryExecutionId,
- queryResults,
- onLoadMore,
- workgroup,
-}: ResultsContainerProps) {
+function ResultsContainer({ className }: ResultsContainerProps) {
const classes = useResultsContainerStyles()
+ const { bucket, execution, results } = Model.use()
+
+ if (Model.isError(execution)) {
+ return (
+
+ )
+ }
+
+ if (Model.isError(results.data)) {
+ return (
+
+ )
+ }
+
+ if (!Model.isReady(execution) || !Model.isReady(results.data)) {
+ return
+ }
+
return (
-
- {!!queryResults.rows.length && (
-
+
+ {doQueryResultsContainManifestEntries(results.data) ? (
+ }>
+
+
+ ) : (
+
)}
- {/* eslint-disable-next-line no-nested-ternary */}
- {queryResults.rows.length ? (
-
- ) : // eslint-disable-next-line no-nested-ternary
- queryResults.queryExecution.error ? (
-
- ) : queryResults.queryExecution ? (
-
- ) : (
-
- )}
+
)
}
@@ -248,19 +261,6 @@ function TableSkeleton({ size }: TableSkeletonProps) {
)
}
-interface QueryResults {
- data: requests.AsyncData
- loadMore: (prev: requests.athena.QueryResultsResponse) => void
-}
-
-function useQueryResults(queryExecutionId?: string): QueryResults {
- const [prev, setPrev] = React.useState(
- null,
- )
- const data = requests.athena.useQueryResults(queryExecutionId || null, prev)
- return React.useMemo(() => ({ data, loadMore: setPrev }), [data])
-}
-
const useOverrideStyles = M.makeStyles({
li: {
'&::before': {
@@ -278,7 +278,7 @@ const useResultsBreadcrumbsStyles = M.makeStyles({
display: 'flex',
},
actions: {
- marginLeft: 'auto',
+ margin: '-3px 0 -3px auto',
},
breadcrumb: {
display: 'flex',
@@ -290,19 +290,12 @@ const useResultsBreadcrumbsStyles = M.makeStyles({
interface ResultsBreadcrumbsProps {
bucket: string
- children: React.ReactNode
+ children?: React.ReactNode
className?: string
- queryExecutionId?: string
- workgroup: requests.athena.Workgroup
}
-function ResultsBreadcrumbs({
- bucket,
- children,
- className,
- queryExecutionId,
- workgroup,
-}: ResultsBreadcrumbsProps) {
+function ResultsBreadcrumbs({ bucket, children, className }: ResultsBreadcrumbsProps) {
+ const { workgroup, queryExecutionId } = Model.use()
const classes = useResultsBreadcrumbsStyles()
const overrideClasses = useOverrideStyles()
const { urls } = NamedRoutes.use()
@@ -311,7 +304,7 @@ function ResultsBreadcrumbs({
Query Executions
@@ -320,7 +313,7 @@ function ResultsBreadcrumbs({
- {children}
+ {children && {children}
}
)
}
@@ -335,97 +328,13 @@ const useStyles = M.makeStyles((t) => ({
section: {
margin: t.spacing(3, 0, 0),
},
+ form: {
+ margin: t.spacing(3, 0, 0),
+ },
}))
-interface AthenaMainProps {
- bucket: string
- workgroup: string
-}
-
-function AthenaMain({ bucket, workgroup }: AthenaMainProps) {
- const classes = useStyles()
- const data = requests.athena.useDefaultQueryExecution()
- return (
-
- {data.case({
- Ok: (queryExecution) => (
-
- ),
- Err: makeAsyncDataErrorHandler('Default catalog and database'),
- _: () => ,
- })}
-
-
- )
-}
-
-interface AthenaExecutionProps {
- bucket: string
- queryExecutionId: string
- workgroup: string
-}
-
-function AthenaExecution({ bucket, workgroup, queryExecutionId }: AthenaExecutionProps) {
- const classes = useStyles()
- const results = useQueryResults(queryExecutionId)
- return (
-
- {results.data.case({
- Ok: (value) => (
-
- ),
- _: () => ,
- })}
-
- {results.data.case({
- Ok: (queryResults) => (
- results.loadMore(queryResults) : undefined
- }
- workgroup={workgroup}
- />
- ),
- _: () => (
-
- ),
- Err: makeAsyncDataErrorHandler('Query Results Data'),
- })}
-
- )
-}
-
-export default function AthenaContainer() {
- const { bucket, queryExecutionId, workgroup } = RRDom.useParams<{
- bucket: string
- queryExecutionId?: string
- workgroup?: string
- }>()
- invariant(!!bucket, '`bucket` must be defined')
+function AthenaContainer() {
+ const { bucket, queryExecutionId, workgroup } = Model.use()
const classes = useStyles()
return (
@@ -434,18 +343,38 @@ export default function AthenaContainer() {
Athena SQL
-
-
- {workgroup &&
- (queryExecutionId ? (
-
- ) : (
-
- ))}
+
+
+ {Model.hasData(workgroup.data) && (
+
+
+
+
+
+ {queryExecutionId ? (
+
+ ) : (
+
+ )}
+
+ )}
>
)
}
+
+export default function Wrapper() {
+ const prefs = BucketPreferences.use()
+ return BucketPreferences.Result.match(
+ {
+ Ok: ({ ui }) => (
+
+
+
+ ),
+ _: () => ,
+ },
+ prefs,
+ )
+}
diff --git a/catalog/app/containers/Bucket/Queries/Athena/Components.tsx b/catalog/app/containers/Bucket/Queries/Athena/Components.tsx
index 4521962c352..e4114ff055c 100644
--- a/catalog/app/containers/Bucket/Queries/Athena/Components.tsx
+++ b/catalog/app/containers/Bucket/Queries/Athena/Components.tsx
@@ -3,11 +3,10 @@ import * as React from 'react'
import * as M from '@material-ui/core'
import * as Lab from '@material-ui/lab'
-import * as Sentry from 'utils/Sentry'
-
const useSectionStyles = M.makeStyles((t) => ({
header: {
- margin: t.spacing(0, 0, 1),
+ ...t.typography.body1,
+ margin: t.spacing(2, 0, 1),
},
}))
@@ -20,36 +19,28 @@ interface SectionProps {
export function Section({ className, empty, title, children }: SectionProps) {
const classes = useSectionStyles()
- if (!children && empty)
- return {empty}
+ if (!children && empty) {
+ return {empty}
+ }
return (
-
{title}
+
{title}
{children}
)
}
interface AlertProps {
+ className?: string
error: Error
title: string
}
-export function Alert({ error, title }: AlertProps) {
- const sentry = Sentry.use()
-
- React.useEffect(() => {
- sentry('captureException', error)
- }, [error, sentry])
-
+export function Alert({ className, error, title }: AlertProps) {
return (
-
+
{title}
{error.message}
)
}
-
-export function makeAsyncDataErrorHandler(title: string) {
- return (error: Error) =>
-}
diff --git a/catalog/app/containers/Bucket/Queries/Athena/CreatePackage.tsx b/catalog/app/containers/Bucket/Queries/Athena/CreatePackage.tsx
index 43516279e67..5d0229e22c0 100644
--- a/catalog/app/containers/Bucket/Queries/Athena/CreatePackage.tsx
+++ b/catalog/app/containers/Bucket/Queries/Athena/CreatePackage.tsx
@@ -4,114 +4,16 @@ import * as M from '@material-ui/core'
import * as Dialog from 'components/Dialog'
import * as AddToPackage from 'containers/AddToPackage'
import { usePackageCreationDialog } from 'containers/Bucket/PackageDialog/PackageCreationForm'
-import type * as Model from 'model'
-import * as s3paths from 'utils/s3paths'
-import * as requests from '../requests'
+import type * as requests from './model/requests'
+import {
+ ParsedRows,
+ doQueryResultsContainManifestEntries,
+ parseQueryResults,
+} from './model/createPackage'
import Results from './Results'
-type ManifestKey = 'hash' | 'logical_key' | 'meta' | 'physical_keys' | 'size'
-type ManifestEntryStringified = Record
-
-function SeeDocsForCreatingPackage() {
- return (
-
-
-
- help_outline
-
-
-
- )
-}
-
-function doQueryResultsContainManifestEntries(
- queryResults: requests.athena.QueryResultsResponse,
-): queryResults is requests.athena.QueryManifestsResponse {
- const columnNames = queryResults.columns.map(({ name }) => name)
- return (
- columnNames.includes('size') &&
- columnNames.includes('physical_keys') &&
- columnNames.includes('logical_key')
- )
-}
-
-// TODO: this name doesn't make sense without `parseManifestEntryStringified`
-// merge it into one
-function rowToManifestEntryStringified(
- row: string[],
- columns: requests.athena.QueryResultsColumns,
-): ManifestEntryStringified {
- return row.reduce((acc, value, index) => {
- if (!columns[index].name) return acc
- return {
- ...acc,
- [columns[index].name]: value,
- }
- }, {} as ManifestEntryStringified)
-}
-
-function parseManifestEntryStringified(entry: ManifestEntryStringified): {
- [key: string]: Model.S3File
-} | null {
- if (!entry.logical_key) return null
- if (!entry.physical_keys) return null
- try {
- const handle = s3paths.parseS3Url(
- entry.physical_keys.replace(/^\[/, '').replace(/\]$/, ''),
- )
- const sizeParsed = Number(entry.size)
- const size = Number.isNaN(sizeParsed) ? 0 : sizeParsed
- return {
- [entry.logical_key]: {
- ...handle,
- size,
- },
- }
- } catch (e) {
- // eslint-disable-next-line no-console
- console.error(e)
- return null
- }
-}
-
-interface ParsedRows {
- valid: Record
- invalid: requests.athena.QueryResultsRows
-}
-
-function parseQueryResults(
- queryResults: requests.athena.QueryManifestsResponse,
-): ParsedRows {
- // TODO: use one reduce-loop
- // merge `rowToManifestEntryStringified` and `parseManifestEntryStringified` into one function
- const manifestEntries: ManifestEntryStringified[] = queryResults.rows.reduce(
- (memo, row) => memo.concat(rowToManifestEntryStringified(row, queryResults.columns)),
- [] as ManifestEntryStringified[],
- )
- return manifestEntries.reduce(
- (memo, entry, index) => {
- const parsed = parseManifestEntryStringified(entry)
- return parsed
- ? // if entry is ok then add it to valid map, and invalid is pristine
- {
- valid: {
- ...memo.valid,
- ...parsed,
- },
- invalid: memo.invalid,
- }
- : // if no entry then add original data to list of invalid, and valid is pristine
- {
- valid: memo.valid,
- invalid: [...memo.invalid, queryResults.rows[index]],
- }
- },
- { valid: {}, invalid: [] } as ParsedRows,
- )
-}
-
const useStyles = M.makeStyles((t) => ({
results: {
'div&': {
@@ -123,7 +25,7 @@ const useStyles = M.makeStyles((t) => ({
interface CreatePackageProps {
bucket: string
- queryResults: requests.athena.QueryResultsResponse
+ queryResults: requests.QueryResults
}
export default function CreatePackage({ bucket, queryResults }: CreatePackageProps) {
@@ -150,7 +52,6 @@ export default function CreatePackage({ bucket, queryResults }: CreatePackagePro
const onPackage = React.useCallback(() => {
if (!doQueryResultsContainManifestEntries(queryResults)) return
- // TODO: make it lazy, and disable button
const parsed = parseQueryResults(queryResults)
setEntries(parsed)
if (parsed.invalid.length) {
@@ -161,10 +62,6 @@ export default function CreatePackage({ bucket, queryResults }: CreatePackagePro
}
}, [addToPackage, confirm, createDialog, queryResults])
- if (!doQueryResultsContainManifestEntries(queryResults)) {
- return
- }
-
return (
<>
{createDialog.render({
diff --git a/catalog/app/containers/Bucket/Queries/Athena/Database.spec.tsx b/catalog/app/containers/Bucket/Queries/Athena/Database.spec.tsx
new file mode 100644
index 00000000000..582b1a6d433
--- /dev/null
+++ b/catalog/app/containers/Bucket/Queries/Athena/Database.spec.tsx
@@ -0,0 +1,144 @@
+import * as React from 'react'
+import renderer from 'react-test-renderer'
+
+import WithGlobalDialogs from 'utils/GlobalDialogs'
+
+import Database from './Database'
+
+import * as Model from './model'
+
+jest.mock(
+ 'constants/config',
+ jest.fn(() => ({})),
+)
+
+const noop = () => {}
+
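+// Baseline model state for tests: nothing loaded yet and all setters are no-ops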
+const emptyState: Model.State = {
+ bucket: 'any',
+
+ catalogName: { value: undefined, setValue: noop },
+ catalogNames: { data: undefined, loadMore: noop },
+ database: { value: undefined, setValue: noop },
+ databases: { data: undefined, loadMore: noop },
+ execution: undefined,
+ executions: { data: undefined, loadMore: noop },
+ queries: { data: undefined, loadMore: noop },
+ query: { value: undefined, setValue: noop },
+ queryBody: { value: undefined, setValue: noop },
+ results: { data: undefined, loadMore: noop },
+ workgroups: { data: undefined, loadMore: noop },
+ workgroup: { data: undefined, loadMore: noop },
+
+ submit: () => Promise.resolve({ id: 'bar' }),
+ queryRun: undefined,
+}
+
+interface ProviderProps {
+ children: React.ReactNode
+ value: Model.State
+}
+
+function Provider({ children, value }: ProviderProps) {
+ return {children}
+}
+
+describe('containers/Bucket/Queries/Athena/Database', () => {
+ beforeAll(() => {})
+
+ afterAll(() => {})
+
+ it('should render skeletons', () => {
+ const tree = renderer.create(
+
+
+ ,
+ )
+ expect(tree).toMatchSnapshot()
+ })
+
+ it('should render selected values', () => {
+ const tree = renderer.create(
+
+
+ ,
+ )
+ expect(tree).toMatchSnapshot()
+ })
+
+ it('should show no value (zero-width space) if selected no value', () => {
+ const tree = renderer.create(
+
+
+ ,
+ )
+ expect(tree).toMatchSnapshot()
+ })
+
+ it('should disable selection if no spare values', () => {
+ const tree = renderer.create(
+
+
+ ,
+ )
+ expect(tree).toMatchSnapshot()
+ })
+
+ it('should show error when values failed', () => {
+ const tree = renderer.create(
+
+
+
+
+ ,
+ )
+ expect(tree).toMatchSnapshot()
+ })
+
+ it('should show error when data failed', () => {
+ const tree = renderer.create(
+
+
+
+
+ ,
+ )
+ expect(tree).toMatchSnapshot()
+ })
+})
diff --git a/catalog/app/containers/Bucket/Queries/Athena/Database.tsx b/catalog/app/containers/Bucket/Queries/Athena/Database.tsx
index dbe46d38b72..7f73a8e36f6 100644
--- a/catalog/app/containers/Bucket/Queries/Athena/Database.tsx
+++ b/catalog/app/containers/Bucket/Queries/Athena/Database.tsx
@@ -6,7 +6,17 @@ import * as Lab from '@material-ui/lab'
import Skeleton from 'components/Skeleton'
import * as Dialogs from 'utils/GlobalDialogs'
-import * as requests from '../requests'
+import * as Model from './model'
+import * as storage from './model/storage'
+
+const useSelectErrorStyles = M.makeStyles((t) => ({
+ button: {
+ whiteSpace: 'nowrap',
+ },
+ dialog: {
+ padding: t.spacing(2),
+ },
+}))
interface SelectErrorProps {
className?: string
@@ -14,18 +24,24 @@ interface SelectErrorProps {
}
function SelectError({ className, error }: SelectErrorProps) {
+ const classes = useSelectErrorStyles()
const openDialog = Dialogs.use()
const handleClick = React.useCallback(() => {
openDialog(() => (
-
+
{error.message}
-
+
))
- }, [error.message, openDialog])
+ }, [classes.dialog, error.message, openDialog])
return (
+
Show more
}
@@ -39,6 +55,8 @@ function SelectError({ className, error }: SelectErrorProps) {
const LOAD_MORE = '__load-more__'
+const EMPTY = '__empty__'
+
interface Response {
list: string[]
next?: string
@@ -84,13 +102,22 @@ function Select({
return (
{label}
-
+
{data.list.map((item) => (
{item}
))}
{data.next && Load more}
+ {!data.list.length && (
+
+ {value || 'Empty list'}
+
+ )}
)
@@ -98,54 +125,78 @@ function Select({
interface SelectCatalogNameProps {
className?: string
- value: requests.athena.CatalogName | null
- onChange: (catalogName: requests.athena.CatalogName) => void
}
-function SelectCatalogName({ className, value, onChange }: SelectCatalogNameProps) {
- const [prev, setPrev] = React.useState(
- null,
+function SelectCatalogName({ className }: SelectCatalogNameProps) {
+ const { catalogName, catalogNames, queryRun } = Model.use()
+
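+  // Persist the chosen catalog and clear the stored database, since the stored database belongs to the previous catalog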
+ const handleChange = React.useCallback(
+ (value) => {
+ storage.setCatalog(value)
+ storage.clearDatabase()
+ catalogName.setValue(value)
+ },
+ [catalogName],
+ )
+
+ if (Model.isError(catalogNames.data)) {
+ return
+ }
+ if (Model.isError(catalogName.value)) {
+ return
+ }
+ if (!Model.hasValue(catalogName.value) || !Model.hasData(catalogNames.data)) {
+ return
+ }
+
+ return (
+
)
- const data = requests.athena.useCatalogNames(prev)
- return data.case({
- Ok: (response) => (
-
- ),
- Err: (error) => ,
- _: () => ,
- })
}
-interface SelectDatabaseProps
- extends Omit {
- catalogName: requests.athena.CatalogName | null
- onChange: (database: requests.athena.Database) => void
- value: requests.athena.Database | null
+interface SelectDatabaseProps {
+ className: string
}
-function SelectDatabase({ catalogName, onChange, ...rest }: SelectDatabaseProps) {
- const [prev, setPrev] = React.useState(null)
- const data = requests.athena.useDatabases(catalogName, prev)
- return data.case({
- Ok: (response) => (
-
- ),
- Err: (error) => ,
- _: () => ,
- })
+function SelectDatabase({ className }: SelectDatabaseProps) {
+ const { catalogName, database, databases, queryRun } = Model.use()
+
+ const handleChange = React.useCallback(
+ (value) => {
+ storage.setDatabase(value)
+ database.setValue(value)
+ },
+ [database],
+ )
+
+ if (Model.isError(databases.data)) {
+ return
+ }
+ if (Model.isError(database.value)) {
+ return
+ }
+ if (!Model.hasValue(database.value) || !Model.hasData(databases.data)) {
+ return
+ }
+
+ return (
+
+ )
}
const useStyles = M.makeStyles((t) => ({
@@ -155,8 +206,8 @@ const useStyles = M.makeStyles((t) => ({
},
field: {
cursor: 'pointer',
+ flexBasis: '50%',
marginRight: t.spacing(2),
- width: '50%',
'& input': {
cursor: 'pointer',
},
@@ -171,42 +222,14 @@ const useStyles = M.makeStyles((t) => ({
interface DatabaseProps {
className?: string
- value: requests.athena.ExecutionContext | null
- onChange: (value: requests.athena.ExecutionContext | null) => void
}
-export default function Database({ className, value, onChange }: DatabaseProps) {
+export default function Database({ className }: DatabaseProps) {
const classes = useStyles()
- const [catalogName, setCatalogName] =
- React.useState(value?.catalogName || null)
- const handleCatalogName = React.useCallback(
- (name) => {
- setCatalogName(name)
- onChange(null)
- },
- [onChange],
- )
- const handleDatabase = React.useCallback(
- (database) => {
- if (!catalogName) return
- onChange({ catalogName, database })
- },
- [catalogName, onChange],
- )
return (
-
-
+
+
)
}
diff --git a/catalog/app/containers/Bucket/Queries/Athena/History.tsx b/catalog/app/containers/Bucket/Queries/Athena/History.tsx
index 2653c70a7be..23de3ea9811 100644
--- a/catalog/app/containers/Bucket/Queries/Athena/History.tsx
+++ b/catalog/app/containers/Bucket/Queries/Athena/History.tsx
@@ -2,16 +2,15 @@ import cx from 'classnames'
import * as dateFns from 'date-fns'
import * as R from 'ramda'
import * as React from 'react'
+import * as RRDom from 'react-router-dom'
import * as M from '@material-ui/core'
import * as Lab from '@material-ui/lab'
import * as Notifications from 'containers/Notifications'
import * as NamedRoutes from 'utils/NamedRoutes'
-import Link from 'utils/StyledLink'
import copyToClipboard from 'utils/clipboard'
-import { trimCenter } from 'utils/string'
-import * as requests from '../requests'
+import * as Model from './model'
const useToggleButtonStyles = M.makeStyles({
root: {
@@ -58,133 +57,154 @@ function Date({ date }: DateProps) {
return {formatted}
}
-interface QueryDateCompletedProps {
- bucket: string
- queryExecution: requests.athena.QueryExecution
- workgroup: requests.athena.Workgroup
-}
-
-function QueryDateCompleted({
- bucket,
- queryExecution,
- workgroup,
-}: QueryDateCompletedProps) {
- const { urls } = NamedRoutes.use()
- if (queryExecution.status !== 'SUCCEEDED') {
- return
- }
- return (
-
-
-
- )
-}
-
-interface CopyButtonProps {
- queryExecution: requests.athena.QueryExecution
-}
-
-function CopyButton({ queryExecution }: CopyButtonProps) {
- const { push } = Notifications.use()
- const handleCopy = React.useCallback(() => {
- if (queryExecution.query) {
- copyToClipboard(queryExecution.query)
- push('Query has been copied to clipboard')
- }
- }, [push, queryExecution.query])
- return (
-
- content_copy
-
- )
-}
-
const useFullQueryRowStyles = M.makeStyles((t) => ({
- cell: {
- paddingBottom: 0,
- paddingTop: 0,
- },
- collapsed: {
- borderBottom: 0,
+ root: {
+ borderBottom: `1px solid ${t.palette.divider}`,
+ padding: t.spacing(2, 7.5),
},
query: {
maxHeight: t.spacing(30),
maxWidth: '100%',
overflow: 'auto',
+ margin: t.spacing(0, 0, 2),
+ whiteSpace: 'pre-wrap',
+ },
+ button: {
+ '& + &': {
+ marginLeft: t.spacing(1),
+ },
},
}))
interface FullQueryRowProps {
expanded: boolean
- queryExecution: requests.athena.QueryExecution
+ query: string
}
-function FullQueryRow({ expanded, queryExecution }: FullQueryRowProps) {
+function FullQueryRow({ expanded, query }: FullQueryRowProps) {
+ const { push } = Notifications.use()
+ const { queryBody } = Model.use()
const classes = useFullQueryRowStyles()
+ const handleInsert = React.useCallback(() => {
+ queryBody.setValue(query)
+ push('Query has been pasted into editor')
+ }, [push, queryBody, query])
+ const handleCopy = React.useCallback(() => {
+ copyToClipboard(query)
+ push('Query has been copied to clipboard')
+ }, [push, query])
return (
-
-
- {!!expanded && }
-
-
-
- {queryExecution.query}
-
-
-
+
+
+
{query}
+
content_copy}
+ variant="outlined"
+ >
+ Copy
+
+
replay}
+ variant="outlined"
+ >
+ Paste into query editor
+
+
+
)
}
-interface ExecutionProps {
- bucket: string
- queryExecution: requests.athena.QueryExecution
- workgroup: requests.athena.Workgroup
+const useRowStyles = M.makeStyles((t) => ({
+ root: {
+ alignItems: 'center',
+ display: 'grid',
+ gridColumnGap: t.spacing(2),
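+    // grid columns: expand toggle, query text, status, date created, date completed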
+ gridTemplateColumns: '30px auto 160px 160px 160px',
+ padding: t.spacing(0, 2),
+ lineHeight: `${t.spacing(4)}px`,
+ borderBottom: `1px solid ${t.palette.divider}`,
+ whiteSpace: 'nowrap',
+ },
+}))
+
+interface RowProps {
+ className: string
+ children: React.ReactNode
}
-function Execution({ bucket, queryExecution, workgroup }: ExecutionProps) {
- const [expanded, setExpanded] = React.useState(false)
- const onToggle = React.useCallback(() => setExpanded(!expanded), [expanded])
+function Row({ className, children }: RowProps) {
+ const classes = useRowStyles()
+ return {children}
+}
+
+interface LinkCellProps {
+ children: React.ReactNode
+ className: string
+ to?: string
+}
- if (queryExecution.error)
+function LinkCell({ children, className, to }: LinkCellProps) {
+ if (to) {
return (
-
-
- {queryExecution.error.message}
-
-
+
+ {children}
+
)
+ }
+ return {children}
+}
+
+const useExecutionStyles = M.makeStyles((t) => ({
+ hover: {
+ '&:has($link:hover)': {
+ background: t.palette.action.hover,
+ },
+ },
+ failed: {
+ color: t.palette.text.disabled,
+ },
+ link: {},
+ query: {
+ overflow: 'hidden',
+ textOverflow: 'ellipsis',
+ },
+}))
+
+interface ExecutionProps {
+ to?: string
+ queryExecution: Model.QueryExecution
+}
+
+function Execution({ to, queryExecution }: ExecutionProps) {
+ const classes = useExecutionStyles()
+ const [expanded, setExpanded] = React.useState(false)
+ const onToggle = React.useCallback(() => setExpanded(!expanded), [expanded])
return (
<>
-
-
-
-
- {trimCenter(queryExecution.query || '', 50)}
-
+
+
+
+ {queryExecution.query}
+
+
{queryExecution.status || 'UNKNOWN'}
-
-
+
+
-
-
-
-
-
+
+
+
+
+
{queryExecution.query && (
-
+
)}
>
)
@@ -199,48 +219,42 @@ function Empty() {
)
}
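+// A failed execution carries only an error field, so its presence works as the type discriminator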
+function isFailedExecution(
+ x: Model.QueryExecutionsItem,
+): x is Model.QueryExecutionFailed {
+ return !!(x as Model.QueryExecutionFailed).error
+}
+
const useStyles = M.makeStyles((t) => ({
- queryCell: {
- width: '40%',
- },
- actionCell: {
- width: '24px',
- },
header: {
- margin: t.spacing(0, 0, 1),
+ lineHeight: `${t.spacing(4.5)}px`,
+ fontWeight: 500,
},
footer: {
+ alignItems: 'center',
display: 'flex',
- padding: t.spacing(1),
+ padding: t.spacing(1, 2),
},
more: {
marginLeft: 'auto',
},
- table: {
- tableLayout: 'fixed',
- },
}))
interface HistoryProps {
bucket: string
- executions: requests.athena.QueryExecution[]
+ executions: Model.QueryExecutionsItem[]
onLoadMore?: () => void
- workgroup: requests.athena.Workgroup
}
-export default function History({
- bucket,
- executions,
- onLoadMore,
- workgroup,
-}: HistoryProps) {
+export default function History({ bucket, executions, onLoadMore }: HistoryProps) {
+ const { urls } = NamedRoutes.use()
const classes = useStyles()
const pageSize = 10
const [page, setPage] = React.useState(1)
const handlePagination = React.useCallback(
- (event, value) => {
+ (_event, value) => {
setPage(value)
},
[setPage],
@@ -249,8 +263,8 @@ export default function History({
const rowsSorted = React.useMemo(
() =>
R.sort(
- (a: requests.athena.QueryExecution, b: requests.athena.QueryExecution) =>
- b?.completed && a?.completed
+ (a: Model.QueryExecutionsItem, b: Model.QueryExecutionsItem) =>
+ !isFailedExecution(a) && !isFailedExecution(b) && b?.completed && a?.completed
? b.completed.valueOf() - a.completed.valueOf()
: -1,
executions,
@@ -260,54 +274,55 @@ export default function History({
const rowsPaginated = rowsSorted.slice(pageSize * (page - 1), pageSize * page)
const hasPagination = rowsSorted.length > rowsPaginated.length
+ const { workgroup } = Model.use()
+ if (!Model.hasValue(workgroup)) return null
+
return (
-
-
-
-
-
- Query
- Status
- Date created
- Date completed
-
-
-
- {rowsPaginated.map((queryExecution) => (
+ <>
+
+
+
+ Query
+ Status
+ Date created
+ Date completed
+
+ {rowsPaginated.map((queryExecution) =>
+ isFailedExecution(queryExecution) ? (
+
+ {queryExecution.error.message}
+
+ ) : (
- ))}
- {!executions.length && (
-
-
-
-
-
- )}
-
-
-
- {(hasPagination || !!onLoadMore) && (
-
- {hasPagination && (
-
- )}
- {onLoadMore && (
-
- Load more
-
- )}
-
- )}
-
+ ),
+ )}
+ {!executions.length && }
+ {(hasPagination || !!onLoadMore) && (
+
+ {hasPagination && (
+
+ )}
+ {onLoadMore && (
+
+ Load more
+
+ )}
+
+ )}
+
+ >
)
}
diff --git a/catalog/app/containers/Bucket/Queries/Athena/QueryEditor.tsx b/catalog/app/containers/Bucket/Queries/Athena/QueryEditor.tsx
index 584f53ff45f..f3a6b04746a 100644
--- a/catalog/app/containers/Bucket/Queries/Athena/QueryEditor.tsx
+++ b/catalog/app/containers/Bucket/Queries/Athena/QueryEditor.tsx
@@ -1,21 +1,18 @@
import * as React from 'react'
import AceEditor from 'react-ace'
-import * as RRDom from 'react-router-dom'
import * as M from '@material-ui/core'
import * as Lab from '@material-ui/lab'
import 'ace-builds/src-noconflict/mode-sql'
import 'ace-builds/src-noconflict/theme-eclipse'
-import { useConfirm } from 'components/Dialog'
+import Lock from 'components/Lock'
import Skeleton from 'components/Skeleton'
-import * as Notifications from 'containers/Notifications'
-import * as NamedRoutes from 'utils/NamedRoutes'
+import * as Dialogs from 'utils/GlobalDialogs'
import StyledLink from 'utils/StyledLink'
-import * as requests from '../requests'
-
import Database from './Database'
+import * as Model from './model'
const ATHENA_REF_INDEX = 'https://aws.amazon.com/athena/'
const ATHENA_REF_SQL =
@@ -46,23 +43,31 @@ function HelperText() {
const useStyles = M.makeStyles((t) => ({
editor: {
padding: t.spacing(1),
+ position: 'relative',
},
header: {
- margin: t.spacing(0, 0, 1),
+ margin: t.spacing(2, 0, 1),
},
}))
-interface EditorFieldProps {
- className?: string
- onChange: (value: string) => void
- query: string
-}
-
-function EditorField({ className, query, onChange }: EditorFieldProps) {
+function EditorField() {
const classes = useStyles()
+ const { queryBody, queryRun } = Model.use()
+
+ if (Model.isNone(queryBody.value)) {
+ return null
+ }
+
+ if (Model.isError(queryBody.value)) {
+ return {queryBody.value.message}
+ }
+
+ if (!Model.hasValue(queryBody.value)) {
+ return
+ }
return (
-
+
Query body
@@ -71,68 +76,19 @@ function EditorField({ className, query, onChange }: EditorFieldProps) {
editorProps={{ $blockScrolling: true }}
height="200px"
mode="sql"
- onChange={onChange}
+ onChange={queryBody.setValue}
theme="eclipse"
- value={query}
+ value={queryBody.value || ''}
width="100%"
/>
+ {Model.isLoading(queryRun) && }
)
}
-function useQueryRun(
- bucket: string,
- workgroup: requests.athena.Workgroup,
- queryExecutionId?: string,
-) {
- const { urls } = NamedRoutes.use()
- const history = RRDom.useHistory()
- const [loading, setLoading] = React.useState(false)
- const [error, setError] = React.useState
()
- const runQuery = requests.athena.useQueryRun(workgroup)
- const { push: notify } = Notifications.use()
- const goToExecution = React.useCallback(
- (id: string) => history.push(urls.bucketAthenaExecution(bucket, workgroup, id)),
- [bucket, history, urls, workgroup],
- )
- const onSubmit = React.useCallback(
- async (value: string, executionContext: requests.athena.ExecutionContext | null) => {
- setLoading(true)
- setError(undefined)
- try {
- const { id } = await runQuery(value, executionContext)
- if (id === queryExecutionId) notify('Query execution results remain unchanged')
- setLoading(false)
- goToExecution(id)
- } catch (e) {
- setLoading(false)
- if (e instanceof Error) {
- setError(e)
- } else {
- throw e
- }
- }
- },
- [goToExecution, notify, runQuery, queryExecutionId],
- )
- return React.useMemo(
- () => ({
- loading,
- error,
- onSubmit,
- }),
- [loading, error, onSubmit],
- )
-}
-
const useFormSkeletonStyles = M.makeStyles((t) => ({
- button: {
- height: t.spacing(4),
- marginTop: t.spacing(2),
- width: t.spacing(14),
- },
canvas: {
flexGrow: 1,
height: t.spacing(27),
@@ -157,7 +113,7 @@ const useFormSkeletonStyles = M.makeStyles((t) => ({
}))
interface FormSkeletonProps {
- className: string
+ className?: string
}
function FormSkeleton({ className }: FormSkeletonProps) {
@@ -170,18 +126,43 @@ function FormSkeleton({ className }: FormSkeletonProps) {
-
)
}
+interface FormConfirmProps {
+ close: () => void
+ submit: () => void
+}
+
+function FormConfirm({ close, submit }: FormConfirmProps) {
+ return (
+ <>
+
+ Database is not selected. Run the query without it?
+
+
+ Close
+ {
+ close()
+ submit()
+ }}
+ >
+ Confirm, run without
+
+
+ >
+ )
+}
+
export { FormSkeleton as Skeleton }
const useFormStyles = M.makeStyles((t) => ({
actions: {
display: 'flex',
justifyContent: 'space-between',
- margin: t.spacing(2, 0),
+ margin: t.spacing(2, 0, 4),
[t.breakpoints.up('sm')]: {
alignItems: 'center',
},
@@ -203,86 +184,38 @@ const useFormStyles = M.makeStyles((t) => ({
}))
interface FormProps {
- bucket: string
- className?: string
- onChange: (value: requests.athena.QueryExecution) => void
- value: requests.athena.QueryExecution | null
- workgroup: requests.athena.Workgroup
+ className: string
}
-export function Form({ bucket, className, onChange, value, workgroup }: FormProps) {
+export function Form({ className }: FormProps) {
const classes = useFormStyles()
- const executionContext = React.useMemo(
- () =>
- value?.catalog && value?.db
- ? {
- catalogName: value.catalog,
- database: value.db,
- }
- : null,
- [value],
- )
- const confirm = useConfirm({
- onSubmit: (confirmed) => {
- if (confirmed) {
- if (!value?.query) {
- throw new Error('Query is not set')
- }
- onSubmit(value!.query, executionContext)
- }
- },
- submitTitle: 'Proceed',
- title: 'Execution context is not set',
- })
- const { loading, error, onSubmit } = useQueryRun(bucket, workgroup, value?.id)
- const handleSubmit = React.useCallback(() => {
- if (!value?.query) return
- if (!executionContext) {
- return confirm.open()
+ const { submit, queryRun } = Model.use()
+
+ const openDialog = Dialogs.use()
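+  // Try a non-forced submit first; if the model reports NO_DATABASE, ask the user to confirm re-running without a database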
+ const handleSubmit = React.useCallback(async () => {
+ const output = await submit(false)
+ if (output === Model.NO_DATABASE) {
+ openDialog(({ close }) => submit(true)} />)
}
- onSubmit(value.query, executionContext)
- }, [confirm, executionContext, onSubmit, value])
- const handleExecutionContext = React.useCallback(
- (exeContext) => {
- if (!exeContext) {
- onChange({ ...value, catalog: undefined, db: undefined })
- return
- }
- const { catalogName, database } = exeContext
- onChange({ ...value, catalog: catalogName, db: database })
- },
- [onChange, value],
- )
+ }, [openDialog, submit])
return (
- {confirm.render(
-
- Data catalog and database are not set. Run query without them?
- ,
- )}
-
onChange({ ...value, query })}
- query={value?.query || ''}
- />
+
- {error && (
+ {Model.isError(queryRun) && (
- {error.message}
+ {queryRun.message}
)}
-
+
Run query
diff --git a/catalog/app/containers/Bucket/Queries/Athena/Results.tsx b/catalog/app/containers/Bucket/Queries/Athena/Results.tsx
index f454f3ca198..e0ba16741df 100644
--- a/catalog/app/containers/Bucket/Queries/Athena/Results.tsx
+++ b/catalog/app/containers/Bucket/Queries/Athena/Results.tsx
@@ -9,7 +9,7 @@ import log from 'utils/Logging'
import * as NamedRoutes from 'utils/NamedRoutes'
import * as s3paths from 'utils/s3paths'
-import * as requests from '../requests'
+import * as Model from './model'
function Empty() {
return (
@@ -63,9 +63,9 @@ const useResultsStyles = M.makeStyles((t) => ({
interface ResultsProps {
className?: string
- columns: requests.athena.QueryResultsColumns
+ columns: Model.QueryResultsColumns
onLoadMore?: () => void
- rows: requests.athena.QueryResultsRows
+ rows: Model.QueryResultsRows
}
export default function Results({ className, columns, onLoadMore, rows }: ResultsProps) {
diff --git a/catalog/app/containers/Bucket/Queries/Athena/Workgroups.tsx b/catalog/app/containers/Bucket/Queries/Athena/Workgroups.tsx
index 14209c44b33..f0387219d4d 100644
--- a/catalog/app/containers/Bucket/Queries/Athena/Workgroups.tsx
+++ b/catalog/app/containers/Bucket/Queries/Athena/Workgroups.tsx
@@ -4,40 +4,31 @@ import * as M from '@material-ui/core'
import * as Lab from '@material-ui/lab'
import { docs } from 'constants/urls'
-import * as NamedRoutes from 'utils/NamedRoutes'
import Skeleton from 'components/Skeleton'
+import * as NamedRoutes from 'utils/NamedRoutes'
import StyledLink from 'utils/StyledLink'
-import * as requests from '../requests'
-import * as storage from '../requests/storage'
-
-import { Alert, Section } from './Components'
-
-const useStyles = M.makeStyles((t) => ({
- selectWrapper: {
- width: '100%',
- },
- select: {
- padding: t.spacing(1),
- },
-}))
+import { Alert } from './Components'
+import * as Model from './model'
+import * as storage from './model/storage'
const LOAD_MORE = 'load-more'
interface WorkgroupSelectProps {
bucket: string
- onLoadMore: (workgroups: requests.athena.WorkgroupsResponse) => void
- value: requests.athena.Workgroup | null
- workgroups: requests.athena.WorkgroupsResponse
+ disabled?: boolean
+ onLoadMore: (workgroups: Model.List) => void
+ value: Model.Workgroup | null
+ workgroups: Model.List
}
function WorkgroupSelect({
bucket,
+ disabled,
onLoadMore,
value,
workgroups,
}: WorkgroupSelectProps) {
- const classes = useStyles()
const { urls } = NamedRoutes.use()
const history = RRDom.useHistory()
@@ -61,29 +52,27 @@ function WorkgroupSelect({
)
return (
-
-
-
- {workgroups.list.map((name) => (
-
- {name}
-
- ))}
- {workgroups.next && (
-
-
- Load more
-
-
- )}
-
-
-
+
+ Select workgroup
+
+ {workgroups.list.map((name) => (
+
+ {name}
+
+ ))}
+ {workgroups.next && (
+
+
+ Load more
+
+
+ )}
+
+
)
}
@@ -108,62 +97,43 @@ function WorkgroupsEmpty({ error }: WorkgroupsEmptyProps) {
Check{' '}
- Athena Queries docs on
- setup and correct usage
+
+ Athena Queries docs
+ {' '}
+ on setup and correct usage
>
)
}
-interface RedirectToDefaultWorkgroupProps {
- bucket: string
- workgroups: requests.athena.WorkgroupsResponse
-}
-
-function RedirectToDefaultWorkgroup({
- bucket,
- workgroups,
-}: RedirectToDefaultWorkgroupProps) {
- const { urls } = NamedRoutes.use()
- return (
-
- )
-}
-
interface AthenaWorkgroupsProps {
bucket: string
- workgroup: requests.athena.Workgroup | null
}
-export default function AthenaWorkgroups({ bucket, workgroup }: AthenaWorkgroupsProps) {
- const [prev, setPrev] = React.useState(null)
- const data = requests.athena.useWorkgroups(prev)
- return data.case({
- Ok: (workgroups) => {
- if (!workgroup && workgroups.defaultWorkgroup)
- return
- return (
- }>
- {workgroups.list.length && (
-
- )}
-
- )
- },
- Err: (error) => ,
- _: () => (
+export default function AthenaWorkgroups({ bucket }: AthenaWorkgroupsProps) {
+ const { queryRun, workgroup, workgroups } = Model.use()
+
+ if (Model.isError(workgroups.data)) return
+ if (Model.isError(workgroup.data)) return
+ if (!Model.hasData(workgroups.data) || !Model.hasData(workgroup.data)) {
+ return (
<>
>
- ),
- })
+ )
+ }
+
+ if (!workgroups.data.list.length) return
+
+ return (
+
+ )
}
diff --git a/catalog/app/containers/Bucket/Queries/Athena/__snapshots__/Database.spec.tsx.snap b/catalog/app/containers/Bucket/Queries/Athena/__snapshots__/Database.spec.tsx.snap
new file mode 100644
index 00000000000..f8826f5a665
--- /dev/null
+++ b/catalog/app/containers/Bucket/Queries/Athena/__snapshots__/Database.spec.tsx.snap
@@ -0,0 +1,553 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`containers/Bucket/Queries/Athena/Database should disable selection if no spare values 1`] = `
+
+
+
+
+
+ Empty list
+
+
+
+
+
+
+
+
+
+ Empty list
+
+
+
+
+
+
+`;
+
+exports[`containers/Bucket/Queries/Athena/Database should render selected values 1`] = `
+
+`;
+
+exports[`containers/Bucket/Queries/Athena/Database should render skeletons 1`] = `
+
+`;
+
+exports[`containers/Bucket/Queries/Athena/Database should show error when data failed 1`] = `
+
+
+
+
+ Error
+
+
+
+
+
+
+
+
+ Error
+
+
+
+
+
+
+`;
+
+exports[`containers/Bucket/Queries/Athena/Database should show error when values failed 1`] = `
+
+
+
+
+ Error
+
+
+
+
+
+
+
+
+ Error
+
+
+
+
+
+
+`;
+
+exports[`containers/Bucket/Queries/Athena/Database should show no value (zero-width space) if selected no value 1`] = `
+
+`;
diff --git a/catalog/app/containers/Bucket/Queries/Athena/model/createPackage.spec.ts b/catalog/app/containers/Bucket/Queries/Athena/model/createPackage.spec.ts
new file mode 100644
index 00000000000..8dc33cc15c7
--- /dev/null
+++ b/catalog/app/containers/Bucket/Queries/Athena/model/createPackage.spec.ts
@@ -0,0 +1,213 @@
+import Log from 'utils/Logging'
+
+import type * as Model from './requests'
+import { doQueryResultsContainManifestEntries, parseQueryResults } from './createPackage'
+
+jest.mock(
+ 'constants/config',
+ jest.fn(() => ({})),
+)
+
+describe('containers/Bucket/Queries/Athena/model/createPackage', () => {
+ describe('parseQueryResults', () => {
+ it('should return empty', () => {
+ const results: Model.QueryManifests = {
+ rows: [],
+ columns: [],
+ }
+ expect(parseQueryResults(results)).toEqual({
+ valid: {},
+ invalid: [],
+ })
+ })
+
+ it('should return invalid rows', () => {
+ const results1: Model.QueryManifests = {
+ rows: [['s3://foo']],
+ columns: [{ name: 'physical_key', type: 'varchar' }],
+ }
+ const results2: Model.QueryManifests = {
+ rows: [['s3://foo/a/b/c', 'foo'], ['s3://foo'], ['s3://foo/d/e/f', 'bar', 'baz']],
+ columns: [
+ { name: 'physical_key', type: 'varchar' },
+ { name: 'logical_key', type: 'varchar' },
+ ],
+ }
+ const results3: Model.QueryManifests = {
+ rows: [['foo', 'bar']],
+ columns: [
+ { name: 'size', type: 'varchar' },
+ { name: 'logical_key', type: 'varchar' },
+ ],
+ }
+ expect(parseQueryResults(results1)).toEqual({
+ valid: {},
+ invalid: [
+ // Not enough columns for a manifest entry
+ ['s3://foo'],
+ ],
+ })
+ expect(parseQueryResults(results2)).toEqual({
+ valid: {
+ foo: {
+ bucket: 'foo',
+ key: 'a/b/c',
+ size: 0,
+ version: undefined,
+ },
+ bar: {
+ bucket: 'foo',
+ key: 'd/e/f',
+ size: 0,
+ version: undefined,
+ },
+ },
+ invalid: [
+ // Not enough row elements for a manifest entry
+ ['s3://foo'],
+ ],
+ })
+ expect(parseQueryResults(results3)).toEqual({
+ valid: {},
+ invalid: [
+ // Not enough columns for a manifest entry
+ ['foo', 'bar'],
+ ],
+ })
+ })
+
+ it('should return all valid rows', () => {
+ const results: Model.QueryManifests = {
+ rows: [
+ ['abc', 'a/b/c', '{"a": "b"}', '[s3://a/b/c/d?versionId=def]', '123'],
+ ['def', 'd/e/f', '{"d": "e"}', '[s3://d/e/f/g?versionId=ghi]', '456', 'extra'],
+ ['xyz', 'x/y/z', '{"x": "y"}', '[s3://x/y/z/w?versionId=uvw]', '789'],
+ ],
+ columns: [
+ { name: 'hash', type: 'varchar' },
+ { name: 'logical_key', type: 'varchar' },
+ { name: 'meta', type: 'varchar' },
+ { name: 'physical_keys', type: 'varchar' },
+ { name: 'size', type: 'varchar' },
+ ],
+ }
+ expect(parseQueryResults(results)).toEqual({
+ valid: {
+ 'a/b/c': {
+ bucket: 'a',
+ key: 'b/c/d',
+ size: 123,
+ version: 'def',
+ // meta: { a: 'b' }, discarded, not supported for creating packages yet
+ },
+ 'd/e/f': {
+ bucket: 'd',
+ key: 'e/f/g',
+ size: 456,
+ version: 'ghi',
+ // meta: { d: 'e' }, discarded, not supported for creating packages yet
+ },
+ 'x/y/z': {
+ bucket: 'x',
+ key: 'y/z/w',
+ size: 789,
+ version: 'uvw',
+ // meta: { x: 'y' }, discarded, not supported for creating packages yet
+ },
+ },
+ invalid: [],
+ })
+ })
+ it('should catch error', () => {
+ const results: Model.QueryManifests = {
+ rows: [
+ ['abc', 'a/b/c', '{"a": "b"}', '[s3://a/b/c/d?versionId=def]', '123'],
+ ['def', 'd/e/f', '{"d": "e"}', '[s3://]', '456', 'extra'],
+ ],
+ columns: [
+ { name: 'hash', type: 'varchar' },
+ { name: 'logical_key', type: 'varchar' },
+ { name: 'meta', type: 'varchar' },
+ { name: 'physical_keys', type: 'varchar' },
+ { name: 'size', type: 'varchar' },
+ ],
+ }
+ const loglevel = Log.getLevel()
+ Log.setLevel('silent')
+ expect(parseQueryResults(results)).toEqual({
+ valid: {
+ 'a/b/c': {
+ bucket: 'a',
+ key: 'b/c/d',
+ size: 123,
+ version: 'def',
+ // meta: { a: 'b' }, discarded, not supported for creating packages yet
+ },
+ },
+ invalid: [['def', 'd/e/f', '{"d": "e"}', '[s3://]', '456', 'extra']],
+ })
+ Log.setLevel(loglevel)
+ })
+ })
+
+ describe('doQueryResultsContainManifestEntries', () => {
+ it('does not contain rows', () => {
+ expect(doQueryResultsContainManifestEntries({ columns: [], rows: [] })).toBe(false)
+ })
+
+ it('does not contain valid columns', () => {
+ expect(
+ doQueryResultsContainManifestEntries({
+ columns: [
+ { name: 'foo', type: 'varchar' },
+ { name: 'bar', type: 'varchar' },
+ ],
+ rows: [['some']],
+ }),
+ ).toBe(false)
+ })
+
+ it('does not contain enough columns', () => {
+ expect(
+ doQueryResultsContainManifestEntries({
+ columns: [
+ { name: 'size', type: 'varchar' },
+ { name: 'physical_keys', type: 'varchar' },
+ ],
+ rows: [['some']],
+ }),
+ ).toBe(false)
+ expect(
+ doQueryResultsContainManifestEntries({
+ columns: [
+ { name: 'size', type: 'varchar' },
+ { name: 'physical_key', type: 'varchar' },
+ ],
+ rows: [['some']],
+ }),
+ ).toBe(false)
+ expect(
+ doQueryResultsContainManifestEntries({
+ columns: [
+ { name: 'size', type: 'varchar' },
+ { name: 'logical_key', type: 'varchar' },
+ ],
+ rows: [['some']],
+ }),
+ ).toBe(false)
+ })
+
+ it('does contain enough valid data', () => {
+ expect(
+ doQueryResultsContainManifestEntries({
+ columns: [
+ { name: 'size', type: 'varchar' },
+ { name: 'physical_key', type: 'varchar' },
+ { name: 'logical_key', type: 'varchar' },
+ ],
+ rows: [['some']],
+ }),
+ ).toBe(true)
+ })
+ })
+})
diff --git a/catalog/app/containers/Bucket/Queries/Athena/model/createPackage.ts b/catalog/app/containers/Bucket/Queries/Athena/model/createPackage.ts
new file mode 100644
index 00000000000..d7f83ebaa7f
--- /dev/null
+++ b/catalog/app/containers/Bucket/Queries/Athena/model/createPackage.ts
@@ -0,0 +1,81 @@
+import type * as Model from 'model'
+import * as s3paths from 'utils/s3paths'
+
+import Log from 'utils/Logging'
+import type * as requests from './requests'
+
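+// Type guard: query results can be turned into package manifest entries only when
+// they have rows and at least the `size`, `logical_key` and a physical key column.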
+export function doQueryResultsContainManifestEntries(
+ queryResults: requests.QueryResults,
+): queryResults is requests.QueryManifests {
+ if (!queryResults.rows.length) return false
+ const columnNames = queryResults.columns.map(({ name }) => name)
+ return (
+ columnNames.includes('size') &&
+ (columnNames.includes('physical_keys') || columnNames.includes('physical_key')) &&
+ columnNames.includes('logical_key')
+ )
+}
+
+type Row = requests.QueryManifests['rows'][0]
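+// Convert a single result row into a [logical_key, S3File] pair,
+// or return the row itself as a failure when required fields are missing or unparsable.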
+function parseRow(
+ row: Row,
+ columns: requests.QueryResultsColumns,
+): { fail?: undefined; ok: [string, Model.S3File] } | { fail: Row; ok?: undefined } {
+ try {
+ const entry = row.reduce(
+ (acc, value, index) => {
+ if (!columns[index]?.name) return acc
+ return {
+ ...acc,
+ [columns[index].name]: value,
+ }
+ },
+      {} as Record<string, string>,
+ )
+ if (!entry.logical_key) return { fail: row }
+ if (!entry.physical_key && !entry.physical_keys) return { fail: row }
+ const handle = entry.physical_key
+ ? s3paths.parseS3Url(entry.physical_key)
+ : s3paths.parseS3Url(entry.physical_keys.replace(/^\[/, '').replace(/\]$/, ''))
+ const sizeParsed = Number(entry.size)
+ const size = Number.isNaN(sizeParsed) ? 0 : sizeParsed
+ return {
+ ok: [
+ entry.logical_key,
+ {
+ ...handle,
+ size,
+ },
+ ],
+ }
+ } catch (e) {
+ Log.error(e)
+ return { fail: row }
+ }
+}
+
+export interface ParsedRows {
+  valid: Record<string, Model.S3File>
+ invalid: requests.QueryResultsRows
+}
+
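+// Split query results into entries usable for package creation (`valid`)
+// and rows that could not be parsed (`invalid`).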
+export function parseQueryResults(queryResults: requests.QueryManifests): ParsedRows {
+ return queryResults.rows
+ .map((row) => parseRow(row, queryResults.columns))
+ .reduce(
+ (memo, { ok, fail }) =>
+ ok
+ ? {
+ valid: {
+ ...memo.valid,
+ [ok[0]]: ok[1],
+ },
+ invalid: memo.invalid,
+ }
+ : {
+ valid: memo.valid,
+ invalid: [...memo.invalid, fail],
+ },
+ { valid: {}, invalid: [] } as ParsedRows,
+ )
+}
diff --git a/catalog/app/containers/Bucket/Queries/Athena/model/index.ts b/catalog/app/containers/Bucket/Queries/Athena/model/index.ts
new file mode 100644
index 00000000000..8c441c4a0ab
--- /dev/null
+++ b/catalog/app/containers/Bucket/Queries/Athena/model/index.ts
@@ -0,0 +1,4 @@
+export type * from './requests'
+export { NO_DATABASE } from './requests'
+export * from './state'
+export * from './utils'
diff --git a/catalog/app/containers/Bucket/Queries/Athena/model/requests.spec.ts b/catalog/app/containers/Bucket/Queries/Athena/model/requests.spec.ts
new file mode 100644
index 00000000000..bf8a0793922
--- /dev/null
+++ b/catalog/app/containers/Bucket/Queries/Athena/model/requests.spec.ts
@@ -0,0 +1,1199 @@
+import type A from 'aws-sdk/clients/athena'
+import { act, renderHook } from '@testing-library/react-hooks'
+
+import Log from 'utils/Logging'
+
+import * as Model from './utils'
+import * as requests from './requests'
+
+jest.mock(
+ 'utils/Logging',
+ jest.fn(() => ({
+ error: jest.fn(),
+ info: jest.fn(),
+ })),
+)
+
+jest.mock(
+ 'constants/config',
+ jest.fn(() => ({})),
+)
+
+const getStorageKey = jest.fn((): string => '')
+jest.mock('utils/storage', () => () => ({
+ get: jest.fn(() => getStorageKey()),
+}))
+
+function req<I, O>(output: O, delay = 100) {
+ return jest.fn((_x: I, callback: (e: Error | null, d: O) => void) => {
+ const timer = setTimeout(() => {
+ callback(null, output)
+ }, delay)
+ return {
+ abort: jest.fn(() => {
+ clearTimeout(timer)
+ }),
+ }
+ })
+}
+
+function reqThen<I, O>(output: (x: I) => O, delay = 100) {
+ return jest.fn((x: I) => ({
+ promise: () =>
+ new Promise((resolve) => {
+ setTimeout(() => {
+ resolve(output(x))
+ }, delay)
+ }),
+ }))
+}
+
+const reqThrow = jest.fn(() => ({
+ promise: () => {
+ throw new Error()
+ },
+}))
+
+const reqThrowWith = (o: unknown) =>
+ jest.fn(() => ({
+ promise: () => {
+ throw o
+ },
+ }))
+
+const batchGetQueryExecution = jest.fn()
+const getWorkGroup = jest.fn()
+const listDataCatalogs = jest.fn()
+const listDatabases = jest.fn()
+const listQueryExecutions = jest.fn()
+const listWorkGroups = jest.fn()
+const getQueryExecution = jest.fn()
+const listNamedQueries = jest.fn()
+const batchGetNamedQuery = jest.fn()
+const getQueryResults = jest.fn()
+const startQueryExecution = jest.fn()
+
+jest.mock('utils/AWS', () => ({
+ Athena: {
+ use: () => ({
+ batchGetNamedQuery,
+ batchGetQueryExecution,
+ getQueryExecution,
+ getQueryResults,
+ getWorkGroup,
+ listDataCatalogs,
+ listDatabases,
+ listNamedQueries,
+ listQueryExecutions,
+ listWorkGroups,
+ startQueryExecution,
+ }),
+ },
+}))
+
+describe('containers/Bucket/Queries/Athena/model/requests', () => {
+ describe('useCatalogNames', () => {
+ it('return catalog names', async () => {
+ listDataCatalogs.mockImplementationOnce(
+ req({
+ DataCatalogsSummary: [{ CatalogName: 'foo' }, { CatalogName: 'bar' }],
+ }),
+ )
+ const { result, waitForNextUpdate } = renderHook(() => requests.useCatalogNames())
+ expect(result.current.data).toBe(undefined)
+
+ await act(async () => {
+ await waitForNextUpdate()
+ })
+ expect(result.current.data).toMatchObject({ list: ['foo', 'bar'] })
+ })
+
+ it('return empty list', async () => {
+ listDataCatalogs.mockImplementationOnce(
+ req({
+ DataCatalogsSummary: [],
+ }),
+ )
+ const { result, waitForNextUpdate } = renderHook(() => requests.useCatalogNames())
+
+ await act(async () => {
+ await waitForNextUpdate()
+ })
+ expect(result.current.data).toMatchObject({ list: [] })
+ })
+
+ it('return unknowns on invalid data', async () => {
+ listDataCatalogs.mockImplementationOnce(
+ req({
+ // @ts-expect-error
+ DataCatalogsSummary: [{ Nonsense: true }, { Absurd: false }],
+ }),
+ )
+ const { result, waitForNextUpdate } = renderHook(() => requests.useCatalogNames())
+
+ await act(async () => {
+ await waitForNextUpdate()
+ })
+ expect(result.current.data).toMatchObject({ list: ['Unknown', 'Unknown'] })
+ })
+
+ it('return empty list on invalid data', async () => {
+ listDataCatalogs.mockImplementationOnce(
+ req({
+ // @ts-expect-error
+ Invalid: [],
+ }),
+ )
+ const { result, waitForNextUpdate } = renderHook(() => requests.useCatalogNames())
+
+ await act(async () => {
+ await waitForNextUpdate()
+ })
+ expect(result.current.data).toMatchObject({ list: [] })
+ })
+ })
+
+ describe('useCatalogName', () => {
+ // hooks doesn't support multiple arguments
+ // https://github.com/testing-library/react-testing-library/issues/1350
+    function useWrapper(props: Parameters<typeof requests.useCatalogName>) {
+ return requests.useCatalogName(...props)
+ }
+
+ it('wait for catalog names list', async () => {
+ const { result, rerender, unmount, waitForNextUpdate } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ { initialProps: [undefined, null] },
+ )
+ expect(result.current.value).toBe(undefined)
+
+ const error = new Error('Fail')
+ await act(async () => {
+ rerender([error, null])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe(error)
+
+ await act(async () => {
+ rerender([{ list: ['foo', 'bar'] }, null])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('foo')
+ unmount()
+ })
+
+ it('switch catalog when execution query loaded', async () => {
+ const { result, rerender, unmount, waitForNextUpdate } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ { initialProps: [undefined, undefined] },
+ )
+ await act(async () => {
+ rerender([{ list: ['foo', 'bar'] }, undefined])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('foo')
+ await act(async () => {
+ rerender([{ list: ['foo', 'bar'] }, { catalog: 'bar' }])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('bar')
+ unmount()
+ })
+
+ it('select execution catalog when catalog list loaded after execution', async () => {
+ const { result, rerender, unmount, waitForNextUpdate } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ { initialProps: [undefined, undefined] },
+ )
+
+ await act(async () => {
+ rerender([Model.Loading, { catalog: 'bar' }])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe(Model.Loading)
+
+ await act(async () => {
+ rerender([{ list: ['foo', 'bar'] }, { catalog: 'bar' }])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('bar')
+
+ unmount()
+ })
+
+ it('keep selection when execution has catalog that doesnt exist', async () => {
+ const { result, rerender, unmount, waitForNextUpdate } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ { initialProps: [undefined, undefined] },
+ )
+
+ await act(async () => {
+ rerender([{ list: ['foo', 'bar'] }, undefined])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('foo')
+
+ await act(async () => {
+ rerender([{ list: ['foo', 'bar'] }, { catalog: 'baz' }])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('foo')
+
+ unmount()
+ })
+
+ it('select null when catalog doesnt exist', async () => {
+ const { result, rerender, unmount, waitForNextUpdate } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ { initialProps: [undefined, undefined] },
+ )
+
+ await act(async () => {
+ rerender([{ list: [] }, undefined])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe(null)
+
+ act(() => {
+ result.current.setValue('baz')
+ })
+ expect(result.current.value).toBe('baz')
+
+ unmount()
+ })
+
+ it('select initial catalog from local storage', async () => {
+ getStorageKey.mockImplementationOnce(() => 'catalog-bar')
+ const { result, rerender, unmount, waitForNextUpdate } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ { initialProps: [undefined, undefined] },
+ )
+
+ await act(async () => {
+ rerender([{ list: ['foo', 'catalog-bar'] }, null])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('catalog-bar')
+
+ unmount()
+ })
+ })
+
+ describe('useDatabases', () => {
+ it('wait for catalogName', async () => {
+ const { result, rerender, waitForNextUpdate } = renderHook(
+        (...c: Parameters<typeof requests.useDatabases>) => requests.useDatabases(...c),
+ {
+ initialProps: undefined,
+ },
+ )
+
+ await act(async () => {
+ rerender(Model.Loading)
+ await waitForNextUpdate()
+ })
+ expect(result.current.data).toBe(Model.Loading)
+
+ const error = new Error('foo')
+ await act(async () => {
+ rerender(error)
+ await waitForNextUpdate()
+ })
+ expect(result.current.data).toBe(error)
+ })
+
+ it('return databases', async () => {
+ listDatabases.mockImplementation(
+ req({
+ DatabaseList: [{ Name: 'bar' }, { Name: 'baz' }],
+ }),
+ )
+ const { result, waitFor } = renderHook(() => requests.useDatabases('foo'))
+
+ expect((result.all[0] as Model.DataController).data).toBe(undefined)
+ expect((result.all[1] as Model.DataController).data).toBe(Model.Loading)
+ await waitFor(() =>
+ expect(result.current.data).toMatchObject({ list: ['bar', 'baz'] }),
+ )
+ })
+
+ it('handle invalid database', async () => {
+ listDatabases.mockImplementation(
+ req({
+ // @ts-expect-error
+ DatabaseList: [{ A: 'B' }, { C: 'D' }],
+ }),
+ )
+ const { result, waitFor } = renderHook(() => requests.useDatabases('foo'))
+ await waitFor(() =>
+ expect(result.current.data).toMatchObject({ list: ['Unknown', 'Unknown'] }),
+ )
+ })
+
+ it('handle invalid list', async () => {
+ listDatabases.mockImplementation(
+ req({
+ // @ts-expect-error
+ Foo: 'Bar',
+ }),
+ )
+ const { result, waitFor } = renderHook(() => requests.useDatabases('foo'))
+ await waitFor(() => expect(result.current.data).toMatchObject({ list: [] }))
+ })
+ })
+
+ describe('useDatabase', () => {
+    function useWrapper(props: Parameters<typeof requests.useDatabase>) {
+ return requests.useDatabase(...props)
+ }
+
+ it('wait for databases', async () => {
+ const { result, rerender, waitForNextUpdate, unmount } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ { initialProps: [undefined, null] },
+ )
+ expect(result.current.value).toBe(undefined)
+
+ await act(async () => {
+ rerender([Model.Loading, null])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe(Model.Loading)
+
+ const error = new Error('Fail')
+ await act(async () => {
+ rerender([error, null])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe(error)
+
+ await act(async () => {
+ rerender([{ list: ['foo', 'bar'] }, null])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('foo')
+
+ unmount()
+ })
+
+ it('switch database when execution query loaded', async () => {
+ const { result, rerender, waitForNextUpdate, unmount } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ { initialProps: [undefined, undefined] },
+ )
+
+ await act(async () => {
+ rerender([{ list: ['foo', 'bar'] }, undefined])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('foo')
+
+ await act(async () => {
+ rerender([{ list: ['foo', 'bar'] }, { db: 'bar' }])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('bar')
+
+ unmount()
+ })
+
+ it('select execution db when databases loaded after execution', async () => {
+ const { result, rerender, waitForNextUpdate, unmount } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ { initialProps: [undefined, undefined] },
+ )
+
+ await act(async () => {
+ rerender([Model.Loading, { db: 'bar' }])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe(Model.Loading)
+
+ await act(async () => {
+ rerender([{ list: ['foo', 'bar'] }, { db: 'bar' }])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('bar')
+
+ unmount()
+ })
+
+ it('keep selection when execution has db that doesn’t exist', async () => {
+ const { result, rerender, waitForNextUpdate, unmount } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ { initialProps: [undefined, undefined] },
+ )
+
+ await act(async () => {
+ rerender([{ list: ['foo', 'bar'] }, undefined])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('foo')
+
+ await act(async () => {
+ rerender([{ list: ['foo', 'bar'] }, { db: 'baz' }])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('foo')
+
+ unmount()
+ })
+
+ it('select null when db doesn’t exist', async () => {
+ const { result, rerender, waitForNextUpdate, unmount } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ { initialProps: [undefined, undefined] },
+ )
+
+ await act(async () => {
+ rerender([{ list: [] }, undefined])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe(null)
+
+ act(() => {
+ result.current.setValue('baz')
+ })
+ expect(result.current.value).toBe('baz')
+
+ unmount()
+ })
+
+ it('select initial db from local storage', async () => {
+ getStorageKey.mockImplementationOnce(() => 'bar')
+ const { result, rerender, waitForNextUpdate, unmount } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ { initialProps: [undefined, undefined] },
+ )
+
+ await act(async () => {
+ rerender([{ list: ['foo', 'bar'] }, null])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('bar')
+
+ unmount()
+ })
+ })
+
+ describe('useWorkgroups', () => {
+ listWorkGroups.mockImplementation(
+ reqThen(() => ({
+ WorkGroups: [{ Name: 'foo' }, { Name: 'bar' }],
+ })),
+ )
+
+ it('return workgroups', async () => {
+ await act(async () => {
+ getWorkGroup.mockImplementation(
+ reqThen(({ WorkGroup: Name }) => ({
+ WorkGroup: {
+ Configuration: {
+ ResultConfiguration: {
+ OutputLocation: 'any',
+ },
+ },
+ State: 'ENABLED',
+ Name,
+ },
+ })),
+ )
+ const { result, unmount, waitFor } = renderHook(() => requests.useWorkgroups())
+ await waitFor(() =>
+ expect(result.current.data).toMatchObject({ list: ['bar', 'foo'] }),
+ )
+ unmount()
+ })
+ })
+
+ it('return only valid workgroups', async () => {
+ await act(async () => {
+ getWorkGroup.mockImplementation(
+ reqThen(({ WorkGroup: Name }) => ({
+ WorkGroup: {
+ Configuration: {
+ ResultConfiguration: {
+ OutputLocation: 'any',
+ },
+ },
+ State: Name === 'foo' ? 'DISABLED' : 'ENABLED',
+ Name,
+ },
+ })),
+ )
+ const { result, unmount, waitFor } = renderHook(() => requests.useWorkgroups())
+ await waitFor(() => expect(result.current.data).toMatchObject({ list: ['bar'] }))
+ unmount()
+ })
+ })
+
+ it('handle invalid workgroup', async () => {
+ await act(async () => {
+ getWorkGroup.mockImplementation(
+ // @ts-expect-error
+ reqThen(() => ({
+ Invalid: 'foo',
+ })),
+ )
+ const { result, unmount, waitFor } = renderHook(() => requests.useWorkgroups())
+ await waitFor(() => typeof result.current.data === 'object')
+ expect(result.current.data).toMatchObject({ list: [] })
+ unmount()
+ })
+ })
+
+ it('handle fail in workgroup', async () => {
+ await act(async () => {
+ getWorkGroup.mockImplementation(reqThrow)
+ const { result, unmount, waitFor } = renderHook(() => requests.useWorkgroups())
+ await waitFor(() => typeof result.current.data === 'object')
+ expect(Log.error).toBeCalledWith(
+ 'Fetching "bar" workgroup failed:',
+ expect.any(Error),
+ )
+ expect(Log.error).toBeCalledWith(
+ 'Fetching "foo" workgroup failed:',
+ expect.any(Error),
+ )
+ expect(result.current.data).toMatchObject({ list: [] })
+ unmount()
+ })
+ })
+
+ it('handle access denied for workgroup list', async () => {
+ await act(async () => {
+ getWorkGroup.mockImplementation(
+ reqThrowWith({
+ code: 'AccessDeniedException',
+ }),
+ )
+ const { result, unmount, waitFor } = renderHook(() => requests.useWorkgroups())
+ await waitFor(() => typeof result.current.data === 'object')
+ expect(Log.info).toBeCalledWith(
+ 'Fetching "bar" workgroup failed: AccessDeniedException',
+ )
+ expect(Log.info).toBeCalledWith(
+ 'Fetching "foo" workgroup failed: AccessDeniedException',
+ )
+ expect(result.current.data).toMatchObject({ list: [] })
+ unmount()
+ })
+ })
+
+ it('handle invalid list', async () => {
+ await act(async () => {
+ listWorkGroups.mockImplementation(
+ // @ts-expect-error
+ reqThen(() => ({
+ Invalid: [{ Name: 'foo' }, { Name: 'bar' }],
+ })),
+ )
+ const { result, unmount, waitFor } = renderHook(() => requests.useWorkgroups())
+ await waitFor(() => typeof result.current.data === 'object')
+ expect(result.current.data).toMatchObject({ list: [] })
+ unmount()
+ })
+ })
+
+ it('handle no data in list', async () => {
+ await act(async () => {
+ listWorkGroups.mockImplementation(
+ // @ts-expect-error
+ reqThen(() => null),
+ )
+ const { result, unmount, waitFor } = renderHook(() => requests.useWorkgroups())
+ await waitFor(() => result.current.data instanceof Error)
+ expect(Log.error).toBeCalledWith(
+ new TypeError(`Cannot read properties of null (reading 'WorkGroups')`),
+ )
+ expect(result.current.data).toBeInstanceOf(TypeError)
+ unmount()
+ })
+ })
+
+ it('handle fail in list', async () => {
+ await act(async () => {
+ listWorkGroups.mockImplementation(reqThrow)
+ const { result, unmount, waitFor } = renderHook(() => requests.useWorkgroups())
+ await waitFor(() => result.current.data instanceof Error)
+ expect(Log.error).toBeCalledWith(expect.any(Error))
+ expect(result.current.data).toBeInstanceOf(Error)
+ unmount()
+ })
+ })
+ })
+
+ describe('useExecutions', () => {
+ listQueryExecutions.mockImplementation(
+ req({
+ QueryExecutionIds: ['foo', 'bar'],
+ }),
+ )
+ it('return results', async () => {
+ batchGetQueryExecution.mockImplementation(
+ req({
+ QueryExecutions: [
+ {
+ QueryExecutionId: '$foo',
+ },
+ {
+ QueryExecutionId: '$bar',
+ },
+ ],
+ UnprocessedQueryExecutionIds: [
+ { QueryExecutionId: '$baz', ErrorMessage: 'fail' },
+ ],
+ }),
+ )
+ await act(async () => {
+ const { result, unmount, waitFor } = renderHook(() =>
+ requests.useExecutions('any'),
+ )
+ await waitFor(() => typeof result.current.data === 'object')
+ expect(result.current.data).toMatchObject({
+ list: [
+ { id: '$foo' },
+ { id: '$bar' },
+ { id: '$baz', error: new Error('fail') },
+ ],
+ })
+ unmount()
+ })
+ })
+ })
+
+ describe('useWaitForQueryExecution', () => {
+ it('return execution', async () => {
+ getQueryExecution.mockImplementation(
+ req({
+ QueryExecution: { QueryExecutionId: '$foo', Status: { State: 'SUCCEEDED' } },
+ }),
+ )
+ await act(async () => {
+ const { result, unmount, waitFor } = renderHook(() =>
+ requests.useWaitForQueryExecution('any'),
+ )
+ await waitFor(() => typeof result.current === 'object')
+ expect(result.current).toMatchObject({
+ id: '$foo',
+ })
+ unmount()
+ })
+ })
+ })
+
+ describe('useQueries', () => {
+ listNamedQueries.mockImplementation(
+ req({
+ NamedQueryIds: ['foo', 'bar'],
+ }),
+ )
+ it('return results', async () => {
+ batchGetNamedQuery.mockImplementation(
+ req({
+ NamedQueries: [
+ {
+ Database: 'any',
+ QueryString: 'SELECT * FROM *',
+ NamedQueryId: '$foo',
+ Name: 'Foo',
+ },
+ {
+ Database: 'any',
+ QueryString: 'SELECT * FROM *',
+ NamedQueryId: '$bar',
+ Name: 'Bar',
+ },
+ ],
+ }),
+ )
+ await act(async () => {
+ const { result, unmount, waitFor } = renderHook(() => requests.useQueries('any'))
+ await waitFor(() => typeof result.current.data === 'object')
+ expect(result.current.data).toMatchObject({
+ list: [
+ { name: 'Bar', key: '$bar', body: 'SELECT * FROM *' },
+ { name: 'Foo', key: '$foo', body: 'SELECT * FROM *' },
+ ],
+ })
+ unmount()
+ })
+ })
+ })
+
+ describe('useResults', () => {
+ it('handle empty results', async () => {
+ getQueryResults.mockImplementation(
+ req({
+ ResultSet: {
+ Rows: [],
+ ResultSetMetadata: {
+ ColumnInfo: [{ Name: 'any', Type: 'some' }],
+ },
+ },
+ }),
+ )
+ await act(async () => {
+ const { result, unmount, waitFor } = renderHook(() =>
+ requests.useResults({ id: 'any' }),
+ )
+ await waitFor(() => typeof result.current.data === 'object')
+ expect(result.current.data).toMatchObject({
+ rows: [],
+ columns: [],
+ })
+ unmount()
+ })
+ })
+
+ it('return results', async () => {
+ getQueryResults.mockImplementation(
+ req({
+ ResultSet: {
+ Rows: [
+ {
+ Data: [{ VarCharValue: 'foo' }, { VarCharValue: 'bar' }],
+ },
+ {
+ Data: [{ VarCharValue: 'bar' }, { VarCharValue: 'baz' }],
+ },
+ ],
+ ResultSetMetadata: {
+ ColumnInfo: [
+ { Name: 'foo', Type: 'some' },
+ { Name: 'bar', Type: 'another' },
+ ],
+ },
+ },
+ }),
+ )
+ await act(async () => {
+ const { result, unmount, waitFor } = renderHook(() =>
+ requests.useResults({ id: 'any' }),
+ )
+ await waitFor(() => typeof result.current.data === 'object')
+ expect(result.current.data).toMatchObject({
+ rows: [['bar', 'baz']],
+ columns: [
+ { name: 'foo', type: 'some' },
+ { name: 'bar', type: 'another' },
+ ],
+ })
+ unmount()
+ })
+ })
+ })
+
+ describe('useQueryRun', () => {
+ it('return execution id', async () => {
+ startQueryExecution.mockImplementation(
+ reqThen(() => ({
+ QueryExecutionId: 'foo',
+ })),
+ )
+ await act(async () => {
+ const { result, unmount, waitForNextUpdate } = renderHook(() =>
+ requests.useQueryRun({
+ workgroup: 'a',
+ catalogName: 'b',
+ database: 'c',
+ queryBody: 'd',
+ }),
+ )
+ await waitForNextUpdate()
+ const run = await result.current[1](false)
+ expect(run).toMatchObject({
+ id: 'foo',
+ })
+ unmount()
+ })
+ })
+
+ it('return error if no execution id', async () => {
+ startQueryExecution.mockImplementation(
+ reqThen(() => ({})),
+ )
+ await act(async () => {
+ const { result, unmount, waitForNextUpdate } = renderHook(() =>
+ requests.useQueryRun({
+ workgroup: 'a',
+ catalogName: 'b',
+ database: 'c',
+ queryBody: 'd',
+ }),
+ )
+ await waitForNextUpdate()
+ const run = await result.current[1](false)
+ expect(run).toBeInstanceOf(Error)
+ expect(Log.error).toBeCalledWith(new Error('No execution id'))
+ if (Model.isError(run)) {
+ expect(run.message).toBe('No execution id')
+ } else {
+ throw new Error('queryRun is not an error')
+ }
+ unmount()
+ })
+ })
+
+ it('handle fail in request', async () => {
+ startQueryExecution.mockImplementation(reqThrow)
+ await act(async () => {
+ const { result, unmount, waitForNextUpdate } = renderHook(() =>
+ requests.useQueryRun({
+ workgroup: 'a',
+ catalogName: 'b',
+ database: 'c',
+ queryBody: 'd',
+ }),
+ )
+ await waitForNextUpdate()
+ const run = await result.current[1](false)
+ expect(run).toBeInstanceOf(Error)
+ unmount()
+ })
+ })
+ })
+
+ describe('useWorkgroup', () => {
+    function useWrapper(props: Parameters<typeof requests.useWorkgroup>) {
+ return requests.useWorkgroup(...props)
+ }
+
+ it('select requested workgroup if it exists', async () => {
+ await act(async () => {
+ const workgroups = {
+ data: { list: ['foo', 'bar'] },
+ loadMore: jest.fn(),
+ }
+ const { result, waitFor } = renderHook(() =>
+ useWrapper([workgroups, 'bar', undefined]),
+ )
+ await waitFor(() => typeof result.current.data === 'string')
+ expect(result.current.data).toBe('bar')
+ })
+ })
+
+ it('select initial workgroup from storage if valid', async () => {
+ const storageMock = getStorageKey.getMockImplementation()
+ getStorageKey.mockImplementation(() => 'bar')
+ const workgroups = {
+ data: { list: ['foo', 'bar'] },
+ loadMore: jest.fn(),
+ }
+
+ const { result, waitFor, unmount } = renderHook(() =>
+ useWrapper([workgroups, undefined, undefined]),
+ )
+
+ await act(async () => {
+ await waitFor(() => typeof result.current.data === 'string')
+ expect(result.current.data).toBe('bar')
+ })
+ getStorageKey.mockImplementation(storageMock)
+ unmount()
+ })
+
+ it('select default workgroup from preferences if valid', async () => {
+ const workgroups = {
+ data: { list: ['foo', 'bar'] },
+ loadMore: jest.fn(),
+ }
+ const preferences = { defaultWorkgroup: 'bar' }
+
+ const { result, waitFor, unmount } = renderHook(() =>
+ useWrapper([workgroups, undefined, preferences]),
+ )
+
+ await act(async () => {
+ await waitFor(() => typeof result.current.data === 'string')
+ expect(result.current.data).toBe('bar')
+ })
+ unmount()
+ })
+
+ it('select the first available workgroup if no requested or default', async () => {
+ await act(async () => {
+ const workgroups = {
+ data: { list: ['foo', 'bar', 'baz'] },
+ loadMore: jest.fn(),
+ }
+
+ const { result, waitFor } = renderHook(() =>
+ useWrapper([workgroups, undefined, undefined]),
+ )
+
+ await waitFor(() => typeof result.current.data === 'string')
+ expect(result.current.data).toBe('foo')
+ })
+ })
+
+ it('return error if no workgroups are available', async () => {
+ await act(async () => {
+ const workgroups = {
+ data: { list: [] },
+ loadMore: jest.fn(),
+ }
+
+ const { result, waitFor } = renderHook(() =>
+ useWrapper([workgroups, undefined, undefined]),
+ )
+
+ await waitFor(() => result.current.data instanceof Error)
+ if (Model.isError(result.current.data)) {
+ expect(result.current.data.message).toBe('Workgroup not found')
+ } else {
+ throw new Error('Not an error')
+ }
+ })
+ })
+
+ it('wait for workgroups', async () => {
+ const workgroups = {
+ data: undefined,
+ loadMore: jest.fn(),
+ }
+
+ const { result, rerender, unmount, waitForNextUpdate } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ { initialProps: [workgroups, undefined, undefined] },
+ )
+ expect(result.current.data).toBeUndefined()
+
+ await act(async () => {
+ rerender()
+ await waitForNextUpdate()
+ })
+ expect(result.current.data).toBeUndefined()
+ unmount()
+ })
+ })
+
+ describe('useQuery', () => {
+    function useWrapper(props: Parameters<typeof requests.useQuery>) {
+ return requests.useQuery(...props)
+ }
+
+ it('sets query to the one matching the execution query', () => {
+ const queries = {
+ list: [
+ { key: 'foo', name: 'Foo', body: 'SELECT * FROM foo' },
+ { key: 'bar', name: 'Bar', body: 'SELECT * FROM bar' },
+ ],
+ }
+ const execution = { query: 'SELECT * FROM bar' }
+ const { result } = renderHook(() => useWrapper([queries, execution]))
+
+ if (Model.hasData(result.current.value)) {
+ expect(result.current.value.body).toBe('SELECT * FROM bar')
+ } else {
+ throw new Error('No data')
+ }
+ })
+
+ it('unsets query if no matching execution query', () => {
+ const queries = {
+ list: [
+ { key: 'foo', name: 'Foo', body: 'SELECT * FROM foo' },
+ { key: 'bar', name: 'Bar', body: 'SELECT * FROM bar' },
+ ],
+ }
+ const execution = { query: 'SELECT * FROM baz' }
+ const { result } = renderHook(() => useWrapper([queries, execution]))
+
+ if (Model.hasValue(result.current.value)) {
+ expect(result.current.value).toBe(null)
+ } else {
+ throw new Error('No data')
+ }
+ })
+
+ it('sets query to the first one if no execution query is set', () => {
+ const queries = {
+ list: [
+ { key: 'foo', name: 'Foo', body: 'SELECT * FROM foo' },
+ { key: 'bar', name: 'Bar', body: 'SELECT * FROM bar' },
+ ],
+ }
+ const execution = {}
+ const { result } = renderHook(() => useWrapper([queries, execution]))
+
+ if (Model.hasData(result.current.value)) {
+ expect(result.current.value.body).toBe('SELECT * FROM foo')
+ } else {
+ throw new Error('No data')
+ }
+ })
+
+ it('sets query to null if no queries are available', () => {
+ const queries = { list: [] }
+ const execution = {}
+ const { result } = renderHook(() => useWrapper([queries, execution]))
+
+ if (Model.hasValue(result.current.value)) {
+ expect(result.current.value).toBeNull()
+ } else {
+ throw new Error('No data')
+ }
+ })
+
+ it('does not change query if a valid query is already selected', async () => {
+ const queries = {
+ list: [
+ { key: 'foo', name: 'Foo', body: 'SELECT * FROM foo' },
+ { key: 'bar', name: 'Bar', body: 'SELECT * FROM bar' },
+ ],
+ }
+ const execution = {
+ query: 'SELECT * FROM bar',
+ }
+ const { result, rerender, waitForNextUpdate } = renderHook(
+ (props: Parameters) => useWrapper(props),
+ {
+ initialProps: [queries, execution],
+ },
+ )
+
+ if (Model.hasData(result.current.value)) {
+ expect(result.current.value.body).toBe('SELECT * FROM bar')
+ } else {
+ throw new Error('No data')
+ }
+ await act(async () => {
+ rerender([
+ {
+ list: [
+ { key: 'baz', name: 'Baz', body: 'SELECT * FROM baz' },
+ { key: 'foo', name: 'Foo', body: 'SELECT * FROM foo' },
+ { key: 'bar', name: 'Bar', body: 'SELECT * FROM bar' },
+ ],
+ },
+ execution,
+ ])
+ await waitForNextUpdate()
+ })
+ if (Model.hasData(result.current.value)) {
+ expect(result.current.value.body).toBe('SELECT * FROM bar')
+ } else {
+ throw new Error('No data')
+ }
+ })
+ })
+
+ describe('useQueryBody', () => {
+    function useWrapper(props: Parameters<typeof requests.useQueryBody>) {
+ return requests.useQueryBody(...props)
+ }
+
+ it('sets query body from query if query is ready', () => {
+ const query = { name: 'Foo', key: 'foo', body: 'SELECT * FROM foo' }
+ const execution = {}
+ const setQuery = jest.fn()
+
+ const { result } = renderHook(() => useWrapper([query, setQuery, execution]))
+
+ if (Model.hasData(result.current.value)) {
+ expect(result.current.value).toBe('SELECT * FROM foo')
+ } else {
+ throw new Error('No data')
+ }
+ })
+
+ it('sets query body from execution if query is not ready', () => {
+ const query = null
+ const execution = { query: 'SELECT * FROM bar' }
+ const setQuery = jest.fn()
+
+ const { result } = renderHook(() => useWrapper([query, setQuery, execution]))
+
+ if (Model.hasData(result.current.value)) {
+ expect(result.current.value).toBe('SELECT * FROM bar')
+ } else {
+ throw new Error('No data')
+ }
+ })
+
+ it('sets query body to null if query is an error', () => {
+ const query = new Error('Query failed')
+ const execution = {}
+ const setQuery = jest.fn()
+
+ const { result } = renderHook(() => useWrapper([query, setQuery, execution]))
+
+ if (Model.hasValue(result.current.value)) {
+ expect(result.current.value).toBeNull()
+ } else {
+ throw new Error('Unexpected state')
+ }
+ })
+
+ it('does not change value if query and execution are both not ready', async () => {
+ const query = null
+ const execution = null
+ const setQuery = jest.fn()
+
+ const { result, rerender, waitForNextUpdate } = renderHook(
+ (x: Parameters) => useWrapper(x),
+ {
+ initialProps: [query, setQuery, execution],
+ },
+ )
+
+ expect(result.current.value).toBeUndefined()
+ act(() => {
+ result.current.setValue('foo')
+ })
+ expect(result.current.value).toBe('foo')
+
+ await act(async () => {
+ rerender([query, setQuery, execution])
+ await waitForNextUpdate()
+ })
+ expect(result.current.value).toBe('foo')
+ })
+
+ it('updates query body and resets query when handleValue is called', async () => {
+ const query = { name: 'Foo', key: 'foo', body: 'SELECT * FROM foo' }
+ const execution = {}
+ const setQuery = jest.fn()
+
+ const { result } = renderHook(() => useWrapper([query, setQuery, execution]))
+
+ act(() => {
+ result.current.setValue('SELECT * FROM bar')
+ })
+
+ expect(result.current.value).toBe('SELECT * FROM bar')
+ expect(setQuery).toHaveBeenCalledWith(null)
+ })
+
+ it('retains value when execution and query are initially empty but later updates', async () => {
+ const initialQuery = null
+ const initialExecution = null
+ const setQuery = jest.fn()
+
+ const { result, rerender, waitForNextUpdate } = renderHook(
+ (props: Parameters) => useWrapper(props),
+ {
+ initialProps: [initialQuery, setQuery, initialExecution],
+ },
+ )
+
+ expect(result.current.value).toBeUndefined()
+
+ await act(async () => {
+ rerender([
+ { key: 'up', name: 'Updated', body: 'SELECT * FROM updated' },
+ setQuery,
+ initialExecution,
+ ])
+ await waitForNextUpdate()
+ })
+
+ if (Model.hasData(result.current.value)) {
+ expect(result.current.value).toBe('SELECT * FROM updated')
+ } else {
+ throw new Error('No data')
+ }
+ })
+ })
+})
diff --git a/catalog/app/containers/Bucket/Queries/Athena/model/requests.ts b/catalog/app/containers/Bucket/Queries/Athena/model/requests.ts
new file mode 100644
index 00000000000..227e6ecbde2
--- /dev/null
+++ b/catalog/app/containers/Bucket/Queries/Athena/model/requests.ts
@@ -0,0 +1,754 @@
+import type Athena from 'aws-sdk/clients/athena'
+import * as React from 'react'
+import * as Sentry from '@sentry/react'
+
+import * as AWS from 'utils/AWS'
+import * as BucketPreferences from 'utils/BucketPreferences'
+import Log from 'utils/Logging'
+
+import * as storage from './storage'
+import * as Model from './utils'
+
+export interface Query {
+ // TODO: database?
+ body: string
+ description?: string
+ key: string
+ name: string
+}
+
+function parseNamedQuery(query: Athena.NamedQuery): Query {
+ // TODO: database: query.Database!
+ return {
+ body: query.QueryString,
+ description: query.Description,
+ key: query.NamedQueryId!,
+ name: query.Name,
+ }
+}
+
+function listIncludes(list: string[], value: string): boolean {
+ return list.map((x) => x.toLowerCase()).includes(value.toLowerCase())
+}
+
+export type Workgroup = string
+
+interface WorkgroupArgs {
+ athena: Athena
+ workgroup: Workgroup
+}
+
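+// Resolve a workgroup name only if the workgroup is enabled and has an output location;
+// otherwise (or on access errors) return `null` so it is filtered out of the list.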
+async function fetchWorkgroup({
+ athena,
+ workgroup,
+}: WorkgroupArgs): Promise<Workgroup | null> {
+ try {
+ const workgroupOutput = await athena.getWorkGroup({ WorkGroup: workgroup }).promise()
+ if (
+ workgroupOutput?.WorkGroup?.Configuration?.ResultConfiguration?.OutputLocation &&
+ workgroupOutput?.WorkGroup?.State === 'ENABLED' &&
+ workgroupOutput?.WorkGroup?.Name
+ ) {
+ return workgroupOutput.WorkGroup.Name
+ }
+ return null
+ } catch (error) {
+ if ((error as $TSFixMe).code === 'AccessDeniedException') {
+ Log.info(`Fetching "${workgroup}" workgroup failed: ${(error as $TSFixMe).code}`)
+ } else {
+ Log.error(`Fetching "${workgroup}" workgroup failed:`, error)
+ }
+ return null
+ }
+}
+
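+// Fetch one page of workgroups, validate each of them and append the valid ones
+// to the previously loaded list.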
+async function fetchWorkgroups(
+ athena: Athena,
+  prev: Model.List<Workgroup> | null,
+): Promise<Model.List<Workgroup>> {
+ try {
+ const workgroupsOutput = await athena
+ .listWorkGroups({ NextToken: prev?.next })
+ .promise()
+ const parsed = (workgroupsOutput.WorkGroups || [])
+ .map(({ Name }) => Name || '')
+ .filter(Boolean)
+ .sort()
+ const available = (
+ await Promise.all(parsed.map((workgroup) => fetchWorkgroup({ athena, workgroup })))
+ ).filter(Boolean)
+ const list = (prev?.list || []).concat(available as Workgroup[])
+ return {
+ list,
+ next: workgroupsOutput.NextToken,
+ }
+ } catch (e) {
+ Log.error(e)
+ throw e
+ }
+}
+
+export function useWorkgroups(): Model.DataController<Model.List<Workgroup>> {
+  const athena = AWS.Athena.use()
+  const [prev, setPrev] = React.useState<Model.List<Workgroup> | null>(null)
+  const [data, setData] = React.useState<Model.Data<Model.List<Workgroup>>>()
+ React.useEffect(() => {
+ let mounted = true
+ if (!athena) return
+ fetchWorkgroups(athena, prev)
+ .then((d) => mounted && setData(d))
+ .catch((d) => mounted && setData(d))
+ return () => {
+ mounted = false
+ }
+ }, [athena, prev])
+ return React.useMemo(() => Model.wrapData(data, setPrev), [data])
+}
+
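+// Select the current workgroup: prefer the explicitly requested one,
+// then the one persisted in storage or set in bucket preferences,
+// and fall back to the first available workgroup.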
+export function useWorkgroup(
+  workgroups: Model.DataController<Model.List<Workgroup>>,
+  requestedWorkgroup?: Workgroup,
+  preferences?: BucketPreferences.AthenaPreferences,
+): Model.DataController<Workgroup> {
+  const [data, setData] = React.useState<Model.Data<Workgroup>>()
+ React.useEffect(() => {
+ if (!Model.hasData(workgroups.data)) return
+ setData((d) => {
+ if (!Model.hasData(workgroups.data)) return d
+ if (requestedWorkgroup && listIncludes(workgroups.data.list, requestedWorkgroup)) {
+ return requestedWorkgroup
+ }
+ const initialWorkgroup = storage.getWorkgroup() || preferences?.defaultWorkgroup
+ if (initialWorkgroup && listIncludes(workgroups.data.list, initialWorkgroup)) {
+ return initialWorkgroup
+ }
+ return workgroups.data.list[0] || new Error('Workgroup not found')
+ })
+ }, [preferences, requestedWorkgroup, workgroups])
+ return React.useMemo(
+ () => Model.wrapData(data, workgroups.loadMore),
+ [data, workgroups.loadMore],
+ )
+}
+
+export interface QueryExecution {
+ catalog?: string
+ completed?: Date
+ created?: Date
+ db?: string
+ id?: string
+ outputBucket?: string
+ query?: string
+ status?: string // 'QUEUED' | 'RUNNING' | 'SUCCEEDED' | 'FAILED' | 'CANCELLED'
+ workgroup?: Athena.WorkGroupName
+}
+
+export interface QueryExecutionFailed {
+ id?: string
+ error: Error
+}
+
+function parseQueryExecution(queryExecution: Athena.QueryExecution): QueryExecution {
+ return {
+ catalog: queryExecution?.QueryExecutionContext?.Catalog,
+ completed: queryExecution?.Status?.CompletionDateTime,
+ created: queryExecution?.Status?.SubmissionDateTime,
+ db: queryExecution?.QueryExecutionContext?.Database,
+ id: queryExecution?.QueryExecutionId,
+ outputBucket: queryExecution?.ResultConfiguration?.OutputLocation,
+ query: queryExecution?.Query,
+ status: queryExecution?.Status?.State,
+ workgroup: queryExecution?.WorkGroup,
+ }
+}
+
+function parseQueryExecutionError(
+ error: Athena.UnprocessedQueryExecutionId,
+): QueryExecutionFailed {
+ return {
+ error: new Error(error?.ErrorMessage || 'Unknown'),
+ id: error?.QueryExecutionId,
+ }
+}
+
+export type QueryExecutionsItem = QueryExecution | QueryExecutionFailed
+
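+// List query executions for the selected workgroup (paginated),
+// resolving each id into either an execution summary or a failure entry.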
+export function useExecutions(
+  workgroup: Model.Data<Workgroup>,
+  queryExecutionId?: string,
+): Model.DataController<Model.List<QueryExecutionsItem>> {
+  const athena = AWS.Athena.use()
+  const [prev, setPrev] = React.useState<Model.List<QueryExecutionsItem> | null>(null)
+  const [data, setData] = React.useState<Model.Data<Model.List<QueryExecutionsItem>>>()
+
+ React.useEffect(() => {
+ if (queryExecutionId) return
+ if (!Model.hasValue(workgroup)) {
+ setData(workgroup)
+ return
+ }
+ setData(Model.Loading)
+    let batchRequest: ReturnType<Athena['batchGetQueryExecution']>
+
+ const request = athena?.listQueryExecutions(
+ { WorkGroup: workgroup, NextToken: prev?.next },
+ (error, d) => {
+ const { QueryExecutionIds, NextToken: next } = d || {}
+ if (error) {
+ Sentry.captureException(error)
+ setData(error)
+ return
+ }
+ if (!QueryExecutionIds || !QueryExecutionIds.length) {
+ setData({
+ list: [],
+ next,
+ })
+ return
+ }
+ batchRequest = athena?.batchGetQueryExecution(
+ { QueryExecutionIds },
+ (batchErr, batchData) => {
+ const { QueryExecutions, UnprocessedQueryExecutionIds } = batchData || {}
+ if (batchErr) {
+ Sentry.captureException(batchErr)
+ setData(batchErr)
+ return
+ }
+ const parsed = (QueryExecutions || [])
+ .map(parseQueryExecution)
+ .concat((UnprocessedQueryExecutionIds || []).map(parseQueryExecutionError))
+ const list = (prev?.list || []).concat(parsed)
+ setData({
+ list,
+ next,
+ })
+ },
+ )
+ },
+ )
+ return () => {
+ request?.abort()
+ batchRequest?.abort()
+ }
+ }, [athena, workgroup, prev, queryExecutionId])
+ return React.useMemo(() => Model.wrapData(data, setPrev), [data])
+}
+
+function useFetchQueryExecution(
+ QueryExecutionId?: string,
+): [Model.Value<QueryExecution>, () => void] {
+  const athena = AWS.Athena.use()
+  const [data, setData] = React.useState<Model.Value<QueryExecution>>(
+ QueryExecutionId ? undefined : null,
+ )
+ const [counter, setCounter] = React.useState(0)
+ React.useEffect(() => {
+ if (!QueryExecutionId) {
+ setData(null)
+ return
+ }
+ setData(Model.Loading)
+ const request = athena?.getQueryExecution({ QueryExecutionId }, (error, d) => {
+ const { QueryExecution } = d || {}
+ if (error) {
+ Sentry.captureException(error)
+ setData(error)
+ return
+ }
+ const status = QueryExecution?.Status?.State
+ const parsed = QueryExecution
+ ? parseQueryExecution(QueryExecution)
+ : { id: QueryExecutionId }
+ switch (status) {
+ case 'FAILED':
+ case 'CANCELLED': {
+ const reason = QueryExecution?.Status?.StateChangeReason || ''
+ setData(new Error(`${status}: ${reason}`))
+ break
+ }
+ case 'SUCCEEDED':
+ setData(parsed)
+ break
+ case 'QUEUED':
+ case 'RUNNING':
+ break
+ default:
+ setData(new Error('Unknown query execution status'))
+ break
+ }
+ })
+ return () => request?.abort()
+ }, [athena, QueryExecutionId, counter])
+ const fetch = React.useCallback(() => setCounter((prev) => prev + 1), [])
+ return [data, fetch]
+}
+
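+// Poll the query execution every second until it reaches a terminal state
+// (SUCCEEDED, FAILED or CANCELLED).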
+export function useWaitForQueryExecution(
+ queryExecutionId?: string,
+): Model.Value<QueryExecution> {
+ const [data, fetch] = useFetchQueryExecution(queryExecutionId)
+ const [timer, setTimer] = React.useState(null)
+ React.useEffect(() => {
+ const t = setInterval(fetch, 1000)
+ setTimer(t)
+ return () => clearInterval(t)
+ }, [queryExecutionId, fetch])
+ React.useEffect(() => {
+ if (Model.isReady(data) && timer) {
+ clearInterval(timer)
+ }
+ }, [timer, data])
+ return data
+}
+
+export type QueryResultsValue = Athena.datumString
+
+interface QueryResultsColumnInfo<T> {
+  name: T
+  type: Athena.String
+}
+
+export type QueryResultsColumns<T = string> = QueryResultsColumnInfo<T>[]
+type Row = QueryResultsValue[]
+export type QueryResultsRows = Row[]
+
+export interface QueryResults {
+ columns: QueryResultsColumns
+ next?: string
+ rows: QueryResultsRows
+}
+
+export type ManifestKey =
+ | 'hash'
+ | 'logical_key'
+ | 'meta'
+ | 'physical_key'
+ | 'physical_keys'
+ | 'size'
+
+export interface QueryManifests extends QueryResults {
+  columns: QueryResultsColumns<ManifestKey>
+}
+
+const emptyRow: Row = []
+const emptyList: QueryResultsRows = []
+const emptyColumns: QueryResultsColumns = []
+
+export interface QueryRun {
+ id: string
+}
+
+export type CatalogName = string
+
+export type Database = string
+
+export type QueryId = string
+export interface QueriesIdsResponse {
+ list: QueryId[]
+ next?: string
+}
+
+export function useQueries(
+  workgroup: Model.Data<Workgroup>,
+): Model.DataController<Model.List<Query>> {
+  const athena = AWS.Athena.use()
+  const [prev, setPrev] = React.useState<Model.List<Query> | null>(null)
+  const [data, setData] = React.useState<Model.Data<Model.List<Query>>>()
+ React.useEffect(() => {
+ if (!Model.hasValue(workgroup)) {
+ setData(workgroup)
+ return
+ }
+ setData(Model.Loading)
+
+    let batchRequest: ReturnType<Athena['batchGetNamedQuery']>
+ const request = athena?.listNamedQueries(
+ {
+ WorkGroup: workgroup,
+ NextToken: prev?.next,
+ },
+ async (error, d) => {
+ const { NamedQueryIds, NextToken: next } = d || {}
+ if (error) {
+ Sentry.captureException(error)
+ setData(error)
+ return
+ }
+ if (!NamedQueryIds || !NamedQueryIds.length) {
+ setData({
+ list: prev?.list || [],
+ next,
+ })
+ return
+ }
+ batchRequest = athena?.batchGetNamedQuery(
+ { NamedQueryIds },
+ (batchErr, batchData) => {
+ const { NamedQueries } = batchData || {}
+ if (batchErr) {
+ Sentry.captureException(batchErr)
+ setData(batchErr)
+ return
+ }
+ const parsed = (NamedQueries || [])
+ .map(parseNamedQuery)
+ .sort((a, b) => a.name.localeCompare(b.name))
+ const list = (prev?.list || []).concat(parsed)
+ setData({
+ list,
+ next,
+ })
+ },
+ )
+ },
+ )
+ return () => {
+ request?.abort()
+ batchRequest?.abort()
+ }
+ }, [athena, workgroup, prev])
+ return React.useMemo(() => Model.wrapData(data, setPrev), [data])
+}
+
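+// Load query results for a finished execution, accumulating rows across pages
+// and stripping the header row when it duplicates the column names.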
+export function useResults(
+  execution: Model.Value<QueryExecution>,
+): Model.DataController<QueryResults> {
+  const athena = AWS.Athena.use()
+  const [prev, setPrev] = React.useState<QueryResults | null>(null)
+  const [data, setData] = React.useState<Model.Data<QueryResults>>()
+
+ React.useEffect(() => {
+ if (execution === null) {
+ setData(undefined)
+ return
+ }
+ if (!Model.hasValue(execution)) {
+ setData(execution)
+ return
+ }
+ if (!execution.id) {
+ setData(new Error('Query execution has no ID'))
+ return
+ }
+
+ const request = athena?.getQueryResults(
+ { QueryExecutionId: execution.id, NextToken: prev?.next },
+ (error, d) => {
+ const { ResultSet, NextToken: next } = d || {}
+ if (error) {
+ Sentry.captureException(error)
+ setData(error)
+ return
+ }
+ const parsed =
+ ResultSet?.Rows?.map(
+ (row) => row?.Data?.map((item) => item?.VarCharValue || '') || emptyRow,
+ ) || emptyList
+ const rows = [...(prev?.rows || emptyList), ...parsed]
+ if (!rows.length) {
+ setData({
+ rows: [],
+ columns: [],
+ next,
+ })
+ return
+ }
+ const columns =
+ ResultSet?.ResultSetMetadata?.ColumnInfo?.map(({ Name, Type }) => ({
+ name: Name,
+ type: Type,
+ })) || emptyColumns
+ const isHeadColumns = columns.every(({ name }, index) => name === rows[0][index])
+ setData({
+ rows: isHeadColumns ? rows.slice(1) : rows,
+ columns,
+ next,
+ })
+ },
+ )
+ return () => request?.abort()
+ }, [athena, execution, prev])
+ return React.useMemo(() => Model.wrapData(data, setPrev), [data])
+}
+
+export function useDatabases(
+  catalogName: Model.Value<CatalogName>,
+): Model.DataController<Model.List<Database>> {
+  const athena = AWS.Athena.use()
+  const [prev, setPrev] = React.useState<Model.List<Database> | null>(null)
+  const [data, setData] = React.useState<Model.Data<Model.List<Database>>>()
+ React.useEffect(() => {
+ if (!Model.hasData(catalogName)) {
+ setData(catalogName || undefined)
+ return
+ }
+ setData(Model.Loading)
+ const request = athena?.listDatabases(
+ {
+ CatalogName: catalogName,
+ NextToken: prev?.next,
+ },
+ (error, d) => {
+ const { DatabaseList, NextToken: next } = d || {}
+ if (error) {
+ Sentry.captureException(error)
+ setData(error)
+ return
+ }
+ const list = DatabaseList?.map(({ Name }) => Name || 'Unknown').sort() || []
+ setData({ list: (prev?.list || []).concat(list), next })
+ },
+ )
+ return () => request?.abort()
+ }, [athena, catalogName, prev])
+ return React.useMemo(() => Model.wrapData(data, setPrev), [data])
+}
+
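+// Select the current database: prefer the one from the loaded execution,
+// then keep the current selection, then the persisted one, then the first available.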
+export function useDatabase(
+  databases: Model.Data<Model.List<Database>>,
+  execution: Model.Value<QueryExecution>,
+): Model.ValueController<Database> {
+  const [value, setValue] = React.useState<Model.Value<Database>>()
+ React.useEffect(() => {
+ if (!Model.hasData(databases)) {
+ setValue(databases)
+ return
+ }
+ setValue((v) => {
+ if (
+ Model.hasData(execution) &&
+ execution.db &&
+ listIncludes(databases.list, execution.db)
+ ) {
+ return execution.db
+ }
+ if (Model.hasData(v) && listIncludes(databases.list, v)) {
+ return v
+ }
+ const initialDatabase = storage.getDatabase()
+ if (initialDatabase && listIncludes(databases.list, initialDatabase)) {
+ return initialDatabase
+ }
+ return databases.list[0] || null
+ })
+ }, [databases, execution])
+ return React.useMemo(() => Model.wrapValue(value, setValue), [value])
+}
+
+export function useCatalogNames(): Model.DataController<Model.List<CatalogName>> {
+  const athena = AWS.Athena.use()
+  const [prev, setPrev] = React.useState<Model.List<CatalogName> | null>(null)
+  const [data, setData] = React.useState<Model.Data<Model.List<CatalogName>>>()
+ React.useEffect(() => {
+ const request = athena?.listDataCatalogs({ NextToken: prev?.next }, (error, d) => {
+ const { DataCatalogsSummary, NextToken: next } = d || {}
+ setData(Model.Loading)
+ if (error) {
+ Sentry.captureException(error)
+ setData(error)
+ return
+ }
+ const list = DataCatalogsSummary?.map(({ CatalogName }) => CatalogName || 'Unknown')
+ setData({
+ list: (prev?.list || []).concat(list || []),
+ next,
+ })
+ })
+ return () => request?.abort()
+ }, [athena, prev])
+ return React.useMemo(() => Model.wrapData(data, setPrev), [data])
+}
+
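+// Select the current catalog name using the same precedence as the database selection:
+// execution value, then current selection, then storage, then the first available.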
+export function useCatalogName(
+  catalogNames: Model.Data<Model.List<CatalogName>>,
+  execution: Model.Value<QueryExecution>,
+): Model.ValueController<CatalogName> {
+  const [value, setValue] = React.useState<Model.Value<CatalogName>>()
+ React.useEffect(() => {
+ if (!Model.hasData(catalogNames)) {
+ setValue(catalogNames)
+ return
+ }
+ setValue((v) => {
+ if (
+ Model.hasData(execution) &&
+ execution.catalog &&
+ listIncludes(catalogNames.list, execution.catalog)
+ ) {
+ return execution.catalog
+ }
+ if (Model.hasData(v) && listIncludes(catalogNames.list, v)) {
+ return v
+ }
+ const initialCatalogName = storage.getCatalog()
+ if (initialCatalogName && listIncludes(catalogNames.list, initialCatalogName)) {
+ return initialCatalogName
+ }
+ return catalogNames.list[0] || null
+ })
+ }, [catalogNames, execution])
+ return React.useMemo(() => Model.wrapValue(value, setValue), [value])
+}
+
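+// Select the named query whose body matches the execution's query string;
+// otherwise keep the current selection or fall back to the first query.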
+export function useQuery(
+  queries: Model.Data<Model.List<Query>>,
+  execution: Model.Value<QueryExecution>,
+): Model.ValueController<Query> {
+  const [value, setValue] = React.useState<Model.Value<Query>>()
+ React.useEffect(() => {
+ if (!Model.hasData(queries)) {
+ setValue(queries)
+ return
+ }
+ setValue((v) => {
+ if (Model.hasData(execution) && execution.query) {
+ const executionQuery = queries.list.find((q) => execution.query === q.body)
+ return executionQuery || null
+ }
+ if (Model.hasData(v) && queries.list.includes(v)) {
+ return v
+ }
+ return queries.list[0] || null
+ })
+ }, [execution, queries])
+ return React.useMemo(() => Model.wrapValue(value, setValue), [value])
+}
+
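+// Keep the editable query body in sync with the selected named query or the loaded execution;
+// editing the body manually resets the named query selection.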
+export function useQueryBody(
+  query: Model.Value<Query>,
+  setQuery: (value: null) => void,
+  execution: Model.Value<QueryExecution>,
+): Model.ValueController<string> {
+  const [value, setValue] = React.useState<Model.Value<string>>()
+ React.useEffect(() => {
+ if (!Model.isReady(query)) {
+ setValue(query)
+ return
+ }
+ setValue((v) => {
+ if (Model.isError(query)) return null
+ if (Model.hasData(query)) return query.body
+ if (Model.hasData(execution) && execution.query) return execution.query
+ return v
+ })
+ }, [execution, query])
+ const handleValue = React.useCallback(
+ (v: string | null) => {
+ setQuery(null)
+ setValue(v)
+ },
+ [setQuery],
+ )
+ return React.useMemo(() => Model.wrapValue(value, handleValue), [value, handleValue])
+}
+
+export interface ExecutionContext {
+ catalogName: CatalogName
+ database: Database
+}
+
+export const NO_DATABASE = new Error('No database')
+
+interface QueryRunArgs {
+  workgroup: Model.Data<Workgroup>
+  catalogName: Model.Value<CatalogName>
+  database: Model.Value<Database>
+  queryBody: Model.Value<string>
+}
+
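+// Validate that workgroup, catalog, database and query body are ready,
+// then start an Athena query execution and return its id (or an Error).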
+export function useQueryRun({
+ workgroup,
+ catalogName,
+ database,
+ queryBody,
+}: QueryRunArgs): [
+  Model.Value<QueryRun>,
+  (force: boolean) => Promise<Model.Value<QueryRun>>,
+] {
+ const athena = AWS.Athena.use()
+  // `undefined` — not initialized yet, so not ready to run
+  // `null` — ready to run, but no run has been submitted yet
+  const [value, setValue] = React.useState<Model.Value<QueryRun>>()
+ const prepare = React.useCallback(
+ (forceDefaultExecutionContext?: boolean) => {
+ if (!Model.hasData(workgroup)) {
+ return new Error('No workgroup')
+ }
+
+ if (!Model.hasValue(catalogName)) {
+ return catalogName
+ }
+
+ if (!Model.hasValue(database)) {
+ return database
+ }
+ if (!database && !forceDefaultExecutionContext) {
+        // We only check that a database is selected: if no catalog name is selected,
+        // then no databases are loaded and no database can be selected either
+ return NO_DATABASE
+ }
+
+ if (!Model.hasData(queryBody)) {
+ return queryBody
+ }
+ return { workgroup, catalogName, database, queryBody }
+ },
+ [workgroup, catalogName, database, queryBody],
+ )
+ React.useEffect(() => {
+ const init = prepare(true)
+ setValue(Model.hasData(init) ? null : undefined)
+ }, [prepare])
+ const run = React.useCallback(
+ async (forceDefaultExecutionContext: boolean) => {
+ const init = prepare(forceDefaultExecutionContext)
+ if (!Model.hasData(init)) {
+        // Errors should not reach this point because they were already handled in `prepare`,
+        // except for `NO_DATABASE` or a programming mistake
+ setValue(init)
+ return init
+ }
+
+ const options: Athena.Types.StartQueryExecutionInput = {
+ QueryString: init.queryBody,
+ ResultConfiguration: {
+ EncryptionConfiguration: {
+ EncryptionOption: 'SSE_S3',
+ },
+ },
+ WorkGroup: init.workgroup,
+ }
+ if (init.catalogName && init.database) {
+ options.QueryExecutionContext = {
+ Catalog: init.catalogName,
+ Database: init.database,
+ }
+ }
+ setValue(Model.Loading)
+ try {
+ const d = await athena?.startQueryExecution(options).promise()
+ const { QueryExecutionId } = d || {}
+ if (!QueryExecutionId) {
+ const error = new Error('No execution id')
+ Log.error(error)
+ setValue(error)
+ return error
+ }
+ const output = { id: QueryExecutionId }
+ setValue(output)
+ return output
+ } catch (error) {
+ if (error) {
+ Log.error(error)
+ if (error instanceof Error) {
+ setValue(error)
+ }
+ }
+ return error as Error
+ }
+ },
+ [athena, prepare],
+ )
+ return [value, run]
+}
diff --git a/catalog/app/containers/Bucket/Queries/Athena/model/state.spec.tsx b/catalog/app/containers/Bucket/Queries/Athena/model/state.spec.tsx
new file mode 100644
index 00000000000..27bfad46eb6
--- /dev/null
+++ b/catalog/app/containers/Bucket/Queries/Athena/model/state.spec.tsx
@@ -0,0 +1,110 @@
+import * as React from 'react'
+import renderer from 'react-test-renderer'
+import { act, renderHook } from '@testing-library/react-hooks'
+
+import * as Model from './'
+
+jest.mock('utils/NamedRoutes', () => ({
+ ...jest.requireActual('utils/NamedRoutes'),
+ use: jest.fn(() => ({
+ urls: {
+ bucketAthenaExecution: () => 'bucket-route',
+ bucketAthenaWorkgroup: () => 'workgroup-route',
+ },
+ })),
+}))
+
+const useParams = jest.fn(
+ () =>
+ ({
+ bucket: 'b',
+ workgroup: 'w',
+ }) as Record,
+)
+
+jest.mock('react-router-dom', () => ({
+ ...jest.requireActual('react-router-dom'),
+ useParams: jest.fn(() => useParams()),
+ Redirect: jest.fn(() => null),
+}))
+
+const batchGetQueryExecution = jest.fn()
+const getWorkGroup = jest.fn()
+const listDataCatalogs = jest.fn()
+const listDatabases = jest.fn()
+const listQueryExecutions = jest.fn()
+const listWorkGroups = jest.fn()
+const getQueryExecution = jest.fn()
+const listNamedQueries = jest.fn()
+const batchGetNamedQuery = jest.fn()
+const getQueryResults = jest.fn()
+const startQueryExecution = jest.fn()
+
+const AthenaApi = {
+ batchGetNamedQuery,
+ batchGetQueryExecution,
+ getQueryExecution,
+ getQueryResults,
+ getWorkGroup,
+ listDataCatalogs,
+ listDatabases,
+ listNamedQueries,
+ listQueryExecutions,
+ listWorkGroups,
+ startQueryExecution,
+}
+
+jest.mock('utils/AWS', () => ({ Athena: { use: () => AthenaApi } }))
+
+describe('app/containers/Queries/Athena/model/state', () => {
+ it('throw error when no bucket', () => {
+ jest.spyOn(console, 'error').mockImplementationOnce(jest.fn())
+ useParams.mockImplementationOnce(() => ({}))
+ const Component = () => {
+ const state = Model.useState()
+      return <>{JSON.stringify(state, null, 2)}</>
+ }
+ const tree = () =>
+ renderer.create(
+
+
+ ,
+ )
+ expect(tree).toThrowError('`bucket` must be defined')
+ })
+
+ it('load workgroups and set current workgroup', async () => {
+ listWorkGroups.mockImplementation(() => ({
+ promise: () =>
+ Promise.resolve({
+ WorkGroups: [{ Name: 'foo' }, { Name: 'bar' }, { Name: 'w' }],
+ }),
+ }))
+ getWorkGroup.mockImplementation(({ WorkGroup: Name }: { WorkGroup: string }) => ({
+ promise: () =>
+ Promise.resolve({
+ WorkGroup: {
+ Configuration: { ResultConfiguration: { OutputLocation: 'any' } },
+ State: 'ENABLED',
+ Name,
+ },
+ }),
+ }))
+ listQueryExecutions.mockImplementation((_x, cb) => {
+ cb(undefined, { QueryExecutionIds: [] })
+ return {
+ abort: jest.fn(),
+ }
+ })
+ const wrapper = ({ children }: { children: React.ReactNode }) => (
+      <Model.Provider>{children}</Model.Provider>
+ )
+ const { result, waitFor, unmount } = renderHook(() => Model.useState(), { wrapper })
+ await act(async () => {
+ await waitFor(() => typeof result.current.executions.data === 'object')
+ })
+ expect(result.current.workgroups.data).toMatchObject({ list: ['bar', 'foo', 'w'] })
+ expect(result.current.workgroup.data).toBe('w')
+ unmount()
+ })
+})
diff --git a/catalog/app/containers/Bucket/Queries/Athena/model/state.tsx b/catalog/app/containers/Bucket/Queries/Athena/model/state.tsx
new file mode 100644
index 00000000000..a7bc4631fcb
--- /dev/null
+++ b/catalog/app/containers/Bucket/Queries/Athena/model/state.tsx
@@ -0,0 +1,149 @@
+import invariant from 'invariant'
+import * as React from 'react'
+import * as RRDom from 'react-router-dom'
+
+import type * as BucketPreferences from 'utils/BucketPreferences'
+import * as NamedRoutes from 'utils/NamedRoutes'
+
+import * as requests from './requests'
+import * as Model from './utils'
+
+export interface State {
+ bucket: string
+ queryExecutionId?: string
+
+ /**
+ * Query execution loaded by id on the corresponding page.
+ * On the index page (where there is no queryExecutionId) its value is null.
+ */
+ execution: Model.Value
+
+ /** List of workgroups from Athena */
+ workgroups: Model.DataController>
+ /**
+   * Workgroup selected by the user explicitly or taken from the page URL,
+   * validated to actually exist.
+   * If the workgroup doesn't exist, its value is an Error.
+   * It can never be null.
+ */
+ workgroup: Model.DataController
+ /** List of named queries, including query body for each query */
+ queries: Model.DataController>
+ /** Selected named query */
+ query: Model.ValueController
+ /** Query body, typed by user or set from selected named query or query execution */
+ queryBody: Model.ValueController
+ /** List of catalog names from Athena */
+ catalogNames: Model.DataController>
+ /** Catalog name selected by user, or set initially */
+ catalogName: Model.ValueController
+ /** List of databases from Athena */
+ databases: Model.DataController>
+ /** Database selected by user, or set initially */
+ database: Model.ValueController
+  /** List of query executions, i.e. the execution history */
+ executions: Model.DataController>
+ /** Rows and columns of query results */
+ results: Model.DataController
+
+ /**
+   * Submit the query to Athena with the values memoized here in state.
+   * If no catalog name or database is selected, it returns a specific output
+   * (`NO_DATABASE`), which is handled so the user can re-submit with
+   * `forceDefaultExecutionContext: true`.
+ */
+ submit: (
+ forceDefaultExecutionContext: boolean,
+ ) => Promise>
+ /**
+   * The query run is `undefined` when there is not enough data to run the query,
+   * `null` when it is ready to run,
+   * and an Error when the submit or the validation failed (e.g. no database selected)
+ */
+ queryRun: Model.Value
+}
+
+export const Ctx = React.createContext(null)
+
+interface ProviderProps {
+ preferences?: BucketPreferences.AthenaPreferences
+ children: React.ReactNode
+}
+
+export function Provider({ preferences, children }: ProviderProps) {
+ const { urls } = NamedRoutes.use()
+
+ const {
+ bucket,
+ queryExecutionId,
+ workgroup: workgroupId,
+ } = RRDom.useParams<{
+ bucket: string
+ queryExecutionId?: string
+ workgroup?: requests.Workgroup
+ }>()
+ invariant(!!bucket, '`bucket` must be defined')
+
+ const execution = requests.useWaitForQueryExecution(queryExecutionId)
+
+ const workgroups = requests.useWorkgroups()
+ const workgroup = requests.useWorkgroup(workgroups, workgroupId, preferences)
+ const queries = requests.useQueries(workgroup.data)
+ const query = requests.useQuery(queries.data, execution)
+ const queryBody = requests.useQueryBody(query.value, query.setValue, execution)
+ const catalogNames = requests.useCatalogNames()
+ const catalogName = requests.useCatalogName(catalogNames.data, execution)
+ const databases = requests.useDatabases(catalogName.value)
+ const database = requests.useDatabase(databases.data, execution)
+ const executions = requests.useExecutions(workgroup.data, queryExecutionId)
+ const results = requests.useResults(execution)
+
+ const [queryRun, submit] = requests.useQueryRun({
+ workgroup: workgroup.data,
+ catalogName: catalogName.value,
+ database: database.value,
+ queryBody: queryBody.value,
+ })
+
+ const value: State = {
+ bucket,
+ queryExecutionId,
+ workgroup,
+
+ catalogName,
+ catalogNames,
+ database,
+ databases,
+ execution,
+ executions,
+ queries,
+ query,
+ queryBody,
+ results,
+ workgroups,
+
+ submit,
+ queryRun,
+ }
+
+ if (Model.hasData(queryRun) && queryExecutionId !== queryRun.id) {
+ return (
+
+ )
+ }
+
+ if (Model.hasData(workgroup.data) && !workgroupId) {
+ return
+ }
+
+  return <Ctx.Provider value={value}>{children}</Ctx.Provider>
+}
+
+/** The state object is not memoized; destructure it down to the memoized properties */
+export function useState() {
+ const model = React.useContext(Ctx)
+ invariant(model, 'Athena state accessed outside of provider')
+ return model
+}
+
+export const use = useState
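+
+// Usage sketch (illustrative only; the consuming component is hypothetical and
+// `Model` stands for this model's index module):
+//
+//   function WorkgroupsInfo() {
+//     const { workgroups } = Model.use()
+//     if (Model.isError(workgroups.data)) return <span>{workgroups.data.message}</span>
+//     if (!Model.hasData(workgroups.data)) return <span>Loading…</span>
+//     return <span>{workgroups.data.list.length} workgroups available</span>
+//   }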
diff --git a/catalog/app/containers/Bucket/Queries/Athena/model/storage.ts b/catalog/app/containers/Bucket/Queries/Athena/model/storage.ts
new file mode 100644
index 00000000000..1c1fd07d5bc
--- /dev/null
+++ b/catalog/app/containers/Bucket/Queries/Athena/model/storage.ts
@@ -0,0 +1,28 @@
+import mkStorage from 'utils/storage'
+
+const ATHENA_WORKGROUP_KEY = 'ATHENA_WORKGROUP'
+
+const ATHENA_CATALOG_KEY = 'ATHENA_CATALOG'
+
+const ATHENA_DATABASE_KEY = 'ATHENA_DATABASE'
+
+const storage = mkStorage({
+ athenaCatalog: ATHENA_CATALOG_KEY,
+ athenaDatabase: ATHENA_DATABASE_KEY,
+ athenaWorkgroup: ATHENA_WORKGROUP_KEY,
+})
+
+export const getCatalog = () => storage.get('athenaCatalog')
+
+export const setCatalog = (catalog: string) => storage.set('athenaCatalog', catalog)
+
+export const getDatabase = () => storage.get('athenaDatabase')
+
+export const setDatabase = (database: string) => storage.set('athenaDatabase', database)
+
+export const clearDatabase = () => storage.remove('athenaDatabase')
+
+export const getWorkgroup = () => storage.get('athenaWorkgroup')
+
+export const setWorkgroup = (workgroup: string) =>
+ storage.set('athenaWorkgroup', workgroup)
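+
+// Usage sketch (illustrative only; the catalog name is an arbitrary example):
+//
+//   setCatalog('AwsDataCatalog')
+//   getCatalog() // → 'AwsDataCatalog' on the next visit, so the previous
+//                // selection can be restored as the initial value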
diff --git a/catalog/app/containers/Bucket/Queries/Athena/model/utils.ts b/catalog/app/containers/Bucket/Queries/Athena/model/utils.ts
new file mode 100644
index 00000000000..429b8f144d3
--- /dev/null
+++ b/catalog/app/containers/Bucket/Queries/Athena/model/utils.ts
@@ -0,0 +1,96 @@
+export const Loading = Symbol('loading')
+
+export type Maybe = T | null
+
+// `T` is the loaded data
+// `undefined` means no data: not initialized yet
+// `Loading` means the data is being loaded
+// `Error` means loading failed
+export type Data = T | undefined | typeof Loading | Error
+
+export interface DataController {
+ data: Data
+ loadMore: () => void
+}
+
+export function wrapData(data: Data, setPrev: (d: T) => void): DataController {
+ return {
+ data,
+ loadMore: () => hasData(data) && setPrev(data),
+ }
+}
+
+export interface List {
+ list: T[]
+ next?: string
+}
+
+// `T` is the selected value
+// `null` means no value, explicitly set by the user
+// `undefined` means no value: not initialized yet
+// `Loading` means the value is waiting for data
+// `Error` means resolving the value failed
+export type Value = Maybe>
+
+export interface ValueController {
+ value: Value
+ setValue: (v: T | null) => void
+}
+
+export function wrapValue(
+ value: Value,
+ setValue: (d: T | null) => void,
+): ValueController {
+ return {
+ value,
+ setValue,
+ }
+}
+
+/** Data is loaded, or the value is set to an actual value */
+export function hasData(value: Value): value is T {
+ if (
+ value === undefined ||
+ value === Loading ||
+ value instanceof Error ||
+ value === null
+ ) {
+ return false
+ }
+ return true
+}
+
+/** No value yet: value or data was just initialized */
+export function isNone(value: Value): value is undefined {
+ return value === undefined
+}
+
+/** Data is loading, or value is waiting for data */
+export function isLoading(value: Value): value is typeof Loading {
+ return value === Loading
+}
+
+export function isError(value: Value): value is Error {
+ return value instanceof Error
+}
+
+/** Value is selected (possibly empty) or resolved with an error, or data has finished loading (successfully or not) */
+export function isReady(value: Value): value is T | null | Error {
+ if (value === undefined || value === Loading) {
+ return false
+ }
+ return true
+}
+
+/** Value is selected (possibly empty), or data is loaded successfully */
+export function hasValue(value: Value): value is T | null {
+ if (value === undefined || value === Loading || value instanceof Error) {
+ return false
+ }
+ return true
+}
+
+/** User explicitly set no value */
+export function isNoneSelected(value: Value): value is null {
+ return value === null
+}
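+
+// Usage sketch (illustrative only): exhaustively narrowing a `Value` with the
+// type guards above, e.g. when deciding what to render for a selection.
+//
+//   function describeValue<T>(value: Value<T>): string {
+//     if (isNone(value)) return 'not initialized yet'
+//     if (isLoading(value)) return 'loading'
+//     if (isError(value)) return `failed: ${value.message}`
+//     if (isNoneSelected(value)) return 'nothing selected'
+//     return `selected: ${String(value)}`
+//   }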
diff --git a/catalog/app/containers/Bucket/Queries/ElasticSearch.tsx b/catalog/app/containers/Bucket/Queries/ElasticSearch.tsx
index c9d6d23d071..03b793435fa 100644
--- a/catalog/app/containers/Bucket/Queries/ElasticSearch.tsx
+++ b/catalog/app/containers/Bucket/Queries/ElasticSearch.tsx
@@ -24,9 +24,6 @@ const useStyles = M.makeStyles((t) => ({
form: {
margin: t.spacing(0, 0, 4),
},
- sectionHeader: {
- margin: t.spacing(0, 0, 1),
- },
select: {
margin: t.spacing(3, 0),
},
@@ -62,7 +59,7 @@ interface QueriesStateRenderProps {
error: Error | null
handleError: (error: Error | null) => void
handleQueryBodyChange: (q: requests.ElasticSearchQuery | null) => void
- handleQueryMetaChange: (q: requests.Query | requests.athena.AthenaQuery | null) => void
+ handleQueryMetaChange: (q: requests.Query | requests.athena.Query | null) => void
handleSubmit: (q: requests.ElasticSearchQuery) => () => void
queries: requests.Query[]
queryData: requests.AsyncData
@@ -99,7 +96,7 @@ function QueriesState({ bucket, children }: QueriesStateProps) {
)
const handleQueryMetaChange = React.useCallback(
- (q: requests.athena.AthenaQuery | requests.Query | null) => {
+ (q: requests.athena.Query | requests.Query | null) => {
setQueryMeta(q as requests.Query | null)
setCustomQueryBody(null)
},
@@ -212,10 +209,8 @@ export default function ElastiSearch() {
ElasticSearch queries
-
- Select query
-
+ label="Select query"
queries={queries}
onChange={handleQueryMetaChange}
value={customQueryBody ? null : queryMeta}
diff --git a/catalog/app/containers/Bucket/Queries/QuerySelect.spec.tsx b/catalog/app/containers/Bucket/Queries/QuerySelect.spec.tsx
index 61425326cd4..f681ed82b50 100644
--- a/catalog/app/containers/Bucket/Queries/QuerySelect.spec.tsx
+++ b/catalog/app/containers/Bucket/Queries/QuerySelect.spec.tsx
@@ -6,7 +6,7 @@ import QuerySelect from './QuerySelect'
describe('containers/Bucket/Queries/QuerySelect', () => {
it('should render', () => {
const tree = renderer
- .create( {}} value={null} />)
+ .create( {}} value={null} />)
.toJSON()
expect(tree).toMatchSnapshot()
})
@@ -16,7 +16,14 @@ describe('containers/Bucket/Queries/QuerySelect', () => {
{ key: 'key2', name: 'name2', url: 'url2' },
]
const tree = renderer
- .create( {}} value={queries[1]} />)
+ .create(
+ {}}
+ value={queries[1]}
+ />,
+ )
.toJSON()
expect(tree).toMatchSnapshot()
})
diff --git a/catalog/app/containers/Bucket/Queries/QuerySelect.tsx b/catalog/app/containers/Bucket/Queries/QuerySelect.tsx
index 38bb51218bc..d0f30c71684 100644
--- a/catalog/app/containers/Bucket/Queries/QuerySelect.tsx
+++ b/catalog/app/containers/Bucket/Queries/QuerySelect.tsx
@@ -8,34 +8,26 @@ interface AbstractQuery {
}
interface QuerySelectProps {
+ className?: string
+ disabled?: boolean
+ label: React.ReactNode
onChange: (value: T | null) => void
onLoadMore?: () => void
queries: T[]
value: T | null
}
-const useStyles = M.makeStyles((t) => ({
- header: {
- margin: t.spacing(0, 0, 1),
- },
- selectWrapper: {
- width: '100%',
- },
- select: {
- padding: t.spacing(1),
- },
-}))
-
const LOAD_MORE = 'load-more'
export default function QuerySelect({
- queries,
+ className,
+ disabled,
+ label,
onChange,
onLoadMore,
+ queries,
value,
}: QuerySelectProps) {
- const classes = useStyles()
-
const handleChange = React.useCallback(
(event) => {
if (event.target.value === LOAD_MORE && onLoadMore) {
@@ -48,31 +40,29 @@ export default function QuerySelect({
)
return (
-
-
-
-
- Custom
+
+ {label}
+
+
+ Custom
+
+ {queries.map((query) => (
+
+
+
+ ))}
+ {!!onLoadMore && (
+
+
+ Load more
+
- {queries.map((query) => (
-
-
-
- ))}
- {!!onLoadMore && (
-
-
- Load more
-
-
- )}
-
-
-
+ )}
+
+
)
}
diff --git a/catalog/app/containers/Bucket/Queries/__snapshots__/QuerySelect.spec.tsx.snap b/catalog/app/containers/Bucket/Queries/__snapshots__/QuerySelect.spec.tsx.snap
index 03981390857..ff49164ebb1 100644
--- a/catalog/app/containers/Bucket/Queries/__snapshots__/QuerySelect.spec.tsx.snap
+++ b/catalog/app/containers/Bucket/Queries/__snapshots__/QuerySelect.spec.tsx.snap
@@ -2,111 +2,115 @@
exports[`containers/Bucket/Queries/QuerySelect should render 1`] = `
+
-
-
- Custom
-
-
+ Custom
+
-
-
+
+
`;
exports[`containers/Bucket/Queries/QuerySelect should render with selected value 1`] = `
+
-
-
- name2
-
-
+ name2
+
-
-
+
+
`;
diff --git a/catalog/app/containers/Bucket/Queries/requests/athena.ts b/catalog/app/containers/Bucket/Queries/requests/athena.ts
deleted file mode 100644
index 135976b574a..00000000000
--- a/catalog/app/containers/Bucket/Queries/requests/athena.ts
+++ /dev/null
@@ -1,582 +0,0 @@
-import Athena from 'aws-sdk/clients/athena'
-import * as React from 'react'
-
-import * as AWS from 'utils/AWS'
-import * as BucketPreferences from 'utils/BucketPreferences'
-import { useData } from 'utils/Data'
-import wait from 'utils/wait'
-
-import * as storage from './storage'
-
-import { AsyncData } from './requests'
-
-// TODO: rename to requests.athena.Query
-export interface AthenaQuery {
- body: string
- description?: string
- key: string
- name: string
-}
-
-export interface QueriesResponse {
- list: AthenaQuery[]
- next?: string
-}
-
-interface QueriesArgs {
- athena: Athena
- prev: QueriesResponse | null
- workgroup: string
-}
-
-function parseNamedQuery(query: Athena.NamedQuery): AthenaQuery {
- return {
- body: query.QueryString,
- description: query.Description,
- key: query.NamedQueryId!,
- name: query.Name,
- }
-}
-
-async function fetchQueries({
- athena,
- prev,
- workgroup,
-}: QueriesArgs): Promise {
- try {
- const queryIdsOutput = await athena
- ?.listNamedQueries({ WorkGroup: workgroup, NextToken: prev?.next })
- .promise()
- if (!queryIdsOutput.NamedQueryIds || !queryIdsOutput.NamedQueryIds.length)
- return {
- list: prev?.list || [],
- next: queryIdsOutput.NextToken,
- }
-
- const queriesOutput = await athena
- ?.batchGetNamedQuery({
- NamedQueryIds: queryIdsOutput.NamedQueryIds,
- })
- .promise()
- const parsed = (queriesOutput.NamedQueries || []).map(parseNamedQuery)
- const list = (prev?.list || []).concat(parsed)
- return {
- list,
- next: queryIdsOutput.NextToken,
- }
- } catch (e) {
- // eslint-disable-next-line no-console
- console.log('Unable to fetch')
- // eslint-disable-next-line no-console
- console.error(e)
- throw e
- }
-}
-
-export function useQueries(
- workgroup: string,
- prev: QueriesResponse | null,
-): AsyncData {
- const athena = AWS.Athena.use()
- return useData(fetchQueries, { athena, prev, workgroup }, { noAutoFetch: !workgroup })
-}
-
-export type Workgroup = string
-
-function getDefaultWorkgroup(
- list: Workgroup[],
- preferences?: BucketPreferences.AthenaPreferences,
-): Workgroup {
- const workgroupFromConfig = preferences?.defaultWorkgroup
- if (workgroupFromConfig && list.includes(workgroupFromConfig)) {
- return workgroupFromConfig
- }
- return storage.getWorkgroup() || list[0]
-}
-
-interface WorkgroupArgs {
- athena: Athena
- workgroup: Workgroup
-}
-
-async function fetchWorkgroup({
- athena,
- workgroup,
-}: WorkgroupArgs): Promise {
- try {
- const workgroupOutput = await athena.getWorkGroup({ WorkGroup: workgroup }).promise()
- if (
- workgroupOutput?.WorkGroup?.Configuration?.ResultConfiguration?.OutputLocation &&
- workgroupOutput?.WorkGroup?.State === 'ENABLED' &&
- workgroupOutput?.WorkGroup?.Name
- ) {
- return workgroupOutput.WorkGroup.Name
- }
- return null
- } catch (error) {
- return null
- }
-}
-
-export interface WorkgroupsResponse {
- defaultWorkgroup: Workgroup
- list: Workgroup[]
- next?: string
-}
-
-interface WorkgroupsArgs {
- athena: Athena
- prev: WorkgroupsResponse | null
- preferences?: BucketPreferences.AthenaPreferences
-}
-
-async function fetchWorkgroups({
- athena,
- prev,
- preferences,
-}: WorkgroupsArgs): Promise {
- try {
- const workgroupsOutput = await athena
- .listWorkGroups({ NextToken: prev?.next })
- .promise()
- const parsed = (workgroupsOutput.WorkGroups || []).map(
- ({ Name }) => Name || 'Unknown',
- )
- const available = (
- await Promise.all(parsed.map((workgroup) => fetchWorkgroup({ athena, workgroup })))
- ).filter(Boolean)
- const list = (prev?.list || []).concat(available as Workgroup[])
- return {
- defaultWorkgroup: getDefaultWorkgroup(list, preferences),
- list,
- next: workgroupsOutput.NextToken,
- }
- } catch (e) {
- // eslint-disable-next-line no-console
- console.log('Unable to fetch')
- // eslint-disable-next-line no-console
- console.error(e)
- throw e
- }
-}
-
-export function useWorkgroups(
- prev: WorkgroupsResponse | null,
-): AsyncData {
- const athena = AWS.Athena.use()
- const prefs = BucketPreferences.use()
- const preferences = React.useMemo(
- () =>
- BucketPreferences.Result.match(
- {
- Ok: ({ ui }) => ui.athena,
- _: () => undefined,
- },
- prefs,
- ),
- [prefs],
- )
- return useData(fetchWorkgroups, { athena, prev, preferences })
-}
-
-export interface QueryExecution {
- catalog?: string
- completed?: Date
- created?: Date
- db?: string
- error?: Error
- id?: string
- outputBucket?: string
- query?: string
- status?: string // 'QUEUED' | 'RUNNING' | 'SUCCEEDED' | 'FAILED' | 'CANCELLED'
- workgroup?: Athena.WorkGroupName
-}
-
-export interface QueryExecutionsResponse {
- list: QueryExecution[]
- next?: string
-}
-
-interface QueryExecutionsArgs {
- athena: Athena
- prev: QueryExecutionsResponse | null
- workgroup: string
-}
-
-function parseQueryExecution(queryExecution: Athena.QueryExecution): QueryExecution {
- return {
- catalog: queryExecution?.QueryExecutionContext?.Catalog,
- completed: queryExecution?.Status?.CompletionDateTime,
- created: queryExecution?.Status?.SubmissionDateTime,
- db: queryExecution?.QueryExecutionContext?.Database,
- id: queryExecution?.QueryExecutionId,
- outputBucket: queryExecution?.ResultConfiguration?.OutputLocation,
- query: queryExecution?.Query,
- status: queryExecution?.Status?.State,
- workgroup: queryExecution?.WorkGroup,
- }
-}
-
-function parseQueryExecutionError(
- error: Athena.UnprocessedQueryExecutionId,
-): QueryExecution {
- return {
- error: new Error(error?.ErrorMessage || 'Unknown'),
- id: error?.QueryExecutionId,
- }
-}
-
-async function fetchQueryExecutions({
- athena,
- prev,
- workgroup,
-}: QueryExecutionsArgs): Promise {
- try {
- const executionIdsOutput = await athena
- .listQueryExecutions({ WorkGroup: workgroup, NextToken: prev?.next })
- .promise()
-
- const ids = executionIdsOutput.QueryExecutionIds
- if (!ids || !ids.length)
- return {
- list: [],
- next: executionIdsOutput.NextToken,
- }
-
- const executionsOutput = await athena
- ?.batchGetQueryExecution({ QueryExecutionIds: ids })
- .promise()
- const parsed = (executionsOutput.QueryExecutions || [])
- .map(parseQueryExecution)
- .concat(
- (executionsOutput.UnprocessedQueryExecutionIds || []).map(
- parseQueryExecutionError,
- ),
- )
- const list = (prev?.list || []).concat(parsed)
- return {
- list,
- next: executionIdsOutput.NextToken,
- }
- } catch (e) {
- // eslint-disable-next-line no-console
- console.log('Unable to fetch')
- // eslint-disable-next-line no-console
- console.error(e)
- throw e
- }
-}
-
-export function useQueryExecutions(
- workgroup: string,
- prev: QueryExecutionsResponse | null,
-): AsyncData {
- const athena = AWS.Athena.use()
- return useData(
- fetchQueryExecutions,
- { athena, prev, workgroup },
- { noAutoFetch: !workgroup },
- )
-}
-
-async function waitForQueryStatus(
- athena: Athena,
- QueryExecutionId: string,
-): Promise {
- // eslint-disable-next-line no-constant-condition
- while (true) {
- // NOTE: await is used to intentionally pause loop and make requests in series
- // eslint-disable-next-line no-await-in-loop
- const statusData = await athena.getQueryExecution({ QueryExecutionId }).promise()
- const status = statusData?.QueryExecution?.Status?.State
- const parsed = statusData?.QueryExecution
- ? parseQueryExecution(statusData?.QueryExecution)
- : {
- id: QueryExecutionId,
- }
- if (status === 'FAILED' || status === 'CANCELLED') {
- const reason = statusData?.QueryExecution?.Status?.StateChangeReason || ''
- return {
- ...parsed,
- error: new Error(`${status}: ${reason}`),
- }
- }
-
- if (!status) {
- return {
- ...parsed,
- error: new Error('Unknown query execution status'),
- }
- }
-
- if (status === 'SUCCEEDED') {
- return parsed
- }
-
- // eslint-disable-next-line no-await-in-loop
- await wait(1000)
- }
-}
-
-export type QueryResultsValue = Athena.datumString
-
-export interface QueryResultsColumnInfo {
- name: Athena.String
- type: Athena.String
-}
-
-export type QueryResultsColumns = QueryResultsColumnInfo[]
-type Row = QueryResultsValue[]
-export type QueryResultsRows = Row[]
-
-export interface QueryResultsResponse {
- columns: QueryResultsColumns
- next?: string
- queryExecution: QueryExecution
- rows: QueryResultsRows
-}
-
-type ManifestKey = 'hash' | 'logical_key' | 'meta' | 'physical_keys' | 'size'
-
-export interface QueryManifestsResponse extends QueryResultsResponse {
- rows: [ManifestKey[], ...string[][]]
-}
-
-interface QueryResultsArgs {
- athena: Athena
- queryExecutionId: string
- prev: QueryResultsResponse | null
-}
-
-const emptyRow: Row = []
-const emptyList: QueryResultsRows = []
-const emptyColumns: QueryResultsColumns = []
-
-async function fetchQueryResults({
- athena,
- queryExecutionId,
- prev,
-}: QueryResultsArgs): Promise {
- const queryExecution = await waitForQueryStatus(athena, queryExecutionId)
- if (queryExecution.error) {
- return {
- rows: emptyList,
- columns: emptyColumns,
- queryExecution,
- }
- }
-
- try {
- const queryResultsOutput = await athena
- .getQueryResults({
- QueryExecutionId: queryExecutionId,
- NextToken: prev?.next,
- })
- .promise()
- const parsed =
- queryResultsOutput.ResultSet?.Rows?.map(
- (row) => row?.Data?.map((item) => item?.VarCharValue || '') || emptyRow,
- ) || emptyList
- const rows = [...(prev?.rows || emptyList), ...parsed]
- const columns =
- queryResultsOutput.ResultSet?.ResultSetMetadata?.ColumnInfo?.map(
- ({ Name, Type }) => ({
- name: Name,
- type: Type,
- }),
- ) || emptyColumns
- const isHeadColumns = columns.every(({ name }, index) => name === rows[0][index])
- return {
- rows: isHeadColumns ? rows.slice(1) : rows,
- columns,
- next: queryResultsOutput.NextToken,
- queryExecution,
- }
- } catch (error) {
- return {
- rows: emptyList,
- columns: emptyColumns,
- queryExecution: {
- ...queryExecution,
- error: error instanceof Error ? error : new Error(`${error}`),
- },
- }
- }
-}
-
-export function useQueryResults(
- queryExecutionId: string | null,
- prev: QueryResultsResponse | null,
-): AsyncData {
- const athena = AWS.Athena.use()
- return useData(
- fetchQueryResults,
- { athena, prev, queryExecutionId },
- { noAutoFetch: !queryExecutionId },
- )
-}
-
-export interface QueryRunResponse {
- id: string
-}
-
-export type CatalogName = string
-export interface CatalogNamesResponse {
- list: CatalogName[]
- next?: string
-}
-
-interface CatalogNamesArgs {
- athena: Athena
- prev?: CatalogNamesResponse
-}
-
-async function fetchCatalogNames({
- athena,
- prev,
-}: CatalogNamesArgs): Promise {
- const catalogsOutput = await athena
- ?.listDataCatalogs({ NextToken: prev?.next })
- .promise()
- const list =
- catalogsOutput?.DataCatalogsSummary?.map(
- ({ CatalogName }) => CatalogName || 'Unknown',
- ) || []
- return {
- list: (prev?.list || []).concat(list),
- next: catalogsOutput.NextToken,
- }
-}
-
-export function useCatalogNames(
- prev: CatalogNamesResponse | null,
-): AsyncData {
- const athena = AWS.Athena.use()
- return useData(fetchCatalogNames, { athena, prev })
-}
-
-export type Database = string
-export interface DatabasesResponse {
- list: CatalogName[]
- next?: string
-}
-
-interface DatabasesArgs {
- athena: Athena
- catalogName: CatalogName
- prev?: DatabasesResponse
-}
-
-async function fetchDatabases({
- athena,
- catalogName,
- prev,
-}: DatabasesArgs): Promise {
- const databasesOutput = await athena
- ?.listDatabases({ CatalogName: catalogName, NextToken: prev?.next })
- .promise()
- // TODO: add `Description` besides `Name`
- const list = databasesOutput?.DatabaseList?.map(({ Name }) => Name || 'Unknown') || []
- return {
- list: (prev?.list || []).concat(list),
- next: databasesOutput.NextToken,
- }
-}
-
-export function useDatabases(
- catalogName: CatalogName | null,
- prev: DatabasesResponse | null,
-): AsyncData {
- const athena = AWS.Athena.use()
- return useData(
- fetchDatabases,
- { athena, catalogName, prev },
- { noAutoFetch: !catalogName },
- )
-}
-
-interface DefaultDatabaseArgs {
- athena: Athena
-}
-
-async function fetchDefaultQueryExecution({
- athena,
-}: DefaultDatabaseArgs): Promise {
- const catalogNames = await fetchCatalogNames({ athena })
- if (!catalogNames.list.length) {
- return null
- }
- const catalogName = catalogNames.list[0]
- const databases = await fetchDatabases({ athena, catalogName })
- if (!databases.list.length) {
- return null
- }
- return {
- catalog: catalogName,
- db: databases.list[0],
- }
-}
-
-export function useDefaultQueryExecution(): AsyncData {
- const athena = AWS.Athena.use()
- return useData(fetchDefaultQueryExecution, { athena })
-}
-
-export interface ExecutionContext {
- catalogName: CatalogName
- database: Database
-}
-
-interface RunQueryArgs {
- athena: Athena
- queryBody: string
- workgroup: string
- executionContext: ExecutionContext | null
-}
-
-export async function runQuery({
- athena,
- queryBody,
- workgroup,
- executionContext,
-}: RunQueryArgs): Promise {
- try {
- const options: Athena.Types.StartQueryExecutionInput = {
- QueryString: queryBody,
- ResultConfiguration: {
- EncryptionConfiguration: {
- EncryptionOption: 'SSE_S3',
- },
- },
- WorkGroup: workgroup,
- }
- if (executionContext) {
- options.QueryExecutionContext = {
- Catalog: executionContext.catalogName,
- Database: executionContext.database,
- }
- }
- const { QueryExecutionId } = await athena.startQueryExecution(options).promise()
- if (!QueryExecutionId) throw new Error('No execution id')
- return {
- id: QueryExecutionId,
- }
- } catch (e) {
- // eslint-disable-next-line no-console
- console.log('Unable to fetch')
- // eslint-disable-next-line no-console
- console.error(e)
- throw e
- }
-}
-
-export function useQueryRun(workgroup: string) {
- const athena = AWS.Athena.use()
- return React.useCallback(
- (queryBody: string, executionContext: ExecutionContext | null) => {
- if (!athena) return Promise.reject(new Error('No Athena available'))
- return runQuery({ athena, queryBody, workgroup, executionContext })
- },
- [athena, workgroup],
- )
-}
diff --git a/catalog/app/containers/Bucket/Queries/requests/index.ts b/catalog/app/containers/Bucket/Queries/requests/index.ts
index 0dc2f97cd35..b02355f69bb 100644
--- a/catalog/app/containers/Bucket/Queries/requests/index.ts
+++ b/catalog/app/containers/Bucket/Queries/requests/index.ts
@@ -1,4 +1,4 @@
-export * as athena from './athena'
+export type * as athena from '../Athena/model/requests'
export * from './queriesConfig'
diff --git a/catalog/app/containers/Bucket/Queries/requests/storage.ts b/catalog/app/containers/Bucket/Queries/requests/storage.ts
deleted file mode 100644
index 5cbcc8d7b64..00000000000
--- a/catalog/app/containers/Bucket/Queries/requests/storage.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-import mkStorage from 'utils/storage'
-
-const ATHENA_WORKGROUP_KEY = 'ATHENA_WORKGROUP'
-
-const storage = mkStorage({ athenaWorkgroup: ATHENA_WORKGROUP_KEY })
-
-export const getWorkgroup = () => storage.get('athenaWorkgroup')
-
-export const setWorkgroup = (workgroup: string) =>
- storage.set('athenaWorkgroup', workgroup)
diff --git a/catalog/app/containers/Bucket/Successors.tsx b/catalog/app/containers/Bucket/Successors.tsx
index 3a0e5449b34..5fc1a51cc40 100644
--- a/catalog/app/containers/Bucket/Successors.tsx
+++ b/catalog/app/containers/Bucket/Successors.tsx
@@ -28,7 +28,7 @@ function EmptySlot({ bucket }: EmptySlotProps) {
Learn more
@@ -52,7 +52,7 @@ function ErrorSlot({ error }: ErrorSlotProps) {
{error instanceof ERRORS.WorkflowsConfigInvalid && (
Please fix the workflows config according to{' '}
-
+
the documentation
diff --git a/catalog/app/containers/Bucket/Summarize.tsx b/catalog/app/containers/Bucket/Summarize.tsx
index e644215263c..ebc2116b441 100644
--- a/catalog/app/containers/Bucket/Summarize.tsx
+++ b/catalog/app/containers/Bucket/Summarize.tsx
@@ -258,7 +258,7 @@ interface FilePreviewProps {
expanded?: boolean
file?: SummarizeFile
handle: LogicalKeyResolver.S3SummarizeHandle
- headingOverride: React.ReactNode
+ headingOverride?: React.ReactNode
packageHandle?: PackageHandle
}
@@ -270,7 +270,7 @@ export function FilePreview({
packageHandle,
}: FilePreviewProps) {
const description = file?.description ? : null
- const heading = headingOverride != null ? headingOverride :
+ const heading = headingOverride ??
const key = handle.logicalKey || handle.key
const props = React.useMemo(() => Preview.getRenderProps(key, file), [key, file])
@@ -566,7 +566,7 @@ interface SummaryRootProps {
s3: S3
bucket: string
inStack: boolean
- overviewUrl: string
+ overviewUrl?: string | null
}
export function SummaryRoot({ s3, bucket, inStack, overviewUrl }: SummaryRootProps) {
@@ -618,7 +618,9 @@ function SummaryFailed({ error }: SummaryFailedProps) {
Check your quilt_summarize.json file for errors.
See the{' '}
-
+
summarize docs
{' '}
for more.
diff --git a/catalog/app/containers/Bucket/errors.tsx b/catalog/app/containers/Bucket/errors.tsx
index fd45d3399ca..5a91f621431 100644
--- a/catalog/app/containers/Bucket/errors.tsx
+++ b/catalog/app/containers/Bucket/errors.tsx
@@ -124,7 +124,7 @@ const defaultHandlers: ErrorHandler[] = [
Learn how to configure the bucket for Quilt
@@ -167,7 +167,7 @@ const defaultHandlers: ErrorHandler[] = [
Learn about access control in Quilt
diff --git a/catalog/app/containers/Bucket/requests/requestsUntyped.js b/catalog/app/containers/Bucket/requests/requestsUntyped.js
index 2ba9722da61..5efb639049f 100644
--- a/catalog/app/containers/Bucket/requests/requestsUntyped.js
+++ b/catalog/app/containers/Bucket/requests/requestsUntyped.js
@@ -1,7 +1,6 @@
import { join as pathJoin } from 'path'
-import * as dateFns from 'date-fns'
-import * as FP from 'fp-ts'
+import * as Eff from 'effect'
import sampleSize from 'lodash/fp/sampleSize'
import * as R from 'ramda'
@@ -9,7 +8,6 @@ import quiltSummarizeSchema from 'schemas/quilt_summarize.json'
import { SUPPORTED_EXTENSIONS as IMG_EXTS } from 'components/Thumbnail'
import * as quiltConfigs from 'constants/quiltConfigs'
-import cfg from 'constants/config'
import * as Resource from 'utils/Resource'
import { makeSchemaValidator } from 'utils/json-schema'
import mkSearch from 'utils/mkSearch'
@@ -24,106 +22,6 @@ import { decodeS3Key } from './utils'
const promiseProps = (obj) =>
Promise.all(Object.values(obj)).then(R.zipObj(Object.keys(obj)))
-const MAX_BANDS = 10
-
-export const bucketAccessCounts = async ({ s3, bucket, today, window }) => {
- if (!cfg.analyticsBucket)
- throw new Error('bucketAccessCounts: "analyticsBucket" required')
-
- const dates = R.unfold(
- (daysLeft) => daysLeft >= 0 && [dateFns.subDays(today, daysLeft), daysLeft - 1],
- window,
- )
-
- try {
- const result = await s3Select({
- s3,
- Bucket: cfg.analyticsBucket,
- Key: `${ACCESS_COUNTS_PREFIX}/Exts.csv`,
- Expression: `
- SELECT ext, counts FROM s3object
- WHERE eventname = 'GetObject'
- AND bucket = '${sqlEscape(bucket)}'
- `,
- InputSerialization: {
- CSV: {
- FileHeaderInfo: 'Use',
- AllowQuotedRecordDelimiter: true,
- },
- },
- })
- return FP.function.pipe(
- result,
- R.map((r) => {
- const recordedCounts = JSON.parse(r.counts)
- const { counts, total } = dates.reduce(
- (acc, date) => {
- const value = recordedCounts[dateFns.format(date, 'yyyy-MM-dd')] || 0
- const sum = acc.total + value
- return {
- total: sum,
- counts: acc.counts.concat({ date, value, sum }),
- }
- },
- { total: 0, counts: [] },
- )
- return { ext: r.ext && `.${r.ext}`, total, counts }
- }),
- R.filter((i) => i.total),
- R.sort(R.descend(R.prop('total'))),
- R.applySpec({
- byExt: R.identity,
- byExtCollapsed: (bands) => {
- if (bands.length <= MAX_BANDS) return bands
- const [other, rest] = R.partition((b) => b.ext === '', bands)
- const [toKeep, toMerge] = R.splitAt(MAX_BANDS - 1, rest)
- const merged = [...other, ...toMerge].reduce((acc, band) => ({
- ext: '',
- total: acc.total + band.total,
- counts: R.zipWith(
- (a, b) => ({
- date: a.date,
- value: a.value + b.value,
- sum: a.sum + b.sum,
- }),
- acc.counts,
- band.counts,
- ),
- }))
- return R.sort(R.descend(R.prop('total')), toKeep.concat(merged))
- },
- combined: {
- total: R.reduce((sum, { total }) => sum + total, 0),
- counts: R.pipe(
- R.pluck('counts'),
- R.transpose,
- R.map(
- R.reduce(
- (acc, { date, value, sum }) => ({
- date,
- value: acc.value + value,
- sum: acc.sum + sum,
- }),
- { value: 0, sum: 0 },
- ),
- ),
- ),
- },
- }),
- )
- } catch (e) {
- // eslint-disable-next-line no-console
- console.log('Unable to fetch bucket access counts:')
- // eslint-disable-next-line no-console
- console.error(e)
- return {
- byExt: [],
- byExtCollapsed: [],
- combined: { total: 0, counts: [] },
- }
- }
-}
-
const parseDate = (d) => d && new Date(d)
const getOverviewBucket = (url) => s3paths.parseS3Url(url).bucket
@@ -373,7 +271,7 @@ export const bucketSummary = async ({ s3, req, bucket, overviewUrl, inStack }) =
Key: getOverviewKey(overviewUrl, 'summary.json'),
})
.promise()
- return FP.function.pipe(
+ return Eff.pipe(
JSON.parse(r.Body.toString('utf-8')),
R.pathOr([], ['aggregations', 'other', 'keys', 'buckets']),
R.map((b) => ({
@@ -403,7 +301,7 @@ export const bucketSummary = async ({ s3, req, bucket, overviewUrl, inStack }) =
try {
const qs = mkSearch({ action: 'sample', index: bucket })
const result = await req(`/search${qs}`)
- return FP.function.pipe(
+ return Eff.pipe(
result,
R.pathOr([], ['aggregations', 'objects', 'buckets']),
R.map((h) => {
@@ -425,7 +323,7 @@ export const bucketSummary = async ({ s3, req, bucket, overviewUrl, inStack }) =
const result = await s3
.listObjectsV2({ Bucket: bucket, EncodingType: 'url' })
.promise()
- return FP.function.pipe(
+ return Eff.pipe(
result,
R.path(['Contents']),
R.map(R.evolve({ Key: decodeS3Key })),
@@ -477,7 +375,7 @@ export const bucketImgs = async ({ req, s3, bucket, overviewUrl, inStack }) => {
Key: getOverviewKey(overviewUrl, 'summary.json'),
})
.promise()
- return FP.function.pipe(
+ return Eff.pipe(
JSON.parse(r.Body.toString('utf-8')),
R.pathOr([], ['aggregations', 'images', 'keys', 'buckets']),
R.map((b) => ({
@@ -498,7 +396,7 @@ export const bucketImgs = async ({ req, s3, bucket, overviewUrl, inStack }) => {
try {
const qs = mkSearch({ action: 'images', index: bucket })
const result = await req(`/search${qs}`)
- return FP.function.pipe(
+ return Eff.pipe(
result,
R.pathOr([], ['aggregations', 'objects', 'buckets']),
R.map((h) => {
@@ -519,7 +417,7 @@ export const bucketImgs = async ({ req, s3, bucket, overviewUrl, inStack }) => {
const result = await s3
.listObjectsV2({ Bucket: bucket, EncodingType: 'url' })
.promise()
- return FP.function.pipe(
+ return Eff.pipe(
result,
R.path(['Contents']),
R.map(R.evolve({ Key: decodeS3Key })),
@@ -656,8 +554,6 @@ export const summarize = async ({ s3, handle: inputHandle, resolveLogicalKey })
}
}
-const MANIFESTS_PREFIX = '.quilt/packages/'
-
const withCalculatedRevisions = (s) => ({
scripted_metric: {
init_script: `
@@ -712,113 +608,33 @@ export const countPackageRevisions = ({ req, bucket, name }) =>
.then(R.path(['aggregations', 'revisions', 'value']))
.catch(errors.catchErrors())
-// TODO: Preview endpoint only allows up to 512 lines right now. Increase it to 1000.
-const MAX_PACKAGE_ENTRIES = 500
-
-// TODO: remove
-export const getRevisionData = async ({
- endpoint,
- sign,
- bucket,
- hash,
- maxKeys = MAX_PACKAGE_ENTRIES,
-}) => {
- const url = sign({ bucket, key: `${MANIFESTS_PREFIX}${hash}` })
- const maxLines = maxKeys + 2 // 1 for the meta and 1 for checking overflow
- const r = await fetch(
- `${endpoint}/preview?url=${encodeURIComponent(url)}&input=txt&line_count=${maxLines}`,
- )
- const [header, ...entries] = await r
- .json()
- .then((json) => json.info.data.head.map((l) => JSON.parse(l)))
- const files = Math.min(maxKeys, entries.length)
- const bytes = entries.slice(0, maxKeys).reduce((sum, i) => sum + i.size, 0)
- const truncated = entries.length > maxKeys
- return {
- stats: { files, bytes, truncated },
- message: header.message,
- header,
- }
-}
-
-const s3Select = ({
- s3,
- ExpressionType = 'SQL',
- InputSerialization = { JSON: { Type: 'LINES' } },
- ...rest
-}) =>
- s3
- .selectObjectContent({
- ExpressionType,
- InputSerialization,
- OutputSerialization: { JSON: {} },
- ...rest,
- })
- .promise()
- .then(
- R.pipe(
- R.prop('Payload'),
- R.reduce((acc, evt) => {
- if (!evt.Records) return acc
- const s = evt.Records.Payload.toString()
- return acc + s
- }, ''),
- R.trim,
- R.ifElse(R.isEmpty, R.always([]), R.pipe(R.split('\n'), R.map(JSON.parse))),
- ),
- )
-
-const sqlEscape = (arg) => arg.replace(/'/g, "''")
+// const MANIFESTS_PREFIX = '.quilt/packages/'
-const ACCESS_COUNTS_PREFIX = 'AccessCounts'
-
-const queryAccessCounts = async ({ s3, type, query, today, window = 365 }) => {
- try {
- const records = await s3Select({
- s3,
- Bucket: cfg.analyticsBucket,
- Key: `${ACCESS_COUNTS_PREFIX}/${type}.csv`,
- Expression: query,
- InputSerialization: {
- CSV: {
- FileHeaderInfo: 'Use',
- AllowQuotedRecordDelimiter: true,
- },
- },
- })
-
- const recordedCounts = records.length ? JSON.parse(records[0].counts) : {}
-
- const counts = R.times((i) => {
- const date = dateFns.subDays(today, window - i - 1)
- return {
- date,
- value: recordedCounts[dateFns.format(date, 'yyyy-MM-dd')] || 0,
- }
- }, window)
-
- const total = Object.values(recordedCounts).reduce(R.add, 0)
-
- return { counts, total }
- } catch (e) {
- // eslint-disable-next-line no-console
- console.log('queryAccessCounts: error caught')
- // eslint-disable-next-line no-console
- console.error(e)
- throw e
- }
-}
-
-export const objectAccessCounts = ({ s3, bucket, path, today }) =>
- queryAccessCounts({
- s3,
- type: 'Objects',
- query: `
- SELECT counts FROM s3object
- WHERE eventname = 'GetObject'
- AND bucket = '${sqlEscape(bucket)}'
- AND "key" = '${sqlEscape(path)}'
- `,
- today,
- window: 365,
- })
+// TODO: Preview endpoint only allows up to 512 lines right now. Increase it to 1000.
+// const MAX_PACKAGE_ENTRIES = 500
+
+// TODO: remove: used in commented-out code in PackageList
+// export const getRevisionData = async ({
+// endpoint,
+// sign,
+// bucket,
+// hash,
+// maxKeys = MAX_PACKAGE_ENTRIES,
+// }) => {
+// const url = sign({ bucket, key: `${MANIFESTS_PREFIX}${hash}` })
+// const maxLines = maxKeys + 2 // 1 for the meta and 1 for checking overflow
+// const r = await fetch(
+// `${endpoint}/preview?url=${encodeURIComponent(url)}&input=txt&line_count=${maxLines}`,
+// )
+// const [header, ...entries] = await r
+// .json()
+// .then((json) => json.info.data.head.map((l) => JSON.parse(l)))
+// const files = Math.min(maxKeys, entries.length)
+// const bytes = entries.slice(0, maxKeys).reduce((sum, i) => sum + i.size, 0)
+// const truncated = entries.length > maxKeys
+// return {
+// stats: { files, bytes, truncated },
+// message: header.message,
+// header,
+// }
+// }
diff --git a/catalog/app/containers/NavBar/Suggestions/Suggestions.tsx b/catalog/app/containers/NavBar/Suggestions/Suggestions.tsx
index 4fa912fe30d..d459995f009 100644
--- a/catalog/app/containers/NavBar/Suggestions/Suggestions.tsx
+++ b/catalog/app/containers/NavBar/Suggestions/Suggestions.tsx
@@ -61,7 +61,10 @@ function SuggestionsList({ items, selected }: SuggestionsProps) {
))}
Learn the{' '}
-
+
advanced search syntax
{' '}
for query string queries in ElasticSearch {ES_V}.
diff --git a/catalog/app/embed/File.js b/catalog/app/embed/File.js
index bc47739202b..247a19ff163 100644
--- a/catalog/app/embed/File.js
+++ b/catalog/app/embed/File.js
@@ -1,7 +1,5 @@
import { basename } from 'path'
-import * as dateFns from 'date-fns'
-import * as R from 'ramda'
import * as React from 'react'
import { Link, useLocation, useParams } from 'react-router-dom'
import * as M from '@material-ui/core'
@@ -9,22 +7,21 @@ import * as M from '@material-ui/core'
import * as BreadCrumbs from 'components/BreadCrumbs'
import Message from 'components/Message'
import * as Preview from 'components/Preview'
-import Sparkline from 'components/Sparkline'
import cfg from 'constants/config'
import * as Notifications from 'containers/Notifications'
import * as AWS from 'utils/AWS'
import AsyncResult from 'utils/AsyncResult'
import { useData } from 'utils/Data'
import * as NamedRoutes from 'utils/NamedRoutes'
-import * as SVG from 'utils/SVG'
import { linkStyle } from 'utils/StyledLink'
import copyToClipboard from 'utils/clipboard'
import * as Format from 'utils/format'
import parseSearch from 'utils/parseSearch'
import * as s3paths from 'utils/s3paths'
-import { readableBytes, readableQuantity } from 'utils/string'
+import { readableBytes } from 'utils/string'
import FileCodeSamples from 'containers/Bucket/CodeSamples/File'
+import Analytics from 'containers/Bucket/File/Analytics'
import FileProperties from 'containers/Bucket/FileProperties'
import * as FileView from 'containers/Bucket/FileView'
import Section from 'containers/Bucket/Section'
@@ -229,74 +226,6 @@ function VersionInfo({ bucket, path, version }) {
)
}
-function Analytics({ bucket, path }) {
- const [cursor, setCursor] = React.useState(null)
- const s3 = AWS.S3.use()
- const today = React.useMemo(() => new Date(), [])
- const formatDate = (date) =>
- dateFns.format(
- date,
- today.getFullYear() === date.getFullYear() ? 'd MMM' : 'd MMM yyyy',
- )
- const data = useData(requests.objectAccessCounts, {
- s3,
- bucket,
- path,
- today,
- })
-
- const defaultExpanded = data.case({
- Ok: ({ total }) => !!total,
- _: () => false,
- })
-
- return (
-
- {data.case({
- Ok: ({ counts, total }) =>
- total ? (
-
-
- Downloads
-
- {readableQuantity(cursor === null ? total : counts[cursor].value)}
-
-
- {cursor === null
- ? `${counts.length} days`
- : formatDate(counts[cursor].date)}
-
-
-
-
-
-
- ,
- )}
- />
-
-
- ) : (
- No analytics available
- ),
- Err: () => No analytics available,
- _: () => ,
- })}
-
- )
-}
-
function CenteredProgress() {
return (
diff --git a/catalog/app/model/graphql/schema.generated.ts b/catalog/app/model/graphql/schema.generated.ts
index ba8ed87e3fd..be04791e97a 100644
--- a/catalog/app/model/graphql/schema.generated.ts
+++ b/catalog/app/model/graphql/schema.generated.ts
@@ -82,6 +82,41 @@ export default {
],
interfaces: [],
},
+ {
+ kind: 'OBJECT',
+ name: 'AccessCountsGroup',
+ fields: [
+ {
+ name: 'ext',
+ type: {
+ kind: 'NON_NULL',
+ ofType: {
+ kind: 'SCALAR',
+ name: 'String',
+ ofType: null,
+ },
+ },
+ args: [],
+ },
+ {
+ name: 'counts',
+ type: {
+ kind: 'NON_NULL',
+ ofType: {
+ kind: 'OBJECT',
+ name: 'AccessCounts',
+ ofType: null,
+ },
+ },
+ args: [],
+ },
+ ],
+ interfaces: [],
+ },
+ {
+ kind: 'SCALAR',
+ name: 'String',
+ },
{
kind: 'OBJECT',
name: 'AdminMutations',
@@ -208,10 +243,6 @@ export default {
],
interfaces: [],
},
- {
- kind: 'SCALAR',
- name: 'String',
- },
{
kind: 'OBJECT',
name: 'AdminQueries',
@@ -365,6 +396,52 @@ export default {
},
],
},
+ {
+ kind: 'OBJECT',
+ name: 'BucketAccessCounts',
+ fields: [
+ {
+ name: 'byExt',
+ type: {
+ kind: 'NON_NULL',
+ ofType: {
+ kind: 'LIST',
+ ofType: {
+ kind: 'NON_NULL',
+ ofType: {
+ kind: 'OBJECT',
+ name: 'AccessCountsGroup',
+ ofType: null,
+ },
+ },
+ },
+ },
+ args: [
+ {
+ name: 'groups',
+ type: {
+ kind: 'SCALAR',
+ name: 'Int',
+ ofType: null,
+ },
+ },
+ ],
+ },
+ {
+ name: 'combined',
+ type: {
+ kind: 'NON_NULL',
+ ofType: {
+ kind: 'OBJECT',
+ name: 'AccessCounts',
+ ofType: null,
+ },
+ },
+ args: [],
+ },
+ ],
+ interfaces: [],
+ },
{
kind: 'UNION',
name: 'BucketAddResult',
@@ -4188,6 +4265,81 @@ export default {
},
args: [],
},
+ {
+ name: 'bucketAccessCounts',
+ type: {
+ kind: 'OBJECT',
+ name: 'BucketAccessCounts',
+ ofType: null,
+ },
+ args: [
+ {
+ name: 'bucket',
+ type: {
+ kind: 'NON_NULL',
+ ofType: {
+ kind: 'SCALAR',
+ name: 'String',
+ ofType: null,
+ },
+ },
+ },
+ {
+ name: 'window',
+ type: {
+ kind: 'NON_NULL',
+ ofType: {
+ kind: 'SCALAR',
+ name: 'Int',
+ ofType: null,
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'objectAccessCounts',
+ type: {
+ kind: 'OBJECT',
+ name: 'AccessCounts',
+ ofType: null,
+ },
+ args: [
+ {
+ name: 'bucket',
+ type: {
+ kind: 'NON_NULL',
+ ofType: {
+ kind: 'SCALAR',
+ name: 'String',
+ ofType: null,
+ },
+ },
+ },
+ {
+ name: 'key',
+ type: {
+ kind: 'NON_NULL',
+ ofType: {
+ kind: 'SCALAR',
+ name: 'String',
+ ofType: null,
+ },
+ },
+ },
+ {
+ name: 'window',
+ type: {
+ kind: 'NON_NULL',
+ ofType: {
+ kind: 'SCALAR',
+ name: 'Int',
+ ofType: null,
+ },
+ },
+ },
+ ],
+ },
{
name: 'admin',
type: {
diff --git a/catalog/app/model/graphql/types.generated.ts b/catalog/app/model/graphql/types.generated.ts
index 8ad7b159639..fb5d1b2a862 100644
--- a/catalog/app/model/graphql/types.generated.ts
+++ b/catalog/app/model/graphql/types.generated.ts
@@ -36,6 +36,12 @@ export interface AccessCounts {
readonly counts: ReadonlyArray
}
+export interface AccessCountsGroup {
+ readonly __typename: 'AccessCountsGroup'
+ readonly ext: Scalars['String']
+ readonly counts: AccessCounts
+}
+
export interface AdminMutations {
readonly __typename: 'AdminMutations'
readonly user: UserAdminMutations
@@ -89,6 +95,16 @@ export type BrowsingSessionDisposeResult = Ok | OperationError
export type BrowsingSessionRefreshResult = BrowsingSession | InvalidInput | OperationError
+export interface BucketAccessCounts {
+ readonly __typename: 'BucketAccessCounts'
+ readonly byExt: ReadonlyArray
+ readonly combined: AccessCounts
+}
+
+export interface BucketAccessCountsbyExtArgs {
+ groups: Maybe
+}
+
export interface BucketAddInput {
readonly name: Scalars['String']
readonly title: Scalars['String']
@@ -864,6 +880,8 @@ export interface Query {
readonly searchMoreObjects: ObjectsSearchMoreResult
readonly searchMorePackages: PackagesSearchMoreResult
readonly subscription: SubscriptionState
+ readonly bucketAccessCounts: Maybe
+ readonly objectAccessCounts: Maybe
readonly admin: AdminQueries
readonly policies: ReadonlyArray
readonly policy: Maybe
@@ -910,6 +928,17 @@ export interface QuerysearchMorePackagesArgs {
size?: Maybe
}
+export interface QuerybucketAccessCountsArgs {
+ bucket: Scalars['String']
+ window: Scalars['Int']
+}
+
+export interface QueryobjectAccessCountsArgs {
+ bucket: Scalars['String']
+ key: Scalars['String']
+ window: Scalars['Int']
+}
+
export interface QuerypolicyArgs {
id: Scalars['ID']
}
diff --git a/catalog/app/utils/AWS/Bedrock/History.spec.ts b/catalog/app/utils/AWS/Bedrock/History.spec.ts
index 6ff1fb4773a..d635295b0b9 100644
--- a/catalog/app/utils/AWS/Bedrock/History.spec.ts
+++ b/catalog/app/utils/AWS/Bedrock/History.spec.ts
@@ -21,7 +21,7 @@ describe('utils/AWS/Bedrock/History', () => {
})
describe('foldMessages', () => {
- it('Fold same-role messages', async () => {
+ it('Fold same-role messages', () => {
const userFoo = Message.createMessage('foo')
const userBar = Message.createMessage('bar')
const assistantFoo = Message.createMessage('foo', 'assistant')
@@ -32,7 +32,7 @@ describe('utils/AWS/Bedrock/History', () => {
expect(list[1].content).toBe('foo\nbaz')
})
- it('Fold system and user messages', async () => {
+ it('Fold system and user messages', () => {
const userFoo = Message.createMessage('foo')
const userBar = Message.createMessage('bar')
const systemFoo = Message.createMessage('foo', 'system')
diff --git a/catalog/app/utils/AWS/S3.js b/catalog/app/utils/AWS/S3.js
index a51c29b5566..6b052a52927 100644
--- a/catalog/app/utils/AWS/S3.js
+++ b/catalog/app/utils/AWS/S3.js
@@ -43,44 +43,28 @@ function useSmartS3() {
return useConstant(() => {
class SmartS3 extends S3 {
- getReqType(req) {
+ shouldSign(req) {
const bucket = req.params.Bucket
if (cfg.mode === 'LOCAL') {
- return 'signed'
+ return true
}
- if (isAuthenticated()) {
- if (
- // sign if operation is not bucket-specific
- // (not sure if there are any such operations that can be used from the browser)
- !bucket ||
- cfg.analyticsBucket === bucket ||
+ if (
+ isAuthenticated() &&
+ // sign if operation is not bucket-specific
+ // (not sure if there are any such operations that can be used from the browser)
+ (!bucket ||
cfg.serviceBucket === bucket ||
statusReportsBucket === bucket ||
- (cfg.mode !== 'OPEN' && isInStack(bucket))
- ) {
- return 'signed'
- }
- } else if (req.operation === 'selectObjectContent') {
- return 'select'
+ (cfg.mode !== 'OPEN' && isInStack(bucket)))
+ ) {
+ return true
}
- return 'unsigned'
- }
-
- populateURI(req) {
- if (req.service.getReqType(req) === 'select') {
- return
- }
- super.populateURI(req)
+ return false
}
customRequestHandler(req) {
- const b = req.params.Bucket
- const type = this.getReqType(req)
-
- if (b) {
- const endpoint = new AWS.Endpoint(
- type === 'select' ? `${cfg.apiGatewayEndpoint}/s3select/` : cfg.s3Proxy,
- )
+ if (req.params.Bucket) {
+ const endpoint = new AWS.Endpoint(cfg.s3Proxy)
req.on('sign', () => {
if (req.httpRequest[PRESIGN]) return
@@ -96,10 +80,7 @@ function useSmartS3() {
const basePath = endpoint.path.replace(/\/$/, '')
req.httpRequest.endpoint = endpoint
- req.httpRequest.path =
- type === 'select'
- ? `${basePath}${origPath}`
- : `${basePath}/${origEndpoint.host}${origPath}`
+ req.httpRequest.path = `${basePath}/${origEndpoint.host}${origPath}`
})
req.on(
'retry',
@@ -138,9 +119,8 @@ function useSmartS3() {
if (forceProxy) {
req.httpRequest[FORCE_PROXY] = true
}
- const type = this.getReqType(req)
- if (type !== 'signed') {
+ if (!this.shouldSign(req)) {
req.toUnauthenticated()
}
diff --git a/catalog/app/utils/AWS/Signer.js b/catalog/app/utils/AWS/Signer.js
index 0c0c24b1ac8..404fe0f4d73 100644
--- a/catalog/app/utils/AWS/Signer.js
+++ b/catalog/app/utils/AWS/Signer.js
@@ -25,7 +25,7 @@ export function useS3Signer({ urlExpiration: exp, forceProxy = false } = {}) {
const statusReportsBucket = useStatusReportsBucket()
const s3 = S3.use()
const inStackOrSpecial = React.useCallback(
- (b) => isInStack(b) || cfg.analyticsBucket === b || statusReportsBucket === b,
+ (b) => isInStack(b) || statusReportsBucket === b,
[isInStack, statusReportsBucket],
)
return React.useCallback(
diff --git a/catalog/app/utils/GraphQL/Provider.tsx b/catalog/app/utils/GraphQL/Provider.tsx
index 592b71e58e6..05c34cd7238 100644
--- a/catalog/app/utils/GraphQL/Provider.tsx
+++ b/catalog/app/utils/GraphQL/Provider.tsx
@@ -90,6 +90,8 @@ export default function GraphQLProvider({ children }: React.PropsWithChildren<{}
keys: {
AccessCountForDate: () => null,
AccessCounts: () => null,
+ AccessCountsGroup: () => null,
+ BucketAccessCounts: () => null,
BucketConfig: (b) => b.name as string,
Canary: (c) => c.name as string,
Collaborator: (c) => c.username as string,
diff --git a/catalog/app/utils/Sentry.ts b/catalog/app/utils/Sentry.ts
index 02b99626958..6ca5da74976 100644
--- a/catalog/app/utils/Sentry.ts
+++ b/catalog/app/utils/Sentry.ts
@@ -60,13 +60,13 @@ export const UserTracker = function SentryUserTracker({
return children
}
-/** @deprecated */
+/** @deprecated use '@sentry/react' */
async function callSentry(method: string, ...args: $TSFixMe[]) {
return (Sentry as $TSFixMe)[method](...args)
}
-/** @deprecated */
+/** @deprecated use '@sentry/react' */
export const useSentry = () => callSentry
-/** @deprecated */
+/** @deprecated use '@sentry/react' */
export const use = useSentry
diff --git a/docs/Catalog/SearchQuery.md b/docs/Catalog/SearchQuery.md
index 4b51655b4a4..1f4aaa7951e 100644
--- a/docs/Catalog/SearchQuery.md
+++ b/docs/Catalog/SearchQuery.md
run them. You must first set up an Athena workgroup and Saved queries per
[AWS's Athena documentation](https://docs.aws.amazon.com/athena/latest/ug/getting-started.html).
### Configuration
-You can hide the "Queries" tab by setting `ui > nav > queries: false` ([learn more](./Preferences.md)).
+You can hide the "Queries" tab by setting `ui > nav > queries: false`.
+It is also possible to set the default workgroup in `ui > athena > defaultWorkgroup: 'your-default-workgroup'`.
+[Learn more](./Preferences.md).
+
+The tab remembers the most recently selected workgroup, catalog name, and database.
### Basics
"Run query" executes the selected query and waits for the result.
-![](../imgs/athena-ui.png)
-![](../imgs/athena-history.png)
+![Athena page](../imgs/athena-ui.png)
diff --git a/docs/README.md b/docs/README.md
index eb5afe0ba28..0582c038cb3 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -7,6 +7,30 @@ data integrity at scale.
---
+## How to Get Started
+
+Quilt consists of three main elements:
+
+- [Quilt Platform](#quilt-platform-overview), a cloud platform hosted in an
+  organization's AWS Account for interacting with, visualizing, searching, and
+  querying Quilt Packages.
+- [Quilt Python SDK](#quilt-python-sdk), which provides the ability to create,
+  push, install, and delete Quilt Packages.
+- [Quilt Ecosystem](#quilt-ecosystem-and-integrations), which extends the core
+  Quilt capabilities to support typical elements of life sciences workflows,
+  such as incorporating orchestration data and connecting packages to
+  Electronic Lab Notebooks.
+
+To dive deeper into the capabilities of Quilt, start with our [Quick Start
+Guide](Quickstart.md) or explore the [Installation
+Instructions](Installation.md) for setting up your environment.
+
+If you have any questions or need help, join our [Slack
+community](https://slack.quiltdata.com/) or submit a support request to
+.
+
+---
+
## Navigating the Documentation
The Quilt documentation is structured to guide users through different layers of
@@ -24,8 +48,7 @@ capabilities like embeddable previews and metadata collection.
**Core Sections:**
- [Architecture](Architecture.md) - Learn how Quilt is architected.
-- [Mental Model](MentalModel.md) - Understand the guiding principles behind
- Quilt.
+- [Mental Model](MentalModel.md) - Understand the guiding principles behind Quilt.
- [Metadata Management](Catalog/Metadata.md) - Manage metadata at scale.
For users of the Quilt Platform (often referred to as the Catalog):
@@ -40,11 +63,9 @@ For users of the Quilt Platform (often referred to as the Catalog):
For administrators managing Quilt deployments:
-- [Admin Settings UI](Catalog/Admin.md) - Control platform settings and user
- access.
+- [Admin Settings UI](Catalog/Admin.md) - Control platform settings and user access.
- [Catalog Configuration](Catalog/Preferences.md) - Set platform preferences.
-- [Cross-Account Access](CrossAccount.md) - Manage multi-account access to S3
- data.
+- [Cross-Account Access](CrossAccount.md) - Manage multi-account access to S3 data.
### Quilt Python SDK
@@ -58,8 +79,7 @@ flexibility needed for deeper integrations.
managing data packages.
- [Editing and Uploading Packages](walkthrough/editing-a-package.md) - Learn how
to version, edit, and share data.
-- [API Reference](api-reference/api.md) - Detailed API documentation for
- developers.
+- [API Reference](api-reference/api.md) - Detailed API documentation for developers.
### Quilt Ecosystem and Integrations
@@ -67,9 +87,8 @@ The **Quilt Ecosystem** extends the platform with integrations and plugins to
fit your workflow. Whether you're managing scientific data or automating
packaging tasks, Quilt can be tailored to your needs with these tools:
-- [Benchling
- Packager](https://open.quiltdata.com/b/quilt-example/packages/examples/benchling-packager)
- - Package biological data from Benchling.
+- [Benchling Packager](examples/benchling.md) - Package electronic lab notebooks
+ from Benchling.
- [Nextflow Plugin](examples/nextflow.md) - Integrate with Nextflow pipelines
for bioinformatics.
@@ -89,18 +108,7 @@ administrator, Quilt helps streamline your data management workflows.
better insights.
- **Discover**: Use metadata and search tools to explore data relationships
across projects.
-- **Model**: Version and manage large data sets that don't fit traditional git
- repositories.
+- **Model**: Version and manage large data sets that don't fit traditional git repositories.
- **Decide**: Empower your team with auditable data for better decision-making.
---
-
-## How to Get Started
-
-To dive deeper into the capabilities of Quilt, start with our [Quick Start
-Guide](Quickstart.md) or explore the [Installation
-Instructions](Installation.md) for setting up your environment.
-
-If you have any questions or need help, join our [Slack
-community](https://slack.quiltdata.com/) or visit our full [documentation
-site](https://docs.quiltdata.com/).
diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md
index 9581a9033be..15295bfce40 100644
--- a/docs/SUMMARY.md
+++ b/docs/SUMMARY.md
@@ -39,13 +39,7 @@
* [GxP for Security & Compliance](advanced-features/good-practice.md)
* [Organizing S3 Buckets](advanced-features/s3-bucket-organization.md)
-## Quilt Ecosystem Integrations
-
-* [Benchling Packager](https://open.quiltdata.com/b/quilt-example/packages/examples/benchling-packager)
-* [Event-Driven Packaging](advanced-features/event-driven-packaging.md)
-* [Nextflow Plugin](examples/nextflow.md)
-
-## Quilt Python SDK Developers
+## Quilt Python SDK
* [Installation](Installation.md)
* [Quick Start](Quickstart.md)
@@ -74,3 +68,9 @@
* [Contributing](CONTRIBUTING.md)
* [Frequently Asked Questions](FAQ.md)
* [Troubleshooting](Troubleshooting.md)
+
+## Quilt Ecosystem Integrations
+
+* [Benchling Packager](examples/benchling.md)
+* [Event-Driven Packaging](advanced-features/event-driven-packaging.md)
+* [Nextflow Plugin](examples/nextflow.md)
diff --git a/docs/advanced-features/athena.md b/docs/advanced-features/athena.md
index 993e8448a58..cb61b2d9312 100644
--- a/docs/advanced-features/athena.md
+++ b/docs/advanced-features/athena.md
@@ -1,4 +1,5 @@
+
# Querying package metadata with Athena
Quilt stores package data and metadata in S3. Metadata lives in a per-package manifest file
in a each bucket's `.quilt/` directory.
@@ -9,9 +10,11 @@ using predicates based on package or object-level metadata.
Packages can be created from the resulting tabular data.
To be able to create a package,
-the table must contain the columns `logical_key`, `physical_keys` and `size` as shown below.
+the table must contain the columns `logical_key`, `physical_keys` (or `physical_key`) and `size`.
(See also [Mental Model](https://docs.quiltdata.com/mentalmodel))
+![Athena page with results ready to be packaged](../imgs/athena-package.png)
+
## Defining package tables and views in Athena
> This step is not required for users of Quilt enterprise, since tables and views
diff --git a/docs/examples/benchling.md b/docs/examples/benchling.md
new file mode 100644
index 00000000000..68a0a5a56bb
--- /dev/null
+++ b/docs/examples/benchling.md
@@ -0,0 +1,32 @@
+
+The Benchling Packager is a lambda you can deploy in your own AWS private cloud
+to process [Benchling](https://benchling.com/) events in order to create (and
+link back, if possible) a dedicated [Quilt](https://quiltdata.com/) package for
+every Benchling notebook.
+
+The CloudFormation template is available as a package on
+[open.quiltdata.com](https://open.quiltdata.com/b/quilt-example/packages/examples/benchling-packager).
+
+## Prerequisites
+
+To install the Benchling Packager, you will need to know, and have
+administrative access to, the following:
+
+- Your Benchling tenant domain (e.g., `` from
+  `.benchling.com`), for configuring event subscriptions and
+ metadata schemas.
+- The AWS Account ID (e.g., 123456789012) and AWS Region (e.g., us-west-2) used
+  by your Quilt stack, for configuring the CloudFormation stack and lambdas.
+
+## Installation
+
+Go to the [Benchling Packager
+package](https://open.quiltdata.com/b/quilt-example/packages/examples/benchling-packager)
+on open.quiltdata.com and follow the instructions in the README.
+
+## References
+
+- [AWS CloudFormation templates](https://aws.amazon.com/cloudformation/resources/templates/)
+- [AWS Lambda functions](https://aws.amazon.com/lambda/)
+- [Benchling EventBridge events](https://docs.benchling.com/docs/events-getting-started#event-types)
+- [Benchling Schemas](https://help.benchling.com/hc/en-us/articles/9684227216781)
diff --git a/docs/imgs/athena-history.png b/docs/imgs/athena-history.png
deleted file mode 100644
index 7ef0916506e..00000000000
Binary files a/docs/imgs/athena-history.png and /dev/null differ
diff --git a/docs/imgs/athena-package.png b/docs/imgs/athena-package.png
new file mode 100644
index 00000000000..bdae54c2776
Binary files /dev/null and b/docs/imgs/athena-package.png differ
diff --git a/docs/imgs/athena-ui.png b/docs/imgs/athena-ui.png
index 8c417e376cd..3f185385b8d 100644
Binary files a/docs/imgs/athena-ui.png and b/docs/imgs/athena-ui.png differ
diff --git a/lambdas/indexer/CHANGELOG.md b/lambdas/indexer/CHANGELOG.md
new file mode 100644
index 00000000000..c7ea99597d5
--- /dev/null
+++ b/lambdas/indexer/CHANGELOG.md
@@ -0,0 +1,21 @@
+
+# Changelog
+
+Changes are listed in reverse chronological order (newer entries at the top).
+The entry format is
+
+```markdown
+- [Verb] Change description ([#](https://github.com/quiltdata/quilt/pull/))
+```
+
+where verb is one of
+
+- Removed
+- Added
+- Fixed
+- Changed
+
+## Changes
+
+- [Changed] Stop using S3 select ([#4212](https://github.com/quiltdata/quilt/pull/4212))
+- [Added] Bootstrap the change log ([#4212](https://github.com/quiltdata/quilt/pull/4212))
diff --git a/lambdas/indexer/index.py b/lambdas/indexer/index.py
index 80b6861a11f..bb6a9422229 100644
--- a/lambdas/indexer/index.py
+++ b/lambdas/indexer/index.py
@@ -47,6 +47,7 @@
import datetime
+import functools
import json
import os
import pathlib
@@ -92,7 +93,6 @@
POINTER_PREFIX_V1,
get_available_memory,
get_quilt_logger,
- query_manifest_content,
separated_env_to_iter,
)
@@ -168,12 +168,7 @@
# currently only affects .parquet, TODO: extend to other extensions
assert 'SKIP_ROWS_EXTS' in os.environ
SKIP_ROWS_EXTS = separated_env_to_iter('SKIP_ROWS_EXTS')
-SELECT_PACKAGE_META = "SELECT * from S3Object o WHERE o.version IS NOT MISSING LIMIT 1"
-# No WHERE clause needed for aggregations since S3 Select skips missing fields for aggs
-SELECT_PACKAGE_STATS = (
- "SELECT COALESCE(SUM(obj['size']), 0) as total_bytes,"
- " COUNT(obj['size']) as total_files from S3Object obj"
-)
+DUCKDB_SELECT_LAMBDA_ARN = os.environ["DUCKDB_SELECT_LAMBDA_ARN"]
TEST_EVENT = "s3:TestEvent"
# we need to filter out GetObject and HeadObject calls generated by the present
# lambda in order to display accurate analytics in the Quilt catalog
@@ -182,6 +177,7 @@
logger = get_quilt_logger()
+s3_client = boto3.client("s3", config=botocore.config.Config(user_agent_extra=USER_AGENT_EXTRA))
def now_like_boto3():
@@ -247,13 +243,10 @@ def select_manifest_meta(s3_client, bucket: str, key: str):
wrapper for retry and returning a string
"""
try:
- raw = query_manifest_content(
- s3_client,
- bucket=bucket,
- key=key,
- sql_stmt=SELECT_PACKAGE_META
- )
- return json.load(raw)
+ body = s3_client.get_object(Bucket=bucket, Key=key)["Body"]
+ with body: # this *might* be needed to close the stream ASAP
+ for line in body.iter_lines():
+ return json.loads(line)
except (botocore.exceptions.ClientError, json.JSONDecodeError) as cle:
print(f"Unable to S3 select manifest: {cle}")
@@ -439,7 +432,7 @@ def get_pkg_data():
first = select_manifest_meta(s3_client, bucket, manifest_key)
if not first:
return
- stats = select_package_stats(s3_client, bucket, manifest_key)
+ stats = select_package_stats(bucket, manifest_key)
if not stats:
return
@@ -472,33 +465,54 @@ def get_pkg_data():
return True
-def select_package_stats(s3_client, bucket, manifest_key) -> str:
+@functools.lru_cache(maxsize=None)
+def get_bucket_region(bucket: str) -> str:
+ resp = s3_client.head_bucket(Bucket=bucket)
+ return resp["ResponseMetadata"]["HTTPHeaders"]["x-amz-bucket-region"]
+
+
+@functools.lru_cache(maxsize=None)
+def get_presigner_client(bucket: str):
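+    # Presigned URLs must be SigV4-signed against the bucket's own region,
+    # or the manifest cannot be fetched by URL.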
+ return boto3.client(
+ "s3",
+ region_name=get_bucket_region(bucket),
+ config=botocore.config.Config(signature_version="s3v4"),
+ )
+
+
+def select_package_stats(bucket, manifest_key) -> Optional[dict]:
"""use s3 select to generate file stats for package"""
logger_ = get_quilt_logger()
- try:
- raw_stats = query_manifest_content(
- s3_client,
- bucket=bucket,
- key=manifest_key,
- sql_stmt=SELECT_PACKAGE_STATS
- ).read()
-
- if raw_stats:
- stats = json.loads(raw_stats)
- assert isinstance(stats['total_bytes'], int)
- assert isinstance(stats['total_files'], int)
-
- return stats
-
- except (
- AssertionError,
- botocore.exceptions.ClientError,
- json.JSONDecodeError,
- KeyError,
- ) as err:
- logger_.exception("Unable to compute package stats via S3 select")
+ presigner_client = get_presigner_client(bucket)
+ url = presigner_client.generate_presigned_url(
+ ClientMethod="get_object",
+ Params={
+ "Bucket": bucket,
+ "Key": manifest_key,
+ },
+ )
+ lambda_ = make_lambda_client()
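+    # The manifest is newline-delimited JSON; ask the DuckDB "select" lambda to
+    # aggregate entry sizes by reading it directly through the presigned URL.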
+ q = f"""
+ SELECT
+ COALESCE(SUM(size), 0) AS total_bytes,
+ COUNT(size) AS total_files FROM read_ndjson('{url}', columns={{size: 'UBIGINT'}}) obj
+ """
+ resp = lambda_.invoke(
+ FunctionName=DUCKDB_SELECT_LAMBDA_ARN,
+ Payload=json.dumps({"query": q, "user_agent": f"DuckDB Select {USER_AGENT_EXTRA}"}),
+ )
- return None
+ payload = resp["Payload"].read()
+ if "FunctionError" in resp:
+ logger_.error("DuckDB select unhandled error: %s", payload)
+ return None
+ parsed = json.loads(payload)
+ if "error" in parsed:
+ logger_.error("DuckDB select error: %s", parsed["error"])
+ return None
+
+ rows = parsed["rows"]
+ return rows[0] if rows else None
def extract_pptx(fileobj, max_size: int) -> str:
@@ -732,6 +746,11 @@ def make_s3_client():
return boto3.client("s3", config=configuration)
+@functools.lru_cache(maxsize=None)
+def make_lambda_client():
+ return boto3.client("lambda")
+
+
def map_event_name(event: dict):
"""transform eventbridge names into S3-like ones"""
input_ = event["eventName"]
diff --git a/lambdas/indexer/pytest.ini b/lambdas/indexer/pytest.ini
index dd07825516f..f9355a4fbaf 100644
--- a/lambdas/indexer/pytest.ini
+++ b/lambdas/indexer/pytest.ini
@@ -1,4 +1,6 @@
[pytest]
+env =
+ DUCKDB_SELECT_LAMBDA_ARN = "arn:aws:lambda:us-west-2:123456789012:function:select-lambda"
log_cli = True
# This is set above critical to prevent logger events from confusing output in CI
-log_level = 51
+log_level = 51
diff --git a/lambdas/indexer/test-requirements.txt b/lambdas/indexer/test-requirements.txt
index e75e43e319b..b8fc13134ea 100644
--- a/lambdas/indexer/test-requirements.txt
+++ b/lambdas/indexer/test-requirements.txt
@@ -5,4 +5,5 @@ pluggy==0.9
py==1.10.0
pytest==4.4.0
pytest-cov==2.6.1
+pytest-env==0.6.2
responses==0.10.14
diff --git a/lambdas/indexer/test/test_index.py b/lambdas/indexer/test/test_index.py
index c53e3bfa8de..05cc0c85a1f 100644
--- a/lambdas/indexer/test/test_index.py
+++ b/lambdas/indexer/test/test_index.py
@@ -23,7 +23,6 @@
import responses
from botocore import UNSIGNED
from botocore.client import Config
-from botocore.exceptions import ParamValidationError
from botocore.stub import Stubber
from dateutil.tz import tzutc
from document_queue import EVENT_PREFIX, RetryError
@@ -979,7 +978,7 @@ def test_index_if_package_select_stats_fail(self, append_mock, select_meta_mock,
)
select_meta_mock.assert_called_once_with(self.s3_client, bucket, manifest_key)
- select_stats_mock.assert_called_once_with(self.s3_client, bucket, manifest_key)
+ select_stats_mock.assert_called_once_with(bucket, manifest_key)
append_mock.assert_called_once_with({
"_index": bucket + PACKAGE_INDEX_SUFFIX,
"_id": key,
@@ -1023,7 +1022,7 @@ def test_index_if_package(self, append_mock, select_meta_mock, select_stats_mock
)
select_meta_mock.assert_called_once_with(self.s3_client, bucket, manifest_key)
- select_stats_mock.assert_called_once_with(self.s3_client, bucket, manifest_key)
+ select_stats_mock.assert_called_once_with(bucket, manifest_key)
append_mock.assert_called_once_with({
"_index": bucket + PACKAGE_INDEX_SUFFIX,
"_id": key,
@@ -1182,51 +1181,6 @@ def test_extension_overrides(self):
assert self._get_contents('foo.txt', '.txt') == ""
assert self._get_contents('foo.ipynb', '.ipynb') == ""
- @pytest.mark.xfail(
- raises=ParamValidationError,
- reason="boto bug https://github.com/boto/botocore/issues/1621",
- strict=True,
- )
- def test_stub_select_object_content(self):
- """Demonstrate that mocking S3 select with boto3 is broken"""
- sha_hash = "50f4d0fc2c22a70893a7f356a4929046ce529b53c1ef87e28378d92b884691a5"
- manifest_key = f"{MANIFEST_PREFIX_V1}{sha_hash}"
- # this SHOULD work, but due to botocore bugs it does not
- self.s3_stubber.add_response(
- method="select_object_content",
- service_response={
- "ResponseMetadata": ANY,
- # it is sadly not possible to mock S3 select responses because
- # boto incorrectly believes "Payload"'s value should be a dict
- # but it's really an iterable in realworld code
- # see https://github.com/boto/botocore/issues/1621
- "Payload": [
- {
- "Stats": {}
- },
- {
- "Records": {
- "Payload": json.dumps(MANIFEST_DATA).encode(),
- },
- },
- {
- "End": {}
- },
- ]
- },
- expected_params={
- "Bucket": "test-bucket",
- "Key": manifest_key,
- "Expression": index.SELECT_PACKAGE_META,
- "ExpressionType": "SQL",
- "InputSerialization": {
- 'JSON': {'Type': 'LINES'},
- 'CompressionType': 'NONE'
- },
- "OutputSerialization": {'JSON': {'RecordDelimiter': '\n'}}
- }
- )
-
def test_synthetic_copy_event(self):
"""check synthetic ObjectCreated:Copy event vs organic obtained on 26-May-2020
(bucket versioning on)
diff --git a/lambdas/tabular_preview/requirements.txt b/lambdas/tabular_preview/requirements.txt
index c787be65d4d..b10ea779083 100644
--- a/lambdas/tabular_preview/requirements.txt
+++ b/lambdas/tabular_preview/requirements.txt
@@ -4,7 +4,7 @@
#
# pip-compile --output-file=requirements.txt ../shared/setup.py setup.py
#
-aiohttp==3.10.2
+aiohttp==3.10.11
# via fsspec
aiosignal==1.2.0
# via aiohttp
diff --git a/s3-proxy/Dockerfile b/s3-proxy/Dockerfile
index adf17c7a6a8..72fe0690fb6 100644
--- a/s3-proxy/Dockerfile
+++ b/s3-proxy/Dockerfile
@@ -1,4 +1,4 @@
-FROM amazonlinux:2023.6.20241031.0
+FROM amazonlinux:2023.6.20241111.0
MAINTAINER Quilt Data, Inc. contact@quiltdata.io
# Based on:
diff --git a/shared/graphql/schema.graphql b/shared/graphql/schema.graphql
index 0bb997e7809..ea342cd5806 100644
--- a/shared/graphql/schema.graphql
+++ b/shared/graphql/schema.graphql
@@ -222,6 +222,16 @@ type AccessCounts {
counts: [AccessCountForDate!]!
}
+type AccessCountsGroup {
+ ext: String!
+ counts: AccessCounts!
+}
+
+type BucketAccessCounts {
+ byExt(groups: Int): [AccessCountsGroup!]!
+ combined: AccessCounts!
+}
+
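+# A sketch of a query using the new types (subfields of AccessCounts /
+# AccessCountForDate are assumed from the existing schema):
+#
+#   query {
+#     bucketAccessCounts(bucket: "my-bucket", window: 30) {
+#       byExt(groups: 10) {
+#         ext
+#         counts { counts { date value } }
+#       }
+#       combined { counts { date value } }
+#     }
+#   }
+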
type PackageDir {
path: String!
metadata: JsonRecord
@@ -556,6 +566,9 @@ type Query {
searchMorePackages(after: String!, size: Int = 30): PackagesSearchMoreResult!
subscription: SubscriptionState!
+ bucketAccessCounts(bucket: String!, window: Int!): BucketAccessCounts
+ objectAccessCounts(bucket: String!, key: String!, window: Int!): AccessCounts
+
admin: AdminQueries! @admin
policies: [Policy!]! @admin