
Generating import file for Postman #770

Draft · wants to merge 11 commits into main
1 change: 1 addition & 0 deletions .cspell
@@ -2,6 +2,7 @@ aoss
APIV
cbor
evals
formdata
lucene
millis
mxyz
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -45,6 +45,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- Added `GET /_plugins/_ml/connectors/{connector_id}`, `_search`, `POST /_plugins/_ml/connectors/_search`, and `PUT /_plugins/_ml/connectors/{connector_id}` ([#764](https://github.com/opensearch-project/opensearch-api-specification/pull/764))
- Added the ability to skip an individual chapter test ([#765](https://github.com/opensearch-project/opensearch-api-specification/pull/765))
- Added uploading of test spec logs ([#767](https://github.com/opensearch-project/opensearch-api-specification/pull/767))
- Added generation of a Postman collection file ([#770](https://github.com/opensearch-project/opensearch-api-specification/pull/770))

### Removed
- Removed unsupported `_common.mapping:SourceField`'s `mode` field and associated `_common.mapping:SourceFieldMode` enum ([#652](https://github.com/opensearch-project/opensearch-api-specification/pull/652))
1 change: 1 addition & 0 deletions package.json
@@ -8,6 +8,7 @@
"coverage:spec": "ts-node tools/src/coverage/coverage.ts",
"dump-cluster-spec": "ts-node tools/src/dump-cluster-spec/dump-cluster-spec.ts",
"generate-types": "ts-node tools/src/tester/_generate_story_types.ts",
"export:postman": "ts-node tools/src/exporter/export.ts",
"lint:spec": "ts-node tools/src/linter/lint.ts",
"lint": "eslint . --report-unused-disable-directives",
"lint--fix": "eslint . --fix --report-unused-disable-directives",
7 changes: 7 additions & 0 deletions tools/src/OpenSearchHttpClient.ts
@@ -221,6 +221,13 @@ export class OpenSearchHttpClient
    }
  }

  get_url(): string | undefined {
    if (this._opts != null && this._opts.url != null && this._opts.url !== '') {
Review comment (Member): I think lodash isEmpty can make this cleaner, maybe just _.isEmpty(this._opts?.url) ? DEFAULT_URL : this._opts?.url.

      return this._opts.url;
    }
    return DEFAULT_URL;
  }

  async request<T = any, R = AxiosResponse<T>, D = any>(config: AxiosRequestConfig<D>): Promise<R> {
    return await this._axios.request(config)
  }
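For illustration, the reviewer's isEmpty suggestion could look roughly like the standalone sketch below. The DEFAULT_URL value here is a placeholder; the real constant already exists in OpenSearchHttpClient.ts.

import _ from 'lodash'

// Placeholder default; the actual DEFAULT_URL constant is defined in OpenSearchHttpClient.ts.
const DEFAULT_URL = 'https://localhost:9200'

// _.isEmpty treats undefined, null, and '' alike, so the three explicit
// checks in get_url() collapse into a single ternary.
function get_url (opts?: { url?: string }): string {
  return _.isEmpty(opts?.url) ? DEFAULT_URL : (opts?.url as string)
}

console.log(get_url())                                    // https://localhost:9200
console.log(get_url({ url: 'http://example.com:9200' }))  // http://example.com:9200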
132 changes: 132 additions & 0 deletions tools/src/exporter/ExportChapters.ts
@@ -0,0 +1,132 @@
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

import fs from 'fs'
import { read_yaml, to_ndjson } from '../helpers'
import { basename, resolve } from 'path'
import _ from 'lodash'
import { StoryEvaluations, StoryFile } from 'tester/types/eval.types'
import { Logger } from 'Logger'
import StoryParser from './StoryParser'
import { PostmanManager } from './PostmanManager'
import { APPLICATION_JSON } from './MimeTypes'
import { Parameter } from 'tester/types/story.types'

export default class ExportChapters {
  private readonly _story_files: Record<string, StoryFile[]> = {}
  private readonly _logger: Logger
  private readonly _postman_manager: PostmanManager

  constructor (logger: Logger, postman_manager: PostmanManager) {
    this._logger = logger
    this._postman_manager = postman_manager
  }

  // Walks every story file under story_path, adds each chapter as a request in the
  // Postman collection, then writes the collection to disk.
  async run (story_path: string): Promise<{ results: StoryEvaluations, failed: boolean }> {
    let failed = false
    const story_files = this.story_files(story_path)
    const results: StoryEvaluations = { evaluations: [] }

    for (const story_file of story_files) {
      for (const chapter of story_file.story.chapters) {
        const [headers, content_type] = this.#serialize_headers(chapter.request?.headers, chapter.request?.content_type)
        let params = {}, url_path = {};
        if (chapter.parameters !== undefined) {
          [url_path, params] = this.#parse_url(chapter.path, chapter.parameters)
        }
        const request_data = chapter.request?.payload !== undefined ? this.#serialize_payload(
          chapter.request.payload,
          content_type
        ) : {}
        this._postman_manager.add_to_collection('url', chapter.method, chapter.path, headers, params, request_data, content_type, story_file.full_path);
      }
      this._logger.info(`Exporting ${story_file.display_path} ...`)
    }
    this._postman_manager.save_collection()

    return { results, failed }
  }

  story_files(story_path: string): StoryFile[] {
    if (this._story_files[story_path] !== undefined) return this._story_files[story_path]
    this._story_files[story_path] = this.#sort_story_files(this.#collect_story_files(resolve(story_path), '', ''))
    return this._story_files[story_path]
  }

  #collect_story_files (folder: string, file: string, prefix: string): StoryFile[] {
    const path = file === '' ? folder : `${folder}/${file}`
    const next_prefix = prefix === '' ? file : `${prefix}/${file}`
    if (file.startsWith('.') || file == 'docker-compose.yml' || file == 'Dockerfile' || file.endsWith('.py')) {
      return []
    } else if (fs.statSync(path).isFile()) {
      const story = StoryParser.parse(read_yaml(path))
      return [{
        display_path: next_prefix === '' ? basename(path) : next_prefix,
        full_path: path,
        story
      }]
    } else {
      return _.compact(fs.readdirSync(path).flatMap(next_file => {
        return this.#collect_story_files(path, next_file, next_prefix)
      }))
    }
  }

  #sort_story_files (story_files: StoryFile[]): StoryFile[] {
    return story_files.sort(({ display_path: a }, { display_path: b }) => {
      const a_depth = a.split('/').length
      const b_depth = b.split('/').length
      if (a_depth !== b_depth) return a_depth - b_depth
      return a.localeCompare(b)
    })
  }

  // Determines the content type (falling back to application/json) and strips any
  // Content-Type entry from the header map so it is not emitted twice.
  #serialize_headers(headers?: Record<string, any>, content_type?: string): [Record<string, any> | undefined, string] {
    headers = _.cloneDeep(headers)
    content_type = content_type ?? APPLICATION_JSON
    if (!headers) return [headers, content_type]
    _.forEach(headers, (v, k) => {
      if (k.toLowerCase() == 'content-type') {
        content_type = v.toString()
        if (headers) delete headers[k]
      }
    })
    return [headers, content_type]
  }

  #serialize_payload(payload: any, content_type: string): any {
    if (payload === undefined) return undefined
    switch (content_type) {
      case 'application/x-ndjson': return to_ndjson(payload as any[])
      default: return payload
    }
  }

  resolve_params (parameters: Record<string, Parameter>): Record<string, Parameter> {
    const resolved_params: Record<string, Parameter> = {}
    for (const [param_name, param_value] of Object.entries(parameters ?? {})) {
      // Note: string and non-string values are currently passed through unchanged.
      resolved_params[param_name] = param_value
    }
    return resolved_params
  }

  // Substitutes {placeholder} segments of the path with matching parameter values and
  // returns the remaining parameters as query parameters.
  #parse_url (path: string, parameters: Record<string, Parameter>): [string, Record<string, Parameter>] {
    const path_params = new Set<string>()
    const parsed_path = path.replace(/{(\w+)}/g, (_, key) => {
      path_params.add(key as string)
      return parameters[key] as string
    })
    const query_params = Object.fromEntries(Object.entries(parameters).filter(([key]) => !path_params.has(key)))
    return [parsed_path, query_params]
  }
}
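As a standalone illustration of the path/parameter split performed by #parse_url above (the Parameter type below is a simplified stand-in for the story Parameter type):

type Parameter = string | number | boolean // simplified stand-in

function parse_url (path: string, parameters: Record<string, Parameter>): [string, Record<string, Parameter>] {
  const path_params = new Set<string>()
  // Keys referenced by {placeholder} segments are substituted into the path ...
  const parsed_path = path.replace(/{(\w+)}/g, (_match, key: string) => {
    path_params.add(key)
    return String(parameters[key])
  })
  // ... and every remaining parameter is kept as a query parameter.
  const query_params = Object.fromEntries(Object.entries(parameters).filter(([key]) => !path_params.has(key)))
  return [parsed_path, query_params]
}

console.log(parse_url('/{index}/_search', { index: 'movies', size: 10 }))
// => [ '/movies/_search', { size: 10 } ]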
14 changes: 14 additions & 0 deletions tools/src/exporter/MimeTypes.ts
@@ -0,0 +1,14 @@
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

export const APPLICATION_CBOR = 'application/cbor'
export const APPLICATION_JSON = 'application/json'
export const APPLICATION_SMILE = 'application/smile'
export const APPLICATION_YAML = 'application/yaml'
export const TEXT_PLAIN = 'text/plain'
86 changes: 86 additions & 0 deletions tools/src/exporter/PostmanManager.ts
@@ -0,0 +1,86 @@
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

import fs from 'fs';

export class PostmanManager {
  private readonly collection: any;
  private readonly collection_path: string;

  constructor(collection_path: string = './postman_collection.json') {
    this.collection_path = collection_path;
    this.collection = {
      info: {
        name: "OpenSearch tests",
        schema: "https://schema.getpostman.com/json/collection/v2.1.0/collection.json",
      },
      item: [],
    };
  }

  // Adds a request item to the collection, nested in folders that mirror the
  // story file's path segments below the tests/ directory.
  add_to_collection(
    url: string | undefined,
    method: string,
    path: string,
    headers: Record<string, any> | undefined,
    params: Record<string, any>,
    body: any,
    content_type: string,
    full_path?: string
  ): void {
    const folders: string[] = [];

    if (full_path) {
      const pathParts = full_path.split('/').filter(Boolean);

      const startIndex = pathParts.indexOf('tests');

      if (startIndex !== -1) {
        folders.push(...pathParts.slice(startIndex + 1));
      }
    }

    // Walk (and lazily create) the folder hierarchy for this request.
    let currentFolder = this.collection.item;

    folders.forEach(folder => {
      let existingFolder = currentFolder.find((item: any) => item.name === folder);

      if (!existingFolder) {
        existingFolder = { name: folder, item: [] };
        currentFolder.push(existingFolder);
      }

      currentFolder = existingFolder.item;
    });

    const item = {
      name: path,
      request: {
        method,
        header: Object.entries(headers ?? {}).map(([key, value]) => ({ key, value })),
        url: {
          raw: `${url}${path}`,
          host: url,
          path: path.split('/').filter(Boolean),
          query: Object.entries(params).map(([key, value]) => ({ key, value: String(value) })),
        },
        body: body ? { mode: content_type === 'application/json' ? 'raw' : 'formdata', raw: JSON.stringify(body) } : undefined,
      },
    };

    // Skip duplicates that share the same request path within the same folder.
    const exists = currentFolder.some((existingItem: any) => existingItem.name === item.name);
    if (!exists) {
      currentFolder.push(item);
    }
  }

  save_collection(): void {
    fs.writeFileSync(this.collection_path, JSON.stringify(this.collection, null, 2));
  }
}
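For reference, a collection produced by this class should look roughly like the sketch below; the folder names, header, query, and body values are invented for illustration, and the host is whatever url string the caller passes as the first argument.

// Roughly the shape written to ./postman_collection.json; all values below are illustrative.
const example_collection = {
  info: {
    name: 'OpenSearch tests',
    schema: 'https://schema.getpostman.com/json/collection/v2.1.0/collection.json'
  },
  item: [{
    name: '_core',                  // folder derived from a path segment under tests/
    item: [{
      name: 'search.yaml',          // the story file name also becomes a folder level
      item: [{
        name: '/{index}/_search',   // chapter path
        request: {
          method: 'GET',
          header: [{ key: 'x-example', value: 'true' }],
          url: {
            raw: 'https://localhost:9200/{index}/_search',
            host: 'https://localhost:9200',
            path: ['{index}', '_search'],
            query: [{ key: 'size', value: '10' }]
          },
          body: { mode: 'raw', raw: '{"query":{"match_all":{}}}' }
        }
      }]
    }]
  }]
}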
41 changes: 41 additions & 0 deletions tools/src/exporter/StoryParser.ts
@@ -0,0 +1,41 @@
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

import _ from "lodash";
import { ParsedChapter, ParsedStory } from "tester/types/parsed_story.types";
import { Chapter, Story } from "tester/types/story.types";

export default class StoryParser {
  static parse(story: Story): ParsedStory {
    return {
      ...story,
      chapters: this.#expand_chapters(story.chapters),
    }
  }

  static #chapter_methods(methods: string[] | string): string[] {
    return [...(Array.isArray(methods) ? methods : [methods])]
  }

  // Expands a chapter that declares several HTTP methods into one chapter per method,
  // appending the method to the synopsis so the expanded chapters stay distinguishable.
  static #expand_chapters(chapters?: Chapter[]): ParsedChapter[] {
    if (chapters === undefined) return []
    return _.flatMap(_.map(chapters, (chapter) => {
      return _.map(this.#chapter_methods(chapter.method), (method) => {
        const synopsis = chapter.synopsis && Array.isArray(chapter.method) ?
          `${chapter.synopsis} [${method}]` :
          chapter.synopsis
        return {
          ...chapter,
          synopsis,
          method
        }
      })
    })) as ParsedChapter[]
  }
}
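As an illustration of the expansion above (field values here are invented):

// A story chapter that lists several HTTP methods ...
const chapter = { synopsis: 'Search an index', method: ['GET', 'POST'], path: '/{index}/_search' }

// ... is expanded into one parsed chapter per method, with the method appended to the synopsis:
// [
//   { synopsis: 'Search an index [GET]',  method: 'GET',  path: '/{index}/_search' },
//   { synopsis: 'Search an index [POST]', method: 'POST', path: '/{index}/_search' }
// ]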
49 changes: 49 additions & 0 deletions tools/src/exporter/export.ts
@@ -0,0 +1,49 @@
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

import { Logger, LogLevel } from '../Logger'
import { Command, Option } from '@commander-js/extra-typings'
import {
  AWS_ACCESS_KEY_ID_OPTION,
  AWS_REGION_OPTION,
  AWS_SECRET_ACCESS_KEY_OPTION,
  AWS_SERVICE_OPTION,
  AWS_SESSION_TOKEN_OPTION,
  get_opensearch_opts_from_cli,
  OPENSEARCH_CERT_OPTION,
  OPENSEARCH_INSECURE_OPTION,
  OPENSEARCH_KEY_OPTION,
  OPENSEARCH_PASSWORD_OPTION,
  OPENSEARCH_URL_OPTION,
  OPENSEARCH_USERNAME_OPTION,
  OpenSearchHttpClient
} from '../OpenSearchHttpClient'
import * as process from 'node:process'
import ChapterReader from 'tester/ChapterReader'
import SupplementalChapterEvaluator from 'tester/SupplementalChapterEvaluator'
import StoryValidator from 'tester/StoryValidator'
import StoryEvaluator from 'tester/StoryEvaluator'
import { ConsoleResultLogger } from 'tester/ResultLogger'
import TestRunner from 'tester/TestRunner'
import ExportChapters from './ExportChapters'
import { PostmanManager } from './PostmanManager'

const command = new Command()
  .description('Export the test stories as a Postman collection.')
  .addOption(new Option('--tests, --tests-path <path>', 'path to the root folder of the tests').default('./tests/default'))
  .allowExcessArguments(false)
  .parse()

const opts = command.opts()
const logger = new Logger(LogLevel.warn)
const postman_manager = new PostmanManager()
const runner = new ExportChapters(logger, postman_manager)

runner.run(opts.testsPath)
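With the package.json script added above, the exporter should be runnable as npm run export:postman -- --tests-path ./tests/default (the --tests-path option defaults to ./tests/default). The resulting collection is written to ./postman_collection.json, the default path in PostmanManager, and can then be imported into Postman.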