diff --git a/CHANGELOG.md b/CHANGELOG.md index 6af42ab2b..1e4c64c16 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ## [Unreleased] ### Added - Added deprecation warnings for Index Template APIs ([654](https://github.com/opensearch-project/opensearch-js/pull/645)) +- Added `http` functions: `connect`, `delete`, `get`, `head`, `options`, `patch`, `post`, `put`, and `trace` ([#649](https://github.com/opensearch-project/opensearch-js/pull/649)) ### Dependencies - Bumps `@aws-sdk/types` from 3.418.0 to 3.451.0 - Bumps `@types/node` from 20.6.5 to 20.9.0 @@ -18,6 +19,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Changed ### Deprecated ### Removed +- Removed fixture lines that git-secrets wrongly flagged as passwords/secrets ([654](https://github.com/opensearch-project/opensearch-js/pull/645)) ### Fixed ### Security @@ -196,4 +198,4 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Fixed -- Fix mutability of connection headers ([#291](https://github.com/opensearch-project/opensearch-js/issues/291)) \ No newline at end of file +- Fix mutability of connection headers ([#291](https://github.com/opensearch-project/opensearch-js/issues/291)) diff --git a/api/api/http.js b/api/api/http.js new file mode 100644 index 000000000..4ed677761 --- /dev/null +++ b/api/api/http.js @@ -0,0 +1,213 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + */ + +'use strict'; + +/** @namespace API-HTTP */ + +const { normalizeArguments, kConfigurationError } = require('../utils'); + +function HttpApi(transport, ConfigurationError) { + this.transport = transport; + this[kConfigurationError] = ConfigurationError; +} + +HttpApi.prototype.request = function (method, params, options, callback) { + [params, options, callback] = normalizeArguments(params, options, callback); + if (Array.isArray(params.body)) { + const { path, querystring, headers, body } = params; + params = { path, querystring, headers, bulkBody: body }; + } + options = options || {}; + options.headers = params.headers || options.headers; + return this.transport.request({ ...params, method }, options, callback); +}; + +/** + * Make a customized CONNECT request. + * + * @memberOf API-HTTP + * + * @param {Object} params + * @param {Object} params.path - The URL of the request + * @param {Object} [params.querystring] - The querystring parameters + * @param {Object} [params.headers] - The request headers + * @param {Object} [params.body] - The request body + * + * @param {Object} [options] - Options for {@link Transport#request} + * @param {function} [callback] - Callback that handles errors and response + * + * @returns {{abort: function(), then: function(), catch: function()}|Promise|*} + */ +HttpApi.prototype.connect = function (params, options, callback) { + return this.request('CONNECT', params, options, callback); +}; + +/** + * Make a customized DELETE request. 
+ * + * @memberOf API-HTTP + * + * @param {Object} params + * @param {Object} params.path - The URL of the request + * @param {Object} [params.querystring] - The querystring parameters + * @param {Object} [params.headers] - The request headers + * @param {Object} [params.body] - The request body + * + * @param {Object} [options] - Options for {@link Transport#request} + * @param {function} [callback] - Callback that handles errors and response + * + * @returns {{abort: function(), then: function(), catch: function()}|Promise|*} + */ +HttpApi.prototype.delete = function (params, options, callback) { + return this.request('DELETE', params, options, callback); +}; + +/** + * Make a customized GET request. + * + * @memberOf API-HTTP + * + * @param {Object} params + * @param {Object} params.path - The URL of the request + * @param {Object} [params.querystring] - The querystring parameters + * @param {Object} [params.headers] - The request headers + * @param {Object} [params.body] - The request body + * + * @param {Object} [options] - Options for {@link Transport#request} + * @param {function} [callback] - Callback that handles errors and response + * + * @returns {{abort: function(), then: function(), catch: function()}|Promise|*} + */ +HttpApi.prototype.get = function (params, options, callback) { + return this.request('GET', params, options, callback); +}; + +/** + * Make a customized HEAD request. + * + * @memberOf API-HTTP + * + * @param {Object} params + * @param {Object} params.path - The URL of the request + * @param {Object} [params.querystring] - The querystring parameters + * @param {Object} [params.headers] - The request headers + * @param {Object} [params.body] - The request body + * + * @param {Object} [options] - Options for {@link Transport#request} + * @param {function} [callback] - Callback that handles errors and response + * + * @returns {{abort: function(), then: function(), catch: function()}|Promise|*} + */ +HttpApi.prototype.head = function (params, options, callback) { + return this.request('HEAD', params, options, callback); +}; + +/** + * Make a customized OPTIONS request. + * + * @memberOf API-HTTP + * + * @param {Object} params + * @param {Object} params.path - The URL of the request + * @param {Object} [params.querystring] - The querystring parameters + * @param {Object} [params.headers] - The request headers + * @param {Object} [params.body] - The request body + * + * @param {Object} [options] - Options for {@link Transport#request} + * @param {function} [callback] - Callback that handles errors and response + * + * @returns {{abort: function(), then: function(), catch: function()}|Promise|*} + */ +HttpApi.prototype.options = function (params, options, callback) { + return this.request('OPTIONS', params, options, callback); +}; + +/** + * Make a customized PATCH request. 
+ * + * @memberOf API-HTTP + * + * @param {Object} params + * @param {Object} params.path - The URL of the request + * @param {Object} [params.querystring] - The querystring parameters + * @param {Object} [params.headers] - The request headers + * @param {Object} [params.body] - The request body + * + * @param {Object} [options] - Options for {@link Transport#request} + * @param {function} [callback] - Callback that handles errors and response + * + * @returns {{abort: function(), then: function(), catch: function()}|Promise|*} + */ +HttpApi.prototype.patch = function (params, options, callback) { + return this.request('PATCH', params, options, callback); +}; + +/** + * Make a customized POST request. + * + * @memberOf API-HTTP + * + * @param {Object} params + * @param {Object} params.path - The URL of the request + * @param {Object} [params.querystring] - The querystring parameters + * @param {Object} [params.headers] - The request headers + * @param {Object} [params.body] - The request body + * + * @param {Object} [options] - Options for {@link Transport#request} + * @param {function} [callback] - Callback that handles errors and response + * + * @returns {{abort: function(), then: function(), catch: function()}|Promise|*} + */ +HttpApi.prototype.post = function (params, options, callback) { + return this.request('POST', params, options, callback); +}; + +/** + * Make a customized PUT request. + * + * @memberOf API-HTTP + * + * @param {Object} params + * @param {Object} params.path - The URL of the request + * @param {Object} [params.querystring] - The querystring parameters + * @param {Object} [params.headers] - The request headers + * @param {Object} [params.body] - The request body + * + * @param {Object} [options] - Options for {@link Transport#request} + * @param {function} [callback] - Callback that handles errors and response + * + * @returns {{abort: function(), then: function(), catch: function()}|Promise|*} + */ +HttpApi.prototype.put = function (params, options, callback) { + return this.request('PUT', params, options, callback); +}; + +/** + * Make a customized TRACE request. 
+ *
+ * @memberOf API-HTTP
+ *
+ * @param {Object} params
+ * @param {Object} params.path - The URL of the request
+ * @param {Object} [params.querystring] - The querystring parameters
+ * @param {Object} [params.headers] - The request headers
+ * @param {Object} [params.body] - The request body
+ *
+ * @param {Object} [options] - Options for {@link Transport#request}
+ * @param {function} [callback] - Callback that handles errors and response
+ *
+ * @returns {{abort: function(), then: function(), catch: function()}|Promise|*}
+ */
+HttpApi.prototype.trace = function (params, options, callback) {
+  return this.request('TRACE', params, options, callback);
+};
+
+module.exports = HttpApi;
diff --git a/api/index.js b/api/index.js
index d98e3a280..63bed5eec 100644
--- a/api/index.js
+++ b/api/index.js
@@ -54,6 +54,7 @@ const getScriptApi = require('./api/get_script');
 const getScriptContextApi = require('./api/get_script_context');
 const getScriptLanguagesApi = require('./api/get_script_languages');
 const getSourceApi = require('./api/get_source');
+const HttpApi = require('./api/http');
 const indexApi = require('./api/index');
 const IndicesApi = require('./api/indices');
 const infoApi = require('./api/info');
@@ -89,6 +90,7 @@ const kCat = Symbol('Cat');
 const kCluster = Symbol('Cluster');
 const kDanglingIndices = Symbol('DanglingIndices');
 const kFeatures = Symbol('Features');
+const kHttp = Symbol('Http');
 const kIndices = Symbol('Indices');
 const kIngest = Symbol('Ingest');
 const kNodes = Symbol('Nodes');
@@ -103,6 +105,7 @@ function OpenSearchAPI(opts) {
   this[kCluster] = null;
   this[kDanglingIndices] = null;
   this[kFeatures] = null;
+  this[kHttp] = null;
   this[kIndices] = null;
   this[kIngest] = null;
   this[kNodes] = null;
@@ -264,6 +267,14 @@ Object.defineProperties(OpenSearchAPI.prototype, {
       return this.getSource;
     },
   },
+  http: {
+    get() {
+      if (this[kHttp] === null) {
+        this[kHttp] = new HttpApi(this.transport, this[kConfigurationError]);
+      }
+      return this[kHttp];
+    },
+  },
   indices: {
     get() {
       if (this[kIndices] === null) {
diff --git a/api/requestParams.d.ts b/api/requestParams.d.ts
index 3afc4fc57..7bc01bc07 100644
--- a/api/requestParams.d.ts
+++ b/api/requestParams.d.ts
@@ -750,6 +750,13 @@ export interface GetSource extends Generic {
   version_type?: 'internal' | 'external' | 'external_gte' | 'force';
 }
 
+export interface HttpParams extends Generic {
+  path: string;
+  querystring?: Record<string, any>;
+  headers?: Record<string, any>;
+  body?: Record<string, any> | string | Array<Record<string, any>>;
+}
+
 export interface Index extends Generic {
   id?: string;
   index: string;
diff --git a/guides/json.md b/guides/json.md
new file mode 100644
index 000000000..cfd5a1731
--- /dev/null
+++ b/guides/json.md
@@ -0,0 +1,60 @@
+# Making raw HTTP requests
+
+The OpenSearch client implements many high-level REST DSLs that invoke OpenSearch APIs. However, you may find yourself in a situation that requires you to invoke an API that is not supported by the client. In this case, you can use raw HTTP requests to invoke any OpenSearch API. This guide shows you different ways to make custom API calls using the OpenSearch JS client.
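+
+For example, a raw request is just a `path` plus optional `querystring`, `headers`, and `body` parameters. The short sketch below (it assumes a `client` configured as shown in the Setup section that follows) lists the cluster's indices through `http.get`:
+
+```javascript
+// List all indices; `format: 'json'` asks the _cat API for JSON instead of plain text.
+const response = await client.http.get({
+  path: '/_cat/indices',
+  querystring: { format: 'json' },
+});
+console.log(response.body);
+```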
+
+## Setup
+First, create a client instance to interact with an OpenSearch cluster that uses the default security settings:
+
+```javascript
+const client = new Client({
+  ssl: {
+    rejectUnauthorized: false,
+  },
+  node: 'https://localhost:9200',
+  auth: {
+    username: 'admin',
+    password: 'admin',
+  },
+});
+```
+
+## The http Namespace
+The `http` namespace provides a function for each HTTP method: `connect`, `delete`, `get`, `head`, `options`, `patch`, `post`, `put`, and `trace`.
+
+Let's create an index using `http.put`:
+```javascript
+await client.http.put({
+  path: '/movies',
+  body: {
+    mappings: {
+      properties: {
+        title: { type: 'text' },
+        director: { type: 'text' },
+        year: { type: 'integer' },
+      },
+    },
+  },
+});
+```
+
+Make sure that the index was created successfully using `http.head`:
+
+```javascript
+await client.http.head({ path: '/movies' });
+```
+
+Add two documents to the index using `http.post` and the bulk endpoint:
+```javascript
+const body = [
+  { index: { _index: 'movies' } },
+  { title: 'The quick brown fox' },
+  { index: { _index: 'movies' } },
+  { title: 'The quick brown fox jumps over the lazy dog' },
+];
+await client.http.post({ path: '/_bulk', body });
+```
+
+Delete the index using `http.delete`:
+```javascript
+await client.http.delete({ path: '/movies' });
+```
diff --git a/index.d.ts b/index.d.ts
index f80d8f22d..2316ff203 100644
--- a/index.d.ts
+++ b/index.d.ts
@@ -2073,6 +2073,53 @@ declare class Client {
     options: TransportRequestOptions,
     callback: callbackFn<TResponse, TContext>
   ): TransportRequestCallback;
+  http: {
+    connect<TResponse = Record<string, any>, TContext = Context>(
+      params: RequestParams.HttpParams,
+      options?: TransportRequestOptions,
+      callback?: callbackFn<TResponse, TContext>
+    ): TransportRequestPromise<ApiResponse<TResponse, TContext>>;
+    delete<TResponse = Record<string, any>, TContext = Context>(
+      params: RequestParams.HttpParams,
+      options?: TransportRequestOptions,
+      callback?: callbackFn<TResponse, TContext>
+    ): TransportRequestPromise<ApiResponse<TResponse, TContext>>;
+    get<TResponse = Record<string, any>, TContext = Context>(
+      params: RequestParams.HttpParams,
+      options?: TransportRequestOptions,
+      callback?: callbackFn<TResponse, TContext>
+    ): TransportRequestPromise<ApiResponse<TResponse, TContext>>;
+    head<TResponse = Record<string, any>, TContext = Context>(
+      params: RequestParams.HttpParams,
+      options?: TransportRequestOptions,
+      callback?: callbackFn<TResponse, TContext>
+    ): TransportRequestPromise<ApiResponse<TResponse, TContext>>;
+    options<TResponse = Record<string, any>, TContext = Context>(
+      params: RequestParams.HttpParams,
+      options?: TransportRequestOptions,
+      callback?: callbackFn<TResponse, TContext>
+    ): TransportRequestPromise<ApiResponse<TResponse, TContext>>;
+    patch<TResponse = Record<string, any>, TContext = Context>(
+      params: RequestParams.HttpParams,
+      options?: TransportRequestOptions,
+      callback?: callbackFn<TResponse, TContext>
+    ): TransportRequestPromise<ApiResponse<TResponse, TContext>>;
+    post<TResponse = Record<string, any>, TContext = Context>(
+      params: RequestParams.HttpParams,
+      options?: TransportRequestOptions,
+      callback?: callbackFn<TResponse, TContext>
+    ): TransportRequestPromise<ApiResponse<TResponse, TContext>>;
+    put<TResponse = Record<string, any>, TContext = Context>(
+      params: RequestParams.HttpParams,
+      options?: TransportRequestOptions,
+      callback?: callbackFn<TResponse, TContext>
+    ): TransportRequestPromise<ApiResponse<TResponse, TContext>>;
+    trace<TResponse = Record<string, any>, TContext = Context>(
+      params: RequestParams.HttpParams,
+      options?: TransportRequestOptions,
+      callback?: callbackFn<TResponse, TContext>
+    ): TransportRequestPromise<ApiResponse<TResponse, TContext>>;
+  };
   index<
     TResponse = Record<string, any>,
     TRequestBody extends RequestBody = Record<string, any>,
diff --git a/samples/README.md b/samples/README.md
new file mode 100644
index 000000000..dbf72cb52
--- /dev/null
+++ b/samples/README.md
@@ -0,0 +1,14 @@
+# OpenSearch JavaScript Samples
+
+Most samples can be run against OpenSearch running locally in Docker:
+ +``` +docker pull opensearchproject/opensearch:latest +docker run -d -p 9200:9200 -p 9600:9600 -e "discovery.type=single-node" opensearchproject/opensearch:latest +``` + +## Run Samples + +``` +node json.js +``` diff --git a/samples/json.js b/samples/json.js new file mode 100644 index 000000000..85a5a474a --- /dev/null +++ b/samples/json.js @@ -0,0 +1,75 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + */ + +// For more information on how to make custom HTTP requests, check out: ./guides/json.md + +const { Client } = require('@opensearch-project/opensearch'); + +// Instantiate a client with basic auth +const client = new Client({ + ssl: { + rejectUnauthorized: false, + }, + node: 'https://localhost:9200', + auth: { + username: 'admin', + password: 'admin', + }, +}); + +let response; + +const print_response = (title) => { + console.log(`\n${title}:`); + console.log(response.body); +}; + +const start = async () => { + // Check the cluster health + response = await client.http.get({ path: '/_cluster/health' }); + print_response('Get Cluster Health'); + + // Create the movies index + response = await client.http.put({ + path: '/movies', + body: { + mappings: { + properties: { + title: { type: 'text' }, + director: { type: 'text' }, + year: { type: 'integer' }, + }, + }, + }, + }); + print_response('Create `movies` Index'); + + // Make sure the index is created before indexing documents + response = await client.http.head({ path: '/movies' }); + print_response('Check If Index Exists'); + + // Add 2 documents to the index using the bulk endpoint + response = await client.http.post({ + path: '/_bulk', + body: [ + { index: { _index: 'movies' } }, + { title: 'The Godfather', director: 'Francis Ford Coppola', year: 1972 }, + { index: { _index: 'movies' } }, + { title: 'The Godfather: Part II', director: 'Francis Ford Coppola', year: 1974 }, + ], + }); + print_response('Add 2 documents to the `movies` Index using the Bulk API'); + + // Delete the movies index + response = await client.http.delete({ path: '/movies' }); + print_response('Delete `movies` Index'); +}; + +start(); diff --git a/test/fixtures/stackoverflow.ndjson b/test/fixtures/stackoverflow.ndjson index 428c46cb9..164a4987d 100644 --- a/test/fixtures/stackoverflow.ndjson +++ b/test/fixtures/stackoverflow.ndjson @@ -1045,7 +1045,6 @@ {"id":"1135734","title":"It's possible to share a cookie between 'some' subdomains?","body":"\u003cp\u003eI've been reading some posts about web performance, one of the points is to\u003cbr\u003e\nserve static content from a cookie-free domain, my question is:\u003c/p\u003e\n\n\u003cp\u003eCan I share cookies between, let's say example.com and www.example.com, while excluding static1.example.com, static2.example.com, etc?\u003c/p\u003e\n\n\u003cp\u003eOr do I need to set a different top level domain?\u003c/p\u003e\n\n\u003cp\u003eI know (or I think) that I could set the domain of the cookie to '.example.com', but\u003cbr\u003e\ncorrect me if I'm wrong this shares the cookies across \u003cem\u003eall\u003c/em\u003e sub-domains.\u003c/p\u003e","accepted_answer_id":"1135777","answer_count":"3","comment_count":"0","creation_date":"2009-07-16 06:23:51.95 UTC","favorite_count":"6","last_activity_date":"2011-02-23 23:33:38.17 UTC","last_edit_date":"2009-07-16 06:57:45.563 
UTC","last_editor_display_name":"","last_editor_user_id":"28169","owner_display_name":"","owner_user_id":"61327","post_type_id":"1","score":"22","tags":"http|cookies","view_count":"14579"} {"id":"34306941","title":"Set font face and size in Scala TextArea","body":"\u003cp\u003eAn old thread (2009) mentioned the following:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eval area = new TextArea {\n font = new Font(\"Arial\", 0, 8)\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHowever that code has no effect on current version of \u003ccode\u003escala.swing\u003c/code\u003e . I also tried\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003earea.peer.setFont(new Font(\"Arial\", 0,8).\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThat also had no effect. So what is the correct way?\u003c/p\u003e","accepted_answer_id":"34307048","answer_count":"1","comment_count":"0","creation_date":"2015-12-16 08:17:18.75 UTC","last_activity_date":"2015-12-16 08:24:03.85 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1056563","post_type_id":"1","score":"1","tags":"swing|scala","view_count":"157"} {"id":"12377119","title":"android tcp client file receive","body":"\u003cp\u003eI am trying to send a file (png to be specific) over sockets from python server to android client. I know that my python server is sending the data, I just can't figure out how to receive the data on the android side. Here is what the code looks like to receive the file.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e String path = Environment.getExternalStorageDirectory().toString() +\"/tmp/test.png\"; \n try {\n socket = new Socket(\"192.168.1.129\", 29877);\n\n is = socket.getInputStream();\n out = new FileOutputStream(path);\n byte[] temp = new byte[1024];\n for(int c = is.read(temp,0,1024); c \u0026gt; 0; c = is.read(temp,0,1024)){\n out.write(temp,0,c);\n Log.d(\"debug tag\", out.toString());\n }\n Log.d(\"debug tag\", temp.toString());\n\n Bitmap myBitmap = BitmapFactory.decodeByteArray(temp, 0, temp.length);\n imageView.setImageBitmap(myBitmap);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThanks for any advice.\u003c/p\u003e","accepted_answer_id":"12377248","answer_count":"1","comment_count":"0","creation_date":"2012-09-11 20:07:44.913 UTC","favorite_count":"1","last_activity_date":"2012-09-11 20:16:16.523 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1661396","post_type_id":"1","score":"1","tags":"java|android|sockets","view_count":"715"} -{"id":"19634066","title":"Duplicate commit on GitHub - Using SmartGit","body":"\u003cp\u003eI use SmartGit o control my repo. When I commited and pushed my project, I had a problem and closed the SmartGit, after that I realized that the operation didn´t finish, then, I did it again, but the first commit was sent. Now I have a duplicate commit on GitHub and cannot do the pull request.\u003c/p\u003e\n\n\u003cp\u003eCould you pls help me with this issue.\u003c/p\u003e\n\n\u003cp\u003ePls, keep in mind that I using SmartGi, so, no command line here! =)\u003c/p\u003e\n\n\u003cp\u003eThanks!\u003c/p\u003e\n\n\u003cp\u003ePls, don´t care about the portuguese. I´m from Brazil! 
=)\u003c/p\u003e\n\n\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/OW5aK.png\" alt=\"enter image description here\"\u003e\u003c/p\u003e","answer_count":"0","comment_count":"4","creation_date":"2013-10-28 11:49:30.723 UTC","last_activity_date":"2013-10-28 18:33:40.877 UTC","last_edit_date":"2013-10-28 18:33:40.877 UTC","last_editor_display_name":"","last_editor_user_id":"1669797","owner_display_name":"","owner_user_id":"1669797","post_type_id":"1","score":"0","tags":"git|github|commit|git-commit|smartgit","view_count":"286"} {"id":"46451410","title":"How to automate open office online apps","body":"\u003cp\u003eI need to automate excel online functionality that is being used by a web app. How can I automate excel online (open office app integrated with my web app)? I'm using selenium for my web app..\u003c/p\u003e","answer_count":"0","comment_count":"2","creation_date":"2017-09-27 15:10:33.983 UTC","favorite_count":"1","last_activity_date":"2017-09-27 15:10:33.983 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"5294234","post_type_id":"1","score":"0","tags":"c#|selenium-webdriver|ui-automation|openoffice-api","view_count":"7"} {"id":"1906123","title":"Tomcat 6.0 does not allow generic ArrayList to be used for useBean","body":"\u003cp\u003eIn a jsp file I have this declaration:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;jsp:useBean scope=\"request\" id=\"products\" class=\"java.util.ArrayList\u0026lt;sgt.supermarket.entity.Product\u0026gt;\"/\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThis declaration works fine with GlassFish 2.1, however, when I switch to Tomcat 6.0, exceptions is thrown:\u003c/p\u003e\n\n\u003cp\u003eThe value for the useBean class attribute java.util.ArrayList is invalid.\u003c/p\u003e\n\n\u003cp\u003eIs there any library missed for Tomcat that makes it behave different from Glass Fish 2.1?\u003c/p\u003e","accepted_answer_id":"1908571","answer_count":"2","comment_count":"3","creation_date":"2009-12-15 09:05:52.603 UTC","last_activity_date":"2013-02-12 12:25:21.987 UTC","last_edit_date":"2009-12-15 10:13:28.243 UTC","last_editor_display_name":"","last_editor_user_id":"82804","owner_display_name":"","owner_user_id":"169209","post_type_id":"1","score":"3","tags":"jsp|tomcat|generic-list|usebean","view_count":"3375"} {"id":"32116266","title":"R: aggregate several colums at once","body":"\u003cp\u003eI am new to R and this is the first time I use stackoverflow so excuse me if I ask for something obvious or my question is not clear enough.\u003c/p\u003e\n\n\u003cp\u003eI am working with the following data set\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edim(storm)\n[1] 883602 39\n\n\n names(storm)\n [1] \"STATE__\" \"BGN_DATE\" \"BGN_TIME\" \"TIME_ZONE\" \"COUNTY\"\n [6] \"COUNTYNAME\" \"STATE\" \"EVTYPE\" \"BGN_RANGE\" \"BGN_AZI\"\n [11] \"BGN_LOCATI\" \"END_DATE\" \"END_TIME\" \"COUNTY_END\" \"COUNTYENDN\"\n [16] \"END_RANGE\" \"END_AZI\" \"END_LOCATI\" \"LENGTH\" \"WIDTH\"\n [21] \"F\" \"MAG\" \"FATALITIES\" \"INJURIES\" \"PROPDMG\"\n [26] \"PROPDMGEXP\" \"CROPDMG\" \"CROPDMGEXP\" \"WFO\" \"STATEOFFIC\"\n [31] \"ZONENAMES\" \"LATITUDE\" \"LONGITUDE\" \"LATITUDE_E\" \"LONGITUDE_\"\n [36] \"REMARKS\" \"REFNUM\" \"PROPTOTAL\" \"CROPTOTAL\"\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI am interested to use \u003ccode\u003eEVTYPE\u003c/code\u003e (a factor variable) to aggregate 4 other numerical variables (\u003ccode\u003ePROPTOTAL, CROPTOTAL, FATALITIES, 
INJURIES\u003c/code\u003e)\u003c/p\u003e\n\n\u003cp\u003eThe factor variable as 950 levels:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elength(unique(storm$EVTYPE))\n[1] 950\n\n\nclass(storm$EVTYPE)\n[1] \"factor\"\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSo I would expect an aggregated data frame with 950 observations and 5 variables when I run the following command:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e storm_tidy\u0026lt;-\naggregate(cbind(PROPTOTAL,CROPTOTAL,FATALITIES,INJURIES)~EVTYPE,FUN=sum,data=storm)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHowever I get only \u003ccode\u003e155\u003c/code\u003e rows\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edim(storm_tidy)\n[1] 155 5\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI am using the aggregate with several columns following the help page of the function (use cbind): \u003c/p\u003e\n\n\u003cp\u003eFormulas, one ~ one, one ~ many, \u003cstrong\u003emany ~ one\u003c/strong\u003e, and many ~ many:\u003cbr\u003e\n\u003ccode\u003eaggregate(weight ~ feed, data = chickwts, mean)\naggregate(breaks ~ wool + tension, data = warpbreaks, mean)\n**aggregate(cbind(Ozone, Temp) ~ Month, data = airquality, mean)**\naggregate(cbind(ncases, ncontrols) ~ alcgp + tobgp, data = esoph, sum)\u003c/code\u003e\u003c/p\u003e\n\n\u003cp\u003eI am loosing information at some point:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003esum(storm$PROPTOTAL)\n[1] 424769204805\n\nsum(storm_tidy$PROPTOTAL)\n[1] 228366211339\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHowever, if I aggregate column by column it seems to work fine:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003estorm_tidy \u0026lt;- aggregate(PROPTOTAL~EVTYPE,FUN = sum, data = storm)\ndim(storm_tidy)\n[1] 950 2\n\n\n\n\n\nsum(storm_tidy$PROPTOTAL)\n[1] 424769204805\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhat am I missing? 
What am I doing wrong?\u003c/p\u003e\n\n\u003cp\u003eThanks.\u003c/p\u003e","accepted_answer_id":"32116458","answer_count":"1","comment_count":"0","creation_date":"2015-08-20 10:50:40.28 UTC","last_activity_date":"2015-08-20 11:00:21.507 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"5230862","post_type_id":"1","score":"1","tags":"r|aggregate|many-to-one|cbind","view_count":"134"} @@ -2446,7 +2445,6 @@ {"id":"32150369","title":"java - Calling a PL/SQL Stored Procedure With Arrays","body":"\u003cp\u003eI have a PL/SQL stored procedure similar to the following that I need to call in Java:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eTYPE AssocArrayVarchar20_t is table of VARCHAR2(20) index by BINARY_INTEGER\nTYPE AssocArrayVarchar4100_t is table of VARCHAR2(4100) index by BINARY_INTEGER\nTYPE AssocArrayNumber_t is table of NUMBER index by BINARY_INTEGER\n\nPROCEDURE DATA_WRITE( I_NAME IN AssocArrayVarchar20_t,\n I_NUM IN AssocArrayNumber_t,\n I_NOTE IN AssocArrayVarchar4100_t)\n // Do Stuff\nEND DATA_WRITE;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI tried the following in Java:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eCallableStatement stmt = conn.prepareCall(\"begin DATA_WRITE(?, ?, ?); end;\");\nstmt.setArray(0, conn.createArrayOf(\"VARCHAR\", new String[]{ name }));\nstmt.setArray(1, conn.createArrayOf(\"NUMBER\", new Integer[]{ num }));\nstmt.setArray(2, conn.createArrayOf(\"VARCHAR2\", new String[]{ notes }));\nstmet.execute;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhen I do this I get a \u003ccode\u003eSQLException: Unsupported Feature\"\u003c/code\u003e on the \u003ccode\u003ecreateArrayOf()\u003c/code\u003e method. I've also tried \u003ccode\u003esetObject()\u003c/code\u003e and inside of \u003ccode\u003ecreateArrayOf\u003c/code\u003e: \u003ccode\u003e\"varchar\"\u003c/code\u003e, \u003ccode\u003e\"AssocArrayVarchar20_t\"\u003c/code\u003e, \u003ccode\u003e\"varchar_t\"\u003c/code\u003e. Nothing seems to change that outcome.\u003c/p\u003e\n\n\u003cp\u003eDoes anyone know what I'm doing wrong? I can't seem to get it to work.\u003c/p\u003e\n\n\u003cp\u003eUPDATE: Success!\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eOracleCallableStatement pStmt = (OracleCallableStatement) conn.prepareCall(\"begin DATA_WRITE(?, ?, ?); end;\");\npStmt.setPlsqlIndexTable(1, new String[]{ name }, 1, 1, OracleTypes.VARCHAR, 20);\npStmt.setPlsqlIndexTable(2, new Integer[]{ num }, 1, 1, OracleTypes.NUMBER, 0);\npStmt.setPlsqlIndexTable(3, new String[]{ notes }, 1, 1, OracleTypes.VARCHAR, 4100);\npStmt.execute();\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"32150569","answer_count":"2","comment_count":"1","creation_date":"2015-08-21 22:50:50.453 UTC","last_activity_date":"2016-01-19 13:47:49.537 UTC","last_edit_date":"2015-08-24 18:43:13.89 UTC","last_editor_display_name":"","last_editor_user_id":"3499973","owner_display_name":"","owner_user_id":"3499973","post_type_id":"1","score":"2","tags":"java|oracle|jdbc|plsql|associative-array","view_count":"2635"} {"id":"6526512","title":"Hibernate HQL Query Subselects or Joins","body":"\u003cp\u003eIn a custom blog platform with User(s), Post(s), and Message(s).\nI know how to do an HQL query with distinct Users and their Post count. Also distinct Users and their Message count.\u003c/p\u003e\n\n\u003cp\u003eBut if I combine the two with \"inner join\" technique I get the same count for Posts and Messages. I understand why this is happening because of the joins. 
How could I do sub-selects in the HQL to get the two counts as separately but as one trip to the database?\u003c/p\u003e\n\n\u003cp\u003eHere is an example of the last HQL query I tried.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e select u.username, count(m), count(p) from User as u \n inner join u.Messages as m\n inner join u.Posts as p\n group by u.id \n order by count(m) desc\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eNote: I will be changing the order by based off of an option on a web page.\u003c/p\u003e","accepted_answer_id":"6526783","answer_count":"1","comment_count":"2","creation_date":"2011-06-29 20:00:34.553 UTC","last_activity_date":"2012-09-05 11:12:57.663 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"36590","post_type_id":"1","score":"0","tags":"java|hibernate|hql","view_count":"3003"} {"id":"31998654","title":"Access iframe elements after refresh","body":"\u003cp\u003eI am trying to extract data from a school peoplesoft portal page with an iframe element. \nOn the Homepage, the page source has the iframe element. When I click on a button to leave that page(i.e. search classes) the page source stays the same but the elements on the page are different(observed using inspect element). \nI can access elements on the homepage, but not any of the others because I don't think I take into account the reloading of data/injection of other elements in the iframe. How can I access the elements post reload?\nthis is the snippet i'm using to access elements:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003evar iframe = document.getElementById(\"id of iframe\");\nvar innerDoc = iframe.contentDocument || iframe.contentWindow.document;\nvar element = innerDoc.getElementById(\"name of id desired in iframe\");\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"31998729","answer_count":"1","comment_count":"0","creation_date":"2015-08-13 21:22:01.91 UTC","last_activity_date":"2015-08-14 12:37:47.487 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"5222488","post_type_id":"1","score":"0","tags":"javascript|dom|iframe|peoplesoft","view_count":"111"} -{"id":"46382505","title":"Modeling optional filter params in react-apollo","body":"\u003cp\u003eI am using \u003ca href=\"https://github.com/apollographql/react-apollo\" rel=\"nofollow noreferrer\"\u003e\u003ccode\u003ereact-apollo\u003c/code\u003e\u003c/a\u003e to access a \u003ccode\u003egraphql\u003c/code\u003e in a web app.\u003c/p\u003e\n\n\u003cp\u003eI have a query that looks like this that allows me to filter a schedule of games:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003equery($pagination:Int!, $divisionId:ID, $teamId:ID, $startDate:DateTime!, $endDate:DateTime!\n){\n games: allGames (\n orderBy: date_ASC,\n first: $pagination,\n filter: {\n date_gte: $startDate,\n date_lte: $endDate,\n division: {id: $divisionId},\n OR: [\n {homeTeam: {id: $teamId} },\n {awayTeam: {id: $teamId} },\n ]\n }\n){\n id\n .... more fields\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe \u003ccode\u003estartDate\u003c/code\u003e and \u003ccode\u003eendDate\u003c/code\u003e variables are required for every request, but the \u003ccode\u003eteamId\u003c/code\u003e and \u003ccode\u003edivisionId\u003c/code\u003e are not. 
I would like to display all \u003ccode\u003eteamIds\u003c/code\u003e and \u003ccode\u003edivisionIds\u003c/code\u003e in the initial request, and allow the use to filter / drill down as needed.\u003c/p\u003e\n\n\u003cp\u003eI was looking to see if I could add a wildcard (\u003ccode\u003e*\u003c/code\u003e) or something of that sort, but I am not sure if it's possible. With graphql mutations, a \u003ccode\u003enull\u003c/code\u003e value for a variable allows me to write a single mutation that is applicable to multiple use cases (partial updates \u0026amp; creates), but I cannot figure out how to achieve similar functionality with queries.\u003c/p\u003e\n\n\u003cp\u003eDo I need to call a different query for each of the filter scenarios (one for no \u003ccode\u003edivisionId\u003c/code\u003e \u0026amp; \u003ccode\u003eteamId\u003c/code\u003e, one for just divisionId, one for just \u003ccode\u003eteamId\u003c/code\u003e, and one for both \u003ccode\u003edivisionId\u003c/code\u003e and \u003ccode\u003eteamId\u003c/code\u003e? Are \u003ca href=\"http://dev.apollodata.com/react/fragments.html\" rel=\"nofollow noreferrer\"\u003efragments\u003c/a\u003e something that would help me achieve this with less overhead (because the last sentence makes the process seem a bit too cumbersome / not DRY).\u003c/p\u003e\n\n\u003cp\u003eOr do i switch \u003ccode\u003edivision\u003c/code\u003e and \u003ccode\u003eid\u003c/code\u003e to \u003ccode\u003edivision_in\u003c/code\u003e and \u003ccode\u003eid_in\u003c/code\u003e and pass them (somewhat large) arrays with all the possible values for \u003ccode\u003edivisionIds\u003c/code\u003e and \u003ccode\u003eteamdIds\u003c/code\u003e as initial props? \u003c/p\u003e","accepted_answer_id":"46386940","answer_count":"1","comment_count":"0","creation_date":"2017-09-23 17:41:03.073 UTC","last_activity_date":"2017-09-24 05:35:16.537 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"5045662","post_type_id":"1","score":"0","tags":"reactjs|graphql|apollo|react-apollo","view_count":"37"} {"id":"4109025","title":"why does datagridview disappear after refreshing twice?","body":"\u003cp\u003eon form load, my datagridview display a datatable. once the user clicks the delete button, it deletes one of the rows in the data source:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eprivate void btnDelete_Click(object sender, EventArgs e)\n {\n Int32 selectedRowCount =\n dataGridView1.Rows.GetRowCount(DataGridViewElementStates.Selected);\n if (selectedRowCount == 1)\n {\n\n\n qResults.Rows.RemoveAt(dataGridView1.SelectedRows[0].Index);\n chart1.DataSource = qResults;\n InitializeChart();\n\n dataGridView1.Columns.Clear();\n dataGridView1.DataBindings.Clear();\n dataGridView1.DataSource = qResults;\n\n\n }\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ethe first time i click this button, it works. it deletes the datapoint from the source and refreshes the chart. however, the second time i click it, it completely wipes out the datagridview and display NOTHING. 
\u003cstrong\u003ebut please note that the chart displays correctly\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eanyone know what am i donig wrong?\u003c/p\u003e","accepted_answer_id":"4109072","answer_count":"1","comment_count":"0","creation_date":"2010-11-05 18:42:24.693 UTC","last_activity_date":"2010-11-05 18:49:29.1 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"117700","post_type_id":"1","score":"1","tags":"c#|.net|data-binding","view_count":"878"} {"id":"7594973","title":"Language Oriented Programming Articles/Papers/Tutorials","body":"\u003cp\u003eThere is number of tools on the market like MPS, that promote Language Oriented Programming, which supposedly gives ability to programmer to design a (ideal?)language for task. This sounds interesting and boring at same time for some reason, so I was wondering if anyone know and can recommend articles regarding subject.\u003c/p\u003e\n\n\u003cp\u003eThanks\u003c/p\u003e","accepted_answer_id":"7595983","answer_count":"3","comment_count":"0","creation_date":"2011-09-29 09:08:18.597 UTC","favorite_count":"4","last_activity_date":"2011-10-30 04:07:43.077 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"462076","post_type_id":"1","score":"4","tags":"programming-languages|computer-science|dsl|mps","view_count":"267"} {"id":"37823718","title":"1 to many select and concat","body":"\u003cp\u003eI have two tables:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[customer]\nid\nname\n\n[customer_photo]\nid\ncustomer_id\nphoto\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI want to select all customers and their photos.\u003c/p\u003e\n\n\u003cp\u003eThis query is doing it, but getting only users who have at least one photo:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eSELECT customer.id, name, GROUP_CONCAT(cp.photo) as photos \nFROM customer \nJOIN customer_photo cp ON cp.customer_id = customer.id \nGROUP BY customer.id\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI want to get all users, even if they don't have a photo.\u003c/p\u003e","accepted_answer_id":"37823781","answer_count":"1","comment_count":"1","creation_date":"2016-06-14 23:22:54.613 UTC","last_activity_date":"2016-06-14 23:28:28.49 UTC","last_edit_date":"2016-06-14 23:27:51.063 UTC","last_editor_display_name":"","last_editor_user_id":"6466877","owner_display_name":"","owner_user_id":"6466877","post_type_id":"1","score":"0","tags":"mysql|sql","view_count":"20"} diff --git a/test/integration/helpers-secure/http.test.js b/test/integration/helpers-secure/http.test.js new file mode 100644 index 000000000..5d856b218 --- /dev/null +++ b/test/integration/helpers-secure/http.test.js @@ -0,0 +1,60 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ * + */ + +const { test } = require('tap'); +const { Client } = require('../../../'); + +const client = new Client({ + ssl: { + rejectUnauthorized: false, + }, + node: 'https://localhost:9200', + auth: { + username: 'admin', + password: 'admin', + }, +}); +const http = client.http; +const index = 'books'; + +test('Create an index using HTTP functions', async (t) => { + const response = await http.put({ + path: index, + body: { settings: { number_of_shards: 5, number_of_replicas: 2 } }, + }); + t.equal(response.body.acknowledged, true); + t.equal(response.body.index, index); +}); + +test('Check that the previously created index exists', async (t) => { + const response = await http.head({ path: index }); + t.equal(response.statusCode, 200); +}); + +test('Add documents using bulk endpoint', async (t) => { + const body = [ + { index: { _index: index } }, + { title: 'The quick brown fox' }, + { index: { _index: index } }, + { title: 'The quick brown fox jumps over the lazy dog' }, + ]; + const response = await http.post({ path: `_bulk`, body }); + t.equal(response.body.errors, false); +}); + +test('Close the previously created index', async (t) => { + const response = await http.post({ path: `${index}/_close` }); + t.equal(response.body.acknowledged, true); +}); + +test('Delete the previously created index', async (t) => { + const response = await http.delete({ path: index }); + t.equal(response.body.acknowledged, true); +}); diff --git a/test/integration/helpers/bulk.test.js b/test/integration/helpers/bulk.test.js index 8fd9ac825..7997cbb48 100644 --- a/test/integration/helpers/bulk.test.js +++ b/test/integration/helpers/bulk.test.js @@ -67,15 +67,15 @@ test('bulk index', async (t) => { t.type(result.time, 'number'); t.type(result.bytes, 'number'); t.match(result, { - total: 5000, - successful: 5000, + total: 4998, + successful: 4998, retry: 0, failed: 0, aborted: false, }); const { body } = await client.count({ index: INDEX }); - t.match(body, { count: 5000 }); + t.match(body, { count: 4998 }); }); test('bulk index with custom id', async (t) => { @@ -98,8 +98,8 @@ test('bulk index with custom id', async (t) => { t.type(result.time, 'number'); t.type(result.bytes, 'number'); t.match(result, { - total: 5000, - successful: 5000, + total: 4998, + successful: 4998, retry: 0, failed: 0, aborted: false, @@ -172,15 +172,15 @@ test('bulk delete', async (t) => { t.type(indexResult.time, 'number'); t.type(indexResult.bytes, 'number'); t.match(indexResult, { - total: 5000, - successful: 5000, + total: 4998, + successful: 4998, retry: 0, failed: 0, aborted: false, }); const { body: afterIndex } = await client.count({ index: INDEX }); - t.match(afterIndex, { count: 5000 }); + t.match(afterIndex, { count: 4998 }); const deleteResult = await client.helpers.bulk({ datasource: createReadStream(datasetPath).pipe(split(JSON.parse)), @@ -201,8 +201,8 @@ test('bulk delete', async (t) => { t.type(deleteResult.time, 'number'); t.type(deleteResult.bytes, 'number'); t.match(deleteResult, { - total: 5000, - successful: 5000, + total: 4998, + successful: 4998, retry: 0, failed: 0, aborted: false,