upgrade mongodb driver #2128

Draft · wants to merge 19 commits into base: master
3 changes: 3 additions & 0 deletions jest.config.js
@@ -57,6 +57,9 @@ module.exports = {
'/node_modules/(?!quick-lru)',
],
testTimeout: 8000,
setupFilesAfterEnv: [
'./jest-setup.js',
]
},
],
};
4,423 changes: 1,207 additions & 3,216 deletions package-lock.json

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions package.json
@@ -24,7 +24,7 @@
"clean:modules": "modclean -Pr -n ./modclean.config.js:*",
"clean:workers": "modclean -Pr -n ./modclean.config.js:* -D workers",
"clean:packages": "modclean -Pr -n ./modclean.config.js:* -D packages",
"test:api:e2e": "./node_modules/jest/bin/jest.js --colors --runInBand --detectOpenHandles -c src/api/e2e/jest.config.js src/api/e2e",
"test:api:e2e": "./node_modules/jest/bin/jest.js --colors --runInBand --force-exit --detectOpenHandles -c src/api/e2e/jest.config.js src/api/e2e",
"test:api:e2e:watch": "./node_modules/jest/bin/jest.js --colors --runInBand --detectOpenHandles -c src/api/e2e/jest.config.js src/api/e2e --watch",
"test:unit": "./node_modules/jest/bin/jest.js --force-exit --detectOpenHandles --colors",
"test:unit:watch": "./node_modules/jest/bin/jest.js --watch --colors",
@@ -141,7 +141,7 @@
"mime": "1.6.0",
"mime-types": "2.1.35",
"moment": "2.29.4",
"mongodb": "4.17.1",
"mongodb": "6.8.0",
"mongodb-restore": "1.6.2",
"mui-file-dropzone": "4.0.2",
"multistream": "^2.1.1",
@@ -236,7 +236,7 @@
},
"devDependencies": {
"@babel/eslint-parser": "^7.23.10",
"@shelf/jest-mongodb": "3.0.2",
"@shelf/jest-mongodb": "4.3.2",
"@testing-library/jest-dom": "5.16.5",
"@testing-library/react": "11.2.7",
"@types/ejs": "3.1.5",
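Context for the version bumps above: moving from the 4.x to the 6.x Node driver (and from `@shelf/jest-mongodb` 3 to 4) drops several legacy APIs that the later hunks in this PR have to replace: the `ObjectID` alias, the collection-level `mapReduce()` helper, the legacy `remove()` method, and the deprecated `useNewUrlParser`/`useUnifiedTopology` connection options. A minimal sketch of the 6.x connection and id-handling pattern; the database name `'lodex'` is a placeholder, not taken from the codebase:

```js
// Sketch only — assumes mongodb@6.x is installed.
const { MongoClient, ObjectId } = require('mongodb');

async function findById(uri, id) {
    // No legacy parser/topology options: construct the client, then connect explicitly.
    const client = new MongoClient(uri);
    try {
        await client.connect();
        return await client
            .db('lodex') // placeholder database name
            .collection('publishedDataset') // collection name as used elsewhere in this diff
            .findOne({ _id: new ObjectId(id) }); // the ObjectID alias no longer exists
    } finally {
        await client.close();
    }
}
```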
1 change: 1 addition & 0 deletions packages/ezsLodex/jest-setup.js
@@ -0,0 +1 @@
global.performance = require('perf_hooks').performance;
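The reason for this setup file is not stated in the diff; presumably something in the upgraded driver/test stack reads the global `performance` object, which the Jest environment used here does not define, so the file exposes Node's `perf_hooks` implementation globally (and the `jest.config.js` change above registers it via `setupFilesAfterEnv`). A slightly more defensive sketch of the same polyfill:

```js
// jest-setup.js — sketch of the same idea, guarded so an environment that
// already provides a global performance object is left untouched.
if (typeof globalThis.performance === 'undefined') {
    globalThis.performance = require('perf_hooks').performance;
}
```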
2 changes: 1 addition & 1 deletion packages/ezsLodex/package.json
@@ -5,7 +5,7 @@
"version": "1.16.4",
"dependencies": {
"lodash": "4.17.21",
"mongodb": "4.17.0",
"mongodb": "6.8.0",
"node-object-hash": "2.3.10",
"quick-lru": "4.0.1",
"relaxed-json": "1.0.3",
29 changes: 18 additions & 11 deletions packages/ezsLodex/src/reduceQuery.js
@@ -55,7 +55,15 @@ export const createFunction = () => async function LodexReduceQuery(data, feed)
const collName = String('mp_').concat(
hashCoerce.hash({ reducer, fields }),
);
const options = {
const connectionStringURI = this.getParam(
'connectionStringURI',
data.connectionStringURI || '',
);
const db = await mongoDatabase(connectionStringURI);
const command = {
mapReduce: 'publishedDataset',
map: map.toString(),
reduce: reduce.toString(),
query: filter,
finalize,
out: {
@@ -65,16 +73,13 @@
fields,
},
};
const connectionStringURI = this.getParam(
'connectionStringURI',
data.connectionStringURI || '',
);
const db = await mongoDatabase(connectionStringURI);
const collection = db.collection('publishedDataset');

const result = await collection.mapReduce(map, reduce, options);
const { result: collectionResult, ok } = await db.command(command);
if (ok !== 1) {
return feed.stop(new Error('MongoDB command returned an error'));
}

const total = await result.count();
const result = await db.collection(collectionResult);
const total = await result.estimatedDocumentCount();

const findFilter = {};

@@ -101,6 +106,7 @@ export const createFunction = () => async function LodexReduceQuery(data, feed)
const cursor = result.find(findFilter);
const count = await cursor.count();


if (total === 0 || count === 0) {
return feed.send({ total: 0 });
}
@@ -110,14 +116,15 @@ export const createFunction = () => async function LodexReduceQuery(data, feed)
path.push('referer');
value.push(referer);
}

const stream = cursor
.sort(sort)
.skip(skip)
.limit(limit)
.stream()
.on('error', (e) => feed.stop(e))
.pipe(ezs('assign', { path, value }));
await feed.flow(stream);
feed.flow(stream);
};

export default {
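For context on the hunk above: the collection-level `mapReduce()` helper no longer exists in the 6.x driver, so the stream now sends the raw `mapReduce` database command and reads back the materialised output collection. A condensed sketch of that flow; `'mp_example'` stands in for the hashed `mp_*` name built earlier in the file, and the `out` spec is illustrative since the real one sits in the collapsed part of the hunk:

```js
// Sketch under the 6.x driver; map, reduce and filter come from the surrounding code.
const { ok, result: outName } = await db.command({
    mapReduce: 'publishedDataset',
    map: map.toString(), // functions are sent to the server as strings
    reduce: reduce.toString(),
    query: filter,
    out: { replace: 'mp_example' }, // placeholder for the hashed mp_* collection name
});
if (ok !== 1) {
    throw new Error('MongoDB mapReduce command returned an error');
}
const output = db.collection(outName); // the command reports the output collection name
const total = await output.estimatedDocumentCount(); // replaces the removed result.count()
```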
68 changes: 37 additions & 31 deletions packages/ezsLodex/test/mongoQueries.spec.js
@@ -1,3 +1,4 @@
import { performance, PerformanceObserver } from 'perf_hooks';
import from from 'from';
import { MongoClient } from 'mongodb';
import _ from 'lodash';
@@ -14,22 +15,25 @@ ezs.use(ezsLodex);

describe('mongo queries', () => {
const connectionStringURI = process.env.MONGO_URL;
let client;
let connection;
let db;

beforeAll(async () => {
connection = await MongoClient.connect(process.env.MONGO_URL, {
useNewUrlParser: true,
useUnifiedTopology: true,
});
db = await connection.db();
client = new MongoClient(process.env.MONGO_URL);
try {
await client.connect();
db = await client.db();
} catch (e) {
console.error(`Unable to connect to ${connectionStringURI}`, e);
}
});

afterAll(async () => {
await Promise.all(
Object.keys(handles).map(key => handles[key].close()),
Object.keys(handles).map((key) => handles[key].close()),
);
await connection.close();
await client.close();
});

const initDb = (url, data) => {
Expand All @@ -40,11 +44,11 @@ describe('mongo queries', () => {
return Promise.all(requests);
};

const drop = () => db.dropDatabase();
const drop = async () => await db.dropDatabase();

describe('getCharacteristics', () => {
beforeEach(() => initDb(connectionStringURI, publishedCharacteristic));
afterEach(() => drop());
beforeEach(async () => await initDb(connectionStringURI, publishedCharacteristic));
afterEach(async () => await drop());

it('should return characteristics', done => {
let res = [];
@@ -93,8 +97,8 @@ });
});

describe('getFields', () => {
beforeEach(() => initDb(connectionStringURI, field));
afterEach(() => drop());
beforeEach(async () => await initDb(connectionStringURI, field));
afterEach(async () => await drop());

it('should return the fields', done => {
let res = [];
@@ -137,8 +141,8 @@ describe('mongo queries', () => {
});

describe('runQuery', () => {
beforeEach(() => initDb(connectionStringURI, publishedDataset));
afterEach(() => drop());
beforeEach(async () => await initDb(connectionStringURI, publishedDataset));
afterEach(async () => await drop());

it('should return results', done => {
let res = [];
@@ -306,8 +310,8 @@ describe('mongo queries', () => {
});

describe('reduceQuery', () => {
beforeEach(() => initDb(connectionStringURI, publishedDataset));
afterEach(() => drop());
beforeEach(async () => await initDb(connectionStringURI, publishedDataset));
afterEach(async () => await drop());

it('should throw when no reducer is given', done => {
from([
@@ -360,6 +364,8 @@ describe('mongo queries', () => {
field: 'publicationDate',
}),
)
.pipe(ezs.catch())
.on('error', done)
.on('data', data => {
res = [...res, data];
})
@@ -506,7 +512,7 @@ describe('mongo queries', () => {
initDb(connectionStringURI, field),
]),
);
afterEach(() => drop());
afterEach(async () => await drop());

it('should inject title & summary in each item', done => {
const res = [];
@@ -555,8 +561,8 @@ describe('mongo queries', () => {
});

describe('injectDatasetFields', () => {
beforeEach(() => initDb(connectionStringURI, publishedCharacteristic));
afterEach(() => drop());
beforeEach(async () => await initDb(connectionStringURI, publishedCharacteristic));
afterEach(async () => await drop());

it('should inject dataset fiels in each item', done => {
const res = [];
@@ -593,8 +599,8 @@ describe('mongo queries', () => {
});

describe('labelizeFieldID', () => {
beforeEach(() => initDb(connectionStringURI, field));
afterEach(() => drop());
beforeEach(async () => await initDb(connectionStringURI, field));
afterEach(async () => await drop());

const input = [
{
@@ -668,8 +674,8 @@ describe('mongo queries', () => {
});
});
describe('buildContext', () => {
beforeEach(() => initDb(connectionStringURI, field));
afterEach(() => drop());
beforeEach(async () => await initDb(connectionStringURI, field));
afterEach(async () => await drop());

it('with a standard context', done => {
const res = [];
@@ -745,8 +751,8 @@ describe('mongo queries', () => {
});

describe('countField', () => {
beforeEach(() => initDb(connectionStringURI, publishedDataset));
afterEach(() => drop());
beforeEach(async () => await initDb(connectionStringURI, publishedDataset));
afterEach(async () => await drop());

it('should return results', done => {
let res = [];
@@ -776,8 +782,8 @@ describe('mongo queries', () => {
});

describe('aggregateQuery', () => {
beforeEach(() => initDb(connectionStringURI, publishedDataset));
afterEach(() => drop());
beforeEach(async () => await initDb(connectionStringURI, publishedDataset));
afterEach(async () => await drop());

it('should return results', done => {
let res = [];
@@ -898,10 +904,10 @@ describe('mongo queries', () => {
});

describe('#joinQuery', () => {
beforeEach(() =>
initDb(connectionStringURI, publishedDatasetWithSubResource),
beforeEach(async () =>
await initDb(connectionStringURI, publishedDatasetWithSubResource),
);
afterEach(() => drop());
afterEach(async () => await drop());

it('should return no results with parameters matchField, matchValue, joinField as empty string', done => {
const results = [];
@@ -1097,7 +1103,7 @@ describe('mongo queries', () => {
await initDb(connectionStringURI, precomputedDataset);
});

afterEach(() => drop());
afterEach(async () => await drop());

it('should return 5 result for segments precomputed', done => {
const results = [];
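The connection changes in this spec follow the current driver pattern: construct the `MongoClient`, call `connect()` explicitly, and drop the `useNewUrlParser`/`useUnifiedTopology` options, which newer drivers no longer need. A minimal sketch of the same lifecycle for a Jest suite, assuming `MONGO_URL` is injected by `@shelf/jest-mongodb`:

```js
const { MongoClient } = require('mongodb');

let client;
let db;

beforeAll(async () => {
    client = new MongoClient(process.env.MONGO_URL); // no legacy parser/topology options
    await client.connect();
    db = client.db(); // db() is synchronous; awaiting it (as in the diff) is harmless
});

afterAll(async () => {
    await client.close(); // close the client the tests created
});
```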
1 change: 1 addition & 0 deletions src/api/app.js
@@ -159,6 +159,7 @@ app.use(function* (next) {
try {
yield next;
} catch (err) {
console.error('Sending error as a body', err);
this.status = err.status || 500;
this.body = err.message;
this.app.emit('error', err, this);
10 changes: 6 additions & 4 deletions src/api/controller/api/field.js
@@ -20,7 +20,7 @@ import {
} from '../../../common/scope';
import { dropJobs } from '../../workers/tools';
import { ENRICHER } from '../../workers/enricher';
import { ObjectID } from 'mongodb';
import { ObjectId } from 'mongodb';
import generateUid from '../../services/generateUid';
import { restoreEnrichments } from '../../services/enrichment/enrichment';
import { restorePrecomputed } from '../../services/precomputed/precomputed';
@@ -35,7 +35,7 @@ export const restoreFields = (fileStream, ctx) => {
.then((fieldsString) => JSON.parse(fieldsString))
.then((fields) => {
ctx.field
.remove({})
.drop()
.then(() =>
Promise.all(
fields
@@ -70,7 +70,7 @@
};

return ctx.field
.remove({})
.drop()
.then(restoreTask)
.then(() =>
Promise.all([
@@ -156,6 +156,7 @@ export const setup = async (ctx, next) => {
try {
await next();
} catch (error) {
console.error('Sending error as a body', error);
ctx.status = 500;
ctx.body = { error: error.message };
}
@@ -228,7 +229,7 @@ export const patchSearchableFields = async (ctx) => {
const fields = ctx.request.body;

try {
const ids = fields.map((field) => new ObjectID(field._id));
const ids = fields.map((field) => new ObjectId(field._id));
await ctx.field.updateMany(
{ _id: { $in: ids } },
{ $set: { searchable: true } },
@@ -314,6 +315,7 @@ export const importFields = (asyncBusboyImpl) => async (ctx) => {
};
ctx.status = 200;
} catch (e) {
console.error('Sending error as a body', e);
ctx.status = 500;
ctx.body = e.message;
}
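Two driver-related substitutions run through this file: `ObjectID` becomes `ObjectId`, and the legacy `remove({})` calls become `drop()`. Worth noting, as an observation rather than a required change, that `drop()` deletes the collection itself (documents and indexes), whereas `deleteMany({})` would only clear the documents. A sketch of both, with hypothetical helper names and the `'field'` collection name assumed from the `ctx.field` model:

```js
const { ObjectId } = require('mongodb'); // the ObjectID alias is no longer exported

// Option used in this PR: drop the collection outright before restoring fields.
async function clearFieldsByDrop(db) {
    await db.collection('field').drop();
}

// Alternative that keeps the collection and its indexes in place.
async function clearFieldsByDelete(db) {
    await db.collection('field').deleteMany({});
}

// _id filters must now be built with ObjectId, as in patchSearchableFields above.
function byIds(ids) {
    return { _id: { $in: ids.map((id) => new ObjectId(id)) } };
}
```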
4 changes: 3 additions & 1 deletion src/api/controller/api/publish.spec.js
@@ -12,7 +12,9 @@ jest.mock('../../workers', () => ({
describe.skip('publish', () => {
describe('doPublish', () => {
beforeAll(async () => {
await doPublish({});
await doPublish({
tenant: 'lodex_test',
});
});
it('should add event to publisher queue', () => {
expect(workerQueues['lodex_test'].add).toHaveBeenCalledWith({
2 changes: 1 addition & 1 deletion src/api/controller/api/publishFacets.js
@@ -19,7 +19,7 @@ export default async (ctx, fields, withProgress = false) => {
jobLogger.info(ctx.job, 'Publishing facets');

const names = fields.map(({ name }) => name);
await ctx.publishedFacet.remove({ field: { $in: names } });
await ctx.publishedFacet.deleteOne({ field: { $in: names } });

await facetFields
.reduce(
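One behavioural note on the hunk above, offered as a question rather than a fix: the legacy `remove(filter)` deleted every matching document, while `deleteOne` deletes at most one. If the intent is still to clear the facets of all listed fields, `deleteMany` is the closer 6.x equivalent:

```js
// Sketch of the alternative; names taken from the hunk above.
await ctx.publishedFacet.deleteMany({ field: { $in: names } });
```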