Merge pull request #46 from voslartomas/feat/aggregate
feat: implement aggregate
ExtraBB authored Apr 30, 2024
2 parents 7ea9170 + 960f4a0 commit cd72b9b
Showing 5 changed files with 213 additions and 3 deletions.
15 changes: 15 additions & 0 deletions README.md
@@ -87,6 +87,21 @@ interface IPaginateResult<T> {
}
```

### aggregatePaged()

`aggregatePaged()` returns ordered and paged results based on a field (`sortField`) that you pass in, using a MongoDB aggregation pipeline. This allows for more complex queries than the simpler `findPaged()`.

### Parameters

Call `aggregatePaged()` with the following parameters:

- options {IPaginateOptions} (The paginate options)
- pipeline {PipelineStage[]} (The aggregation pipeline stages)

### Response

The response shape is the same as for `findPaged()`: an `IPaginateResult<T>`.
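
For example, a minimal usage sketch (the `Post` model and the `author` field are illustrative, not part of the library):

```ts
// First page: sorted by title, filtered by a custom $match stage
const firstPage = await Post.aggregatePaged(
  { limit: 10, sortField: "title", sortAscending: true },
  [{ $match: { author: "alice" } }]
);

// Second page: pass the cursor returned by the previous call
const secondPage = await Post.aggregatePaged(
  { limit: 10, sortField: "title", sortAscending: true, next: firstPage.next },
  [{ $match: { author: "alice" } }]
);
```

The plugin prepends its own cursor `$match` stage and appends the `$sort` and `$limit` stages, so the pipeline you pass only needs to describe your query.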

### Typegoose Model
Create your typegoose model as follows:

54 changes: 52 additions & 2 deletions src/index.ts
@@ -1,5 +1,5 @@
import { Schema, PopulateOptions } from "mongoose";
import { generateCursorQuery, generateSort } from "./query";
import { Schema, PopulateOptions, PipelineStage, Model } from "mongoose";
import { generateAggregatePipeline, generateCursorQuery, generateSort } from "./query";
import { prepareResponse } from "./response";
import { IPaginateOptions, IPaginateResult, VerboseMode } from "./types";

@@ -64,8 +64,58 @@ export default function (schema: Schema, pluginOptions?: IPluginOptions) {
return await createFindPromise(this, options, _query, _projection).explain(verbose);
}

function createAggregatePromise<T>(
mongoCollection: Model<T>,
options: IPaginateOptions,
pipeline: PipelineStage[],
) {
// Determine sort and limit for pagination
const sort = generateSort(options);

const defaultLimit = (pluginOptions && pluginOptions.defaultLimit ? pluginOptions.defaultLimit : 10);
const useDefaultLimit = isNaN(options.limit) || options.limit < 0 || (options.limit === 0 && pluginOptions && pluginOptions.dontAllowUnlimitedResults);
const unlimited = options.limit === 0 && (!pluginOptions || !pluginOptions.dontAllowUnlimitedResults);
options.limit = useDefaultLimit ? defaultLimit : options.limit;

// Apply pagination to the pipeline
const paginatedPipeline = [...generateAggregatePipeline(options), ...pipeline, { $sort: sort as any }];

if (!unlimited) {
paginatedPipeline.push({ $limit: options.limit + 1 });
}

// Execute the aggregate query
const cursor = mongoCollection.aggregate<T>(paginatedPipeline);

return cursor;
}

async function aggregatePaged<T>(
options: IPaginateOptions,
pipeline: PipelineStage[],
): Promise<IPaginateResult<T>> {
// Execute the aggregate query
const cursor = createAggregatePromise<T>(this, options, pipeline);

// Fetch documents
const docs = await cursor.exec();

// Count total documents (if needed)
let totalDocs = 0;
if (pluginOptions && pluginOptions.dontReturnTotalDocs) {
return prepareResponse<T>(docs, options);
} else {
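// The count pipeline reuses only the caller's stages (no cursor match, sort, or limit), so totalDocs reflects the full result set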
const countPipeline = [...pipeline, { $group: { _id: null, count: { $sum: 1 } } }];
const countCursor = this.aggregate(countPipeline);
const countResult = await countCursor.exec();
totalDocs = countResult.length > 0 ? countResult[0].count : 0;
return prepareResponse<T>(docs, options, totalDocs);
}
}

schema.statics.findPaged = findPaged;
schema.statics.findPagedExplain = findPagedExplain;
schema.statics.aggregatePaged = aggregatePaged;
}

export * from "./types";
42 changes: 42 additions & 0 deletions src/query.ts
@@ -1,5 +1,6 @@
import * as bsonUrlEncoding from "./utils/bsonUrlEncoding";
import { IPaginateOptions } from "./types";
import { PipelineStage } from "mongoose";

/**
* Generate a query object for the next/previous page
@@ -31,6 +32,47 @@ export function generateCursorQuery(options: IPaginateOptions) {
return query;
}

/**
* Generate aggregation pipeline stages for cursor-based pagination
* @param options The pagination options
*/
export function generateAggregatePipeline(options: IPaginateOptions): PipelineStage[] {
const pipeline: PipelineStage[] = [];

if (!options.next && !options.previous) {
return pipeline;
}

// Determine the cursor value
const cursorValue = options.next ? options.next : options.previous;

// Decode cursor string
const decoded = bsonUrlEncoding.decode(cursorValue);

const sortAscending = (!options.sortAscending && options.previous) || (options.sortAscending && !options.previous);
const sortComparer = sortAscending ? "$gt" : "$lt";

// Add match stage based on cursor
if (options.sortField && options.sortField !== "_id") {
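// Compare on the sort field first, and fall back to _id as a tie-breaker when sort values are equal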
pipeline.push({
$match: {
$or: [
{ [options.sortField]: { [sortComparer]: decoded[0] } },
{ [options.sortField]: decoded[0], _id: { [sortComparer]: decoded[1] } }
]
}
});
} else {
pipeline.push({
$match: {
_id: { [sortComparer]: decoded[0] }
}
});
}

return pipeline;
}

/**
* Generate a sort object to sort the find() in the correct order
* @param options The pagination options
6 changes: 5 additions & 1 deletion src/types.ts
@@ -1,4 +1,4 @@
import { Model, Query } from "mongoose";
import { Model, PipelineStage, Query } from "mongoose";
import { DocumentType } from "@typegoose/typegoose";

/**
@@ -45,6 +45,10 @@ export interface IPaginateModel<T> extends Model<DocumentType<T>, {}> {
_query?: Object,
_projection?: Object
): Promise<any>;
aggregatePaged(
options: IPaginateOptions,
pipeline: PipelineStage[],
): Query<IPaginateResult<DocumentType<T>>, DocumentType<T>>;
}

/**
99 changes: 99 additions & 0 deletions test/index.spec.ts
@@ -67,17 +67,27 @@ describe("limit", () => {
it("should use a default limit of 10 when none is specified", async () => {
const result = await Post.findPaged({});
assert.equal(result.docs.length, 10);

const aggregateResult = await Post.aggregatePaged({}, []);
assert.equal(aggregateResult.docs.length, 10);
});

it("should use no limit when set to 0", async () => {
const result = await Post.findPaged({ limit: 0 });
assert.equal(result.docs.length, 100);

// aggregatePaged always takes a pipeline argument, so an empty one is passed here
const aggregateResult = await Post.aggregatePaged({ limit: 0 }, []);
assert.equal(aggregateResult.docs.length, 100);
});

it("should use a limit when set", async () => {
const result = await Post.findPaged({ limit: 20 });
assert.equal(result.docs.length, 20);

const aggregateResult = await Post.aggregatePaged({ limit: 20 }, []);
assert.equal(aggregateResult.docs.length, 20);
});
});
@@ -123,6 +133,7 @@ describe("sort", () => {
describe("next/previous", () => {
const baseOptions = { limit: 2, sortField: "title", sortAscending: true };
const query = { title: { $in: ["Post #1", "Post #2", "Post #3", "Post #4", "Post #5"] } };
const aggregatePipeline = [{ $match: { title: { $in: ["Post #1", "Post #2", "Post #3", "Post #4", "Post #5"] } } }];

it("should return correct first page", async () => {
const page1 = await Post.findPaged(baseOptions, query);
@@ -136,6 +147,18 @@ describe("next/previous", () => {
assert.equal(page1.docs[1].title, "Post #2");
});

it("should return correct first page for aggregation", async () => {
const page1 = await Post.aggregatePaged(baseOptions, aggregatePipeline);

assert.equal(typeof page1.next, "string");
assert.equal(page1.previous, undefined);
assert.equal(page1.hasNext, true);
assert.equal(page1.hasPrevious, false);
assert.equal(page1.docs.length, 2);
assert.equal(page1.docs[0].title, "Post #1");
assert.equal(page1.docs[1].title, "Post #2");
});

it("should return correct second page (on next)", async () => {
const page1 = await Post.findPaged(baseOptions, query);
const page2 = await Post.findPaged({ ...baseOptions, next: page1.next }, query);
@@ -149,6 +172,19 @@ describe("next/previous", () => {
assert.equal(page2.docs[1].title, "Post #4");
});

it("should return correct second page (on next) for aggregation", async () => {
const page1 = await Post.aggregatePaged(baseOptions, aggregatePipeline);
const page2 = await Post.aggregatePaged({ ...baseOptions, next: page1.next }, aggregatePipeline);

assert.equal(typeof page2.next, "string");
assert.equal(typeof page2.previous, "string");
assert.equal(page2.hasNext, true);
assert.equal(page2.hasPrevious, true);
assert.equal(page2.docs.length, 2);
assert.equal(page2.docs[0].title, "Post #3");
assert.equal(page2.docs[1].title, "Post #4");
});

it("should return correct third page (on next)", async () => {
const page1 = await Post.findPaged(baseOptions, query);
const page2 = await Post.findPaged({ ...baseOptions, next: page1.next }, query);
@@ -162,6 +198,19 @@ describe("next/previous", () => {
assert.equal(page3.docs[0].title, "Post #5");
});

it("should return correct third page (on next) aggregation", async () => {
const page1 = await Post.aggregatePaged(baseOptions, aggregatePipeline);
const page2 = await Post.aggregatePaged({ ...baseOptions, next: page1.next }, aggregatePipeline);
const page3 = await Post.aggregatePaged({ ...baseOptions, next: page2.next }, aggregatePipeline);

assert.equal(typeof page2.next, "string");
assert.equal(typeof page2.previous, "string");
assert.equal(page3.hasNext, false);
assert.equal(page3.hasPrevious, true);
assert.equal(page3.docs.length, 1);
assert.equal(page3.docs[0].title, "Post #5");
});

it("should return correct second page (on previous)", async () => {
const page1 = await Post.findPaged(baseOptions, query);
const page2 = await Post.findPaged({ ...baseOptions, next: page1.next }, query);
@@ -175,6 +224,19 @@ describe("next/previous", () => {
assert.equal(previousPage2.docs[1].title, "Post #4");
});

it("should return correct second page (on previous) aggregation", async () => {
const page1 = await Post.aggregatePaged(baseOptions, aggregatePipeline);
const page2 = await Post.aggregatePaged({ ...baseOptions, next: page1.next }, aggregatePipeline);
const page3 = await Post.aggregatePaged({ ...baseOptions, next: page2.next }, aggregatePipeline);
const previousPage2 = await Post.aggregatePaged({ ...baseOptions, previous: page3.previous }, aggregatePipeline);

assert.equal(previousPage2.hasNext, true);
assert.equal(previousPage2.hasPrevious, true);
assert.equal(previousPage2.docs.length, 2);
assert.equal(previousPage2.docs[0].title, "Post #3");
assert.equal(previousPage2.docs[1].title, "Post #4");
});

it("should return correct first page (on previous)", async () => {
const page1 = await Post.findPaged(baseOptions, query);
const page2 = await Post.findPaged({ ...baseOptions, next: page1.next }, query);
@@ -188,6 +250,20 @@ describe("next/previous", () => {
assert.equal(previousPage1.docs[0].title, "Post #1");
assert.equal(previousPage1.docs[1].title, "Post #2");
});

it("should return correct first page (on previous) aggregation", async () => {
const page1 = await Post.aggregatePaged(baseOptions, aggregatePipeline);
const page2 = await Post.aggregatePaged({ ...baseOptions, next: page1.next }, aggregatePipeline);
const page3 = await Post.aggregatePaged({ ...baseOptions, next: page2.next }, aggregatePipeline);
const previousPage2 = await Post.aggregatePaged({ ...baseOptions, previous: page3.previous }, aggregatePipeline);
const previousPage1 = await Post.aggregatePaged({ ...baseOptions, previous: previousPage2.previous }, aggregatePipeline);

assert.equal(previousPage1.hasNext, true);
assert.equal(previousPage1.hasPrevious, false);
assert.equal(previousPage1.docs.length, 2);
assert.equal(previousPage1.docs[0].title, "Post #1");
assert.equal(previousPage1.docs[1].title, "Post #2");
});
});

describe("query", () => {
@@ -197,6 +273,13 @@ describe("query", () => {
assert.equal(result.docs[0].title, "Post #27");
assert.equal(result.docs[1].title, "Post #3");
});

it("should allow aggregations", async () => {
const result = await Post.aggregatePaged({}, [{ $match: { title: { $in: ["Post #3", "Post #27"] } } }]);
assert.equal(result.docs.length, 2);
assert.equal(result.docs[0].title, "Post #27");
assert.equal(result.docs[1].title, "Post #3");
});
});

describe("explain", () => {
@@ -260,6 +343,11 @@ describe("Plugin Options", () => {
const result = await Genre.findPaged({ limit: 1 });
assert.equal(result.docs.length, 1);
assert.equal(result.totalDocs, undefined);

// aggregate result
const aggregateResult = await Genre.aggregatePaged({ limit: 1 }, []);
assert.equal(aggregateResult.docs.length, 1);
assert.equal(aggregateResult.totalDocs, undefined);
});

it("should not allow unlimited results when option is set", async () => {
@@ -285,6 +373,13 @@

const result2 = await ISBN.findPaged({ limit: -2 });
assert.equal(result2.docs.length, 10);

// unlimited (0) and negative limits fall back to the default limit - aggregation
const aggregateResult = await ISBN.aggregatePaged({ limit: 0 }, []);
assert.equal(aggregateResult.docs.length, 10);

const aggregateResult2 = await ISBN.aggregatePaged({ limit: -2 }, []);
assert.equal(aggregateResult2.docs.length, 10);
});

it("should set default limit when set", async () => {
@@ -307,5 +402,9 @@ describe("Plugin Options", () => {
// no limit specified falls back to the configured default limit
const result = await ISBNShort.findPaged({});
assert.equal(result.docs.length, 12);

// no limit specified falls back to the configured default limit - aggregation
const aggregateResult = await ISBNShort.aggregatePaged({}, []);
assert.equal(aggregateResult.docs.length, 12);
});
});
