diff --git a/.github/workflows/ci-mongodb-controller.yml b/.github/workflows/ci-mongodb-controller.yml new file mode 100644 index 00000000..b9972282 --- /dev/null +++ b/.github/workflows/ci-mongodb-controller.yml @@ -0,0 +1,113 @@ +name: Continuous Integration - MongoDB Controller + +on: + push: + paths: + - ".github/workflows/ci-mongodb-controller.yml" + - "packages/mongodb-controller/**" + pull_request: + paths: + - ".github/workflows/ci-mongodb-controller.yml" + - "packages/mongodb-controller/**" + +jobs: + linter: + name: Lint Code + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - name: Check out repo + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: 18 + + - name: Install pnpm + uses: pnpm/action-setup@v4 + with: + run_install: false + + - name: Get pnpm store directory + shell: bash + run: | + echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV + + - uses: actions/cache@v4 + name: Setup pnpm cache + with: + path: ${{ env.STORE_PATH }} + key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} + restore-keys: | + ${{ runner.os }}-pnpm-store- + + - name: Install dependencies + run: pnpm install + + - name: Lint code + run: pnpm --filter "./packages/mongodb-controller" run lint + + test: + name: Test + needs: linter + runs-on: ${{ matrix.os }} + permissions: + contents: read + strategy: + matrix: + node-version: [20, 22] + os: [ubuntu-latest] + mongodb: ['6.0', '7.0'] + steps: + - name: Check out repo + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Setup Node ${{ matrix.node-version }} + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + + - name: Start MongoDB + uses: supercharge/mongodb-github-action@1.11.0 + with: + mongodb-replica-set: rs0 + mongodb-version: ${{ matrix.mongodb }} + + - name: Install pnpm + uses: pnpm/action-setup@v4 + with: + run_install: false + + - 
name: Get pnpm store directory + shell: bash + run: | + echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV + + - uses: actions/cache@v4 + name: Setup pnpm cache + with: + path: ${{ env.STORE_PATH }} + key: ${{ runner.os }}-${{ matrix.node-version }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} + restore-keys: | + ${{ runner.os }}-${{ matrix.node-version }}-pnpm-store- + + - name: Install dependencies + run: pnpm install + + - name: Build @kakang/unit + run: pnpm --filter "./packages/unit" run build + + - name: Build @kakang/eventemitter + run: pnpm --filter "./packages/eventemitter" run build + + - name: Build @kakang/validator + run: pnpm --filter "./packages/validator" run build + + - name: Run tests + run: pnpm --filter "./packages/mongodb-controller" run test diff --git a/packages/mongodb-controller/eslint.config.js b/packages/mongodb-controller/eslint.config.js new file mode 100644 index 00000000..ea013b92 --- /dev/null +++ b/packages/mongodb-controller/eslint.config.js @@ -0,0 +1,10 @@ +'use strict' + +const neostandard = require('neostandard') + +module.exports = [ + { + ignores: ['**/*.d.ts'], + }, + ...neostandard({ ts: true }), +] diff --git a/packages/mongodb-controller/lib/controller/default.ts b/packages/mongodb-controller/lib/controller/default.ts new file mode 100644 index 00000000..59b91af4 --- /dev/null +++ b/packages/mongodb-controller/lib/controller/default.ts @@ -0,0 +1,359 @@ +import { EventEmitter, Listener } from '@kakang/eventemitter' +import { isArray, isEmpty, isExist, isNumber, isObject, isString } from '@kakang/validator' +import { type AggregateOptions, type Collection, type CreateIndexesOptions, type DeleteOptions, type Document, type Filter, type FindOneAndDeleteOptions, type FindOneAndReplaceOptions, type FindOneAndUpdateOptions, type FindOptions, type IndexSpecification, type UpdateFilter, type UpdateOptions } from 'mongodb' +import { AggregateBuilder, type MatchStage, type ProjectStage, type SortStage } from 
'../utils/aggregate-builder' +import { appendCreateFields, appendUpdateFields } from '../utils/append' +import { noop } from '../utils/noop' +import { buildOperatorMap } from '../utils/operator-map' +import { normalizeQueryData, splitFieldOperator } from '../utils/query' + +export interface SearchOptions { + // shortcut of where.or multiple field like + search?: string + // we prefix with where + [key: `where.${string}`]: string + // orderby + [key: `orderby.${string}`]: 'asc' | 'desc' | 'text' + // project + fields?: string | string[] + // pagination + page?: number + pageSize?: number +} + +export interface ControllerOptions { + schema?: unknown + searchFields?: string[] + indexes?: Array<[IndexSpecification, CreateIndexesOptions | undefined]> +} + +export class Controller extends EventEmitter { + #collection: Collection + readonly #indexes: Array<[IndexSpecification, CreateIndexesOptions | undefined]> + #schema: unknown + #fields: string[] + searchFields: string[] + operatorMap: Map + + get collection (): Collection { + return this.#collection + } + + set collection (collection: Collection) { + if (isEmpty(collection)) throw TypeError(`collection expected to be "object", but recieved "${typeof collection}"`) + this.#collection = collection + } + + get collectionName (): string { + return this.collection.collectionName + } + + get schema (): unknown { + return this.#schema + } + + set schema (schema: unknown) { + if (!isObject(schema)) throw TypeError(`schema expected to be "object", but recieved "${typeof schema}"`) + this.#schema = schema + this.#fields = Object.keys((schema as any).properties as Record) + } + + get fields (): string[] { + return this.#fields + } + + constructor (collection: Collection, options?: ControllerOptions) { + super() + // pre-allocation + this.#collection = null as never as Collection + this.collection = collection + this.#indexes = [] + this.#indexes.push([{ id: 1 }, { background: false, unique: true }]) + 
this.#indexes.push(...(options?.indexes ?? [])) + this.#fields = [] + this.schema = options?.schema ?? { type: 'object', properties: {} } + this.searchFields = options?.searchFields ?? ['uid'] + // use build operator map to allows per controller + this.operatorMap = buildOperatorMap() + + this.#createIndexes() + + this._emit('initialized') + } + + // wrapper of emit that do not want to wait + _emit (eventName: string, ...args: unknown[]): void { + this.emit(eventName, ...args) + .catch((err) => { + // note that if you re-thrown inside the error listener + // it may cause you infinite loop + this._emit('error', err) + }) + } + + #createIndexes (): void { + for (const [indexSpec, options] of this.#indexes) { + this.createIndex(indexSpec, options).catch(noop) + } + } + + async createIndex (indexSpec: IndexSpecification, options?: CreateIndexesOptions): Promise { + try { + return await this.collection.createIndex(indexSpec, options) + } catch { + return '' + } + } + + async count (options?: SearchOptions, o?: AggregateOptions): Promise { + options ??= {} + const pipeline = this.buildAggregateBuilder(options).concat(this.pipelineQuery(options)).count('count').toArray() + const found = await this.collection.aggregate(pipeline, o).toArray() + const result = found.at(0)?.count ?? 
0 + return result + } + + async search (options?: SearchOptions, o?: AggregateOptions): Promise { + options ??= {} + const pipeline = this.pipeline(options).toArray() + const result = await this.collection.aggregate(pipeline, o).toArray() + return result + } + + async insertOne (docs: TSchema, options?: FindOneAndReplaceOptions): Promise { + const doc = appendCreateFields(docs) + options ??= {} + // we must use upsert = true here + options.upsert = true + options.returnDocument = 'after' + const result = await this.collection.findOneAndReplace({ uid: doc.uid }, doc, options) + return result as TSchema | null + } + + async find (filter?: Filter, options?: FindOptions): Promise { + filter ??= {} + options ??= {} + const result = await this.collection.find(filter, options).toArray() + return result as TSchema[] + } + + async findOne (filter?: Filter, options?: FindOptions): Promise { + options ??= {} + filter ??= {} + const result = await this.collection.findOne(filter, options) + return result as TSchema + } + + async updateOne (filter: Filter, docs: UpdateFilter | Partial, options?: FindOneAndUpdateOptions): Promise { + options ??= {} + options.returnDocument ??= 'after' + const doc = appendUpdateFields(docs) + const result = await this.collection.findOneAndUpdate(filter, normalizeQueryData(doc), options) + return result as TSchema | null + } + + async updateMany (filter: Filter, docs: UpdateFilter | Partial, options?: UpdateOptions): Promise { + options ??= {} + const doc = appendUpdateFields(docs) + const session = options.session + // since mongodb do not provide a single transaction operate + // to batch update and return new document + // we pre-fetch the pending update documents and update + // those records by uid to prevent non-consistency between + // three operation without transaction. 
+    const _uids: string[] = await this.collection.find(filter, { session }).map((o) => o.uid).toArray()
+    const _filter: Filter<Document> = { uid: { $in: _uids } } as any
+    await this.collection.updateMany(_filter, normalizeQueryData(doc), options)
+    const result = await this.collection.find(_filter, { session }).toArray()
+    return result
+  }
+
+  async deleteOne (filter: Filter<Document>, options?: FindOneAndDeleteOptions): Promise<TSchema | null> {
+    options ??= {}
+    const result = await this.collection.findOneAndDelete(filter, options)
+    return result as TSchema | null
+  }
+
+  async deleteMany (filter?: Filter<Document>, options?: DeleteOptions): Promise<TSchema[]> {
+    filter ??= {}
+    options ??= {}
+    const session = options.session
+    const result = await this.collection.find(filter, { session }).toArray()
+    const _filter: Filter<Document> = { uid: { $in: result.map((o) => o.uid) } } as any
+    await this.collection.deleteMany(_filter, options)
+    return result
+  }
+
+  pipelineQuery (options?: SearchOptions): AggregateBuilder {
+    const builder = new AggregateBuilder()
+    // search and where should be concatenated with $and:
+    // a caller that only wants to filter should not
+    // pass search
+    const $and: unknown[] = []
+    const option: MatchStage = { $and }
+    if (isObject(options)) {
+      if (isString(options.search) && isExist(options.search) && this.searchFields.length > 0) {
+        // we apply the free-text search first
+        const search = []
+        for (const field of this.searchFields) {
+          search.push({ [field]: { $regex: options.search, $options: 'i' } })
+        }
+        $and.push({ $or: search })
+      }
+      const obj: Record<string, Record<string, unknown>> = {}
+      for (const _field in options) {
+        // TODO: consider supporting empty string values
+        const value = options[_field as `where.${string}`]
+        const fields = _field.split('.')
+        const prefix = fields.shift()
+        const field = fields.join('.')
+        // field must be prefixed with "where."
+        if (prefix !== 'where') continue
+        // no field or value, then skip
+        if (!isExist(field) || !isExist(value)) continue
+
+        // where.or=([field].[operator]|[field].[operator])
+        if (field === 'or' && value[0] === '(' && value[value.length - 1] === ')') {
+          const $or: unknown[] = []
+          for (const pair of value.substring(1, value.length - 1).split('|')) {
+            const [field, value] = pair.split('=')
+            const { field: _field, operator } = splitFieldOperator(field, this.operatorMap)
+            if (!isExist(operator) || !isExist(_field)) continue
+
+            $or.push({ [_field]: { [operator]: value } })
+          }
+          if ($or.length) {
+            $and.push({ $or })
+          }
+          continue
+        }
+
+        // where.[field].[operator]
+        const { field: name, operator } = splitFieldOperator(field, this.operatorMap)
+        if (!isExist(operator) || !isExist(name)) continue
+        obj[name] ??= {}
+        obj[name][operator] = value
+      }
+
+      if (Object.keys(obj).length) {
+        $and.push(obj)
+      }
+    }
+
+    if ($and.length) {
+      builder.match(option)
+    }
+    return builder
+  }
+
+  // project
+  pipelineFields (fields?: string | string[]): AggregateBuilder {
+    const builder = new AggregateBuilder()
+    if (isString(fields)) {
+      // we split fields by comma
+      fields = fields.split(',')
+    }
+    if (isArray(fields) && fields.length > 0) {
+      const option: ProjectStage = {}
+      for (const field of fields) {
+        if (isString(field) && isExist(field)) {
+          option[field] = 1
+        }
+      }
+      if (Object.keys(option).length) {
+        builder.project(option)
+      }
+    }
+    return builder
+  }
+
+  // limit / skip
+  pipelinePagination (page?: number, pageSize?: number): AggregateBuilder {
+    const builder = new AggregateBuilder()
+    // page & pageSize must be used together
+    if (isNumber(page) && isNumber(pageSize)) {
+      const skip = page > 0 ? (page - 1) * pageSize : 0
+      // MongoDB will increase the limit amount automatically when
+      // we place skip before limit. Instead of relying on that optimization
+      // we do it ourselves.
+ builder.limit(pageSize + skip) + builder.skip(skip) + } + return builder + } + + // sort + pipelineOrderBy (options?: Record<`orderby.${string}`, 'asc' | 'desc' | 'text'>): AggregateBuilder { + const builder = new AggregateBuilder() + if (isObject(options)) { + const option: SortStage = {} + for (const _field in options) { + const value = options[_field as `orderby.${string}`] + const fields = _field.split('.') + const prefix = fields.shift() + const field = fields.join('.') + // field must be prefix with orderby. + if (prefix !== 'orderby') continue + // no field string, then skip + if (!isExist(field)) continue + let order: 1 | -1 | { $meta: 'textScore' } | undefined + switch (value) { + case 'asc': { + order = 1 + break + } + case 'desc': { + order = -1 + break + } + case 'text': { + order = { $meta: 'textScore' } + break + } + } + // no order, then skip + if (!isExist(order)) continue + option[field] = order + } + + if (Object.keys(option).length) { + builder.sort(option) + } + } + return builder + } + + pipeline (options: SearchOptions = {}): AggregateBuilder { + const builder = this.buildAggregateBuilder(options) + // matching in the first + builder.concat(this.pipelineQuery(options)) + // we should sort before pagination + builder.concat(this.pipelineOrderBy(options)) + builder.concat(this.pipelinePagination(options.page, options.pageSize)) + // project must be the last stage + builder.concat(this.pipelineFields(options.fields)) + return builder + } + + buildAggregateBuilder (_options: SearchOptions): AggregateBuilder { + return new AggregateBuilder() + } + + async resetDatabase (): Promise { + try { + await this.collection.drop() + } catch (err: any) { + // ns not found means the database is not exist + if (err.message !== 'ns not found') throw err + } + this.#createIndexes() + return true + } +} + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +export interface Controller extends EventEmitter { + on(eventName: 'initialized', listener: 
Listener): this + once(eventName: 'initialized', listener: Listener): this + addListener(eventName: 'initialized', listener: Listener): this +} diff --git a/packages/mongodb-controller/lib/index.ts b/packages/mongodb-controller/lib/index.ts new file mode 100644 index 00000000..f4690094 --- /dev/null +++ b/packages/mongodb-controller/lib/index.ts @@ -0,0 +1,4 @@ +export { Controller, type ControllerOptions, type SearchOptions } from './controller/default' +export * from './utils/aggregate-builder' +export { operatorMap } from './utils/operator-map' +export * from './utils/query' diff --git a/packages/mongodb-controller/lib/mjs/package.json b/packages/mongodb-controller/lib/mjs/package.json new file mode 100644 index 00000000..3dbc1ca5 --- /dev/null +++ b/packages/mongodb-controller/lib/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/packages/mongodb-controller/lib/utils/aggregate-builder.ts b/packages/mongodb-controller/lib/utils/aggregate-builder.ts new file mode 100644 index 00000000..e72d1941 --- /dev/null +++ b/packages/mongodb-controller/lib/utils/aggregate-builder.ts @@ -0,0 +1,428 @@ +export type Pipeline = Record + +export class AggregateBuilder { + #pipeline: Pipeline[] + + constructor () { + this.#pipeline = [] + } + + addFields (stage: AddFieldsStage): this { + return this.#push('addFields', stage) + } + + bucket (stage: BucketStage): this { + return this.#push('bucket', stage) + } + + bucketAuto (stage: BucketAutoStage): this { + return this.#push('bucketAuto', stage) + } + + changeStream (stage: ChangeStreamStage): this { + return this.#push('changeStream', stage) + } + + changeStreamSplitLargeEvent (): this { + return this.#push('changeStreamSplitLargeEvent', {}) + } + + collStats (stage: CollStatsStage): this { + return this.#push('collStats', stage) + } + + count (stage: CountStage): this { + return this.#push('count', stage) + } + + currentOp (stage: CurrentOpStage): this { + return this.#push('currentOp', stage) + } + + 
densify (stage: DensifyStage): this { + return this.#push('densify', stage) + } + + documents (stage: DocumentsStage): this { + return this.#push('documents', stage) + } + + facet (stage: FacetStage): this { + return this.#push('facet', stage) + } + + fill (stage: FillStage): this { + return this.#push('fill', stage) + } + + geoNear (stage: GeoNearStage): this { + return this.#push('geoNear', stage) + } + + graphLookup (stage: GraphLookupStage): this { + return this.#push('graphLookup', stage) + } + + group (stage: GroupStage): this { + return this.#push('group', stage) + } + + indexStats (): this { + return this.#push('indexStats', {}) + } + + limit (stage: LimitStage): this { + return this.#push('limit', stage) + } + + listLocalSessions (stage: ListLocalSessionsStage): this { + return this.#push('listLocalSessions', stage) + } + + listSampledQueries (stage: ListSampledQueriesStage): this { + return this.#push('listSampledQueries', stage) + } + + listSearchIndexes (stage: ListSearchIndexesStage): this { + return this.#push('listSearchIndexes', stage) + } + + listSessions (stage: ListSessionsStage): this { + return this.#push('listSessions', stage) + } + + lookup (stage: LookupStage): this { + return this.#push('lookup', stage) + } + + match (stage: MatchStage): this { + return this.#push('match', stage) + } + + merge (stage: MergeStage): this { + return this.#push('merge', stage) + } + + out (stage: OutStage): this { + return this.#push('out', stage) + } + + planCacheStats (): this { + return this.#push('planCacheStats', {}) + } + + project (stage: ProjectStage): this { + return this.#push('project', stage) + } + + redact (stage: RedactStage): this { + return this.#push('redact', stage) + } + + replaceRoot (stage: ReplaceRootStage): this { + return this.#push('replaceRoot', stage) + } + + replaceWith (stage: ReplaceWithStage): this { + return this.#push('replaceWith', stage) + } + + sample (stage: SampleStage): this { + return this.#push('sample', stage) + } + + 
search (stage: SearchStage): this { + return this.#push('search', stage) + } + + searchMeta (stage: SearchMetaStage): this { + return this.#push('searchMeta', stage) + } + + set (stage: SetStage): this { + return this.#push('set', stage) + } + + setWindowFields (stage: SetWindowFieldsStage): this { + return this.#push('setWindowFields', stage) + } + + shardedDataDistribution (): this { + return this.#push('shardedDataDistribution', {}) + } + + skip (stage: SkipStage): this { + return this.#push('skip', stage) + } + + sort (stage: SortStage): this { + return this.#push('sort', stage) + } + + sortByCount (stage: SortByCountStage): this { + return this.#push('sortByCount', stage) + } + + unionWith (stage: UnionWithStage): this { + return this.#push('unionWith', stage) + } + + unset (stage: UnsetStage): this { + return this.#push('unset', stage) + } + + unwind (stage: UnwindStage): this { + return this.#push('unwind', stage) + } + + #push (operator: string, stage: Stages): this { + this.#pipeline.push({ + [`$${operator}`]: stage, + }) + return this + } + + concat (pipeline: AggregateBuilder | Pipeline[]): this { + if (pipeline instanceof AggregateBuilder) { + pipeline = pipeline.toArray() + } + this.#pipeline = this.#pipeline.concat(pipeline) + return this + } + + toArray (): Pipeline[] { + return this.#pipeline + } +} + +export type Stages = + AddFieldsStage | + BucketStage | + BucketAutoStage | + ChangeStreamStage | + CollStatsStage | + CountStage | + CurrentOpStage | + DensifyStage | + DocumentsStage | + FacetStage | + FillStage | + GeoNearStage | + GraphLookupStage | + GroupStage | + LimitStage | + ListLocalSessionsStage | + ListSampledQueriesStage | + ListSearchIndexesStage | + ListSessionsStage | + LookupStage | + MatchStage | + MergeStage | + OutStage | + ProjectStage | + RedactStage | + ReplaceRootStage | + ReplaceWithStage | + SampleStage | + SearchStage | + SearchMetaStage | + SetStage | + SetWindowFieldsStage | + SkipStage | + SortStage | + SortByCountStage 
| + UnionWithStage | + UnsetStage | + UnwindStage + +export type AddFieldsStage = Record + +export interface BucketStage { + groupBy: string | Record + boundaries: unknown[] + default: string + output?: Record +} + +export interface BucketAutoStage { + groupBy: string | Record + buckets: number + output?: Record + granularity?: 'R5' | 'R10' | 'R20' | 'R40' | 'R80' | '1-2-5' | 'E6' | 'E12' | 'E24' | 'E48' | 'E96' | 'E192' | 'POWERSOF2' +} + +export interface ChangeStreamStage { + allChangesForCluster?: boolean + fullDocument?: 'default' | 'required' | 'updateLookup' | 'whenAvailable' + fullDocumentBeforeChange: 'off' | 'whenAvailable' | 'required' + resumeAfter: number + showExpandedEvents: boolean + startAfter: unknown + startAtOperationTime: number +} + +export interface CollStatsStage { + latencyStats?: { histograms: boolean } + storageStats?: { scale: number } + count?: Record + queryExecStats?: Record +} + +export type CountStage = string + +export interface CurrentOpStage { + allUsers?: boolean + idleConnections?: boolean + idleCursors?: boolean + idleSessions?: boolean + localOps?: boolean + backtrace?: boolean +} + +export interface DensifyStage { + field: string + partitionByFields?: string[] + range: { + step: number + unit: 'millisecond' | 'second' | 'minute' | 'hour' | 'day' | 'week' | 'month' | 'quarter' | 'year' + bounds: 'full' | 'partition' | [number, number] + } +} + +export type DocumentsStage = unknown[] + +export type FacetStage = Record + +export interface FillStage { + partitionBy: Record + partitionByFields: string[] + sortBy: Record + output: Record +} + +export interface GeoNearStage { + near: unknown + distanceField: string + spherical?: boolean + maxDistance?: number + query?: Record + distanceMultiplier?: number + includeLocs?: string + uniqueDocs?: boolean + minDistance?: number + key?: unknown +} + +export interface GraphLookupStage { + from: string + startWith: string + connectFromField: string + connectToField: string + as: string + 
maxDepth?: number + depthField?: string + restrictSearchWithMatch?: Record +} + +export interface GroupStage { + _id: string + [key: string]: unknown +} + +export type LimitStage = number + +export type ListLocalSessionsStage = Record | { + users: Array<{ user: string, db: string }> +} | { allUsers: true } + +export interface ListSampledQueriesStage { + namespace?: string +} + +export interface ListSearchIndexesStage { + id?: string + name?: string +} + +export type ListSessionsStage = Record | { + users: Array<{ user: string, db: string }> +} | { allUsers: true } + +export interface LookupStage { + from: string + localField?: string + foreignField?: string + let?: string + pipeline?: unknown[] + as: string +} + +export type MatchStage = Record + +export interface MergeStage { + into: OutStage + on?: string | string[] + let?: Record + whenMatched?: 'replace' | 'keepExisting' | 'merge' | 'fail' + whenNotMatched?: 'insert' | 'discard' | 'fail' +} + +export type OutStage = string | { + db: string + coll: string +} + +export interface ProjectStage { + _id?: 0 | false + [key: string]: 0 | 1 | boolean | unknown +} + +export type RedactStage = Record + +export interface ReplaceRootStage { + newRoot: ReplaceWithStage +} + +export type ReplaceWithStage = `$${string}` | Record + +export interface SampleStage { + size: number +} + +export type SearchStage = unknown + +export type SearchMetaStage = unknown + +export type SetStage = Record + +export interface SetWindowFieldsStage { + partitionBy?: `$${string}` | Record + sortBy: Record + output: Record + range?: ['current' | 'unbound' | number, 'current' | 'unbound' | number] + unit?: 'millisecond' | 'second' | 'minute' | 'hour' | 'day' | 'week' | 'month' | 'quarter' | 'year' + } + [key: string]: unknown + }> +} + +export type SkipStage = number + +export type SortStage = Record + +export type SortByCountStage = `$${string}` | Record + +export type UnionWithStage = string | { + coll: string + pipeline?: unknown[] +} + +export 
type UnsetStage = string | string[] + +export type UnwindStage = string | { + path: `$${string}` + includeArrayIndex?: string + preserveNullAndEmptyArrays?: boolean +} diff --git a/packages/mongodb-controller/lib/utils/append.ts b/packages/mongodb-controller/lib/utils/append.ts new file mode 100644 index 00000000..22ff333b --- /dev/null +++ b/packages/mongodb-controller/lib/utils/append.ts @@ -0,0 +1,35 @@ +import { isArray } from '@kakang/validator' +import { randomUUID } from 'crypto' +import { type Document, type UpdateFilter } from 'mongodb' +import { isUpdateQuery, retrieveUpdateQueryData } from './query' + +export function appendCreateFields (docs: TSchema): TSchema +export function appendCreateFields (docs: TSchema[]): TSchema[] +export function appendCreateFields (docs: TSchema | TSchema[]): TSchema[] | TSchema { + const now = new Date() + if (isArray(docs)) { + return docs.map((d) => appendCreateFields(d)) + } else { + const doc: any = { ...docs } + doc.uid = randomUUID() + doc.createdAt = now + doc.updatedAt = now + return doc + } +} + +export function appendUpdateFields (docs: UpdateFilter): UpdateFilter +export function appendUpdateFields (docs: Partial): TSchema +export function appendUpdateFields (docs: UpdateFilter | Partial): UpdateFilter | TSchema { + const now = new Date() + const doc: Record = retrieveUpdateQueryData(docs) + // we remove uid and createdAt + const { uid, createdAt, ...item } = doc + item.updatedAt = now + if (isUpdateQuery(docs)) { + docs.$set = item as Partial + return docs + } else { + return item + } +} diff --git a/packages/mongodb-controller/lib/utils/noop.ts b/packages/mongodb-controller/lib/utils/noop.ts new file mode 100644 index 00000000..2422ebd2 --- /dev/null +++ b/packages/mongodb-controller/lib/utils/noop.ts @@ -0,0 +1 @@ +export function noop (): void {} diff --git a/packages/mongodb-controller/lib/utils/operator-map.ts b/packages/mongodb-controller/lib/utils/operator-map.ts new file mode 100644 index 
00000000..0f9cf11e --- /dev/null +++ b/packages/mongodb-controller/lib/utils/operator-map.ts @@ -0,0 +1,16 @@ +export const operatorMap = new Map() +operatorMap.set('eq', '$eq') +operatorMap.set('in', '$in') +operatorMap.set('ne', '$ne') +operatorMap.set('gt', '$gt') +operatorMap.set('gte', '$gte') +operatorMap.set('lt', '$lt') +operatorMap.set('lte', '$lte') + +export function buildOperatorMap () { + const map = new Map() + for (const [key, value] of operatorMap) { + map.set(key, value) + } + return map +} diff --git a/packages/mongodb-controller/lib/utils/query.ts b/packages/mongodb-controller/lib/utils/query.ts new file mode 100644 index 00000000..2e1e5583 --- /dev/null +++ b/packages/mongodb-controller/lib/utils/query.ts @@ -0,0 +1,52 @@ +import { type Document, type UpdateFilter } from 'mongodb' + +const UpdateQueryKeys = new Set([ + '$currentDate', + '$inc', + '$min', + '$max', + '$mul', + '$rename', + '$set', + '$setOnInsert', + '$unset', + '$addToSet', + '$pop', + '$pull', + '$push', + '$pushAll', + '$bit', +]) + +export function isUpdateQuery (docs: UpdateFilter | Partial): docs is UpdateFilter { + for (const key of Object.keys(docs)) { + if (UpdateQueryKeys.has(key)) return true + } + return false +} + +export function retrieveUpdateQueryData (docs: UpdateFilter | Partial): TSchema { + return isUpdateQuery(docs) ? Object.assign({}, docs.$set) as TSchema : docs as TSchema +} + +export function normalizeQueryData (docs: UpdateFilter | Partial): UpdateFilter { + return isUpdateQuery(docs) ? 
docs : { $set: docs } as any +} + +export function mergeUpdateQueryData (from: UpdateFilter | Partial, to: UpdateFilter | Partial): UpdateFilter | Partial { + from = normalizeQueryData(from) + to = normalizeQueryData(to) + const data = Object.assign({}, from.$set, to.$set) + return { ...from, ...to, $set: data } +} + +// [field].[operator] +export function splitFieldOperator (field: string, operatorMap: Map): { field?: string, operator?: string } { + const fields = field.split('.') + const operator = operatorMap.get(fields.pop() ?? '') + + return { + field: fields.join('.'), + operator, + } +} diff --git a/packages/mongodb-controller/package.json b/packages/mongodb-controller/package.json new file mode 100644 index 00000000..33302931 --- /dev/null +++ b/packages/mongodb-controller/package.json @@ -0,0 +1,60 @@ +{ + "name": "@kakang/mongodb-controller", + "version": "3.0.2", + "description": "", + "main": "lib/index.js", + "types": "lib/index.d.ts", + "exports": { + ".": { + "import": "./lib/mjs/index.js", + "require": "./lib/index.js" + } + }, + "directories": { + "lib": "lib" + }, + "scripts": { + "clean": "node ../../scripts/build.mjs --clean", + "lint": "eslint", + "lint:fix": "npm run lint -- --fix", + "build": "node ../../scripts/build.mjs --build=\"all\"", + "build:cjs": "node ../../scripts/build.mjs --build='cjs'", + "build:mjs": "node ../../scripts/build.mjs --build='mjs'", + "unit": "cross-env \"NODE_OPTIONS=--require ts-node/register\" unit", + "test": "npm run lint && npm run unit", + "coverage": "cross-env \"NODE_OPTIONS=--require ts-node/register\" c8 unit", + "prepublishOnly": "npm run build", + "postpublish": "npm run clean" + }, + "publishConfig": { + "access": "public", + "registry": "https://registry.npmjs.org/" + }, + "repository": { + "type": "git", + "url": "https://github.com/kaka-ng/nodejs.git" + }, + "author": "KaKa ", + "license": "GPL-3.0", + "devDependencies": { + "@kakang/unit": "workspace:^", + "@types/node": "^20.14.11", + "c8": 
"^10.1.2", + "cross-env": "^7.0.3", + "dotenv": "^16.4.5", + "eslint": "^9.7.0", + "mongodb": "^6.3.0", + "neostandard": "^0.11.1", + "rimraf": "^6.0.1", + "ts-node": "^10.9.2", + "tsc-alias": "^1.8.10", + "typescript": "~5.5.3" + }, + "peerDependencies": { + "mongodb": "^6.3.0" + }, + "dependencies": { + "@kakang/eventemitter": "workspace:^", + "@kakang/validator": "workspace:^" + } +} diff --git a/packages/mongodb-controller/test/aggregate-builder.test.ts b/packages/mongodb-controller/test/aggregate-builder.test.ts new file mode 100644 index 00000000..177a0645 --- /dev/null +++ b/packages/mongodb-controller/test/aggregate-builder.test.ts @@ -0,0 +1,430 @@ +import { ExtendedTestContext, test } from '@kakang/unit' +import { AggregateBuilder, ChangeStreamStage, DensifyStage, FillStage, MergeStage, SetWindowFieldsStage, SortStage } from '../lib' + +function validatePipeline (t: ExtendedTestContext, builder: AggregateBuilder) { + const pipeline = builder.toArray() + for (const stage of pipeline) { + if (`$${t.name}` in stage) { + t.equal(`$${t.name}` in stage, true) + } + } +} + +test('methods', function (t) { + t.plan(45) + const builder = new AggregateBuilder() + + t.test('addFields', function (t) { + t.plan(1) + const opt = { foo: 'bar' } + builder.addFields(opt) + validatePipeline(t, builder) + }) + + t.test('bucket', function (t) { + t.plan(1) + const opt = { + groupBy: '$year_born', // Field to group by + boundaries: [1840, 1850, 1860, 1870, 1880], // Boundaries for the buckets + default: 'Other', // Bucket id for documents which do not fall into a bucket + output: { // Output for each bucket + count: { $sum: 1 }, + artists: + { + $push: { + name: { $concat: ['$first_name', ' ', '$last_name'] }, + year_born: '$year_born', + }, + }, + }, + } + builder.bucket(opt) + validatePipeline(t, builder) + }) + + t.test('bucketAuto', function (t) { + t.plan(1) + const opt = { + groupBy: { + $multiply: ['$dimensions.height', '$dimensions.width'], + }, + buckets: 4, + output: 
{ + count: { $sum: 1 }, + titles: { $push: '$title' }, + }, + } + builder.bucketAuto(opt) + validatePipeline(t, builder) + }) + + t.test('changeStream', function (t) { + t.plan(1) + const opt: ChangeStreamStage = { + fullDocumentBeforeChange: 'whenAvailable', + resumeAfter: 0, + showExpandedEvents: false, + startAfter: {}, + startAtOperationTime: 0, + } + builder.changeStream(opt) + validatePipeline(t, builder) + }) + + t.test('changeStreamSplitLargeEvent', function (t) { + t.plan(1) + builder.changeStreamSplitLargeEvent() + validatePipeline(t, builder) + }) + + t.test('collStats', function (t) { + t.plan(1) + const opt = { latencyStats: { histograms: true } } + builder.collStats(opt) + validatePipeline(t, builder) + }) + + t.test('count', function (t) { + t.plan(1) + const opt = 'passing_scores' + builder.count(opt) + validatePipeline(t, builder) + }) + + t.test('currentOp', function (t) { + t.plan(1) + const opt = { allUsers: true, idleSessions: true } + builder.currentOp(opt) + validatePipeline(t, builder) + }) + + t.test('densify', function (t) { + t.plan(1) + const opt: DensifyStage = { + field: 'foo', + range: { + step: 5, + unit: 'millisecond', + bounds: 'full', + }, + } + builder.densify(opt) + validatePipeline(t, builder) + }) + + t.test('documents', function (t) { + t.plan(1) + const opt: unknown[] = [] + builder.documents(opt) + validatePipeline(t, builder) + }) + + t.test('facet', function (t) { + t.plan(1) + const opt = { + categorizedByTags: [ + { $unwind: '$tags' }, + { $sortByCount: '$tags' }, + ], + categorizedByPrice: [ + // Filter out documents without a price e.g., _id: 7 + { $match: { price: { $exists: 1 } } }, + { + $bucket: { + groupBy: '$price', + boundaries: [0, 150, 200, 300, 400], + default: 'Other', + output: { + count: { $sum: 1 }, + titles: { $push: '$title' }, + }, + }, + }, + ], + 'categorizedByYears(Auto)': [ + { + $bucketAuto: { + groupBy: '$year', + buckets: 4, + }, + }, + ], + } + builder.facet(opt) + validatePipeline(t, builder) 
+ }) + + t.test('fill', function (t) { + t.plan(1) + const opt: FillStage = { + partitionBy: { foo: '$foo' }, + partitionByFields: ['foo'], + sortBy: { foo: 1 }, + output: { foo: { value: '$foo' } }, + } + builder.fill(opt) + validatePipeline(t, builder) + }) + + t.test('geoNear', function (t) { + t.plan(1) + const opt = { + near: { type: 'Point', coordinates: [-73.99279, 40.719296] }, + distanceField: 'dist.calculated', + maxDistance: 2, + query: { category: 'Parks' }, + includeLocs: 'dist.location', + spherical: true, + } + builder.geoNear(opt) + validatePipeline(t, builder) + }) + + t.test('graphLookup', function (t) { + t.plan(1) + const opt = { + from: 'employees', + startWith: '$reportsTo', + connectFromField: 'reportsTo', + connectToField: 'name', + as: 'reportingHierarchy', + } + builder.graphLookup(opt) + validatePipeline(t, builder) + }) + + t.test('group', function (t) { + t.plan(1) + const opt = { _id: '$item' } + builder.group(opt) + validatePipeline(t, builder) + }) + + t.test('indexStats', function (t) { + t.plan(1) + builder.indexStats() + validatePipeline(t, builder) + }) + + t.test('limit', function (t) { + t.plan(1) + const opt = 5 + builder.limit(opt) + validatePipeline(t, builder) + }) + + t.test('listLocalSessions', function (t) { + t.plan(1) + const opt = { users: [{ user: 'myAppReader', db: 't.test' }] } + builder.listLocalSessions(opt) + validatePipeline(t, builder) + }) + + t.test('listSampledQueries', function (t) { + t.plan(1) + const opt = { namespace: 'id' } + builder.listSampledQueries(opt) + validatePipeline(t, builder) + }) + + t.test('listSearchIndexes', function (t) { + t.plan(1) + const opt = { id: '_id', name: 'id' } + builder.listSearchIndexes(opt) + validatePipeline(t, builder) + }) + + t.test('listSessions', function (t) { + t.plan(1) + const opt = { users: [{ user: 'myAppReader', db: 't.test' }] } + builder.listSessions(opt) + validatePipeline(t, builder) + }) + + t.test('lookup', function (t) { + t.plan(1) + const opt = { + 
from: 'inventory', + localField: 'item', + foreignField: 'sku', + as: 'inventory_docs', + } + builder.lookup(opt) + validatePipeline(t, builder) + }) + + t.test('match', function (t) { + t.plan(1) + const opt = { $or: [{ score: { $gt: 70, $lt: 90 } }, { views: { $gte: 1000 } }] } + builder.match(opt) + validatePipeline(t, builder) + }) + + t.test('merge', function (t) { + t.plan(1) + const opt: MergeStage = { into: 'myOutput', on: '_id', whenMatched: 'replace', whenNotMatched: 'insert' } + builder.merge(opt) + validatePipeline(t, builder) + }) + + t.test('out', function (t) { + t.plan(1) + const opt = 'authors' + builder.out(opt) + validatePipeline(t, builder) + }) + + t.test('planCacheStats', function (t) { + t.plan(1) + builder.planCacheStats() + validatePipeline(t, builder) + }) + + t.test('project', function (t) { + t.plan(1) + const opt = { contact: 1, 'contact.address.country': 1 } + builder.project(opt) + validatePipeline(t, builder) + }) + + t.test('redact', function (t) { + t.plan(1) + const opt = { + $cond: { + if: { $gt: [{ $size: { $setIntersection: ['$tags', ['STLW', 'G']] } }, 0] }, + then: '$$DESCEND', + else: '$$PRUNE', + }, + } + builder.redact(opt) + validatePipeline(t, builder) + }) + + t.test('replaceRoot', function (t) { + t.plan(1) + const opt = { newRoot: { $mergeObjects: [{ _id: '$_id', first: '', last: '' }, '$name'] } } + builder.replaceRoot(opt) + validatePipeline(t, builder) + }) + + t.test('replaceWith', function (t) { + t.plan(1) + const opt = { $mergeObjects: [{ _id: '$_id', first: '', last: '' }, '$name'] } + builder.replaceWith(opt) + validatePipeline(t, builder) + }) + + t.test('sample', function (t) { + t.plan(1) + const opt = { size: 3 } + builder.sample(opt) + validatePipeline(t, builder) + }) + + t.test('search', function (t) { + t.plan(1) + const opt = { + foo: 'bar', + } + builder.search(opt) + validatePipeline(t, builder) + }) + + t.test('searchMeta', function (t) { + t.plan(1) + const opt = { + foo: 'bar', + } + 
builder.searchMeta(opt) + validatePipeline(t, builder) + }) + + t.test('set', function (t) { + t.plan(1) + const opt = { + totalHomework: { $sum: '$homework' }, + totalQuiz: { $sum: '$quiz' }, + } + builder.set(opt) + validatePipeline(t, builder) + }) + + t.test('setWindowFields', function (t) { + t.plan(1) + const opt: SetWindowFieldsStage = { + sortBy: { foo: 1 }, + output: { window: { documents: ['current'], range: ['current'], unit: 'millisecond' } }, + } + builder.setWindowFields(opt) + validatePipeline(t, builder) + }) + + t.test('shardedDataDistribution', function (t) { + t.plan(1) + builder.shardedDataDistribution() + validatePipeline(t, builder) + }) + + t.test('skip', function (t) { + t.plan(1) + const opt = 5 + builder.skip(opt) + validatePipeline(t, builder) + }) + + t.test('sort', function (t) { + t.plan(1) + const opt: SortStage = { borough: 1, _id: 1 } + builder.sort(opt) + validatePipeline(t, builder) + }) + + t.test('sortByCount', function (t) { + t.plan(1) + const opt = '$tags' + builder.sortByCount(opt) + validatePipeline(t, builder) + }) + + t.test('unionWith', function (t) { + t.plan(1) + const opt = { coll: 'warehouses', pipeline: [{ $project: { state: 1, _id: 0 } }] } + builder.unionWith(opt) + validatePipeline(t, builder) + }) + + t.test('unset', function (t) { + t.plan(1) + const opt = 'copies' + builder.unset(opt) + validatePipeline(t, builder) + }) + + t.test('unwind', function (t) { + t.plan(1) + const opt = '$sizes' + builder.unwind(opt) + validatePipeline(t, builder) + }) + + t.test('concat - array', function (t) { + t.plan(1) + const opt = [{ $count: 'concat' }] + builder.concat(opt) + t.equal(builder.toArray().length, 43) + }) + + t.test('concat - AggregateBuilder', function (t) { + t.plan(1) + const opt = new AggregateBuilder() + opt.count('AggregateBuilder') + builder.concat(opt) + t.equal(builder.toArray().length, 44) + }) + + t.test('toArray', function (t) { + t.plan(1) + const pipeline = builder.toArray() + 
t.equal(pipeline.length, 44) + }) +}) diff --git a/packages/mongodb-controller/test/config.ts b/packages/mongodb-controller/test/config.ts new file mode 100644 index 00000000..463f47f2 --- /dev/null +++ b/packages/mongodb-controller/test/config.ts @@ -0,0 +1,6 @@ +import dotenv from 'dotenv' +dotenv.config() + +// Use environment variables for local +// Use fallback value for Github Actions +export const MONGODB_URL = process.env.MONGODB_URL ?? 'mongodb://127.0.0.1:27017/?replicaSet=rs0' diff --git a/packages/mongodb-controller/test/controller.test.ts b/packages/mongodb-controller/test/controller.test.ts new file mode 100644 index 00000000..96f40fff --- /dev/null +++ b/packages/mongodb-controller/test/controller.test.ts @@ -0,0 +1,110 @@ +import { test } from '@kakang/unit' +import { MongoClient } from 'mongodb' +import { Controller } from '../lib' +import { noop } from '../lib/utils/noop' +import { MONGODB_URL } from './config' + +test('controller', async (t) => { + const client = new MongoClient(MONGODB_URL) + await client.connect() + const collection = client.db('cicd').collection('controller') + const controller = new Controller(collection, { + searchFields: ['uid', 'foo', 'bar'], + }) + + t.after(async () => { + // we doesn't care if the database is properly removed or not + await collection.deleteMany().catch(noop) + await client.close() + }) + + t.test('insert', async t => { + t.plan(7) + const ok: typeof t.ok = t.ok + const doc: any = await controller.insertOne({ foo: 'bar', bar: 'baz', hello: 'world' }) + ok(doc) + // appended fields + ok(doc.uid) + ok(doc.createdAt) + ok(doc.updatedAt) + // inserted fields + t.equal(doc.foo, 'bar') + t.equal(doc.bar, 'baz') + t.equal(doc.hello, 'world') + }) + + t.test('find one', async t => { + t.plan(7) + const ok: typeof t.ok = t.ok + const doc: any = await controller.findOne({ foo: 'bar' }) + ok(doc) + // appended fields + ok(doc.uid) + ok(doc.createdAt) + ok(doc.updatedAt) + // inserted fields + t.equal(doc.foo, 
'bar') + t.equal(doc.bar, 'baz') + t.equal(doc.hello, 'world') + }) + + t.test('find', async t => { + t.plan(7) + const ok: typeof t.ok = t.ok + const [doc]: any = await controller.find({ foo: 'bar' }) + ok(doc) + // appended fields + ok(doc.uid) + ok(doc.createdAt) + ok(doc.updatedAt) + // inserted fields + t.equal(doc.foo, 'bar') + t.equal(doc.bar, 'baz') + t.equal(doc.hello, 'world') + }) + + t.test('partial update', async t => { + t.plan(7) + const ok: typeof t.ok = t.ok + const doc: any = await controller.updateOne({ foo: 'bar' }, { foo: 'barrrrr' }) + ok(doc) + // appended fields + ok(doc.uid) + ok(doc.createdAt) + ok(doc.updatedAt) + // inserted fields + t.equal(doc.foo, 'barrrrr') + t.equal(doc.bar, 'baz') + t.equal(doc.hello, 'world') + }) + + t.test('update', async t => { + t.plan(7) + const ok: typeof t.ok = t.ok + const doc: any = await controller.updateOne({ foo: 'barrrrr' }, { foo: 'barrrrr', bar: 'bazzzzz', hello: 'worlddddd' }) + ok(doc) + // appended fields + ok(doc.uid) + ok(doc.createdAt) + ok(doc.updatedAt) + // inserted fields + t.equal(doc.foo, 'barrrrr') + t.equal(doc.bar, 'bazzzzz') + t.equal(doc.hello, 'worlddddd') + }) + + t.test('delete', async t => { + t.plan(7) + const ok: typeof t.ok = t.ok + const doc: any = await controller.deleteOne({ foo: 'barrrrr' }) + ok(doc) + // appended fields + ok(doc.uid) + ok(doc.createdAt) + ok(doc.updatedAt) + // inserted fields + t.equal(doc.foo, 'barrrrr') + t.equal(doc.bar, 'bazzzzz') + t.equal(doc.hello, 'worlddddd') + }) +}) diff --git a/packages/mongodb-controller/test/pipeline.test.ts b/packages/mongodb-controller/test/pipeline.test.ts new file mode 100644 index 00000000..795ba852 --- /dev/null +++ b/packages/mongodb-controller/test/pipeline.test.ts @@ -0,0 +1,248 @@ +import { test } from '@kakang/unit' +import { MongoClient } from 'mongodb' +import { Controller } from '../lib' +import { noop } from '../lib/utils/noop' +import { MONGODB_URL } from './config' + +test('pipeline', async (t) => { + 
const client = new MongoClient(MONGODB_URL) + await client.connect() + const collection = client.db('cicd').collection('pipeline') + const controller = new Controller(collection, { + searchFields: ['uid', 'foo', 'bar'], + }) + + t.after(async () => { + // we doesn't care if the database is properly removed or not + await collection.deleteMany().catch(noop) + await client.close() + }) + + t.test('no options', t => { + t.plan(1) + const pipeline = controller.pipeline().toArray() + t.equal(pipeline.length, 0) + }) + + t.test('empty object', t => { + t.plan(1) + const pipeline = controller.pipeline({}).toArray() + t.equal(pipeline.length, 0) + }) + + t.test('pagination', t => { + t.plan(4) + + t.test('page only', t => { + t.plan(1) + const pipeline = controller.pipeline({ page: 1 }).toArray() + t.equal(pipeline.length, 0) + }) + + t.test('pageSize only', t => { + t.plan(1) + const pipeline = controller.pipeline({ pageSize: 10 }).toArray() + t.equal(pipeline.length, 0) + }) + + t.test('page: 1, pageSize: 10', t => { + t.plan(3) + const pipeline = controller.pipeline({ page: 1, pageSize: 10 }).toArray() + t.equal(pipeline.length, 2) + t.equal(pipeline[0].$limit, 10) + t.equal(pipeline[1].$skip, 0) + }) + + t.test('page: 10, pageSize: 1', t => { + t.plan(3) + const pipeline = controller.pipeline({ page: 10, pageSize: 1 }).toArray() + t.equal(pipeline.length, 2) + t.equal(pipeline[0].$limit, 10) + t.equal(pipeline[1].$skip, 9) + }) + }) + + t.test('sort', t => { + t.plan(7) + + t.test('no field', t => { + t.plan(1) + // @ts-expect-error + const pipeline = controller.pipeline({ orderby: 'asc' }).toArray() + t.equal(pipeline.length, 0) + }) + + t.test('no field', t => { + t.plan(1) + const pipeline = controller.pipeline({ 'orderby.': 'asc' }).toArray() + t.equal(pipeline.length, 0) + }) + + t.test('no value', t => { + t.plan(1) + // @ts-expect-error + const pipeline = controller.pipeline({ 'orderby.id': 'non-exist' }).toArray() + t.equal(pipeline.length, 0) + }) + + 
t.test('asc', t => { + t.plan(2) + const pipeline = controller.pipeline({ 'orderby.id': 'asc' }).toArray() + t.equal(pipeline.length, 1) + t.deepEqual(pipeline[0].$sort, { id: 1 }) + }) + + t.test('desc', t => { + t.plan(2) + const pipeline = controller.pipeline({ 'orderby.id': 'desc' }).toArray() + t.equal(pipeline.length, 1) + t.deepEqual(pipeline[0].$sort, { id: -1 }) + }) + + t.test('text', t => { + t.plan(2) + const pipeline = controller.pipeline({ 'orderby.id': 'text' }).toArray() + t.equal(pipeline.length, 1) + t.deepEqual(pipeline[0].$sort, { id: { $meta: 'textScore' } }) + }) + + t.test('mixed', t => { + t.plan(2) + const pipeline = controller.pipeline({ + 'orderby.id': 'asc', + 'orderby.foo': 'asc', + 'orderby.bar': 'desc', + 'orderby.baz': 'text', + 'orderby.hello.world': 'asc', + }).toArray() + t.equal(pipeline.length, 1) + t.deepEqual(pipeline[0].$sort, { + id: 1, + foo: 1, + bar: -1, + baz: { $meta: 'textScore' }, + 'hello.world': 1, + }) + }) + }) + + t.test('projection', t => { + t.plan(5) + + t.test('no field', t => { + t.plan(1) + const pipeline = controller.pipeline({ fields: '' }).toArray() + t.equal(pipeline.length, 0) + }) + + t.test('no field', t => { + t.plan(1) + const pipeline = controller.pipeline({ fields: [] }).toArray() + t.equal(pipeline.length, 0) + }) + + t.test('no field', t => { + t.plan(1) + const pipeline = controller.pipeline({ fields: [''] }).toArray() + t.equal(pipeline.length, 0) + }) + + t.test('string field', t => { + t.plan(2) + const pipeline = controller.pipeline({ fields: 'foo,bar,,baz' }).toArray() + t.equal(pipeline.length, 1) + t.deepEqual(pipeline[0].$project, { + foo: 1, + bar: 1, + baz: 1, + }) + }) + + t.test('array field', t => { + t.plan(2) + const pipeline = controller.pipeline({ fields: ['foo', '', 'bar', 'baz'] }).toArray() + t.equal(pipeline.length, 1) + t.deepEqual(pipeline[0].$project, { + foo: 1, + bar: 1, + baz: 1, + }) + }) + }) + + t.test('search', t => { + t.plan(2) + + t.test('search empty string', 
t => { + t.plan(1) + const pipeline = controller.pipeline({ search: '' }).toArray() + t.equal(pipeline.length, 0) + }) + + t.test('search string', t => { + t.plan(2) + const pipeline = controller.pipeline({ search: 'hello' }).toArray() + t.equal(pipeline.length, 1) + t.deepEqual(pipeline[0].$match, { + $and: [ + { + $or: [ + { uid: { $regexp: 'hello', $options: 'i' } }, + { foo: { $regexp: 'hello', $options: 'i' } }, + { bar: { $regexp: 'hello', $options: 'i' } }, + ], + }, + ], + }) + }) + }) + + t.test('filter', t => { + t.test('where empty string', t => { + t.plan(1) + const pipeline = controller.pipeline({ 'where.id.eq': '' }).toArray() + t.equal(pipeline.length, 0) + }) + + t.test('where string', t => { + t.plan(2) + const pipeline = controller.pipeline({ 'where.id.eq': 'hello' }).toArray() + t.equal(pipeline.length, 1) + t.deepEqual(pipeline[0].$match, { + $and: [ + { id: { $eq: 'hello' } }, + ], + }) + }) + + t.test('multiple where', t => { + t.plan(2) + const pipeline = controller.pipeline({ 'where.id.eq': 'hello', 'where.foo.ne': 'bar' }).toArray() + t.equal(pipeline.length, 1) + t.deepEqual(pipeline[0].$match, { + $and: [ + { + id: { $eq: 'hello' }, + foo: { $ne: 'bar' }, + }, + ], + }) + }) + + t.test('where or', t => { + t.plan(2) + const pipeline = controller.pipeline({ 'where.or': '(id.eq=hello|foo.ne=bar)' }).toArray() + t.equal(pipeline.length, 1) + t.deepEqual(pipeline[0].$match, { + $and: [ + { + $or: [ + { id: { $eq: 'hello' } }, + { foo: { $ne: 'bar' } }, + ], + }, + ], + }) + }) + }) +}) diff --git a/packages/mongodb-controller/tsconfig.cjs.json b/packages/mongodb-controller/tsconfig.cjs.json new file mode 100644 index 00000000..97d519dc --- /dev/null +++ b/packages/mongodb-controller/tsconfig.cjs.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "lib" + }, + "include": ["lib/**/*"] +} diff --git a/packages/mongodb-controller/tsconfig.json b/packages/mongodb-controller/tsconfig.json new file mode 100644 
index 00000000..dc5c37ac --- /dev/null +++ b/packages/mongodb-controller/tsconfig.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + "lib": ["ESNext"], + "module": "CommonJS", + "target": "ES2018", + "moduleResolution": "Node", + + "resolveJsonModule": true, + + "removeComments": true, + "preserveConstEnums": true, + + "sourceMap": true, + + "declaration": true, + + "strict": true, + "noImplicitAny": true, + "noImplicitThis": true, + + "skipLibCheck": true, + "esModuleInterop": true + }, + "include": ["lib/**/*", "test/**/*"], + "exclude": ["node_modules"] +} diff --git a/packages/mongodb-controller/tsconfig.mjs.json b/packages/mongodb-controller/tsconfig.mjs.json new file mode 100644 index 00000000..c119ce6b --- /dev/null +++ b/packages/mongodb-controller/tsconfig.mjs.json @@ -0,0 +1,11 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "module": "ESNext", + "outDir": "lib/mjs" + }, + "include": ["lib/**/*"], + "tsc-alias": { + "resolveFullPaths": true + } +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f237b990..fda6df20 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -74,6 +74,52 @@ importers: specifier: ~5.5.3 version: 5.5.3 + packages/mongodb-controller: + dependencies: + '@kakang/eventemitter': + specifier: workspace:^ + version: link:../eventemitter + '@kakang/validator': + specifier: workspace:^ + version: link:../validator + devDependencies: + '@kakang/unit': + specifier: workspace:^ + version: link:../unit + '@types/node': + specifier: ^20.14.11 + version: 20.14.11 + c8: + specifier: ^10.1.2 + version: 10.1.2 + cross-env: + specifier: ^7.0.3 + version: 7.0.3 + dotenv: + specifier: ^16.4.5 + version: 16.4.5 + eslint: + specifier: ^9.7.0 + version: 9.7.0 + mongodb: + specifier: ^6.3.0 + version: 6.8.0 + neostandard: + specifier: ^0.11.1 + version: 0.11.1(eslint@9.7.0)(typescript@5.5.3) + rimraf: + specifier: ^6.0.1 + version: 6.0.1 + ts-node: + specifier: ^10.9.2 + version: 10.9.2(@types/node@20.14.11)(typescript@5.5.3) + tsc-alias: + 
specifier: ^1.8.10 + version: 1.8.10 + typescript: + specifier: ~5.5.3 + version: 5.5.3 + packages/unit: dependencies: glob: @@ -281,6 +327,9 @@ packages: '@jridgewell/trace-mapping@0.3.9': resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + '@mongodb-js/saslprep@1.1.7': + resolution: {integrity: sha512-dCHW/oEX0KJ4NjDULBo3JiOaK5+6axtpBbS+ao2ZInoAL9/YRQLhXzSNAFz7hP4nzLkIqsfYAK/PDE3+XHny0Q==} + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} @@ -353,6 +402,12 @@ packages: '@types/node@20.14.11': resolution: {integrity: sha512-kprQpL8MMeszbz6ojB5/tU8PLN4kesnN8Gjzw349rDlNgsSzg90lAVj3llK99Dh7JON+t9AuscPPFW6mPbTnSA==} + '@types/webidl-conversions@7.0.3': + resolution: {integrity: sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==} + + '@types/whatwg-url@11.0.5': + resolution: {integrity: sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==} + '@typescript-eslint/eslint-plugin@8.0.0-alpha.44': resolution: {integrity: sha512-3hqJa/Ak3ahypkcNoNmkkmUg54zV3AWSaalSWAKTQKF5UtXMvRjM5w3nKqS2AQP0dQAkM1u9cXCnOuLeUZr7rw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -474,6 +529,10 @@ packages: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} + bson@6.8.0: + resolution: {integrity: sha512-iOJg8pr7wq2tg/zSlCCHMi3hMm5JTOxLTagf3zxhcenHsFp+c6uOs6K7W5UE7A4QIJGtqh/ZovFNMP4mOPJynQ==} + engines: {node: '>=16.20.1'} + c8@10.1.2: resolution: {integrity: sha512-Qr6rj76eSshu5CgRYvktW0uM0CFY0yi4Fd5D0duDXO6sYinyopmftUiJVuzBQxQcwQLor7JWDVRP+dUfCmzgJw==} engines: {node: '>=18'} @@ -549,6 +608,10 @@ packages: resolution: {integrity: 
sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} engines: {node: '>=8'} + dotenv@16.4.5: + resolution: {integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==} + engines: {node: '>=12'} + eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} @@ -826,6 +889,9 @@ packages: make-error@1.3.6: resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} + memory-pager@1.5.0: + resolution: {integrity: sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==} + merge2@1.4.1: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} @@ -849,6 +915,36 @@ packages: resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} engines: {node: '>=16 || 14 >=14.17'} + mongodb-connection-string-url@3.0.1: + resolution: {integrity: sha512-XqMGwRX0Lgn05TDB4PyG2h2kKO/FfWJyCzYQbIhXUxz7ETt0I/FqHjUeqj37irJ+Dl1ZtU82uYyj14u2XsZKfg==} + + mongodb@6.8.0: + resolution: {integrity: sha512-HGQ9NWDle5WvwMnrvUxsFYPd3JEbqD3RgABHBQRuoCEND0qzhsd0iH5ypHsf1eJ+sXmvmyKpP+FLOKY8Il7jMw==} + engines: {node: '>=16.20.1'} + peerDependencies: + '@aws-sdk/credential-providers': ^3.188.0 + '@mongodb-js/zstd': ^1.1.0 + gcp-metadata: ^5.2.0 + kerberos: ^2.0.1 + mongodb-client-encryption: '>=6.0.0 <7' + snappy: ^7.2.2 + socks: ^2.7.1 + peerDependenciesMeta: + '@aws-sdk/credential-providers': + optional: true + '@mongodb-js/zstd': + optional: true + gcp-metadata: + optional: true + kerberos: + optional: true + mongodb-client-encryption: + optional: true + snappy: + optional: true + socks: + optional: true + ms@2.1.2: resolution: {integrity: 
sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} @@ -991,6 +1087,9 @@ packages: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} + sparse-bitfield@3.0.3: + resolution: {integrity: sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==} + string-width@4.2.3: resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} engines: {node: '>=8'} @@ -1030,6 +1129,10 @@ packages: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} + tr46@4.1.1: + resolution: {integrity: sha512-2lv/66T7e5yNyhAAC4NaKe5nVavzuGJQVVtRYLyQ2OI8tsJ61PMLlelehb0wi2Hx6+hT/OJUWZcw8MjlSRnxvw==} + engines: {node: '>=14'} + ts-api-utils@1.3.0: resolution: {integrity: sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==} engines: {node: '>=16'} @@ -1085,6 +1188,14 @@ packages: resolution: {integrity: sha512-/EH/sDgxU2eGxajKdwLCDmQ4FWq+kpi3uCmBGpw1xJtnAxEjlD8j8PEiGWpCIMIs3ciNAgH0d3TTJiUkYzyZjA==} engines: {node: '>=10.12.0'} + webidl-conversions@7.0.0: + resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==} + engines: {node: '>=12'} + + whatwg-url@13.0.0: + resolution: {integrity: sha512-9WWbymnqj57+XEuqADHrCJ2eSXzn8WXIW/YSGaZtb2WKAInQ6CHfaUUcTyyver0p8BDg5StLQq8h1vtZuwmOig==} + engines: {node: '>=16'} + which@2.0.2: resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} engines: {node: '>= 8'} @@ -1194,6 +1305,10 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.4.15 + '@mongodb-js/saslprep@1.1.7': + dependencies: + sparse-bitfield: 3.0.3 + '@nodelib/fs.scandir@2.1.5': 
dependencies: '@nodelib/fs.stat': 2.0.5 @@ -1279,6 +1394,12 @@ snapshots: dependencies: undici-types: 5.26.5 + '@types/webidl-conversions@7.0.3': {} + + '@types/whatwg-url@11.0.5': + dependencies: + '@types/webidl-conversions': 7.0.3 + '@typescript-eslint/eslint-plugin@8.0.0-alpha.44(@typescript-eslint/parser@8.0.0-alpha.44(eslint@9.7.0)(typescript@5.5.3))(eslint@9.7.0)(typescript@5.5.3)': dependencies: '@eslint-community/regexpp': 4.11.0 @@ -1415,6 +1536,8 @@ snapshots: dependencies: fill-range: 7.1.1 + bson@6.8.0: {} + c8@10.1.2: dependencies: '@bcoe/v8-coverage': 0.2.3 @@ -1490,6 +1613,8 @@ snapshots: dependencies: path-type: 4.0.0 + dotenv@16.4.5: {} + eastasianwidth@0.2.0: {} emoji-regex@8.0.0: {} @@ -1785,6 +1910,8 @@ snapshots: make-error@1.3.6: {} + memory-pager@1.5.0: {} + merge2@1.4.1: {} micromatch@4.0.7: @@ -1806,6 +1933,17 @@ snapshots: minipass@7.1.2: {} + mongodb-connection-string-url@3.0.1: + dependencies: + '@types/whatwg-url': 11.0.5 + whatwg-url: 13.0.0 + + mongodb@6.8.0: + dependencies: + '@mongodb-js/saslprep': 1.1.7 + bson: 6.8.0 + mongodb-connection-string-url: 3.0.1 + ms@2.1.2: {} mylas@2.1.13: {} @@ -1922,6 +2060,10 @@ snapshots: slash@3.0.0: {} + sparse-bitfield@3.0.3: + dependencies: + memory-pager: 1.5.0 + string-width@4.2.3: dependencies: emoji-regex: 8.0.0 @@ -1962,6 +2104,10 @@ snapshots: dependencies: is-number: 7.0.0 + tr46@4.1.1: + dependencies: + punycode: 2.3.1 + ts-api-utils@1.3.0(typescript@5.5.3): dependencies: typescript: 5.5.3 @@ -2024,6 +2170,13 @@ snapshots: '@types/istanbul-lib-coverage': 2.0.6 convert-source-map: 2.0.0 + webidl-conversions@7.0.0: {} + + whatwg-url@13.0.0: + dependencies: + tr46: 4.1.1 + webidl-conversions: 7.0.0 + which@2.0.2: dependencies: isexe: 2.0.0