diff --git a/.changeset/config.json b/.changeset/config.json index ef96d5d7..9513c8a6 100644 --- a/.changeset/config.json +++ b/.changeset/config.json @@ -2,7 +2,7 @@ "$schema": "https://unpkg.com/@changesets/config@3.0.0/schema.json", "changelog": "@changesets/cli/changelog", "commit": false, - "fixed": [["@powersync/service-core", "@powersync/service-image"]], + "fixed": [["@powersync/service-core", "@powersync/service-image", "@powersync/service-schema"]], "linked": [], "access": "restricted", "baseBranch": "origin/main", diff --git a/.changeset/lemon-hotels-fold.md b/.changeset/lemon-hotels-fold.md new file mode 100644 index 00000000..ed4e4289 --- /dev/null +++ b/.changeset/lemon-hotels-fold.md @@ -0,0 +1,7 @@ +--- +'@powersync/service-types': minor +'@powersync/service-core': minor +'@powersync/service-schema': minor +--- + +Added JSON schema export for base PowerSyncConfig diff --git a/packages/schema/.gitignore b/packages/schema/.gitignore new file mode 100644 index 00000000..d9b4b4c3 --- /dev/null +++ b/packages/schema/.gitignore @@ -0,0 +1 @@ +json-schema/ \ No newline at end of file diff --git a/packages/schema/CHANGELOG.md b/packages/schema/CHANGELOG.md new file mode 100644 index 00000000..7bcc401b --- /dev/null +++ b/packages/schema/CHANGELOG.md @@ -0,0 +1 @@ +# @powersync/service-schema diff --git a/packages/schema/LICENSE b/packages/schema/LICENSE new file mode 100644 index 00000000..c8efd46c --- /dev/null +++ b/packages/schema/LICENSE @@ -0,0 +1,67 @@ +# Functional Source License, Version 1.1, Apache 2.0 Future License + +## Abbreviation + +FSL-1.1-Apache-2.0 + +## Notice + +Copyright 2023-2024 Journey Mobile, Inc. + +## Terms and Conditions + +### Licensor ("We") + +The party offering the Software under these Terms and Conditions. + +### The Software + +The "Software" is each version of the software that we make available under these Terms and Conditions, as indicated by our inclusion of these Terms and Conditions with the Software. + +### License Grant + +Subject to your compliance with this License Grant and the Patents, Redistribution and Trademark clauses below, we hereby grant you the right to use, copy, modify, create derivative works, publicly perform, publicly display and redistribute the Software for any Permitted Purpose identified below. + +### Permitted Purpose + +A Permitted Purpose is any purpose other than a Competing Use. A Competing Use means making the Software available to others in a commercial product or service that: + +1. substitutes for the Software; +2. substitutes for any other product or service we offer using the Software that exists as of the date we make the Software available; or +3. offers the same or substantially similar functionality as the Software. + +Permitted Purposes specifically include using the Software: + +1. for your internal use and access; +2. for non-commercial education; +3. for non-commercial research; and +4. in connection with professional services that you provide to a licensee using the Software in accordance with these Terms and Conditions. + +### Patents + +To the extent your use for a Permitted Purpose would necessarily infringe our patents, the license grant above includes a license under our patents. If you make a claim against any party that the Software infringes or contributes to the infringement of any patent, then your patent license to the Software ends immediately. + +### Redistribution + +The Terms and Conditions apply to all copies, modifications and derivatives of the Software. 
+If you redistribute any copies, modifications or derivatives of the Software, you must include a copy of or a link to these Terms and Conditions and not remove any copyright notices provided in or with the Software. + +### Disclaimer + +THE SOFTWARE IS PROVIDED "AS IS" AND WITHOUT WARRANTIES OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION WARRANTIES OF FITNESS FOR A PARTICULAR PURPOSE, MERCHANTABILITY, TITLE OR NON-INFRINGEMENT. +IN NO EVENT WILL WE HAVE ANY LIABILITY TO YOU ARISING OUT OF OR RELATED TO THE SOFTWARE, INCLUDING INDIRECT, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES, EVEN IF WE HAVE BEEN INFORMED OF THEIR POSSIBILITY IN ADVANCE. + +### Trademarks + +Except for displaying the License Details and identifying us as the origin of the Software, you have no right under these Terms and Conditions to use our trademarks, trade names, service marks or product names. + +## Grant of Future License + +We hereby irrevocably grant you an additional license to use the Software under the Apache License, Version 2.0 that is effective on the second anniversary of the date we make the Software available. On or after that date, you may use the Software under the Apache License, Version 2.0, in which case the following will apply: + +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. diff --git a/packages/schema/README.md b/packages/schema/README.md new file mode 100644 index 00000000..b5295c16 --- /dev/null +++ b/packages/schema/README.md @@ -0,0 +1,9 @@ +# PowerSync Service Schema + +This package includes a JSON Schema for the PowerSync service configuration file. 
+ +This can be used to provide validation and suggestions when editing configuration files, for example via the YAML language server: + +```yaml +# yaml-language-server: $schema=https://unpkg.com/@powersync/service-schema@latest/json-schema/powersync-config.json +``` diff --git a/packages/schema/package.json b/packages/schema/package.json new file mode 100644 index 00000000..f01c2091 --- /dev/null +++ b/packages/schema/package.json @@ -0,0 +1,31 @@ +{ + "name": "@powersync/service-schema", + "version": "1.11.3", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "license": "FSL-1.1-Apache-2.0", + "type": "module", + "publishConfig": { + "access": "public" + }, + "files": [ + "dist/**/*", + "json-schema/*" + ], + "repository": "https://github.com/powersync-ja/powersync-service", + "scripts": { + "clean": "rm -r ./dist && tsc -b --clean", + "build:ts": "tsc -b", + "build": "pnpm build:ts && node ./dist/scripts/compile-json-schema.js" + }, + "dependencies": {}, + "devDependencies": { + "@powersync/service-module-postgres": "workspace:*", + "@powersync/service-module-postgres-storage": "workspace:*", + "@powersync/service-module-mongodb": "workspace:*", + "@powersync/service-module-mongodb-storage": "workspace:*", + "@powersync/service-module-mysql": "workspace:*", + "@powersync/service-types": "workspace:*", + "ts-codec": "^1.3.0" + } +} diff --git a/packages/schema/src/index.ts b/packages/schema/src/index.ts new file mode 100644 index 00000000..08710ae4 --- /dev/null +++ b/packages/schema/src/index.ts @@ -0,0 +1 @@ +// This package does not yet export any TypeScript source; this placeholder exists so the TypeScript build can depend on the build of @powersync/service-types. diff --git a/packages/schema/src/scripts/compile-json-schema.ts b/packages/schema/src/scripts/compile-json-schema.ts new file mode 100644 index 00000000..e5a309da --- /dev/null +++ b/packages/schema/src/scripts/compile-json-schema.ts @@ -0,0 +1,43 @@ +import { MongoStorageConfig } from '@powersync/service-module-mongodb-storage/types'; +import { MongoConnectionConfig } from '@powersync/service-module-mongodb/types'; +import { MySQLConnectionConfig } from '@powersync/service-module-mysql/types'; +import { PostgresStorageConfig } from '@powersync/service-module-postgres-storage/types'; +import { PostgresConnectionConfig } from '@powersync/service-module-postgres/types'; +import { configFile } from '@powersync/service-types'; +import fs from 'fs'; +import path from 'path'; +import * as t from 'ts-codec'; +import { fileURLToPath } from 'url'; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const schemaDir = path.join(__dirname, '../../json-schema'); + +fs.mkdirSync(schemaDir, { recursive: true }); + +// Merge the base config with the connection and storage codecs contributed by each module +const baseShape = configFile.powerSyncConfig.props.shape; + +const mergedDataSourceConfig = configFile.genericDataSourceConfig + .or(PostgresConnectionConfig) + .or(MongoConnectionConfig) + .or(MySQLConnectionConfig); + +const mergedStorageConfig = configFile.GenericStorageConfig.or(PostgresStorageConfig).or(MongoStorageConfig); + +const mergedConfig = t.object({ + ...baseShape, + replication: t + .object({ + ...baseShape.replication.props.shape, + connections: t.array(mergedDataSourceConfig).optional() + }) + .optional(), + storage: mergedStorageConfig.optional() +}); + +const mergedConfigSchema = t.generateJSONSchema(mergedConfig, { + allowAdditional: true, + parsers: [configFile.portParser] +}); + +fs.writeFileSync(path.join(schemaDir, 'powersync-config.json'), JSON.stringify(mergedConfigSchema, null, '\t')); diff --git
a/packages/schema/tsconfig.json b/packages/schema/tsconfig.json new file mode 100644 index 00000000..b107a308 --- /dev/null +++ b/packages/schema/tsconfig.json @@ -0,0 +1,30 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "dist", + "declarationDir": "dist", + "rootDir": "src", + "skipLibCheck": true + }, + "include": ["src"], + "references": [ + { + "path": "../types" + }, + { + "path": "../../modules/module-postgres" + }, + { + "path": "../../modules/module-postgres-storage" + }, + { + "path": "../../modules/module-mongodb" + }, + { + "path": "../../modules/module-mongodb-storage" + }, + { + "path": "../../modules/module-mysql" + } + ] +} diff --git a/packages/service-core/src/util/config/collectors/config-collector.ts b/packages/service-core/src/util/config/collectors/config-collector.ts index 86b25c65..940e1743 100644 --- a/packages/service-core/src/util/config/collectors/config-collector.ts +++ b/packages/service-core/src/util/config/collectors/config-collector.ts @@ -1,8 +1,7 @@ -import * as t from 'ts-codec'; import * as yaml from 'yaml'; -import { configFile } from '@powersync/service-types'; import { schema } from '@powersync/lib-services-framework'; +import { configFile } from '@powersync/service-types'; import { RunnerConfig } from '../types.js'; @@ -24,11 +23,7 @@ export enum ConfigFileFormat { const YAML_ENV_PREFIX = 'PS_'; // ts-codec itself doesn't give great validation errors, so we use json schema for that -const configSchemaValidator = schema - .parseJSONSchema( - t.generateJSONSchema(configFile.powerSyncConfig, { allowAdditional: true, parsers: [configFile.portParser] }) - ) - .validator(); +const configSchemaValidator = schema.parseJSONSchema(configFile.PowerSyncConfigJSONSchema).validator(); export abstract class ConfigCollector { abstract get name(): string; diff --git a/packages/types/package.json b/packages/types/package.json index a99ba365..1a599eec 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -13,6 +13,7 @@ "build": "tsc -b" }, "dependencies": { + "dedent": "^1.6.0", + "ts-codec": "^1.3.0", "uri-js": "^4.4.1" } diff --git a/packages/types/src/config/PowerSyncConfig.ts b/packages/types/src/config/PowerSyncConfig.ts index 9a8c70ce..1cc37287 100644 --- a/packages/types/src/config/PowerSyncConfig.ts +++ b/packages/types/src/config/PowerSyncConfig.ts @@ -1,13 +1,24 @@ +import dedent from 'dedent'; import * as t from 'ts-codec'; +/** + * The meta tags here are used in the generated JSON schema. + * The JSON schema can be used to help self-hosted users edit the YAML config file. + */ + /** * Users might specify ports as strings if using YAML custom tag environment substitutions */ -export const portCodec = t.codec( - 'Port', - (value) => value, - (value) => (typeof value == 'number' ? value : parseInt(value)) -); +export const portCodec = t + .codec( + 'Port', + (value) => value, + (value) => (typeof value == 'number' ? value : parseInt(value)) + ) + .meta({ + description: + 'A network port value that can be specified as either a number or a string that will be parsed to a number.' + }); /** * This gets used whenever generating a JSON schema @@ -19,18 +30,36 @@ }) }; -export const DataSourceConfig = t.object({ - // Unique string identifier for the data source - type: t.string, - /** Unique identifier for the connection - optional when a single connection is present.
*/ - id: t.string.optional(), - /** Additional meta tag for connection */ - tag: t.string.optional(), - /** - * Allows for debug query execution - */ - debug_api: t.boolean.optional() -}); +export const DataSourceConfig = t + .object({ + // Unique string identifier for the data source + type: t.string.meta({ + description: 'Unique string identifier for the data source type (e.g., "postgresql", "mysql", etc.).' + }), + /** Unique identifier for the connection - optional when a single connection is present. */ + id: t.string + .meta({ + description: 'Unique identifier for the connection. Optional when only a single connection is present.' + }) + .optional(), + /** Additional meta tag for connection */ + tag: t.string + .meta({ + description: 'Additional meta tag for the connection, used for categorization or grouping.' + }) + .optional(), + /** + * Allows for debug query execution + */ + debug_api: t.boolean + .meta({ + description: 'When enabled, allows debug query execution.' + }) + .optional() + }) + .meta({ + description: 'Base configuration for a replication data source connection.' + }); export type DataSourceConfig = t.Decoded<typeof DataSourceConfig>; @@ -53,61 +82,158 @@ export type ResolvedDataSourceConfig = t.Decoded<typeof ResolvedDataSourceConfig>; -export const jwkRSA = t.object({ - kty: t.literal('RSA'), - kid: t.string, - n: t.string, - e: t.string, - alg: t.literal('RS256').or(t.literal('RS384')).or(t.literal('RS512')).optional(), - use: t.string.optional() -}); - -export const jwkHmac = t.object({ - kty: t.literal('oct'), - /** - * undefined kid indicates it can match any JWT, with or without a kid. - * Use a kid wherever possible. - */ - kid: t.string.optional(), - k: t.string, - alg: t.literal('HS256').or(t.literal('HS384')).or(t.literal('HS512')), - use: t.string.optional() -}); +export const jwkRSA = t + .object({ + kty: t.literal('RSA').meta({ + description: 'Key type identifier, must be "RSA" for RSA keys.' + }), + kid: t.string.meta({ + description: 'Key ID, a unique identifier for the key.' + }), + n: t.string.meta({ + description: 'RSA modulus, Base64 URL encoded.' + }), + e: t.string.meta({ + description: 'RSA exponent, Base64 URL encoded.' + }), + alg: t + .literal('RS256') + .or(t.literal('RS384')) + .or(t.literal('RS512')) + .meta({ + description: 'The algorithm intended for use with this key (RS256, RS384, or RS512).' + }) + .optional(), + use: t.string + .meta({ + description: 'The intended use of the key (e.g., "sig" for signature).' + }) + .optional() + }) + .meta({ + description: 'JSON Web Key (JWK) representation of an RSA key.' + }); -export const jwkOKP = t.object({ - kty: t.literal('OKP'), - kid: t.string.optional(), - /** Other curves have security issues so only these two are supported. */ - crv: t.literal('Ed25519').or(t.literal('Ed448')), - x: t.string, - alg: t.literal('EdDSA'), - use: t.string.optional() -}); +export const jwkHmac = t + .object({ + kty: t.literal('oct').meta({ + description: 'Key type identifier, must be "oct" for HMAC keys.' + }), + kid: t.string + .meta({ + description: + 'Key ID. Undefined kid indicates it can match any JWT, with or without a kid. Use a kid wherever possible.' + }) + .optional(), + k: t.string.meta({ + description: 'The HMAC key value, Base64 URL encoded.' + }), + alg: t.literal('HS256').or(t.literal('HS384')).or(t.literal('HS512')).meta({ + description: 'The algorithm intended for use with this key (HS256, HS384, or HS512).' + }), + use: t.string + .meta({ + description: 'The intended use of the key (e.g., "sig" for signature).'
+ }) + .optional() + }) + .meta({ + description: 'JSON Web Key (JWK) representation of an HMAC key.' + }); -export const jwkEC = t.object({ - kty: t.literal('EC'), - kid: t.string.optional(), - crv: t.literal('P-256').or(t.literal('P-384')).or(t.literal('P-512')), - x: t.string, - y: t.string, - alg: t.literal('ES256').or(t.literal('ES384')).or(t.literal('ES512')), - use: t.string.optional() -}); +export const jwkOKP = t + .object({ + kty: t.literal('OKP').meta({ + description: 'Key type identifier, must be "OKP" for Octet Key Pair keys.' + }), + kid: t.string + .meta({ + description: 'Key ID, a unique identifier for the key.' + }) + .optional(), + /** Other curves have security issues so only these two are supported. */ + crv: t.literal('Ed25519').or(t.literal('Ed448')).meta({ + description: 'The cryptographic curve used with this key. Only Ed25519 and Ed448 are supported.' + }), + x: t.string.meta({ + description: 'The public key, Base64 URL encoded.' + }), + alg: t.literal('EdDSA').meta({ + description: 'The algorithm intended for use with this key (EdDSA).' + }), + use: t.string + .meta({ + description: 'The intended use of the key (e.g., "sig" for signature).' + }) + .optional() + }) + .meta({ + description: + 'JSON Web Key (JWK) representation of an Octet Key Pair (OKP) key used with the Edwards-curve Digital Signature Algorithm (EdDSA).' + }); -const jwk = t.union(t.union(t.union(jwkRSA, jwkHmac), jwkOKP), jwkEC); +export const jwkEC = t + .object({ + kty: t.literal('EC').meta({ + description: 'Key type identifier, must be "EC" for Elliptic Curve keys.' + }), + kid: t.string + .meta({ + description: 'Key ID, a unique identifier for the key.' + }) + .optional(), + crv: t.literal('P-256').or(t.literal('P-384')).or(t.literal('P-512')).meta({ + description: 'The cryptographic curve used with this key (P-256, P-384, or P-512).' + }), + x: t.string.meta({ + description: 'The x coordinate for the Elliptic Curve point, Base64 URL encoded.' + }), + y: t.string.meta({ + description: 'The y coordinate for the Elliptic Curve point, Base64 URL encoded.' + }), + alg: t.literal('ES256').or(t.literal('ES384')).or(t.literal('ES512')).meta({ + description: 'The algorithm intended for use with this key (ES256, ES384, or ES512).' + }), + use: t.string + .meta({ + description: 'The intended use of the key (e.g., "sig" for signature).' + }) + .optional() + }) + .meta({ + description: 'JSON Web Key (JWK) representation of an Elliptic Curve key.' + }); -export const strictJwks = t.object({ - keys: t.array(jwk) +const jwk = t.union(t.union(t.union(jwkRSA, jwkHmac), jwkOKP), jwkEC).meta({ + description: 'A JSON Web Key (JWK) representing a cryptographic key. Can be RSA, HMAC, OKP, or EC key types.' }); +export const strictJwks = t + .object({ + keys: t.array(jwk).meta({ + description: 'An array of JSON Web Keys (JWKs).' + }) + }) + .meta({ + description: 'A JSON Web Key Set (JWKS) containing a collection of JWKs.' + }); + export type StrictJwk = t.Decoded<typeof jwk>; -export const BaseStorageConfig = t.object({ - type: t.string, - // Maximum number of connections to the storage database, per process. - // Defaults to 8. - max_pool_size: t.number.optional() -}); +export const BaseStorageConfig = t + .object({ + type: t.string.meta({ + description: 'The type of storage backend to use (e.g., "postgresql", "mongodb").' + }), + max_pool_size: t.number + .meta({ + description: 'Maximum number of connections to the storage database, per process. Defaults to 8.'
+ }) + .optional() + }) + .meta({ + description: 'Base configuration for storage connections.' + }); /** * Base configuration for Bucket storage connections. @@ -120,95 +246,252 @@ export type BaseStorageConfig = t.Encoded<typeof BaseStorageConfig>; export const GenericStorageConfig = BaseStorageConfig.and(t.record(t.any)); export type GenericStorageConfig = t.Encoded<typeof GenericStorageConfig>; -export const powerSyncConfig = t.object({ - replication: t - .object({ - // This uses the generic config which may have additional fields - connections: t.array(genericDataSourceConfig).optional() - }) - .optional(), - - dev: t - .object({ - demo_auth: t.boolean.optional(), - /** @deprecated */ - demo_password: t.string.optional(), - /** @deprecated */ - crud_api: t.boolean.optional(), - /** @deprecated */ - demo_client: t.boolean.optional() - }) - .optional(), - - client_auth: t - .object({ - jwks_uri: t.string.or(t.array(t.string)).optional(), - block_local_jwks: t.boolean.optional(), - jwks_reject_ip_ranges: t.array(t.string).optional(), - jwks: strictJwks.optional(), - supabase: t.boolean.optional(), - supabase_jwt_secret: t.string.optional(), - audience: t.array(t.string).optional() - }) - .optional(), - - api: t - .object({ - tokens: t.array(t.string).optional(), - parameters: t - .object({ - // Maximum number of connections (http streams or websockets) per API process. - // Default of 200. - max_concurrent_connections: t.number.optional(), - // This should not be siginificantly more than storage.max_pool_size, otherwise it would block on the - // pool. Increasing this can significantly increase memory usage in some cases. - // Default of 10. - max_data_fetch_concurrency: t.number.optional(), - // Maximum number of buckets for each connection. - // More buckets increase latency and memory usage. While the actual number is controlled by sync rules, - // having a hard limit ensures that the service errors instead of crashing when a sync rule is misconfigured. - // Default of 1000. - max_buckets_per_connection: t.number.optional(), - - // Related to max_buckets_per_connection, but this limit applies directly on the parameter - // query results, _before_ we convert it into an unique set. - // Default of 1000. - max_parameter_query_results: t.number.optional() - }) - .optional() - }) - .optional(), +export const powerSyncConfig = t + .object({ + replication: t + .object({ + // This uses the generic config which may have additional fields + connections: t + .array(genericDataSourceConfig) + .meta({ + description: 'Array of data source connections used for replication.' + }) + .optional() + }) + .meta({ + description: 'Configuration for data replication services.' + }) + .optional(), - storage: GenericStorageConfig, + dev: t + .object({ + demo_auth: t.boolean + .meta({ + description: 'Enables demo authentication for development purposes.' + }) + .optional(), + /** @deprecated */ + demo_password: t.string + .meta({ + description: 'Deprecated. Demo password for development authentication.' + }) + .optional(), + /** @deprecated */ + crud_api: t.boolean + .meta({ + description: 'Deprecated. Enables CRUD API for development.' + }) + .optional(), + /** @deprecated */ + demo_client: t.boolean + .meta({ + description: 'Deprecated. Enables demo client for development.' + }) + .optional() + }) + .meta({ + description: 'Development-specific configuration options.'
+ }) + .optional(), - port: portCodec.optional(), - sync_rules: t - .object({ - path: t.string.optional(), - content: t.string.optional(), - exit_on_error: t.boolean.optional() - }) - .optional(), + client_auth: t + .object({ + jwks_uri: t.string + .or(t.array(t.string)) + .meta({ + description: 'URI or array of URIs pointing to JWKS endpoints for client authentication.' + }) + .optional(), + block_local_jwks: t.boolean + .meta({ + description: 'When true, blocks JWKS URIs that resolve to local network addresses.' + }) + .optional(), + jwks_reject_ip_ranges: t + .array(t.string) + .meta({ + description: 'IP ranges to reject when validating JWKS URIs.' + }) + .optional(), + jwks: strictJwks + .meta({ + description: 'Inline JWKS configuration for client authentication.' + }) + .optional(), + supabase: t.boolean + .meta({ + description: 'Enables Supabase authentication integration.' + }) + .optional(), + supabase_jwt_secret: t.string + .meta({ + description: 'JWT secret for Supabase authentication.' + }) + .optional(), + audience: t + .array(t.string) + .meta({ + description: 'Valid audiences for JWT validation.' + }) + .optional() + }) + .meta({ + description: 'Configuration for client authentication mechanisms.' + }) + .optional(), - metadata: t.record(t.string).optional(), + api: t + .object({ + tokens: t + .array(t.string) + .meta({ + description: 'API access tokens for administrative operations.' + }) + .optional(), + parameters: t + .object({ + max_concurrent_connections: t.number + .meta({ + description: dedent` + Maximum number of connections (http streams or websockets) per API process. + Default of 200. + ` + }) + .optional(), - migrations: t - .object({ - disable_auto_migration: t.boolean.optional() - }) - .optional(), - - telemetry: t - .object({ - // When set, metrics will be available on this port for scraping by Prometheus. - prometheus_port: portCodec.optional(), - disable_telemetry_sharing: t.boolean, - internal_service_endpoint: t.string.optional() - }) - .optional(), + max_data_fetch_concurrency: t.number + .meta({ + description: dedent` + This should not be significantly more than storage.max_pool_size, otherwise it would block on the + pool. Increasing this can significantly increase memory usage in some cases. + Default of 10. + ` + }) + .optional(), - parameters: t.record(t.number.or(t.string).or(t.boolean).or(t.Null)).optional() -}); + max_buckets_per_connection: t.number + .meta({ + description: dedent` + Maximum number of buckets for each connection. + More buckets increase latency and memory usage. While the actual number is controlled by sync rules, + having a hard limit ensures that the service errors instead of crashing when a sync rule is misconfigured. + Default of 1000. + ` + }) + .optional(), + + max_parameter_query_results: t.number + .meta({ + description: dedent` + Related to max_buckets_per_connection, but this limit applies directly to the parameter + query results, _before_ we convert it into a unique set. + Default of 1000. + ` + }) + .optional() + }) + .meta({ + description: 'Performance and safety parameters for the API service.' + }) + .optional() + }) + .meta({ + description: 'API service configuration and parameters.' + }) + .optional(), + + storage: GenericStorageConfig.meta({ + description: 'Configuration for the storage backend.' + }), + + port: portCodec + .meta({ + description: + 'The port on which the service will listen for connections. Can be specified as a number or string.'
+ }) + .optional(), + + sync_rules: t + .object({ + path: t.string + .meta({ + description: 'Path to the sync rules YAML file.' + }) + .optional(), + content: t.string + .meta({ + description: 'Inline sync rules content as a string.' + }) + .optional(), + exit_on_error: t.boolean + .meta({ + description: 'Whether to exit the process if there is an error parsing sync rules.' + }) + .optional() + }) + .meta({ + description: 'Configuration for synchronization rules that define data access patterns.' + }) + .optional(), + + metadata: t + .record(t.string) + .meta({ + description: 'Custom metadata key-value pairs for the service.' + }) + .optional(), + + migrations: t + .object({ + disable_auto_migration: t.boolean + .meta({ + description: 'When true, disables automatic storage database schema migrations.' + }) + .optional() + }) + .meta({ + description: 'Configuration for database schema migrations.' + }) + .optional(), + + telemetry: t + .object({ + // When set, metrics will be available on this port for scraping by Prometheus. + prometheus_port: portCodec + .meta({ + description: + 'Port on which Prometheus metrics will be exposed. When set, metrics will be available on this port for scraping.' + }) + .optional(), + disable_telemetry_sharing: t.boolean.meta({ + description: 'When true, disables sharing of anonymized telemetry data.' + }), + internal_service_endpoint: t.string + .meta({ + description: 'Internal endpoint for telemetry services.' + }) + .optional() + }) + .meta({ + description: 'Configuration for service telemetry and monitoring.' + }) + .optional(), + + parameters: t + .record(t.number.or(t.string).or(t.boolean).or(t.Null)) + .meta({ + description: 'Global parameters that can be referenced in sync rules and other configurations.' + }) + .optional() + }) + .meta({ + description: 'Root configuration object for PowerSync service.' 
+ }); export type PowerSyncConfig = t.Decoded<typeof powerSyncConfig>; export type SerializedPowerSyncConfig = t.Encoded<typeof powerSyncConfig>; + +export const PowerSyncConfigJSONSchema = t.generateJSONSchema(powerSyncConfig, { + allowAdditional: true, + parsers: [portParser] +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 6ab9f4b2..68f8349b 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -458,6 +458,30 @@ importers: specifier: 1.0.0-alpha.3 version: 1.0.0-alpha.3 + packages/schema: + devDependencies: + '@powersync/service-module-mongodb': + specifier: workspace:* + version: link:../../modules/module-mongodb + '@powersync/service-module-mongodb-storage': + specifier: workspace:* + version: link:../../modules/module-mongodb-storage + '@powersync/service-module-mysql': + specifier: workspace:* + version: link:../../modules/module-mysql + '@powersync/service-module-postgres': + specifier: workspace:* + version: link:../../modules/module-postgres + '@powersync/service-module-postgres-storage': + specifier: workspace:* + version: link:../../modules/module-postgres-storage + '@powersync/service-types': + specifier: workspace:* + version: link:../types + ts-codec: + specifier: ^1.3.0 + version: 1.3.0 + packages/service-core: dependencies: '@js-sdsl/ordered-set': @@ -618,6 +642,9 @@ importers: packages/types: dependencies: + dedent: + specifier: ^1.6.0 + version: 1.6.0 ts-codec: specifier: ^1.3.0 version: 1.3.0 @@ -2010,6 +2037,14 @@ packages: babel-plugin-macros: optional: true + dedent@1.6.0: + resolution: {integrity: sha512-F1Z+5UCFpmQUzJa11agbyPVMbpgT/qA3/SKyJ1jyBgm7dUcUEa8v9JwDkerSQXfakBwFljIxhOJqGkjUwZ9FSA==} + peerDependencies: + babel-plugin-macros: ^3.1.0 + peerDependenciesMeta: + babel-plugin-macros: + optional: true + deep-eql@5.0.2: resolution: {integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==} engines: {node: '>=6'} @@ -2856,6 +2891,7 @@ packages: node-domexception@1.0.0: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} + deprecated: Use your platform's native DOMException instead node-fetch@3.3.2: resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} @@ -5480,6 +5516,8 @@ snapshots: dedent@1.5.3: {} + dedent@1.6.0: {} + deep-eql@5.0.2: {} deep-extend@0.6.0: {}
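Beyond the editor tooling enabled by the `yaml-language-server` comment in the README, the newly exported `configFile.PowerSyncConfigJSONSchema` can also drive programmatic validation. A minimal sketch, assuming the `ajv` and `yaml` packages are available (neither is added by this PR; the service itself validates through the `schema` utilities from `@powersync/lib-services-framework`, as shown in `config-collector.ts` above), with `powersync.yaml` as a hypothetical config path:

```ts
import fs from 'node:fs';
import Ajv from 'ajv';
import * as yaml from 'yaml';
import { configFile } from '@powersync/service-types';

// Strict mode is relaxed because the generated schema permits additional
// properties (allowAdditional: true) and union types such as number-or-string ports.
const ajv = new Ajv({ strict: false, allowUnionTypes: true });
const validate = ajv.compile(configFile.PowerSyncConfigJSONSchema);

// 'powersync.yaml' is a hypothetical path used for illustration.
const config = yaml.parse(fs.readFileSync('powersync.yaml', 'utf8'));
if (!validate(config)) {
  console.error(validate.errors);
  process.exit(1);
}
```

Note that `PowerSyncConfigJSONSchema` covers only the base config; the schema published by `@powersync/service-schema` additionally merges the module-specific connection and storage codecs (Postgres, MongoDB, MySQL), as `compile-json-schema.ts` shows.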