diff --git a/.changeset/real-ideas-sort.md b/.changeset/real-ideas-sort.md
new file mode 100644
index 00000000..58f02bac
--- /dev/null
+++ b/.changeset/real-ideas-sort.md
@@ -0,0 +1,6 @@
+---
+"@proofkit/typegen": minor
+---
+
+New command: `npx @proofkit/typegen@latest ui` launches a web UI for configuring and running typegen.
+(beta) Support for a `@proofkit/fmodata` typegen config.
diff --git a/.coderabbit.yaml b/.coderabbit.yaml
index 9f9270fb..c0036945 100644
--- a/.coderabbit.yaml
+++ b/.coderabbit.yaml
@@ -4,11 +4,11 @@
reviews:
# Enable automated review for pull requests
auto_review:
- enabled: true
+ enabled: false
base_branches:
- ".*" # Matches all branches using regex
review_status: false
poem: false
-
-path_filters:
- - "!apps/demo/**" # exclude the demo app from reivews
+ high_level_summary: false
+ path_filters:
+    - "!apps/demo/**" # exclude the demo app from reviews
diff --git a/.gitignore b/.gitignore
index 315461c7..add6ff10 100644
--- a/.gitignore
+++ b/.gitignore
@@ -72,3 +72,4 @@ server/dist
public/dist
.turbo
packages/fmdapi/test/typegen/*
+packages/typegen/schema/metadata.xml
diff --git a/apps/demo/tests/typegen-output/without-zod/client/index.ts b/apps/demo/tests/typegen-output/without-zod/client/index.ts
index 899d9556..ecc4abeb 100644
--- a/apps/demo/tests/typegen-output/without-zod/client/index.ts
+++ b/apps/demo/tests/typegen-output/without-zod/client/index.ts
@@ -1,2 +1,2 @@
-export { client as testLayoutClient } from "./testLayout";
-export { client as weirdPortalsClient } from "./weirdPortals";
+export { client as testLayoutLayout } from "./testLayout";
+export { client as weirdPortalsLayout } from "./weirdPortals";
diff --git a/apps/demo/package.json b/apps/demo/package.json
index e3125aad..66341516 100644
--- a/apps/demo/package.json
+++ b/apps/demo/package.json
@@ -19,23 +19,29 @@
"dotenv": "^16.5.0",
"fm-odata-client": "^3.0.1",
"fs-extra": "^11.3.0",
- "next": "^15.4.9",
- "react": "^19.1.1",
- "react-dom": "^19.1.1",
- "zod": "3.25.64"
+ "next": "16.1.0",
+ "react": "19.2.3",
+ "react-dom": "19.2.3",
+ "zod": "^4.1.13"
},
"devDependencies": {
"@eslint/eslintrc": "^3",
"@tailwindcss/postcss": "^4.1.11",
"@types/fs-extra": "^11.0.4",
"@types/node": "^22.17.1",
- "@types/react": "^19.1.10",
- "@types/react-dom": "^19.1.7",
+ "@types/react": "19.2.7",
+ "@types/react-dom": "19.2.3",
"dotenv-cli": "^8.0.0",
"eslint": "^9.23.0",
- "eslint-config-next": "^15.3.3",
+ "eslint-config-next": "16.1.0",
"tailwindcss": "^4.1.11",
- "typescript": "^5.9.2",
- "vitest": "^3.2.4"
+ "typescript": "^5.9.3",
+ "vitest": "^4.0.7"
+ },
+ "pnpm": {
+ "overrides": {
+ "@types/react": "19.2.7",
+ "@types/react-dom": "19.2.3"
+ }
}
}
diff --git a/apps/demo/tsconfig.json b/apps/demo/tsconfig.json
index 19004bbc..9ba68802 100644
--- a/apps/demo/tsconfig.json
+++ b/apps/demo/tsconfig.json
@@ -1,7 +1,11 @@
{
"compilerOptions": {
"target": "ES2017",
- "lib": ["dom", "dom.iterable", "esnext"],
+ "lib": [
+ "dom",
+ "dom.iterable",
+ "esnext"
+ ],
"allowJs": true,
"skipLibCheck": true,
"strict": true,
@@ -11,7 +15,7 @@
"moduleResolution": "bundler",
"resolveJsonModule": true,
"isolatedModules": true,
- "jsx": "preserve",
+ "jsx": "react-jsx",
"incremental": true,
"plugins": [
{
@@ -19,9 +23,20 @@
}
],
"paths": {
- "@/*": ["./src/*"]
+ "@/*": [
+ "./src/*"
+ ]
}
},
- "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
- "exclude": ["node_modules", "tests"]
+ "include": [
+ "next-env.d.ts",
+ "**/*.ts",
+ "**/*.tsx",
+ ".next/types/**/*.ts",
+ ".next/dev/types/**/*.ts"
+ ],
+ "exclude": [
+ "node_modules",
+ "tests"
+ ]
}
diff --git a/apps/docs/content/docs/typegen/config-odata.mdx b/apps/docs/content/docs/typegen/config-odata.mdx
new file mode 100644
index 00000000..6e8bac15
--- /dev/null
+++ b/apps/docs/content/docs/typegen/config-odata.mdx
@@ -0,0 +1,294 @@
+---
+title: Configuration (OData)
+---
+
+import { TypeTable } from "fumadocs-ui/components/type-table";
+import { Tabs, Tab } from "fumadocs-ui/components/tabs";
+
+The typegen tool supports OData-based type generation using the `fmodata` config type. This is configured using the `proofkit-typegen-config.jsonc` file at the root of your project.
+
+
+The `@proofkit/fmodata` package is still in beta. Some of these options may change.
+
+
+The config key can also be an array of configs, which is useful if you need to connect to multiple databases or use different settings for different sets of tables.
+
+```jsonc title="proofkit-typegen-config.jsonc" tab="Single OData config"
+{
+ "$schema": "https://proofkit.dev/typegen-config-schema.json",
+ "config": {
+ "type": "fmodata",
+ // ... your OData config here
+ },
+}
+```
+
+```jsonc title="proofkit-typegen-config.jsonc" tab="Multiple configs"
+{
+ "$schema": "https://proofkit.dev/typegen-config-schema.json",
+ "config": [
+ {
+ "type": "fmodata",
+ // ... your OData config here
+ },
+ {
+ "type": "fmdapi",
+ // ... your Data API config here
+ },
+ ],
+}
+```
+
+## Config options
+
+
+
+### `type` (required)
+Must be set to `"fmodata"` to use OData-based type generation.
+
+### `configName` (optional)
+An optional name for this configuration. Useful when using multiple configs to identify which config is being used.
+
+### `path` (default: `"schema"`)
+The path to the directory where the generated files will be saved.
+
+### `reduceMetadata` (optional)
+If set to `true`, reduced OData annotations will be requested from the server to reduce payload size. This prevents comments, entity IDs, and other properties from being generated.
+
+
+ This can also be set per-table in the `tables` array to override the top-level setting for specific tables.
+
+
+### `clearOldFiles` (default: `false`)
+If set to `false` (the default), the output path is not fully cleared before new files are written; only the `client` and `generated` directories are cleared, so any override files you keep alongside them are preserved.
+
+
+ This is different from the Data API config, which defaults to `true`. For OData configs, we preserve existing files by default to allow for customizations.
+
+
+### `alwaysOverrideFieldNames` (default: `true`)
+If set to `true` (default), field names will always be updated to match metadata, even when matching by entity ID. If set to `false`, existing field names are preserved when matching by entity ID.
+
+
+ This can also be set per-table in the `tables` array to override the top-level setting for specific tables.
+
+
+### `envNames` (optional)
+If set, will use the specified environment variable names for your OData connection.
+
+
+ For security reasons, only use the **names** of your environment variables, never the values themselves.
+
+
+The `envNames` object supports:
+- `server`: The environment variable name for the OData server URL
+- `db`: The environment variable name for the database name
+- `auth`: An object with either:
+ - `apiKey`: The environment variable name for the API key, or
+ - `username` and `password`: The environment variable names for username and password
+
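+For example, a config that uses username/password authentication might reference environment variables like this (the variable names below are placeholders for your own):
+
+```jsonc
+{
+  "type": "fmodata",
+  "envNames": {
+    "server": "FM_ODATA_SERVER",
+    "db": "FM_ODATA_DATABASE",
+    "auth": {
+      "username": "FM_ODATA_USERNAME",
+      "password": "FM_ODATA_PASSWORD"
+    }
+  },
+  // ... tables and other options
+}
+```
+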
+## Table options
+
+The `tables` array in the config is where you define the tables (entity sets) that you want to generate types for. You must define at least one table in the config.
+
+
+
+### `tableName` (required)
+The entity set name (table occurrence name) to generate. This table will be included in metadata download and type generation. Must match exactly the name of an entity set in your OData service.
+
+### `variableName` (optional)
+Override the generated TypeScript variable name. The original entity set name is still used for the OData path, but you can use a different name in your TypeScript code.
+
+For example, if your entity set is named `"Customers_Table"` but you want to use `Customers` in your code:
+
+```jsonc
+{
+ "tableName": "Customers_Table",
+ "variableName": "Customers"
+}
+```
+
+### `reduceMetadata` (optional)
+If undefined, the top-level setting will be used. If set to `true` or `false`, it will override the top-level `reduceMetadata` setting for this specific table.
+
+### `alwaysOverrideFieldNames` (optional)
+If undefined, the top-level setting will be used. If set to `true` or `false`, it will override the top-level `alwaysOverrideFieldNames` setting for this specific table.
+
+## Field options
+
+Within each table's `fields` array, you can specify field-level overrides.
+
+
+
+### `fieldName` (required)
+The field name this override applies to. Must match exactly the name of a field in the table's metadata.
+
+### `exclude` (optional)
+If set to `true`, this field will be excluded from generation entirely. Useful for fields you don't need in your TypeScript types.
+
+### `typeOverride` (optional)
+Override the inferred field type from metadata. The available options are:
+
+- `"text"`: Treats the field as a text field
+- `"number"`: Treats the field as a number field
+- `"boolean"`: Treats the field as a boolean (validated with `z.coerce.boolean()`)
+- `"fmBooleanNumber"`: Same as boolean, explicit FileMaker 0/1 pattern
+- `"date"`: Treats the field as a date field
+- `"timestamp"`: Treats the field as a timestamp field
+- `"container"`: Treats the field as a container field
+
+
+ The typegen tool will attempt to infer the correct field type from the OData metadata. Use `typeOverride` only when you need to override the inferred type.
+
+
+## Example configuration
+
+Here's a complete example of an OData configuration:
+
+```jsonc title="proofkit-typegen-config.jsonc"
+{
+ "$schema": "https://proofkit.dev/typegen-config-schema.json",
+ "config": {
+ "type": "fmodata",
+ "configName": "Production OData",
+ "path": "schema/odata",
+ "reduceMetadata": true,
+ "clearOldFiles": false,
+ "alwaysOverrideFieldNames": true,
+ "envNames": {
+ "server": "ODATA_SERVER_URL",
+ "db": "ODATA_DATABASE_NAME",
+ "auth": {
+ "apiKey": "ODATA_API_KEY"
+ }
+ },
+ "tables": [
+ {
+ "tableName": "Customers",
+ "variableName": "Customers",
+ "fields": [
+ {
+ "fieldName": "InternalID",
+ "exclude": true
+ },
+ {
+ "fieldName": "Status",
+ "typeOverride": "boolean"
+ }
+ ]
+ },
+ {
+ "tableName": "Orders",
+ "reduceMetadata": false,
+ "fields": [
+ {
+ "fieldName": "OrderDate",
+ "typeOverride": "date"
+ }
+ ]
+ }
+ ]
+ }
+}
+```
+
diff --git a/apps/docs/content/docs/typegen/config.mdx b/apps/docs/content/docs/typegen/config.mdx
index fb3262fc..9ef5a49f 100644
--- a/apps/docs/content/docs/typegen/config.mdx
+++ b/apps/docs/content/docs/typegen/config.mdx
@@ -1,5 +1,5 @@
---
-title: Configuration
+title: Configuration (Data API)
---
import { TypeTable } from "fumadocs-ui/components/type-table";
diff --git a/apps/docs/content/docs/typegen/meta.json b/apps/docs/content/docs/typegen/meta.json
index 3ab99607..75e3fdb2 100644
--- a/apps/docs/content/docs/typegen/meta.json
+++ b/apps/docs/content/docs/typegen/meta.json
@@ -8,8 +8,10 @@
"index",
"faq",
"customization",
+ "ui",
"---Reference---",
"config",
+ "config-odata",
"options"
]
}
diff --git a/apps/docs/content/docs/typegen/options.mdx b/apps/docs/content/docs/typegen/options.mdx
index c342c82b..da215e83 100644
--- a/apps/docs/content/docs/typegen/options.mdx
+++ b/apps/docs/content/docs/typegen/options.mdx
@@ -26,7 +26,6 @@ npx @proofkit/typegen generate
This is also the default command, so "generate" is optional. If this command is run without any config file detected, you will be prompted to create the config file (the `init` command).
-See [Global Options](#global-options) for `--config` usage.
### `--env-path <path>`
@@ -36,9 +35,13 @@ Set a custom path for where your environment variables are stored.
Recreate the overrides file(s), even if they already exist.
-### `--skip-env-check`
-Ignore loading environment variables from a file. Use this option if you are injecting environment variables directly as the command runs.
+## `ui` command
+```bash
+npx @proofkit/typegen ui
+```
+Launch the typegen web interface for easy configuration.
+
## `init` command
diff --git a/apps/docs/content/docs/typegen/ui.mdx b/apps/docs/content/docs/typegen/ui.mdx
new file mode 100644
index 00000000..20ae4b14
--- /dev/null
+++ b/apps/docs/content/docs/typegen/ui.mdx
@@ -0,0 +1,25 @@
+---
+title: Typegen UI
+---
+
+The typegen tool has a built-in web interface for editing your JSON config file and running the typegen scripts. It's helpful for making sure your environment variables are set up correctly, and it can autocomplete layout, field, and table names into the config file.
+
+To launch the UI, run the following command and a browser window will open at `http://localhost:3141`:
+
+```bash
+npx @proofkit/typegen@latest ui
+```
+
+
+## CLI options
+
+The UI can be configured with the following CLI options:
+
+### `--port <port>`
+Set the port for the UI server.
+
+### `--config <path>`
+Set a custom filename/path for where the config file is located or will be created. The file name must end with either `.jsonc` or `.json`.
+
+### `--no-open`
+Don't automatically open the browser.
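+
+For example, to run the UI on a different port with a custom config file and without auto-opening the browser (the port and filename here are illustrative):
+
+```bash
+npx @proofkit/typegen@latest ui --port 4000 --config ./typegen.config.jsonc --no-open
+```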
\ No newline at end of file
diff --git a/apps/docs/next.config.ts b/apps/docs/next.config.ts
index 61483a15..2a41229c 100644
--- a/apps/docs/next.config.ts
+++ b/apps/docs/next.config.ts
@@ -2,6 +2,7 @@ import { createMDX } from "fumadocs-mdx/next";
import { type NextConfig } from "next";
import { validateRegistry } from "@proofkit/registry";
import { source } from "./src/lib/source";
+import path from "path";
const withMDX = createMDX();
// validateRegistry();
@@ -9,7 +10,24 @@ const withMDX = createMDX();
const config: NextConfig = {
reactStrictMode: true,
serverExternalPackages: ["typescript", "twoslash", "shiki"],
- transpilePackages: ["@proofkit/fmdapi", "@proofkit/registry"],
+ transpilePackages: [
+ "@proofkit/fmdapi",
+ "@proofkit/registry",
+ "@proofkit/typegen",
+ ],
+ turbopack: {
+ root: path.resolve(__dirname, "../.."),
+ },
+ webpack: (config, { isServer }) => {
+ // Resolve @proofkit/typegen/config to source files for development
+ config.resolve.alias = {
+ ...config.resolve.alias,
+ "@proofkit/typegen/config": require.resolve(
+ "@proofkit/typegen/src/types.ts",
+ ),
+ };
+ return config;
+ },
async redirects() {
return [
{
diff --git a/apps/docs/package.json b/apps/docs/package.json
index 9731a6e7..789954e7 100644
--- a/apps/docs/package.json
+++ b/apps/docs/package.json
@@ -3,8 +3,8 @@
"version": "0.0.2",
"private": true,
"scripts": {
- "build": "node scripts/bundle-registry-templates.js && next build",
- "dev": "next dev -p 3005 --turbo",
+ "build": "pnpm --filter @proofkit/typegen build && node scripts/bundle-registry-templates.js && next build",
+ "dev": "next dev -p 3005",
"start": "next start -p 3005",
"postinstall": "fumadocs-mdx",
"test": "vitest run"
@@ -31,16 +31,16 @@
"hono": "^4.9.0",
"jiti": "^1.21.7",
"lucide-react": "^0.511.0",
- "next": "^15.5.8",
+ "next": "16.1.0",
"next-themes": "^0.4.6",
- "react": "^19.1.1",
- "react-dom": "^19.1.1",
+ "react": "19.2.3",
+ "react-dom": "19.2.3",
"shadcn": "^2.10.0",
"shiki": "^3.13.0",
"tailwind-merge": "^3.3.1",
"ts-morph": "^26.0.0",
"twoslash": "^0.3.4",
- "zod": "3.25.64"
+ "zod": "^4.1.13"
},
"devDependencies": {
"@proofkit/fmdapi": "workspace:*",
@@ -48,14 +48,20 @@
"@types/jest": "^29.5.14",
"@types/mdx": "^2.0.13",
"@types/node": "^22.17.1",
- "@types/react": "^19.1.10",
- "@types/react-dom": "^19.1.7",
+ "@types/react": "19.2.7",
+ "@types/react-dom": "19.2.3",
"eslint-plugin-prettier": "^5.5.4",
"happy-dom": "^15.11.7",
"postcss": "^8.5.6",
"tailwindcss": "^4.1.11",
"tw-animate-css": "^1.3.6",
- "typescript": "^5.9.2",
- "vitest": "^3.2.4"
+ "typescript": "^5.9.3",
+ "vitest": "^4.0.7"
+ },
+ "pnpm": {
+ "overrides": {
+ "@types/react": "19.2.7",
+ "@types/react-dom": "19.2.3"
+ }
}
}
diff --git a/apps/docs/tests/utils.manifest.test.ts b/apps/docs/tests/utils.manifest.test.ts
index 604f90b6..60983014 100644
--- a/apps/docs/tests/utils.manifest.test.ts
+++ b/apps/docs/tests/utils.manifest.test.ts
@@ -12,9 +12,10 @@ describe("Registry utils (dynamic scanning)", () => {
// Should find the mode-toggle template
expect(index.length).toBeGreaterThan(0);
expect(index[0]).toHaveProperty("name");
- expect(index[0]).toHaveProperty("type");
expect(index[0]).toHaveProperty("category");
- // RegistryIndexItem only has name, type, and category - not files
+ expect(index[0]).toHaveProperty("title");
+ expect(index[0]).toHaveProperty("description");
+ // RegistryIndexItem has name, category, title, description - not type or files
});
it("reads a known template (mode-toggle)", async () => {
diff --git a/apps/docs/tsconfig.json b/apps/docs/tsconfig.json
index cecc2912..9b1f5c91 100644
--- a/apps/docs/tsconfig.json
+++ b/apps/docs/tsconfig.json
@@ -2,7 +2,11 @@
"compilerOptions": {
"baseUrl": ".",
"target": "ESNext",
- "lib": ["dom", "dom.iterable", "esnext"],
+ "lib": [
+ "dom",
+ "dom.iterable",
+ "esnext"
+ ],
"allowJs": true,
"skipLibCheck": true,
"strict": true,
@@ -13,13 +17,21 @@
"moduleResolution": "bundler",
"resolveJsonModule": true,
"isolatedModules": true,
- "jsx": "preserve",
+ "jsx": "react-jsx",
"incremental": true,
"paths": {
- "@/.source": ["./.source/index.ts"],
- "@/registry/*": ["./src/registry/*"],
- "@/*": ["./src/*"],
- "@/components/*": ["./src/components/*"]
+ "@/.source": [
+ "./.source/index.ts"
+ ],
+ "@/registry/*": [
+ "./src/registry/*"
+ ],
+ "@/*": [
+ "./src/*"
+ ],
+ "@/components/*": [
+ "./src/components/*"
+ ]
},
"plugins": [
{
@@ -27,6 +39,15 @@
}
]
},
- "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
- "exclude": ["node_modules", "public/registry-templates/**/*"]
+ "include": [
+ "next-env.d.ts",
+ "**/*.ts",
+ "**/*.tsx",
+ ".next/types/**/*.ts",
+ ".next/dev/types/**/*.ts"
+ ],
+ "exclude": [
+ "node_modules",
+ "public/registry-templates/**/*"
+ ]
}
diff --git a/package.json b/package.json
index 1f103314..95a8ca02 100644
--- a/package.json
+++ b/package.json
@@ -21,8 +21,8 @@
"knip": "^5.56.0",
"prettier": "^3.5.3",
"turbo": "^2.5.4",
- "typescript": "^5.9.2",
- "vitest": "^3.2.4"
+ "typescript": "^5.9.3",
+ "vitest": "^4.0.7"
},
"packageManager": "pnpm@10.14.0",
"engines": {
diff --git a/packages/better-auth/package.json b/packages/better-auth/package.json
index 4b85b8e5..e6985554 100644
--- a/packages/better-auth/package.json
+++ b/packages/better-auth/package.json
@@ -58,7 +58,7 @@
"odata-query": "^8.0.4",
"prompts": "^2.4.2",
"vite": "^6.3.4",
- "zod": "3.25.64"
+ "zod": "^4.1.13"
},
"devDependencies": {
"@types/fs-extra": "^11.0.4",
@@ -66,7 +66,7 @@
"@vitest/ui": "^3.2.4",
"fm-odata-client": "^3.0.1",
"publint": "^0.3.12",
- "typescript": "^5.9.2",
- "vitest": "^3.2.4"
+ "typescript": "^5.9.3",
+ "vitest": "^4.0.7"
}
}
diff --git a/packages/cli/package.json b/packages/cli/package.json
index 14636c7d..aea6db7d 100644
--- a/packages/cli/package.json
+++ b/packages/cli/package.json
@@ -67,7 +67,7 @@
"chalk": "5.4.1",
"commander": "^14.0.0",
"dotenv": "^16.5.0",
- "es-toolkit": "^1.15.1",
+ "es-toolkit": "^1.38.0",
"execa": "^9.5.1",
"fast-glob": "^3.3.3",
"fs-extra": "^11.3.0",
@@ -96,7 +96,7 @@
"@proofkit/registry": "workspace:*",
"@rollup/plugin-replace": "^6.0.3",
"@t3-oss/env-nextjs": "^0.10.1",
- "@tanstack/react-query": "^5.49.2",
+ "@tanstack/react-query": "^5.76.1",
"@trpc/client": "11.0.0-rc.441",
"@trpc/next": "11.0.0-rc.441",
"@trpc/react-query": "11.0.0-rc.441",
@@ -108,7 +108,7 @@
"@types/randomstring": "^1.3.0",
"@types/react": "^19.1.10",
"@types/semver": "^7.7.0",
- "@vitest/coverage-v8": "^1.4.0",
+ "@vitest/coverage-v8": "^2.1.8",
"drizzle-kit": "^0.21.4",
"drizzle-orm": "^0.30.10",
"mysql2": "^3.9.7",
@@ -123,8 +123,8 @@
"tailwindcss": "^4.1.11",
"tsdown": "^0.14.1",
"type-fest": "^3.13.1",
- "typescript": "^5.9.2",
- "vitest": "^3.2.4",
- "zod": "3.25.64"
+ "typescript": "^5.9.3",
+ "vitest": "^4.0.7",
+ "zod": "^4.1.13"
}
}
diff --git a/packages/cli/src/utils/formatting.ts b/packages/cli/src/utils/formatting.ts
index 25959dbb..4edbeaba 100644
--- a/packages/cli/src/utils/formatting.ts
+++ b/packages/cli/src/utils/formatting.ts
@@ -1,4 +1,4 @@
-import { format, getFileInfo } from "prettier";
+import * as prettier from "prettier";
import { Project } from "ts-morph";
import { state } from "~/state.js";
@@ -14,11 +14,13 @@ export async function formatAndSaveSourceFiles(project: Project) {
// run each file through the prettier formatter
for await (const file of files) {
const filePath = file.getFilePath();
- const fileInfo = await getFileInfo(filePath);
+ const fileInfo = (await prettier.getFileInfo?.(filePath)) ?? {
+ ignored: false,
+ };
if (fileInfo.ignored) continue;
- const formatted = await format(file.getFullText(), {
+ const formatted = await prettier.format(file.getFullText(), {
filepath: filePath,
});
file.replaceWithText(formatted);
diff --git a/packages/cli/vitest.config.ts b/packages/cli/vitest.config.ts
index b6e696ab..545dbfbb 100644
--- a/packages/cli/vitest.config.ts
+++ b/packages/cli/vitest.config.ts
@@ -11,9 +11,11 @@ export default defineConfig({
environment: "node",
setupFiles: ["./tests/setup.ts"],
include: ["tests/**/*.test.ts"],
+ testTimeout: 60000, // 60 seconds for CLI tests which can be slow
coverage: {
provider: "v8",
reporter: ["text", "json", "html"],
+ include: ["src/**/*.ts"],
},
},
});
diff --git a/packages/fmdapi/package.json b/packages/fmdapi/package.json
index c1a9d95e..97be322d 100644
--- a/packages/fmdapi/package.json
+++ b/packages/fmdapi/package.json
@@ -58,7 +58,7 @@
"fs-extra": "^11.3.0",
"ts-morph": "^26.0.0",
"vite": "^6.3.4",
- "zod": "3.25.64"
+ "zod": "^4.1.13"
},
"devDependencies": {
"@types/fs-extra": "^11.0.4",
@@ -72,8 +72,8 @@
"prettier": "^3.5.3",
"publint": "^0.3.12",
"ts-toolbelt": "^9.6.0",
- "typescript": "^5.9.2",
- "vitest": "^3.2.4"
+ "typescript": "^5.9.3",
+ "vitest": "^4.0.7"
},
"engines": {
"node": ">=18.0.0"
diff --git a/packages/fmodata/README.md b/packages/fmodata/README.md
index 13950147..2d742318 100644
--- a/packages/fmodata/README.md
+++ b/packages/fmodata/README.md
@@ -778,6 +778,173 @@ console.log(result.result.recordId);
**Note:** OData doesn't support script names with special characters (e.g., `@`, `&`, `/`) or script names beginning with a number. TypeScript will catch these at compile time.
+## Webhooks
+
+Webhooks allow you to receive notifications when data changes in your FileMaker database. The library provides a type-safe API for managing webhooks through the `db.webhook` property.
+
+### Adding a Webhook
+
+Create a new webhook to monitor a table for changes:
+
+```typescript
+// Basic webhook
+const result = await db.webhook.add({
+ webhook: "https://example.com/webhook",
+ tableName: contactsTable,
+});
+
+// Access the created webhook ID
+console.log(result.webHookResult.webHookID);
+```
+
+### Webhook Configuration Options
+
+Webhooks support various configuration options:
+
+```typescript
+// With custom headers
+const result = await db.webhook.add({
+ webhook: "https://example.com/webhook",
+ tableName: contactsTable,
+ headers: {
+ "X-Custom-Header": "value",
+ Authorization: "Bearer token",
+ },
+ notifySchemaChanges: true, // Notify when schema changes
+});
+
+// With field selection (using column references)
+const result = await db.webhook.add({
+ webhook: "https://example.com/webhook",
+ tableName: contacts,
+ select: [contacts.name, contacts.email, contacts.PrimaryKey],
+});
+
+// With filtering (using filter expressions)
+import { and, eq, gt } from "@proofkit/fmodata";
+
+const result = await db.webhook.add({
+ webhook: "https://example.com/webhook",
+ tableName: contacts,
+ filter: eq(contacts.active, true),
+ select: [contacts.name, contacts.email],
+});
+
+// Complex filter example
+const result = await db.webhook.add({
+ webhook: "https://example.com/webhook",
+ tableName: users,
+ filter: and(eq(users.active, true), gt(users.age, 18)),
+ select: [users.username, users.email],
+});
+```
+
+**Webhook Configuration Properties:**
+
+- `webhook` (required) - The URL to call when the webhook is triggered
+- `tableName` (required) - The `FMTable` instance for the table to monitor
+- `headers` (optional) - Custom headers to include in webhook requests
+- `notifySchemaChanges` (optional) - Whether to notify on schema changes
+- `select` (optional) - Field selection as a string or array of `Column` references
+- `filter` (optional) - Filter expression (string or `FilterExpression`) to limit which records trigger the webhook
+
+### Listing Webhooks
+
+Get all webhooks configured for the database:
+
+```typescript
+const result = await db.webhook.list();
+
+console.log(result.Status); // Status of the operation
+console.log(result.WebHook); // Array of webhook configurations
+
+result.WebHook.forEach((webhook) => {
+ console.log(`Webhook ${webhook.webHookID}:`);
+ console.log(` Table: ${webhook.tableName}`);
+ console.log(` URL: ${webhook.url}`);
+ console.log(` Notify Schema Changes: ${webhook.notifySchemaChanges}`);
+ console.log(` Select: ${webhook.select}`);
+ console.log(` Filter: ${webhook.filter}`);
+ console.log(` Pending Operations: ${webhook.pendingOperations.length}`);
+});
+```
+
+### Getting a Webhook
+
+Retrieve a specific webhook by ID:
+
+```typescript
+const webhook = await db.webhook.get(1);
+
+console.log(webhook.webHookID);
+console.log(webhook.tableName);
+console.log(webhook.url);
+console.log(webhook.headers);
+console.log(webhook.notifySchemaChanges);
+console.log(webhook.select);
+console.log(webhook.filter);
+console.log(webhook.pendingOperations);
+```
+
+### Removing a Webhook
+
+Delete a webhook by ID:
+
+```typescript
+await db.webhook.remove(1);
+```
+
+### Invoking a Webhook
+
+Manually trigger a webhook. This is useful for testing or triggering webhooks on-demand:
+
+```typescript
+// Invoke for all rows matching the webhook's filter
+await db.webhook.invoke(1);
+
+// Invoke for specific row IDs
+await db.webhook.invoke(1, { rowIDs: [63, 61] });
+```
+
+### Complete Example
+
+Here's a complete example of setting up and managing webhooks:
+
+```typescript
+import { eq } from "@proofkit/fmodata";
+
+// Add a webhook to monitor active contacts
+const addResult = await db.webhook.add({
+ webhook: "https://api.example.com/webhooks/contacts",
+ tableName: contacts,
+ headers: {
+ "X-API-Key": "your-api-key",
+ },
+ filter: eq(contacts.active, true),
+ select: [contacts.name, contacts.email, contacts.PrimaryKey],
+ notifySchemaChanges: false,
+});
+
+const webhookId = addResult.webHookResult.webHookID;
+console.log(`Created webhook with ID: ${webhookId}`);
+
+// List all webhooks
+const listResult = await db.webhook.list();
+console.log(`Total webhooks: ${listResult.WebHook.length}`);
+
+// Get the webhook we just created
+const webhook = await db.webhook.get(webhookId);
+console.log(`Webhook URL: ${webhook.url}`);
+
+// Manually invoke the webhook for specific records
+await db.webhook.invoke(webhookId, { rowIDs: [1, 2, 3] });
+
+// Remove the webhook when done
+await db.webhook.remove(webhookId);
+```
+
+**Note:** Webhooks are triggered automatically by FileMaker when records matching the webhook's filter are created, updated, or deleted. The `invoke()` method allows you to manually trigger webhooks for testing or on-demand processing.
+
## Batch Operations
Batch operations allow you to execute multiple queries and operations together in a single request. All operations in a batch are executed atomically - they all succeed or all fail together. This is both more efficient (fewer network round-trips) and ensures data consistency across related operations.
@@ -1275,6 +1442,37 @@ const users = fmTableOccurrence(
);
```
+### Special Columns (ROWID and ROWMODID)
+
+FileMaker provides special columns `ROWID` and `ROWMODID` that uniquely identify records and track modifications. These can be included in query responses when enabled.
+
+Enable special columns at the database level:
+
+```typescript
+const db = connection.database("MyDatabase", {
+ includeSpecialColumns: true,
+});
+
+const result = await db.from(users).list().execute();
+// result.data[0] will have ROWID and ROWMODID properties
+```
+
+Override at the request level:
+
+```typescript
+// Enable for this request only
+const result = await db.from(users).list().execute({
+ includeSpecialColumns: true,
+});
+
+// Disable for this request
+const result = await db.from(users).list().execute({
+ includeSpecialColumns: false,
+});
+```
+
+**Important:** Special columns are only included when no `$select` query is applied (per OData specification). When using `.select()`, special columns are excluded even if `includeSpecialColumns` is enabled.
+
### Error Handling
All operations return a `Result` type with either `data` or `error`. The library provides rich error types that help you handle different error scenarios appropriately.
diff --git a/packages/fmodata/package.json b/packages/fmodata/package.json
index 72e875e0..7b51a9f0 100644
--- a/packages/fmodata/package.json
+++ b/packages/fmodata/package.json
@@ -1,6 +1,6 @@
{
"name": "@proofkit/fmodata",
- "version": "0.1.0-alpha.19",
+ "version": "0.1.0-alpha.20",
"description": "FileMaker OData API client",
"repository": "git@github.com:proofgeist/proofkit.git",
"author": "Eric <37158449+eluce2@users.noreply.github.com>",
@@ -63,7 +63,7 @@
"vite": "^6.3.4",
"vite-plugin-dts": "^4.5.4",
"vitest": "^4.0.7",
- "zod": "4.1.12"
+ "zod": "^4.1.13"
},
"engines": {
"node": ">=18.0.0"
diff --git a/packages/fmodata/scripts/capture-responses.ts b/packages/fmodata/scripts/capture-responses.ts
index 7fa66266..fb52ab75 100644
--- a/packages/fmodata/scripts/capture-responses.ts
+++ b/packages/fmodata/scripts/capture-responses.ts
@@ -35,6 +35,7 @@ import path from "path";
import { fileURLToPath } from "url";
import { config } from "dotenv";
import { writeFileSync } from "fs";
+import * as prettier from "prettier";
import createClient from "@fetchkit/ffetch";
import { MOCK_SERVER_URL } from "../tests/utils/mock-server-url";
@@ -189,7 +190,7 @@ const queriesToCapture: {
expectError?: boolean;
execute: (
  client: ReturnType<typeof createClient>,
- ) => Promise<{ url: string; response: Response }>;
+ ) => Promise<{ url: string; method: string; response: Response }>;
}[] = [
{
name: "list-basic",
@@ -199,7 +200,7 @@ const queriesToCapture: {
const response = await client(path);
// Get the full URL from the response
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
},
},
{
@@ -209,7 +210,7 @@ const queriesToCapture: {
const path = "/contacts?$select=name,PrimaryKey&$top=10";
const response = await client(path);
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
},
},
{
@@ -219,7 +220,7 @@ const queriesToCapture: {
const path = "/contacts?$orderby=name&$top=5";
const response = await client(path);
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
},
},
{
@@ -229,7 +230,7 @@ const queriesToCapture: {
const path = "/contacts?$top=2&$skip=2";
const response = await client(path);
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
},
},
@@ -248,7 +249,7 @@ const queriesToCapture: {
},
});
const url = response.url;
- return { url, response };
+ return { url, method: "POST", response };
},
},
@@ -266,7 +267,7 @@ const queriesToCapture: {
});
const url = response.url;
- return { url, response };
+ return { url, method: "POST", response };
},
},
{
@@ -304,7 +305,7 @@ const queriesToCapture: {
const path = `/contacts('${recordId}')`;
const response = await client(path);
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
},
},
// Error cases - intentionally invalid queries to capture error responses
@@ -316,7 +317,7 @@ const queriesToCapture: {
const path = "/contacts?$select=InvalidFieldName";
const response = await client(path);
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
},
},
{
@@ -327,7 +328,7 @@ const queriesToCapture: {
const path = "/contacts?$orderby=InvalidFieldName";
const response = await client(path);
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
},
},
{
@@ -339,7 +340,7 @@ const queriesToCapture: {
const path = "/contacts('00000000-0000-0000-0000-000000000000')";
const response = await client(path);
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
},
},
{
@@ -350,7 +351,7 @@ const queriesToCapture: {
const path = "/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')/name";
const response = await client(path);
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
},
},
{
@@ -361,7 +362,7 @@ const queriesToCapture: {
const path = "/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')/users";
const response = await client(path);
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
},
},
{
@@ -371,7 +372,7 @@ const queriesToCapture: {
const path = "/contacts?$expand=users($select=not_real_field)";
const response = await client(path);
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
},
},
{
@@ -382,7 +383,7 @@ const queriesToCapture: {
"/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')?$expand=users";
const response = await client(path);
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
},
},
{
@@ -393,7 +394,7 @@ const queriesToCapture: {
"/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')?$expand=users($expand=user_customer($select=name))";
const response = await client(path);
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
},
},
{
@@ -403,7 +404,123 @@ const queriesToCapture: {
const path = `/contacts?$top=2&$expand=users($expand=user_customer($select=name))`;
const response = await client(path);
const url = response.url;
- return { url, response };
+ return { url, method: "GET", response };
+ },
+ },
+ // Webhook API queries
+ {
+ name: "webhook-list",
+ description: "List all webhooks",
+ execute: async (client) => {
+ const path = "/Webhook.GetAll";
+ const response = await client(path);
+ const url = response.url;
+ return { url, method: "GET", response };
+ },
+ },
+ {
+ name: "webhook-add",
+ description: "Add a new webhook",
+ execute: async (client) => {
+ const path = "/Webhook.Add";
+ const response = await client(path, {
+ method: "POST",
+ body: {
+ webhook: "https://example.com/webhook",
+ tableName: "contacts",
+ headers: {
+ "X-Custom-Header": "test-value",
+ },
+ notifySchemaChanges: false,
+ select: "",
+ filter: "",
+ },
+ });
+ const url = response.url;
+
+ // Clone the response before extracting the data
+ const cloned = response.clone();
+ const newWebhookId = (await cloned.json()).webHookResult.webHookID;
+ await client(`/Webhook.Delete(${newWebhookId})`);
+
+ return { url, method: "POST", response };
+ },
+ },
+ {
+ name: "webhook-add-with-options",
+    description: "Add a new webhook with select and filter options",
+ execute: async (client) => {
+ const path = "/Webhook.Add";
+ const response = await client(path, {
+ method: "POST",
+ body: {
+ webhook: "https://example.com/webhook",
+ tableName: "contacts",
+ headers: {
+ "X-Custom-Header": "test-value",
+ },
+ notifySchemaChanges: false,
+ select: "name, age",
+ filter: "name eq 'John'",
+ },
+ });
+ const url = response.url;
+
+ // Clone the response before extracting the data
+ const cloned = response.clone();
+ const newWebhookId = (await cloned.json()).webHookResult.webHookID;
+ await client(`/Webhook.Delete(${newWebhookId})`);
+
+ return { url, method: "POST", response };
+ },
+ },
+ {
+ name: "webhook-get",
+ description: "Get a webhook by ID",
+ execute: async (client) => {
+ const listResponse = await client("/Webhook.GetAll");
+ const listData = await listResponse.json();
+ const webhookId = listData.WebHook?.[0]?.webHookID;
+ if (!webhookId) {
+ throw new Error("No webhook ID found");
+ }
+
+      // Get the webhook we found in the list above
+ const path = `/Webhook.Get(${webhookId})`;
+ const response = await client(path);
+ const url = response.url;
+ return { url, method: "GET", response };
+ },
+ },
+ {
+ name: "webhook-get-not-found",
+ description: "Error response for non-existent webhook",
+ expectError: true,
+ execute: async (client) => {
+ const path = "/Webhook.Get(99999)";
+ const response = await client(path);
+ const url = response.url;
+ return { url, method: "GET", response };
+ },
+ },
+ {
+ name: "webhook-delete",
+ description: "Delete a webhook by ID",
+ execute: async (client) => {
+ const listResponse = await client("/Webhook.GetAll");
+ const listData = await listResponse.json();
+ const webhookId = listData.WebHook?.[0]?.webHookID;
+ if (!webhookId) {
+ throw new Error("No webhook ID found");
+ }
+
+      // Delete the webhook we found in the list above
+ const path = `/Webhook.Delete(${webhookId})`;
+ const response = await client(path, {
+ method: "POST",
+ });
+ const url = response.url;
+ return { url, method: "POST", response };
},
},
];
@@ -489,7 +606,7 @@ function generateResponsesFile(
* 2. Run: pnpm capture
* 3. The captured response will be added to this file automatically
*
- * You can manually edit responses here if you need to modify test data.
+ * You MUST NOT manually edit this file. Any changes will be overwritten by the capture script.
*/
export type MockResponse = {
@@ -539,7 +656,7 @@ async function main() {
console.log(`Capturing: ${queryDef.name} - ${queryDef.description}`);
// Execute the query directly with ffetch
- const { url, response } = await queryDef.execute(client);
+ const { url, method, response } = await queryDef.execute(client);
// Capture the response data (even for error status codes)
const status = response.status;
@@ -567,7 +684,7 @@ async function main() {
// Store captured response (including error responses)
capturedResponses[queryDef.name] = {
url: sanitizedUrl,
- method: "GET",
+ method,
status,
headers:
contentType || location
@@ -625,6 +742,8 @@ async function main() {
serverUrl,
);
+ // For error cases, we don't have the method from execute, so default to GET
+ // This should rarely happen as most errors still return a response
capturedResponses[queryDef.name] = {
url: sanitizedUrl,
method: "GET",
@@ -669,7 +788,13 @@ async function main() {
"../tests/fixtures/responses.ts",
);
const fileContent = generateResponsesFile(capturedResponses);
- writeFileSync(fixturesPath, fileContent, "utf-8");
+
+ // Format the file content with prettier
+ const formattedContent = await prettier.format(fileContent, {
+ filepath: fixturesPath,
+ });
+
+ writeFileSync(fixturesPath, formattedContent, "utf-8");
console.log(`\nResponses written to: ${fixturesPath}`);
console.log("\nYou can now use these mocks in your tests!");
diff --git a/packages/fmodata/scripts/experiment-batch.ts b/packages/fmodata/scripts/experiment-batch.ts
deleted file mode 100644
index 44174f20..00000000
--- a/packages/fmodata/scripts/experiment-batch.ts
+++ /dev/null
@@ -1,614 +0,0 @@
-/**
- * Batch Operations Experiment Script
- *
- * This script experiments with batch operations containing inserts, updates,
- * and deletes to understand how FileMaker handles them, especially when
- * some operations fail.
- *
- * Usage:
- * cd packages/fmodata && pnpm tsx scripts/experiment-batch.ts
- */
-
-import { config } from "dotenv";
-import path from "path";
-import { fileURLToPath } from "url";
-import { z } from "zod/v4";
-import {
- FMServerConnection,
- fmTableOccurrence,
- textField,
- timestampField,
- eq,
-} from "../src/index";
-
-// Get __dirname equivalent in ES modules
-const __filename = fileURLToPath(import.meta.url);
-const __dirname = path.dirname(__filename);
-
-// Load environment variables
-config({ path: path.resolve(__dirname, "../.env.local") });
-
-const serverUrl = process.env.FMODATA_SERVER_URL;
-const username = process.env.FMODATA_USERNAME;
-const password = process.env.FMODATA_PASSWORD;
-const database = process.env.FMODATA_DATABASE;
-
-if (!serverUrl || !username || !password || !database) {
- throw new Error(
- "Environment variables required: FMODATA_SERVER_URL, FMODATA_USERNAME, FMODATA_PASSWORD, FMODATA_DATABASE",
- );
-}
-
-// Define schemas
-const contactsTO = fmTableOccurrence("contacts", {
- PrimaryKey: textField().primaryKey(),
- CreationTimestamp: timestampField(),
- CreatedBy: textField(),
- ModificationTimestamp: timestampField(),
- ModifiedBy: textField(),
- name: textField(),
- hobby: textField(),
- id_user: textField(),
-});
-
-// Create connection
-const connection = new FMServerConnection({
- serverUrl,
- auth: { username, password },
-});
-
-const db = connection.database(database, {
- occurrences: [contactsTO],
-});
-
-// Track created records for cleanup
-const createdRecordIds: string[] = [];
-
-async function cleanup() {
- console.log("\n🧹 Cleaning up created records...");
- for (const id of createdRecordIds) {
- try {
- await db.from("contacts").delete().byId(id).execute();
- console.log(` Deleted: ${id}`);
- } catch (error) {
- console.log(` Failed to delete ${id}:`, error);
- }
- }
-}
-
-async function experiment1_MultipleInserts() {
- console.log("\n" + "=".repeat(60));
- console.log("EXPERIMENT 1: Multiple Inserts in a Batch");
- console.log("=".repeat(60));
-
- const timestamp = Date.now();
- const insert1 = db.from("contacts").insert({
- name: `Batch Insert 1 - ${timestamp}`,
- hobby: "Insert Test",
- });
-
- const insert2 = db.from("contacts").insert({
- name: `Batch Insert 2 - ${timestamp}`,
- hobby: "Insert Test",
- });
-
- const insert3 = db.from("contacts").insert({
- name: `Batch Insert 3 - ${timestamp}`,
- hobby: "Insert Test",
- });
-
- console.log("\nExecuting batch with 3 insert operations...");
-
- const result = await db.batch([insert1, insert2, insert3]).execute();
-
- console.log("\nResult:");
- console.log(JSON.stringify(result, null, 2));
-
- if (result.data) {
- // Track for cleanup
- for (const item of result.data) {
- if (item && typeof item === "object" && "PrimaryKey" in item) {
- createdRecordIds.push(item.PrimaryKey as string);
- }
- }
- }
-
- return result;
-}
-
-async function experiment2_MixedOperations() {
- console.log("\n" + "=".repeat(60));
- console.log(
- "EXPERIMENT 2: Mixed Operations (GET + INSERT + UPDATE + DELETE)",
- );
- console.log("=".repeat(60));
-
- // First, create a record we can update/delete
- const timestamp = Date.now();
- const setupResult = await db
- .from("contacts")
- .insert({
- name: `Setup Record - ${timestamp}`,
- hobby: "Will be updated",
- })
- .execute();
-
- if (setupResult.error || !setupResult.data) {
- console.log("Failed to create setup record:", setupResult.error);
- return;
- }
-
- const setupRecordId = setupResult.data.PrimaryKey;
- console.log(`\nCreated setup record: ${setupRecordId}`);
-
- // Now create a batch with mixed operations
- const listQuery = db.from("contacts").list().top(2);
-
- const insertOp = db.from("contacts").insert({
- name: `Mixed Batch Insert - ${timestamp}`,
- hobby: "Mixed Test",
- });
-
- const updateOp = db
- .from("contacts")
- .update({ hobby: "Updated via batch" })
- .byId(setupRecordId);
-
- const deleteOp = db.from("contacts").delete().byId(setupRecordId);
-
- console.log("\nExecuting batch with: GET, INSERT, UPDATE, DELETE...");
-
- const result = await db
- .batch([listQuery, insertOp, updateOp, deleteOp])
- .execute();
-
- console.log("\nResult:");
- console.log(JSON.stringify(result, null, 2));
-
- if (result.data) {
- // Track insert result for cleanup
- const insertResult = result.data[1];
- if (
- insertResult &&
- typeof insertResult === "object" &&
- "PrimaryKey" in insertResult
- ) {
- createdRecordIds.push(insertResult.PrimaryKey as string);
- }
- }
-
- return result;
-}
-
-async function experiment3_FailingOperation() {
- console.log("\n" + "=".repeat(60));
- console.log("EXPERIMENT 3: Batch with a Failing Operation in the Middle");
- console.log("=".repeat(60));
-
- const timestamp = Date.now();
-
- // Create a valid insert
- const insert1 = db.from("contacts").insert({
- name: `Before Failure - ${timestamp}`,
- hobby: "Should succeed",
- });
-
- // Try to update a non-existent record (should fail)
- const failingUpdate = db
- .from("contacts")
- .update({ hobby: "This should fail" })
- .byId("00000000-0000-0000-0000-000000000000");
-
- // Another valid insert (should this succeed or fail?)
- const insert2 = db.from("contacts").insert({
- name: `After Failure - ${timestamp}`,
- hobby: "Should this succeed?",
- });
-
- console.log(
- "\nExecuting batch with: INSERT (valid), UPDATE (invalid ID), INSERT (valid)...",
- );
- console.log(
- "Question: What happens to the third operation when the second fails?",
- );
-
- const result = await db.batch([insert1, failingUpdate, insert2]).execute();
-
- console.log("\nResult:");
- console.log(JSON.stringify(result, null, 2));
-
- if (result.data) {
- for (const item of result.data) {
- if (item && typeof item === "object" && "PrimaryKey" in item) {
- createdRecordIds.push(item.PrimaryKey as string);
- }
- }
- }
-
- return result;
-}
-
-async function experiment4_FailingDelete() {
- console.log("\n" + "=".repeat(60));
- console.log("EXPERIMENT 4: Batch with a Failing Delete");
- console.log("=".repeat(60));
-
- const timestamp = Date.now();
-
- // Create a valid insert
- const insert1 = db.from("contacts").insert({
- name: `Before Delete Fail - ${timestamp}`,
- hobby: "Should succeed",
- });
-
- // Try to delete a non-existent record
- const failingDelete = db
- .from("contacts")
- .delete()
- .byId("00000000-0000-0000-0000-000000000000");
-
- // Another valid insert
- const insert2 = db.from("contacts").insert({
- name: `After Delete Fail - ${timestamp}`,
- hobby: "Should this succeed?",
- });
-
- console.log("\nExecuting batch with: INSERT, DELETE (invalid ID), INSERT...");
-
- const result = await db.batch([insert1, failingDelete, insert2]).execute();
-
- console.log("\nResult:");
- console.log(JSON.stringify(result, null, 2));
-
- if (result.data) {
- for (const item of result.data) {
- if (item && typeof item === "object" && "PrimaryKey" in item) {
- createdRecordIds.push(item.PrimaryKey as string);
- }
- }
- }
-
- return result;
-}
-
-async function experiment5_AllGetWithOneFailure() {
- console.log("\n" + "=".repeat(60));
- console.log(
- "EXPERIMENT 5: Multiple GETs with One Filter that Returns Nothing",
- );
- console.log("=".repeat(60));
-
- // Query that should return results
- const query1 = db.from("contacts").list().top(2);
-
- // Query with a filter that returns empty (not an error, just no results)
- const query2 = db
- .from(contactsTO)
- .list()
- .where(eq(contactsTO.name, "THIS_NAME_DEFINITELY_DOES_NOT_EXIST_12345"));
-
- // Another query that should return results
- const query3 = db.from("contacts").list().top(1);
-
- console.log(
- "\nExecuting batch with: GET (valid), GET (empty filter), GET (valid)...",
- );
-
- const result = await db.batch([query1, query2, query3]).execute();
-
- console.log("\nResult:");
- console.log(JSON.stringify(result, null, 2));
-
- return result;
-}
-
-async function experiment6_RawResponseInspection() {
- console.log("\n" + "=".repeat(60));
- console.log("EXPERIMENT 6: Raw Response Inspection - Direct Fetch");
- console.log("=".repeat(60));
-
- // Make a direct batch request to see raw response
- const timestamp = Date.now();
- const boundary = "batch_direct_test_123";
-
- const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`;
- const batchUrl = `${baseUrl}/$batch`;
-
- // Build a simple batch body with one GET
- const batchBody = [
- `--${boundary}`,
- "Content-Type: application/http",
- "Content-Transfer-Encoding: binary",
- "",
- `GET ${baseUrl}/contacts?$top=1 HTTP/1.1`,
- "",
- "",
- `--${boundary}--`,
- ].join("\r\n");
-
- console.log("\n--- Sending Request ---");
- console.log("URL:", batchUrl);
- console.log("Body:", batchBody);
-
- const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`;
-
- const response = await fetch(batchUrl, {
- method: "POST",
- headers: {
- Authorization: authHeader,
- "Content-Type": `multipart/mixed; boundary=${boundary}`,
- "OData-Version": "4.0",
- },
- body: batchBody,
- });
-
- console.log("\n--- Response Info ---");
- console.log("Status:", response.status, response.statusText);
- console.log("Content-Type:", response.headers.get("content-type"));
-
- const responseText = await response.text();
- console.log("\n--- Raw Response Body ---");
- console.log(responseText);
- console.log("--- End Raw Response ---");
-}
-
-async function experiment7_RawResponseWithInsert() {
- console.log("\n" + "=".repeat(60));
- console.log("EXPERIMENT 7: Raw Response - Insert with Prefer header");
- console.log("=".repeat(60));
-
- const timestamp = Date.now();
- const boundary = "batch_insert_test_456";
- const changesetBoundary = "changeset_insert_789";
-
- const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`;
- const batchUrl = `${baseUrl}/$batch`;
-
- const insertBody = JSON.stringify({
- name: `Direct Insert Test - ${timestamp}`,
- hobby: "Testing",
- });
-
- // Build a batch with INSERT using return=representation
- const batchBody = [
- `--${boundary}`,
- `Content-Type: multipart/mixed; boundary=${changesetBoundary}`,
- "",
- `--${changesetBoundary}`,
- "Content-Type: application/http",
- "Content-Transfer-Encoding: binary",
- "",
- `POST ${baseUrl}/contacts HTTP/1.1`,
- "Content-Type: application/json",
- "Prefer: return=representation",
- `Content-Length: ${insertBody.length}`,
- "",
- insertBody,
- `--${changesetBoundary}--`,
- `--${boundary}--`,
- ].join("\r\n");
-
- console.log("\n--- Sending Insert Request ---");
- console.log("Body:\n", batchBody);
-
- const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`;
-
- const response = await fetch(batchUrl, {
- method: "POST",
- headers: {
- Authorization: authHeader,
- "Content-Type": `multipart/mixed; boundary=${boundary}`,
- "OData-Version": "4.0",
- },
- body: batchBody,
- });
-
- console.log("\n--- Response Info ---");
- console.log("Status:", response.status, response.statusText);
- console.log("Content-Type:", response.headers.get("content-type"));
-
- const responseText = await response.text();
- console.log("\n--- Raw Response Body ---");
- console.log(responseText);
- console.log("--- End Raw Response ---");
-
- // Try to extract created record ID for cleanup
- const pkMatch = responseText.match(/"PrimaryKey":\s*"([^"]+)"/);
- if (pkMatch && pkMatch[1]) {
- createdRecordIds.push(pkMatch[1]);
- console.log("\nCreated record ID:", pkMatch[1]);
- }
-}
-
-async function experiment8_TrueError() {
- console.log("\n" + "=".repeat(60));
- console.log("EXPERIMENT 8: Raw Response - Query Non-Existent Table");
- console.log("=".repeat(60));
-
- const boundary = "batch_error_test";
- const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`;
- const batchUrl = `${baseUrl}/$batch`;
-
- // Build: GET (valid), GET (non-existent table), GET (valid)
- const batchBody = [
- `--${boundary}`,
- "Content-Type: application/http",
- "Content-Transfer-Encoding: binary",
- "",
- `GET ${baseUrl}/contacts?$top=1 HTTP/1.1`,
- "",
- "",
- `--${boundary}`,
- "Content-Type: application/http",
- "Content-Transfer-Encoding: binary",
- "",
- `GET ${baseUrl}/THIS_TABLE_DOES_NOT_EXIST?$top=1 HTTP/1.1`,
- "",
- "",
- `--${boundary}`,
- "Content-Type: application/http",
- "Content-Transfer-Encoding: binary",
- "",
- `GET ${baseUrl}/contacts?$top=2 HTTP/1.1`,
- "",
- "",
- `--${boundary}--`,
- ].join("\r\n");
-
- console.log("\n--- Sending Request with Non-Existent Table ---");
-
- const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`;
-
- const response = await fetch(batchUrl, {
- method: "POST",
- headers: {
- Authorization: authHeader,
- "Content-Type": `multipart/mixed; boundary=${boundary}`,
- "OData-Version": "4.0",
- },
- body: batchBody,
- });
-
- console.log("\n--- Response Info ---");
- console.log("Status:", response.status, response.statusText);
-
- const responseText = await response.text();
- console.log("\n--- Raw Response Body ---");
- console.log(responseText);
- console.log("--- End Raw Response ---");
-}
-
-async function experiment9_RawResponseWithFailure() {
- console.log("\n" + "=".repeat(60));
- console.log("EXPERIMENT 9: Raw Response - Mixed with Failure");
- console.log("=".repeat(60));
-
- const timestamp = Date.now();
- const boundary = "batch_fail_test";
- const cs1 = "changeset_1";
- const cs2 = "changeset_2";
-
- const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`;
- const batchUrl = `${baseUrl}/$batch`;
-
- const insertBody1 = JSON.stringify({
- name: `Before Fail - ${timestamp}`,
- hobby: "Test",
- });
- const updateBody = JSON.stringify({ hobby: "Should fail" });
- const insertBody2 = JSON.stringify({
- name: `After Fail - ${timestamp}`,
- hobby: "Test",
- });
-
- // Build: INSERT (valid), UPDATE (invalid ID), INSERT (valid)
- const batchBody = [
- // First changeset: valid insert
- `--${boundary}`,
- `Content-Type: multipart/mixed; boundary=${cs1}`,
- "",
- `--${cs1}`,
- "Content-Type: application/http",
- "Content-Transfer-Encoding: binary",
- "",
- `POST ${baseUrl}/contacts HTTP/1.1`,
- "Content-Type: application/json",
- "Prefer: return=representation",
- `Content-Length: ${insertBody1.length}`,
- "",
- insertBody1,
- `--${cs1}--`,
- // Second changeset: invalid update
- `--${boundary}`,
- `Content-Type: multipart/mixed; boundary=${cs2}`,
- "",
- `--${cs2}`,
- "Content-Type: application/http",
- "Content-Transfer-Encoding: binary",
- "",
- `PATCH ${baseUrl}/contacts('00000000-0000-0000-0000-000000000000') HTTP/1.1`,
- "Content-Type: application/json",
- `Content-Length: ${updateBody.length}`,
- "",
- updateBody,
- `--${cs2}--`,
- // Third changeset: valid insert
- `--${boundary}`,
- `Content-Type: multipart/mixed; boundary=changeset_3`,
- "",
- `--changeset_3`,
- "Content-Type: application/http",
- "Content-Transfer-Encoding: binary",
- "",
- `POST ${baseUrl}/contacts HTTP/1.1`,
- "Content-Type: application/json",
- "Prefer: return=representation",
- `Content-Length: ${insertBody2.length}`,
- "",
- insertBody2,
- `--changeset_3--`,
- `--${boundary}--`,
- ].join("\r\n");
-
- console.log("\n--- Sending Mixed Request with Invalid Update ---");
-
- const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`;
-
- const response = await fetch(batchUrl, {
- method: "POST",
- headers: {
- Authorization: authHeader,
- "Content-Type": `multipart/mixed; boundary=${boundary}`,
- "OData-Version": "4.0",
- },
- body: batchBody,
- });
-
- console.log("\n--- Response Info ---");
- console.log("Status:", response.status, response.statusText);
-
- const responseText = await response.text();
- console.log("\n--- Raw Response Body ---");
- console.log(responseText);
- console.log("--- End Raw Response ---");
-
- // Extract created record IDs for cleanup
- const pkMatches = responseText.matchAll(/"PrimaryKey":\s*"([^"]+)"/g);
- for (const match of pkMatches) {
- if (match[1]) {
- createdRecordIds.push(match[1]);
- console.log("Created record ID:", match[1]);
- }
- }
-}
-
-async function main() {
- console.log("🔬 Batch Operations Experiment");
- console.log("================================");
- console.log(`Server: ${serverUrl}`);
- console.log(`Database: ${database}`);
- console.log("");
-
- try {
- // Run experiments
- await experiment1_MultipleInserts();
- await experiment2_MixedOperations();
- await experiment3_FailingOperation();
- await experiment4_FailingDelete();
- await experiment5_AllGetWithOneFailure();
- await experiment6_RawResponseInspection();
- await experiment7_RawResponseWithInsert();
- await experiment8_TrueError();
- await experiment9_RawResponseWithFailure();
-
- console.log("\n" + "=".repeat(60));
- console.log("ALL EXPERIMENTS COMPLETE");
- console.log("=".repeat(60));
- } catch (error) {
- console.error("\n❌ Experiment failed with error:", error);
- } finally {
- await cleanup();
- }
-}
-
-main().catch(console.error);
diff --git a/packages/fmodata/scripts/test-webhooks.ts b/packages/fmodata/scripts/test-webhooks.ts
new file mode 100644
index 00000000..c7e4dc96
--- /dev/null
+++ b/packages/fmodata/scripts/test-webhooks.ts
@@ -0,0 +1,237 @@
+/**
+ * Webhook API Test Script
+ *
+ * This script tests all webhook methods against FileMaker Server
+ * to understand the exact format and types returned.
+ *
+ * Usage:
+ * bun run scripts/test-webhooks.ts
+ */
+
+import { config } from "dotenv";
+import path from "path";
+import { fileURLToPath } from "url";
+import {
+ FMServerConnection,
+ fmTableOccurrence,
+ textField,
+} from "@proofkit/fmodata";
+
+// Get __dirname equivalent in ES modules
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+// Load environment variables
+config({ path: path.resolve(__dirname, "../.env.local") });
+
+const serverUrl = process.env.FMODATA_SERVER_URL;
+const apiKey = process.env.FMODATA_API_KEY;
+const username = process.env.FMODATA_USERNAME;
+const password = process.env.FMODATA_PASSWORD;
+const database = process.env.FMODATA_DATABASE;
+
+if (!serverUrl) {
+ throw new Error("FMODATA_SERVER_URL environment variable is required");
+}
+
+if (!database) {
+ throw new Error("FMODATA_DATABASE environment variable is required");
+}
+
+// Use API key if available, otherwise username/password
+const auth = apiKey
+ ? { apiKey }
+ : username && password
+ ? { username, password }
+ : null;
+
+if (!auth) {
+ throw new Error(
+ "Either FMODATA_API_KEY or (FMODATA_USERNAME and FMODATA_PASSWORD) environment variables are required",
+ );
+}
+
+// Create a simple table occurrence for testing
+const contacts = fmTableOccurrence("contacts", {
+ PrimaryKey: textField().primaryKey(),
+ name: textField(),
+});
+
+async function testWebhookMethods() {
+ console.log("FileMaker OData Webhook API Test");
+ console.log("=================================\n");
+
+ const connection = new FMServerConnection({
+ serverUrl,
+ auth,
+ });
+
+ const db = connection.database(database!);
+
+ try {
+ // Test 1: List all webhooks
+ console.log("=== Test 1: List All Webhooks ===\n");
+ try {
+ const listResult = await db.webhook.list();
+ console.log("✅ list() succeeded");
+ console.log("Type:", typeof listResult);
+ console.log("Is Array:", Array.isArray(listResult));
+ console.log("Result structure:");
+ console.log(JSON.stringify(listResult, null, 2));
+ console.log("\nTypeScript type should be:");
+ console.log(" { Status: string; WebHook: Array<{ webHookID: number; tableName: string; url: string; ... }> }");
+ console.log("\n");
+ } catch (error: any) {
+ console.log("❌ list() failed:", error.message);
+ console.log("Error:", error);
+ console.log("\n");
+ }
+
+ // Test 2: Add a webhook
+ console.log("=== Test 2: Add Webhook ===\n");
+ let webhookId: string | number | undefined;
+ try {
+ const addResult = await db.webhook.add({
+ webhook: "https://example.com/webhook",
+ tableName: contacts,
+ headers: { "X-Custom-Header": "test-value" },
+ });
+ console.log("✅ add() succeeded");
+ console.log("Type:", typeof addResult);
+ console.log("Is Array:", Array.isArray(addResult));
+ console.log("Result structure:");
+ console.log(JSON.stringify(addResult, null, 2));
+ console.log("\nTypeScript type should be:");
+ console.log(" { webHookResult: { webHookID: number } }");
+
+ // Try to extract webhook ID from nested structure
+ if (typeof addResult === "object" && addResult !== null) {
+ if ("webHookResult" in addResult) {
+ const webHookResult = (addResult as any).webHookResult;
+ if (webHookResult && "webHookID" in webHookResult) {
+ webhookId = webHookResult.webHookID;
+ }
+ } else if ("id" in addResult) {
+ webhookId = (addResult as any).id;
+ } else if ("ID" in addResult) {
+ webhookId = (addResult as any).ID;
+ } else if ("webhookId" in addResult) {
+ webhookId = (addResult as any).webhookId;
+ }
+ }
+ console.log("Extracted webhook ID:", webhookId);
+ console.log("\n");
+ } catch (error: any) {
+ console.log("❌ add() failed:", error.message);
+ console.log("Error:", error);
+ console.log("\n");
+ }
+
+ // Test 3: Get a webhook (if we have an ID)
+ if (webhookId !== undefined) {
+ console.log("=== Test 3: Get Webhook ===\n");
+ try {
+ const getResult = await db.webhook.get(webhookId);
+ console.log("✅ get() succeeded");
+ console.log("Type:", typeof getResult);
+ console.log("Is Array:", Array.isArray(getResult));
+ console.log("Result structure:");
+ console.log(JSON.stringify(getResult, null, 2));
+ console.log("\nTypeScript type should be:");
+ console.log(" { webHookID: number; tableName: string; url: string; headers?: Record; notifySchemaChanges: boolean; select: string; filter: string; pendingOperations: unknown[] }");
+ console.log("\n");
+ } catch (error: any) {
+ console.log("❌ get() failed:", error.message);
+ console.log("Error:", error);
+ console.log("\n");
+ }
+ } else {
+ console.log("=== Test 3: Get Webhook ===\n");
+ console.log("⚠️ Skipping - no webhook ID available from add()");
+ console.log("\n");
+ }
+
+ // Test 4: Invoke a webhook (if we have an ID)
+ if (webhookId !== undefined) {
+ console.log("=== Test 4: Invoke Webhook (without rowIDs) ===\n");
+ try {
+ const invokeResult = await db.webhook.invoke(webhookId);
+ console.log("✅ invoke() succeeded (no rowIDs)");
+ console.log("Type:", typeof invokeResult);
+ console.log("Is Array:", Array.isArray(invokeResult));
+ console.log("Result:", JSON.stringify(invokeResult, null, 2));
+ console.log("\n");
+ } catch (error: any) {
+ console.log("❌ invoke() failed:", error.message);
+ console.log("Error:", error);
+ console.log("\n");
+ }
+
+ console.log("=== Test 5: Invoke Webhook (with rowIDs) ===\n");
+ try {
+ const invokeResult = await db.webhook.invoke(webhookId, {
+ rowIDs: [1, 2, 3],
+ });
+ console.log("✅ invoke() succeeded (with rowIDs)");
+ console.log("Type:", typeof invokeResult);
+ console.log("Is Array:", Array.isArray(invokeResult));
+ console.log("Result:", JSON.stringify(invokeResult, null, 2));
+ console.log("\n");
+ } catch (error: any) {
+ console.log("❌ invoke() failed:", error.message);
+ console.log("Error:", error);
+ console.log("\n");
+ }
+ } else {
+ console.log("=== Test 4 & 5: Invoke Webhook ===\n");
+ console.log("⚠️ Skipping - no webhook ID available from add()");
+ console.log("\n");
+ }
+
+ // Test 6: Remove a webhook (if we have an ID)
+ if (webhookId !== undefined) {
+ console.log("=== Test 6: Remove Webhook ===\n");
+ try {
+ await db.webhook.remove(webhookId);
+ console.log("✅ remove() succeeded");
+ console.log("(remove returns void, no data)");
+ console.log("\n");
+ } catch (error: any) {
+ console.log("❌ remove() failed:", error.message);
+ console.log("Error:", error);
+ console.log("\n");
+ }
+ } else {
+ console.log("=== Test 6: Remove Webhook ===\n");
+ console.log("⚠️ Skipping - no webhook ID available from add()");
+ console.log("\n");
+ }
+
+ // Test 7: Try to get a webhook that doesn't exist (error case)
+ console.log("=== Test 7: Get Non-Existent Webhook (Error Case) ===\n");
+ try {
+ await db.webhook.get(99999);
+ console.log("⚠️ get() succeeded (unexpected - webhook should not exist)");
+ console.log("\n");
+ } catch (error: any) {
+ console.log("✅ get() failed as expected");
+ console.log("Error type:", error.constructor.name);
+ console.log("Error message:", error.message);
+ console.log("Error:", JSON.stringify(error, Object.getOwnPropertyNames(error), 2));
+ console.log("\n");
+ }
+
+ } catch (error: any) {
+ console.error("\n❌ Test script failed:", error);
+ throw error;
+ }
+
+ console.log("=================================");
+ console.log("All tests complete!");
+}
+
+testWebhookMethods().catch((error) => {
+ console.error("Test script failed:", error);
+ process.exit(1);
+});
+
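Editor's note: a condensed usage sketch of the webhook manager that the script above exercises. The server URL, API key, and database name are placeholders, and the response shape referenced in the final comment is exactly what the script is probing for, so treat it as an assumption rather than a documented contract.

    import { FMServerConnection, fmTableOccurrence, textField } from "@proofkit/fmodata";

    const connection = new FMServerConnection({
      serverUrl: "https://fms.example.com",       // hypothetical server
      auth: { apiKey: "dk_xxxxxxxx" },            // hypothetical Otto API key
    });
    const db = connection.database("MyDatabase"); // hypothetical database name

    const contacts = fmTableOccurrence("contacts", {
      PrimaryKey: textField().primaryKey(),
      name: textField(),
    });

    // Register, list, and remove a webhook for the contacts table occurrence.
    const added = await db.webhook.add({
      webhook: "https://example.com/webhook",
      tableName: contacts,
      headers: { "X-Custom-Header": "test-value" },
    });
    console.log(await db.webhook.list());
    // The test script suggests add() returns { webHookResult: { webHookID: number } }.
    await db.webhook.remove((added as any).webHookResult.webHookID);
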
diff --git a/packages/fmodata/src/client/builders/default-select.ts b/packages/fmodata/src/client/builders/default-select.ts
index 0256db05..21bb1e4d 100644
--- a/packages/fmodata/src/client/builders/default-select.ts
+++ b/packages/fmodata/src/client/builders/default-select.ts
@@ -20,9 +20,13 @@ function getContainerFieldNames(table: FMTable): string[] {
* Gets default select fields from a table definition.
* Returns undefined if defaultSelect is "all".
* Automatically filters out container fields since they cannot be selected via $select.
+ *
+ * @param table - The table occurrence
+ * @param includeSpecialColumns - If true, includes ROWID and ROWMODID when defaultSelect is "schema"
*/
export function getDefaultSelectFields(
table: FMTable | undefined,
+ includeSpecialColumns?: boolean,
): string[] | undefined {
if (!table) return undefined;
@@ -33,7 +37,14 @@ export function getDefaultSelectFields(
const baseTableConfig = getBaseTableConfig(table);
const allFields = Object.keys(baseTableConfig.schema);
// Filter out container fields
- return [...new Set(allFields.filter((f) => !containerFields.includes(f)))];
+ const fields = [...new Set(allFields.filter((f) => !containerFields.includes(f)))];
+
+ // Add special columns if requested
+ if (includeSpecialColumns) {
+ fields.push("ROWID", "ROWMODID");
+ }
+
+ return fields;
}
if (Array.isArray(defaultSelect)) {
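Editor's note: with the new `includeSpecialColumns` parameter, a table whose `defaultSelect` is "schema" gets ROWID and ROWMODID appended to its default field list. A minimal sketch of that behaviour, assuming the `contacts` occurrence from the script above; the concrete field names in the comments are illustrative.

    // getDefaultSelectFields is internal to @proofkit/fmodata (builders/default-select.ts).
    const fields = getDefaultSelectFields(contacts);        // e.g. ["PrimaryKey", "name"], containers filtered out
    const withIds = getDefaultSelectFields(contacts, true); // e.g. ["PrimaryKey", "name", "ROWID", "ROWMODID"]
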
diff --git a/packages/fmodata/src/client/builders/expand-builder.ts b/packages/fmodata/src/client/builders/expand-builder.ts
index 89d5ae20..97373eba 100644
--- a/packages/fmodata/src/client/builders/expand-builder.ts
+++ b/packages/fmodata/src/client/builders/expand-builder.ts
@@ -40,7 +40,7 @@ export class ExpandBuilder {
return configs.map((config) => {
const targetTable = config.targetTable;
- let targetSchema: Record | undefined;
+ let targetSchema: Partial> | undefined;
if (targetTable) {
const baseTableConfig = getBaseTableConfig(targetTable);
const containerFields = baseTableConfig.containerFields || [];
diff --git a/packages/fmodata/src/client/builders/query-string-builder.ts b/packages/fmodata/src/client/builders/query-string-builder.ts
index a9fb68df..ee3694dd 100644
--- a/packages/fmodata/src/client/builders/query-string-builder.ts
+++ b/packages/fmodata/src/client/builders/query-string-builder.ts
@@ -17,12 +17,18 @@ export function buildSelectExpandQueryString(config: {
table?: FMTable;
useEntityIds: boolean;
logger: InternalLogger;
+ includeSpecialColumns?: boolean;
}): string {
const parts: string[] = [];
const expandBuilder = new ExpandBuilder(config.useEntityIds, config.logger);
// Build $select
if (config.selectedFields && config.selectedFields.length > 0) {
+ // Important: do NOT implicitly add system columns (ROWID/ROWMODID) here.
+ // - `includeSpecialColumns` controls the Prefer header + response parsing, but should not
+ // mutate/expand an explicit `$select` (e.g. when the user calls `.select({ ... })`).
+ // - If system columns are desired with `.select()`, they must be explicitly included via
+ // the `systemColumns` argument, which will already have added them to `selectedFields`.
const selectString = formatSelectFields(
config.selectedFields,
config.table,
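Editor's note: the comment added above draws a line worth illustrating: `includeSpecialColumns` only drives the Prefer header and response parsing, while an explicit `$select` is never widened behind the caller's back. A sketch under that assumption, reusing the hypothetical `db` and `contacts` from earlier notes:

    // No explicit select: the Prefer header requests special columns and they come back.
    await db.from(contacts).list().execute({ includeSpecialColumns: true });

    // Explicit select: $select is left untouched; request system columns explicitly instead.
    await db
      .from(contacts)
      .list()
      .select({ name: contacts.name }, { ROWID: true, ROWMODID: true })
      .execute();
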
diff --git a/packages/fmodata/src/client/builders/response-processor.ts b/packages/fmodata/src/client/builders/response-processor.ts
index 783b1a72..9f171d15 100644
--- a/packages/fmodata/src/client/builders/response-processor.ts
+++ b/packages/fmodata/src/client/builders/response-processor.ts
@@ -17,6 +17,7 @@ export interface ProcessResponseConfig {
expandValidationConfigs?: ExpandValidationConfig[];
skipValidation?: boolean;
useEntityIds?: boolean;
+ includeSpecialColumns?: boolean;
// Mapping from field names to output keys (for renamed fields in select)
fieldMapping?: Record;
}
@@ -37,6 +38,7 @@ export async function processODataResponse(
expandValidationConfigs,
skipValidation,
useEntityIds,
+ includeSpecialColumns,
fieldMapping,
} = config;
@@ -67,6 +69,9 @@ export async function processODataResponse(
}
// Validation path
+ // Note: Special columns are excluded when using QueryBuilder.single() method,
+ // but included for the RecordBuilder.get() method (both use singleMode: "exact").
+ // The exclusion is handled in QueryBuilder's processQueryResponse, not here.
if (singleMode !== false) {
const validation = await validateSingleResponse(
response,
@@ -74,6 +79,7 @@ export async function processODataResponse(
selectedFields as any,
expandValidationConfigs,
singleMode,
+ includeSpecialColumns,
);
if (!validation.valid) {
@@ -96,6 +102,7 @@ export async function processODataResponse(
schema,
selectedFields as any,
expandValidationConfigs,
+ includeSpecialColumns,
);
if (!validation.valid) {
@@ -223,6 +230,7 @@ export async function processQueryResponse(
expandConfigs: ExpandConfig[];
skipValidation?: boolean;
useEntityIds?: boolean;
+ includeSpecialColumns?: boolean;
// Mapping from field names to output keys (for renamed fields in select)
fieldMapping?: Record;
logger: InternalLogger;
@@ -235,6 +243,7 @@ export async function processQueryResponse(
expandConfigs,
skipValidation,
useEntityIds,
+ includeSpecialColumns,
fieldMapping,
logger,
} = config;
@@ -258,6 +267,7 @@ export async function processQueryResponse(
expandValidationConfigs,
skipValidation,
useEntityIds,
+ includeSpecialColumns,
});
// Rename fields if field mapping is provided (for renamed fields in select)
diff --git a/packages/fmodata/src/client/database.ts b/packages/fmodata/src/client/database.ts
index b4eba64d..2017e8b3 100644
--- a/packages/fmodata/src/client/database.ts
+++ b/packages/fmodata/src/client/database.ts
@@ -4,10 +4,26 @@ import { EntitySet } from "./entity-set";
import { BatchBuilder } from "./batch-builder";
import { SchemaManager } from "./schema-manager";
import { FMTable } from "../orm/table";
+import { WebhookManager } from "./webhook-builder";
-export class Database {
+type MetadataArgs = {
+ format?: "xml" | "json";
+ /**
+ * If provided, only the metadata for the specified table will be returned.
+ * Requires FileMaker Server 22.0.4 or later.
+ */
+ tableName?: string;
+ /**
+ * If true, a reduced payload size will be returned by omitting certain annotations.
+ */
+ reduceAnnotations?: boolean;
+};
+
+export class Database {
private _useEntityIds: boolean = false;
+ private _includeSpecialColumns: IncludeSpecialColumns;
public readonly schema: SchemaManager;
+ public readonly webhook: WebhookManager;
constructor(
private readonly databaseName: string,
@@ -19,14 +35,24 @@ export class Database {
* If set to false but some occurrences do not use entity IDs, an error will be thrown
*/
useEntityIds?: boolean;
+ /**
+ * Whether to include special columns (ROWID and ROWMODID) in responses.
+ * Note: Special columns are only included when there is no $select query.
+ */
+ includeSpecialColumns?: IncludeSpecialColumns;
},
) {
// Initialize schema manager
this.schema = new SchemaManager(this.databaseName, this.context);
+ this.webhook = new WebhookManager(this.databaseName, this.context);
this._useEntityIds = config?.useEntityIds ?? false;
+ this._includeSpecialColumns = (config?.includeSpecialColumns ??
+ false) as IncludeSpecialColumns;
}
- from>(table: T): EntitySet {
+ from>(
+ table: T,
+ ): EntitySet {
// Only override database-level useEntityIds if table explicitly sets it
// (not if it's undefined, which would override the database setting)
if (
@@ -37,7 +63,7 @@ export class Database {
this._useEntityIds = tableUseEntityIds;
}
}
- return new EntitySet({
+ return new EntitySet({
occurrence: table as T,
databaseName: this.databaseName,
context: this.context,
@@ -49,19 +75,35 @@ export class Database {
* Retrieves the OData metadata for this database.
* @param args Optional configuration object
* @param args.format The format to retrieve metadata in. Defaults to "json".
+ * @param args.tableName If provided, only the metadata for the specified table will be returned. Requires FileMaker Server 22.0.4 or later.
+ * @param args.reduceAnnotations If true, a reduced payload size will be returned by omitting certain annotations.
* @returns The metadata in the specified format
*/
- async getMetadata(args: { format: "xml" }): Promise;
- async getMetadata(args?: { format?: "json" }): Promise;
- async getMetadata(args?: {
- format?: "xml" | "json";
- }): Promise {
+ async getMetadata(args: { format: "xml" } & MetadataArgs): Promise;
+ async getMetadata(
+ args?: { format?: "json" } & MetadataArgs,
+ ): Promise;
+ async getMetadata(args?: MetadataArgs): Promise {
+ // Build the URL - if tableName is provided, append %23{tableName} to the path
+ let url = `/${this.databaseName}/$metadata`;
+ if (args?.tableName) {
+ url = `/${this.databaseName}/$metadata%23${args.tableName}`;
+ }
+
+ // Build headers
+ const headers: Record = {
+ Accept: args?.format === "xml" ? "application/xml" : "application/json",
+ };
+
+ // Add Prefer header if reduceAnnotations is true
+ if (args?.reduceAnnotations) {
+ headers["Prefer"] = 'include-annotations="-*"';
+ }
+
const result = await this.context._makeRequest<
Record | string
- >(`/${this.databaseName}/$metadata`, {
- headers: {
- Accept: args?.format === "xml" ? "application/xml" : "application/json",
- },
+ >(url, {
+ headers,
});
if (result.error) {
throw result.error;
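Editor's note: `getMetadata` now accepts `tableName` (FileMaker Server 22.0.4 or later) and `reduceAnnotations` in addition to `format`. A short sketch of the overloads above, with a hypothetical table name:

    // Single-table JSON metadata with the reduced-annotation Prefer header.
    const tableMeta = await db.getMetadata({ tableName: "contacts", reduceAnnotations: true });

    // Full metadata document as XML.
    const xml = await db.getMetadata({ format: "xml" });
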
diff --git a/packages/fmodata/src/client/delete-builder.ts b/packages/fmodata/src/client/delete-builder.ts
index 0df96248..fd742f2a 100644
--- a/packages/fmodata/src/client/delete-builder.ts
+++ b/packages/fmodata/src/client/delete-builder.ts
@@ -2,7 +2,7 @@ import type {
ExecutionContext,
ExecutableBuilder,
Result,
- WithSystemFields,
+ WithSpecialColumns,
ExecuteOptions,
ExecuteMethodOptions,
} from "../types";
@@ -26,17 +26,21 @@ export class DeleteBuilder> {
private context: ExecutionContext;
private table: Occ;
private databaseUseEntityIds: boolean;
+ private databaseIncludeSpecialColumns: boolean;
constructor(config: {
occurrence: Occ;
databaseName: string;
context: ExecutionContext;
databaseUseEntityIds?: boolean;
+ databaseIncludeSpecialColumns?: boolean;
}) {
this.table = config.occurrence;
this.databaseName = config.databaseName;
this.context = config.context;
this.databaseUseEntityIds = config.databaseUseEntityIds ?? false;
+ this.databaseIncludeSpecialColumns =
+ config.databaseIncludeSpecialColumns ?? false;
}
/**
diff --git a/packages/fmodata/src/client/entity-set.ts b/packages/fmodata/src/client/entity-set.ts
index fb03d177..a43afd71 100644
--- a/packages/fmodata/src/client/entity-set.ts
+++ b/packages/fmodata/src/client/entity-set.ts
@@ -19,6 +19,7 @@ import {
getDefaultSelect,
getTableName,
getTableColumns,
+ getTableSchema,
} from "../orm/table";
import type { FieldBuilder } from "../orm/field-builders";
import { createLogger, InternalLogger } from "../logger";
@@ -41,16 +42,20 @@ type ExtractColumnsFromOcc =
: never
: never;
-export class EntitySet> {
+export class EntitySet<
+ Occ extends FMTable,
+ DatabaseIncludeSpecialColumns extends boolean = false,
+> {
private occurrence: Occ;
private databaseName: string;
private context: ExecutionContext;
- private database: Database; // Database instance for accessing occurrences
+ private database: Database; // Database instance for accessing occurrences
private isNavigateFromEntitySet?: boolean;
private navigateRelation?: string;
private navigateSourceTableName?: string;
private navigateBasePath?: string; // Full base path for chained navigations
private databaseUseEntityIds: boolean;
+ private databaseIncludeSpecialColumns: DatabaseIncludeSpecialColumns;
private logger: InternalLogger;
constructor(config: {
@@ -66,17 +71,23 @@ export class EntitySet> {
// Get useEntityIds from database if available, otherwise default to false
this.databaseUseEntityIds =
(config.database as any)?._useEntityIds ?? false;
+ // Get includeSpecialColumns from database if available, otherwise default to false
+ this.databaseIncludeSpecialColumns =
+ (config.database as any)?._includeSpecialColumns ?? false;
this.logger = config.context?._getLogger?.() ?? createLogger();
}
// Type-only method to help TypeScript infer the schema from table
- static create>(config: {
+ static create<
+ Occ extends FMTable,
+ DatabaseIncludeSpecialColumns extends boolean = false,
+ >(config: {
occurrence: Occ;
databaseName: string;
context: ExecutionContext;
- database: Database;
- }): EntitySet {
- return new EntitySet({
+ database: Database;
+ }): EntitySet {
+ return new EntitySet({
occurrence: config.occurrence,
databaseName: config.databaseName,
context: config.context,
@@ -89,33 +100,30 @@ export class EntitySet> {
keyof InferSchemaOutputFromFMTable,
false,
false,
- {}
+ {},
+ DatabaseIncludeSpecialColumns
> {
- const builder = new QueryBuilder({
+ const builder = new QueryBuilder<
+ Occ,
+ keyof InferSchemaOutputFromFMTable,
+ false,
+ false,
+ {},
+ DatabaseIncludeSpecialColumns
+ >({
occurrence: this.occurrence as Occ,
databaseName: this.databaseName,
context: this.context,
databaseUseEntityIds: this.databaseUseEntityIds,
+ databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns,
});
// Apply defaultSelect if occurrence exists and select hasn't been called
if (this.occurrence) {
// FMTable - access via helper functions
const defaultSelectValue = getDefaultSelect(this.occurrence);
- const tableSchema = (this.occurrence as any)[FMTableClass.Symbol.Schema];
- let schema: Record | undefined;
-
- if (tableSchema) {
- // Extract schema from StandardSchemaV1
- const zodSchema = tableSchema["~standard"]?.schema;
- if (
- zodSchema &&
- typeof zodSchema === "object" &&
- "shape" in zodSchema
- ) {
- schema = zodSchema.shape as Record;
- }
- }
+ // Schema is stored directly as Partial>
+ const schema = getTableSchema(this.occurrence);
if (defaultSelectValue === "schema") {
// Use getTableColumns to get all columns and select them
@@ -124,12 +132,22 @@ export class EntitySet> {
const allColumns = getTableColumns(
this.occurrence,
) as ExtractColumnsFromOcc;
- return builder.select(allColumns).top(1000) as QueryBuilder<
+
+ // Include special columns if enabled at database level
+ const systemColumns = this.databaseIncludeSpecialColumns
+ ? { ROWID: true, ROWMODID: true }
+ : undefined;
+
+ return builder
+ .select(allColumns, systemColumns)
+ .top(1000) as QueryBuilder<
Occ,
keyof InferSchemaOutputFromFMTable,
false,
false,
- {}
+ {},
+ DatabaseIncludeSpecialColumns,
+ typeof systemColumns
>;
} else if (typeof defaultSelectValue === "object") {
// defaultSelectValue is a select object (Record)
@@ -141,7 +159,8 @@ export class EntitySet> {
keyof InferSchemaOutputFromFMTable,
false,
false,
- {}
+ {},
+ DatabaseIncludeSpecialColumns
>;
}
// If defaultSelect is "all", no changes needed (current behavior)
@@ -173,34 +192,31 @@ export class EntitySet> {
false,
undefined,
keyof InferSchemaOutputFromFMTable,
- {}
+ {},
+ DatabaseIncludeSpecialColumns
> {
- const builder = new RecordBuilder({
+ const builder = new RecordBuilder<
+ Occ,
+ false,
+ undefined,
+ keyof InferSchemaOutputFromFMTable,
+ {},
+ DatabaseIncludeSpecialColumns
+ >({
occurrence: this.occurrence,
databaseName: this.databaseName,
context: this.context,
recordId: id,
databaseUseEntityIds: this.databaseUseEntityIds,
+ databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns,
});
// Apply defaultSelect if occurrence exists
if (this.occurrence) {
// FMTable - access via helper functions
const defaultSelectValue = getDefaultSelect(this.occurrence);
- const tableSchema = (this.occurrence as any)[FMTableClass.Symbol.Schema];
- let schema: Record | undefined;
-
- if (tableSchema) {
- // Extract schema from StandardSchemaV1
- const zodSchema = tableSchema["~standard"]?.schema;
- if (
- zodSchema &&
- typeof zodSchema === "object" &&
- "shape" in zodSchema
- ) {
- schema = zodSchema.shape as Record;
- }
- }
+ // Schema is stored directly as Partial>
+ const schema = getTableSchema(this.occurrence);
if (defaultSelectValue === "schema") {
// Use getTableColumns to get all columns and select them
@@ -209,7 +225,13 @@ export class EntitySet> {
const allColumns = getTableColumns(
this.occurrence as any,
) as ExtractColumnsFromOcc;
- const selectedBuilder = builder.select(allColumns);
+
+ // Include special columns if enabled at database level
+ const systemColumns = this.databaseIncludeSpecialColumns
+ ? { ROWID: true, ROWMODID: true }
+ : undefined;
+
+ const selectedBuilder = builder.select(allColumns, systemColumns);
// Propagate navigation context if present
if (
this.isNavigateFromEntitySet &&
@@ -293,6 +315,7 @@ export class EntitySet> {
data: data as any, // Input type is validated/transformed at runtime
returnPreference: returnPreference as any,
databaseUseEntityIds: this.databaseUseEntityIds,
+ databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns,
});
}
@@ -323,6 +346,7 @@ export class EntitySet> {
data: data as any, // Input type is validated/transformed at runtime
returnPreference: returnPreference as any,
databaseUseEntityIds: this.databaseUseEntityIds,
+ databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns,
});
}
@@ -332,13 +356,17 @@ export class EntitySet> {
databaseName: this.databaseName,
context: this.context,
databaseUseEntityIds: this.databaseUseEntityIds,
+ databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns,
}) as any;
}
// Implementation
navigate>(
targetTable: ValidExpandTarget,
- ): EntitySet ? TargetTable : never> {
+ ): EntitySet<
+ TargetTable extends FMTable ? TargetTable : never,
+ DatabaseIncludeSpecialColumns
+ > {
// Check if it's an FMTable object or a string
let relationName: string;
@@ -361,7 +389,7 @@ export class EntitySet> {
}
// Create EntitySet with target table
- const entitySet = new EntitySet({
+ const entitySet = new EntitySet({
occurrence: targetTable,
databaseName: this.databaseName,
context: this.context,
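Editor's note: the EntitySet changes above thread the database-level `includeSpecialColumns` flag into the implicit select, so a table with `defaultSelect: "schema"` picks up ROWID and ROWMODID on both `list()` and `get()`. A sketch, with a placeholder database name and record id:

    const db = connection.database("MyDatabase", {  // hypothetical database name
      includeSpecialColumns: true,
    });

    // With defaultSelect: "schema" on the contacts occurrence, both calls below
    // implicitly select ROWID and ROWMODID alongside the schema fields.
    const rows = await db.from(contacts).list().execute();
    const one = await db.from(contacts).get("some-uuid").execute(); // hypothetical id
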
diff --git a/packages/fmodata/src/client/error-parser.ts b/packages/fmodata/src/client/error-parser.ts
index fd31d12e..01d31fc2 100644
--- a/packages/fmodata/src/client/error-parser.ts
+++ b/packages/fmodata/src/client/error-parser.ts
@@ -54,3 +54,7 @@ export async function parseErrorResponse(
// Fall back to generic HTTPError
return new HTTPError(url, response.status, response.statusText, errorBody);
}
+
+
+
+
diff --git a/packages/fmodata/src/client/filemaker-odata.ts b/packages/fmodata/src/client/filemaker-odata.ts
index a82233c6..df31d3db 100644
--- a/packages/fmodata/src/client/filemaker-odata.ts
+++ b/packages/fmodata/src/client/filemaker-odata.ts
@@ -24,6 +24,7 @@ export class FMServerConnection implements ExecutionContext {
private serverUrl: string;
private auth: Auth;
private useEntityIds: boolean = false;
+ private includeSpecialColumns: boolean = false;
private logger: InternalLogger;
constructor(config: {
serverUrl: string;
@@ -63,6 +64,22 @@ export class FMServerConnection implements ExecutionContext {
return this.useEntityIds;
}
+ /**
+ * @internal
+ * Sets whether to include special columns (ROWID and ROWMODID) in requests
+ */
+ _setIncludeSpecialColumns(includeSpecialColumns: boolean): void {
+ this.includeSpecialColumns = includeSpecialColumns;
+ }
+
+ /**
+ * @internal
+ * Gets whether to include special columns (ROWID and ROWMODID) in requests
+ */
+ _getIncludeSpecialColumns(): boolean {
+ return this.includeSpecialColumns;
+ }
+
/**
* @internal
* Gets the base URL for OData requests
@@ -84,7 +101,11 @@ export class FMServerConnection implements ExecutionContext {
*/
async _makeRequest(
url: string,
- options?: RequestInit & FFetchOptions & { useEntityIds?: boolean },
+ options?: RequestInit &
+ FFetchOptions & {
+ useEntityIds?: boolean;
+ includeSpecialColumns?: boolean;
+ },
): Promise> {
const logger = this._getLogger();
const baseUrl = `${this.serverUrl}${"apiKey" in this.auth ? `/otto` : ""}/fmi/odata/v4`;
@@ -92,10 +113,21 @@ export class FMServerConnection implements ExecutionContext {
// Use per-request override if provided, otherwise use the database-level setting
const useEntityIds = options?.useEntityIds ?? this.useEntityIds;
+ const includeSpecialColumns =
+ options?.includeSpecialColumns ?? this.includeSpecialColumns;
// Get includeODataAnnotations from options (it's passed through from execute options)
const includeODataAnnotations = (options as any)?.includeODataAnnotations;
+ // Build Prefer header as comma-separated list when multiple preferences are set
+ const preferValues: string[] = [];
+ if (useEntityIds) {
+ preferValues.push("fmodata.entity-ids");
+ }
+ if (includeSpecialColumns) {
+ preferValues.push("fmodata.include-specialcolumns");
+ }
+
const headers = {
Authorization:
"apiKey" in this.auth
@@ -103,7 +135,7 @@ export class FMServerConnection implements ExecutionContext {
: `Basic ${btoa(`${this.auth.username}:${this.auth.password}`)}`,
"Content-Type": "application/json",
Accept: getAcceptHeader(includeODataAnnotations),
- ...(useEntityIds ? { Prefer: "fmodata.entity-ids" } : {}),
+ ...(preferValues.length > 0 ? { Prefer: preferValues.join(", ") } : {}),
...(options?.headers || {}),
};
@@ -271,13 +303,14 @@ export class FMServerConnection implements ExecutionContext {
}
}
- database(
+ database(
name: string,
config?: {
useEntityIds?: boolean;
+ includeSpecialColumns?: IncludeSpecialColumns;
},
- ): Database {
- return new Database(name, this, config);
+ ): Database {
+ return new Database(name, this, config);
}
/**
@@ -287,7 +320,7 @@ export class FMServerConnection implements ExecutionContext {
async listDatabaseNames(): Promise {
const result = await this._makeRequest<{
value?: Array<{ name: string }>;
- }>("/");
+ }>("/$metadata", { headers: { Accept: "application/json" } });
if (result.error) {
throw result.error;
}
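Editor's note: the connection now folds both preferences into one comma-separated Prefer header instead of a single hard-coded value. Condensing the logic from the hunk above (header tokens copied from the diff):

    // Both flags enabled -> a single header:
    //   Prefer: fmodata.entity-ids, fmodata.include-specialcolumns
    const preferValues: string[] = [];
    if (useEntityIds) preferValues.push("fmodata.entity-ids");
    if (includeSpecialColumns) preferValues.push("fmodata.include-specialcolumns");
    const headers = preferValues.length > 0 ? { Prefer: preferValues.join(", ") } : {};
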
diff --git a/packages/fmodata/src/client/insert-builder.ts b/packages/fmodata/src/client/insert-builder.ts
index 01b74113..0294c9ec 100644
--- a/packages/fmodata/src/client/insert-builder.ts
+++ b/packages/fmodata/src/client/insert-builder.ts
@@ -52,6 +52,7 @@ export class InsertBuilder<
private returnPreference: ReturnPreference;
private databaseUseEntityIds: boolean;
+ private databaseIncludeSpecialColumns: boolean;
constructor(config: {
occurrence?: Occ;
@@ -60,6 +61,7 @@ export class InsertBuilder<
data: Partial>>;
returnPreference?: ReturnPreference;
databaseUseEntityIds?: boolean;
+ databaseIncludeSpecialColumns?: boolean;
}) {
this.table = config.occurrence;
this.databaseName = config.databaseName;
@@ -68,6 +70,8 @@ export class InsertBuilder<
this.returnPreference = (config.returnPreference ||
"representation") as ReturnPreference;
this.databaseUseEntityIds = config.databaseUseEntityIds ?? false;
+ this.databaseIncludeSpecialColumns =
+ config.databaseIncludeSpecialColumns ?? false;
}
/**
diff --git a/packages/fmodata/src/client/query/query-builder.ts b/packages/fmodata/src/client/query/query-builder.ts
index 90b12d61..f6989ac2 100644
--- a/packages/fmodata/src/client/query/query-builder.ts
+++ b/packages/fmodata/src/client/query/query-builder.ts
@@ -6,15 +6,13 @@ import type {
Result,
ExecuteOptions,
ConditionallyWithODataAnnotations,
- ExtractSchemaFromOccurrence,
+ ConditionallyWithSpecialColumns,
+ NormalizeIncludeSpecialColumns,
ExecuteMethodOptions,
} from "../../types";
import { RecordCountMismatchError } from "../../errors";
import { type FFetchOptions } from "@fetchkit/ffetch";
-import {
- transformFieldNamesArray,
- transformOrderByField,
-} from "../../transform";
+import { transformOrderByField } from "../../transform";
import { safeJsonParse } from "../sanitize-json";
import { parseErrorResponse } from "../error-parser";
import { isColumn, type Column } from "../../orm/column";
@@ -28,7 +26,6 @@ import {
type InferSchemaOutputFromFMTable,
type ValidExpandTarget,
type ExtractTableName,
- type ValidateNoContainerFields,
getTableName,
} from "../../orm/table";
import {
@@ -37,14 +34,17 @@ import {
type ExpandedRelations,
resolveTableId,
mergeExecuteOptions,
- formatSelectFields,
processQueryResponse,
processSelectWithRenames,
buildSelectExpandQueryString,
createODataRequest,
} from "../builders/index";
import { QueryUrlBuilder, type NavigationConfig } from "./url-builder";
-import type { TypeSafeOrderBy, QueryReturnType } from "./types";
+import type {
+ TypeSafeOrderBy,
+ QueryReturnType,
+ SystemColumnsOption,
+} from "./types";
import { createLogger, InternalLogger } from "../../logger";
// Re-export QueryReturnType for backward compatibility
@@ -70,6 +70,8 @@ export class QueryBuilder<
SingleMode extends "exact" | "maybe" | false = false,
IsCount extends boolean = false,
Expands extends ExpandedRelations = {},
+ DatabaseIncludeSpecialColumns extends boolean = false,
+ SystemCols extends SystemColumnsOption | undefined = undefined,
> implements
ExecutableBuilder<
QueryReturnType<
@@ -77,7 +79,8 @@ export class QueryBuilder<
Selected,
SingleMode,
IsCount,
- Expands
+ Expands,
+ SystemCols
>
>
{
@@ -92,10 +95,13 @@ export class QueryBuilder<
private context: ExecutionContext;
private navigation?: NavigationConfig;
private databaseUseEntityIds: boolean;
+ private databaseIncludeSpecialColumns: boolean;
private expandBuilder: ExpandBuilder;
private urlBuilder: QueryUrlBuilder;
// Mapping from field names to output keys (for renamed fields in select)
private fieldMapping?: Record;
+ // System columns requested via select() second argument
+ private systemColumns?: SystemColumnsOption;
private logger: InternalLogger;
constructor(config: {
@@ -103,12 +109,15 @@ export class QueryBuilder<
databaseName: string;
context: ExecutionContext;
databaseUseEntityIds?: boolean;
+ databaseIncludeSpecialColumns?: boolean;
}) {
this.occurrence = config.occurrence;
this.databaseName = config.databaseName;
this.context = config.context;
this.logger = config.context?._getLogger?.() ?? createLogger();
this.databaseUseEntityIds = config.databaseUseEntityIds ?? false;
+ this.databaseIncludeSpecialColumns =
+ config.databaseIncludeSpecialColumns ?? false;
this.expandBuilder = new ExpandBuilder(
this.databaseUseEntityIds,
this.logger,
@@ -121,12 +130,21 @@ export class QueryBuilder<
}
/**
- * Helper to merge database-level useEntityIds with per-request options
+ * Helper to merge database-level useEntityIds and includeSpecialColumns with per-request options
*/
private mergeExecuteOptions(
options?: RequestInit & FFetchOptions & ExecuteOptions,
- ): RequestInit & FFetchOptions & { useEntityIds?: boolean } {
- return mergeExecuteOptions(options, this.databaseUseEntityIds);
+ ): RequestInit &
+ FFetchOptions & {
+ useEntityIds?: boolean;
+ includeSpecialColumns?: boolean;
+ } {
+ const merged = mergeExecuteOptions(options, this.databaseUseEntityIds);
+ return {
+ ...merged,
+ includeSpecialColumns:
+ options?.includeSpecialColumns ?? this.databaseIncludeSpecialColumns,
+ };
}
/**
@@ -159,24 +177,37 @@ export class QueryBuilder<
| Record>> = Selected,
NewSingle extends "exact" | "maybe" | false = SingleMode,
NewCount extends boolean = IsCount,
+ NewSystemCols extends SystemColumnsOption | undefined = SystemCols,
>(changes: {
selectedFields?: NewSelected;
singleMode?: NewSingle;
isCountMode?: NewCount;
queryOptions?: Partial>>;
fieldMapping?: Record;
- }): QueryBuilder {
+ systemColumns?: NewSystemCols;
+ }): QueryBuilder<
+ Occ,
+ NewSelected,
+ NewSingle,
+ NewCount,
+ Expands,
+ DatabaseIncludeSpecialColumns,
+ NewSystemCols
+ > {
const newBuilder = new QueryBuilder<
Occ,
NewSelected,
NewSingle,
NewCount,
- Expands
+ Expands,
+ DatabaseIncludeSpecialColumns,
+ NewSystemCols
>({
occurrence: this.occurrence,
databaseName: this.databaseName,
context: this.context,
databaseUseEntityIds: this.databaseUseEntityIds,
+ databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns,
});
newBuilder.queryOptions = {
...this.queryOptions,
@@ -186,6 +217,10 @@ export class QueryBuilder<
newBuilder.singleMode = (changes.singleMode ?? this.singleMode) as any;
newBuilder.isCountMode = (changes.isCountMode ?? this.isCountMode) as any;
newBuilder.fieldMapping = changes.fieldMapping ?? this.fieldMapping;
+ newBuilder.systemColumns =
+ changes.systemColumns !== undefined
+ ? changes.systemColumns
+ : this.systemColumns;
// Copy navigation metadata
newBuilder.navigation = this.navigation;
newBuilder.urlBuilder = new QueryUrlBuilder(
@@ -207,7 +242,15 @@ export class QueryBuilder<
* userEmail: users.email // renamed!
* })
*
+ * @example
+ * // Include system columns (ROWID, ROWMODID) when using select()
+ * db.from(users).list().select(
+ * { name: users.name },
+ * { ROWID: true, ROWMODID: true }
+ * )
+ *
* @param fields - Object mapping output keys to column references (container fields excluded)
+ * @param systemColumns - Optional object to request system columns (ROWID, ROWMODID)
* @returns QueryBuilder with updated selected fields
*/
select<
@@ -215,7 +258,19 @@ export class QueryBuilder<
string,
Column, false>
>,
- >(fields: TSelect): QueryBuilder {
+ TSystemCols extends SystemColumnsOption = {},
+ >(
+ fields: TSelect,
+ systemColumns?: TSystemCols,
+ ): QueryBuilder<
+ Occ,
+ TSelect,
+ SingleMode,
+ IsCount,
+ Expands,
+ DatabaseIncludeSpecialColumns,
+ TSystemCols
+ > {
const tableName = getTableName(this.occurrence);
const { selectedFields, fieldMapping } = processSelectWithRenames(
fields,
@@ -223,13 +278,23 @@ export class QueryBuilder<
this.logger,
);
+ // Add system columns to selectedFields if requested
+ const finalSelectedFields = [...selectedFields];
+ if (systemColumns?.ROWID) {
+ finalSelectedFields.push("ROWID");
+ }
+ if (systemColumns?.ROWMODID) {
+ finalSelectedFields.push("ROWMODID");
+ }
+
return this.cloneWithChanges({
selectedFields: fields as any,
queryOptions: {
- select: selectedFields,
+ select: finalSelectedFields,
},
fieldMapping:
Object.keys(fieldMapping).length > 0 ? fieldMapping : undefined,
+ systemColumns: systemColumns as any,
});
}
@@ -245,7 +310,15 @@ export class QueryBuilder<
*/
where(
expression: FilterExpression | string,
- ): QueryBuilder {
+ ): QueryBuilder<
+ Occ,
+ Selected,
+ SingleMode,
+ IsCount,
+ Expands,
+ DatabaseIncludeSpecialColumns,
+ SystemCols
+ > {
// Handle raw string filters (escape hatch)
if (typeof expression === "string") {
this.queryOptions.filter = expression;
@@ -295,7 +368,15 @@ export class QueryBuilder<
| OrderByExpression>
>,
]
- ): QueryBuilder {
+ ): QueryBuilder<
+ Occ,
+ Selected,
+ SingleMode,
+ IsCount,
+ Expands,
+ DatabaseIncludeSpecialColumns,
+ SystemCols
+ > {
const tableName = getTableName(this.occurrence);
// Handle variadic arguments (multiple fields)
@@ -440,14 +521,30 @@ export class QueryBuilder<
top(
count: number,
- ): QueryBuilder {
+ ): QueryBuilder<
+ Occ,
+ Selected,
+ SingleMode,
+ IsCount,
+ Expands,
+ DatabaseIncludeSpecialColumns,
+ SystemCols
+ > {
this.queryOptions.top = count;
return this;
}
skip(
count: number,
- ): QueryBuilder {
+ ): QueryBuilder<
+ Occ,
+ Selected,
+ SingleMode,
+ IsCount,
+ Expands,
+ DatabaseIncludeSpecialColumns,
+ SystemCols
+ > {
this.queryOptions.skip = count;
return this;
}
@@ -483,7 +580,9 @@ export class QueryBuilder<
selected: TSelected;
nested: TNestedExpands;
};
- }
+ },
+ DatabaseIncludeSpecialColumns,
+ SystemCols
> {
// Use ExpandBuilder.processExpand to handle the expand logic
type TargetBuilder = QueryBuilder<
@@ -491,7 +590,8 @@ export class QueryBuilder<
keyof InferSchemaOutputFromFMTable,
false,
false,
- {}
+ {},
+ DatabaseIncludeSpecialColumns
>;
const expandConfig = this.expandBuilder.processExpand<
TargetTable,
@@ -501,11 +601,20 @@ export class QueryBuilder<
this.occurrence,
callback as ((builder: TargetBuilder) => TargetBuilder) | undefined,
() =>
- new QueryBuilder({
+ new QueryBuilder<
+ TargetTable,
+ any,
+ any,
+ any,
+ any,
+ DatabaseIncludeSpecialColumns,
+ undefined
+ >({
occurrence: targetTable,
databaseName: this.databaseName,
context: this.context,
databaseUseEntityIds: this.databaseUseEntityIds,
+ databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns,
}),
);
@@ -513,15 +622,39 @@ export class QueryBuilder<
return this as any;
}
- single(): QueryBuilder {
+ single(): QueryBuilder<
+ Occ,
+ Selected,
+ "exact",
+ IsCount,
+ Expands,
+ DatabaseIncludeSpecialColumns,
+ SystemCols
+ > {
return this.cloneWithChanges({ singleMode: "exact" as const });
}
- maybeSingle(): QueryBuilder {
+ maybeSingle(): QueryBuilder<
+ Occ,
+ Selected,
+ "maybe",
+ IsCount,
+ Expands,
+ DatabaseIncludeSpecialColumns,
+ SystemCols
+ > {
return this.cloneWithChanges({ singleMode: "maybe" as const });
}
- count(): QueryBuilder {
+ count(): QueryBuilder<
+ Occ,
+ Selected,
+ SingleMode,
+ true,
+ Expands,
+ DatabaseIncludeSpecialColumns,
+ SystemCols
+ > {
return this.cloneWithChanges({
isCountMode: true as const,
queryOptions: { count: true },
@@ -531,7 +664,7 @@ export class QueryBuilder<
/**
* Builds the OData query string from current query options and expand configs.
*/
- private buildQueryString(): string {
+ private buildQueryString(includeSpecialColumns?: boolean): string {
// Build query without expand and select (we'll add them manually if using entity IDs)
const queryOptionsWithoutExpandAndSelect = { ...this.queryOptions };
const originalSelect = queryOptionsWithoutExpandAndSelect.select;
@@ -547,12 +680,17 @@ export class QueryBuilder<
: [String(originalSelect)]
: undefined;
+ // Use merged includeSpecialColumns if provided, otherwise use database-level default
+ const finalIncludeSpecialColumns =
+ includeSpecialColumns ?? this.databaseIncludeSpecialColumns;
+
const selectExpandString = buildSelectExpandQueryString({
selectedFields: selectArray,
expandConfigs: this.expandConfigs,
table: this.occurrence,
useEntityIds: this.databaseUseEntityIds,
logger: this.logger,
+ includeSpecialColumns: finalIncludeSpecialColumns,
});
// Append select/expand to existing query string
@@ -573,19 +711,35 @@ export class QueryBuilder<
): Promise<
Result<
ConditionallyWithODataAnnotations<
- QueryReturnType<
- InferSchemaOutputFromFMTable,
- Selected,
- SingleMode,
- IsCount,
- Expands
+ ConditionallyWithSpecialColumns<
+ QueryReturnType<
+ InferSchemaOutputFromFMTable,
+ Selected,
+ SingleMode,
+ IsCount,
+ Expands,
+ SystemCols
+ >,
+ // Use the merged value: if explicitly provided in options, use that; otherwise use database default
+ NormalizeIncludeSpecialColumns<
+ EO["includeSpecialColumns"],
+ DatabaseIncludeSpecialColumns
+ >,
+ // Check if select was applied: if Selected is Record (object select) or a subset of keys, select was applied
+ Selected extends Record>
+ ? true
+ : Selected extends keyof InferSchemaOutputFromFMTable
+ ? false
+ : true
>,
EO["includeODataAnnotations"] extends true ? true : false
>
>
> {
const mergedOptions = this.mergeExecuteOptions(options);
- const queryString = this.buildQueryString();
+ const queryString = this.buildQueryString(
+ mergedOptions.includeSpecialColumns,
+ );
// Handle $count endpoint
if (this.isCountMode) {
@@ -618,6 +772,9 @@ export class QueryBuilder<
return { data: undefined, error: result.error };
}
+ // Check if select was applied (runtime check)
+ const hasSelect = this.queryOptions.select !== undefined;
+
return processQueryResponse(result.data, {
occurrence: this.occurrence,
singleMode: this.singleMode,
@@ -625,6 +782,7 @@ export class QueryBuilder<
expandConfigs: this.expandConfigs,
skipValidation: options?.skipValidation,
useEntityIds: mergedOptions.useEntityIds,
+ includeSpecialColumns: mergedOptions.includeSpecialColumns,
fieldMapping: this.fieldMapping,
logger: this.logger,
});
@@ -667,7 +825,8 @@ export class QueryBuilder<
Selected,
SingleMode,
IsCount,
- Expands
+ Expands,
+ SystemCols
>
>
> {
@@ -728,6 +887,9 @@ export class QueryBuilder<
}
const mergedOptions = this.mergeExecuteOptions(options);
+ // Check if select was applied (runtime check)
+ const hasSelect = this.queryOptions.select !== undefined;
+
return processQueryResponse(rawData, {
occurrence: this.occurrence,
singleMode: this.singleMode,
@@ -735,6 +897,7 @@ export class QueryBuilder<
expandConfigs: this.expandConfigs,
skipValidation: options?.skipValidation,
useEntityIds: mergedOptions.useEntityIds,
+ includeSpecialColumns: mergedOptions.includeSpecialColumns,
fieldMapping: this.fieldMapping,
logger: this.logger,
});
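Editor's note: taken together, the QueryBuilder changes above mean `select()` accepts an optional second `systemColumns` argument and `includeSpecialColumns` can be overridden per `execute()`. A sketch assuming the `contacts` occurrence defined earlier; the result typing follows the `SystemColumnsFromOption` intersection introduced later in this diff.

    const result = await db
      .from(contacts)
      .list()
      .select({ name: contacts.name }, { ROWID: true, ROWMODID: true })
      .top(10)
      .execute();

    if (!result.error) {
      // Each row should be typed roughly as { name: ...; ROWID: number; ROWMODID: number }.
      for (const row of result.data) console.log(row.ROWID, row.name);
    }
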
diff --git a/packages/fmodata/src/client/query/response-processor.ts b/packages/fmodata/src/client/query/response-processor.ts
index c3140601..1ccf3f00 100644
--- a/packages/fmodata/src/client/query/response-processor.ts
+++ b/packages/fmodata/src/client/query/response-processor.ts
@@ -7,7 +7,7 @@ import { transformResponseFields } from "../../transform";
import { validateListResponse, validateSingleResponse } from "../../validation";
import type { ExpandValidationConfig } from "../../validation";
import type { ExpandConfig } from "./expand-builder";
-import { FMTable as FMTableClass } from "../../orm/table";
+import { FMTable as FMTableClass, getTableSchema } from "../../orm/table";
import { InternalLogger } from "../../logger";
/**
@@ -20,6 +20,7 @@ export interface ProcessQueryResponseConfig {
expandConfigs: ExpandConfig[];
skipValidation?: boolean;
useEntityIds?: boolean;
+ includeSpecialColumns?: boolean;
// Mapping from field names to output keys (for renamed fields in select)
fieldMapping?: Record;
logger: InternalLogger;
@@ -37,20 +38,12 @@ function buildExpandValidationConfigs(
const targetTable = config.targetTable;
// Extract schema from target table/occurrence
- let targetSchema: Record | undefined;
- if (targetTable) {
- const tableSchema = (targetTable as any)[FMTableClass.Symbol.Schema];
- if (tableSchema) {
- const zodSchema = tableSchema["~standard"]?.schema;
- if (
- zodSchema &&
- typeof zodSchema === "object" &&
- "shape" in zodSchema
- ) {
- targetSchema = zodSchema.shape as Record;
- }
- }
- }
+ // Schema is stored directly as Partial>
+ const targetSchema = targetTable
+ ? (getTableSchema(targetTable) as
+ | Record
+ | undefined)
+ : undefined;
// Extract selected fields from options
const selectedFields = config.options?.select
@@ -193,16 +186,8 @@ export async function processQueryResponse(
// Validation path
// Get schema from occurrence if available
- let schema: Record | undefined;
- if (occurrence) {
- const tableSchema = (occurrence as any)[FMTableClass.Symbol.Schema];
- if (tableSchema) {
- const zodSchema = tableSchema["~standard"]?.schema;
- if (zodSchema && typeof zodSchema === "object" && "shape" in zodSchema) {
- schema = zodSchema.shape as Record;
- }
- }
- }
+ // Schema is stored directly as Partial>
+ const schema = occurrence ? getTableSchema(occurrence) : undefined;
const selectedFields = config.queryOptions.select
? ((Array.isArray(config.queryOptions.select)
@@ -214,6 +199,12 @@ export async function processQueryResponse(
);
// Validate with original field names
+ // Special columns are excluded when using single() method (per OData spec behavior)
+ // Note: While FileMaker may return special columns in single mode if requested via header,
+ // we exclude them here to maintain OData spec compliance. The types will also not include
+ // special columns for single mode to match this runtime behavior.
+ const shouldIncludeSpecialColumns =
+ singleMode === false ? (config.includeSpecialColumns ?? false) : false;
const validationResult =
singleMode !== false
? await validateSingleResponse(
@@ -222,12 +213,14 @@ export async function processQueryResponse(
selectedFields as string[] | undefined,
expandValidationConfigs,
singleMode,
+ shouldIncludeSpecialColumns,
)
: await validateListResponse(
data,
schema,
selectedFields as string[] | undefined,
expandValidationConfigs,
+ shouldIncludeSpecialColumns,
);
if (!validationResult.valid) {
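Editor's note: as the comment above states, special columns are dropped from validation in single mode even when the header requested them, so `.single()` results stay OData-spec shaped while list queries keep them. A sketch of the distinction, using the raw-string filter escape hatch:

    // List query: ROWID/ROWMODID pass through validation when requested.
    const list = await db.from(contacts).list().execute({ includeSpecialColumns: true });

    // single(): special columns are excluded from the validated result, despite the header.
    const one = await db
      .from(contacts)
      .list()
      .where("name eq 'Alice'") // raw OData filter string
      .single()
      .execute({ includeSpecialColumns: true });
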
diff --git a/packages/fmodata/src/client/query/types.ts b/packages/fmodata/src/client/query/types.ts
index a3b81441..9aae8637 100644
--- a/packages/fmodata/src/client/query/types.ts
+++ b/packages/fmodata/src/client/query/types.ts
@@ -70,30 +70,59 @@ export type ResolveExpandedRelations = {
[K in keyof Exps]: ResolveExpandType[];
};
+/**
+ * System columns option for select() method.
+ * Allows explicitly requesting ROWID and/or ROWMODID when using select().
+ */
+export type SystemColumnsOption = {
+ ROWID?: boolean;
+ ROWMODID?: boolean;
+};
+
+/**
+ * Extract system columns type from SystemColumnsOption.
+ * Returns an object type with ROWID and/or ROWMODID properties when set to true.
+ */
+export type SystemColumnsFromOption<
+ T extends SystemColumnsOption | undefined,
+> = (T extends { ROWID: true } ? { ROWID: number } : {}) &
+ (T extends { ROWMODID: true } ? { ROWMODID: number } : {});
+
export type QueryReturnType<
T extends Record,
Selected extends keyof T | Record>,
SingleMode extends "exact" | "maybe" | false,
IsCount extends boolean,
Expands extends ExpandedRelations,
+ SystemCols extends SystemColumnsOption | undefined = undefined,
> = IsCount extends true
? number
: // Use tuple wrapping [Selected] extends [...] to prevent distribution over unions
[Selected] extends [Record>]
? SingleMode extends "exact"
- ? MapSelectToReturnType & ResolveExpandedRelations
+ ? MapSelectToReturnType &
+ ResolveExpandedRelations &
+ SystemColumnsFromOption
: SingleMode extends "maybe"
?
| (MapSelectToReturnType &
- ResolveExpandedRelations)
+ ResolveExpandedRelations &
+ SystemColumnsFromOption)
| null
: (MapSelectToReturnType &
- ResolveExpandedRelations)[]
+ ResolveExpandedRelations &
+ SystemColumnsFromOption)[]
: // Use tuple wrapping to prevent distribution over union of keys
[Selected] extends [keyof T]
? SingleMode extends "exact"
- ? Pick & ResolveExpandedRelations
+ ? Pick &
+ ResolveExpandedRelations &
+ SystemColumnsFromOption
: SingleMode extends "maybe"
- ? (Pick & ResolveExpandedRelations) | null
- : (Pick & ResolveExpandedRelations)[]
+ ? (Pick &
+ ResolveExpandedRelations &
+ SystemColumnsFromOption) | null
+ : (Pick &
+ ResolveExpandedRelations &
+ SystemColumnsFromOption)[]
: never;
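Editor's note: the new `SystemColumnsOption` / `SystemColumnsFromOption` types map the select-time request onto the row type. A small type-level sketch (type names taken from the diff above, relative import path illustrative):

    import type { SystemColumnsFromOption } from "./types";

    type Requested = { ROWID: true };                // passed as select()'s second argument
    type Extra = SystemColumnsFromOption<Requested>; // resolves to { ROWID: number }
    type Row = { name: string } & Extra;             // -> { name: string; ROWID: number }
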
diff --git a/packages/fmodata/src/client/record-builder.ts b/packages/fmodata/src/client/record-builder.ts
index 48f66b8f..484c49c7 100644
--- a/packages/fmodata/src/client/record-builder.ts
+++ b/packages/fmodata/src/client/record-builder.ts
@@ -5,6 +5,8 @@ import type {
ODataFieldResponse,
ExecuteOptions,
ConditionallyWithODataAnnotations,
+ ConditionallyWithSpecialColumns,
+ NormalizeIncludeSpecialColumns,
ExecuteMethodOptions,
} from "../types";
import type {
@@ -35,6 +37,8 @@ import {
import {
type ResolveExpandedRelations,
type ResolveExpandType,
+ type SystemColumnsOption,
+ type SystemColumnsFromOption,
} from "./query/types";
import { createLogger, InternalLogger, Logger } from "../logger";
@@ -64,6 +68,7 @@ export type RecordReturnType<
| keyof Schema
| Record>>>,
Expands extends ExpandedRelations,
+ SystemCols extends SystemColumnsOption | undefined = undefined,
> = IsSingleField extends true
? FieldColumn extends Column
? TOutput
@@ -71,10 +76,13 @@ export type RecordReturnType<
: // Use tuple wrapping [Selected] extends [...] to prevent distribution over unions
[Selected] extends [Record>]
? MapSelectToReturnType &
- ResolveExpandedRelations
+ ResolveExpandedRelations &
+ SystemColumnsFromOption
: // Use tuple wrapping to prevent distribution over union of keys
[Selected] extends [keyof Schema]
- ? Pick & ResolveExpandedRelations
+ ? Pick &
+ ResolveExpandedRelations &
+ SystemColumnsFromOption
: never;
export class RecordBuilder<
@@ -88,6 +96,8 @@ export class RecordBuilder<
Column>>
> = keyof InferSchemaOutputFromFMTable>,
Expands extends ExpandedRelations = {},
+ DatabaseIncludeSpecialColumns extends boolean = false,
+ SystemCols extends SystemColumnsOption | undefined = undefined,
> implements
ExecutableBuilder<
RecordReturnType<
@@ -95,7 +105,8 @@ export class RecordBuilder<
IsSingleField,
FieldColumn,
Selected,
- Expands
+ Expands,
+ SystemCols
>
>
{
@@ -111,12 +122,15 @@ export class RecordBuilder<
private navigateSourceTableName?: string;
private databaseUseEntityIds: boolean;
+ private databaseIncludeSpecialColumns: boolean;
// Properties for select/expand support
private selectedFields?: string[];
private expandConfigs: ExpandConfig[] = [];
// Mapping from field names to output keys (for renamed fields in select)
private fieldMapping?: Record;
+ // System columns requested via select() second argument
+ private systemColumns?: SystemColumnsOption;
private logger: InternalLogger;
@@ -126,22 +140,34 @@ export class RecordBuilder<
context: ExecutionContext;
recordId: string | number;
databaseUseEntityIds?: boolean;
+ databaseIncludeSpecialColumns?: boolean;
}) {
this.table = config.occurrence;
this.databaseName = config.databaseName;
this.context = config.context;
this.recordId = config.recordId;
this.databaseUseEntityIds = config.databaseUseEntityIds ?? false;
+ this.databaseIncludeSpecialColumns =
+ config.databaseIncludeSpecialColumns ?? false;
this.logger = config.context?._getLogger?.() ?? createLogger();
}
/**
- * Helper to merge database-level useEntityIds with per-request options
+ * Helper to merge database-level useEntityIds and includeSpecialColumns with per-request options
*/
private mergeExecuteOptions(
options?: RequestInit & FFetchOptions & ExecuteOptions,
- ): RequestInit & FFetchOptions & { useEntityIds?: boolean } {
- return mergeExecuteOptions(options, this.databaseUseEntityIds);
+ ): RequestInit &
+ FFetchOptions & {
+ useEntityIds?: boolean;
+ includeSpecialColumns?: boolean;
+ } {
+ const merged = mergeExecuteOptions(options, this.databaseUseEntityIds);
+ return {
+ ...merged,
+ includeSpecialColumns:
+ options?.includeSpecialColumns ?? this.databaseIncludeSpecialColumns,
+ };
}
/**
@@ -171,25 +197,42 @@ export class RecordBuilder<
string,
Column>>
> = Selected,
+ NewSystemCols extends SystemColumnsOption | undefined = SystemCols,
>(changes: {
selectedFields?: string[];
fieldMapping?: Record;
- }): RecordBuilder {
+ systemColumns?: NewSystemCols;
+ }): RecordBuilder<
+ Occ,
+ false,
+ FieldColumn,
+ NewSelected,
+ Expands,
+ DatabaseIncludeSpecialColumns,
+ NewSystemCols
+ > {
const newBuilder = new RecordBuilder<
Occ,
false,
FieldColumn,
NewSelected,
- Expands
+ Expands,
+ DatabaseIncludeSpecialColumns,
+ NewSystemCols
>({
occurrence: this.table,
databaseName: this.databaseName,
context: this.context,
recordId: this.recordId,
databaseUseEntityIds: this.databaseUseEntityIds,
+ databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns,
});
newBuilder.selectedFields = changes.selectedFields ?? this.selectedFields;
newBuilder.fieldMapping = changes.fieldMapping ?? this.fieldMapping;
+ newBuilder.systemColumns =
+ changes.systemColumns !== undefined
+ ? changes.systemColumns
+ : this.systemColumns;
newBuilder.expandConfigs = [...this.expandConfigs];
// Preserve navigation context
newBuilder.isNavigateFromEntitySet = this.isNavigateFromEntitySet;
@@ -208,7 +251,8 @@ export class RecordBuilder<
true,
TColumn,
keyof InferSchemaOutputFromFMTable>,
- {}
+ {},
+ DatabaseIncludeSpecialColumns
> {
// Runtime validation: ensure column is from the correct table
const tableName = getTableName(this.table);
@@ -223,13 +267,15 @@ export class RecordBuilder<
true,
TColumn,
keyof InferSchemaOutputFromFMTable>,
- {}
+ {},
+ DatabaseIncludeSpecialColumns
>({
occurrence: this.table,
databaseName: this.databaseName,
context: this.context,
recordId: this.recordId,
databaseUseEntityIds: this.databaseUseEntityIds,
+ databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns,
});
newBuilder.operation = "getSingleField";
newBuilder.operationColumn = column;
@@ -254,7 +300,15 @@ export class RecordBuilder<
* userEmail: contacts.email // renamed!
* })
*
+ * @example
+ * // Include system columns (ROWID, ROWMODID) when using select()
+ * db.from(contacts).get("uuid").select(
+ * { name: contacts.name },
+ * { ROWID: true, ROWMODID: true }
+ * )
+ *
* @param fields - Object mapping output keys to column references (container fields excluded)
+ * @param systemColumns - Optional object to request system columns (ROWID, ROWMODID)
* @returns RecordBuilder with updated selected fields
*/
select<
@@ -262,7 +316,19 @@ export class RecordBuilder<
string,
Column, false>
>,
- >(fields: TSelect): RecordBuilder {
+ TSystemCols extends SystemColumnsOption = {},
+ >(
+ fields: TSelect,
+ systemColumns?: TSystemCols,
+ ): RecordBuilder<
+ Occ,
+ false,
+ FieldColumn,
+ TSelect,
+ Expands,
+ DatabaseIncludeSpecialColumns,
+ TSystemCols
+ > {
const tableName = getTableName(this.table);
const { selectedFields, fieldMapping } = processSelectWithRenames(
fields,
@@ -270,10 +336,20 @@ export class RecordBuilder<
this.logger,
);
+ // Add system columns to selectedFields if requested
+ const finalSelectedFields = [...selectedFields];
+ if (systemColumns?.ROWID) {
+ finalSelectedFields.push("ROWID");
+ }
+ if (systemColumns?.ROWMODID) {
+ finalSelectedFields.push("ROWMODID");
+ }
+
return this.cloneWithChanges({
- selectedFields,
+ selectedFields: finalSelectedFields,
fieldMapping:
Object.keys(fieldMapping).length > 0 ? fieldMapping : undefined,
+ systemColumns: systemColumns as any,
}) as any;
}
@@ -323,7 +399,9 @@ export class RecordBuilder<
selected: TSelected;
nested: TNestedExpands;
};
- }
+ },
+ DatabaseIncludeSpecialColumns,
+ SystemCols
> {
// Create new builder with updated types
const newBuilder = new RecordBuilder<
@@ -331,18 +409,21 @@ export class RecordBuilder<
false,
FieldColumn,
Selected,
- any
+ any,
+ DatabaseIncludeSpecialColumns
>({
occurrence: this.table,
databaseName: this.databaseName,
context: this.context,
recordId: this.recordId,
databaseUseEntityIds: this.databaseUseEntityIds,
+ databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns,
});
// Copy existing state
newBuilder.selectedFields = this.selectedFields;
newBuilder.fieldMapping = this.fieldMapping;
+ newBuilder.systemColumns = this.systemColumns;
newBuilder.expandConfigs = [...this.expandConfigs];
newBuilder.isNavigateFromEntitySet = this.isNavigateFromEntitySet;
newBuilder.navigateRelation = this.navigateRelation;
@@ -369,11 +450,20 @@ export class RecordBuilder<
this.table ?? undefined,
callback as ((builder: TargetBuilder) => TargetBuilder) | undefined,
() =>
- new QueryBuilder({
+ new QueryBuilder<
+ TargetTable,
+ any,
+ any,
+ any,
+ any,
+ DatabaseIncludeSpecialColumns,
+ undefined
+ >({
occurrence: targetTable,
databaseName: this.databaseName,
context: this.context,
databaseUseEntityIds: this.databaseUseEntityIds,
+ databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns,
}),
);
@@ -387,7 +477,10 @@ export class RecordBuilder<
TargetTable,
keyof InferSchemaOutputFromFMTable,
false,
- false
+ false,
+ {},
+ DatabaseIncludeSpecialColumns,
+ undefined
> {
// Extract name and validate
const relationName = getTableName(targetTable);
@@ -403,11 +496,20 @@ export class RecordBuilder<
}
// Create QueryBuilder with target table
- const builder = new QueryBuilder({
+ const builder = new QueryBuilder<
+ TargetTable,
+ any,
+ any,
+ any,
+ any,
+ DatabaseIncludeSpecialColumns,
+ undefined
+ >({
occurrence: targetTable,
databaseName: this.databaseName,
context: this.context,
databaseUseEntityIds: this.databaseUseEntityIds,
+ databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns,
});
// Store the navigation info - we'll use it in execute
@@ -452,13 +554,18 @@ export class RecordBuilder<
/**
* Builds the complete query string including $select and $expand parameters.
*/
- private buildQueryString(): string {
+ private buildQueryString(includeSpecialColumns?: boolean): string {
+ // Use merged includeSpecialColumns if provided, otherwise use database-level default
+ const finalIncludeSpecialColumns =
+ includeSpecialColumns ?? this.databaseIncludeSpecialColumns;
+
return buildSelectExpandQueryString({
selectedFields: this.selectedFields,
expandConfigs: this.expandConfigs,
table: this.table,
useEntityIds: this.databaseUseEntityIds,
logger: this.logger,
+ includeSpecialColumns: finalIncludeSpecialColumns,
});
}
@@ -467,12 +574,30 @@ export class RecordBuilder<
): Promise<
Result<
ConditionallyWithODataAnnotations<
- RecordReturnType<
- InferSchemaOutputFromFMTable>,
- IsSingleField,
- FieldColumn,
- Selected,
- Expands
+ ConditionallyWithSpecialColumns<
+ RecordReturnType<
+ InferSchemaOutputFromFMTable>,
+ IsSingleField,
+ FieldColumn,
+ Selected,
+ Expands,
+ SystemCols
+ >,
+ // Use the merged value: if explicitly provided in options, use that; otherwise use database default
+ NormalizeIncludeSpecialColumns<
+ EO["includeSpecialColumns"],
+ DatabaseIncludeSpecialColumns
+ >,
+ // Determine whether a $select was applied: an object select (Record) means yes; the default keyof union (no explicit select() call) means no
+ IsSingleField extends true
+ ? false // Single field operations don't include special columns
+ : Selected extends Record>
+ ? true
+ : Selected extends keyof InferSchemaOutputFromFMTable<
+ NonNullable
+ >
+ ? false
+ : true
>,
EO["includeODataAnnotations"] extends true ? true : false
>
@@ -496,15 +621,17 @@ export class RecordBuilder<
url = `/${this.databaseName}/${tableId}('${this.recordId}')`;
}
+ const mergedOptions = this.mergeExecuteOptions(options);
+
if (this.operation === "getSingleField" && this.operationParam) {
url += `/${this.operationParam}`;
} else {
// Add query string for select/expand (only when not getting a single field)
- const queryString = this.buildQueryString();
+ const queryString = this.buildQueryString(
+ mergedOptions.includeSpecialColumns,
+ );
url += queryString;
}
-
- const mergedOptions = this.mergeExecuteOptions(options);
const result = await this.context._makeRequest(url, mergedOptions);
if (result.error) {
@@ -538,6 +665,7 @@ export class RecordBuilder<
expandValidationConfigs,
skipValidation: options?.skipValidation,
useEntityIds: mergedOptions.useEntityIds,
+ includeSpecialColumns: mergedOptions.includeSpecialColumns,
fieldMapping: this.fieldMapping,
});
}
@@ -626,7 +754,8 @@ export class RecordBuilder<
IsSingleField,
FieldColumn,
Selected,
- Expands
+ Expands,
+ SystemCols
>
>
> {
@@ -652,10 +781,7 @@ export class RecordBuilder<
}
// Use shared response processor
- const mergedOptions = mergeExecuteOptions(
- options,
- this.databaseUseEntityIds,
- );
+ const mergedOptions = this.mergeExecuteOptions(options);
const expandBuilder = new ExpandBuilder(
mergedOptions.useEntityIds ?? false,
this.logger,
@@ -672,6 +798,7 @@ export class RecordBuilder<
expandValidationConfigs,
skipValidation: options?.skipValidation,
useEntityIds: mergedOptions.useEntityIds,
+ includeSpecialColumns: mergedOptions.includeSpecialColumns,
fieldMapping: this.fieldMapping,
});
}
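
Taken together, the record-builder changes let callers opt into ROWID/ROWMODID either per `select()` call or per request. A minimal sketch, assuming a `db` client and `contacts` table like the ones in the JSDoc examples above, and assuming the builder's terminal method is `execute()`:

```ts
// Sketch only: `db`, `contacts`, and the terminal execute() call are assumed
// to match the JSDoc examples in record-builder.ts above.

// 1) Explicit system columns alongside a select(); the new SystemCols type
//    parameter carries ROWID/ROWMODID into the result type.
const picked = await db
  .from(contacts)
  .get("some-uuid")
  .select({ name: contacts.name }, { ROWID: true, ROWMODID: true })
  .execute();

// 2) No select(): the per-request flag overrides the database-level default
//    that buildQueryString() falls back to.
const whole = await db
  .from(contacts)
  .get("some-uuid")
  .execute({ includeSpecialColumns: true });
```
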
diff --git a/packages/fmodata/src/client/update-builder.ts b/packages/fmodata/src/client/update-builder.ts
index a2b2292b..adb540ac 100644
--- a/packages/fmodata/src/client/update-builder.ts
+++ b/packages/fmodata/src/client/update-builder.ts
@@ -2,7 +2,6 @@ import type {
ExecutionContext,
ExecutableBuilder,
Result,
- WithSystemFields,
ExecuteOptions,
ExecuteMethodOptions,
} from "../types";
@@ -35,6 +34,7 @@ export class UpdateBuilder<
private returnPreference: ReturnPreference;
private databaseUseEntityIds: boolean;
+ private databaseIncludeSpecialColumns: boolean;
constructor(config: {
occurrence: Occ;
@@ -43,6 +43,7 @@ export class UpdateBuilder<
data: Partial>;
returnPreference: ReturnPreference;
databaseUseEntityIds?: boolean;
+ databaseIncludeSpecialColumns?: boolean;
}) {
this.table = config.occurrence;
this.databaseName = config.databaseName;
@@ -50,6 +51,8 @@ export class UpdateBuilder<
this.data = config.data;
this.returnPreference = config.returnPreference;
this.databaseUseEntityIds = config.databaseUseEntityIds ?? false;
+ this.databaseIncludeSpecialColumns =
+ config.databaseIncludeSpecialColumns ?? false;
}
/**
diff --git a/packages/fmodata/src/client/webhook-builder.ts b/packages/fmodata/src/client/webhook-builder.ts
new file mode 100644
index 00000000..52d1cac8
--- /dev/null
+++ b/packages/fmodata/src/client/webhook-builder.ts
@@ -0,0 +1,285 @@
+import { FMTable, getTableName } from "../orm";
+import type { ExecutionContext, ExecuteMethodOptions } from "../types";
+import type { FFetchOptions } from "@fetchkit/ffetch";
+import { FilterExpression } from "../orm/operators";
+import { isColumn, type Column } from "../orm/column";
+import { formatSelectFields } from "./builders/select-utils";
+
+export type Webhook = {
+ webhook: string;
+ headers?: Record;
+ tableName: TableName;
+ notifySchemaChanges?: boolean;
+ select?: string | Column[];
+ filter?: string | FilterExpression;
+};
+
+/**
+ * Webhook information returned by the API
+ */
+export type WebhookInfo = {
+ webHookID: number;
+ tableName: string;
+ url: string;
+ headers?: Record;
+ notifySchemaChanges: boolean;
+ select: string;
+ filter: string;
+ pendingOperations: unknown[];
+};
+
+/**
+ * Response from listing all webhooks
+ */
+export type WebhookListResponse = {
+ Status: string;
+ WebHook: WebhookInfo[];
+};
+
+/**
+ * Response from adding a webhook
+ */
+export type WebhookAddResponse = {
+ webHookResult: {
+ webHookID: number;
+ };
+};
+
+export class WebhookManager {
+ constructor(
+ private readonly databaseName: string,
+ private readonly context: ExecutionContext,
+ ) {}
+
+ /**
+ * Adds a new webhook to the database.
+ * @param webhook - The webhook configuration object
+ * @param webhook.webhook - The webhook URL to call
+ * @param webhook.tableName - The FMTable instance for the table to monitor
+ * @param webhook.headers - Optional custom headers to include in webhook requests
+ * @param webhook.notifySchemaChanges - Whether to notify on schema changes
+ * @param webhook.select - Optional field selection (string or array of Column references)
+ * @param webhook.filter - Optional filter (string or FilterExpression)
+ * @returns Promise resolving to the created webhook data with ID
+ * @example
+ * ```ts
+ * const result = await db.webhook.add({
+ * webhook: "https://example.com/webhook",
+ * tableName: contactsTable,
+ * headers: { "X-Custom-Header": "value" },
+ * });
+ * // result.webHookResult.webHookID contains the new webhook ID
+ * ```
+ * @example
+ * ```ts
+ * // Using filter expressions and column arrays (same DX as query builder)
+ * const result = await db.webhook.add({
+ * webhook: "https://example.com/webhook",
+ * tableName: contacts,
+ * filter: eq(contacts.name, "John"),
+ * select: [contacts.name, contacts.PrimaryKey],
+ * });
+ * ```
+ */
+ async add(
+ webhook: Webhook,
+ options?: ExecuteMethodOptions,
+ ): Promise {
+ // Extract the string table name from the FMTable instance
+ const tableName = getTableName(webhook.tableName);
+
+ // Get useEntityIds setting (check options first, then context, default to false)
+ const useEntityIds =
+ options?.useEntityIds ?? this.context._getUseEntityIds?.() ?? false;
+
+ // Transform filter if it's a FilterExpression
+ let filter: string | undefined;
+ if (webhook.filter !== undefined) {
+ if (webhook.filter instanceof FilterExpression) {
+ filter = webhook.filter.toODataFilter(useEntityIds);
+ } else {
+ filter = webhook.filter;
+ }
+ }
+
+ // Transform select if it's an array of Columns
+ let select: string | undefined;
+ if (webhook.select !== undefined) {
+ if (Array.isArray(webhook.select)) {
+ // Extract field identifiers from columns or use strings as-is
+ const fieldNames = webhook.select.map((item) => {
+ if (isColumn(item)) {
+ return item.getFieldIdentifier(useEntityIds);
+ }
+ return String(item);
+ });
+ // Use formatSelectFields to properly format the select string
+ select = formatSelectFields(
+ fieldNames,
+ webhook.tableName,
+ useEntityIds,
+ );
+ } else {
+ // Already a string, use as-is
+ select = webhook.select;
+ }
+ }
+
+ // Create request body with string table name and transformed filter/select
+ const requestBody: {
+ webhook: string;
+ headers?: Record;
+ tableName: string;
+ notifySchemaChanges?: boolean;
+ select?: string;
+ filter?: string;
+ } = {
+ webhook: webhook.webhook,
+ tableName,
+ };
+
+ if (webhook.headers !== undefined) {
+ requestBody.headers = webhook.headers;
+ }
+ if (webhook.notifySchemaChanges !== undefined) {
+ requestBody.notifySchemaChanges = webhook.notifySchemaChanges;
+ }
+ if (select !== undefined) {
+ requestBody.select = select;
+ }
+ if (filter !== undefined) {
+ requestBody.filter = filter;
+ }
+
+ const result = await this.context._makeRequest(
+ `/${this.databaseName}/Webhook.Add`,
+ {
+ method: "POST",
+ body: JSON.stringify(requestBody),
+ ...options,
+ },
+ );
+
+ if (result.error) {
+ throw result.error;
+ }
+
+ return result.data;
+ }
+
+ /**
+ * Deletes a webhook by ID.
+ * @param webhookId - The ID of the webhook to delete
+ * @returns Promise that resolves when the webhook is deleted
+ * @example
+ * ```ts
+ * await db.webhook.remove(1);
+ * ```
+ */
+ async remove(
+ webhookId: number,
+ options?: ExecuteMethodOptions,
+ ): Promise {
+ const result = await this.context._makeRequest(
+ `/${this.databaseName}/Webhook.Delete(${webhookId})`,
+ {
+ method: "POST",
+ ...options,
+ },
+ );
+
+ if (result.error) {
+ throw result.error;
+ }
+ }
+
+ /**
+ * Gets a webhook by ID.
+ * @param webhookId - The ID of the webhook to retrieve
+ * @returns Promise resolving to the webhook data
+ * @example
+ * ```ts
+ * const webhook = await db.webhook.get(1);
+ * // webhook.webHookID, webhook.tableName, webhook.url, etc.
+ * ```
+ */
+ async get(
+ webhookId: number,
+ options?: ExecuteMethodOptions,
+ ): Promise {
+ const result = await this.context._makeRequest(
+ `/${this.databaseName}/Webhook.Get(${webhookId})`,
+ options,
+ );
+
+ if (result.error) {
+ throw result.error;
+ }
+
+ return result.data;
+ }
+
+ /**
+ * Lists all webhooks.
+ * @returns Promise resolving to webhook list response with status and webhooks array
+ * @example
+ * ```ts
+ * const result = await db.webhook.list();
+ * // result.Status contains the status
+ * // result.WebHook contains the array of webhooks
+ * ```
+ */
+ async list(options?: ExecuteMethodOptions): Promise {
+ const result = await this.context._makeRequest(
+ `/${this.databaseName}/Webhook.GetAll`,
+ options,
+ );
+
+ if (result.error) {
+ throw result.error;
+ }
+
+ return result.data;
+ }
+
+ /**
+ * Invokes a webhook by ID, optionally for specific row IDs.
+ * @param webhookId - The ID of the webhook to invoke
+ * @param options - Optional configuration
+ * @param options.rowIDs - Array of row IDs to trigger the webhook for
+ * @returns Promise resolving to the invocation result (type unknown until API behavior is confirmed)
+ * @example
+ * ```ts
+ * // Invoke for all rows
+ * await db.webhook.invoke(1);
+ *
+ * // Invoke for specific rows
+ * await db.webhook.invoke(1, { rowIDs: [63, 61] });
+ * ```
+ */
+ async invoke(
+ webhookId: number,
+ options?: { rowIDs?: number[] },
+ executeOptions?: ExecuteMethodOptions,
+ ): Promise {
+ const body: { rowIDs?: number[] } = {};
+ if (options?.rowIDs !== undefined) {
+ body.rowIDs = options.rowIDs;
+ }
+
+ const result = await this.context._makeRequest(
+ `/${this.databaseName}/Webhook.Invoke(${webhookId})`,
+ {
+ method: "POST",
+ body: Object.keys(body).length > 0 ? JSON.stringify(body) : undefined,
+ ...executeOptions,
+ },
+ );
+
+ if (result.error) {
+ throw result.error;
+ }
+
+ return result.data;
+ }
+}
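
For reference, a hedged end-to-end sketch of the new webhook API, stitched together from the JSDoc examples above; `db`, `contacts`, and the `eq` filter helper are assumed to exist as documented elsewhere in the package:

```ts
// Assumes db.webhook exposes this WebhookManager, contacts is an FMTable
// instance, and eq() is the package's filter operator (all assumptions here).
const { webHookResult } = await db.webhook.add({
  webhook: "https://example.com/webhook",
  tableName: contacts,
  filter: eq(contacts.name, "John"),
  select: [contacts.name],
  notifySchemaChanges: true,
});

const all = await db.webhook.list(); // { Status, WebHook: WebhookInfo[] }
await db.webhook.invoke(webHookResult.webHookID, { rowIDs: [63, 61] });
await db.webhook.remove(webHookResult.webHookID);
```
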
diff --git a/packages/fmodata/src/index.ts b/packages/fmodata/src/index.ts
index 745d4f99..a2fe9340 100644
--- a/packages/fmodata/src/index.ts
+++ b/packages/fmodata/src/index.ts
@@ -67,6 +67,12 @@ export type {
TimestampField,
ContainerField,
} from "./client/schema-manager";
+export type {
+ Webhook,
+ WebhookInfo,
+ WebhookListResponse,
+ WebhookAddResponse,
+} from "./client/webhook-builder";
// Utility types for type annotations
export type {
diff --git a/packages/fmodata/src/orm/field-builders.ts b/packages/fmodata/src/orm/field-builders.ts
index d7acbaa7..b856d1d5 100644
--- a/packages/fmodata/src/orm/field-builders.ts
+++ b/packages/fmodata/src/orm/field-builders.ts
@@ -29,6 +29,7 @@ export class FieldBuilder<
private _outputValidator?: StandardSchemaV1;
private _inputValidator?: StandardSchemaV1;
private _fieldType: string;
+ private _comment?: string;
constructor(fieldType: string) {
this._fieldType = fieldType;
@@ -36,11 +37,17 @@ export class FieldBuilder<
/**
* Mark this field as the primary key for the table.
- * Primary keys are automatically read-only.
+ * Primary keys are automatically read-only and non-nullable.
*/
- primaryKey(): FieldBuilder {
+ primaryKey(): FieldBuilder<
+ NonNullable,
+ NonNullable,
+ NonNullable,
+ true
+ > {
const builder = this._clone() as any;
builder._primaryKey = true;
+ builder._notNull = true; // Primary keys are automatically non-nullable
builder._readOnly = true; // Primary keys are automatically read-only
return builder;
}
@@ -114,6 +121,19 @@ export class FieldBuilder<
return builder;
}
+ /**
+ * Add a comment to this field for metadata purposes.
+ * This helps future developers understand the purpose of the field.
+ *
+ * @example
+ * textField().comment("Account name of the user who last modified each record")
+ */
+ comment(comment: string): FieldBuilder {
+ const builder = this._clone();
+ builder._comment = comment;
+ return builder;
+ }
+
/**
* Get the metadata configuration for this field.
* @internal Used by fmTableOccurrence to extract field configuration
@@ -127,6 +147,7 @@ export class FieldBuilder<
entityId: this._entityId,
outputValidator: this._outputValidator,
inputValidator: this._inputValidator,
+ comment: this._comment,
};
}
@@ -144,6 +165,7 @@ export class FieldBuilder<
builder._entityId = this._entityId;
builder._outputValidator = this._outputValidator;
builder._inputValidator = this._inputValidator;
+ builder._comment = this._comment;
return builder;
}
}
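
The two field-builder changes are small but visible at the type level; a short sketch, assuming `textField()` is available as in the JSDoc example above:

```ts
// primaryKey() now implies notNull, so the inferred output type is
// non-nullable; comment() only attaches metadata and does not change types.
const fields = {
  id: textField().primaryKey(), // read-only and non-nullable
  modifiedBy: textField().comment(
    "Account name of the user who last modified each record",
  ),
};
```
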
diff --git a/packages/fmodata/src/orm/table.ts b/packages/fmodata/src/orm/table.ts
index b61d9d73..3bd2808e 100644
--- a/packages/fmodata/src/orm/table.ts
+++ b/packages/fmodata/src/orm/table.ts
@@ -2,7 +2,7 @@ import type { StandardSchemaV1 } from "@standard-schema/spec";
import { FieldBuilder, type ContainerDbType } from "./field-builders";
import type { FieldBuilder as FieldBuilderType } from "./field-builders";
import { Column, createColumn } from "./column";
-import { z } from "zod/v4";
+// import { z } from "zod/v4";
/**
* Extract the output type from a FieldBuilder.
@@ -89,6 +89,7 @@ const FMTableNavigationPaths = Symbol.for("fmodata:FMTableNavigationPaths");
const FMTableDefaultSelect = Symbol.for("fmodata:FMTableDefaultSelect");
const FMTableBaseTableConfig = Symbol.for("fmodata:FMTableBaseTableConfig");
const FMTableUseEntityIds = Symbol.for("fmodata:FMTableUseEntityIds");
+const FMTableComment = Symbol.for("fmodata:FMTableComment");
/**
* Base table class with Symbol-based internal properties.
@@ -113,6 +114,7 @@ export class FMTable<
NavigationPaths: FMTableNavigationPaths,
DefaultSelect: FMTableDefaultSelect,
BaseTableConfig: FMTableBaseTableConfig,
+ Comment: FMTableComment,
};
/** @internal */
@@ -125,7 +127,10 @@ export class FMTable<
[FMTableUseEntityIds]?: boolean;
/** @internal */
- [FMTableSchema]: StandardSchemaV1>;
+ [FMTableComment]?: string;
+
+ /** @internal */
+ [FMTableSchema]: Partial>;
/** @internal */
[FMTableFields]: TFields;
@@ -141,8 +146,8 @@ export class FMTable<
/** @internal */
[FMTableBaseTableConfig]: {
- schema: Record;
- inputSchema?: Record;
+ schema: Partial>;
+ inputSchema?: Partial>;
idField?: keyof TFields;
required: readonly (keyof TFields)[];
readOnly: readonly (keyof TFields)[];
@@ -154,13 +159,14 @@ export class FMTable<
name: TName;
entityId?: `FMTID:${string}`;
useEntityIds?: boolean;
- schema: StandardSchemaV1>;
+ comment?: string;
+ schema: Partial>;
fields: TFields;
navigationPaths: TNavigationPaths;
defaultSelect: "all" | "schema" | Record>;
baseTableConfig: {
- schema: Record;
- inputSchema?: Record;
+ schema: Partial>;
+ inputSchema?: Partial>;
idField?: keyof TFields;
required: readonly (keyof TFields)[];
readOnly: readonly (keyof TFields)[];
@@ -171,6 +177,7 @@ export class FMTable<
this[FMTableName] = config.name;
this[FMTableEntityId] = config.entityId;
this[FMTableUseEntityIds] = config.useEntityIds;
+ this[FMTableComment] = config.comment;
this[FMTableSchema] = config.schema;
this[FMTableFields] = config.fields;
this[FMTableNavigationPaths] = config.navigationPaths;
@@ -267,6 +274,9 @@ export interface FMTableOccurrenceOptions<
/** The entity ID (FMTID) for this table occurrence */
entityId?: `FMTID:${string}`;
+ /** The comment for this table */
+ comment?: string;
+
/**
* Default select behavior:
* - "all": Select all fields (including related tables)
@@ -358,39 +368,14 @@ export function fmTableOccurrence<
}
// Build Zod schema from field builders (output/read validators)
- const zodSchema: Record = {};
+ const outputSchema: Partial> = {};
// Build input schema from field builders (input/write validators)
const inputSchema: Record = {};
for (const { fieldName, config } of fieldConfigs) {
- // Use outputValidator if provided, otherwise create a basic validator
+ // Use outputValidator if provided
if (config.outputValidator) {
- zodSchema[fieldName] = config.outputValidator;
- } else {
- // Create a default validator based on field type and nullability
- let validator: any;
- switch (config.fieldType) {
- case "text":
- case "date":
- case "time":
- case "timestamp":
- case "container":
- case "calculated":
- validator = z.string();
- break;
- case "number":
- validator = z.number();
- break;
- default:
- validator = z.unknown();
- }
-
- // Add nullability if not marked as notNull
- if (!config.notNull) {
- validator = validator.nullable();
- }
-
- zodSchema[fieldName] = validator;
+ outputSchema[fieldName as keyof TFields] = config.outputValidator;
}
// Store inputValidator if provided (for write operations)
@@ -399,18 +384,13 @@ export function fmTableOccurrence<
}
}
- // Create a schema validator for the entire table
- const tableSchema = z.object(zodSchema) as unknown as StandardSchemaV1<
- any,
- InferSchemaFromFields
- >;
-
// Build BaseTable-compatible config
const baseTableConfig = {
- schema: zodSchema as Record,
- inputSchema: (Object.keys(inputSchema).length > 0
- ? inputSchema
- : undefined) as Record | undefined,
+ schema: outputSchema as Partial>,
+ inputSchema:
+ Object.keys(inputSchema).length > 0
+ ? (inputSchema as Partial>)
+ : undefined,
idField: idField as keyof TFields | undefined,
required: required as readonly (keyof TFields)[],
readOnly: readOnly as readonly (keyof TFields)[],
@@ -449,7 +429,8 @@ export function fmTableOccurrence<
name,
entityId: options?.entityId,
useEntityIds: options?.useEntityIds,
- schema: tableSchema,
+ comment: options?.comment,
+ schema: outputSchema,
fields,
navigationPaths,
defaultSelect: resolvedDefaultSelect,
@@ -621,11 +602,11 @@ export function getTableEntityId>(
/**
* Get the schema validator from an FMTable instance.
* @param table - FMTable instance
- * @returns The StandardSchemaV1 validator
+ * @returns The StandardSchemaV1 validator record (partial - only fields with validators)
*/
export function getTableSchema>(
table: T,
-): StandardSchemaV1 {
+): Partial> {
return table[FMTableSchema];
}
@@ -732,6 +713,17 @@ export function getTableId>(table: T): string {
return table[FMTableEntityId] ?? table[FMTableName];
}
+/**
+ * Get the comment from an FMTable instance.
+ * @param table - FMTable instance
+ * @returns The comment string or undefined if not set
+ */
+export function getTableComment>(
+ table: T,
+): string | undefined {
+ return table[FMTableComment];
+}
+
/**
* Get all columns from a table as an object.
* Useful for selecting all fields except some using destructuring.
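
A sketch of the new table-level `comment` option and the `getTableComment` accessor added in table.ts; the `(name, fields, options)` call shape of `fmTableOccurrence` is assumed here:

```ts
// Assumed call shape: fmTableOccurrence(name, fields, options). The comment is
// metadata only; getTableComment() simply reads it back.
const contacts = fmTableOccurrence(
  "Contacts",
  { name: textField() },
  { comment: "Primary contact records synced from the CRM" },
);

getTableComment(contacts); // "Primary contact records synced from the CRM"
```
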
diff --git a/packages/fmodata/src/types.ts b/packages/fmodata/src/types.ts
index 4c310d0d..820fe982 100644
--- a/packages/fmodata/src/types.ts
+++ b/packages/fmodata/src/types.ts
@@ -32,10 +32,16 @@ export interface ExecutableBuilder {
export interface ExecutionContext {
_makeRequest(
url: string,
- options?: RequestInit & FFetchOptions & { useEntityIds?: boolean },
+ options?: RequestInit &
+ FFetchOptions & {
+ useEntityIds?: boolean;
+ includeSpecialColumns?: boolean;
+ },
): Promise>;
_setUseEntityIds?(useEntityIds: boolean): void;
_getUseEntityIds?(): boolean;
+ _setIncludeSpecialColumns?(includeSpecialColumns: boolean): void;
+ _getIncludeSpecialColumns?(): boolean;
_getBaseUrl?(): string;
_getLogger?(): InternalLogger;
}
@@ -46,7 +52,7 @@ export type InferSchemaType> = {
: never;
};
-export type WithSystemFields =
+export type WithSpecialColumns =
T extends Record
? T & {
ROWID: number;
@@ -54,15 +60,12 @@ export type WithSystemFields =
}
: never;
-// Helper type to exclude system fields from a union of keys
+// Helper type to exclude special columns from a union of keys
export type ExcludeSystemFields = Exclude<
T,
"ROWID" | "ROWMODID"
>;
-// Helper type to omit system fields from an object type
-export type OmitSystemFields = Omit;
-
// OData record metadata fields (present on each record)
export type ODataRecordMetadata = {
"@id": string;
@@ -158,6 +161,11 @@ export type ExecuteOptions = {
* Overrides the default behavior of the database to use entity IDs (rather than field names) in THIS REQUEST ONLY
*/
useEntityIds?: boolean;
+ /**
+ * Overrides the default behavior of the database to include special columns (ROWID and ROWMODID) in THIS REQUEST ONLY.
+ * Note: Special columns are only included when there is no $select query.
+ */
+ includeSpecialColumns?: boolean;
};
/**
@@ -213,6 +221,54 @@ export type ConditionallyWithODataAnnotations<
}
: T;
+/**
+ * Normalizes includeSpecialColumns with a database-level default.
+ * Uses distributive conditional types to handle unions correctly.
+ * @template IncludeSpecialColumns - The includeSpecialColumns value from execute options
+ * @template DatabaseDefault - The database-level includeSpecialColumns setting (defaults to false)
+ */
+export type NormalizeIncludeSpecialColumns<
+ IncludeSpecialColumns extends boolean | undefined,
+ DatabaseDefault extends boolean = false,
+> = [IncludeSpecialColumns] extends [true]
+ ? true
+ : [IncludeSpecialColumns] extends [false]
+ ? false
+ : DatabaseDefault; // When undefined, use database-level default
+
+/**
+ * Conditionally adds ROWID and ROWMODID special columns to a type.
+ * Special columns are only included when:
+ * - includeSpecialColumns is true AND
+ * - hasSelect is false (no $select query was applied) AND
+ * - T is an object type (not a primitive like string or number)
+ *
+ * Handles both single objects and arrays of objects.
+ */
+export type ConditionallyWithSpecialColumns<
+ T,
+ IncludeSpecialColumns extends boolean,
+ HasSelect extends boolean,
+> = IncludeSpecialColumns extends true
+ ? HasSelect extends false
+ ? // Handle array types
+ T extends readonly (infer U)[]
+ ? U extends Record
+ ? (U & {
+ ROWID: number;
+ ROWMODID: number;
+ })[]
+ : T
+ : // Handle single object types
+ T extends Record
+ ? T & {
+ ROWID: number;
+ ROWMODID: number;
+ }
+ : T // Don't add special columns to primitives (e.g., single field queries)
+ : T
+ : T;
+
// Helper type to extract schema from a FMTable
export type ExtractSchemaFromOccurrence = Occ extends {
baseTable: { schema: infer S };
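
To show how the two new special-column helpers compose at the type level (the generic parameter lists are partially stripped in the hunk above, so single-parameter signatures matching the JSDoc are assumed):

```ts
// Assumes the helpers take the parameters documented above:
// NormalizeIncludeSpecialColumns<IncludeSpecialColumns, DatabaseDefault>
// ConditionallyWithSpecialColumns<T, IncludeSpecialColumns, HasSelect>
type Row = { id: string; name: string };

// No per-request override + database default of true + no $select → columns added.
type WithIds = ConditionallyWithSpecialColumns<
  Row,
  NormalizeIncludeSpecialColumns<undefined, true>, // → true
  false
>; // Row & { ROWID: number; ROWMODID: number }

// A $select was applied, so special columns are never appended.
type Plain = ConditionallyWithSpecialColumns<Row, true, true>; // Row
```
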
diff --git a/packages/fmodata/src/validation.ts b/packages/fmodata/src/validation.ts
index 116ba375..6382bf3d 100644
--- a/packages/fmodata/src/validation.ts
+++ b/packages/fmodata/src/validation.ts
@@ -19,7 +19,7 @@ import {
*/
export async function validateAndTransformInput>(
data: Partial,
- inputSchema?: Record,
+ inputSchema?: Partial>,
): Promise> {
// If no input schema, return data as-is
if (!inputSchema) {
@@ -30,6 +30,9 @@ export async function validateAndTransformInput>(
// Process each field that has an input validator
for (const [fieldName, fieldSchema] of Object.entries(inputSchema)) {
+ // Skip if no schema for this field
+ if (!fieldSchema) continue;
+
// Only process fields that are present in the input data
if (fieldName in data) {
const inputValue = data[fieldName];
@@ -83,7 +86,7 @@ export async function validateAndTransformInput>(
// Type for expand validation configuration
export type ExpandValidationConfig = {
relation: string;
- targetSchema?: Record;
+ targetSchema?: Partial>;
targetTable?: FMTable