diff --git a/.changeset/real-ideas-sort.md b/.changeset/real-ideas-sort.md new file mode 100644 index 00000000..58f02bac --- /dev/null +++ b/.changeset/real-ideas-sort.md @@ -0,0 +1,6 @@ +--- +"@proofkit/typegen": minor +--- + +New command: `npx @proofkit/typegen@latest ui` will launch a web UI for configuring and running your typegen config. +(beta) support for @proofkit/fmodata typegen config. diff --git a/.coderabbit.yaml b/.coderabbit.yaml index 9f9270fb..c0036945 100644 --- a/.coderabbit.yaml +++ b/.coderabbit.yaml @@ -4,11 +4,11 @@ reviews: # Enable automated review for pull requests auto_review: - enabled: true + enabled: false base_branches: - ".*" # Matches all branches using regex review_status: false poem: false - -path_filters: - - "!apps/demo/**" # exclude the demo app from reivews + high_level_summary: false + path_filters: + - "!apps/demo/**" # exclude the demo app from reviews diff --git a/.gitignore b/.gitignore index 315461c7..add6ff10 100644 --- a/.gitignore +++ b/.gitignore @@ -72,3 +72,4 @@ server/dist public/dist .turbo packages/fmdapi/test/typegen/* +packages/typegen/schema/metadata.xml diff --git a/Users/ericluce/Documents/Code/work/proofkit/apps/demo/tests/typegen-output/without-zod/client/index.ts b/Users/ericluce/Documents/Code/work/proofkit/apps/demo/tests/typegen-output/without-zod/client/index.ts index 899d9556..ecc4abeb 100644 --- a/Users/ericluce/Documents/Code/work/proofkit/apps/demo/tests/typegen-output/without-zod/client/index.ts +++ b/Users/ericluce/Documents/Code/work/proofkit/apps/demo/tests/typegen-output/without-zod/client/index.ts @@ -1,2 +1,2 @@ -export { client as testLayoutClient } from "./testLayout"; -export { client as weirdPortalsClient } from "./weirdPortals"; +export { client as testLayoutLayout } from "./testLayout"; +export { client as weirdPortalsLayout } from "./weirdPortals"; diff --git a/apps/demo/package.json b/apps/demo/package.json index e3125aad..66341516 100644 --- a/apps/demo/package.json +++ 
b/apps/demo/package.json @@ -19,23 +19,29 @@ "dotenv": "^16.5.0", "fm-odata-client": "^3.0.1", "fs-extra": "^11.3.0", - "next": "^15.4.9", - "react": "^19.1.1", - "react-dom": "^19.1.1", - "zod": "3.25.64" + "next": "16.1.0", + "react": "19.2.3", + "react-dom": "19.2.3", + "zod": "^4.1.13" }, "devDependencies": { "@eslint/eslintrc": "^3", "@tailwindcss/postcss": "^4.1.11", "@types/fs-extra": "^11.0.4", "@types/node": "^22.17.1", - "@types/react": "^19.1.10", - "@types/react-dom": "^19.1.7", + "@types/react": "19.2.7", + "@types/react-dom": "19.2.3", "dotenv-cli": "^8.0.0", "eslint": "^9.23.0", - "eslint-config-next": "^15.3.3", + "eslint-config-next": "16.1.0", "tailwindcss": "^4.1.11", - "typescript": "^5.9.2", - "vitest": "^3.2.4" + "typescript": "^5.9.3", + "vitest": "^4.0.7" + }, + "pnpm": { + "overrides": { + "@types/react": "19.2.7", + "@types/react-dom": "19.2.3" + } } } diff --git a/apps/demo/tsconfig.json b/apps/demo/tsconfig.json index 19004bbc..9ba68802 100644 --- a/apps/demo/tsconfig.json +++ b/apps/demo/tsconfig.json @@ -1,7 +1,11 @@ { "compilerOptions": { "target": "ES2017", - "lib": ["dom", "dom.iterable", "esnext"], + "lib": [ + "dom", + "dom.iterable", + "esnext" + ], "allowJs": true, "skipLibCheck": true, "strict": true, @@ -11,7 +15,7 @@ "moduleResolution": "bundler", "resolveJsonModule": true, "isolatedModules": true, - "jsx": "preserve", + "jsx": "react-jsx", "incremental": true, "plugins": [ { @@ -19,9 +23,20 @@ } ], "paths": { - "@/*": ["./src/*"] + "@/*": [ + "./src/*" + ] } }, - "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], - "exclude": ["node_modules", "tests"] + "include": [ + "next-env.d.ts", + "**/*.ts", + "**/*.tsx", + ".next/types/**/*.ts", + ".next/dev/types/**/*.ts" + ], + "exclude": [ + "node_modules", + "tests" + ] } diff --git a/apps/docs/content/docs/typegen/config-odata.mdx b/apps/docs/content/docs/typegen/config-odata.mdx new file mode 100644 index 00000000..6e8bac15 --- /dev/null +++ 
b/apps/docs/content/docs/typegen/config-odata.mdx @@ -0,0 +1,294 @@ +--- +title: Configuration (OData) +--- + +import { TypeTable } from "fumadocs-ui/components/type-table"; +import { Tabs, Tab } from "fumadocs-ui/components/tabs"; + +The typegen tool supports OData-based type generation using the `fmodata` config type. This is configured using the `proofkit-typegen-config.jsonc` file at the root of your project. + + +The `@proofkit/fmodata` package is still in beta. Some of these options may change. + + +The config key can also be an array of configs, which is useful if you need to connect to multiple databases, or with different settings for different sets of tables. + +```jsonc title="proofkit-typegen-config.jsonc" tab="Single OData config" +{ + "$schema": "https://proofkit.dev/typegen-config-schema.json", + "config": { + "type": "fmodata", + // ... your OData config here + }, +} +``` + +```jsonc title="proofkit-typegen-config.jsonc" tab="Multiple configs" +{ + "$schema": "https://proofkit.dev/typegen-config-schema.json", + "config": [ + { + "type": "fmodata", + // ... your OData config here + }, + { + "type": "fmdapi", + // ... your Data API config here + }, + ], +} +``` + +## Config options + + + +### `type` (required) +Must be set to `"fmodata"` to use OData-based type generation. + +### `configName` (optional) +An optional name for this configuration. Useful when using multiple configs to identify which config is being used. + +### `path` (default: `"schema"`) +The path to the directory where the generated files will be saved. + +### `reduceMetadata` (optional) +If set to `true`, reduced OData annotations will be requested from the server to reduce payload size. This will prevent comments, entity ids, and other properties from being generated. + + + This can also be set per-table in the `tables` array to override the top-level setting for specific tables. 
+ + +### `clearOldFiles` (default: `false`) +If set to `false`, the path will not be cleared before the new files are written. Only the `client` and `generated` directories are cleared to allow for potential overrides to be kept. + + + This is different from the Data API config, which defaults to `true`. For OData configs, we preserve existing files by default to allow for customizations. + + +### `alwaysOverrideFieldNames` (default: `true`) +If set to `true` (default), field names will always be updated to match metadata, even when matching by entity ID. If set to `false`, existing field names are preserved when matching by entity ID. + + + This can also be set per-table in the `tables` array to override the top-level setting for specific tables. + + +### `envNames` (optional) +If set, will use the specified environment variable names for your OData connection. + + + Only use the **names** of your environment variables, not the values for security reasons. + + +The `envNames` object supports: +- `server`: The environment variable name for the OData server URL +- `db`: The environment variable name for the database name +- `auth`: An object with either: + - `apiKey`: The environment variable name for the API key, or + - `username` and `password`: The environment variable names for username and password + +## Table options + +The `tables` array in the config is where you define the tables (entity sets) that you want to generate types for. You must define at least one table in the config. + + + +### `tableName` (required) +The entity set name (table occurrence name) to generate. This table will be included in metadata download and type generation. Must match exactly the name of an entity set in your OData service. + +### `variableName` (optional) +Override the generated TypeScript variable name. The original entity set name is still used for the OData path, but you can use a different name in your TypeScript code. 
+ +For example, if your entity set is named `"Customers_Table"` but you want to use `Customers` in your code: + +```jsonc +{ + "tableName": "Customers_Table", + "variableName": "Customers" +} +``` + +### `reduceMetadata` (optional) +If undefined, the top-level setting will be used. If set to `true` or `false`, it will override the top-level `reduceMetadata` setting for this specific table. + +### `alwaysOverrideFieldNames` (optional) +If undefined, the top-level setting will be used. If set to `true` or `false`, it will override the top-level `alwaysOverrideFieldNames` setting for this specific table. + +## Field options + +Within each table's `fields` array, you can specify field-level overrides. + + + +### `fieldName` (required) +The field name this override applies to. Must match exactly the name of a field in the table's metadata. + +### `exclude` (optional) +If set to `true`, this field will be excluded from generation entirely. Useful for fields you don't need in your TypeScript types. + +### `typeOverride` (optional) +Override the inferred field type from metadata. The available options are: + +- `"text"`: Treats the field as a text field +- `"number"`: Treats the field as a number field +- `"boolean"`: Treats the field as a boolean (validated with `z.coerce.boolean()`) +- `"fmBooleanNumber"`: Same as boolean, explicit FileMaker 0/1 pattern +- `"date"`: Treats the field as a date field +- `"timestamp"`: Treats the field as a timestamp field +- `"container"`: Treats the field as a container field + + + The typegen tool will attempt to infer the correct field type from the OData metadata. Use `typeOverride` only when you need to override the inferred type. 
+ + +## Example configuration + +Here's a complete example of an OData configuration: + +```jsonc title="proofkit-typegen-config.jsonc" +{ + "$schema": "https://proofkit.dev/typegen-config-schema.json", + "config": { + "type": "fmodata", + "configName": "Production OData", + "path": "schema/odata", + "reduceMetadata": true, + "clearOldFiles": false, + "alwaysOverrideFieldNames": true, + "envNames": { + "server": "ODATA_SERVER_URL", + "db": "ODATA_DATABASE_NAME", + "auth": { + "apiKey": "ODATA_API_KEY" + } + }, + "tables": [ + { + "tableName": "Customers", + "variableName": "Customers", + "fields": [ + { + "fieldName": "InternalID", + "exclude": true + }, + { + "fieldName": "Status", + "typeOverride": "boolean" + } + ] + }, + { + "tableName": "Orders", + "reduceMetadata": false, + "fields": [ + { + "fieldName": "OrderDate", + "typeOverride": "date" + } + ] + } + ] + } +} +``` + diff --git a/apps/docs/content/docs/typegen/config.mdx b/apps/docs/content/docs/typegen/config.mdx index fb3262fc..9ef5a49f 100644 --- a/apps/docs/content/docs/typegen/config.mdx +++ b/apps/docs/content/docs/typegen/config.mdx @@ -1,5 +1,5 @@ --- -title: Configuration +title: Configuration (Data API) --- import { TypeTable } from "fumadocs-ui/components/type-table"; diff --git a/apps/docs/content/docs/typegen/meta.json b/apps/docs/content/docs/typegen/meta.json index 3ab99607..75e3fdb2 100644 --- a/apps/docs/content/docs/typegen/meta.json +++ b/apps/docs/content/docs/typegen/meta.json @@ -8,8 +8,10 @@ "index", "faq", "customization", + "ui", "---Reference---", "config", + "config-odata", "options" ] } diff --git a/apps/docs/content/docs/typegen/options.mdx b/apps/docs/content/docs/typegen/options.mdx index c342c82b..da215e83 100644 --- a/apps/docs/content/docs/typegen/options.mdx +++ b/apps/docs/content/docs/typegen/options.mdx @@ -26,7 +26,6 @@ npx @proofkit/typegen generate This is also the default command, so "generate" is optional. 
If this command is run without any config file detected, you will be prompted to create the config file (the `init` command). -See [Global Options](#global-options) for `--config` usage. ### `--env-path ` Set a custom path for where your environment variables are stored. Recreate the overrides file(s), even if they already exist. -### `--skip-env-check` -Ignore loading environment variables from a file. Use this option if you are injecting environment variables directly as the command runs. +## `ui` command +```bash +npx @proofkit/typegen ui +``` +Launch the typegen web interface for easy configuration. + ## `init` command diff --git a/apps/docs/content/docs/typegen/ui.mdx b/apps/docs/content/docs/typegen/ui.mdx new file mode 100644 index 00000000..20ae4b14 --- /dev/null +++ b/apps/docs/content/docs/typegen/ui.mdx @@ -0,0 +1,25 @@ +--- +title: Typegen UI +--- + +The typegen tool has a built-in web interface for editing your JSON config file and running the typegen scripts. It's helpful for making sure your environment variables are set up correctly and can help autocomplete layout/field/table names into the config file. + +To launch the UI, run the following command and a browser window will open at `http://localhost:3141`: + +```bash +npx @proofkit/typegen@latest ui +``` + + +## CLI options + +The UI can be configured with the following CLI options: + +### `--port ` +Set the port for the UI server. + +### `--config ` +Set a custom filename/path for where the config file is located or will be created. The file name must end with either `jsonc` or `json`. + +### `--no-open` +Don't automatically open the browser. 
\ No newline at end of file diff --git a/apps/docs/next.config.ts b/apps/docs/next.config.ts index 61483a15..2a41229c 100644 --- a/apps/docs/next.config.ts +++ b/apps/docs/next.config.ts @@ -2,6 +2,7 @@ import { createMDX } from "fumadocs-mdx/next"; import { type NextConfig } from "next"; import { validateRegistry } from "@proofkit/registry"; import { source } from "./src/lib/source"; +import path from "path"; const withMDX = createMDX(); // validateRegistry(); @@ -9,7 +10,24 @@ const withMDX = createMDX(); const config: NextConfig = { reactStrictMode: true, serverExternalPackages: ["typescript", "twoslash", "shiki"], - transpilePackages: ["@proofkit/fmdapi", "@proofkit/registry"], + transpilePackages: [ + "@proofkit/fmdapi", + "@proofkit/registry", + "@proofkit/typegen", + ], + turbopack: { + root: path.resolve(__dirname, "../.."), + }, + webpack: (config, { isServer }) => { + // Resolve @proofkit/typegen/config to source files for development + config.resolve.alias = { + ...config.resolve.alias, + "@proofkit/typegen/config": require.resolve( + "@proofkit/typegen/src/types.ts", + ), + }; + return config; + }, async redirects() { return [ { diff --git a/apps/docs/package.json b/apps/docs/package.json index 9731a6e7..789954e7 100644 --- a/apps/docs/package.json +++ b/apps/docs/package.json @@ -3,8 +3,8 @@ "version": "0.0.2", "private": true, "scripts": { - "build": "node scripts/bundle-registry-templates.js && next build", - "dev": "next dev -p 3005 --turbo", + "build": "pnpm --filter @proofkit/typegen build && node scripts/bundle-registry-templates.js && next build", + "dev": "next dev -p 3005", "start": "next start -p 3005", "postinstall": "fumadocs-mdx", "test": "vitest run" @@ -31,16 +31,16 @@ "hono": "^4.9.0", "jiti": "^1.21.7", "lucide-react": "^0.511.0", - "next": "^15.5.8", + "next": "16.1.0", "next-themes": "^0.4.6", - "react": "^19.1.1", - "react-dom": "^19.1.1", + "react": "19.2.3", + "react-dom": "19.2.3", "shadcn": "^2.10.0", "shiki": "^3.13.0", 
"tailwind-merge": "^3.3.1", "ts-morph": "^26.0.0", "twoslash": "^0.3.4", - "zod": "3.25.64" + "zod": "^4.1.13" }, "devDependencies": { "@proofkit/fmdapi": "workspace:*", @@ -48,14 +48,20 @@ "@types/jest": "^29.5.14", "@types/mdx": "^2.0.13", "@types/node": "^22.17.1", - "@types/react": "^19.1.10", - "@types/react-dom": "^19.1.7", + "@types/react": "19.2.7", + "@types/react-dom": "19.2.3", "eslint-plugin-prettier": "^5.5.4", "happy-dom": "^15.11.7", "postcss": "^8.5.6", "tailwindcss": "^4.1.11", "tw-animate-css": "^1.3.6", - "typescript": "^5.9.2", - "vitest": "^3.2.4" + "typescript": "^5.9.3", + "vitest": "^4.0.7" + }, + "pnpm": { + "overrides": { + "@types/react": "19.2.7", + "@types/react-dom": "19.2.3" + } } } diff --git a/apps/docs/tests/utils.manifest.test.ts b/apps/docs/tests/utils.manifest.test.ts index 604f90b6..60983014 100644 --- a/apps/docs/tests/utils.manifest.test.ts +++ b/apps/docs/tests/utils.manifest.test.ts @@ -12,9 +12,10 @@ describe("Registry utils (dynamic scanning)", () => { // Should find the mode-toggle template expect(index.length).toBeGreaterThan(0); expect(index[0]).toHaveProperty("name"); - expect(index[0]).toHaveProperty("type"); expect(index[0]).toHaveProperty("category"); - // RegistryIndexItem only has name, type, and category - not files + expect(index[0]).toHaveProperty("title"); + expect(index[0]).toHaveProperty("description"); + // RegistryIndexItem has name, category, title, description - not type or files }); it("reads a known template (mode-toggle)", async () => { diff --git a/apps/docs/tsconfig.json b/apps/docs/tsconfig.json index cecc2912..9b1f5c91 100644 --- a/apps/docs/tsconfig.json +++ b/apps/docs/tsconfig.json @@ -2,7 +2,11 @@ "compilerOptions": { "baseUrl": ".", "target": "ESNext", - "lib": ["dom", "dom.iterable", "esnext"], + "lib": [ + "dom", + "dom.iterable", + "esnext" + ], "allowJs": true, "skipLibCheck": true, "strict": true, @@ -13,13 +17,21 @@ "moduleResolution": "bundler", "resolveJsonModule": true, 
"isolatedModules": true, - "jsx": "preserve", + "jsx": "react-jsx", "incremental": true, "paths": { - "@/.source": ["./.source/index.ts"], - "@/registry/*": ["./src/registry/*"], - "@/*": ["./src/*"], - "@/components/*": ["./src/components/*"] + "@/.source": [ + "./.source/index.ts" + ], + "@/registry/*": [ + "./src/registry/*" + ], + "@/*": [ + "./src/*" + ], + "@/components/*": [ + "./src/components/*" + ] }, "plugins": [ { @@ -27,6 +39,15 @@ } ] }, - "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], - "exclude": ["node_modules", "public/registry-templates/**/*"] + "include": [ + "next-env.d.ts", + "**/*.ts", + "**/*.tsx", + ".next/types/**/*.ts", + ".next/dev/types/**/*.ts" + ], + "exclude": [ + "node_modules", + "public/registry-templates/**/*" + ] } diff --git a/package.json b/package.json index 1f103314..95a8ca02 100644 --- a/package.json +++ b/package.json @@ -21,8 +21,8 @@ "knip": "^5.56.0", "prettier": "^3.5.3", "turbo": "^2.5.4", - "typescript": "^5.9.2", - "vitest": "^3.2.4" + "typescript": "^5.9.3", + "vitest": "^4.0.7" }, "packageManager": "pnpm@10.14.0", "engines": { diff --git a/packages/better-auth/package.json b/packages/better-auth/package.json index 4b85b8e5..e6985554 100644 --- a/packages/better-auth/package.json +++ b/packages/better-auth/package.json @@ -58,7 +58,7 @@ "odata-query": "^8.0.4", "prompts": "^2.4.2", "vite": "^6.3.4", - "zod": "3.25.64" + "zod": "^4.1.13" }, "devDependencies": { "@types/fs-extra": "^11.0.4", @@ -66,7 +66,7 @@ "@vitest/ui": "^3.2.4", "fm-odata-client": "^3.0.1", "publint": "^0.3.12", - "typescript": "^5.9.2", - "vitest": "^3.2.4" + "typescript": "^5.9.3", + "vitest": "^4.0.7" } } diff --git a/packages/cli/package.json b/packages/cli/package.json index 14636c7d..aea6db7d 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -67,7 +67,7 @@ "chalk": "5.4.1", "commander": "^14.0.0", "dotenv": "^16.5.0", - "es-toolkit": "^1.15.1", + "es-toolkit": "^1.38.0", "execa": 
"^9.5.1", "fast-glob": "^3.3.3", "fs-extra": "^11.3.0", @@ -96,7 +96,7 @@ "@proofkit/registry": "workspace:*", "@rollup/plugin-replace": "^6.0.3", "@t3-oss/env-nextjs": "^0.10.1", - "@tanstack/react-query": "^5.49.2", + "@tanstack/react-query": "^5.76.1", "@trpc/client": "11.0.0-rc.441", "@trpc/next": "11.0.0-rc.441", "@trpc/react-query": "11.0.0-rc.441", @@ -108,7 +108,7 @@ "@types/randomstring": "^1.3.0", "@types/react": "^19.1.10", "@types/semver": "^7.7.0", - "@vitest/coverage-v8": "^1.4.0", + "@vitest/coverage-v8": "^2.1.8", "drizzle-kit": "^0.21.4", "drizzle-orm": "^0.30.10", "mysql2": "^3.9.7", @@ -123,8 +123,8 @@ "tailwindcss": "^4.1.11", "tsdown": "^0.14.1", "type-fest": "^3.13.1", - "typescript": "^5.9.2", - "vitest": "^3.2.4", - "zod": "3.25.64" + "typescript": "^5.9.3", + "vitest": "^4.0.7", + "zod": "^4.1.13" } } diff --git a/packages/cli/src/utils/formatting.ts b/packages/cli/src/utils/formatting.ts index 25959dbb..4edbeaba 100644 --- a/packages/cli/src/utils/formatting.ts +++ b/packages/cli/src/utils/formatting.ts @@ -1,4 +1,4 @@ -import { format, getFileInfo } from "prettier"; +import * as prettier from "prettier"; import { Project } from "ts-morph"; import { state } from "~/state.js"; @@ -14,11 +14,13 @@ export async function formatAndSaveSourceFiles(project: Project) { // run each file through the prettier formatter for await (const file of files) { const filePath = file.getFilePath(); - const fileInfo = await getFileInfo(filePath); + const fileInfo = (await prettier.getFileInfo?.(filePath)) ?? 
{ + ignored: false, + }; if (fileInfo.ignored) continue; - const formatted = await format(file.getFullText(), { + const formatted = await prettier.format(file.getFullText(), { filepath: filePath, }); file.replaceWithText(formatted); diff --git a/packages/cli/vitest.config.ts b/packages/cli/vitest.config.ts index b6e696ab..545dbfbb 100644 --- a/packages/cli/vitest.config.ts +++ b/packages/cli/vitest.config.ts @@ -11,9 +11,11 @@ export default defineConfig({ environment: "node", setupFiles: ["./tests/setup.ts"], include: ["tests/**/*.test.ts"], + testTimeout: 60000, // 60 seconds for CLI tests which can be slow coverage: { provider: "v8", reporter: ["text", "json", "html"], + include: ["src/**/*.ts"], }, }, }); diff --git a/packages/fmdapi/package.json b/packages/fmdapi/package.json index c1a9d95e..97be322d 100644 --- a/packages/fmdapi/package.json +++ b/packages/fmdapi/package.json @@ -58,7 +58,7 @@ "fs-extra": "^11.3.0", "ts-morph": "^26.0.0", "vite": "^6.3.4", - "zod": "3.25.64" + "zod": "^4.1.13" }, "devDependencies": { "@types/fs-extra": "^11.0.4", @@ -72,8 +72,8 @@ "prettier": "^3.5.3", "publint": "^0.3.12", "ts-toolbelt": "^9.6.0", - "typescript": "^5.9.2", - "vitest": "^3.2.4" + "typescript": "^5.9.3", + "vitest": "^4.0.7" }, "engines": { "node": ">=18.0.0" diff --git a/packages/fmodata/README.md b/packages/fmodata/README.md index 13950147..2d742318 100644 --- a/packages/fmodata/README.md +++ b/packages/fmodata/README.md @@ -778,6 +778,173 @@ console.log(result.result.recordId); **Note:** OData doesn't support script names with special characters (e.g., `@`, `&`, `/`) or script names beginning with a number. TypeScript will catch these at compile time. +## Webhooks + +Webhooks allow you to receive notifications when data changes in your FileMaker database. The library provides a type-safe API for managing webhooks through the `db.webhook` property. 
+ +### Adding a Webhook + +Create a new webhook to monitor a table for changes: + +```typescript +// Basic webhook +const result = await db.webhook.add({ + webhook: "https://example.com/webhook", + tableName: contactsTable, +}); + +// Access the created webhook ID +console.log(result.webHookResult.webHookID); +``` + +### Webhook Configuration Options + +Webhooks support various configuration options: + +```typescript +// With custom headers +const result = await db.webhook.add({ + webhook: "https://example.com/webhook", + tableName: contactsTable, + headers: { + "X-Custom-Header": "value", + Authorization: "Bearer token", + }, + notifySchemaChanges: true, // Notify when schema changes +}); + +// With field selection (using column references) +const result = await db.webhook.add({ + webhook: "https://example.com/webhook", + tableName: contacts, + select: [contacts.name, contacts.email, contacts.PrimaryKey], +}); + +// With filtering (using filter expressions) +import { eq, gt } from "@proofkit/fmodata"; + +const result = await db.webhook.add({ + webhook: "https://example.com/webhook", + tableName: contacts, + filter: eq(contacts.active, true), + select: [contacts.name, contacts.email], +}); + +// Complex filter example +const result = await db.webhook.add({ + webhook: "https://example.com/webhook", + tableName: users, + filter: and(eq(users.active, true), gt(users.age, 18)), + select: [users.username, users.email], +}); +``` + +**Webhook Configuration Properties:** + +- `webhook` (required) - The URL to call when the webhook is triggered +- `tableName` (required) - The `FMTable` instance for the table to monitor +- `headers` (optional) - Custom headers to include in webhook requests +- `notifySchemaChanges` (optional) - Whether to notify on schema changes +- `select` (optional) - Field selection as a string or array of `Column` references +- `filter` (optional) - Filter expression (string or `FilterExpression`) to limit which records trigger the webhook + +### 
Listing Webhooks + +Get all webhooks configured for the database: + +```typescript +const result = await db.webhook.list(); + +console.log(result.Status); // Status of the operation +console.log(result.WebHook); // Array of webhook configurations + +result.WebHook.forEach((webhook) => { + console.log(`Webhook ${webhook.webHookID}:`); + console.log(` Table: ${webhook.tableName}`); + console.log(` URL: ${webhook.url}`); + console.log(` Notify Schema Changes: ${webhook.notifySchemaChanges}`); + console.log(` Select: ${webhook.select}`); + console.log(` Filter: ${webhook.filter}`); + console.log(` Pending Operations: ${webhook.pendingOperations.length}`); +}); +``` + +### Getting a Webhook + +Retrieve a specific webhook by ID: + +```typescript +const webhook = await db.webhook.get(1); + +console.log(webhook.webHookID); +console.log(webhook.tableName); +console.log(webhook.url); +console.log(webhook.headers); +console.log(webhook.notifySchemaChanges); +console.log(webhook.select); +console.log(webhook.filter); +console.log(webhook.pendingOperations); +``` + +### Removing a Webhook + +Delete a webhook by ID: + +```typescript +await db.webhook.remove(1); +``` + +### Invoking a Webhook + +Manually trigger a webhook. 
This is useful for testing or triggering webhooks on-demand: + +```typescript +// Invoke for all rows matching the webhook's filter +await db.webhook.invoke(1); + +// Invoke for specific row IDs +await db.webhook.invoke(1, { rowIDs: [63, 61] }); +``` + +### Complete Example + +Here's a complete example of setting up and managing webhooks: + +```typescript +import { eq } from "@proofkit/fmodata"; + +// Add a webhook to monitor active contacts +const addResult = await db.webhook.add({ + webhook: "https://api.example.com/webhooks/contacts", + tableName: contacts, + headers: { + "X-API-Key": "your-api-key", + }, + filter: eq(contacts.active, true), + select: [contacts.name, contacts.email, contacts.PrimaryKey], + notifySchemaChanges: false, +}); + +const webhookId = addResult.webHookResult.webHookID; +console.log(`Created webhook with ID: ${webhookId}`); + +// List all webhooks +const listResult = await db.webhook.list(); +console.log(`Total webhooks: ${listResult.WebHook.length}`); + +// Get the webhook we just created +const webhook = await db.webhook.get(webhookId); +console.log(`Webhook URL: ${webhook.url}`); + +// Manually invoke the webhook for specific records +await db.webhook.invoke(webhookId, { rowIDs: [1, 2, 3] }); + +// Remove the webhook when done +await db.webhook.remove(webhookId); +``` + +**Note:** Webhooks are triggered automatically by FileMaker when records matching the webhook's filter are created, updated, or deleted. The `invoke()` method allows you to manually trigger webhooks for testing or on-demand processing. + ## Batch Operations Batch operations allow you to execute multiple queries and operations together in a single request. All operations in a batch are executed atomically - they all succeed or all fail together. This is both more efficient (fewer network round-trips) and ensures data consistency across related operations. 
@@ -1275,6 +1442,37 @@ const users = fmTableOccurrence( ); ``` +### Special Columns (ROWID and ROWMODID) + +FileMaker provides special columns `ROWID` and `ROWMODID` that uniquely identify records and track modifications. These can be included in query responses when enabled. + +Enable special columns at the database level: + +```typescript +const db = connection.database("MyDatabase", { + includeSpecialColumns: true, +}); + +const result = await db.from(users).list().execute(); +// result.data[0] will have ROWID and ROWMODID properties +``` + +Override at the request level: + +```typescript +// Enable for this request only +const result = await db.from(users).list().execute({ + includeSpecialColumns: true, +}); + +// Disable for this request +const result = await db.from(users).list().execute({ + includeSpecialColumns: false, +}); +``` + +**Important:** Special columns are only included when no `$select` query is applied (per OData specification). When using `.select()`, special columns are excluded even if `includeSpecialColumns` is enabled. + ### Error Handling All operations return a `Result` type with either `data` or `error`. The library provides rich error types that help you handle different error scenarios appropriately. 
diff --git a/packages/fmodata/package.json b/packages/fmodata/package.json index 72e875e0..7b51a9f0 100644 --- a/packages/fmodata/package.json +++ b/packages/fmodata/package.json @@ -1,6 +1,6 @@ { "name": "@proofkit/fmodata", - "version": "0.1.0-alpha.19", + "version": "0.1.0-alpha.20", "description": "FileMaker OData API client", "repository": "git@github.com:proofgeist/proofkit.git", "author": "Eric <37158449+eluce2@users.noreply.github.com>", @@ -63,7 +63,7 @@ "vite": "^6.3.4", "vite-plugin-dts": "^4.5.4", "vitest": "^4.0.7", - "zod": "4.1.12" + "zod": "^4.1.13" }, "engines": { "node": ">=18.0.0" diff --git a/packages/fmodata/scripts/capture-responses.ts b/packages/fmodata/scripts/capture-responses.ts index 7fa66266..fb52ab75 100644 --- a/packages/fmodata/scripts/capture-responses.ts +++ b/packages/fmodata/scripts/capture-responses.ts @@ -35,6 +35,7 @@ import path from "path"; import { fileURLToPath } from "url"; import { config } from "dotenv"; import { writeFileSync } from "fs"; +import * as prettier from "prettier"; import createClient from "@fetchkit/ffetch"; import { MOCK_SERVER_URL } from "../tests/utils/mock-server-url"; @@ -189,7 +190,7 @@ const queriesToCapture: { expectError?: boolean; execute: ( client: ReturnType, - ) => Promise<{ url: string; response: Response }>; + ) => Promise<{ url: string; method: string; response: Response }>; }[] = [ { name: "list-basic", @@ -199,7 +200,7 @@ const queriesToCapture: { const response = await client(path); // Get the full URL from the response const url = response.url; - return { url, response }; + return { url, method: "GET", response }; }, }, { @@ -209,7 +210,7 @@ const queriesToCapture: { const path = "/contacts?$select=name,PrimaryKey&$top=10"; const response = await client(path); const url = response.url; - return { url, response }; + return { url, method: "GET", response }; }, }, { @@ -219,7 +220,7 @@ const queriesToCapture: { const path = "/contacts?$orderby=name&$top=5"; const response = await 
client(path); const url = response.url; - return { url, response }; + return { url, method: "GET", response }; }, }, { @@ -229,7 +230,7 @@ const queriesToCapture: { const path = "/contacts?$top=2&$skip=2"; const response = await client(path); const url = response.url; - return { url, response }; + return { url, method: "GET", response }; }, }, @@ -248,7 +249,7 @@ const queriesToCapture: { }, }); const url = response.url; - return { url, response }; + return { url, method: "POST", response }; }, }, @@ -266,7 +267,7 @@ const queriesToCapture: { }); const url = response.url; - return { url, response }; + return { url, method: "POST", response }; }, }, { @@ -304,7 +305,7 @@ const queriesToCapture: { const path = `/contacts('${recordId}')`; const response = await client(path); const url = response.url; - return { url, response }; + return { url, method: "GET", response }; }, }, // Error cases - intentionally invalid queries to capture error responses @@ -316,7 +317,7 @@ const queriesToCapture: { const path = "/contacts?$select=InvalidFieldName"; const response = await client(path); const url = response.url; - return { url, response }; + return { url, method: "GET", response }; }, }, { @@ -327,7 +328,7 @@ const queriesToCapture: { const path = "/contacts?$orderby=InvalidFieldName"; const response = await client(path); const url = response.url; - return { url, response }; + return { url, method: "GET", response }; }, }, { @@ -339,7 +340,7 @@ const queriesToCapture: { const path = "/contacts('00000000-0000-0000-0000-000000000000')"; const response = await client(path); const url = response.url; - return { url, response }; + return { url, method: "GET", response }; }, }, { @@ -350,7 +351,7 @@ const queriesToCapture: { const path = "/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')/name"; const response = await client(path); const url = response.url; - return { url, response }; + return { url, method: "GET", response }; }, }, { @@ -361,7 +362,7 @@ const queriesToCapture: { 
const path = "/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')/users"; const response = await client(path); const url = response.url; - return { url, response }; + return { url, method: "GET", response }; }, }, { @@ -371,7 +372,7 @@ const queriesToCapture: { const path = "/contacts?$expand=users($select=not_real_field)"; const response = await client(path); const url = response.url; - return { url, response }; + return { url, method: "GET", response }; }, }, { @@ -382,7 +383,7 @@ const queriesToCapture: { "/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')?$expand=users"; const response = await client(path); const url = response.url; - return { url, response }; + return { url, method: "GET", response }; }, }, { @@ -393,7 +394,7 @@ const queriesToCapture: { "/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')?$expand=users($expand=user_customer($select=name))"; const response = await client(path); const url = response.url; - return { url, response }; + return { url, method: "GET", response }; }, }, { @@ -403,7 +404,123 @@ const queriesToCapture: { const path = `/contacts?$top=2&$expand=users($expand=user_customer($select=name))`; const response = await client(path); const url = response.url; - return { url, response }; + return { url, method: "GET", response }; + }, + }, + // Webhook API queries + { + name: "webhook-list", + description: "List all webhooks", + execute: async (client) => { + const path = "/Webhook.GetAll"; + const response = await client(path); + const url = response.url; + return { url, method: "GET", response }; + }, + }, + { + name: "webhook-add", + description: "Add a new webhook", + execute: async (client) => { + const path = "/Webhook.Add"; + const response = await client(path, { + method: "POST", + body: { + webhook: "https://example.com/webhook", + tableName: "contacts", + headers: { + "X-Custom-Header": "test-value", + }, + notifySchemaChanges: false, + select: "", + filter: "", + }, + }); + const url = response.url; + + // Clone the response 
before extracting the data + const cloned = response.clone(); + const newWebhookId = (await cloned.json()).webHookResult.webHookID; + await client(`/Webhook.Delete(${newWebhookId})`); + + return { url, method: "POST", response }; + }, + }, + { + name: "webhook-add-with-options", + description: "Add a new webhook", + execute: async (client) => { + const path = "/Webhook.Add"; + const response = await client(path, { + method: "POST", + body: { + webhook: "https://example.com/webhook", + tableName: "contacts", + headers: { + "X-Custom-Header": "test-value", + }, + notifySchemaChanges: false, + select: "name, age", + filter: "name eq 'John'", + }, + }); + const url = response.url; + + // Clone the response before extracting the data + const cloned = response.clone(); + const newWebhookId = (await cloned.json()).webHookResult.webHookID; + await client(`/Webhook.Delete(${newWebhookId})`); + + return { url, method: "POST", response }; + }, + }, + { + name: "webhook-get", + description: "Get a webhook by ID", + execute: async (client) => { + const listResponse = await client("/Webhook.GetAll"); + const listData = await listResponse.json(); + const webhookId = listData.WebHook?.[0]?.webHookID; + if (!webhookId) { + throw new Error("No webhook ID found"); + } + + // First, try to get webhook ID 1, or use a known ID if available + const path = `/Webhook.Get(${webhookId})`; + const response = await client(path); + const url = response.url; + return { url, method: "GET", response }; + }, + }, + { + name: "webhook-get-not-found", + description: "Error response for non-existent webhook", + expectError: true, + execute: async (client) => { + const path = "/Webhook.Get(99999)"; + const response = await client(path); + const url = response.url; + return { url, method: "GET", response }; + }, + }, + { + name: "webhook-delete", + description: "Delete a webhook by ID", + execute: async (client) => { + const listResponse = await client("/Webhook.GetAll"); + const listData = await 
listResponse.json(); + const webhookId = listData.WebHook?.[0]?.webHookID; + if (!webhookId) { + throw new Error("No webhook ID found"); + } + + // Use webhook ID 1, or a known ID if available + const path = `/Webhook.Delete(${webhookId})`; + const response = await client(path, { + method: "POST", + }); + const url = response.url; + return { url, method: "POST", response }; }, }, ]; @@ -489,7 +606,7 @@ function generateResponsesFile( * 2. Run: pnpm capture * 3. The captured response will be added to this file automatically * - * You can manually edit responses here if you need to modify test data. + * You MUST NOT manually edit this file. Any changes will be overwritten by the capture script. */ export type MockResponse = { @@ -539,7 +656,7 @@ async function main() { console.log(`Capturing: ${queryDef.name} - ${queryDef.description}`); // Execute the query directly with ffetch - const { url, response } = await queryDef.execute(client); + const { url, method, response } = await queryDef.execute(client); // Capture the response data (even for error status codes) const status = response.status; @@ -567,7 +684,7 @@ async function main() { // Store captured response (including error responses) capturedResponses[queryDef.name] = { url: sanitizedUrl, - method: "GET", + method, status, headers: contentType || location @@ -625,6 +742,8 @@ async function main() { serverUrl, ); + // For error cases, we don't have the method from execute, so default to GET + // This should rarely happen as most errors still return a response capturedResponses[queryDef.name] = { url: sanitizedUrl, method: "GET", @@ -669,7 +788,13 @@ async function main() { "../tests/fixtures/responses.ts", ); const fileContent = generateResponsesFile(capturedResponses); - writeFileSync(fixturesPath, fileContent, "utf-8"); + + // Format the file content with prettier + const formattedContent = await prettier.format(fileContent, { + filepath: fixturesPath, + }); + + writeFileSync(fixturesPath, formattedContent, 
"utf-8"); console.log(`\nResponses written to: ${fixturesPath}`); console.log("\nYou can now use these mocks in your tests!"); diff --git a/packages/fmodata/scripts/experiment-batch.ts b/packages/fmodata/scripts/experiment-batch.ts deleted file mode 100644 index 44174f20..00000000 --- a/packages/fmodata/scripts/experiment-batch.ts +++ /dev/null @@ -1,614 +0,0 @@ -/** - * Batch Operations Experiment Script - * - * This script experiments with batch operations containing inserts, updates, - * and deletes to understand how FileMaker handles them, especially when - * some operations fail. - * - * Usage: - * cd packages/fmodata && pnpm tsx scripts/experiment-batch.ts - */ - -import { config } from "dotenv"; -import path from "path"; -import { fileURLToPath } from "url"; -import { z } from "zod/v4"; -import { - FMServerConnection, - fmTableOccurrence, - textField, - timestampField, - eq, -} from "../src/index"; - -// Get __dirname equivalent in ES modules -const __filename = fileURLToPath(import.meta.url); -const __dirname = path.dirname(__filename); - -// Load environment variables -config({ path: path.resolve(__dirname, "../.env.local") }); - -const serverUrl = process.env.FMODATA_SERVER_URL; -const username = process.env.FMODATA_USERNAME; -const password = process.env.FMODATA_PASSWORD; -const database = process.env.FMODATA_DATABASE; - -if (!serverUrl || !username || !password || !database) { - throw new Error( - "Environment variables required: FMODATA_SERVER_URL, FMODATA_USERNAME, FMODATA_PASSWORD, FMODATA_DATABASE", - ); -} - -// Define schemas -const contactsTO = fmTableOccurrence("contacts", { - PrimaryKey: textField().primaryKey(), - CreationTimestamp: timestampField(), - CreatedBy: textField(), - ModificationTimestamp: timestampField(), - ModifiedBy: textField(), - name: textField(), - hobby: textField(), - id_user: textField(), -}); - -// Create connection -const connection = new FMServerConnection({ - serverUrl, - auth: { username, password }, -}); - -const db 
= connection.database(database, { - occurrences: [contactsTO], -}); - -// Track created records for cleanup -const createdRecordIds: string[] = []; - -async function cleanup() { - console.log("\n🧹 Cleaning up created records..."); - for (const id of createdRecordIds) { - try { - await db.from("contacts").delete().byId(id).execute(); - console.log(` Deleted: ${id}`); - } catch (error) { - console.log(` Failed to delete ${id}:`, error); - } - } -} - -async function experiment1_MultipleInserts() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 1: Multiple Inserts in a Batch"); - console.log("=".repeat(60)); - - const timestamp = Date.now(); - const insert1 = db.from("contacts").insert({ - name: `Batch Insert 1 - ${timestamp}`, - hobby: "Insert Test", - }); - - const insert2 = db.from("contacts").insert({ - name: `Batch Insert 2 - ${timestamp}`, - hobby: "Insert Test", - }); - - const insert3 = db.from("contacts").insert({ - name: `Batch Insert 3 - ${timestamp}`, - hobby: "Insert Test", - }); - - console.log("\nExecuting batch with 3 insert operations..."); - - const result = await db.batch([insert1, insert2, insert3]).execute(); - - console.log("\nResult:"); - console.log(JSON.stringify(result, null, 2)); - - if (result.data) { - // Track for cleanup - for (const item of result.data) { - if (item && typeof item === "object" && "PrimaryKey" in item) { - createdRecordIds.push(item.PrimaryKey as string); - } - } - } - - return result; -} - -async function experiment2_MixedOperations() { - console.log("\n" + "=".repeat(60)); - console.log( - "EXPERIMENT 2: Mixed Operations (GET + INSERT + UPDATE + DELETE)", - ); - console.log("=".repeat(60)); - - // First, create a record we can update/delete - const timestamp = Date.now(); - const setupResult = await db - .from("contacts") - .insert({ - name: `Setup Record - ${timestamp}`, - hobby: "Will be updated", - }) - .execute(); - - if (setupResult.error || !setupResult.data) { - console.log("Failed to create 
setup record:", setupResult.error); - return; - } - - const setupRecordId = setupResult.data.PrimaryKey; - console.log(`\nCreated setup record: ${setupRecordId}`); - - // Now create a batch with mixed operations - const listQuery = db.from("contacts").list().top(2); - - const insertOp = db.from("contacts").insert({ - name: `Mixed Batch Insert - ${timestamp}`, - hobby: "Mixed Test", - }); - - const updateOp = db - .from("contacts") - .update({ hobby: "Updated via batch" }) - .byId(setupRecordId); - - const deleteOp = db.from("contacts").delete().byId(setupRecordId); - - console.log("\nExecuting batch with: GET, INSERT, UPDATE, DELETE..."); - - const result = await db - .batch([listQuery, insertOp, updateOp, deleteOp]) - .execute(); - - console.log("\nResult:"); - console.log(JSON.stringify(result, null, 2)); - - if (result.data) { - // Track insert result for cleanup - const insertResult = result.data[1]; - if ( - insertResult && - typeof insertResult === "object" && - "PrimaryKey" in insertResult - ) { - createdRecordIds.push(insertResult.PrimaryKey as string); - } - } - - return result; -} - -async function experiment3_FailingOperation() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 3: Batch with a Failing Operation in the Middle"); - console.log("=".repeat(60)); - - const timestamp = Date.now(); - - // Create a valid insert - const insert1 = db.from("contacts").insert({ - name: `Before Failure - ${timestamp}`, - hobby: "Should succeed", - }); - - // Try to update a non-existent record (should fail) - const failingUpdate = db - .from("contacts") - .update({ hobby: "This should fail" }) - .byId("00000000-0000-0000-0000-000000000000"); - - // Another valid insert (should this succeed or fail?) 
- const insert2 = db.from("contacts").insert({ - name: `After Failure - ${timestamp}`, - hobby: "Should this succeed?", - }); - - console.log( - "\nExecuting batch with: INSERT (valid), UPDATE (invalid ID), INSERT (valid)...", - ); - console.log( - "Question: What happens to the third operation when the second fails?", - ); - - const result = await db.batch([insert1, failingUpdate, insert2]).execute(); - - console.log("\nResult:"); - console.log(JSON.stringify(result, null, 2)); - - if (result.data) { - for (const item of result.data) { - if (item && typeof item === "object" && "PrimaryKey" in item) { - createdRecordIds.push(item.PrimaryKey as string); - } - } - } - - return result; -} - -async function experiment4_FailingDelete() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 4: Batch with a Failing Delete"); - console.log("=".repeat(60)); - - const timestamp = Date.now(); - - // Create a valid insert - const insert1 = db.from("contacts").insert({ - name: `Before Delete Fail - ${timestamp}`, - hobby: "Should succeed", - }); - - // Try to delete a non-existent record - const failingDelete = db - .from("contacts") - .delete() - .byId("00000000-0000-0000-0000-000000000000"); - - // Another valid insert - const insert2 = db.from("contacts").insert({ - name: `After Delete Fail - ${timestamp}`, - hobby: "Should this succeed?", - }); - - console.log("\nExecuting batch with: INSERT, DELETE (invalid ID), INSERT..."); - - const result = await db.batch([insert1, failingDelete, insert2]).execute(); - - console.log("\nResult:"); - console.log(JSON.stringify(result, null, 2)); - - if (result.data) { - for (const item of result.data) { - if (item && typeof item === "object" && "PrimaryKey" in item) { - createdRecordIds.push(item.PrimaryKey as string); - } - } - } - - return result; -} - -async function experiment5_AllGetWithOneFailure() { - console.log("\n" + "=".repeat(60)); - console.log( - "EXPERIMENT 5: Multiple GETs with One Filter that Returns Nothing", 
- ); - console.log("=".repeat(60)); - - // Query that should return results - const query1 = db.from("contacts").list().top(2); - - // Query with a filter that returns empty (not an error, just no results) - const query2 = db - .from(contactsTO) - .list() - .where(eq(contactsTO.name, "THIS_NAME_DEFINITELY_DOES_NOT_EXIST_12345")); - - // Another query that should return results - const query3 = db.from("contacts").list().top(1); - - console.log( - "\nExecuting batch with: GET (valid), GET (empty filter), GET (valid)...", - ); - - const result = await db.batch([query1, query2, query3]).execute(); - - console.log("\nResult:"); - console.log(JSON.stringify(result, null, 2)); - - return result; -} - -async function experiment6_RawResponseInspection() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 6: Raw Response Inspection - Direct Fetch"); - console.log("=".repeat(60)); - - // Make a direct batch request to see raw response - const timestamp = Date.now(); - const boundary = "batch_direct_test_123"; - - const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; - const batchUrl = `${baseUrl}/$batch`; - - // Build a simple batch body with one GET - const batchBody = [ - `--${boundary}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `GET ${baseUrl}/contacts?$top=1 HTTP/1.1`, - "", - "", - `--${boundary}--`, - ].join("\r\n"); - - console.log("\n--- Sending Request ---"); - console.log("URL:", batchUrl); - console.log("Body:", batchBody); - - const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`; - - const response = await fetch(batchUrl, { - method: "POST", - headers: { - Authorization: authHeader, - "Content-Type": `multipart/mixed; boundary=${boundary}`, - "OData-Version": "4.0", - }, - body: batchBody, - }); - - console.log("\n--- Response Info ---"); - console.log("Status:", response.status, response.statusText); - console.log("Content-Type:", response.headers.get("content-type")); 
- - const responseText = await response.text(); - console.log("\n--- Raw Response Body ---"); - console.log(responseText); - console.log("--- End Raw Response ---"); -} - -async function experiment7_RawResponseWithInsert() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 7: Raw Response - Insert with Prefer header"); - console.log("=".repeat(60)); - - const timestamp = Date.now(); - const boundary = "batch_insert_test_456"; - const changesetBoundary = "changeset_insert_789"; - - const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; - const batchUrl = `${baseUrl}/$batch`; - - const insertBody = JSON.stringify({ - name: `Direct Insert Test - ${timestamp}`, - hobby: "Testing", - }); - - // Build a batch with INSERT using return=representation - const batchBody = [ - `--${boundary}`, - `Content-Type: multipart/mixed; boundary=${changesetBoundary}`, - "", - `--${changesetBoundary}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `POST ${baseUrl}/contacts HTTP/1.1`, - "Content-Type: application/json", - "Prefer: return=representation", - `Content-Length: ${insertBody.length}`, - "", - insertBody, - `--${changesetBoundary}--`, - `--${boundary}--`, - ].join("\r\n"); - - console.log("\n--- Sending Insert Request ---"); - console.log("Body:\n", batchBody); - - const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`; - - const response = await fetch(batchUrl, { - method: "POST", - headers: { - Authorization: authHeader, - "Content-Type": `multipart/mixed; boundary=${boundary}`, - "OData-Version": "4.0", - }, - body: batchBody, - }); - - console.log("\n--- Response Info ---"); - console.log("Status:", response.status, response.statusText); - console.log("Content-Type:", response.headers.get("content-type")); - - const responseText = await response.text(); - console.log("\n--- Raw Response Body ---"); - console.log(responseText); - console.log("--- End Raw Response ---"); - - // Try to 
extract created record ID for cleanup - const pkMatch = responseText.match(/"PrimaryKey":\s*"([^"]+)"/); - if (pkMatch && pkMatch[1]) { - createdRecordIds.push(pkMatch[1]); - console.log("\nCreated record ID:", pkMatch[1]); - } -} - -async function experiment8_TrueError() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 8: Raw Response - Query Non-Existent Table"); - console.log("=".repeat(60)); - - const boundary = "batch_error_test"; - const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; - const batchUrl = `${baseUrl}/$batch`; - - // Build: GET (valid), GET (non-existent table), GET (valid) - const batchBody = [ - `--${boundary}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `GET ${baseUrl}/contacts?$top=1 HTTP/1.1`, - "", - "", - `--${boundary}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `GET ${baseUrl}/THIS_TABLE_DOES_NOT_EXIST?$top=1 HTTP/1.1`, - "", - "", - `--${boundary}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `GET ${baseUrl}/contacts?$top=2 HTTP/1.1`, - "", - "", - `--${boundary}--`, - ].join("\r\n"); - - console.log("\n--- Sending Request with Non-Existent Table ---"); - - const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`; - - const response = await fetch(batchUrl, { - method: "POST", - headers: { - Authorization: authHeader, - "Content-Type": `multipart/mixed; boundary=${boundary}`, - "OData-Version": "4.0", - }, - body: batchBody, - }); - - console.log("\n--- Response Info ---"); - console.log("Status:", response.status, response.statusText); - - const responseText = await response.text(); - console.log("\n--- Raw Response Body ---"); - console.log(responseText); - console.log("--- End Raw Response ---"); -} - -async function experiment9_RawResponseWithFailure() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 9: Raw Response - Mixed with Failure"); - 
console.log("=".repeat(60)); - - const timestamp = Date.now(); - const boundary = "batch_fail_test"; - const cs1 = "changeset_1"; - const cs2 = "changeset_2"; - - const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; - const batchUrl = `${baseUrl}/$batch`; - - const insertBody1 = JSON.stringify({ - name: `Before Fail - ${timestamp}`, - hobby: "Test", - }); - const updateBody = JSON.stringify({ hobby: "Should fail" }); - const insertBody2 = JSON.stringify({ - name: `After Fail - ${timestamp}`, - hobby: "Test", - }); - - // Build: INSERT (valid), UPDATE (invalid ID), INSERT (valid) - const batchBody = [ - // First changeset: valid insert - `--${boundary}`, - `Content-Type: multipart/mixed; boundary=${cs1}`, - "", - `--${cs1}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `POST ${baseUrl}/contacts HTTP/1.1`, - "Content-Type: application/json", - "Prefer: return=representation", - `Content-Length: ${insertBody1.length}`, - "", - insertBody1, - `--${cs1}--`, - // Second changeset: invalid update - `--${boundary}`, - `Content-Type: multipart/mixed; boundary=${cs2}`, - "", - `--${cs2}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `PATCH ${baseUrl}/contacts('00000000-0000-0000-0000-000000000000') HTTP/1.1`, - "Content-Type: application/json", - `Content-Length: ${updateBody.length}`, - "", - updateBody, - `--${cs2}--`, - // Third changeset: valid insert - `--${boundary}`, - `Content-Type: multipart/mixed; boundary=changeset_3`, - "", - `--changeset_3`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `POST ${baseUrl}/contacts HTTP/1.1`, - "Content-Type: application/json", - "Prefer: return=representation", - `Content-Length: ${insertBody2.length}`, - "", - insertBody2, - `--changeset_3--`, - `--${boundary}--`, - ].join("\r\n"); - - console.log("\n--- Sending Mixed Request with Invalid Update ---"); - - const authHeader = `Basic 
${Buffer.from(`${username}:${password}`).toString("base64")}`; - - const response = await fetch(batchUrl, { - method: "POST", - headers: { - Authorization: authHeader, - "Content-Type": `multipart/mixed; boundary=${boundary}`, - "OData-Version": "4.0", - }, - body: batchBody, - }); - - console.log("\n--- Response Info ---"); - console.log("Status:", response.status, response.statusText); - - const responseText = await response.text(); - console.log("\n--- Raw Response Body ---"); - console.log(responseText); - console.log("--- End Raw Response ---"); - - // Extract created record IDs for cleanup - const pkMatches = responseText.matchAll(/"PrimaryKey":\s*"([^"]+)"/g); - for (const match of pkMatches) { - if (match[1]) { - createdRecordIds.push(match[1]); - console.log("Created record ID:", match[1]); - } - } -} - -async function main() { - console.log("🔬 Batch Operations Experiment"); - console.log("================================"); - console.log(`Server: ${serverUrl}`); - console.log(`Database: ${database}`); - console.log(""); - - try { - // Run experiments - await experiment1_MultipleInserts(); - await experiment2_MixedOperations(); - await experiment3_FailingOperation(); - await experiment4_FailingDelete(); - await experiment5_AllGetWithOneFailure(); - await experiment6_RawResponseInspection(); - await experiment7_RawResponseWithInsert(); - await experiment8_TrueError(); - await experiment9_RawResponseWithFailure(); - - console.log("\n" + "=".repeat(60)); - console.log("ALL EXPERIMENTS COMPLETE"); - console.log("=".repeat(60)); - } catch (error) { - console.error("\n❌ Experiment failed with error:", error); - } finally { - await cleanup(); - } -} - -main().catch(console.error); diff --git a/packages/fmodata/scripts/test-webhooks.ts b/packages/fmodata/scripts/test-webhooks.ts new file mode 100644 index 00000000..c7e4dc96 --- /dev/null +++ b/packages/fmodata/scripts/test-webhooks.ts @@ -0,0 +1,237 @@ +/** + * Webhook API Test Script + * + * This script tests all 
webhook methods against FileMaker Server + * to understand the exact format and types returned. + * + * Usage: + * bun run scripts/test-webhooks.ts + */ + +import { config } from "dotenv"; +import path from "path"; +import { fileURLToPath } from "url"; +import { + FMServerConnection, + fmTableOccurrence, + textField, +} from "@proofkit/fmodata"; + +// Get __dirname equivalent in ES modules +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// Load environment variables +config({ path: path.resolve(__dirname, "../.env.local") }); + +const serverUrl = process.env.FMODATA_SERVER_URL; +const apiKey = process.env.FMODATA_API_KEY; +const username = process.env.FMODATA_USERNAME; +const password = process.env.FMODATA_PASSWORD; +const database = process.env.FMODATA_DATABASE; + +if (!serverUrl) { + throw new Error("FMODATA_SERVER_URL environment variable is required"); +} + +if (!database) { + throw new Error("FMODATA_DATABASE environment variable is required"); +} + +// Use API key if available, otherwise username/password +const auth = apiKey + ? { apiKey } + : username && password + ? 
{ username, password } + : null; + +if (!auth) { + throw new Error( + "Either FMODATA_API_KEY or (FMODATA_USERNAME and FMODATA_PASSWORD) environment variables are required", + ); +} + +// Create a simple table occurrence for testing +const contacts = fmTableOccurrence("contacts", { + PrimaryKey: textField().primaryKey(), + name: textField(), +}); + +async function testWebhookMethods() { + console.log("FileMaker OData Webhook API Test"); + console.log("=================================\n"); + + const connection = new FMServerConnection({ + serverUrl, + auth, + }); + + const db = connection.database(database!); + + try { + // Test 1: List all webhooks + console.log("=== Test 1: List All Webhooks ===\n"); + try { + const listResult = await db.webhook.list(); + console.log("✅ list() succeeded"); + console.log("Type:", typeof listResult); + console.log("Is Array:", Array.isArray(listResult)); + console.log("Result structure:"); + console.log(JSON.stringify(listResult, null, 2)); + console.log("\nTypeScript type should be:"); + console.log(" { Status: string; WebHook: Array<{ webHookID: number; tableName: string; url: string; ... 
}> }"); + console.log("\n"); + } catch (error: any) { + console.log("❌ list() failed:", error.message); + console.log("Error:", error); + console.log("\n"); + } + + // Test 2: Add a webhook + console.log("=== Test 2: Add Webhook ===\n"); + let webhookId: string | number | undefined; + try { + const addResult = await db.webhook.add({ + webhook: "https://example.com/webhook", + tableName: contacts, + headers: { "X-Custom-Header": "test-value" }, + }); + console.log("✅ add() succeeded"); + console.log("Type:", typeof addResult); + console.log("Is Array:", Array.isArray(addResult)); + console.log("Result structure:"); + console.log(JSON.stringify(addResult, null, 2)); + console.log("\nTypeScript type should be:"); + console.log(" { webHookResult: { webHookID: number } }"); + + // Try to extract webhook ID from nested structure + if (typeof addResult === "object" && addResult !== null) { + if ("webHookResult" in addResult) { + const webHookResult = (addResult as any).webHookResult; + if (webHookResult && "webHookID" in webHookResult) { + webhookId = webHookResult.webHookID; + } + } else if ("id" in addResult) { + webhookId = (addResult as any).id; + } else if ("ID" in addResult) { + webhookId = (addResult as any).ID; + } else if ("webhookId" in addResult) { + webhookId = (addResult as any).webhookId; + } + } + console.log("Extracted webhook ID:", webhookId); + console.log("\n"); + } catch (error: any) { + console.log("❌ add() failed:", error.message); + console.log("Error:", error); + console.log("\n"); + } + + // Test 3: Get a webhook (if we have an ID) + if (webhookId !== undefined) { + console.log("=== Test 3: Get Webhook ===\n"); + try { + const getResult = await db.webhook.get(webhookId); + console.log("✅ get() succeeded"); + console.log("Type:", typeof getResult); + console.log("Is Array:", Array.isArray(getResult)); + console.log("Result structure:"); + console.log(JSON.stringify(getResult, null, 2)); + console.log("\nTypeScript type should be:"); + console.log(" 
{ webHookID: number; tableName: string; url: string; headers?: Record; notifySchemaChanges: boolean; select: string; filter: string; pendingOperations: unknown[] }"); + console.log("\n"); + } catch (error: any) { + console.log("❌ get() failed:", error.message); + console.log("Error:", error); + console.log("\n"); + } + } else { + console.log("=== Test 3: Get Webhook ===\n"); + console.log("⚠️ Skipping - no webhook ID available from add()"); + console.log("\n"); + } + + // Test 4: Invoke a webhook (if we have an ID) + if (webhookId !== undefined) { + console.log("=== Test 4: Invoke Webhook (without rowIDs) ===\n"); + try { + const invokeResult = await db.webhook.invoke(webhookId); + console.log("✅ invoke() succeeded (no rowIDs)"); + console.log("Type:", typeof invokeResult); + console.log("Is Array:", Array.isArray(invokeResult)); + console.log("Result:", JSON.stringify(invokeResult, null, 2)); + console.log("\n"); + } catch (error: any) { + console.log("❌ invoke() failed:", error.message); + console.log("Error:", error); + console.log("\n"); + } + + console.log("=== Test 5: Invoke Webhook (with rowIDs) ===\n"); + try { + const invokeResult = await db.webhook.invoke(webhookId, { + rowIDs: [1, 2, 3], + }); + console.log("✅ invoke() succeeded (with rowIDs)"); + console.log("Type:", typeof invokeResult); + console.log("Is Array:", Array.isArray(invokeResult)); + console.log("Result:", JSON.stringify(invokeResult, null, 2)); + console.log("\n"); + } catch (error: any) { + console.log("❌ invoke() failed:", error.message); + console.log("Error:", error); + console.log("\n"); + } + } else { + console.log("=== Test 4 & 5: Invoke Webhook ===\n"); + console.log("⚠️ Skipping - no webhook ID available from add()"); + console.log("\n"); + } + + // Test 6: Remove a webhook (if we have an ID) + if (webhookId !== undefined) { + console.log("=== Test 6: Remove Webhook ===\n"); + try { + await db.webhook.remove(webhookId); + console.log("✅ remove() succeeded"); + console.log("(remove 
returns void, no data)"); + console.log("\n"); + } catch (error: any) { + console.log("❌ remove() failed:", error.message); + console.log("Error:", error); + console.log("\n"); + } + } else { + console.log("=== Test 6: Remove Webhook ===\n"); + console.log("⚠️ Skipping - no webhook ID available from add()"); + console.log("\n"); + } + + // Test 7: Try to get a webhook that doesn't exist (error case) + console.log("=== Test 7: Get Non-Existent Webhook (Error Case) ===\n"); + try { + await db.webhook.get(99999); + console.log("⚠️ get() succeeded (unexpected - webhook should not exist)"); + console.log("\n"); + } catch (error: any) { + console.log("✅ get() failed as expected"); + console.log("Error type:", error.constructor.name); + console.log("Error message:", error.message); + console.log("Error:", JSON.stringify(error, Object.getOwnPropertyNames(error), 2)); + console.log("\n"); + } + + } catch (error: any) { + console.error("\n❌ Test script failed:", error); + throw error; + } + + console.log("================================="); + console.log("All tests complete!"); +} + +testWebhookMethods().catch((error) => { + console.error("Test script failed:", error); + process.exit(1); +}); + diff --git a/packages/fmodata/src/client/builders/default-select.ts b/packages/fmodata/src/client/builders/default-select.ts index 0256db05..21bb1e4d 100644 --- a/packages/fmodata/src/client/builders/default-select.ts +++ b/packages/fmodata/src/client/builders/default-select.ts @@ -20,9 +20,13 @@ function getContainerFieldNames(table: FMTable): string[] { * Gets default select fields from a table definition. * Returns undefined if defaultSelect is "all". * Automatically filters out container fields since they cannot be selected via $select. 
+ * + * @param table - The table occurrence + * @param includeSpecialColumns - If true, includes ROWID and ROWMODID when defaultSelect is "schema" */ export function getDefaultSelectFields( table: FMTable | undefined, + includeSpecialColumns?: boolean, ): string[] | undefined { if (!table) return undefined; @@ -33,7 +37,14 @@ export function getDefaultSelectFields( const baseTableConfig = getBaseTableConfig(table); const allFields = Object.keys(baseTableConfig.schema); // Filter out container fields - return [...new Set(allFields.filter((f) => !containerFields.includes(f)))]; + const fields = [...new Set(allFields.filter((f) => !containerFields.includes(f)))]; + + // Add special columns if requested + if (includeSpecialColumns) { + fields.push("ROWID", "ROWMODID"); + } + + return fields; } if (Array.isArray(defaultSelect)) { diff --git a/packages/fmodata/src/client/builders/expand-builder.ts b/packages/fmodata/src/client/builders/expand-builder.ts index 89d5ae20..97373eba 100644 --- a/packages/fmodata/src/client/builders/expand-builder.ts +++ b/packages/fmodata/src/client/builders/expand-builder.ts @@ -40,7 +40,7 @@ export class ExpandBuilder { return configs.map((config) => { const targetTable = config.targetTable; - let targetSchema: Record | undefined; + let targetSchema: Partial> | undefined; if (targetTable) { const baseTableConfig = getBaseTableConfig(targetTable); const containerFields = baseTableConfig.containerFields || []; diff --git a/packages/fmodata/src/client/builders/query-string-builder.ts b/packages/fmodata/src/client/builders/query-string-builder.ts index a9fb68df..ee3694dd 100644 --- a/packages/fmodata/src/client/builders/query-string-builder.ts +++ b/packages/fmodata/src/client/builders/query-string-builder.ts @@ -17,12 +17,18 @@ export function buildSelectExpandQueryString(config: { table?: FMTable; useEntityIds: boolean; logger: InternalLogger; + includeSpecialColumns?: boolean; }): string { const parts: string[] = []; const expandBuilder = 
new ExpandBuilder(config.useEntityIds, config.logger); // Build $select if (config.selectedFields && config.selectedFields.length > 0) { + // Important: do NOT implicitly add system columns (ROWID/ROWMODID) here. + // - `includeSpecialColumns` controls the Prefer header + response parsing, but should not + // mutate/expand an explicit `$select` (e.g. when the user calls `.select({ ... })`). + // - If system columns are desired with `.select()`, they must be explicitly included via + // the `systemColumns` argument, which will already have added them to `selectedFields`. const selectString = formatSelectFields( config.selectedFields, config.table, diff --git a/packages/fmodata/src/client/builders/response-processor.ts b/packages/fmodata/src/client/builders/response-processor.ts index 783b1a72..9f171d15 100644 --- a/packages/fmodata/src/client/builders/response-processor.ts +++ b/packages/fmodata/src/client/builders/response-processor.ts @@ -17,6 +17,7 @@ export interface ProcessResponseConfig { expandValidationConfigs?: ExpandValidationConfig[]; skipValidation?: boolean; useEntityIds?: boolean; + includeSpecialColumns?: boolean; // Mapping from field names to output keys (for renamed fields in select) fieldMapping?: Record; } @@ -37,6 +38,7 @@ export async function processODataResponse( expandValidationConfigs, skipValidation, useEntityIds, + includeSpecialColumns, fieldMapping, } = config; @@ -67,6 +69,9 @@ export async function processODataResponse( } // Validation path + // Note: Special columns are excluded when using QueryBuilder.single() method, + // but included for RecordBuilder.get() method (both use singleMode: "exact") + // The exclusion is handled in QueryBuilder's processQueryResponse, not here if (singleMode !== false) { const validation = await validateSingleResponse( response, @@ -74,6 +79,7 @@ export async function processODataResponse( selectedFields as any, expandValidationConfigs, singleMode, + includeSpecialColumns, ); if (!validation.valid) { 
@@ -96,6 +102,7 @@ export async function processODataResponse( schema, selectedFields as any, expandValidationConfigs, + includeSpecialColumns, ); if (!validation.valid) { @@ -223,6 +230,7 @@ export async function processQueryResponse( expandConfigs: ExpandConfig[]; skipValidation?: boolean; useEntityIds?: boolean; + includeSpecialColumns?: boolean; // Mapping from field names to output keys (for renamed fields in select) fieldMapping?: Record; logger: InternalLogger; @@ -235,6 +243,7 @@ export async function processQueryResponse( expandConfigs, skipValidation, useEntityIds, + includeSpecialColumns, fieldMapping, logger, } = config; @@ -258,6 +267,7 @@ export async function processQueryResponse( expandValidationConfigs, skipValidation, useEntityIds, + includeSpecialColumns, }); // Rename fields if field mapping is provided (for renamed fields in select) diff --git a/packages/fmodata/src/client/database.ts b/packages/fmodata/src/client/database.ts index b4eba64d..2017e8b3 100644 --- a/packages/fmodata/src/client/database.ts +++ b/packages/fmodata/src/client/database.ts @@ -4,10 +4,26 @@ import { EntitySet } from "./entity-set"; import { BatchBuilder } from "./batch-builder"; import { SchemaManager } from "./schema-manager"; import { FMTable } from "../orm/table"; +import { WebhookManager } from "./webhook-builder"; -export class Database { +type MetadataArgs = { + format?: "xml" | "json"; + /** + * If provided, only the metadata for the specified table will be returned. + * Requires FileMaker Server 22.0.4 or later. + */ + tableName?: string; + /** + * If true, a reduced payload size will be returned by omitting certain annotations. 
+ */ + reduceAnnotations?: boolean; +}; + +export class Database { private _useEntityIds: boolean = false; + private _includeSpecialColumns: IncludeSpecialColumns; public readonly schema: SchemaManager; + public readonly webhook: WebhookManager; constructor( private readonly databaseName: string, @@ -19,14 +35,24 @@ export class Database { * If set to false but some occurrences do not use entity IDs, an error will be thrown */ useEntityIds?: boolean; + /** + * Whether to include special columns (ROWID and ROWMODID) in responses. + * Note: Special columns are only included when there is no $select query. + */ + includeSpecialColumns?: IncludeSpecialColumns; }, ) { // Initialize schema manager this.schema = new SchemaManager(this.databaseName, this.context); + this.webhook = new WebhookManager(this.databaseName, this.context); this._useEntityIds = config?.useEntityIds ?? false; + this._includeSpecialColumns = (config?.includeSpecialColumns ?? + false) as IncludeSpecialColumns; } - from>(table: T): EntitySet { + from>( + table: T, + ): EntitySet { // Only override database-level useEntityIds if table explicitly sets it // (not if it's undefined, which would override the database setting) if ( @@ -37,7 +63,7 @@ export class Database { this._useEntityIds = tableUseEntityIds; } } - return new EntitySet({ + return new EntitySet({ occurrence: table as T, databaseName: this.databaseName, context: this.context, @@ -49,19 +75,35 @@ export class Database { * Retrieves the OData metadata for this database. * @param args Optional configuration object * @param args.format The format to retrieve metadata in. Defaults to "json". + * @param args.tableName If provided, only the metadata for the specified table will be returned. Requires FileMaker Server 22.0.4 or later. + * @param args.reduceAnnotations If true, a reduced payload size will be returned by omitting certain annotations. 
* @returns The metadata in the specified format */ - async getMetadata(args: { format: "xml" }): Promise; - async getMetadata(args?: { format?: "json" }): Promise; - async getMetadata(args?: { - format?: "xml" | "json"; - }): Promise { + async getMetadata(args: { format: "xml" } & MetadataArgs): Promise; + async getMetadata( + args?: { format?: "json" } & MetadataArgs, + ): Promise; + async getMetadata(args?: MetadataArgs): Promise { + // Build the URL - if tableName is provided, append %23{tableName} to the path + let url = `/${this.databaseName}/$metadata`; + if (args?.tableName) { + url = `/${this.databaseName}/$metadata%23${args.tableName}`; + } + + // Build headers + const headers: Record = { + Accept: args?.format === "xml" ? "application/xml" : "application/json", + }; + + // Add Prefer header if reduceAnnotations is true + if (args?.reduceAnnotations) { + headers["Prefer"] = 'include-annotations="-*"'; + } + const result = await this.context._makeRequest< Record | string - >(`/${this.databaseName}/$metadata`, { - headers: { - Accept: args?.format === "xml" ? 
"application/xml" : "application/json", - }, + >(url, { + headers, }); if (result.error) { throw result.error; diff --git a/packages/fmodata/src/client/delete-builder.ts b/packages/fmodata/src/client/delete-builder.ts index 0df96248..fd742f2a 100644 --- a/packages/fmodata/src/client/delete-builder.ts +++ b/packages/fmodata/src/client/delete-builder.ts @@ -2,7 +2,7 @@ import type { ExecutionContext, ExecutableBuilder, Result, - WithSystemFields, + WithSpecialColumns, ExecuteOptions, ExecuteMethodOptions, } from "../types"; @@ -26,17 +26,21 @@ export class DeleteBuilder> { private context: ExecutionContext; private table: Occ; private databaseUseEntityIds: boolean; + private databaseIncludeSpecialColumns: boolean; constructor(config: { occurrence: Occ; databaseName: string; context: ExecutionContext; databaseUseEntityIds?: boolean; + databaseIncludeSpecialColumns?: boolean; }) { this.table = config.occurrence; this.databaseName = config.databaseName; this.context = config.context; this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; + this.databaseIncludeSpecialColumns = + config.databaseIncludeSpecialColumns ?? 
false; } /** diff --git a/packages/fmodata/src/client/entity-set.ts b/packages/fmodata/src/client/entity-set.ts index fb03d177..a43afd71 100644 --- a/packages/fmodata/src/client/entity-set.ts +++ b/packages/fmodata/src/client/entity-set.ts @@ -19,6 +19,7 @@ import { getDefaultSelect, getTableName, getTableColumns, + getTableSchema, } from "../orm/table"; import type { FieldBuilder } from "../orm/field-builders"; import { createLogger, InternalLogger } from "../logger"; @@ -41,16 +42,20 @@ type ExtractColumnsFromOcc = : never : never; -export class EntitySet> { +export class EntitySet< + Occ extends FMTable, + DatabaseIncludeSpecialColumns extends boolean = false, +> { private occurrence: Occ; private databaseName: string; private context: ExecutionContext; - private database: Database; // Database instance for accessing occurrences + private database: Database; // Database instance for accessing occurrences private isNavigateFromEntitySet?: boolean; private navigateRelation?: string; private navigateSourceTableName?: string; private navigateBasePath?: string; // Full base path for chained navigations private databaseUseEntityIds: boolean; + private databaseIncludeSpecialColumns: DatabaseIncludeSpecialColumns; private logger: InternalLogger; constructor(config: { @@ -66,17 +71,23 @@ export class EntitySet> { // Get useEntityIds from database if available, otherwise default to false this.databaseUseEntityIds = (config.database as any)?._useEntityIds ?? false; + // Get includeSpecialColumns from database if available, otherwise default to false + this.databaseIncludeSpecialColumns = + (config.database as any)?._includeSpecialColumns ?? false; this.logger = config.context?._getLogger?.() ?? 
createLogger(); } // Type-only method to help TypeScript infer the schema from table - static create>(config: { + static create< + Occ extends FMTable, + DatabaseIncludeSpecialColumns extends boolean = false, + >(config: { occurrence: Occ; databaseName: string; context: ExecutionContext; - database: Database; - }): EntitySet { - return new EntitySet({ + database: Database; + }): EntitySet { + return new EntitySet({ occurrence: config.occurrence, databaseName: config.databaseName, context: config.context, @@ -89,33 +100,30 @@ export class EntitySet> { keyof InferSchemaOutputFromFMTable, false, false, - {} + {}, + DatabaseIncludeSpecialColumns > { - const builder = new QueryBuilder({ + const builder = new QueryBuilder< + Occ, + keyof InferSchemaOutputFromFMTable, + false, + false, + {}, + DatabaseIncludeSpecialColumns + >({ occurrence: this.occurrence as Occ, databaseName: this.databaseName, context: this.context, databaseUseEntityIds: this.databaseUseEntityIds, + databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns, }); // Apply defaultSelect if occurrence exists and select hasn't been called if (this.occurrence) { // FMTable - access via helper functions const defaultSelectValue = getDefaultSelect(this.occurrence); - const tableSchema = (this.occurrence as any)[FMTableClass.Symbol.Schema]; - let schema: Record | undefined; - - if (tableSchema) { - // Extract schema from StandardSchemaV1 - const zodSchema = tableSchema["~standard"]?.schema; - if ( - zodSchema && - typeof zodSchema === "object" && - "shape" in zodSchema - ) { - schema = zodSchema.shape as Record; - } - } + // Schema is stored directly as Partial> + const schema = getTableSchema(this.occurrence); if (defaultSelectValue === "schema") { // Use getTableColumns to get all columns and select them @@ -124,12 +132,22 @@ export class EntitySet> { const allColumns = getTableColumns( this.occurrence, ) as ExtractColumnsFromOcc; - return builder.select(allColumns).top(1000) as QueryBuilder< + + // 
Include special columns if enabled at database level + const systemColumns = this.databaseIncludeSpecialColumns + ? { ROWID: true, ROWMODID: true } + : undefined; + + return builder + .select(allColumns, systemColumns) + .top(1000) as QueryBuilder< Occ, keyof InferSchemaOutputFromFMTable, false, false, - {} + {}, + DatabaseIncludeSpecialColumns, + typeof systemColumns >; } else if (typeof defaultSelectValue === "object") { // defaultSelectValue is a select object (Record) @@ -141,7 +159,8 @@ export class EntitySet> { keyof InferSchemaOutputFromFMTable, false, false, - {} + {}, + DatabaseIncludeSpecialColumns >; } // If defaultSelect is "all", no changes needed (current behavior) @@ -173,34 +192,31 @@ export class EntitySet> { false, undefined, keyof InferSchemaOutputFromFMTable, - {} + {}, + DatabaseIncludeSpecialColumns > { - const builder = new RecordBuilder({ + const builder = new RecordBuilder< + Occ, + false, + undefined, + keyof InferSchemaOutputFromFMTable, + {}, + DatabaseIncludeSpecialColumns + >({ occurrence: this.occurrence, databaseName: this.databaseName, context: this.context, recordId: id, databaseUseEntityIds: this.databaseUseEntityIds, + databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns, }); // Apply defaultSelect if occurrence exists if (this.occurrence) { // FMTable - access via helper functions const defaultSelectValue = getDefaultSelect(this.occurrence); - const tableSchema = (this.occurrence as any)[FMTableClass.Symbol.Schema]; - let schema: Record | undefined; - - if (tableSchema) { - // Extract schema from StandardSchemaV1 - const zodSchema = tableSchema["~standard"]?.schema; - if ( - zodSchema && - typeof zodSchema === "object" && - "shape" in zodSchema - ) { - schema = zodSchema.shape as Record; - } - } + // Schema is stored directly as Partial> + const schema = getTableSchema(this.occurrence); if (defaultSelectValue === "schema") { // Use getTableColumns to get all columns and select them @@ -209,7 +225,13 @@ export class 
EntitySet> { const allColumns = getTableColumns( this.occurrence as any, ) as ExtractColumnsFromOcc; - const selectedBuilder = builder.select(allColumns); + + // Include special columns if enabled at database level + const systemColumns = this.databaseIncludeSpecialColumns + ? { ROWID: true, ROWMODID: true } + : undefined; + + const selectedBuilder = builder.select(allColumns, systemColumns); // Propagate navigation context if present if ( this.isNavigateFromEntitySet && @@ -293,6 +315,7 @@ export class EntitySet> { data: data as any, // Input type is validated/transformed at runtime returnPreference: returnPreference as any, databaseUseEntityIds: this.databaseUseEntityIds, + databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns, }); } @@ -323,6 +346,7 @@ export class EntitySet> { data: data as any, // Input type is validated/transformed at runtime returnPreference: returnPreference as any, databaseUseEntityIds: this.databaseUseEntityIds, + databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns, }); } @@ -332,13 +356,17 @@ export class EntitySet> { databaseName: this.databaseName, context: this.context, databaseUseEntityIds: this.databaseUseEntityIds, + databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns, }) as any; } // Implementation navigate>( targetTable: ValidExpandTarget, - ): EntitySet ? TargetTable : never> { + ): EntitySet< + TargetTable extends FMTable ? 
TargetTable : never, + DatabaseIncludeSpecialColumns + > { // Check if it's an FMTable object or a string let relationName: string; @@ -361,7 +389,7 @@ export class EntitySet> { } // Create EntitySet with target table - const entitySet = new EntitySet({ + const entitySet = new EntitySet({ occurrence: targetTable, databaseName: this.databaseName, context: this.context, diff --git a/packages/fmodata/src/client/error-parser.ts b/packages/fmodata/src/client/error-parser.ts index fd31d12e..01d31fc2 100644 --- a/packages/fmodata/src/client/error-parser.ts +++ b/packages/fmodata/src/client/error-parser.ts @@ -54,3 +54,7 @@ export async function parseErrorResponse( // Fall back to generic HTTPError return new HTTPError(url, response.status, response.statusText, errorBody); } + + + + diff --git a/packages/fmodata/src/client/filemaker-odata.ts b/packages/fmodata/src/client/filemaker-odata.ts index a82233c6..df31d3db 100644 --- a/packages/fmodata/src/client/filemaker-odata.ts +++ b/packages/fmodata/src/client/filemaker-odata.ts @@ -24,6 +24,7 @@ export class FMServerConnection implements ExecutionContext { private serverUrl: string; private auth: Auth; private useEntityIds: boolean = false; + private includeSpecialColumns: boolean = false; private logger: InternalLogger; constructor(config: { serverUrl: string; @@ -63,6 +64,22 @@ export class FMServerConnection implements ExecutionContext { return this.useEntityIds; } + /** + * @internal + * Sets whether to include special columns (ROWID and ROWMODID) in requests + */ + _setIncludeSpecialColumns(includeSpecialColumns: boolean): void { + this.includeSpecialColumns = includeSpecialColumns; + } + + /** + * @internal + * Gets whether to include special columns (ROWID and ROWMODID) in requests + */ + _getIncludeSpecialColumns(): boolean { + return this.includeSpecialColumns; + } + /** * @internal * Gets the base URL for OData requests @@ -84,7 +101,11 @@ export class FMServerConnection implements ExecutionContext { */ async 
_makeRequest( url: string, - options?: RequestInit & FFetchOptions & { useEntityIds?: boolean }, + options?: RequestInit & + FFetchOptions & { + useEntityIds?: boolean; + includeSpecialColumns?: boolean; + }, ): Promise> { const logger = this._getLogger(); const baseUrl = `${this.serverUrl}${"apiKey" in this.auth ? `/otto` : ""}/fmi/odata/v4`; @@ -92,10 +113,21 @@ export class FMServerConnection implements ExecutionContext { // Use per-request override if provided, otherwise use the database-level setting const useEntityIds = options?.useEntityIds ?? this.useEntityIds; + const includeSpecialColumns = + options?.includeSpecialColumns ?? this.includeSpecialColumns; // Get includeODataAnnotations from options (it's passed through from execute options) const includeODataAnnotations = (options as any)?.includeODataAnnotations; + // Build Prefer header as comma-separated list when multiple preferences are set + const preferValues: string[] = []; + if (useEntityIds) { + preferValues.push("fmodata.entity-ids"); + } + if (includeSpecialColumns) { + preferValues.push("fmodata.include-specialcolumns"); + } + const headers = { Authorization: "apiKey" in this.auth @@ -103,7 +135,7 @@ export class FMServerConnection implements ExecutionContext { : `Basic ${btoa(`${this.auth.username}:${this.auth.password}`)}`, "Content-Type": "application/json", Accept: getAcceptHeader(includeODataAnnotations), - ...(useEntityIds ? { Prefer: "fmodata.entity-ids" } : {}), + ...(preferValues.length > 0 ? 
{ Prefer: preferValues.join(", ") } : {}), ...(options?.headers || {}), }; @@ -271,13 +303,14 @@ export class FMServerConnection implements ExecutionContext { } } - database( + database( name: string, config?: { useEntityIds?: boolean; + includeSpecialColumns?: IncludeSpecialColumns; }, - ): Database { - return new Database(name, this, config); + ): Database { + return new Database(name, this, config); } /** @@ -287,7 +320,7 @@ export class FMServerConnection implements ExecutionContext { async listDatabaseNames(): Promise { const result = await this._makeRequest<{ value?: Array<{ name: string }>; - }>("/"); + }>("/$metadata", { headers: { Accept: "application/json" } }); if (result.error) { throw result.error; } diff --git a/packages/fmodata/src/client/insert-builder.ts b/packages/fmodata/src/client/insert-builder.ts index 01b74113..0294c9ec 100644 --- a/packages/fmodata/src/client/insert-builder.ts +++ b/packages/fmodata/src/client/insert-builder.ts @@ -52,6 +52,7 @@ export class InsertBuilder< private returnPreference: ReturnPreference; private databaseUseEntityIds: boolean; + private databaseIncludeSpecialColumns: boolean; constructor(config: { occurrence?: Occ; @@ -60,6 +61,7 @@ export class InsertBuilder< data: Partial>>; returnPreference?: ReturnPreference; databaseUseEntityIds?: boolean; + databaseIncludeSpecialColumns?: boolean; }) { this.table = config.occurrence; this.databaseName = config.databaseName; @@ -68,6 +70,8 @@ export class InsertBuilder< this.returnPreference = (config.returnPreference || "representation") as ReturnPreference; this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; + this.databaseIncludeSpecialColumns = + config.databaseIncludeSpecialColumns ?? 
false; } /** diff --git a/packages/fmodata/src/client/query/query-builder.ts b/packages/fmodata/src/client/query/query-builder.ts index 90b12d61..f6989ac2 100644 --- a/packages/fmodata/src/client/query/query-builder.ts +++ b/packages/fmodata/src/client/query/query-builder.ts @@ -6,15 +6,13 @@ import type { Result, ExecuteOptions, ConditionallyWithODataAnnotations, - ExtractSchemaFromOccurrence, + ConditionallyWithSpecialColumns, + NormalizeIncludeSpecialColumns, ExecuteMethodOptions, } from "../../types"; import { RecordCountMismatchError } from "../../errors"; import { type FFetchOptions } from "@fetchkit/ffetch"; -import { - transformFieldNamesArray, - transformOrderByField, -} from "../../transform"; +import { transformOrderByField } from "../../transform"; import { safeJsonParse } from "../sanitize-json"; import { parseErrorResponse } from "../error-parser"; import { isColumn, type Column } from "../../orm/column"; @@ -28,7 +26,6 @@ import { type InferSchemaOutputFromFMTable, type ValidExpandTarget, type ExtractTableName, - type ValidateNoContainerFields, getTableName, } from "../../orm/table"; import { @@ -37,14 +34,17 @@ import { type ExpandedRelations, resolveTableId, mergeExecuteOptions, - formatSelectFields, processQueryResponse, processSelectWithRenames, buildSelectExpandQueryString, createODataRequest, } from "../builders/index"; import { QueryUrlBuilder, type NavigationConfig } from "./url-builder"; -import type { TypeSafeOrderBy, QueryReturnType } from "./types"; +import type { + TypeSafeOrderBy, + QueryReturnType, + SystemColumnsOption, +} from "./types"; import { createLogger, InternalLogger } from "../../logger"; // Re-export QueryReturnType for backward compatibility @@ -70,6 +70,8 @@ export class QueryBuilder< SingleMode extends "exact" | "maybe" | false = false, IsCount extends boolean = false, Expands extends ExpandedRelations = {}, + DatabaseIncludeSpecialColumns extends boolean = false, + SystemCols extends SystemColumnsOption | undefined = 
undefined, > implements ExecutableBuilder< QueryReturnType< @@ -77,7 +79,8 @@ export class QueryBuilder< Selected, SingleMode, IsCount, - Expands + Expands, + SystemCols > > { @@ -92,10 +95,13 @@ export class QueryBuilder< private context: ExecutionContext; private navigation?: NavigationConfig; private databaseUseEntityIds: boolean; + private databaseIncludeSpecialColumns: boolean; private expandBuilder: ExpandBuilder; private urlBuilder: QueryUrlBuilder; // Mapping from field names to output keys (for renamed fields in select) private fieldMapping?: Record; + // System columns requested via select() second argument + private systemColumns?: SystemColumnsOption; private logger: InternalLogger; constructor(config: { @@ -103,12 +109,15 @@ export class QueryBuilder< databaseName: string; context: ExecutionContext; databaseUseEntityIds?: boolean; + databaseIncludeSpecialColumns?: boolean; }) { this.occurrence = config.occurrence; this.databaseName = config.databaseName; this.context = config.context; this.logger = config.context?._getLogger?.() ?? createLogger(); this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; + this.databaseIncludeSpecialColumns = + config.databaseIncludeSpecialColumns ?? 
false; this.expandBuilder = new ExpandBuilder( this.databaseUseEntityIds, this.logger, @@ -121,12 +130,21 @@ export class QueryBuilder< } /** - * Helper to merge database-level useEntityIds with per-request options + * Helper to merge database-level useEntityIds and includeSpecialColumns with per-request options */ private mergeExecuteOptions( options?: RequestInit & FFetchOptions & ExecuteOptions, - ): RequestInit & FFetchOptions & { useEntityIds?: boolean } { - return mergeExecuteOptions(options, this.databaseUseEntityIds); + ): RequestInit & + FFetchOptions & { + useEntityIds?: boolean; + includeSpecialColumns?: boolean; + } { + const merged = mergeExecuteOptions(options, this.databaseUseEntityIds); + return { + ...merged, + includeSpecialColumns: + options?.includeSpecialColumns ?? this.databaseIncludeSpecialColumns, + }; } /** @@ -159,24 +177,37 @@ export class QueryBuilder< | Record>> = Selected, NewSingle extends "exact" | "maybe" | false = SingleMode, NewCount extends boolean = IsCount, + NewSystemCols extends SystemColumnsOption | undefined = SystemCols, >(changes: { selectedFields?: NewSelected; singleMode?: NewSingle; isCountMode?: NewCount; queryOptions?: Partial>>; fieldMapping?: Record; - }): QueryBuilder { + systemColumns?: NewSystemCols; + }): QueryBuilder< + Occ, + NewSelected, + NewSingle, + NewCount, + Expands, + DatabaseIncludeSpecialColumns, + NewSystemCols + > { const newBuilder = new QueryBuilder< Occ, NewSelected, NewSingle, NewCount, - Expands + Expands, + DatabaseIncludeSpecialColumns, + NewSystemCols >({ occurrence: this.occurrence, databaseName: this.databaseName, context: this.context, databaseUseEntityIds: this.databaseUseEntityIds, + databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns, }); newBuilder.queryOptions = { ...this.queryOptions, @@ -186,6 +217,10 @@ export class QueryBuilder< newBuilder.singleMode = (changes.singleMode ?? this.singleMode) as any; newBuilder.isCountMode = (changes.isCountMode ?? 
this.isCountMode) as any; newBuilder.fieldMapping = changes.fieldMapping ?? this.fieldMapping; + newBuilder.systemColumns = + changes.systemColumns !== undefined + ? changes.systemColumns + : this.systemColumns; // Copy navigation metadata newBuilder.navigation = this.navigation; newBuilder.urlBuilder = new QueryUrlBuilder( @@ -207,7 +242,15 @@ export class QueryBuilder< * userEmail: users.email // renamed! * }) * + * @example + * // Include system columns (ROWID, ROWMODID) when using select() + * db.from(users).list().select( + * { name: users.name }, + * { ROWID: true, ROWMODID: true } + * ) + * * @param fields - Object mapping output keys to column references (container fields excluded) + * @param systemColumns - Optional object to request system columns (ROWID, ROWMODID) * @returns QueryBuilder with updated selected fields */ select< @@ -215,7 +258,19 @@ export class QueryBuilder< string, Column, false> >, - >(fields: TSelect): QueryBuilder { + TSystemCols extends SystemColumnsOption = {}, + >( + fields: TSelect, + systemColumns?: TSystemCols, + ): QueryBuilder< + Occ, + TSelect, + SingleMode, + IsCount, + Expands, + DatabaseIncludeSpecialColumns, + TSystemCols + > { const tableName = getTableName(this.occurrence); const { selectedFields, fieldMapping } = processSelectWithRenames( fields, @@ -223,13 +278,23 @@ export class QueryBuilder< this.logger, ); + // Add system columns to selectedFields if requested + const finalSelectedFields = [...selectedFields]; + if (systemColumns?.ROWID) { + finalSelectedFields.push("ROWID"); + } + if (systemColumns?.ROWMODID) { + finalSelectedFields.push("ROWMODID"); + } + return this.cloneWithChanges({ selectedFields: fields as any, queryOptions: { - select: selectedFields, + select: finalSelectedFields, }, fieldMapping: Object.keys(fieldMapping).length > 0 ? 
fieldMapping : undefined, + systemColumns: systemColumns as any, }); } @@ -245,7 +310,15 @@ export class QueryBuilder< */ where( expression: FilterExpression | string, - ): QueryBuilder { + ): QueryBuilder< + Occ, + Selected, + SingleMode, + IsCount, + Expands, + DatabaseIncludeSpecialColumns, + SystemCols + > { // Handle raw string filters (escape hatch) if (typeof expression === "string") { this.queryOptions.filter = expression; @@ -295,7 +368,15 @@ export class QueryBuilder< | OrderByExpression> >, ] - ): QueryBuilder { + ): QueryBuilder< + Occ, + Selected, + SingleMode, + IsCount, + Expands, + DatabaseIncludeSpecialColumns, + SystemCols + > { const tableName = getTableName(this.occurrence); // Handle variadic arguments (multiple fields) @@ -440,14 +521,30 @@ export class QueryBuilder< top( count: number, - ): QueryBuilder { + ): QueryBuilder< + Occ, + Selected, + SingleMode, + IsCount, + Expands, + DatabaseIncludeSpecialColumns, + SystemCols + > { this.queryOptions.top = count; return this; } skip( count: number, - ): QueryBuilder { + ): QueryBuilder< + Occ, + Selected, + SingleMode, + IsCount, + Expands, + DatabaseIncludeSpecialColumns, + SystemCols + > { this.queryOptions.skip = count; return this; } @@ -483,7 +580,9 @@ export class QueryBuilder< selected: TSelected; nested: TNestedExpands; }; - } + }, + DatabaseIncludeSpecialColumns, + SystemCols > { // Use ExpandBuilder.processExpand to handle the expand logic type TargetBuilder = QueryBuilder< @@ -491,7 +590,8 @@ export class QueryBuilder< keyof InferSchemaOutputFromFMTable, false, false, - {} + {}, + DatabaseIncludeSpecialColumns >; const expandConfig = this.expandBuilder.processExpand< TargetTable, @@ -501,11 +601,20 @@ export class QueryBuilder< this.occurrence, callback as ((builder: TargetBuilder) => TargetBuilder) | undefined, () => - new QueryBuilder({ + new QueryBuilder< + TargetTable, + any, + any, + any, + any, + DatabaseIncludeSpecialColumns, + undefined + >({ occurrence: targetTable, 
databaseName: this.databaseName, context: this.context, databaseUseEntityIds: this.databaseUseEntityIds, + databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns, }), ); @@ -513,15 +622,39 @@ export class QueryBuilder< return this as any; } - single(): QueryBuilder { + single(): QueryBuilder< + Occ, + Selected, + "exact", + IsCount, + Expands, + DatabaseIncludeSpecialColumns, + SystemCols + > { return this.cloneWithChanges({ singleMode: "exact" as const }); } - maybeSingle(): QueryBuilder { + maybeSingle(): QueryBuilder< + Occ, + Selected, + "maybe", + IsCount, + Expands, + DatabaseIncludeSpecialColumns, + SystemCols + > { return this.cloneWithChanges({ singleMode: "maybe" as const }); } - count(): QueryBuilder { + count(): QueryBuilder< + Occ, + Selected, + SingleMode, + true, + Expands, + DatabaseIncludeSpecialColumns, + SystemCols + > { return this.cloneWithChanges({ isCountMode: true as const, queryOptions: { count: true }, @@ -531,7 +664,7 @@ export class QueryBuilder< /** * Builds the OData query string from current query options and expand configs. */ - private buildQueryString(): string { + private buildQueryString(includeSpecialColumns?: boolean): string { // Build query without expand and select (we'll add them manually if using entity IDs) const queryOptionsWithoutExpandAndSelect = { ...this.queryOptions }; const originalSelect = queryOptionsWithoutExpandAndSelect.select; @@ -547,12 +680,17 @@ export class QueryBuilder< : [String(originalSelect)] : undefined; + // Use merged includeSpecialColumns if provided, otherwise use database-level default + const finalIncludeSpecialColumns = + includeSpecialColumns ?? 
this.databaseIncludeSpecialColumns; + const selectExpandString = buildSelectExpandQueryString({ selectedFields: selectArray, expandConfigs: this.expandConfigs, table: this.occurrence, useEntityIds: this.databaseUseEntityIds, logger: this.logger, + includeSpecialColumns: finalIncludeSpecialColumns, }); // Append select/expand to existing query string @@ -573,19 +711,35 @@ export class QueryBuilder< ): Promise< Result< ConditionallyWithODataAnnotations< - QueryReturnType< - InferSchemaOutputFromFMTable, - Selected, - SingleMode, - IsCount, - Expands + ConditionallyWithSpecialColumns< + QueryReturnType< + InferSchemaOutputFromFMTable, + Selected, + SingleMode, + IsCount, + Expands, + SystemCols + >, + // Use the merged value: if explicitly provided in options, use that; otherwise use database default + NormalizeIncludeSpecialColumns< + EO["includeSpecialColumns"], + DatabaseIncludeSpecialColumns + >, + // Check if select was applied: if Selected is Record (object select) or a subset of keys, select was applied + Selected extends Record> + ? true + : Selected extends keyof InferSchemaOutputFromFMTable + ? false + : true >, EO["includeODataAnnotations"] extends true ? 
true : false > > > { const mergedOptions = this.mergeExecuteOptions(options); - const queryString = this.buildQueryString(); + const queryString = this.buildQueryString( + mergedOptions.includeSpecialColumns, + ); // Handle $count endpoint if (this.isCountMode) { @@ -618,6 +772,9 @@ export class QueryBuilder< return { data: undefined, error: result.error }; } + // Check if select was applied (runtime check) + const hasSelect = this.queryOptions.select !== undefined; + return processQueryResponse(result.data, { occurrence: this.occurrence, singleMode: this.singleMode, @@ -625,6 +782,7 @@ export class QueryBuilder< expandConfigs: this.expandConfigs, skipValidation: options?.skipValidation, useEntityIds: mergedOptions.useEntityIds, + includeSpecialColumns: mergedOptions.includeSpecialColumns, fieldMapping: this.fieldMapping, logger: this.logger, }); @@ -667,7 +825,8 @@ export class QueryBuilder< Selected, SingleMode, IsCount, - Expands + Expands, + SystemCols > > > { @@ -728,6 +887,9 @@ export class QueryBuilder< } const mergedOptions = this.mergeExecuteOptions(options); + // Check if select was applied (runtime check) + const hasSelect = this.queryOptions.select !== undefined; + return processQueryResponse(rawData, { occurrence: this.occurrence, singleMode: this.singleMode, @@ -735,6 +897,7 @@ export class QueryBuilder< expandConfigs: this.expandConfigs, skipValidation: options?.skipValidation, useEntityIds: mergedOptions.useEntityIds, + includeSpecialColumns: mergedOptions.includeSpecialColumns, fieldMapping: this.fieldMapping, logger: this.logger, }); diff --git a/packages/fmodata/src/client/query/response-processor.ts b/packages/fmodata/src/client/query/response-processor.ts index c3140601..1ccf3f00 100644 --- a/packages/fmodata/src/client/query/response-processor.ts +++ b/packages/fmodata/src/client/query/response-processor.ts @@ -7,7 +7,7 @@ import { transformResponseFields } from "../../transform"; import { validateListResponse, validateSingleResponse } from 
"../../validation"; import type { ExpandValidationConfig } from "../../validation"; import type { ExpandConfig } from "./expand-builder"; -import { FMTable as FMTableClass } from "../../orm/table"; +import { FMTable as FMTableClass, getTableSchema } from "../../orm/table"; import { InternalLogger } from "../../logger"; /** @@ -20,6 +20,7 @@ export interface ProcessQueryResponseConfig { expandConfigs: ExpandConfig[]; skipValidation?: boolean; useEntityIds?: boolean; + includeSpecialColumns?: boolean; // Mapping from field names to output keys (for renamed fields in select) fieldMapping?: Record; logger: InternalLogger; @@ -37,20 +38,12 @@ function buildExpandValidationConfigs( const targetTable = config.targetTable; // Extract schema from target table/occurrence - let targetSchema: Record | undefined; - if (targetTable) { - const tableSchema = (targetTable as any)[FMTableClass.Symbol.Schema]; - if (tableSchema) { - const zodSchema = tableSchema["~standard"]?.schema; - if ( - zodSchema && - typeof zodSchema === "object" && - "shape" in zodSchema - ) { - targetSchema = zodSchema.shape as Record; - } - } - } + // Schema is stored directly as Partial> + const targetSchema = targetTable + ? (getTableSchema(targetTable) as + | Record + | undefined) + : undefined; // Extract selected fields from options const selectedFields = config.options?.select @@ -193,16 +186,8 @@ export async function processQueryResponse( // Validation path // Get schema from occurrence if available - let schema: Record | undefined; - if (occurrence) { - const tableSchema = (occurrence as any)[FMTableClass.Symbol.Schema]; - if (tableSchema) { - const zodSchema = tableSchema["~standard"]?.schema; - if (zodSchema && typeof zodSchema === "object" && "shape" in zodSchema) { - schema = zodSchema.shape as Record; - } - } - } + // Schema is stored directly as Partial> + const schema = occurrence ? getTableSchema(occurrence) : undefined; const selectedFields = config.queryOptions.select ? 
((Array.isArray(config.queryOptions.select) @@ -214,6 +199,12 @@ export async function processQueryResponse( ); // Validate with original field names + // Special columns are excluded when using single() method (per OData spec behavior) + // Note: While FileMaker may return special columns in single mode if requested via header, + // we exclude them here to maintain OData spec compliance. The types will also not include + // special columns for single mode to match this runtime behavior. + const shouldIncludeSpecialColumns = + singleMode === false ? (config.includeSpecialColumns ?? false) : false; const validationResult = singleMode !== false ? await validateSingleResponse( @@ -222,12 +213,14 @@ export async function processQueryResponse( selectedFields as string[] | undefined, expandValidationConfigs, singleMode, + shouldIncludeSpecialColumns, ) : await validateListResponse( data, schema, selectedFields as string[] | undefined, expandValidationConfigs, + shouldIncludeSpecialColumns, ); if (!validationResult.valid) { diff --git a/packages/fmodata/src/client/query/types.ts b/packages/fmodata/src/client/query/types.ts index a3b81441..9aae8637 100644 --- a/packages/fmodata/src/client/query/types.ts +++ b/packages/fmodata/src/client/query/types.ts @@ -70,30 +70,59 @@ export type ResolveExpandedRelations = { [K in keyof Exps]: ResolveExpandType[]; }; +/** + * System columns option for select() method. + * Allows explicitly requesting ROWID and/or ROWMODID when using select(). + */ +export type SystemColumnsOption = { + ROWID?: boolean; + ROWMODID?: boolean; +}; + +/** + * Extract system columns type from SystemColumnsOption. + * Returns an object type with ROWID and/or ROWMODID properties when set to true. + */ +export type SystemColumnsFromOption< + T extends SystemColumnsOption | undefined, +> = (T extends { ROWID: true } ? { ROWID: number } : {}) & + (T extends { ROWMODID: true } ? 
{ ROWMODID: number } : {}); + export type QueryReturnType< T extends Record, Selected extends keyof T | Record>, SingleMode extends "exact" | "maybe" | false, IsCount extends boolean, Expands extends ExpandedRelations, + SystemCols extends SystemColumnsOption | undefined = undefined, > = IsCount extends true ? number : // Use tuple wrapping [Selected] extends [...] to prevent distribution over unions [Selected] extends [Record>] ? SingleMode extends "exact" - ? MapSelectToReturnType & ResolveExpandedRelations + ? MapSelectToReturnType & + ResolveExpandedRelations & + SystemColumnsFromOption : SingleMode extends "maybe" ? | (MapSelectToReturnType & - ResolveExpandedRelations) + ResolveExpandedRelations & + SystemColumnsFromOption) | null : (MapSelectToReturnType & - ResolveExpandedRelations)[] + ResolveExpandedRelations & + SystemColumnsFromOption)[] : // Use tuple wrapping to prevent distribution over union of keys [Selected] extends [keyof T] ? SingleMode extends "exact" - ? Pick & ResolveExpandedRelations + ? Pick & + ResolveExpandedRelations & + SystemColumnsFromOption : SingleMode extends "maybe" - ? (Pick & ResolveExpandedRelations) | null - : (Pick & ResolveExpandedRelations)[] + ? 
(Pick & + ResolveExpandedRelations & + SystemColumnsFromOption) | null + : (Pick & + ResolveExpandedRelations & + SystemColumnsFromOption)[] : never; diff --git a/packages/fmodata/src/client/record-builder.ts b/packages/fmodata/src/client/record-builder.ts index 48f66b8f..484c49c7 100644 --- a/packages/fmodata/src/client/record-builder.ts +++ b/packages/fmodata/src/client/record-builder.ts @@ -5,6 +5,8 @@ import type { ODataFieldResponse, ExecuteOptions, ConditionallyWithODataAnnotations, + ConditionallyWithSpecialColumns, + NormalizeIncludeSpecialColumns, ExecuteMethodOptions, } from "../types"; import type { @@ -35,6 +37,8 @@ import { import { type ResolveExpandedRelations, type ResolveExpandType, + type SystemColumnsOption, + type SystemColumnsFromOption, } from "./query/types"; import { createLogger, InternalLogger, Logger } from "../logger"; @@ -64,6 +68,7 @@ export type RecordReturnType< | keyof Schema | Record>>>, Expands extends ExpandedRelations, + SystemCols extends SystemColumnsOption | undefined = undefined, > = IsSingleField extends true ? FieldColumn extends Column ? TOutput @@ -71,10 +76,13 @@ export type RecordReturnType< : // Use tuple wrapping [Selected] extends [...] to prevent distribution over unions [Selected] extends [Record>] ? MapSelectToReturnType & - ResolveExpandedRelations + ResolveExpandedRelations & + SystemColumnsFromOption : // Use tuple wrapping to prevent distribution over union of keys [Selected] extends [keyof Schema] - ? Pick & ResolveExpandedRelations + ? 
Pick & + ResolveExpandedRelations & + SystemColumnsFromOption : never; export class RecordBuilder< @@ -88,6 +96,8 @@ export class RecordBuilder< Column>> > = keyof InferSchemaOutputFromFMTable>, Expands extends ExpandedRelations = {}, + DatabaseIncludeSpecialColumns extends boolean = false, + SystemCols extends SystemColumnsOption | undefined = undefined, > implements ExecutableBuilder< RecordReturnType< @@ -95,7 +105,8 @@ export class RecordBuilder< IsSingleField, FieldColumn, Selected, - Expands + Expands, + SystemCols > > { @@ -111,12 +122,15 @@ export class RecordBuilder< private navigateSourceTableName?: string; private databaseUseEntityIds: boolean; + private databaseIncludeSpecialColumns: boolean; // Properties for select/expand support private selectedFields?: string[]; private expandConfigs: ExpandConfig[] = []; // Mapping from field names to output keys (for renamed fields in select) private fieldMapping?: Record; + // System columns requested via select() second argument + private systemColumns?: SystemColumnsOption; private logger: InternalLogger; @@ -126,22 +140,34 @@ export class RecordBuilder< context: ExecutionContext; recordId: string | number; databaseUseEntityIds?: boolean; + databaseIncludeSpecialColumns?: boolean; }) { this.table = config.occurrence; this.databaseName = config.databaseName; this.context = config.context; this.recordId = config.recordId; this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; + this.databaseIncludeSpecialColumns = + config.databaseIncludeSpecialColumns ?? false; this.logger = config.context?._getLogger?.() ?? 
createLogger(); } /** - * Helper to merge database-level useEntityIds with per-request options + * Helper to merge database-level useEntityIds and includeSpecialColumns with per-request options */ private mergeExecuteOptions( options?: RequestInit & FFetchOptions & ExecuteOptions, - ): RequestInit & FFetchOptions & { useEntityIds?: boolean } { - return mergeExecuteOptions(options, this.databaseUseEntityIds); + ): RequestInit & + FFetchOptions & { + useEntityIds?: boolean; + includeSpecialColumns?: boolean; + } { + const merged = mergeExecuteOptions(options, this.databaseUseEntityIds); + return { + ...merged, + includeSpecialColumns: + options?.includeSpecialColumns ?? this.databaseIncludeSpecialColumns, + }; } /** @@ -171,25 +197,42 @@ export class RecordBuilder< string, Column>> > = Selected, + NewSystemCols extends SystemColumnsOption | undefined = SystemCols, >(changes: { selectedFields?: string[]; fieldMapping?: Record; - }): RecordBuilder { + systemColumns?: NewSystemCols; + }): RecordBuilder< + Occ, + false, + FieldColumn, + NewSelected, + Expands, + DatabaseIncludeSpecialColumns, + NewSystemCols + > { const newBuilder = new RecordBuilder< Occ, false, FieldColumn, NewSelected, - Expands + Expands, + DatabaseIncludeSpecialColumns, + NewSystemCols >({ occurrence: this.table, databaseName: this.databaseName, context: this.context, recordId: this.recordId, databaseUseEntityIds: this.databaseUseEntityIds, + databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns, }); newBuilder.selectedFields = changes.selectedFields ?? this.selectedFields; newBuilder.fieldMapping = changes.fieldMapping ?? this.fieldMapping; + newBuilder.systemColumns = + changes.systemColumns !== undefined + ? 
changes.systemColumns + : this.systemColumns; newBuilder.expandConfigs = [...this.expandConfigs]; // Preserve navigation context newBuilder.isNavigateFromEntitySet = this.isNavigateFromEntitySet; @@ -208,7 +251,8 @@ export class RecordBuilder< true, TColumn, keyof InferSchemaOutputFromFMTable>, - {} + {}, + DatabaseIncludeSpecialColumns > { // Runtime validation: ensure column is from the correct table const tableName = getTableName(this.table); @@ -223,13 +267,15 @@ export class RecordBuilder< true, TColumn, keyof InferSchemaOutputFromFMTable>, - {} + {}, + DatabaseIncludeSpecialColumns >({ occurrence: this.table, databaseName: this.databaseName, context: this.context, recordId: this.recordId, databaseUseEntityIds: this.databaseUseEntityIds, + databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns, }); newBuilder.operation = "getSingleField"; newBuilder.operationColumn = column; @@ -254,7 +300,15 @@ export class RecordBuilder< * userEmail: contacts.email // renamed! * }) * + * @example + * // Include system columns (ROWID, ROWMODID) when using select() + * db.from(contacts).get("uuid").select( + * { name: contacts.name }, + * { ROWID: true, ROWMODID: true } + * ) + * * @param fields - Object mapping output keys to column references (container fields excluded) + * @param systemColumns - Optional object to request system columns (ROWID, ROWMODID) * @returns RecordBuilder with updated selected fields */ select< @@ -262,7 +316,19 @@ export class RecordBuilder< string, Column, false> >, - >(fields: TSelect): RecordBuilder { + TSystemCols extends SystemColumnsOption = {}, + >( + fields: TSelect, + systemColumns?: TSystemCols, + ): RecordBuilder< + Occ, + false, + FieldColumn, + TSelect, + Expands, + DatabaseIncludeSpecialColumns, + TSystemCols + > { const tableName = getTableName(this.table); const { selectedFields, fieldMapping } = processSelectWithRenames( fields, @@ -270,10 +336,20 @@ export class RecordBuilder< this.logger, ); + // Add system columns to 
selectedFields if requested + const finalSelectedFields = [...selectedFields]; + if (systemColumns?.ROWID) { + finalSelectedFields.push("ROWID"); + } + if (systemColumns?.ROWMODID) { + finalSelectedFields.push("ROWMODID"); + } + return this.cloneWithChanges({ - selectedFields, + selectedFields: finalSelectedFields, fieldMapping: Object.keys(fieldMapping).length > 0 ? fieldMapping : undefined, + systemColumns: systemColumns as any, }) as any; } @@ -323,7 +399,9 @@ export class RecordBuilder< selected: TSelected; nested: TNestedExpands; }; - } + }, + DatabaseIncludeSpecialColumns, + SystemCols > { // Create new builder with updated types const newBuilder = new RecordBuilder< @@ -331,18 +409,21 @@ export class RecordBuilder< false, FieldColumn, Selected, - any + any, + DatabaseIncludeSpecialColumns >({ occurrence: this.table, databaseName: this.databaseName, context: this.context, recordId: this.recordId, databaseUseEntityIds: this.databaseUseEntityIds, + databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns, }); // Copy existing state newBuilder.selectedFields = this.selectedFields; newBuilder.fieldMapping = this.fieldMapping; + newBuilder.systemColumns = this.systemColumns; newBuilder.expandConfigs = [...this.expandConfigs]; newBuilder.isNavigateFromEntitySet = this.isNavigateFromEntitySet; newBuilder.navigateRelation = this.navigateRelation; @@ -369,11 +450,20 @@ export class RecordBuilder< this.table ?? 
undefined, callback as ((builder: TargetBuilder) => TargetBuilder) | undefined, () => - new QueryBuilder({ + new QueryBuilder< + TargetTable, + any, + any, + any, + any, + DatabaseIncludeSpecialColumns, + undefined + >({ occurrence: targetTable, databaseName: this.databaseName, context: this.context, databaseUseEntityIds: this.databaseUseEntityIds, + databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns, }), ); @@ -387,7 +477,10 @@ export class RecordBuilder< TargetTable, keyof InferSchemaOutputFromFMTable, false, - false + false, + {}, + DatabaseIncludeSpecialColumns, + undefined > { // Extract name and validate const relationName = getTableName(targetTable); @@ -403,11 +496,20 @@ export class RecordBuilder< } // Create QueryBuilder with target table - const builder = new QueryBuilder({ + const builder = new QueryBuilder< + TargetTable, + any, + any, + any, + any, + DatabaseIncludeSpecialColumns, + undefined + >({ occurrence: targetTable, databaseName: this.databaseName, context: this.context, databaseUseEntityIds: this.databaseUseEntityIds, + databaseIncludeSpecialColumns: this.databaseIncludeSpecialColumns, }); // Store the navigation info - we'll use it in execute @@ -452,13 +554,18 @@ export class RecordBuilder< /** * Builds the complete query string including $select and $expand parameters. */ - private buildQueryString(): string { + private buildQueryString(includeSpecialColumns?: boolean): string { + // Use merged includeSpecialColumns if provided, otherwise use database-level default + const finalIncludeSpecialColumns = + includeSpecialColumns ?? 
this.databaseIncludeSpecialColumns; + return buildSelectExpandQueryString({ selectedFields: this.selectedFields, expandConfigs: this.expandConfigs, table: this.table, useEntityIds: this.databaseUseEntityIds, logger: this.logger, + includeSpecialColumns: finalIncludeSpecialColumns, }); } @@ -467,12 +574,30 @@ export class RecordBuilder< ): Promise< Result< ConditionallyWithODataAnnotations< - RecordReturnType< - InferSchemaOutputFromFMTable>, - IsSingleField, - FieldColumn, - Selected, - Expands + ConditionallyWithSpecialColumns< + RecordReturnType< + InferSchemaOutputFromFMTable>, + IsSingleField, + FieldColumn, + Selected, + Expands, + SystemCols + >, + // Use the merged value: if explicitly provided in options, use that; otherwise use database default + NormalizeIncludeSpecialColumns< + EO["includeSpecialColumns"], + DatabaseIncludeSpecialColumns + >, + // Check if select was applied: if Selected is Record (object select) or a subset of keys, select was applied + IsSingleField extends true + ? false // Single field operations don't include special columns + : Selected extends Record> + ? true + : Selected extends keyof InferSchemaOutputFromFMTable< + NonNullable + > + ? false + : true >, EO["includeODataAnnotations"] extends true ? 
true : false > @@ -496,15 +621,17 @@ export class RecordBuilder< url = `/${this.databaseName}/${tableId}('${this.recordId}')`; } + const mergedOptions = this.mergeExecuteOptions(options); + if (this.operation === "getSingleField" && this.operationParam) { url += `/${this.operationParam}`; } else { // Add query string for select/expand (only when not getting a single field) - const queryString = this.buildQueryString(); + const queryString = this.buildQueryString( + mergedOptions.includeSpecialColumns, + ); url += queryString; } - - const mergedOptions = this.mergeExecuteOptions(options); const result = await this.context._makeRequest(url, mergedOptions); if (result.error) { @@ -538,6 +665,7 @@ export class RecordBuilder< expandValidationConfigs, skipValidation: options?.skipValidation, useEntityIds: mergedOptions.useEntityIds, + includeSpecialColumns: mergedOptions.includeSpecialColumns, fieldMapping: this.fieldMapping, }); } @@ -626,7 +754,8 @@ export class RecordBuilder< IsSingleField, FieldColumn, Selected, - Expands + Expands, + SystemCols > > > { @@ -652,10 +781,7 @@ export class RecordBuilder< } // Use shared response processor - const mergedOptions = mergeExecuteOptions( - options, - this.databaseUseEntityIds, - ); + const mergedOptions = this.mergeExecuteOptions(options); const expandBuilder = new ExpandBuilder( mergedOptions.useEntityIds ?? 
false, this.logger, @@ -672,6 +798,7 @@ export class RecordBuilder< expandValidationConfigs, skipValidation: options?.skipValidation, useEntityIds: mergedOptions.useEntityIds, + includeSpecialColumns: mergedOptions.includeSpecialColumns, fieldMapping: this.fieldMapping, }); } diff --git a/packages/fmodata/src/client/update-builder.ts b/packages/fmodata/src/client/update-builder.ts index a2b2292b..adb540ac 100644 --- a/packages/fmodata/src/client/update-builder.ts +++ b/packages/fmodata/src/client/update-builder.ts @@ -2,7 +2,6 @@ import type { ExecutionContext, ExecutableBuilder, Result, - WithSystemFields, ExecuteOptions, ExecuteMethodOptions, } from "../types"; @@ -35,6 +34,7 @@ export class UpdateBuilder< private returnPreference: ReturnPreference; private databaseUseEntityIds: boolean; + private databaseIncludeSpecialColumns: boolean; constructor(config: { occurrence: Occ; @@ -43,6 +43,7 @@ export class UpdateBuilder< data: Partial>; returnPreference: ReturnPreference; databaseUseEntityIds?: boolean; + databaseIncludeSpecialColumns?: boolean; }) { this.table = config.occurrence; this.databaseName = config.databaseName; @@ -50,6 +51,8 @@ export class UpdateBuilder< this.data = config.data; this.returnPreference = config.returnPreference; this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; + this.databaseIncludeSpecialColumns = + config.databaseIncludeSpecialColumns ?? 
false; } /** diff --git a/packages/fmodata/src/client/webhook-builder.ts b/packages/fmodata/src/client/webhook-builder.ts new file mode 100644 index 00000000..52d1cac8 --- /dev/null +++ b/packages/fmodata/src/client/webhook-builder.ts @@ -0,0 +1,285 @@ +import { FMTable, getTableName } from "../orm"; +import type { ExecutionContext, ExecuteMethodOptions } from "../types"; +import type { FFetchOptions } from "@fetchkit/ffetch"; +import { FilterExpression } from "../orm/operators"; +import { isColumn, type Column } from "../orm/column"; +import { formatSelectFields } from "./builders/select-utils"; + +export type Webhook = { + webhook: string; + headers?: Record; + tableName: TableName; + notifySchemaChanges?: boolean; + select?: string | Column[]; + filter?: string | FilterExpression; +}; + +/** + * Webhook information returned by the API + */ +export type WebhookInfo = { + webHookID: number; + tableName: string; + url: string; + headers?: Record; + notifySchemaChanges: boolean; + select: string; + filter: string; + pendingOperations: unknown[]; +}; + +/** + * Response from listing all webhooks + */ +export type WebhookListResponse = { + Status: string; + WebHook: WebhookInfo[]; +}; + +/** + * Response from adding a webhook + */ +export type WebhookAddResponse = { + webHookResult: { + webHookID: number; + }; +}; + +export class WebhookManager { + constructor( + private readonly databaseName: string, + private readonly context: ExecutionContext, + ) {} + + /** + * Adds a new webhook to the database. 
+ * @param webhook - The webhook configuration object + * @param webhook.webhook - The webhook URL to call + * @param webhook.tableName - The FMTable instance for the table to monitor + * @param webhook.headers - Optional custom headers to include in webhook requests + * @param webhook.notifySchemaChanges - Whether to notify on schema changes + * @param webhook.select - Optional field selection (string or array of Column references) + * @param webhook.filter - Optional filter (string or FilterExpression) + * @returns Promise resolving to the created webhook data with ID + * @example + * ```ts + * const result = await db.webhook.add({ + * webhook: "https://example.com/webhook", + * tableName: contactsTable, + * headers: { "X-Custom-Header": "value" }, + * }); + * // result.webHookResult.webHookID contains the new webhook ID + * ``` + * @example + * ```ts + * // Using filter expressions and column arrays (same DX as query builder) + * const result = await db.webhook.add({ + * webhook: "https://example.com/webhook", + * tableName: contacts, + * filter: eq(contacts.name, "John"), + * select: [contacts.name, contacts.PrimaryKey], + * }); + * ``` + */ + async add( + webhook: Webhook, + options?: ExecuteMethodOptions, + ): Promise { + // Extract the string table name from the FMTable instance + const tableName = getTableName(webhook.tableName); + + // Get useEntityIds setting (check options first, then context, default to false) + const useEntityIds = + options?.useEntityIds ?? this.context._getUseEntityIds?.() ?? 
false; + + // Transform filter if it's a FilterExpression + let filter: string | undefined; + if (webhook.filter !== undefined) { + if (webhook.filter instanceof FilterExpression) { + filter = webhook.filter.toODataFilter(useEntityIds); + } else { + filter = webhook.filter; + } + } + + // Transform select if it's an array of Columns + let select: string | undefined; + if (webhook.select !== undefined) { + if (Array.isArray(webhook.select)) { + // Extract field identifiers from columns or use strings as-is + const fieldNames = webhook.select.map((item) => { + if (isColumn(item)) { + return item.getFieldIdentifier(useEntityIds); + } + return String(item); + }); + // Use formatSelectFields to properly format the select string + select = formatSelectFields( + fieldNames, + webhook.tableName, + useEntityIds, + ); + } else { + // Already a string, use as-is + select = webhook.select; + } + } + + // Create request body with string table name and transformed filter/select + const requestBody: { + webhook: string; + headers?: Record; + tableName: string; + notifySchemaChanges?: boolean; + select?: string; + filter?: string; + } = { + webhook: webhook.webhook, + tableName, + }; + + if (webhook.headers !== undefined) { + requestBody.headers = webhook.headers; + } + if (webhook.notifySchemaChanges !== undefined) { + requestBody.notifySchemaChanges = webhook.notifySchemaChanges; + } + if (select !== undefined) { + requestBody.select = select; + } + if (filter !== undefined) { + requestBody.filter = filter; + } + + const result = await this.context._makeRequest( + `/${this.databaseName}/Webhook.Add`, + { + method: "POST", + body: JSON.stringify(requestBody), + ...options, + }, + ); + + if (result.error) { + throw result.error; + } + + return result.data; + } + + /** + * Deletes a webhook by ID. 
+ * @param webhookId - The ID of the webhook to delete + * @returns Promise that resolves when the webhook is deleted + * @example + * ```ts + * await db.webhook.remove(1); + * ``` + */ + async remove( + webhookId: number, + options?: ExecuteMethodOptions, + ): Promise { + const result = await this.context._makeRequest( + `/${this.databaseName}/Webhook.Delete(${webhookId})`, + { + method: "POST", + ...options, + }, + ); + + if (result.error) { + throw result.error; + } + } + + /** + * Gets a webhook by ID. + * @param webhookId - The ID of the webhook to retrieve + * @returns Promise resolving to the webhook data + * @example + * ```ts + * const webhook = await db.webhook.get(1); + * // webhook.webHookID, webhook.tableName, webhook.url, etc. + * ``` + */ + async get( + webhookId: number, + options?: ExecuteMethodOptions, + ): Promise { + const result = await this.context._makeRequest( + `/${this.databaseName}/Webhook.Get(${webhookId})`, + options, + ); + + if (result.error) { + throw result.error; + } + + return result.data; + } + + /** + * Lists all webhooks. + * @returns Promise resolving to webhook list response with status and webhooks array + * @example + * ```ts + * const result = await db.webhook.list(); + * // result.Status contains the status + * // result.WebHook contains the array of webhooks + * ``` + */ + async list(options?: ExecuteMethodOptions): Promise { + const result = await this.context._makeRequest( + `/${this.databaseName}/Webhook.GetAll`, + options, + ); + + if (result.error) { + throw result.error; + } + + return result.data; + } + + /** + * Invokes a webhook by ID, optionally for specific row IDs. 
+ * @param webhookId - The ID of the webhook to invoke + * @param options - Optional configuration + * @param options.rowIDs - Array of row IDs to trigger the webhook for + * @returns Promise resolving to the invocation result (type unknown until API behavior is confirmed) + * @example + * ```ts + * // Invoke for all rows + * await db.webhook.invoke(1); + * + * // Invoke for specific rows + * await db.webhook.invoke(1, { rowIDs: [63, 61] }); + * ``` + */ + async invoke( + webhookId: number, + options?: { rowIDs?: number[] }, + executeOptions?: ExecuteMethodOptions, + ): Promise { + const body: { rowIDs?: number[] } = {}; + if (options?.rowIDs !== undefined) { + body.rowIDs = options.rowIDs; + } + + const result = await this.context._makeRequest( + `/${this.databaseName}/Webhook.Invoke(${webhookId})`, + { + method: "POST", + body: Object.keys(body).length > 0 ? JSON.stringify(body) : undefined, + ...executeOptions, + }, + ); + + if (result.error) { + throw result.error; + } + + return result.data; + } +} diff --git a/packages/fmodata/src/index.ts b/packages/fmodata/src/index.ts index 745d4f99..a2fe9340 100644 --- a/packages/fmodata/src/index.ts +++ b/packages/fmodata/src/index.ts @@ -67,6 +67,12 @@ export type { TimestampField, ContainerField, } from "./client/schema-manager"; +export type { + Webhook, + WebhookInfo, + WebhookListResponse, + WebhookAddResponse, +} from "./client/webhook-builder"; // Utility types for type annotations export type { diff --git a/packages/fmodata/src/orm/field-builders.ts b/packages/fmodata/src/orm/field-builders.ts index d7acbaa7..b856d1d5 100644 --- a/packages/fmodata/src/orm/field-builders.ts +++ b/packages/fmodata/src/orm/field-builders.ts @@ -29,6 +29,7 @@ export class FieldBuilder< private _outputValidator?: StandardSchemaV1; private _inputValidator?: StandardSchemaV1; private _fieldType: string; + private _comment?: string; constructor(fieldType: string) { this._fieldType = fieldType; @@ -36,11 +37,17 @@ export class 
FieldBuilder< /** * Mark this field as the primary key for the table. - * Primary keys are automatically read-only. + * Primary keys are automatically read-only and non-nullable. */ - primaryKey(): FieldBuilder { + primaryKey(): FieldBuilder< + NonNullable, + NonNullable, + NonNullable, + true + > { const builder = this._clone() as any; builder._primaryKey = true; + builder._notNull = true; // Primary keys are automatically non-nullable builder._readOnly = true; // Primary keys are automatically read-only return builder; } @@ -114,6 +121,19 @@ export class FieldBuilder< return builder; } + /** + * Add a comment to this field for metadata purposes. + * This helps future developers understand the purpose of the field. + * + * @example + * textField().comment("Account name of the user who last modified each record") + */ + comment(comment: string): FieldBuilder { + const builder = this._clone(); + builder._comment = comment; + return builder; + } + /** * Get the metadata configuration for this field. 
* @internal Used by fmTableOccurrence to extract field configuration @@ -127,6 +147,7 @@ export class FieldBuilder< entityId: this._entityId, outputValidator: this._outputValidator, inputValidator: this._inputValidator, + comment: this._comment, }; } @@ -144,6 +165,7 @@ export class FieldBuilder< builder._entityId = this._entityId; builder._outputValidator = this._outputValidator; builder._inputValidator = this._inputValidator; + builder._comment = this._comment; return builder; } } diff --git a/packages/fmodata/src/orm/table.ts b/packages/fmodata/src/orm/table.ts index b61d9d73..3bd2808e 100644 --- a/packages/fmodata/src/orm/table.ts +++ b/packages/fmodata/src/orm/table.ts @@ -2,7 +2,7 @@ import type { StandardSchemaV1 } from "@standard-schema/spec"; import { FieldBuilder, type ContainerDbType } from "./field-builders"; import type { FieldBuilder as FieldBuilderType } from "./field-builders"; import { Column, createColumn } from "./column"; -import { z } from "zod/v4"; +// import { z } from "zod/v4"; /** * Extract the output type from a FieldBuilder. @@ -89,6 +89,7 @@ const FMTableNavigationPaths = Symbol.for("fmodata:FMTableNavigationPaths"); const FMTableDefaultSelect = Symbol.for("fmodata:FMTableDefaultSelect"); const FMTableBaseTableConfig = Symbol.for("fmodata:FMTableBaseTableConfig"); const FMTableUseEntityIds = Symbol.for("fmodata:FMTableUseEntityIds"); +const FMTableComment = Symbol.for("fmodata:FMTableComment"); /** * Base table class with Symbol-based internal properties. 
@@ -113,6 +114,7 @@ export class FMTable< NavigationPaths: FMTableNavigationPaths, DefaultSelect: FMTableDefaultSelect, BaseTableConfig: FMTableBaseTableConfig, + Comment: FMTableComment, }; /** @internal */ @@ -125,7 +127,10 @@ export class FMTable< [FMTableUseEntityIds]?: boolean; /** @internal */ - [FMTableSchema]: StandardSchemaV1>; + [FMTableComment]?: string; + + /** @internal */ + [FMTableSchema]: Partial>; /** @internal */ [FMTableFields]: TFields; @@ -141,8 +146,8 @@ export class FMTable< /** @internal */ [FMTableBaseTableConfig]: { - schema: Record; - inputSchema?: Record; + schema: Partial>; + inputSchema?: Partial>; idField?: keyof TFields; required: readonly (keyof TFields)[]; readOnly: readonly (keyof TFields)[]; @@ -154,13 +159,14 @@ export class FMTable< name: TName; entityId?: `FMTID:${string}`; useEntityIds?: boolean; - schema: StandardSchemaV1>; + comment?: string; + schema: Partial>; fields: TFields; navigationPaths: TNavigationPaths; defaultSelect: "all" | "schema" | Record>; baseTableConfig: { - schema: Record; - inputSchema?: Record; + schema: Partial>; + inputSchema?: Partial>; idField?: keyof TFields; required: readonly (keyof TFields)[]; readOnly: readonly (keyof TFields)[]; @@ -171,6 +177,7 @@ export class FMTable< this[FMTableName] = config.name; this[FMTableEntityId] = config.entityId; this[FMTableUseEntityIds] = config.useEntityIds; + this[FMTableComment] = config.comment; this[FMTableSchema] = config.schema; this[FMTableFields] = config.fields; this[FMTableNavigationPaths] = config.navigationPaths; @@ -267,6 +274,9 @@ export interface FMTableOccurrenceOptions< /** The entity ID (FMTID) for this table occurrence */ entityId?: `FMTID:${string}`; + /** The comment for this table */ + comment?: string; + /** * Default select behavior: * - "all": Select all fields (including related tables) @@ -358,39 +368,14 @@ export function fmTableOccurrence< } // Build Zod schema from field builders (output/read validators) - const zodSchema: Record = 
{}; + const outputSchema: Partial> = {}; // Build input schema from field builders (input/write validators) const inputSchema: Record = {}; for (const { fieldName, config } of fieldConfigs) { - // Use outputValidator if provided, otherwise create a basic validator + // Use outputValidator if provided if (config.outputValidator) { - zodSchema[fieldName] = config.outputValidator; - } else { - // Create a default validator based on field type and nullability - let validator: any; - switch (config.fieldType) { - case "text": - case "date": - case "time": - case "timestamp": - case "container": - case "calculated": - validator = z.string(); - break; - case "number": - validator = z.number(); - break; - default: - validator = z.unknown(); - } - - // Add nullability if not marked as notNull - if (!config.notNull) { - validator = validator.nullable(); - } - - zodSchema[fieldName] = validator; + outputSchema[fieldName as keyof TFields] = config.outputValidator; } // Store inputValidator if provided (for write operations) @@ -399,18 +384,13 @@ export function fmTableOccurrence< } } - // Create a schema validator for the entire table - const tableSchema = z.object(zodSchema) as unknown as StandardSchemaV1< - any, - InferSchemaFromFields - >; - // Build BaseTable-compatible config const baseTableConfig = { - schema: zodSchema as Record, - inputSchema: (Object.keys(inputSchema).length > 0 - ? inputSchema - : undefined) as Record | undefined, + schema: outputSchema as Partial>, + inputSchema: + Object.keys(inputSchema).length > 0 + ? 
(inputSchema as Partial>) + : undefined, idField: idField as keyof TFields | undefined, required: required as readonly (keyof TFields)[], readOnly: readOnly as readonly (keyof TFields)[], @@ -449,7 +429,8 @@ export function fmTableOccurrence< name, entityId: options?.entityId, useEntityIds: options?.useEntityIds, - schema: tableSchema, + comment: options?.comment, + schema: outputSchema, fields, navigationPaths, defaultSelect: resolvedDefaultSelect, @@ -621,11 +602,11 @@ export function getTableEntityId>( /** * Get the schema validator from an FMTable instance. * @param table - FMTable instance - * @returns The StandardSchemaV1 validator + * @returns The StandardSchemaV1 validator record (partial - only fields with validators) */ export function getTableSchema>( table: T, -): StandardSchemaV1 { +): Partial> { return table[FMTableSchema]; } @@ -732,6 +713,17 @@ export function getTableId>(table: T): string { return table[FMTableEntityId] ?? table[FMTableName]; } +/** + * Get the comment from an FMTable instance. + * @param table - FMTable instance + * @returns The comment string or undefined if not set + */ +export function getTableComment>( + table: T, +): string | undefined { + return table[FMTableComment]; +} + /** * Get all columns from a table as an object. * Useful for selecting all fields except some using destructuring. 
diff --git a/packages/fmodata/src/types.ts b/packages/fmodata/src/types.ts index 4c310d0d..820fe982 100644 --- a/packages/fmodata/src/types.ts +++ b/packages/fmodata/src/types.ts @@ -32,10 +32,16 @@ export interface ExecutableBuilder { export interface ExecutionContext { _makeRequest( url: string, - options?: RequestInit & FFetchOptions & { useEntityIds?: boolean }, + options?: RequestInit & + FFetchOptions & { + useEntityIds?: boolean; + includeSpecialColumns?: boolean; + }, ): Promise>; _setUseEntityIds?(useEntityIds: boolean): void; _getUseEntityIds?(): boolean; + _setIncludeSpecialColumns?(includeSpecialColumns: boolean): void; + _getIncludeSpecialColumns?(): boolean; _getBaseUrl?(): string; _getLogger?(): InternalLogger; } @@ -46,7 +52,7 @@ export type InferSchemaType> = { : never; }; -export type WithSystemFields = +export type WithSpecialColumns = T extends Record ? T & { ROWID: number; @@ -54,15 +60,12 @@ export type WithSystemFields = } : never; -// Helper type to exclude system fields from a union of keys +// Helper type to exclude special columns from a union of keys export type ExcludeSystemFields = Exclude< T, "ROWID" | "ROWMODID" >; -// Helper type to omit system fields from an object type -export type OmitSystemFields = Omit; - // OData record metadata fields (present on each record) export type ODataRecordMetadata = { "@id": string; @@ -158,6 +161,11 @@ export type ExecuteOptions = { * Overrides the default behavior of the database to use entity IDs (rather than field names) in THIS REQUEST ONLY */ useEntityIds?: boolean; + /** + * Overrides the default behavior of the database to include special columns (ROWID and ROWMODID) in THIS REQUEST ONLY. + * Note: Special columns are only included when there is no $select query. + */ + includeSpecialColumns?: boolean; }; /** @@ -213,6 +221,54 @@ export type ConditionallyWithODataAnnotations< } : T; +/** + * Normalizes includeSpecialColumns with a database-level default. 
+ * Uses distributive conditional types to handle unions correctly. + * @template IncludeSpecialColumns - The includeSpecialColumns value from execute options + * @template DatabaseDefault - The database-level includeSpecialColumns setting (defaults to false) + */ +export type NormalizeIncludeSpecialColumns< + IncludeSpecialColumns extends boolean | undefined, + DatabaseDefault extends boolean = false, +> = [IncludeSpecialColumns] extends [true] + ? true + : [IncludeSpecialColumns] extends [false] + ? false + : DatabaseDefault; // When undefined, use database-level default + +/** + * Conditionally adds ROWID and ROWMODID special columns to a type. + * Special columns are only included when: + * - includeSpecialColumns is true AND + * - hasSelect is false (no $select query was applied) AND + * - T is an object type (not a primitive like string or number) + * + * Handles both single objects and arrays of objects. + */ +export type ConditionallyWithSpecialColumns< + T, + IncludeSpecialColumns extends boolean, + HasSelect extends boolean, +> = IncludeSpecialColumns extends true + ? HasSelect extends false + ? // Handle array types + T extends readonly (infer U)[] + ? U extends Record + ? (U & { + ROWID: number; + ROWMODID: number; + })[] + : T + : // Handle single object types + T extends Record + ? 
T & { + ROWID: number; + ROWMODID: number; + } + : T // Don't add special columns to primitives (e.g., single field queries) + : T + : T; + // Helper type to extract schema from a FMTable export type ExtractSchemaFromOccurrence = Occ extends { baseTable: { schema: infer S }; diff --git a/packages/fmodata/src/validation.ts b/packages/fmodata/src/validation.ts index 116ba375..6382bf3d 100644 --- a/packages/fmodata/src/validation.ts +++ b/packages/fmodata/src/validation.ts @@ -19,7 +19,7 @@ import { */ export async function validateAndTransformInput>( data: Partial, - inputSchema?: Record, + inputSchema?: Partial>, ): Promise> { // If no input schema, return data as-is if (!inputSchema) { @@ -30,6 +30,9 @@ export async function validateAndTransformInput>( // Process each field that has an input validator for (const [fieldName, fieldSchema] of Object.entries(inputSchema)) { + // Skip if no schema for this field + if (!fieldSchema) continue; + // Only process fields that are present in the input data if (fieldName in data) { const inputValue = data[fieldName]; @@ -83,7 +86,7 @@ export async function validateAndTransformInput>( // Type for expand validation configuration export type ExpandValidationConfig = { relation: string; - targetSchema?: Record; + targetSchema?: Partial>; targetTable?: FMTable; table?: FMTable; // For transformation selectedFields?: string[]; @@ -96,9 +99,10 @@ export type ExpandValidationConfig = { */ export async function validateRecord>( record: any, - schema: Record | undefined, + schema: Partial> | undefined, selectedFields?: (keyof T)[], expandConfigs?: ExpandValidationConfig[], + includeSpecialColumns?: boolean, ): Promise< | { valid: true; data: T & ODataRecordMetadata } | { valid: false; error: ValidationError } @@ -112,15 +116,33 @@ export async function validateRecord>( if (editLink) metadata["@editLink"] = editLink; // If no schema, just return the data with metadata + // Exclude special columns if includeSpecialColumns is false if 
(!schema) { + const { ROWID, ROWMODID, ...restWithoutSystemFields } = rest; + const specialColumns: { ROWID?: number; ROWMODID?: number } = {}; + if (includeSpecialColumns) { + if (ROWID !== undefined) specialColumns.ROWID = ROWID; + if (ROWMODID !== undefined) specialColumns.ROWMODID = ROWMODID; + } return { valid: true, - data: { ...rest, ...metadata } as T & ODataRecordMetadata, + data: { + ...restWithoutSystemFields, + ...specialColumns, + ...metadata, + } as T & ODataRecordMetadata, }; } - // Filter out FileMaker system fields that shouldn't be in responses by default + // Extract FileMaker special columns - preserve them if includeSpecialColumns is enabled + // Note: Special columns are excluded when using single() method (per OData spec behavior) const { ROWID, ROWMODID, ...restWithoutSystemFields } = rest; + const specialColumns: { ROWID?: number; ROWMODID?: number } = {}; + // Only include special columns if explicitly enabled (they're excluded for single() by design) + if (includeSpecialColumns) { + if (ROWID !== undefined) specialColumns.ROWID = ROWID; + if (ROWMODID !== undefined) specialColumns.ROWMODID = ROWMODID; + } // If selected fields are specified, validate only those fields if (selectedFields && selectedFields.length > 0) { @@ -170,8 +192,18 @@ export async function validateRecord>( } } else { // For fields not in schema (like when explicitly selecting ROWID/ROWMODID) - // include them from the original response - validatedRecord[fieldName] = rest[fieldName]; + // Check if it's a special column that was destructured earlier + if (fieldName === "ROWID" || fieldName === "ROWMODID") { + // Use the destructured value since it was removed from rest + if (fieldName === "ROWID" && ROWID !== undefined) { + validatedRecord[fieldName] = ROWID; + } else if (fieldName === "ROWMODID" && ROWMODID !== undefined) { + validatedRecord[fieldName] = ROWMODID; + } + } else { + // For other fields not in schema, include them from the original response + 
validatedRecord[fieldName] = rest[fieldName]; + } } } @@ -229,6 +261,7 @@ export async function validateRecord>( expandConfig.targetSchema, expandConfig.selectedFields as string[] | undefined, expandConfig.nestedExpands, + includeSpecialColumns, ); if (!itemValidation.valid) { return { @@ -253,6 +286,7 @@ export async function validateRecord>( expandConfig.targetSchema, expandConfig.selectedFields as string[] | undefined, expandConfig.nestedExpands, + includeSpecialColumns, ); if (!itemValidation.valid) { return { @@ -273,17 +307,21 @@ export async function validateRecord>( } } - // Merge validated data with metadata + // Merge validated data with metadata and special columns return { valid: true, - data: { ...validatedRecord, ...metadata } as T & ODataRecordMetadata, + data: { ...validatedRecord, ...specialColumns, ...metadata } as T & + ODataRecordMetadata, }; } - // Validate all fields in schema, but exclude ROWID/ROWMODID by default + // Validate all fields in schema, but exclude ROWID/ROWMODID by default (unless includeSpecialColumns is enabled) const validatedRecord: Record = { ...restWithoutSystemFields }; for (const [fieldName, fieldSchema] of Object.entries(schema)) { + // Skip if no schema for this field + if (!fieldSchema) continue; + const input = rest[fieldName]; try { let result = fieldSchema["~standard"].validate(input); @@ -378,6 +416,7 @@ export async function validateRecord>( expandConfig.targetSchema, expandConfig.selectedFields as string[] | undefined, expandConfig.nestedExpands, + includeSpecialColumns, ); if (!itemValidation.valid) { return { @@ -402,6 +441,7 @@ export async function validateRecord>( expandConfig.targetSchema, expandConfig.selectedFields as string[] | undefined, expandConfig.nestedExpands, + includeSpecialColumns, ); if (!itemValidation.valid) { return { @@ -424,7 +464,8 @@ export async function validateRecord>( return { valid: true, - data: { ...validatedRecord, ...metadata } as T & ODataRecordMetadata, + data: { 
...validatedRecord, ...specialColumns, ...metadata } as T & + ODataRecordMetadata, }; } @@ -433,9 +474,10 @@ export async function validateRecord>( */ export async function validateListResponse>( response: any, - schema: Record | undefined, + schema: Partial> | undefined, selectedFields?: (keyof T)[], expandConfigs?: ExpandValidationConfig[], + includeSpecialColumns?: boolean, ): Promise< | { valid: true; data: (T & ODataRecordMetadata)[] } | { valid: false; error: ResponseStructureError | ValidationError } @@ -471,6 +513,7 @@ export async function validateListResponse>( schema, selectedFields, expandConfigs, + includeSpecialColumns, ); if (!validation.valid) { @@ -494,10 +537,11 @@ export async function validateListResponse>( */ export async function validateSingleResponse>( response: any, - schema: Record | undefined, + schema: Partial> | undefined, selectedFields?: (keyof T)[], expandConfigs?: ExpandValidationConfig[], mode: "exact" | "maybe" = "maybe", + includeSpecialColumns?: boolean, ): Promise< | { valid: true; data: (T & ODataRecordMetadata) | null } | { valid: false; error: RecordCountMismatchError | ValidationError } @@ -539,6 +583,7 @@ export async function validateSingleResponse>( schema, selectedFields, expandConfigs, + includeSpecialColumns, ); if (!validation.valid) { diff --git a/packages/fmodata/tests/fixtures/responses.ts b/packages/fmodata/tests/fixtures/responses.ts index 9b9aa9b5..3936b467 100644 --- a/packages/fmodata/tests/fixtures/responses.ts +++ b/packages/fmodata/tests/fixtures/responses.ts @@ -18,7 +18,7 @@ * 2. Run: pnpm capture * 3. The captured response will be added to this file automatically * - * You can manually edit responses here if you need to modify test data. + * You MUST NOT manually edit this file. Any changes will be overwritten by the capture script. 
*/ export type MockResponse = { @@ -293,7 +293,7 @@ export const mockResponses = { headers: { "content-type": "application/json;charset=utf-8", location: - "https://acme-dev.ottomatic.cloud/fmi/odata/v4/fmdapi_test.fmp12/contacts(ROWID=11073)", + "https://acme-dev.ottomatic.cloud/fmi/odata/v4/fmdapi_test.fmp12/contacts(ROWID=11167)", }, response: null, }, @@ -305,19 +305,19 @@ export const mockResponses = { headers: { "content-type": "application/json;charset=utf-8", location: - "https://acme-dev.ottomatic.cloud/fmi/odata/v4/fmdapi_test.fmp12/contacts('F88124B8-53D1-482D-9EF9-08BA79702DA5')", + "https://acme-dev.ottomatic.cloud/fmi/odata/v4/fmdapi_test.fmp12/contacts('A51EAF8A-68DF-426D-9683-AFF5AAB3CD6D')", }, response: { "@context": "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts/$entity", "@id": - "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('F88124B8-53D1-482D-9EF9-08BA79702DA5')", + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('A51EAF8A-68DF-426D-9683-AFF5AAB3CD6D')", "@editLink": - "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('F88124B8-53D1-482D-9EF9-08BA79702DA5')", - PrimaryKey: "F88124B8-53D1-482D-9EF9-08BA79702DA5", - CreationTimestamp: "2025-12-15T11:32:53Z", + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('A51EAF8A-68DF-426D-9683-AFF5AAB3CD6D')", + PrimaryKey: "A51EAF8A-68DF-426D-9683-AFF5AAB3CD6D", + CreationTimestamp: "2025-12-17T09:15:16Z", CreatedBy: "admin", - ModificationTimestamp: "2025-12-15T11:32:53Z", + ModificationTimestamp: "2025-12-17T09:15:16Z", ModifiedBy: "admin", name: "Capture test", hobby: null, @@ -666,4 +666,172 @@ export const mockResponses = { ], }, }, + + "webhook-list": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/Webhook.GetAll", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + 
"https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#WebHook Processor", + Status: "ACTIVE", + WebHook: [ + { + webHookID: 1, + tableName: "contacts", + url: "https://example.com/webhook", + headers: { + "X-Custom-Header": "test-value", + }, + notifySchemaChanges: false, + select: "", + filter: "", + pendingOperations: [], + }, + { + webHookID: 2, + tableName: "contacts", + url: "https://example.com/webhook", + headers: { + "X-Custom-Header": "test-value", + }, + notifySchemaChanges: false, + select: "", + filter: "", + pendingOperations: [], + }, + { + webHookID: 3, + tableName: "contacts", + url: "https://example.com/webhook", + headers: { + "X-Custom-Header": "test-value", + }, + notifySchemaChanges: false, + select: "", + filter: "", + pendingOperations: [], + }, + { + webHookID: 6, + tableName: "contacts", + url: "https://example.com/webhook", + headers: { + "X-Custom-Header": "test-value", + }, + notifySchemaChanges: false, + select: "name, age", + filter: "name eq 'John'", + pendingOperations: [], + }, + { + webHookID: 4, + tableName: "contacts", + url: "https://example.com/webhook", + headers: { + "X-Custom-Header": "test-value", + }, + notifySchemaChanges: false, + select: "", + filter: "", + pendingOperations: [], + }, + { + webHookID: 7, + tableName: "contacts", + url: "https://example.com/webhook", + headers: { + "X-Custom-Header": "test-value", + }, + notifySchemaChanges: false, + select: "name, age", + filter: "name eq 'John'", + pendingOperations: [], + }, + ], + }, + }, + + "webhook-add": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/Webhook.Add", + method: "POST", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + webHookResult: { + webHookID: 5, + }, + }, + }, + + "webhook-add-with-options": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/Webhook.Add", + method: "POST", + status: 200, + headers: { + "content-type": 
"application/json;charset=utf-8", + }, + response: { + webHookResult: { + webHookID: 8, + }, + }, + }, + + "webhook-get": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/Webhook.Get(1)", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#WebHook", + webHookID: 1, + tableName: "contacts", + url: "https://example.com/webhook", + headers: { + "X-Custom-Header": "test-value", + }, + notifySchemaChanges: false, + select: "", + filter: "", + pendingOperations: [], + }, + }, + + "webhook-get-not-found": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/Webhook.Get(99999)", + method: "GET", + status: 404, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + error: { + code: "-1061", + message: "Specified WebHook not found", + }, + }, + }, + + "webhook-delete": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/Webhook.Delete(1)", + method: "POST", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + webHookResult: { + webHookID: 1, + }, + }, + }, } satisfies MockResponses; diff --git a/packages/fmodata/tests/include-special-columns.test.ts b/packages/fmodata/tests/include-special-columns.test.ts new file mode 100644 index 00000000..2084eb36 --- /dev/null +++ b/packages/fmodata/tests/include-special-columns.test.ts @@ -0,0 +1,568 @@ +/** + * Tests for includeSpecialColumns feature + * + * These tests verify that the includeSpecialColumns option can be set at the database level + * and overridden at the request level, and that special columns (ROWID and ROWMODID) are + * included in responses when the header is set and no $select query is applied. 
+ */ + +import { describe, it, expect, expectTypeOf, assert } from "vitest"; +import { fmTableOccurrence, textField } from "@proofkit/fmodata"; +import { simpleMock } from "./utils/mock-fetch"; +import { createMockClient } from "./utils/test-setup"; +import { first } from "es-toolkit/compat"; + +// Create a simple table occurrence for testing +const contactsTO = fmTableOccurrence("contacts", { + id: textField().primaryKey(), + name: textField(), +}); + +const connection = createMockClient(); + +describe("includeSpecialColumns feature", () => { + it("should include special columns header when enabled at database level", async () => { + const db = connection.database("TestDB", { + includeSpecialColumns: true, + }); + + let preferHeader: string | null = null; + let reqUrl: string | null = null; + const { data } = await db + .from(contactsTO) + .list() + .execute({ + hooks: { + before: async (req) => { + const headers = req.headers; + reqUrl = req.url; + preferHeader = headers.get("Prefer"); + return; + }, + }, + fetchHandler: simpleMock({ + body: { + value: [{ id: "1", name: "John", ROWID: 123, ROWMODID: 456 }], + }, + status: 200, + }), + }); + expect(preferHeader).toBe("fmodata.include-specialcolumns"); + const parsedUrl = new URL(reqUrl!); + const selectParam = parsedUrl.searchParams.get("$select"); + // since we're automatically adding a $select parameter (defaultSelect: "schema"), we need to include the special columns in the select parameter + expect(selectParam).toContain("ROWID"); + expect(selectParam).toContain("ROWMODID"); + + const firstRecord = data![0]!; + + // type checks + expectTypeOf(firstRecord).toHaveProperty("ROWID"); + expectTypeOf(firstRecord).toHaveProperty("ROWMODID"); + firstRecord.ROWID; + firstRecord.ROWMODID; + + // runtime check + expect(firstRecord).toHaveProperty("ROWID"); + expect(firstRecord).toHaveProperty("ROWMODID"); + }); + + it("should not add $select parameter when defaultSelect is not 'schema'", async () => { + const db = 
connection.database("TestDB", { includeSpecialColumns: true }); + + const contactsAll = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey(), + name: textField(), + }, + { defaultSelect: "all" }, + ); + + let preferHeader: string | null = null; + let reqUrl: string | null = null; + const { data } = await db + .from(contactsAll) + .list() + .execute({ + hooks: { + before: async (req) => { + const headers = req.headers; + preferHeader = headers.get("Prefer"); + reqUrl = req.url; + return; + }, + }, + fetchHandler: simpleMock({ + body: { + value: [{ id: "1", name: "John", ROWID: 123, ROWMODID: 456 }], + }, + status: 200, + }), + }); + const parsedUrl = new URL(reqUrl!); + const selectParam = parsedUrl.searchParams.get("$select"); + // don't add $select parameter when defaultSelect is not 'schema' + expect(selectParam).toBeNull(); + + const firstRecord = data![0]!; + + // type checks + expectTypeOf(firstRecord).toHaveProperty("ROWID"); + expectTypeOf(firstRecord).toHaveProperty("ROWMODID"); + firstRecord.ROWID; + firstRecord.ROWMODID; + + // runtime check + expect(firstRecord).toHaveProperty("ROWID"); + expect(firstRecord).toHaveProperty("ROWMODID"); + }); + + it("should not include special columns header when disabled at database level", async () => { + const db = connection.database("TestDB", { + includeSpecialColumns: false, + }); + + let preferHeader: string | null = null; + const { data } = await db + .from(contactsTO) + .list() + .execute({ + hooks: { + before: async (req) => { + const headers = req.headers; + preferHeader = headers.get("Prefer"); + return; + }, + }, + fetchHandler: simpleMock({ + body: { value: [{ id: "1", name: "John" }] }, + status: 200, + }), + }); + expect(preferHeader).toBeNull(); + + const firstRecord = data![0]!; + + // type checks + expectTypeOf(firstRecord).not.toHaveProperty("ROWID"); + expectTypeOf(firstRecord).not.toHaveProperty("ROWMODID"); + // @ts-expect-error + firstRecord.ROWID; + // @ts-expect-error + 
firstRecord.ROWMODID; + + // runtime check + expect(firstRecord).not.toHaveProperty("ROWID"); + expect(firstRecord).not.toHaveProperty("ROWMODID"); + }); + + it("should be disabled by default at database level", async () => { + const db = connection.database("TestDB"); + + let preferHeader: string | null = null; + const { data } = await db + .from(contactsTO) + .list() + .execute({ + hooks: { + before: async (req) => { + const headers = req.headers; + preferHeader = headers.get("Prefer"); + return; + }, + }, + fetchHandler: simpleMock({ + body: { value: [{ id: "1", name: "John" }] }, + status: 200, + }), + }); + expect(preferHeader).toBeNull(); + + const firstRecord = data![0]!; + + // type checks + expectTypeOf(firstRecord).not.toHaveProperty("ROWID"); + expectTypeOf(firstRecord).not.toHaveProperty("ROWMODID"); + // @ts-expect-error + firstRecord.ROWID; + // @ts-expect-error + firstRecord.ROWMODID; + + // runtime check + expect(firstRecord).not.toHaveProperty("ROWID"); + expect(firstRecord).not.toHaveProperty("ROWMODID"); + }); + + it("should allow overriding includeSpecialColumns at request level", async () => { + const db = connection.database("TestDB", { + includeSpecialColumns: false, + }); + + // First request: use default (should NOT have header) + let preferHeader1: string | null = null; + const { data: data1 } = await db + .from(contactsTO) + .list() + .execute({ + hooks: { + before: async (req) => { + const headers = req.headers; + preferHeader1 = headers.get("Prefer"); + return; + }, + }, + fetchHandler: simpleMock({ + body: { value: [{ id: "1", name: "John" }] }, + status: 200, + }), + }); + + const firstRecord1 = data1![0]!; + + // type checks + expectTypeOf(firstRecord1).not.toHaveProperty("ROWID"); + expectTypeOf(firstRecord1).not.toHaveProperty("ROWMODID"); + // @ts-expect-error + firstRecord1.ROWID; + // @ts-expect-error + firstRecord1.ROWMODID; + + // runtime check + expect(firstRecord1).not.toHaveProperty("ROWID"); + 
expect(firstRecord1).not.toHaveProperty("ROWMODID"); + + // Second request: explicitly enable for this request only + let preferHeader2: string | null = null; + const { data: data2 } = await db + .from(contactsTO) + .list() + .execute({ + includeSpecialColumns: true, + hooks: { + before: async (req) => { + const headers = req.headers; + preferHeader2 = headers.get("Prefer"); + return; + }, + }, + fetchHandler: simpleMock({ + body: { + value: [{ id: "1", name: "John", ROWID: 123, ROWMODID: 456 }], + }, + status: 200, + }), + }); + + const firstRecord2 = data2![0]!; + + // type checks + expectTypeOf(firstRecord2).toHaveProperty("ROWID"); + expectTypeOf(firstRecord2).toHaveProperty("ROWMODID"); + firstRecord2.ROWID; + firstRecord2.ROWMODID; + + // runtime check + expect(firstRecord2).toHaveProperty("ROWID"); + expect(firstRecord2).toHaveProperty("ROWMODID"); + + // Third request: explicitly disable for this request + let preferHeader3: string | null = null; + await db + .from(contactsTO) + .list() + .execute({ + includeSpecialColumns: false, + hooks: { + before: async (req) => { + const headers = req.headers; + preferHeader3 = headers.get("Prefer"); + return; + }, + }, + fetchHandler: simpleMock({ body: { value: [] }, status: 200 }), + }); + + expect(preferHeader1).toBeNull(); + expect(preferHeader2).toBe("fmodata.include-specialcolumns"); + expect(preferHeader3).toBeNull(); + }); + + it("should combine includeSpecialColumns with useEntityIds in Prefer header", async () => { + const contactsTOWithEntityIds = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + }, + { + entityId: "FMTID:100", + }, + ); + + const db = connection.database("TestDB", { + useEntityIds: true, + includeSpecialColumns: true, + }); + + let preferHeader: string | null = null; + const { data } = await db + .from(contactsTOWithEntityIds) + .list() + .execute({ + hooks: { + before: async (req) => { + const headers = 
req.headers; + preferHeader = headers.get("Prefer"); + return; + }, + }, + fetchHandler: simpleMock({ + body: { + value: [{ id: "1", name: "John", ROWID: 123, ROWMODID: 456 }], + }, + status: 200, + }), + }); + expect(preferHeader).toContain("fmodata.entity-ids"); + expect(preferHeader).toContain("fmodata.include-specialcolumns"); + // Should be comma-separated + expect(preferHeader).not.toBeNull(); + const preferValues = preferHeader!.split(", "); + expect(preferValues.length).toBe(2); + expect(preferValues).toContain("fmodata.entity-ids"); + expect(preferValues).toContain("fmodata.include-specialcolumns"); + + const firstRecord = data![0]!; + + // type checks + expectTypeOf(firstRecord).toHaveProperty("ROWID"); + expectTypeOf(firstRecord).toHaveProperty("ROWMODID"); + firstRecord.ROWID; + firstRecord.ROWMODID; + + // runtime check + expect(firstRecord).toHaveProperty("ROWID"); + expect(firstRecord).toHaveProperty("ROWMODID"); + }); + + it("should work with get() method for single records", async () => { + const db = connection.database("TestDB", { + includeSpecialColumns: true, + }); + + let preferHeader: string | null = null; + const { data } = await db + .from(contactsTO) + .get("123") + .execute({ + hooks: { + before: async (req) => { + const headers = req.headers; + preferHeader = headers.get("Prefer"); + return; + }, + }, + fetchHandler: simpleMock({ + body: { + id: "123", + name: "John", + ROWID: 123, + ROWMODID: 456, + }, + status: 200, + }), + }); + expect(preferHeader).toBe("fmodata.include-specialcolumns"); + + assert(data, "data is undefined"); + + // type checks + expectTypeOf(data).toHaveProperty("ROWID"); + expectTypeOf(data).toHaveProperty("ROWMODID"); + data.ROWID; + data.ROWMODID; + + // runtime check + expect(data).toHaveProperty("ROWID"); + expect(data).toHaveProperty("ROWMODID"); + }); + + it("should not include special columns when $select is applied", async () => { + const db = connection.database("TestDB", { + includeSpecialColumns: true, + 
}); + + // FileMaker OData requires ROWID/ROWMODID to be explicitly listed in $select + // to be returned (they are only included when explicitly requested or when header is set and no $select is applied) + let preferHeader: string | null = null; + const { data } = await db + .from(contactsTO) + .list() + .select({ name: contactsTO.name }) + .execute({ + hooks: { + before: async (req) => { + const headers = req.headers; + // Header should still be sent, but server won't return special columns + preferHeader = headers.get("Prefer"); + return; + }, + }, + fetchHandler: simpleMock({ + body: { + value: [{ name: "John" }], // No ROWID or ROWMODID + }, + status: 200, + }), + }); + expect(preferHeader).toBe("fmodata.include-specialcolumns"); + + const firstRecord = data![0]!; + + // type checks + expectTypeOf(firstRecord).not.toHaveProperty("ROWID"); + expectTypeOf(firstRecord).not.toHaveProperty("ROWMODID"); + // @ts-expect-error + firstRecord.ROWID; + // @ts-expect-error + firstRecord.ROWMODID; + + // runtime check + expect(firstRecord).not.toHaveProperty("ROWID"); + expect(firstRecord).not.toHaveProperty("ROWMODID"); + }); + + it("should not append ROWID/ROWMODID to explicit $select unless requested via systemColumns", () => { + const db = connection.database("TestDB", { + includeSpecialColumns: true, + }); + + // Explicit select() should remain exact (no implicit system columns) + const queryString = db + .from(contactsTO) + .list() + .select({ name: contactsTO.name }) + .getQueryString(); + + expect(queryString).toContain("$select="); + expect(queryString).toContain("name"); + expect(queryString).not.toContain("ROWID"); + expect(queryString).not.toContain("ROWMODID"); + + // But system columns should still be selectable when explicitly requested + const queryStringWithSystemCols = db + .from(contactsTO) + .list() + .select({ name: contactsTO.name }, { ROWID: true, ROWMODID: true }) + .getQueryString(); + + expect(queryStringWithSystemCols).toContain("ROWID"); + 
expect(queryStringWithSystemCols).toContain("ROWMODID"); + }); + + it("should work with single() method", async () => { + const db = connection.database("TestDB", { + includeSpecialColumns: true, + }); + + let preferHeader: string | null = null; + const { data } = await db + .from(contactsTO) + .list() + .single() + .execute({ + hooks: { + before: async (req) => { + const headers = req.headers; + preferHeader = headers.get("Prefer"); + return; + }, + }, + fetchHandler: simpleMock({ + body: { + id: "123", + name: "John", + ROWID: 123, + ROWMODID: 456, + }, + status: 200, + }), + }); + expect(preferHeader).toBe("fmodata.include-specialcolumns"); + + assert(data, "data is undefined"); + + // type checks + expectTypeOf(data).toHaveProperty("ROWID"); + expectTypeOf(data).toHaveProperty("ROWMODID"); + data.ROWID; + data.ROWMODID; + + // runtime check + expect(data).toHaveProperty("ROWID"); + expect(data).toHaveProperty("ROWMODID"); + }); + + it("should not include special columns if getSingleField() is used", async () => { + const db = connection.database("TestDB", { + includeSpecialColumns: true, + }); + + let preferHeader: string | null = null; + const { data } = await db + .from(contactsTO) + .get("123") + .getSingleField(contactsTO.name) + .execute({ + hooks: { + before: async (req) => { + const headers = req.headers; + preferHeader = headers.get("Prefer"); + return; + }, + }, + fetchHandler: simpleMock({ body: { value: "John" }, status: 200 }), + }); + expect(preferHeader).toBe("fmodata.include-specialcolumns"); + + expectTypeOf(data).not.toHaveProperty("ROWID"); + expectTypeOf(data).not.toHaveProperty("ROWMODID"); + // @ts-expect-error + data.ROWID; + // @ts-expect-error + data.ROWMODID; + }); + + it("should still allow you to select ROWID or ROWMODID in select()", async () => { + const db = connection.database("TestDB"); + + const { data } = await db + .from(contactsTO) + .list() + .select( + { + id: contactsTO.id, + }, + { ROWID: true, ROWMODID: true }, + ) + 
.execute({ + fetchHandler: simpleMock({ + body: { + value: [{ id: "1", ROWID: 123, ROWMODID: 456 }], + }, + status: 200, + }), + }); + const firstRecord = data![0]!; + + expectTypeOf(firstRecord).toHaveProperty("ROWID"); + expectTypeOf(firstRecord).toHaveProperty("ROWMODID"); + firstRecord.ROWID; + firstRecord.ROWMODID; + + // runtime check + expect(firstRecord).toHaveProperty("ROWID"); + expect(firstRecord).toHaveProperty("ROWMODID"); + }); +}); diff --git a/packages/fmodata/tests/tsconfig.build.json b/packages/fmodata/tests/tsconfig.build.json index b90b2dc4..55d2550f 100644 --- a/packages/fmodata/tests/tsconfig.build.json +++ b/packages/fmodata/tests/tsconfig.build.json @@ -35,3 +35,14 @@ ], "exclude": ["../src/**/*", "../dist/**/*.js", "../dist/**/*.js.map"] } + + + + + + + + + + + diff --git a/packages/fmodata/tests/typescript.test.ts b/packages/fmodata/tests/typescript.test.ts index 5b286569..ddb735f3 100644 --- a/packages/fmodata/tests/typescript.test.ts +++ b/packages/fmodata/tests/typescript.test.ts @@ -18,7 +18,7 @@ * helping ensure the API remains ergonomic and type-safe as the library evolves. 
*/ -import { describe, expect, it, expectTypeOf, beforeEach } from "vitest"; +import { describe, expect, it, expectTypeOf } from "vitest"; import { z } from "zod/v4"; import { fmTableOccurrence, @@ -28,6 +28,7 @@ import { FMTable, getTableColumns, eq, + type InferTableSchema, } from "@proofkit/fmodata"; import { createMockFetch } from "./utils/mock-fetch"; import { createMockClient, contacts, users } from "./utils/test-setup"; @@ -554,4 +555,20 @@ describe("fmodata", () => { void _typeChecks; }); }); + + describe("InferSchemaType", () => { + it("Primary key fields should not be nullable in the inferred schema", () => { + const specialUsers = fmTableOccurrence("specialUsers", { + id: textField().primaryKey(), + name: textField(), + }); + type SpecialUserSchema = InferTableSchema<typeof specialUsers>; + type IdField = SpecialUserSchema["id"]; + + const controlTest: string | null = null; + + // @ts-expect-error - id should not be nullable + const idData: IdField = null; + }); + }); }); diff --git a/packages/fmodata/tests/webhooks.test.ts b/packages/fmodata/tests/webhooks.test.ts new file mode 100644 index 00000000..9ee09571 --- /dev/null +++ b/packages/fmodata/tests/webhooks.test.ts @@ -0,0 +1,315 @@ +/** + * Webhook Manager Tests + * + * Tests for the WebhookManager class using mock responses. + * These tests verify that webhook methods correctly handle API responses + * and return properly typed data. 
+ */ + +import { describe, it, expect, assert } from "vitest"; +import { + FMServerConnection, + fmTableOccurrence, + textField, + type WebhookInfo, + type WebhookListResponse, + type WebhookAddResponse, + eq, +} from "@proofkit/fmodata"; +import { createMockFetch } from "./utils/mock-fetch"; +import { mockResponses } from "./fixtures/responses"; +import { createMockClient } from "./utils/test-setup"; + +describe("WebhookManager", () => { + const connection = createMockClient(); + const db = connection.database("fmdapi_test.fmp12"); + + // Create a simple table occurrence for testing + const contacts = fmTableOccurrence("contacts", { + PrimaryKey: textField().primaryKey(), + name: textField(), + }); + + describe("list()", () => { + it("should return a list of webhooks with status", async () => { + const result = await db.webhook.list({ + fetchHandler: createMockFetch(mockResponses["webhook-list"]), + }); + + expect(result).toBeDefined(); + expect(result.Status).toBe("ACTIVE"); + expect(Array.isArray(result.WebHook)).toBe(true); + expect(result.WebHook.length).toBeGreaterThan(0); + + // Extract expected data from mock response + const mockData = mockResponses["webhook-list"].response; + const expectedWebhooks = mockData.WebHook; + expect(result.WebHook.length).toBe(expectedWebhooks.length); + + const firstWebhook = result.WebHook[0]; + expect(firstWebhook).toBeDefined(); + if (!firstWebhook) throw new Error("Expected firstWebhook to be defined"); + + // Use the first webhook from the mock response as the expected value + const expectedFirstWebhook = expectedWebhooks[0]; + if (!expectedFirstWebhook) + throw new Error("Expected first webhook in mock response"); + expect(firstWebhook.webHookID).toBe(expectedFirstWebhook.webHookID); + expect(firstWebhook.tableName).toBe("contacts"); + expect(firstWebhook.url).toBe("https://example.com/webhook"); + expect(firstWebhook.headers).toEqual({ "X-Custom-Header": "test-value" }); + 
expect(firstWebhook.notifySchemaChanges).toBe(false); + expect(firstWebhook.select).toBe(""); + expect(firstWebhook.filter).toBe(""); + expect(Array.isArray(firstWebhook.pendingOperations)).toBe(true); + }); + + it("should have correct TypeScript types", async () => { + const result = await db.webhook.list({ + fetchHandler: createMockFetch(mockResponses["webhook-list"]), + }); + + // Type check - result should be WebhookListResponse + const typedResult: WebhookListResponse = result; + expect(typedResult.Status).toBe("ACTIVE"); + + // Extract expected ID from mock response + const mockData = mockResponses["webhook-list"].response; + const expectedFirstWebhookID = mockData.WebHook[0]?.webHookID; + expect(typedResult.WebHook[0]?.webHookID).toBe(expectedFirstWebhookID); + }); + }); + + describe("add()", () => { + it("should add a webhook and return the webhook ID", async () => { + const result = await db.webhook.add( + { + webhook: "https://example.com/webhook", + tableName: contacts, + headers: { "X-Custom-Header": "test-value" }, + }, + { + fetchHandler: createMockFetch(mockResponses["webhook-add"]), + }, + ); + + expect(result).toBeDefined(); + expect(result.webHookResult).toBeDefined(); + + // Extract expected ID from mock response + const expectedWebhookID = + mockResponses["webhook-add"].response.webHookResult.webHookID; + expect(result.webHookResult.webHookID).toBe(expectedWebhookID); + }); + + it("should extract table name from FMTable instance", async () => { + const result = await db.webhook.add( + { + webhook: "https://example.com/webhook", + tableName: contacts, + }, + { + fetchHandler: createMockFetch(mockResponses["webhook-add"]), + }, + ); + + // Extract expected ID from mock response + const expectedWebhookID = + mockResponses["webhook-add"].response.webHookResult.webHookID; + expect(result.webHookResult.webHookID).toBe(expectedWebhookID); + }); + + it("should support the same filter/select DX as the main query builder", async () => { + let requestBody: 
string | null = null; + const result = await db.webhook.add( + { + webhook: "https://example.com/webhook", + tableName: contacts, + filter: eq(contacts.name, "John"), + select: [contacts.name, contacts.PrimaryKey], + }, + { + hooks: { + before: async (req: Request) => { + if (req.body) { + // Clone the request to read the body without consuming it + const clonedRequest = req.clone(); + requestBody = await clonedRequest.text(); + } + }, + }, + fetchHandler: createMockFetch( + mockResponses["webhook-add-with-options"], + ), + }, + ); + assert(requestBody, "Request body should be defined"); + + // Parse the request body to verify it contains the transformed OData expressions + const body = JSON.parse(requestBody); + + // Verify the filter is transformed to OData filter syntax + // eq(contacts.name, "John") should become "name eq 'John'" + expect(body.filter).toBe("name eq 'John'"); + + // Verify the select is transformed to OData select syntax + // [contacts.name, contacts.PrimaryKey] should become "name,PrimaryKey" + expect(body.select).toBe("name,PrimaryKey"); + }); + + it("should have correct TypeScript types", async () => { + const result = await db.webhook.add( + { + webhook: "https://example.com/webhook", + tableName: contacts, + }, + { + fetchHandler: createMockFetch(mockResponses["webhook-add"]), + }, + ); + + // Type check - result should be WebhookAddResponse + const typedResult: WebhookAddResponse = result; + + // Extract expected ID from mock response + const expectedWebhookID = + mockResponses["webhook-add"].response.webHookResult.webHookID; + expect(typedResult.webHookResult.webHookID).toBe(expectedWebhookID); + }); + }); + + describe("get()", () => { + it("should get a webhook by ID", async () => { + // Extract webhook ID from mock response URL or response data + const mockData = mockResponses["webhook-get"].response; + const webhookID = mockData.webHookID; + + const result = await db.webhook.get(webhookID, { + fetchHandler: 
createMockFetch(mockResponses["webhook-get"]), + }); + + expect(result).toBeDefined(); + expect(result.webHookID).toBe(webhookID); + expect(result.tableName).toBe("contacts"); + expect(result.url).toBe("https://example.com/webhook"); + expect(result.headers).toEqual({ "X-Custom-Header": "test-value" }); + expect(result.notifySchemaChanges).toBe(false); + expect(result.select).toBe(""); + expect(result.filter).toBe(""); + expect(Array.isArray(result.pendingOperations)).toBe(true); + }); + + it("should throw an error for non-existent webhook", async () => { + await expect( + db.webhook.get(99999, { + fetchHandler: createMockFetch(mockResponses["webhook-get-not-found"]), + }), + ).rejects.toThrow(); + }); + + it("should have correct TypeScript types", async () => { + // Extract webhook ID from mock response + const mockData = mockResponses["webhook-get"].response; + const webhookID = mockData.webHookID; + + const result = await db.webhook.get(webhookID, { + fetchHandler: createMockFetch(mockResponses["webhook-get"]), + }); + + // Type check - result should be WebhookInfo + const typedResult: WebhookInfo = result; + expect(typedResult.webHookID).toBe(webhookID); + expect(typedResult.tableName).toBe("contacts"); + }); + }); + + describe("remove()", () => { + it("should remove a webhook successfully", async () => { + // Extract webhook ID from mock response + const webhookID = + mockResponses["webhook-delete"].response.webHookResult.webHookID; + + await expect( + db.webhook.remove(webhookID, { + fetchHandler: createMockFetch(mockResponses["webhook-delete"]), + }), + ).resolves.toBeUndefined(); + }); + + it("should return void on success", async () => { + // Extract webhook ID from mock response + const webhookID = + mockResponses["webhook-delete"].response.webHookResult.webHookID; + + const result = await db.webhook.remove(webhookID, { + fetchHandler: createMockFetch(mockResponses["webhook-delete"]), + }); + + expect(result).toBeUndefined(); + }); + }); + + 
describe.skip("invoke()", () => { + // it("should invoke a webhook without rowIDs", async () => { + // const result = await db.webhook.invoke(1, undefined, { + // fetchHandler: createMockFetch(mockResponses["webhook-invoke"]), + // }); + // expect(result).toBeDefined(); + // expect(typeof result).toBe("object"); + // if (result && typeof result === "object" && "status" in result) { + // expect((result as any).status).toBe("success"); + // } + // }); + // it("should invoke a webhook with rowIDs", async () => { + // const result = await db.webhook.invoke( + // 1, + // { rowIDs: [63, 61] }, + // { + // fetchHandler: createMockFetch(mockResponses["webhook-invoke"]), + // }, + // ); + // expect(result).toBeDefined(); + // expect(typeof result).toBe("object"); + // }); + }); + + describe("integration", () => { + it("should add, get, and remove a webhook in sequence", async () => { + // Extract expected IDs from mock responses + const expectedAddID = + mockResponses["webhook-add"].response.webHookResult.webHookID; + const expectedGetID = mockResponses["webhook-get"].response.webHookID; + + // Add webhook + const addResult = await db.webhook.add( + { + webhook: "https://example.com/webhook", + tableName: contacts, + }, + { + fetchHandler: createMockFetch(mockResponses["webhook-add"]), + }, + ); + + expect(addResult.webHookResult.webHookID).toBe(expectedAddID); + + // Get webhook - use the ID from the add result, but verify it matches expected get ID + // Note: In a real scenario, the get would use the add result ID, but for mocking + // we need to use the ID that matches our mock response + const getResult = await db.webhook.get(expectedGetID, { + fetchHandler: createMockFetch(mockResponses["webhook-get"]), + }); + + expect(getResult.webHookID).toBe(expectedGetID); + + // Remove webhook - use the ID from the delete mock response + const expectedDeleteID = + mockResponses["webhook-delete"].response.webHookResult.webHookID; + await expect( + 
db.webhook.remove(expectedDeleteID, { + fetchHandler: createMockFetch(mockResponses["webhook-delete"]), + }), + ).resolves.toBeUndefined(); + }); + }); +}); diff --git a/packages/registry/lib/types.ts b/packages/registry/lib/types.ts index f9b60ac5..c58db6ac 100644 --- a/packages/registry/lib/types.ts +++ b/packages/registry/lib/types.ts @@ -147,8 +147,20 @@ const categorySchema = z.enum([ export const frameworkSchema = z.enum(["next-pages", "next-app", "manual"]); +export type TemplateMetadata = z.infer & { + title: string; + description?: string; + category: z.infer; + files: TemplateFile[]; + registryType: z.infer; + postInstall?: PostInstallStep[]; + minimumProofKitVersion?: string; + allowedFrameworks?: z.infer[]; + schemaRequired?: boolean; +}; + // Defines the metadata for a single template (_meta.ts) -export const templateMetadataSchema = registryItemSchema +export const templateMetadataSchema: z.ZodType = registryItemSchema .omit({ name: true, type: true, files: true, docs: true }) .extend({ title: z.string(), @@ -176,15 +188,19 @@ export const templateMetadataSchema = registryItemSchema }); export type TemplateFile = z.infer; -export type TemplateMetadata = z.infer; -export const registryIndexSchema = templateMetadataSchema +export type RegistryIndex = Array<{ + name: string; + title: string; + category: z.infer; + description?: string; +}>; + +export const registryIndexSchema: z.ZodType = templateMetadataSchema .pick({ title: true, category: true, description: true }) .extend({ name: z.string(), }) .array(); -export type RegistryIndex = z.infer; - export type RegistryItem = ShadcnRegistryItem; diff --git a/packages/registry/lib/validator.test.ts b/packages/registry/lib/validator.test.ts index db17ad80..da9dfb3a 100644 --- a/packages/registry/lib/validator.test.ts +++ b/packages/registry/lib/validator.test.ts @@ -110,7 +110,8 @@ describe("validator", () => { ], }; - // Mock that the file exists + // Mock that the file exists (existsSync is used for file 
check) + mockedFs.existsSync.mockReturnValue(true); mockedFs.readdirSync.mockReturnValue([ "component.tsx", "_meta.ts", @@ -137,8 +138,10 @@ describe("validator", () => { ], }; - // Mock that the file doesn't exist - mockedFs.readdirSync.mockReturnValue(["_meta.ts"] as any); + // Mock that the file doesn't exist (existsSync returns false for the file) + mockedFs.existsSync.mockReturnValue(false); + // Mock readdirSync to return the _meta.ts and some other files (so it passes "has content" check) + mockedFs.readdirSync.mockReturnValue(["_meta.ts", "other-file.ts"] as any); expect(() => validateTemplateMetadata(metaWithMissingFile, mockContext), @@ -161,8 +164,10 @@ describe("validator", () => { ], }; - // Mock empty directory (only _meta.ts) - this will trigger "file does not exist" first - mockedFs.readdirSync.mockReturnValue(["_meta.ts"] as any); + // Mock that the file doesn't exist (existsSync returns false for the file) + mockedFs.existsSync.mockReturnValue(false); + // Mock readdirSync to return _meta.ts and some other files (so it passes "has content" check) + mockedFs.readdirSync.mockReturnValue(["_meta.ts", "other-file.ts"] as any); expect(() => validateTemplateMetadata(metaWithFiles, mockContext), @@ -217,7 +222,7 @@ describe("validator", () => { category: "utility", registryType: "registry:lib", files: [], - proofkitDependencies: ["nonexistent-template"], + registryDependencies: ["{proofkit}/r/nonexistent-template"], }; // Mock that template doesn't exist @@ -226,7 +231,7 @@ describe("validator", () => { expect(() => validateTemplateMetadata(metaWithInvalidDeps, mockContext), ).toThrow( - /Invalid proofkitDependencies reference 'nonexistent-template'/, + /Invalid registryDependencies reference '{proofkit}\/r\/nonexistent-template'/, ); }); diff --git a/packages/registry/lib/validator.ts b/packages/registry/lib/validator.ts index a15716e4..2ea5bba0 100644 --- a/packages/registry/lib/validator.ts +++ b/packages/registry/lib/validator.ts @@ -2,7 +2,7 @@ 
import fs from "fs"; import path from "path"; import { fileURLToPath } from "url"; import createJiti from "jiti"; -import { templateMetadataSchema } from "./types"; +import { templateMetadataSchema } from "./types.js"; export interface ValidationContext { templatesPath: string; diff --git a/packages/registry/package.json b/packages/registry/package.json index b42aadd0..20b31f6b 100644 --- a/packages/registry/package.json +++ b/packages/registry/package.json @@ -24,8 +24,9 @@ "templates" ], "scripts": { - "build": "tsdown && tsc --emitDeclarationOnly && publint --strict", - "dev": "pnpm run build && chokidar ./lib/**/* ./templates/**/* -c \"tsdown && tsc --emitDeclarationOnly\" --silent", + "prebuild": "tsx scripts/validate.ts", + "build": "tsdown && publint --strict", + "dev": "pnpm run build && chokidar ./lib/**/* ./templates/**/* -c \"tsdown && NODE_OPTIONS='--max-old-space-size=4096' tsc --emitDeclarationOnly\" --silent", "test": "vitest run", "test:watch": "vitest", "test:coverage": "vitest run --coverage" @@ -42,14 +43,15 @@ "@vitest/coverage-v8": "^2.1.8", "chokidar-cli": "^3.0.0", "concurrently": "^8.2.2", - "publint": "^0.3.12", - "tsdown": "^0.3.1", - "typescript": "^5.9.2", - "vitest": "^2.1.8" + "publint": "^0.3.16", + "tsdown": "^0.14.2", + "tsx": "^4.19.2", + "typescript": "^5.9.3", + "vitest": "^4.0.7" }, "dependencies": { "jiti": "^1.21.7", "shadcn": "^2.10.0", - "zod": "3.25.64" + "zod": "^4.2.0" } } diff --git a/packages/registry/scripts/validate.ts b/packages/registry/scripts/validate.ts new file mode 100644 index 00000000..9127c9f2 --- /dev/null +++ b/packages/registry/scripts/validate.ts @@ -0,0 +1,27 @@ +#!/usr/bin/env node +import { fileURLToPath } from "url"; +import path from "path"; +import createJiti from "jiti"; + +// Run validation before build starts +console.log("🔍 Validating registry before build..."); +try { + // Use jiti to load TypeScript source directly (since the compiled JS doesn't exist yet) + const __filename = 
fileURLToPath(import.meta.url); + const __dirname = path.dirname(__filename); + const validatorPath = path.resolve(__dirname, "../lib/validator.ts"); + + const jiti = createJiti(__filename, { + interopDefault: true, + requireCache: false, + }); + const validatorModule = jiti(validatorPath); + const { validateRegistry } = validatorModule; + validateRegistry(); + console.log("✅ Registry validation completed successfully"); +} catch (error) { + console.error("❌ Registry validation failed:"); + console.error(error); + process.exit(1); +} + diff --git a/packages/registry/tsconfig.json b/packages/registry/tsconfig.json index ed30e8a7..738f1c41 100644 --- a/packages/registry/tsconfig.json +++ b/packages/registry/tsconfig.json @@ -20,5 +20,6 @@ "@/lib/*": ["./lib/*"] } }, - "include": ["lib/**/*", "templates/**/_meta.ts", "scripts/**/*"] + "include": ["lib/**/*"], + "exclude": ["scripts/**/*", "dist", "node_modules", "*.test.ts", "templates/**/*"] } diff --git a/packages/registry/tsdown.config.ts b/packages/registry/tsdown.config.ts index b8d9519d..4980ddfc 100644 --- a/packages/registry/tsdown.config.ts +++ b/packages/registry/tsdown.config.ts @@ -1,31 +1,44 @@ import { defineConfig } from "tsdown"; -// Run validation before build starts -console.log("🔍 Validating registry before build..."); -try { - const { validateRegistry } = await import("./lib/validator.js"); - validateRegistry(); - console.log("✅ Registry validation completed successfully"); -} catch (error) { - console.error("❌ Registry validation failed:"); - console.error(error); - process.exit(1); -} - export default defineConfig({ entry: ["lib/index.ts"], outDir: "dist/lib", format: ["esm"], clean: true, - dts: false, + dts: true, sourcemap: true, onSuccess: async () => { // Copy templates to dist directory after successful build console.log("📁 Copying templates to dist..."); try { const { execSync } = await import("child_process"); + const fs = await import("fs"); + const path = await import("path"); + 
execSync("cp -r templates dist/", { stdio: "inherit" }); console.log("✅ Templates copied successfully"); + + // Find and rename the hashed .d.ts file to index.d.ts + const distLib = "dist/lib"; + const files = fs.readdirSync(distLib); + const dtsFile = files.find((f) => f.match(/^index-.+\.d\.ts$/)); + const dtsMapFile = files.find((f) => f.match(/^index-.+\.d\.ts\.map$/)); + + if (dtsFile) { + fs.renameSync( + path.join(distLib, dtsFile), + path.join(distLib, "index.d.ts"), + ); + console.log(`✅ Renamed ${dtsFile} to index.d.ts`); + } + + if (dtsMapFile) { + fs.renameSync( + path.join(distLib, dtsMapFile), + path.join(distLib, "index.d.ts.map"), + ); + console.log(`✅ Renamed ${dtsMapFile} to index.d.ts.map`); + } } catch (error) { console.error("❌ Failed to copy templates:"); console.error(error); diff --git a/packages/registry/vitest.config.ts b/packages/registry/vitest.config.ts index c88a255c..50483983 100644 --- a/packages/registry/vitest.config.ts +++ b/packages/registry/vitest.config.ts @@ -7,7 +7,8 @@ export default defineConfig({ coverage: { provider: "v8", reporter: ["text", "json", "html"], - exclude: ["node_modules/", "dist/", "**/*.test.ts"], + include: ["lib/**/*.{ts,tsx}", "templates/**/*.{ts,tsx}"], + exclude: ["**/*.test.ts"], }, }, }); diff --git a/packages/typegen/package.json b/packages/typegen/package.json index 9bf537c8..67631c16 100644 --- a/packages/typegen/package.json +++ b/packages/typegen/package.json @@ -6,8 +6,10 @@ "main": "dist/esm/index.js", "scripts": { "dev": "pnpm build:watch", + "dev:ui": "concurrently -n \"web,api\" -c \"cyan,magenta\" \"pnpm -C web dev\" \"pnpm run dev:api\"", + "dev:api": "concurrently -n \"build,server\" -c \"cyan,magenta\" \"pnpm build:watch\" \"nodemon --watch dist/esm --delay 1 --exec 'node dist/esm/cli.js ui --port 3141 --no-open'\"", "test": "op inject -i op.env -o .env.local -f && vitest run", - "build": "vite build && publint --strict", + "build": "pnpm -C web build && pnpm vite build && node 
scripts/build-copy.js && publint --strict", "build:watch": "vite build --watch", "ci": "pnpm run build && pnpm run test", "prepublishOnly": "pnpm run ci" @@ -26,6 +28,13 @@ "default": "./dist/esm/types.js" } }, + "./webui-server": { + "import": { + "types": "./dist/esm/server/app.d.ts", + "default": "./dist/esm/server/app.js" + } + }, + "./src/types.ts": "./src/types.ts", "./package.json": "./package.json" }, "keywords": [ @@ -51,26 +60,34 @@ "dependencies": { "@clack/prompts": "^0.11.0", "@commander-js/extra-typings": "^14.0.0", + "@hono/node-server": "^1.19.7", + "@hono/zod-validator": "^0.7.5", "@proofkit/fmdapi": "workspace:*", + "@proofkit/fmodata": "workspace:*", "@tanstack/vite-config": "^0.2.0", "chalk": "5.4.1", "commander": "^14.0.0", "dotenv": "^16.5.0", + "fast-xml-parser": "^5.3.2", "fs-extra": "^11.3.0", + "hono": "^4.9.0", "jsonc-parser": "^3.3.1", + "open": "^10.1.0", "prettier": "^3.5.3", "semver": "^7.7.2", "ts-morph": "^26.0.0", "ts-toolbelt": "^9.6.0", "vite": "^6.3.4", - "zod": "3.25.64" + "zod": "^4.1.13" }, "devDependencies": { "@types/fs-extra": "^11.0.4", "@types/semver": "^7.7.0", + "concurrently": "^8.2.2", + "nodemon": "^3.1.11", "publint": "^0.3.12", "type-fest": "^3.13.1", - "typescript": "^5.9.2", - "vitest": "^3.2.4" + "typescript": "^5.9.3", + "vitest": "^4.0.7" } } diff --git a/packages/typegen/proofkit-typegen.config.jsonc b/packages/typegen/proofkit-typegen.config.jsonc new file mode 100644 index 00000000..2e5db011 --- /dev/null +++ b/packages/typegen/proofkit-typegen.config.jsonc @@ -0,0 +1,57 @@ +{ + "config": [ + { + "type": "fmodata", + "path": "schema", + "clearOldFiles": false, + "alwaysOverrideFieldNames": true, + "tables": [ + { + "tableName": "isolated_contacts", + "reduceMetadata": false, + }, + { + "tableName": "fmdapi_test", + "fields": [ + { + "fieldName": "CreationTimestamp", + "exclude": true, + }, + { + "fieldName": "CreatedBy", + "exclude": true, + }, + { + "fieldName": "ModificationTimestamp", + "exclude": true, 
+ }, + { + "fieldName": "ModifiedBy", + "exclude": true, + }, + { + "fieldName": "anything", + "exclude": true, + }, + { + "fieldName": "fieldWithValues", + "exclude": true, + }, + { + "fieldName": "myContainer", + "exclude": true, + }, + { + "fieldName": "repeatingContainer", + "exclude": true, + }, + { + "fieldName": "booleanField", + "exclude": true, + }, + ], + }, + ], + }, + ], +} diff --git a/packages/typegen/scripts/build-copy.js b/packages/typegen/scripts/build-copy.js new file mode 100644 index 00000000..1b068566 --- /dev/null +++ b/packages/typegen/scripts/build-copy.js @@ -0,0 +1,18 @@ +import { cpSync, existsSync } from "fs"; +import { join, dirname } from "path"; +import { fileURLToPath } from "url"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +const rootDir = join(__dirname, ".."); +const webDistDir = join(rootDir, "web", "dist"); +const distWebDir = join(rootDir, "dist", "web"); + +if (existsSync(webDistDir)) { + console.log("Copying web assets to dist/web..."); + cpSync(webDistDir, distWebDir, { recursive: true }); + console.log("Build complete!"); +} else { + console.warn("Web dist directory not found, skipping copy"); +} diff --git a/packages/typegen/scripts/build.ts b/packages/typegen/scripts/build.ts new file mode 100644 index 00000000..06779643 --- /dev/null +++ b/packages/typegen/scripts/build.ts @@ -0,0 +1,5 @@ +// This file is kept for git worktree compatibility +// The actual build logic has been moved to build-copy.js +// This file can be removed once the worktree is updated + +export {}; diff --git a/packages/typegen/src/buildSchema.ts b/packages/typegen/src/buildSchema.ts index 34e9bf85..9e2ab2e5 100644 --- a/packages/typegen/src/buildSchema.ts +++ b/packages/typegen/src/buildSchema.ts @@ -27,6 +27,7 @@ export function buildSchema( const hasPortals = portalSchema.length > 0; if (type === "zod" || type === "zod/v4" || type === "zod/v3") { + // Map zod/v4 to zod since we're using zod v4 
schemaFile.addImportDeclaration({ moduleSpecifier: type, namedImports: ["z"], diff --git a/packages/typegen/src/cli.ts b/packages/typegen/src/cli.ts index d2bb30eb..eed4ed37 100644 --- a/packages/typegen/src/cli.ts +++ b/packages/typegen/src/cli.ts @@ -9,6 +9,7 @@ import { config } from "dotenv"; import { fileURLToPath } from "url"; import { typegenConfig } from "./types"; import { generateTypedClients } from "./typegen"; +import { startServer } from "./server"; const defaultConfigPaths = [ "proofkit-typegen.config.jsonc", @@ -145,7 +146,7 @@ program ) .option( "--skip-env-check", - "Ignore loading environment variables from a file.", + "(deprecated) Ignore loading environment variables from a file.", false, ) .action(async (options) => { @@ -157,9 +158,12 @@ program path.resolve(configPath ?? defaultConfigPaths[0] ?? ""), ); - if (!options.skipEnvCheck) { - parseEnvs(options.envPath); + if (options.skipEnvCheck) { + console.log( + chalk.yellow("⚠️ You no longer need to use --skip-env-check"), + ); } + parseEnvs(options.envPath); // default command await runCodegen({ @@ -178,6 +182,71 @@ program console.log(configLocation); init({ configLocation }); }); + +program + .command("ui") + .description("Launch the configuration UI") + .option("--port ", "Port for the UI server") + .option("--config ", "optional config file name") + .option("--no-open", "Don't automatically open the browser") + .option("--env-path ", "optional path to your .env file") + .action(async (options) => { + const configPath = getConfigPath(options.config); + const configLocation = configPath ?? defaultConfigPaths[0] ?? 
""; + + // Load environment variables before starting the server + parseEnvs(options.envPath); + + let port: number | null = null; + if (options.port) { + port = Number.parseInt(options.port, 10); + if (Number.isNaN(port) || port < 1 || port > 65535) { + console.error(chalk.red("Invalid port number")); + return process.exit(1); + } + } + + try { + const server = await startServer({ + port, + cwd: process.cwd(), + configPath: configLocation, + }); + + const url = `http://localhost:${server.port}`; + console.log(); + console.log(chalk.green(`🚀 Config UI ready at ${url}`)); + console.log(); + + // Auto-open browser + if (options.open !== false) { + try { + const { default: open } = await import("open"); + await open(url); + } catch (err) { + // Ignore errors opening browser + } + } + + // Handle graceful shutdown + process.on("SIGINT", () => { + console.log(); + console.log(chalk.yellow("Shutting down server...")); + server.close(); + process.exit(0); + }); + + process.on("SIGTERM", () => { + server.close(); + process.exit(0); + }); + } catch (err) { + console.error(chalk.red("Failed to start server:")); + console.error(err); + process.exit(1); + } + }); + program.parse(); function parseEnvs(envPath?: string | undefined) { @@ -192,15 +261,16 @@ function parseEnvs(envPath?: string | undefined) { } } + // this should fail silently. 
+ // if we can't resolve the right env vars, they will be logged as errors later const envRes = config({ path: actualEnvPath }); - if (envRes.error) { - console.log( - chalk.red( - `Could not resolve your environment variables.\n${envRes.error.message}\n`, - ), - ); - throw new Error("Could not resolve your environment variables."); - } + // if (envRes.error) { + // console.log( + // chalk.red( + // `Could not resolve your environment variables.\n${envRes.error.message}\n`, + // ), + // ); + // } } function getConfigPath(configPath?: string): string | null { diff --git a/packages/typegen/src/fmodata/downloadMetadata.ts b/packages/typegen/src/fmodata/downloadMetadata.ts new file mode 100644 index 00000000..fd761c62 --- /dev/null +++ b/packages/typegen/src/fmodata/downloadMetadata.ts @@ -0,0 +1,57 @@ +import { FMServerConnection } from "@proofkit/fmodata"; +import type { z } from "zod/v4"; +import type { typegenConfigSingle } from "../types"; +import { getEnvValues, validateEnvValues } from "../getEnvValues"; + +type FmodataConfig = Extract< + z.infer, + { type: "fmodata" } +>; + +/** + * Downloads OData metadata for a single table from a FileMaker server. 
+ * + * @param params - Object containing function parameters + * @param params.config - The fmodata config object containing connection details + * @param params.tableName - The name of the table to download metadata for + * @returns Promise that resolves with the XML metadata string + */ +export async function downloadTableMetadata({ + config, + tableName, + reduceAnnotations = false, +}: { + config: FmodataConfig; + tableName: string; + reduceAnnotations?: boolean; +}): Promise { + const envValues = getEnvValues(config.envNames); + const validationResult = validateEnvValues(envValues, config.envNames); + + if (!validationResult.success) { + throw new Error(validationResult.errorMessage); + } + + const { server, db, auth } = validationResult; + + // Create connection based on authentication method + const connection = new FMServerConnection({ + serverUrl: server, + auth, + fetchClientOptions: { + timeout: 15000, // 15 seconds + retries: 2, + }, + }); + + const database = connection.database(db); + + // Download metadata for the specific table in XML format + const tableMetadata = await database.getMetadata({ + tableName, + format: "xml", + reduceAnnotations, + }); + + return tableMetadata; +} diff --git a/packages/typegen/src/fmodata/generateODataTypes.ts b/packages/typegen/src/fmodata/generateODataTypes.ts new file mode 100644 index 00000000..7b86534b --- /dev/null +++ b/packages/typegen/src/fmodata/generateODataTypes.ts @@ -0,0 +1,1318 @@ +import { writeFile, mkdir } from "node:fs/promises"; +import { join, resolve } from "node:path"; +import fs from "fs-extra"; +import { + Project, + SourceFile, + CallExpression, + ObjectLiteralExpression, + PropertyAssignment, +} from "ts-morph"; +import type { ParsedMetadata, EntityType } from "./parseMetadata"; +import { FmodataConfig } from "../types"; +import { formatAndSaveSourceFiles } from "../formatting"; + +interface GeneratedTO { + varName: string; + code: string; + navigation: string[]; + usedFieldBuilders: Set; + 
needsZod: boolean; + entitySetName: string; + entityType: EntityType; + tableOverride?: NonNullable[number]; +} + +/** + * Maps type override enum values to field builder functions from @proofkit/fmodata + */ +function mapTypeOverrideToFieldBuilder( + typeOverride: + | "text" + | "number" + | "boolean" + | "fmBooleanNumber" + | "date" + | "timestamp" + | "container", +): string { + switch (typeOverride) { + case "text": + return "textField()"; + case "number": + return "numberField()"; + case "boolean": + case "fmBooleanNumber": + return "numberField().outputValidator(z.coerce.boolean())"; + case "date": + return "dateField()"; + case "timestamp": + return "timestampField()"; + case "container": + return "containerField()"; + } +} + +/** + * Maps OData types to field builder functions from @proofkit/fmodata + */ +function mapODataTypeToFieldBuilder( + edmType: string, + typeOverride?: + | "text" + | "number" + | "boolean" + | "fmBooleanNumber" + | "date" + | "timestamp" + | "container", +): string { + // If typeOverride is provided, use it instead of the inferred type + if (typeOverride) { + return mapTypeOverrideToFieldBuilder(typeOverride); + } + + switch (edmType) { + case "Edm.String": + return "textField()"; + case "Edm.Decimal": + case "Edm.Int32": + case "Edm.Int64": + case "Edm.Double": + return "numberField()"; + case "Edm.Boolean": + return "numberField().outputValidator(z.coerce.boolean())"; + case "Edm.Date": + return "dateField()"; // ISO date string + case "Edm.DateTimeOffset": + return "timestampField()"; // ISO datetime string + case "Edm.Binary": + return "containerField()"; // base64 encoded + default: + return "textField()"; // Default to textField for unknown types + } +} + +/** + * Extracts entity type name from Type string like "Collection(com.filemaker.odata.WebData.fmp12.Work_Orders_)" + * Returns "Work_Orders_" + */ +function extractEntityTypeNameFromType(typeString: string): string | null { + // Pattern: 
Collection(namespace.EntityTypeName) -> extract EntityTypeName + const collectionMatch = typeString.match(/Collection\(([^)]+)\)/); + if (collectionMatch && collectionMatch[1]) { + const fullType = collectionMatch[1]; + // Extract the last part after the last dot + const parts = fullType.split("."); + return parts[parts.length - 1] ?? null; + } + // Try without Collection wrapper - extract last part after last dot + const parts = typeString.split("."); + return parts.length > 0 ? (parts[parts.length - 1] ?? null) : null; +} + +/** + * Generates a table occurrence definition for a single entity set + */ +function generateTableOccurrence( + entitySetName: string, + entityType: EntityType, + entityTypeToSetMap: Map, + tableOverride?: NonNullable[number], + existingFields?: ParsedTableOccurrence, + alwaysOverrideFieldNames?: boolean, +): GeneratedTO { + const fmtId = entityType["@TableID"]; + const keyFields = entityType.$Key || []; + const fields = entityType.Properties; + const readOnlyFields: string[] = []; + const navigationTargets: string[] = []; + const usedFieldBuilders = new Set(); + let needsZod = false; + + // Process navigation properties + for (const navProp of entityType.NavigationProperties) { + const targetEntityTypeName = extractEntityTypeNameFromType(navProp.Type); + if (targetEntityTypeName) { + const targetEntitySet = entityTypeToSetMap.get(targetEntityTypeName); + if (targetEntitySet) { + navigationTargets.push(targetEntitySet); + } + } + } + + // Determine read-only fields + for (const [fieldName, metadata] of fields.entries()) { + if ( + metadata["@Calculation"] || + metadata["@Global"] || + metadata["@Org.OData.Core.V1.Permissions"]?.includes("Read") + ) { + readOnlyFields.push(fieldName); + } + } + + // Determine the id field (for reference, not used in generation) + let idField: string; + if (keyFields.length > 0) { + // Use the first key field + const firstKey = keyFields[0]; + if (!firstKey) { + throw new Error("Key fields array is empty but 
length check passed"); + } + idField = firstKey; + } else { + // Find a suitable ID field: look for auto-generated fields or fields with "id" in the name + const fieldNames = Array.from(fields.keys()); + const autoGenField = fieldNames.find( + (name) => fields.get(name)?.["@AutoGenerated"], + ); + const idFieldName = fieldNames.find( + (name) => + name.toLowerCase().includes("_id") || + name.toLowerCase().endsWith("id") || + name.toLowerCase() === "id", + ); + const firstFieldName = fieldNames[0]; + if (!firstFieldName) { + throw new Error("No fields found in entity type"); + } + idField = autoGenField ?? idFieldName ?? firstFieldName; + } + + // Build a field overrides map from the array for easier lookup + type FieldOverrideType = Exclude< + NonNullable[number]>["fields"], + undefined + >[number]; + const fieldOverridesMap = new Map(); + if (tableOverride?.fields) { + for (const fieldOverride of tableOverride.fields) { + if (fieldOverride?.fieldName) { + fieldOverridesMap.set(fieldOverride.fieldName, fieldOverride); + } + } + } + + // Generate field builder definitions + const fieldLines: string[] = []; + const fieldEntries = Array.from(fields.entries()); + + // Filter out excluded fields and collect valid entries + const validFieldEntries: Array< + [string, typeof fields extends Map ? 
V : never] + > = []; + for (const entry of fieldEntries) { + if (!entry) continue; + const [fieldName] = entry; + const fieldOverride = fieldOverridesMap.get(fieldName); + + // Skip excluded fields + if (fieldOverride?.exclude === true) { + continue; + } + + validFieldEntries.push(entry); + } + + for (let i = 0; i < validFieldEntries.length; i++) { + const entry = validFieldEntries[i]; + if (!entry) continue; + const [fieldName, metadata] = entry; + const fieldOverride = fieldOverridesMap.get(fieldName); + + // Try to match existing field: first by entity ID, then by name + let matchedExistingField: ParsedField | null = null; + let finalFieldName = fieldName; + + if (existingFields) { + // Try matching by entity ID first + if (metadata["@FieldID"]) { + matchedExistingField = matchFieldByEntityId( + existingFields.fieldsByEntityId, + metadata["@FieldID"], + ); + if (matchedExistingField) { + // Use existing field name unless alwaysOverrideFieldNames is true + if (!alwaysOverrideFieldNames) { + finalFieldName = matchedExistingField.fieldName; + } + } + } + + // If no match by entity ID, try matching by name + if (!matchedExistingField) { + matchedExistingField = matchFieldByName( + existingFields.fields, + fieldName, + ); + } + } + + // Apply typeOverride if provided, otherwise use inferred type + const fieldBuilder = mapODataTypeToFieldBuilder( + metadata.$Type, + fieldOverride?.typeOverride as + | "text" + | "number" + | "boolean" + | "fmBooleanNumber" + | "date" + | "timestamp" + | "container" + | undefined, + ); + + // Track which field builders are used + if (fieldBuilder.includes("textField()")) { + usedFieldBuilders.add("textField"); + } else if (fieldBuilder.includes("numberField()")) { + usedFieldBuilders.add("numberField"); + } else if (fieldBuilder.includes("dateField()")) { + usedFieldBuilders.add("dateField"); + } else if (fieldBuilder.includes("timestampField()")) { + usedFieldBuilders.add("timestampField"); + } else if 
(fieldBuilder.includes("containerField()")) { + usedFieldBuilders.add("containerField"); + } + + // Track if z.coerce.boolean() is used + if (fieldBuilder.includes("z.coerce.boolean()")) { + needsZod = true; + } + + const isKeyField = keyFields.includes(fieldName); + // Only add .notNull() if explicitly marked as Nullable="false" in XML + // metadata.$Nullable is false only if Nullable="false" was in XML, otherwise it's true (nullable by default) + const isExplicitlyNotNullable = metadata.$Nullable === false; + const isReadOnly = readOnlyFields.includes(fieldName); + const isLastField = i === validFieldEntries.length - 1; + + let line = ` ${JSON.stringify(finalFieldName)}: ${fieldBuilder}`; + + // Chain methods: primaryKey, readOnly, notNull, entityId, comment + if (isKeyField) { + line += ".primaryKey()"; + } + if (isReadOnly) { + line += ".readOnly()"; + } + // Only add .notNull() if explicitly marked as Nullable="false" in XML + // Key fields are handled by primaryKey() which already makes them not null + if (isExplicitlyNotNullable && !isKeyField) { + line += ".notNull()"; + } + if (metadata["@FieldID"]) { + line += `.entityId(${JSON.stringify(metadata["@FieldID"])})`; + } + if (metadata["@FMComment"]) { + line += `.comment(${JSON.stringify(metadata["@FMComment"])})`; + } + + // Preserve user customizations from existing field + if (matchedExistingField) { + line = preserveUserCustomizations(matchedExistingField, line); + } + + // Add comma if not the last field + if (!isLastField) { + line += ","; + } + + fieldLines.push(line); + } + + // Apply variableName override if provided, otherwise generate from entitySetName + let varName = tableOverride?.variableName + ? 
tableOverride.variableName.replace(/[^a-zA-Z0-9_]/g, "_") + : entitySetName.replace(/[^a-zA-Z0-9_]/g, "_"); + // Prefix with underscore if name starts with a digit (invalid JavaScript identifier) + if (/^\d/.test(varName)) { + varName = `_${varName}`; + } + + // Build options object + const optionsParts: string[] = []; + if (fmtId) { + optionsParts.push(`entityId: ${JSON.stringify(fmtId)}`); + } + if (entityType["@FMComment"]) { + optionsParts.push(`comment: ${JSON.stringify(entityType["@FMComment"])}`); + } + // Always include navigationPaths, even if empty + const navPaths = navigationTargets.map((n) => JSON.stringify(n)).join(", "); + optionsParts.push(`navigationPaths: [${navPaths}]`); + + const optionsSection = + optionsParts.length > 0 + ? `, {\n${optionsParts.map((p) => ` ${p}`).join(",\n")}\n}` + : ""; + + const code = `export const ${varName} = fmTableOccurrence(${JSON.stringify(entitySetName)}, { +${fieldLines.join("\n")} +}${optionsSection});`; + + return { + varName, + code, + navigation: navigationTargets, + usedFieldBuilders, + needsZod, + entitySetName, + entityType, + tableOverride, + }; +} + +/** + * Generates import statements based on which field builders are used + */ +function generateImports( + usedFieldBuilders: Set, + needsZod: boolean, +): string { + const fieldBuilderImports: string[] = []; + + // Always need fmTableOccurrence + fieldBuilderImports.push("fmTableOccurrence"); + + // Add only the field builders that are actually used + if (usedFieldBuilders.has("textField")) { + fieldBuilderImports.push("textField"); + } + if (usedFieldBuilders.has("numberField")) { + fieldBuilderImports.push("numberField"); + } + if (usedFieldBuilders.has("dateField")) { + fieldBuilderImports.push("dateField"); + } + if (usedFieldBuilders.has("timestampField")) { + fieldBuilderImports.push("timestampField"); + } + if (usedFieldBuilders.has("containerField")) { + fieldBuilderImports.push("containerField"); + } + + const imports = [ + `import { 
${fieldBuilderImports.join(", ")} } from "@proofkit/fmodata"`, + ]; + + if (needsZod) { + imports.push(`import { z } from "zod/v4"`); + } + + return imports.join(";\n") + ";\n"; +} + +/** + * Sanitizes a name to be a safe filename + */ +function sanitizeFileName(name: string): string { + const sanitized = name.replace(/[^a-zA-Z0-9_]/g, "_"); + // Prefix with underscore if name starts with a digit (invalid JavaScript identifier) + return /^\d/.test(sanitized) ? `_${sanitized}` : sanitized; +} + +/** + * Represents a parsed field from an existing file + */ +interface ParsedField { + fieldName: string; + entityId?: string; + fullChainText: string; + userCustomizations: string; // Everything after the base chain (e.g., .inputValidator(...).outputValidator(...)) +} + +/** + * Represents a parsed table occurrence from an existing file + */ +interface ParsedTableOccurrence { + varName: string; + entitySetName: string; + tableEntityId?: string; + fields: Map; // keyed by field name + fieldsByEntityId: Map; // keyed by entity ID + existingImports: string[]; // All existing import statements as strings +} + +/** + * Extracts user customizations (like .inputValidator() and .outputValidator()) from a method chain + */ +function extractUserCustomizations( + chainText: string, + baseChainEnd: number, +): string { + // We want to preserve user-added chained calls even if they were placed: + // - before a standard method (e.g. textField().inputValidator(...).entityId(...)) + // - on fields that have no standard methods at all (possible when reduceMetadata is true) + // + // `baseChainEnd` should point to the end of the generator-owned "base builder chain" + // (e.g. `textField()` or `numberField().outputValidator(z.coerce.boolean())`). + // Everything after that may contain standard methods *and* user customizations. + // We extract only the non-standard chained calls and return them as a string + // that can be appended to the regenerated chain. 
+ + const standardMethodNames = new Set([ + "primaryKey", + "readOnly", + "notNull", + "entityId", + "comment", + ]); + + const start = Math.max(0, Math.min(baseChainEnd, chainText.length)); + const tail = chainText.slice(start); + if (!tail.includes(".")) { + return ""; + } + + function isIdentChar(c: string): boolean { + return /[A-Za-z0-9_$]/.test(c); + } + + function skipWhitespace(s: string, idx: number): number { + while (idx < s.length && /\s/.test(s[idx] ?? "")) idx++; + return idx; + } + + // Best-effort scanning helpers: handle nested parentheses and quoted strings. + function scanString(s: string, idx: number, quote: string): number { + // idx points at opening quote + idx++; + while (idx < s.length) { + const ch = s[idx]; + if (ch === "\\") { + idx += 2; + continue; + } + if (ch === quote) { + return idx + 1; + } + idx++; + } + return idx; + } + + function scanTemplateLiteral(s: string, idx: number): number { + // idx points at opening backtick + idx++; + while (idx < s.length) { + const ch = s[idx]; + if (ch === "\\") { + idx += 2; + continue; + } + if (ch === "`") { + return idx + 1; + } + if (ch === "$" && s[idx + 1] === "{") { + idx += 2; // skip ${ + let braceDepth = 1; + while (idx < s.length && braceDepth > 0) { + const c = s[idx]; + if (c === "'" || c === '"') { + idx = scanString(s, idx, c); + continue; + } + if (c === "`") { + idx = scanTemplateLiteral(s, idx); + continue; + } + if (c === "{") braceDepth++; + else if (c === "}") braceDepth--; + idx++; + } + continue; + } + idx++; + } + return idx; + } + + function scanAngleBrackets(s: string, idx: number): number { + // idx points at '<' + let depth = 0; + while (idx < s.length) { + const ch = s[idx]; + if (ch === "'" || ch === '"') { + idx = scanString(s, idx, ch); + continue; + } + if (ch === "`") { + idx = scanTemplateLiteral(s, idx); + continue; + } + if (ch === "<") depth++; + if (ch === ">") { + depth--; + idx++; + if (depth === 0) return idx; + continue; + } + idx++; + } + return idx; + 
} + + function scanParens(s: string, idx: number): number { + // idx points at '(' + let depth = 0; + while (idx < s.length) { + const ch = s[idx]; + if (ch === "'" || ch === '"') { + idx = scanString(s, idx, ch); + continue; + } + if (ch === "`") { + idx = scanTemplateLiteral(s, idx); + continue; + } + if (ch === "(") depth++; + if (ch === ")") { + depth--; + idx++; + if (depth === 0) return idx; + continue; + } + idx++; + } + return idx; + } + + const keptSegments: string[] = []; + let i = 0; + while (i < tail.length) { + const dot = tail.indexOf(".", i); + if (dot === -1) break; + + let j = dot + 1; + if (j >= tail.length) break; + if (!isIdentChar(tail[j] ?? "")) { + i = j; + continue; + } + + const nameStart = j; + while (j < tail.length && isIdentChar(tail[j] ?? "")) j++; + const methodName = tail.slice(nameStart, j); + + j = skipWhitespace(tail, j); + + // Optional generic type args: .foo<...>(...) + if (tail[j] === "<") { + j = scanAngleBrackets(tail, j); + j = skipWhitespace(tail, j); + } + + // Method call args: (...) + if (tail[j] === "(") { + const end = scanParens(tail, j); + const segment = tail.slice(dot, end); + if (!standardMethodNames.has(methodName)) { + keptSegments.push(segment); + } + i = end; + continue; + } + + // Property access or malformed chain segment: keep it if it's not standard. + // Capture up to the next '.' or end. + const nextDot = tail.indexOf(".", j); + const end = nextDot === -1 ? 
tail.length : nextDot; + const segment = tail.slice(dot, end); + if (!standardMethodNames.has(methodName)) { + keptSegments.push(segment); + } + i = end; + } + + return keptSegments.join(""); +} + +/** + * Parses an existing table occurrence file and extracts field definitions + */ +function parseExistingTableFile( + sourceFile: SourceFile, +): ParsedTableOccurrence | null { + // Find the fmTableOccurrence call by searching all call expressions + let callExpr: CallExpression | null = null; + + sourceFile.forEachDescendant((node) => { + if (node.getKindName() === "CallExpression") { + const expr = node as CallExpression; + const expression = expr.getExpression(); + if ( + expression.getKindName() === "Identifier" && + expression.getText() === "fmTableOccurrence" + ) { + callExpr = expr; + } + } + }); + + if (!callExpr) { + return null; + } + + // TypeScript needs explicit type here + const call: CallExpression = callExpr; + + // Extract variable name from the containing variable declaration + let varName = ""; + let parent = call.getParent(); + while (parent) { + if (parent.getKindName() === "VariableDeclaration") { + // TypeScript needs explicit cast here + const varDecl = parent as any; + varName = varDecl.getName(); + break; + } + parent = parent.getParent() ?? 
undefined; + } + + if (!varName) { + // Try to find from export declaration + const exportDecl = sourceFile.getExportDeclarations().find((decl) => { + const namedExports = decl.getNamedExports(); + return namedExports.length > 0; + }); + if (exportDecl) { + const namedExports = exportDecl.getNamedExports(); + if (namedExports.length > 0) { + const firstExport = namedExports[0]; + if (firstExport) { + varName = firstExport.getName(); + } + } + } + } + + // Get arguments to fmTableOccurrence + const args = call.getArguments(); + if (args.length < 2) { + return null; + } + + const entitySetNameArg = args[0]; + if (!entitySetNameArg) { + return null; + } + const entitySetName = entitySetNameArg.getText().replace(/['"]/g, ""); + + // Get the fields object (second argument) + const fieldsArg = args[1]; + if (!fieldsArg || fieldsArg.getKindName() !== "ObjectLiteralExpression") { + return null; + } + const fieldsObject = fieldsArg as ObjectLiteralExpression; + + // Get options object (third argument, if present) + let tableEntityId: string | undefined; + if (args.length >= 3) { + const optionsArg = args[2]; + if (optionsArg && optionsArg.getKindName() === "ObjectLiteralExpression") { + const optionsObject = optionsArg as ObjectLiteralExpression; + const entityIdProp = optionsObject.getProperty("entityId"); + if (entityIdProp && entityIdProp.getKindName() === "PropertyAssignment") { + const value = (entityIdProp as PropertyAssignment) + .getInitializer() + ?.getText(); + if (value) { + tableEntityId = value.replace(/['"]/g, ""); + } + } + } + } + + // Extract existing imports + const existingImports: string[] = []; + const importDeclarations = sourceFile.getImportDeclarations(); + for (const importDecl of importDeclarations) { + const importText = importDecl.getFullText(); + if (importText.trim()) { + existingImports.push(importText.trim()); + } + } + + // Parse each field + const fields = new Map(); + const fieldsByEntityId = new Map(); + + const properties = 
fieldsObject.getProperties(); + for (const prop of properties) { + if (prop.getKindName() !== "PropertyAssignment") { + continue; + } + const fieldProp = prop as PropertyAssignment; + const fieldNameNode = fieldProp.getNameNode(); + const fieldName = fieldNameNode.getText().replace(/['"]/g, ""); + + const initializer = fieldProp.getInitializer(); + if (!initializer) { + continue; + } + + const chainText = initializer.getText(); + + // Extract entity ID from .entityId() call + let entityId: string | undefined; + const entityIdMatch = chainText.match(/\.entityId\(['"]([^'"]+)['"]\)/); + if (entityIdMatch) { + entityId = entityIdMatch[1]; + } + + // Extract user customizations (everything after standard methods) + const userCustomizations = extractUserCustomizations(chainText, 0); + + const parsedField: ParsedField = { + fieldName, + entityId, + fullChainText: chainText, + userCustomizations, + }; + + fields.set(fieldName, parsedField); + if (entityId) { + fieldsByEntityId.set(entityId, parsedField); + } + } + + return { + varName, + entitySetName, + tableEntityId, + fields, + fieldsByEntityId, + existingImports, + }; +} + +/** + * Matches a field from metadata to an existing field by entity ID + */ +function matchFieldByEntityId( + existingFields: Map, + metadataEntityId: string | undefined, +): ParsedField | null { + if (!metadataEntityId) { + return null; + } + return existingFields.get(metadataEntityId) || null; +} + +/** + * Matches a field from metadata to an existing field by name + */ +function matchFieldByName( + existingFields: Map, + fieldName: string, +): ParsedField | null { + return existingFields.get(fieldName) || null; +} + +/** + * Preserves user customizations from an existing field chain + */ +function preserveUserCustomizations( + existingField: ParsedField | undefined, + newChain: string, +): string { + if (!existingField) { + return newChain; + } + + const standardMethods = [ + ".primaryKey()", + ".readOnly()", + ".notNull()", + ".entityId(", + 
".comment(", + ]; + + // Determine where the generator-owned base builder chain ends in the new chain + // (before any standard methods added by the generator). + let baseChainEnd = newChain.length; + for (const method of standardMethods) { + const idx = newChain.indexOf(method); + if (idx !== -1 && idx < baseChainEnd) { + baseChainEnd = idx; + } + } + + const baseBuilderPrefix = newChain.slice(0, baseChainEnd); + const existingChainText = existingField.fullChainText; + const existingBaseEnd = existingChainText.startsWith(baseBuilderPrefix) + ? baseBuilderPrefix.length + : 0; + + const userCustomizations = extractUserCustomizations( + existingChainText, + existingBaseEnd, + ); + + if (!userCustomizations) { + return newChain; + } + + // Append extracted user customizations to the regenerated chain + return newChain + userCustomizations; +} + +/** + * Generates TypeScript table occurrence files from parsed OData metadata. + * + * @param metadata - The parsed OData metadata + * @param options - Generation options including output path + * @returns Promise that resolves when all files have been generated + */ +export async function generateODataTypes( + metadata: ParsedMetadata, + config: FmodataConfig & { + alwaysOverrideFieldNames?: boolean; + }, +): Promise { + const { entityTypes, entitySets } = metadata; + const { + path, + clearOldFiles = true, + tables, + alwaysOverrideFieldNames = true, + } = config; + const outputPath = path ?? 
"schema"; + + // Build a map from entity type name to entity set name + const entityTypeToSetMap = new Map(); + for (const [entitySetName, entitySet] of entitySets.entries()) { + entityTypeToSetMap.set(entitySet.EntityType, entitySetName); + } + + // Build a set of allowed table names from config + const allowedTableNames = new Set(); + if (tables) { + for (const tableOverride of tables) { + if (tableOverride?.tableName) { + allowedTableNames.add(tableOverride.tableName); + } + } + } + + // Build a table overrides map from the array for easier lookup + const tableOverridesMap = new Map< + string, + NonNullable[number] + >(); + if (tables) { + for (const tableOverride of tables) { + if (tableOverride?.tableName) { + tableOverridesMap.set(tableOverride.tableName, tableOverride); + } + } + } + + // Generate table occurrences for entity sets + const generatedTOs: GeneratedTO[] = []; + + for (const [entitySetName, entitySet] of entitySets.entries()) { + // Only generate types for tables specified in config + if (allowedTableNames.size > 0 && !allowedTableNames.has(entitySetName)) { + continue; + } + + // Get table override config if it exists + const tableOverride = tableOverridesMap.get(entitySetName); + + const entityType = entityTypes.get(entitySet.EntityType); + if (entityType) { + // Determine alwaysOverrideFieldNames: table-level override takes precedence + const tableAlwaysOverrideFieldNames = + tableOverride?.alwaysOverrideFieldNames ?? 
alwaysOverrideFieldNames; + + // First generate without existing fields to get the structure + // We'll regenerate with existing fields later if the file exists + const generated = generateTableOccurrence( + entitySetName, + entityType, + entityTypeToSetMap, + tableOverride, + undefined, + tableAlwaysOverrideFieldNames, + ); + generatedTOs.push({ + ...generated, + entitySetName, + entityType, + tableOverride, + }); + } + } + + // Resolve and create output directory + const resolvedOutputPath = resolve(outputPath); + await mkdir(resolvedOutputPath, { recursive: true }); + + if (clearOldFiles) { + // Clear the directory if requested (but keep the directory itself) + fs.emptyDirSync(resolvedOutputPath); + } + + // Create ts-morph project for file manipulation + const project = new Project({}); + + // Generate one file per table occurrence + const exportStatements: string[] = []; + + for (const generated of generatedTOs) { + const fileName = `${sanitizeFileName(generated.varName)}.ts`; + const filePath = join(resolvedOutputPath, fileName); + + // Check if file exists and parse it + let existingFields: ParsedTableOccurrence | undefined; + if (fs.existsSync(filePath) && !clearOldFiles) { + try { + const existingSourceFile = project.addSourceFileAtPath(filePath); + const parsed = parseExistingTableFile(existingSourceFile); + if (parsed) { + existingFields = parsed; + } + } catch (error) { + // If parsing fails, continue without existing fields + console.warn(`Failed to parse existing file ${filePath}:`, error); + } + } + + // Determine alwaysOverrideFieldNames: table-level override takes precedence + const tableAlwaysOverrideFieldNames = + generated.tableOverride?.alwaysOverrideFieldNames ?? + alwaysOverrideFieldNames; + + // Regenerate with existing fields merged in if file exists + const regenerated = existingFields + ? 
generateTableOccurrence( + generated.entitySetName, + generated.entityType, + entityTypeToSetMap, + generated.tableOverride, + existingFields, + tableAlwaysOverrideFieldNames, + ) + : generated; + + // Track removed fields (fields in existing but not in metadata) + const removedFields: ParsedField[] = []; + if (existingFields) { + for (const existingField of existingFields.fields.values()) { + // Check if this field is still in metadata + const stillExists = Array.from( + generated.entityType.Properties.keys(), + ).some((metaFieldName) => { + const metaField = generated.entityType.Properties.get(metaFieldName); + if (!metaField) return false; + + // Match by entity ID or name + if ( + existingField.entityId && + metaField["@FieldID"] === existingField.entityId + ) { + return true; + } + if (metaFieldName === existingField.fieldName) { + return true; + } + return false; + }); + + if (!stillExists) { + removedFields.push(existingField); + } + } + } + + // Generate required imports based on what's actually used in this file + const requiredImports = generateImports( + regenerated.usedFieldBuilders, + regenerated.needsZod, + ); + + // Parse import statements to extract module and named imports + function parseImport(importText: string): { + module: string; + namedImports: string[]; // Base names only (for comparison) + fullNamedImports: string[]; // Full specifiers including aliases (e.g., "x as y") + fullText: string; + } | null { + const trimmed = importText.trim(); + if (!trimmed.startsWith("import")) { + return null; + } + + // Extract module specifier using regex + const moduleMatch = trimmed.match(/from\s+['"]([^'"]+)['"]/); + if (!moduleMatch || !moduleMatch[1]) { + return null; + } + const module = moduleMatch[1]; + + // Extract named imports + const namedImports: string[] = []; // Base names for comparison + const fullNamedImports: string[] = []; // Full specifiers with aliases preserved + const namedMatch = trimmed.match(/\{([^}]+)\}/); + if (namedMatch && 
namedMatch[1]) { + const importsList = namedMatch[1]; + // Split by comma and clean up + importsList.split(",").forEach((imp) => { + const cleaned = imp.trim(); + if (cleaned) { + // Preserve the full import specifier (including alias) + fullNamedImports.push(cleaned); + + // Extract base name for comparison (e.g., "x as y" -> "x") + const aliasMatch = cleaned.match(/^(\w+)(?:\s+as\s+\w+)?$/); + if (aliasMatch && aliasMatch[1]) { + namedImports.push(aliasMatch[1]); + } else { + namedImports.push(cleaned); + } + } + }); + } + + return { module, namedImports, fullNamedImports, fullText: trimmed }; + } + + // If file exists, preserve existing imports and merge with required ones + let finalImports = requiredImports; + if (existingFields && existingFields.existingImports.length > 0) { + // Parse all existing imports by module + const existingImportsByModule = new Map< + string, + { + namedImports: Set; // Base names for comparison + fullNamedImports: Map; // Map base name -> full specifier (preserves aliases) + fullText: string; + } + >(); + + for (const existingImport of existingFields.existingImports) { + const parsed = parseImport(existingImport); + if (parsed) { + const existing = existingImportsByModule.get(parsed.module); + if (existing) { + // Merge named imports from duplicate imports + parsed.namedImports.forEach((imp) => + existing.namedImports.add(imp), + ); + // Preserve full import specifiers (with aliases) + parsed.fullNamedImports.forEach((fullSpec) => { + const baseName = + fullSpec.match(/^(\w+)(?:\s+as\s+\w+)?$/)?.[1] || fullSpec; + existing.fullNamedImports.set(baseName, fullSpec); + }); + } else { + const fullNamedImportsMap = new Map(); + parsed.fullNamedImports.forEach((fullSpec) => { + const baseName = + fullSpec.match(/^(\w+)(?:\s+as\s+\w+)?$/)?.[1] || fullSpec; + fullNamedImportsMap.set(baseName, fullSpec); + }); + existingImportsByModule.set(parsed.module, { + namedImports: new Set(parsed.namedImports), + fullNamedImports: fullNamedImportsMap, 
+ fullText: parsed.fullText, + }); + } + } + } + + // Parse required imports + const requiredImportLines = requiredImports + .split("\n") + .filter((line) => line.trim()); + const requiredImportsByModule = new Map>(); + + for (const requiredLine of requiredImportLines) { + const parsed = parseImport(requiredLine); + if (parsed) { + const existing = requiredImportsByModule.get(parsed.module); + if (existing) { + parsed.namedImports.forEach((imp) => existing.add(imp)); + } else { + requiredImportsByModule.set( + parsed.module, + new Set(parsed.namedImports), + ); + } + } + } + + // Build final imports: preserve existing, update if needed, add missing + const finalImportLines: string[] = []; + const handledModules = new Set(); + const processedModules = new Set(); + + // Process existing imports - deduplicate by module + for (const existingImport of existingFields.existingImports) { + const parsed = parseImport(existingImport); + if (parsed && parsed.module) { + // Skip if we've already processed this module (deduplicate) + if (processedModules.has(parsed.module)) { + continue; + } + processedModules.add(parsed.module); + + // Use the merged named imports from existingImportsByModule + const existing = existingImportsByModule.get(parsed.module); + const allExistingImports = existing + ? 
Array.from(existing.namedImports) + : parsed.namedImports; + + const required = requiredImportsByModule.get(parsed.module); + if (required) { + // Check if we need to add any missing named imports + const missingImports = Array.from(required).filter( + (imp) => !allExistingImports.includes(imp), + ); + if (missingImports.length > 0) { + // Build import list: use preserved full specifiers (with aliases) for existing, + // and base names for new required imports + const importSpecs: string[] = []; + + // Add existing imports using their preserved full specifiers (with aliases) + if (existing) { + allExistingImports.forEach((baseName) => { + const fullSpec = existing.fullNamedImports.get(baseName); + if (fullSpec) { + importSpecs.push(fullSpec); + } else { + importSpecs.push(baseName); + } + }); + } else { + // Fallback to parsed full named imports + parsed.fullNamedImports.forEach((fullSpec) => { + importSpecs.push(fullSpec); + }); + } + + // Add missing required imports (without aliases) + importSpecs.push(...missingImports); + + // Sort imports (but preserve aliases) + importSpecs.sort(); + + finalImportLines.push( + `import { ${importSpecs.join(", ")} } from "${parsed.module}";`, + ); + } else { + // Keep existing import format with preserved aliases + const importSpecs: string[] = []; + if (existing) { + allExistingImports.forEach((baseName) => { + const fullSpec = existing.fullNamedImports.get(baseName); + if (fullSpec) { + importSpecs.push(fullSpec); + } else { + importSpecs.push(baseName); + } + }); + } else { + parsed.fullNamedImports.forEach((fullSpec) => { + importSpecs.push(fullSpec); + }); + } + importSpecs.sort(); + finalImportLines.push( + `import { ${importSpecs.join(", ")} } from "${parsed.module}";`, + ); + } + handledModules.add(parsed.module); + requiredImportsByModule.delete(parsed.module); + } else { + // Keep existing import (not in required imports - user added it) + // Preserve aliases from existing imports + const importSpecs: string[] = []; 
+ if (existing) { + allExistingImports.forEach((baseName) => { + const fullSpec = existing.fullNamedImports.get(baseName); + if (fullSpec) { + importSpecs.push(fullSpec); + } else { + importSpecs.push(baseName); + } + }); + } else { + parsed.fullNamedImports.forEach((fullSpec) => { + importSpecs.push(fullSpec); + }); + } + importSpecs.sort(); + finalImportLines.push( + `import { ${importSpecs.join(", ")} } from "${parsed.module}";`, + ); + } + } else { + // Keep non-import lines as-is (comments, etc.) + finalImportLines.push(existingImport); + } + } + + // Add any required imports that don't exist yet + for (const [module, namedImports] of requiredImportsByModule.entries()) { + if (module && !handledModules.has(module)) { + const importsList = Array.from(namedImports).sort().join(", "); + if (importsList) { + finalImportLines.push( + `import { ${importsList} } from "${module}";`, + ); + } + } + } + + finalImports = finalImportLines.join("\n") + "\n"; + } + + // Build file content with removed fields commented out + let fileContent = finalImports + "\n"; + + if (removedFields.length > 0) { + fileContent += + "// ============================================================================\n"; + fileContent += "// Removed fields (not found in metadata)\n"; + fileContent += + "// ============================================================================\n"; + for (const removedField of removedFields) { + const matchInfo = removedField.entityId + ? 
` (was matched by entityId ${removedField.entityId})` + : ""; + fileContent += `// @removed: Field not found in metadata${matchInfo}\n`; + fileContent += `// ${JSON.stringify(removedField.fieldName)}: ${removedField.fullChainText},\n\n`; + } + } + + fileContent += regenerated.code; + + // Create or update source file + project.createSourceFile(filePath, fileContent, { + overwrite: true, + }); + + // Collect export statement for index file + exportStatements.push( + `export { ${regenerated.varName} } from "./${sanitizeFileName(regenerated.varName)}";`, + ); + } + + // Format and save all files + await formatAndSaveSourceFiles(project); + + // Generate index.ts file that exports all table occurrences + const indexContent = `// ============================================================================ +// Auto-generated index file - exports all table occurrences +// ============================================================================ + +${exportStatements.join("\n")} +`; + + const indexPath = join(resolvedOutputPath, "index.ts"); + await writeFile(indexPath, indexContent, "utf-8"); +} diff --git a/packages/typegen/src/fmodata/index.ts b/packages/typegen/src/fmodata/index.ts new file mode 100644 index 00000000..9376351b --- /dev/null +++ b/packages/typegen/src/fmodata/index.ts @@ -0,0 +1,11 @@ +export { downloadTableMetadata } from "./downloadMetadata"; +export { + parseMetadata, + parseMetadataFromFile, + type ParsedMetadata, + type EntityType, + type EntitySet, + type FieldMetadata, + type NavigationProperty, +} from "./parseMetadata"; +export { generateODataTypes } from "./generateODataTypes"; diff --git a/packages/typegen/src/fmodata/parseMetadata.ts b/packages/typegen/src/fmodata/parseMetadata.ts new file mode 100644 index 00000000..f6d1ef65 --- /dev/null +++ b/packages/typegen/src/fmodata/parseMetadata.ts @@ -0,0 +1,249 @@ +import { XMLParser } from "fast-xml-parser"; +import { readFile } from "node:fs/promises"; + +export interface FieldMetadata { + 
$Type: string; + $Nullable?: boolean; + "@FieldID": string; + "@Calculation"?: boolean; + "@Global"?: boolean; + "@Org.OData.Core.V1.Permissions"?: string; + $DefaultValue?: string; + "@AutoGenerated"?: boolean; + "@Index"?: boolean; + "@VersionID"?: boolean; + "@FMComment"?: string; +} + +export interface NavigationProperty { + Name: string; + Type: string; // e.g., "Collection(com.filemaker.odata.WebData.fmp12.Work_Orders_)" +} + +export interface EntityType { + Name: string; + "@TableID": string; + "@FMComment"?: string; + $Key?: string[]; + Properties: Map; + NavigationProperties: NavigationProperty[]; +} + +export interface EntitySet { + Name: string; + EntityType: string; // Full type name like "com.filemaker.odata.WebData.fmp12.Addresses_" +} + +export interface ParsedMetadata { + entityTypes: Map; + entitySets: Map; + namespace: string; +} + +function ensureArray(value: T | T[] | undefined): T[] { + if (!value) return []; + return Array.isArray(value) ? value : [value]; +} + +/** + * Parses OData metadata XML content and extracts entity types, entity sets, and namespace. 
+ * + * @param xmlContent - The XML content as a string + * @returns Promise resolving to parsed metadata containing entity types, entity sets, and namespace + */ +export async function parseMetadata( + xmlContent: string, +): Promise { + const entityTypes = new Map(); + const entitySets = new Map(); + let namespace = ""; + + // Parse XML using fast-xml-parser + const parser = new XMLParser({ + ignoreAttributes: false, + attributeNamePrefix: "@_", + textNodeName: "#text", + parseAttributeValue: true, + trimValues: true, + }); + + const parsed = parser.parse(xmlContent); + + // Navigate to Schema element + const edmx = parsed["edmx:Edmx"] || parsed.Edmx; + if (!edmx) { + throw new Error("No Edmx element found in XML"); + } + + const dataServices = edmx["edmx:DataServices"] || edmx.DataServices; + if (!dataServices) { + throw new Error("No DataServices element found in XML"); + } + + const schema = ensureArray(dataServices.Schema)[0]; + if (!schema) { + throw new Error("No Schema element found in XML"); + } + + namespace = schema["@_Namespace"] || schema.Namespace || ""; + + // Extract EntityTypes + const entityTypeList = ensureArray(schema.EntityType); + for (const entityTypeEl of entityTypeList) { + const entityTypeName = entityTypeEl["@_Name"] || entityTypeEl.Name; + if (!entityTypeName) continue; + + // Get TableID and FMComment from Annotations + let tableId = ""; + let tableComment: string | undefined; + const annotations = ensureArray(entityTypeEl.Annotation); + for (const ann of annotations) { + const term = ann["@_Term"] || ann.Term; + if (term === "com.filemaker.odata.TableID") { + tableId = ann["@_String"] || ann.String || ""; + } else if (term === "com.filemaker.odata.FMComment") { + tableComment = ann["@_String"] || ann.String || undefined; + } + } + + // Get Key fields + const keyFields: string[] = []; + if (entityTypeEl.Key) { + const propertyRefs = ensureArray(entityTypeEl.Key.PropertyRef); + for (const propRef of propertyRefs) { + const name = 
propRef["@_Name"] || propRef.Name; + if (name) keyFields.push(name); + } + } + + // Extract Properties + const properties = new Map(); + const propertyList = ensureArray(entityTypeEl.Property); + for (const propEl of propertyList) { + const propName = propEl["@_Name"] || propEl.Name; + if (!propName) continue; + + const propType = propEl["@_Type"] || propEl.Type || ""; + // Nullable is false only if explicitly set to "false", otherwise assume nullable + // The parser converts "false" to boolean false, so check for both + const nullableAttr = propEl["@_Nullable"] ?? propEl.Nullable; + const isExplicitlyNotNullable = + nullableAttr === "false" || nullableAttr === false; + const defaultValue = + propEl["@_DefaultValue"] || propEl.DefaultValue || undefined; + + // Get annotations + let fieldId = ""; + let isCalculation = false; + let isGlobal = false; + let isAutoGenerated = false; + let hasIndex = false; + let isVersionId = false; + let permissions: string | undefined; + let fieldComment: string | undefined; + + const propAnnotations = ensureArray(propEl.Annotation); + for (const ann of propAnnotations) { + const term = ann["@_Term"] || ann.Term; + if (term === "com.filemaker.odata.FieldID") { + fieldId = ann["@_String"] || ann.String || ""; + } else if (term === "com.filemaker.odata.Calculation") { + isCalculation = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.Global") { + isGlobal = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.AutoGenerated") { + isAutoGenerated = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.Index") { + hasIndex = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.VersionID") { + isVersionId = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.FMComment") { + fieldComment = ann["@_String"] || ann.String || undefined; + } else if 
(term === "Org.OData.Core.V1.Permissions") { + const enumMember = ann.EnumMember; + if (enumMember) { + permissions = + typeof enumMember === "string" + ? enumMember + : enumMember["#text"] || undefined; + } + } + } + + properties.set(propName, { + $Type: propType, + $Nullable: !isExplicitlyNotNullable, // true if not explicitly set to false + "@FieldID": fieldId, + "@Calculation": isCalculation, + "@Global": isGlobal, + "@Org.OData.Core.V1.Permissions": permissions, + $DefaultValue: defaultValue, + "@AutoGenerated": isAutoGenerated, + "@Index": hasIndex, + "@VersionID": isVersionId, + "@FMComment": fieldComment, + }); + } + + // Extract NavigationProperties + const navigationProperties: NavigationProperty[] = []; + if (entityTypeEl.NavigationProperty) { + const navPropList = ensureArray(entityTypeEl.NavigationProperty); + for (const navPropEl of navPropList) { + const navName = navPropEl["@_Name"] || navPropEl.Name; + const navType = navPropEl["@_Type"] || navPropEl.Type; + if (navName && navType) { + navigationProperties.push({ + Name: navName, + Type: navType, + }); + } + } + } + + entityTypes.set(entityTypeName, { + Name: entityTypeName, + "@TableID": tableId, + "@FMComment": tableComment, + $Key: keyFields, + Properties: properties, + NavigationProperties: navigationProperties, + }); + } + + // Extract EntitySets from EntityContainer + const entityContainer = ensureArray(schema.EntityContainer)[0]; + if (entityContainer) { + const entitySetList = ensureArray(entityContainer.EntitySet); + for (const entitySetEl of entitySetList) { + const setName = entitySetEl["@_Name"] || entitySetEl.Name; + const entityType = entitySetEl["@_EntityType"] || entitySetEl.EntityType; + if (setName && entityType) { + // Extract just the entity type name from the full type string + // e.g., "com.filemaker.odata.WebData.fmp12.Addresses_" -> "Addresses_" + const typeNameMatch = entityType.match(/\.([^.]+)$/); + const entityTypeName = typeNameMatch ? 
typeNameMatch[1] : entityType; + + entitySets.set(setName, { + Name: setName, + EntityType: entityTypeName, + }); + } + } + } + + return { entityTypes, entitySets, namespace }; +} + +/** + * Reads and parses metadata from a file path. + * + * @param filePath - The path to the XML metadata file + * @returns Promise resolving to parsed metadata + */ +export async function parseMetadataFromFile( + filePath: string, +): Promise { + const xmlContent = await readFile(filePath, "utf-8"); + return parseMetadata(xmlContent); +} diff --git a/packages/typegen/src/fmodata/typegen.ts b/packages/typegen/src/fmodata/typegen.ts new file mode 100644 index 00000000..29d469e1 --- /dev/null +++ b/packages/typegen/src/fmodata/typegen.ts @@ -0,0 +1,68 @@ +import { FmodataConfig } from "../types"; +import { downloadTableMetadata } from "./downloadMetadata"; +import { parseMetadata, type ParsedMetadata } from "./parseMetadata"; +import { generateODataTypes } from "./generateODataTypes"; + +export async function generateODataTablesSingle(config: FmodataConfig) { + const { tables, reduceMetadata = false } = config; + + if (!tables || tables.length === 0) { + throw new Error("No tables specified in config"); + } + + // Download and parse metadata for each table + const allEntityTypes = new Map< + string, + ParsedMetadata["entityTypes"] extends Map ? V : never + >(); + const allEntitySets = new Map< + string, + ParsedMetadata["entitySets"] extends Map ? V : never + >(); + let namespace = ""; + + for (const tableConfig of tables) { + const tableName = tableConfig.tableName; + + // Download metadata for this table + const tableMetadataXml = await downloadTableMetadata({ + config, + tableName, + reduceAnnotations: tableConfig.reduceMetadata ?? 
reduceMetadata,
    });

    // Parse the metadata
    const parsedMetadata = await parseMetadata(tableMetadataXml);

    // Merge entity types
    for (const [
      entityTypeName,
      entityType,
    ] of parsedMetadata.entityTypes.entries()) {
      allEntityTypes.set(entityTypeName, entityType);
    }

    // Merge entity sets
    for (const [
      entitySetName,
      entitySet,
    ] of parsedMetadata.entitySets.entries()) {
      allEntitySets.set(entitySetName, entitySet);
    }

    // Use namespace from first table (should be the same for all)
    if (!namespace) {
      namespace = parsedMetadata.namespace;
    }
  }

  // Combine all parsed metadata
  const mergedMetadata: ParsedMetadata = {
    entityTypes: allEntityTypes,
    entitySets: allEntitySets,
    namespace,
  };

  // Generate types from merged metadata
  await generateODataTypes(mergedMetadata, config);
}
diff --git a/packages/typegen/src/formatting.ts b/packages/typegen/src/formatting.ts
index b74f9860..febb94cd 100644
--- a/packages/typegen/src/formatting.ts
+++ b/packages/typegen/src/formatting.ts
@@ -1,5 +1,5 @@
 import { Project } from "ts-morph";
-import { format, getFileInfo } from "prettier";
+import * as prettier from "prettier";
 
 /**
  * Formats all source files in a ts-morph Project using prettier and saves the changes.
@@ -12,11 +12,13 @@ export async function formatAndSaveSourceFiles(project: Project) {
 
   // run each file through the prettier formatter
   for await (const file of files) {
     const filePath = file.getFilePath();
-    const fileInfo = await getFileInfo(filePath);
+    const fileInfo = (await prettier.getFileInfo?.(filePath)) ?? {
+      ignored: false,
+    };
 
     if (fileInfo.ignored) continue;
 
-    const formatted = await format(file.getFullText(), {
+    const formatted = await prettier.format(file.getFullText(), {
       filepath: filePath,
     });
     file.replaceWithText(formatted);
diff --git a/packages/typegen/src/getEnvValues.ts b/packages/typegen/src/getEnvValues.ts
new file mode 100644
index 00000000..fe155da9
--- /dev/null
+++ b/packages/typegen/src/getEnvValues.ts
@@ -0,0 +1,179 @@
import chalk from "chalk";
import type { z } from "zod/v4";
import type { typegenConfigSingle } from "./types";
import { defaultEnvNames } from "./constants";

type EnvNames = z.infer<typeof typegenConfigSingle>["envNames"];

export interface EnvValues {
  server: string | undefined;
  db: string | undefined;
  apiKey: string | undefined;
  username: string | undefined;
  password: string | undefined;
}

export type EnvValidationResult =
  | {
      success: true;
      server: string;
      db: string;
      auth: { apiKey: string } | { username: string; password: string };
    }
  | {
      success: false;
      errorMessage: string;
    };

/**
 * Gets environment variable values for FileMaker connection.
 * Supports both fmdapi and fmodata config types.
 *
 * @param envNames - Optional custom environment variable names
 * @returns Object containing all environment variable values
 */
export function getEnvValues(envNames?: EnvNames): EnvValues {
  const server = process.env[envNames?.server ?? defaultEnvNames.server];
  const db = process.env[envNames?.db ?? defaultEnvNames.db];

  // For apiKey, check custom env name first, then fall back to default
  // This matches the pattern in typegen.ts
  const apiKey =
    (envNames?.auth && "apiKey" in envNames.auth
      ? process.env[envNames.auth.apiKey ?? defaultEnvNames.apiKey]
      : undefined) ?? process.env[defaultEnvNames.apiKey];

  const username =
    (envNames?.auth && "username" in envNames.auth
      ? process.env[envNames.auth.username ?? defaultEnvNames.username]
      : undefined) ?? process.env[defaultEnvNames.username];

  const password =
    (envNames?.auth && "password" in envNames.auth
      ? process.env[envNames.auth.password ?? defaultEnvNames.password]
      : undefined) ?? process.env[defaultEnvNames.password];

  return {
    server,
    db,
    apiKey,
    username,
    password,
  };
}

/**
 * Validates environment values and returns a result with either success data or error message.
 * Uses chalk for console output (for fmdapi compatibility).
 *
 * @param envValues - The environment values to validate
 * @param envNames - Optional custom environment variable names (for error messages)
 * @returns Validation result with success flag and either data or error message
 */
export function validateEnvValues(
  envValues: EnvValues,
  envNames?: EnvNames,
): EnvValidationResult {
  const { server, db, apiKey, username, password } = envValues;

  if (!server || !db || (!apiKey && !username)) {
    const missingVars: string[] = [];
    if (!server) {
      missingVars.push(envNames?.server ?? defaultEnvNames.server);
    }
    if (!db) {
      missingVars.push(envNames?.db ?? defaultEnvNames.db);
    }

    if (!apiKey) {
      // Determine the names to display in the error message
      const apiKeyName =
        envNames?.auth && "apiKey" in envNames.auth && envNames.auth.apiKey
          ? envNames.auth.apiKey
          : defaultEnvNames.apiKey;
      const usernameName =
        envNames?.auth && "username" in envNames.auth && envNames.auth.username
          ? envNames.auth.username
          : defaultEnvNames.username;
      const passwordName =
        envNames?.auth && "password" in envNames.auth && envNames.auth.password
          ? envNames.auth.password
          : defaultEnvNames.password;

      missingVars.push(
        `${apiKeyName} (or ${usernameName} and ${passwordName})`,
      );
    }

    return {
      success: false,
      errorMessage: `Missing required environment variables: ${missingVars.join(", ")}`,
    };
  }

  const auth: { apiKey: string } | { username: string; password: string } =
    apiKey
      ? { apiKey }
      : { username: username ??
"", password: password ?? "" }; + + return { + success: true, + server, + db, + auth, + }; +} + +/** + * Validates environment values and logs errors using chalk (for fmdapi compatibility). + * Returns undefined if validation fails, otherwise returns the validated values. + * + * @param envValues - The environment values to validate + * @param envNames - Optional custom environment variable names (for error messages) + * @returns Validated values or undefined if validation failed + */ +export function validateAndLogEnvValues( + envValues: EnvValues, + envNames?: EnvNames, +): EnvValidationResult | undefined { + const result = validateEnvValues(envValues, envNames); + + if (!result.success) { + console.log(chalk.red("ERROR: Could not get all required config values")); + console.log("Ensure the following environment variables are set:"); + + const { server, db, apiKey, username } = envValues; + + if (!server) { + console.log(`${envNames?.server ?? defaultEnvNames.server}`); + } + if (!db) { + console.log(`${envNames?.db ?? defaultEnvNames.db}`); + } + + if (!apiKey) { + // Determine the names to display in the error message + const apiKeyNameToLog = + envNames?.auth && "apiKey" in envNames.auth && envNames.auth.apiKey + ? envNames.auth.apiKey + : defaultEnvNames.apiKey; + const usernameNameToLog = + envNames?.auth && "username" in envNames.auth && envNames.auth.username + ? envNames.auth.username + : defaultEnvNames.username; + const passwordNameToLog = + envNames?.auth && "password" in envNames.auth && envNames.auth.password + ? 
envNames.auth.password + : defaultEnvNames.password; + + console.log( + `${apiKeyNameToLog} (or ${usernameNameToLog} and ${passwordNameToLog})`, + ); + } + + console.log(); + return undefined; + } + + return result; +} diff --git a/packages/typegen/src/server/api.ts b/packages/typegen/src/server/api.ts new file mode 100644 index 00000000..9fa6ed1a --- /dev/null +++ b/packages/typegen/src/server/api.ts @@ -0,0 +1,153 @@ +import { IncomingMessage } from "http"; +import { URL } from "url"; +import fs from "fs-extra"; +import path from "path"; +import { parse } from "jsonc-parser"; +import { typegenConfig } from "../types"; + +export interface ApiContext { + cwd: string; + configPath: string; +} + +export interface ApiResponse { + status: number; + headers: Record; + body: string; +} + +export async function handleApiRequest( + req: IncomingMessage, + url: URL, + context: ApiContext, +): Promise { + const pathname = url.pathname.replace("/api", ""); + + // GET /api/config + if (pathname === "/config" && req.method === "GET") { + return handleGetConfig(context); + } + + // POST /api/config + if (pathname === "/config" && req.method === "POST") { + return handlePostConfig(req, context); + } + + return { + status: 404, + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ error: "Not found" }), + }; +} + +async function handleGetConfig(context: ApiContext): Promise { + const { configPath } = context; + const fullPath = path.resolve(context.cwd, configPath); + + const exists = fs.existsSync(fullPath); + + if (!exists) { + return { + status: 200, + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + exists: false, + path: configPath, + config: null, + }), + }; + } + + try { + const raw = fs.readFileSync(fullPath, "utf8"); + const parsed = parse(raw); + + return { + status: 200, + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + exists: true, + path: configPath, + config: parsed, + }), + }; + } catch 
(err) { + return { + status: 500, + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + error: err instanceof Error ? err.message : "Failed to read config", + }), + }; + } +} + +async function handlePostConfig( + req: IncomingMessage, + context: ApiContext, +): Promise { + try { + const body = await readRequestBody(req); + const data = JSON.parse(body); + + // Handle both { config: ... } and direct config object + const configToValidate = data.config ?? data; + + // Validate with Zod + const validation = typegenConfig.safeParse({ config: configToValidate }); + + if (!validation.success) { + const issues = validation.error.issues.map((err) => ({ + path: err.path, + message: err.message, + })); + + return { + status: 400, + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + success: false, + error: "Validation failed", + issues, + }), + }; + } + + // Write to disk as pretty JSON (replacing JSONC) + const fullPath = path.resolve(context.cwd, context.configPath); + const jsonContent = JSON.stringify(validation.data, null, 2) + "\n"; + + await fs.ensureDir(path.dirname(fullPath)); + await fs.writeFile(fullPath, jsonContent, "utf8"); + + return { + status: 200, + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ success: true }), + }; + } catch (err) { + return { + status: 500, + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + success: false, + error: err instanceof Error ? 
err.message : "Unknown error", + }), + }; + } +} + +function readRequestBody(req: IncomingMessage): Promise { + return new Promise((resolve, reject) => { + let body = ""; + req.on("data", (chunk) => { + body += chunk.toString(); + }); + req.on("end", () => { + resolve(body); + }); + req.on("error", (err) => { + reject(err); + }); + }); +} diff --git a/packages/typegen/src/server/app.ts b/packages/typegen/src/server/app.ts new file mode 100644 index 00000000..15b03c8f --- /dev/null +++ b/packages/typegen/src/server/app.ts @@ -0,0 +1,679 @@ +import { Hono } from "hono"; +import { zValidator } from "@hono/zod-validator"; +import fs from "fs-extra"; +import path from "path"; +import { parse } from "jsonc-parser"; +import { typegenConfig, typegenConfigSingle } from "../types"; +import z from "zod/v4"; +import { type clientTypes, FileMakerError } from "@proofkit/fmdapi"; +import { + createDataApiClient, + createClientFromConfig, + createOdataClientFromConfig, +} from "./createDataApiClient"; +import { ContentfulStatusCode } from "hono/utils/http-status"; +import { generateTypedClients } from "../typegen"; +import { FMServerConnection } from "@proofkit/fmodata"; +import { downloadTableMetadata, parseMetadata } from "../fmodata"; + +export interface ApiContext { + cwd: string; + configPath: string; +} + +/** + * Flattens a nested layout/folder structure into a flat list with full paths + */ +function flattenLayouts( + layouts: clientTypes.LayoutOrFolder[], + parentPath: string = "", +): Array<{ name: string; path: string; table?: string }> { + const result: Array<{ name: string; path: string; table?: string }> = []; + + for (const item of layouts) { + if ("isFolder" in item && item.isFolder) { + // It's a folder - recursively process its contents + const folderPath = parentPath ? 
`${parentPath}/${item.name}` : item.name; + if (item.folderLayoutNames) { + result.push(...flattenLayouts(item.folderLayoutNames, folderPath)); + } + } else { + // It's a layout + const layoutPath = parentPath ? `${parentPath}/${item.name}` : item.name; + result.push({ + name: item.name, + path: layoutPath, + table: "table" in item ? item.table : undefined, + }); + } + } + + return result; +} + +export function createApiApp(context: ApiContext) { + // Define all routes with proper chaining for type inference + const app = new Hono() + .basePath("/api") + + // GET /api/config + .get("/config", async (c) => { + const { configPath, cwd } = context; + const fullPath = path.resolve(cwd, configPath); + + const exists = fs.existsSync(fullPath); + + if (!exists) { + return c.json({ + exists: false, + path: configPath, + fullPath: fullPath, + config: null, + }); + } + + try { + const raw = fs.readFileSync(fullPath, "utf8"); + const rawJson = parse(raw); + const parsed = typegenConfig.parse(rawJson); + + return c.json({ + exists: true, + path: configPath, + fullPath: fullPath, + config: parsed.config, + }); + } catch (err) { + console.log("error from get config", err); + return c.json( + { + error: err instanceof Error ? err.message : "Failed to read config", + }, + 500, + ); + } + }) + // POST /api/config + .post( + "/config", + zValidator( + "json", + z.object({ + config: z.array(typegenConfigSingle), + }), + ), + async (c) => { + try { + const data = c.req.valid("json"); + + // Validate with Zod (data is already { config: [...] 
}) + const validation = typegenConfig.safeParse(data); + + if (!validation.success) { + const issues = validation.error.issues.map((err) => ({ + path: err.path, + message: err.message, + })); + + const response = z + .object({ + success: z.boolean(), + error: z.string().optional(), + issues: z + .array( + z.object({ + path: z.array(z.union([z.string(), z.number()])), + message: z.string(), + }), + ) + .optional(), + }) + .parse({ + success: false, + error: "Validation failed", + issues, + }); + return c.json(response, 400); + } + + // Write to disk as pretty JSON (replacing JSONC) + const fullPath = path.resolve(context.cwd, context.configPath); + const jsonContent = JSON.stringify(validation.data, null, 2) + "\n"; + + await fs.ensureDir(path.dirname(fullPath)); + await fs.writeFile(fullPath, jsonContent, "utf8"); + + const response = z + .object({ + success: z.boolean(), + error: z.string().optional(), + issues: z + .array( + z.object({ + path: z.array(z.union([z.string(), z.number()])), + message: z.string(), + }), + ) + .optional(), + }) + .parse({ success: true }); + return c.json(response); + } catch (err) { + const response = z + .object({ + success: z.boolean(), + error: z.string().optional(), + issues: z + .array( + z.object({ + path: z.array(z.union([z.string(), z.number()])), + message: z.string(), + }), + ) + .optional(), + }) + .parse({ + success: false, + error: err instanceof Error ? 
err.message : "Unknown error", + }); + return c.json(response, 500); + } + }, + ) + // POST /api/run (stub) + .post( + "/run", + zValidator( + "json", + z.object({ + config: z.union([z.array(typegenConfigSingle), typegenConfigSingle]), + }), + ), + async (c, next) => { + const data = c.req.valid("json"); + const config = data.config; + + await generateTypedClients(config); + await next(); + }, + ) + // GET /api/layouts + .get( + "/layouts", + zValidator("query", z.object({ configIndex: z.coerce.number() })), + async (c) => { + const input = c.req.valid("query"); + const configIndex = input.configIndex; + + const result = createDataApiClient(context, configIndex); + + // Check if result is an error + if ("error" in result) { + const statusCode = result.statusCode; + if (statusCode === 400) { + return c.json( + { + error: result.error, + ...(result.details || {}), + }, + 400, + ); + } else if (statusCode === 404) { + return c.json( + { + error: result.error, + ...(result.details || {}), + }, + 404, + ); + } else { + return c.json( + { + error: result.error, + ...(result.details || {}), + }, + 500, + ); + } + } + + const { client } = result; + + // Call layouts method - using type assertion as TypeScript has inference issues with DataApi return type + // The layouts method exists but TypeScript can't infer it from the complex return type + try { + const layoutsResp = (await (client as any).layouts()) as { + layouts: clientTypes.LayoutOrFolder[]; + }; + const { layouts } = layoutsResp; + + // Flatten the nested layout/folder structure into a flat list with full paths + const flatLayouts = flattenLayouts(layouts); + + return c.json({ layouts: flatLayouts }); + } catch (err) { + // Handle connection errors from layouts() call + let errorMessage = "Failed to fetch layouts"; + let statusCode = 500; + let suspectedField: "server" | "db" | "auth" | undefined; + let fmErrorCode: string | undefined; + + if (err instanceof FileMakerError) { + errorMessage = err.message; + 
fmErrorCode = err.code; + + // Infer suspected field from error code + if (err.code === "105") { + suspectedField = "db"; + errorMessage = `Database not found: ${err.message}`; + } else if (err.code === "212" || err.code === "952") { + suspectedField = "auth"; + errorMessage = `Authentication failed: ${err.message}`; + } + statusCode = 400; + } else if (err instanceof TypeError) { + errorMessage = `Connection error: ${err.message}`; + suspectedField = "server"; + statusCode = 400; + } else if (err instanceof Error) { + errorMessage = err.message; + statusCode = 500; + } + + return c.json( + { + error: errorMessage, + message: errorMessage, + suspectedField, + fmErrorCode, + }, + statusCode as ContentfulStatusCode, + ); + } + }, + ) + // GET /api/env-names + .get( + "/env-names", + zValidator("query", z.object({ envName: z.string() })), + async (c) => { + const input = c.req.valid("query"); + + const value = process.env[input.envName]; + + return c.json({ value }); + }, + ) + .get( + "/file-exists", + zValidator("query", z.object({ path: z.string() })), + async (c) => { + const input = c.req.valid("query"); + const path = input.path; + const exists = await fs.pathExists(path); + return c.json({ exists }); + }, + ) + .post( + "/table-metadata", + zValidator( + "json", + z.object({ + config: typegenConfigSingle, + tableName: z.string(), + }), + ), + async (c) => { + const input = c.req.valid("json"); + const config = input.config; + const { tableName } = input; + if (config.type !== "fmodata") { + return c.json({ error: "Invalid config type" }, 400); + } + const tableConfig = config.tables.find( + (t) => t.tableName === tableName, + ); + try { + // Download metadata for the specified table + const tableMetadataXml = await downloadTableMetadata({ + config: config, + tableName, + reduceAnnotations: tableConfig?.reduceMetadata ?? 
false, + }); + // Parse the metadata + const parsedMetadata = await parseMetadata(tableMetadataXml); + // Convert Maps to objects for JSON serialization + // Also convert nested Maps (like Properties) to objects + const serializedMetadata = { + entityTypes: Object.fromEntries( + Array.from(parsedMetadata.entityTypes.entries()).map( + ([key, value]) => [ + key, + { + ...value, + Properties: Object.fromEntries(value.Properties), + }, + ], + ), + ), + entitySets: Object.fromEntries(parsedMetadata.entitySets), + namespace: parsedMetadata.namespace, + }; + return c.json({ parsedMetadata: serializedMetadata }); + } catch (err) { + return c.json( + { + error: + err instanceof Error ? err.message : "Failed to fetch metadata", + }, + 500, + ); + } + }, + ) + .get( + "/list-tables", + zValidator("query", z.object({ config: z.string() })), + async (c) => { + const input = c.req.valid("query"); + // Parse the JSON-encoded config string + let config: z.infer; + try { + config = typegenConfigSingle.parse(JSON.parse(input.config)); + } catch (err) { + return c.json({ error: "Invalid config format" }, 400); + } + if (config.type !== "fmodata") { + return c.json({ error: "Invalid config type" }, 400); + } + try { + const result = createOdataClientFromConfig(config); + if ("error" in result) { + return c.json( + { + error: result.error, + kind: result.kind, + suspectedField: result.suspectedField, + }, + result.statusCode as ContentfulStatusCode, + ); + } + const { db } = result; + const tableNames = await db.listTableNames(); + return c.json({ tables: tableNames }); + } catch (err) { + return c.json( + { + error: + err instanceof Error ? 
err.message : "Failed to list tables", + }, + 500, + ); + } + }, + ) + // POST /api/test-connection + .post( + "/test-connection", + zValidator("json", z.object({ config: typegenConfigSingle })), + async (c) => { + try { + const data = c.req.valid("json"); + const config = data.config; + + // Validate config type + if (config.type === "fmdapi") { + // Create client from config + const clientResult = createClientFromConfig(config); + + // Check if client creation failed + if ("error" in clientResult) { + return c.json( + { + ok: false, + ...clientResult, + }, + clientResult.statusCode as ContentfulStatusCode, + ); + } + + const { client, server, db, authType } = clientResult; + + // Test connection by calling layouts() + try { + await client.layouts(); + + return c.json({ + ok: true, + server, + db, + authType, + }); + } catch (err) { + // Handle connection errors + let errorMessage = "Failed to connect to FileMaker Data API"; + let statusCode = 500; + let kind: "connection_error" | "unknown" = "unknown"; + let suspectedField: "server" | "db" | "auth" | undefined; + let fmErrorCode: string | undefined; + + if (err instanceof FileMakerError) { + errorMessage = err.message; + fmErrorCode = err.code; + kind = "connection_error"; + + // Infer suspected field from error code + // Common FileMaker error codes: + // 105 = Database not found + // 212 = Authentication failed + // 802 = Record not found (less relevant here) + if (err.code === "105") { + suspectedField = "db"; + errorMessage = `Database not found: ${err.message}`; + } else if (err.code === "212" || err.code === "952") { + suspectedField = "auth"; + errorMessage = `Authentication failed: ${err.message}`; + } + statusCode = 400; + } else if (err instanceof TypeError) { + // Network/URL errors + errorMessage = `Connection error: ${err.message}`; + suspectedField = "server"; + kind = "connection_error"; + statusCode = 400; + } else if (err instanceof Error) { + errorMessage = err.message; + kind = 
"connection_error"; + statusCode = 500; + } + + return c.json( + { + ok: false, + error: errorMessage, + statusCode, + kind, + suspectedField, + fmErrorCode, + message: errorMessage, + }, + statusCode as ContentfulStatusCode, + ); + } + } else if (config.type === "fmodata") { + const result = createOdataClientFromConfig(config); + if ("error" in result) { + return c.json( + { + ok: false, + ...result, + }, + result.statusCode as ContentfulStatusCode, + ); + } + + const { db, connection, server, dbName, authType } = result; + + if (authType === "username") { + // Test connection by calling listDatabaseNames() and listTableNames() separately + // First test: listDatabaseNames() - tests server connection + try { + await connection.listDatabaseNames(); + } catch (err) { + // Handle connection errors from listDatabaseNames() + let errorMessage = + "Failed to connect to FileMaker OData API (listDatabaseNames failed)"; + let statusCode = 500; + let kind: "connection_error" | "unknown" = "unknown"; + let suspectedField: "server" | "db" | "auth" | undefined; + + if (err instanceof Error) { + errorMessage = `listDatabaseNames() failed: ${err.message}`; + kind = "connection_error"; + + // Infer suspected field from error message + const lowerMessage = errorMessage.toLowerCase(); + if ( + lowerMessage.includes("database") || + lowerMessage.includes("not found") || + lowerMessage.includes("404") + ) { + suspectedField = "db"; + } else if ( + lowerMessage.includes("auth") || + lowerMessage.includes("unauthorized") || + lowerMessage.includes("401") || + lowerMessage.includes("403") + ) { + suspectedField = "auth"; + } else if ( + lowerMessage.includes("network") || + lowerMessage.includes("connection") || + lowerMessage.includes("timeout") || + lowerMessage.includes("dns") + ) { + suspectedField = "server"; + } + + // Network/URL errors typically indicate server issues + if (err instanceof TypeError) { + suspectedField = "server"; + statusCode = 400; + } else { + statusCode = 
400; + } + } + + return c.json( + { + ok: false, + error: errorMessage, + statusCode, + kind, + suspectedField, + message: errorMessage, + failedMethod: "listDatabaseNames", + }, + statusCode as ContentfulStatusCode, + ); + } + } + + // Second test: listTableNames() - tests database connection + try { + await db.listTableNames(); + + return c.json({ + ok: true, + server, + db: dbName, + authType, + }); + } catch (err) { + // Handle connection errors from listTableNames() + let errorMessage = + "Failed to connect to FileMaker OData API (listTableNames failed)"; + let statusCode = 500; + let kind: "connection_error" | "unknown" = "unknown"; + let suspectedField: "server" | "db" | "auth" | undefined; + + if (err instanceof Error) { + errorMessage = `listTableNames() failed: ${err.message}`; + kind = "connection_error"; + + // Infer suspected field from error message + const lowerMessage = errorMessage.toLowerCase(); + if ( + lowerMessage.includes("database") || + lowerMessage.includes("not found") || + lowerMessage.includes("404") + ) { + suspectedField = "db"; + } else if ( + lowerMessage.includes("auth") || + lowerMessage.includes("unauthorized") || + lowerMessage.includes("401") || + lowerMessage.includes("403") + ) { + suspectedField = "auth"; + } else if ( + lowerMessage.includes("network") || + lowerMessage.includes("connection") || + lowerMessage.includes("timeout") || + lowerMessage.includes("dns") + ) { + suspectedField = "server"; + } + + // Network/URL errors typically indicate server issues + if (err instanceof TypeError) { + suspectedField = "server"; + statusCode = 400; + } else { + statusCode = 400; + } + } + + return c.json( + { + ok: false, + error: errorMessage, + statusCode, + kind, + suspectedField, + message: errorMessage, + failedMethod: "listTableNames", + }, + statusCode as ContentfulStatusCode, + ); + } + } else { + return c.json( + { + ok: false, + error: "Invalid config type", + }, + 400, + ); + } + } catch (err) { + return c.json( + { + ok: 
false, + error: err instanceof Error ? err.message : "Unknown error", + statusCode: 500, + kind: "unknown", + message: err instanceof Error ? err.message : "Unknown error", + }, + 500, + ); + } + }, + ); + + return app; +} + +// Export the app type for use in the typed client +// With proper chaining, TypeScript can now infer all route types +export type ApiApp = ReturnType; diff --git a/packages/typegen/src/server/createDataApiClient.ts b/packages/typegen/src/server/createDataApiClient.ts new file mode 100644 index 00000000..80141c7c --- /dev/null +++ b/packages/typegen/src/server/createDataApiClient.ts @@ -0,0 +1,306 @@ +import fs from "fs-extra"; +import path from "path"; +import { parse } from "jsonc-parser"; +import { typegenConfig, typegenConfigSingle } from "../types"; +import type { z } from "zod/v4"; +import { OttoAdapter, type OttoAPIKey } from "@proofkit/fmdapi/adapters/otto"; +import DataApi from "@proofkit/fmdapi"; +import { FetchAdapter } from "@proofkit/fmdapi/adapters/fetch"; +import { memoryStore } from "@proofkit/fmdapi/tokenStore/memory"; +import { defaultEnvNames } from "../constants"; +import type { ApiContext } from "./app"; +import { Database, FMServerConnection } from "@proofkit/fmodata"; + +export interface CreateClientResult { + client: ReturnType>; + config: Extract, { type: "fmdapi" }>; + server: string; + db: string; + authType: "apiKey" | "username"; +} + +export interface CreateClientError { + error: string; + statusCode: number; + details?: Record; + kind?: "missing_env" | "adapter_error" | "connection_error" | "unknown"; + suspectedField?: "server" | "db" | "auth"; + fmErrorCode?: string; + message?: string; +} + +type SingleConfig = z.infer; + +type FmdapiConfig = Extract; + +type FmodataConfig = Extract; + +type EnvVarsResult = + | CreateClientError + | { + server: string; + db: string; + authType: "apiKey" | "username"; + auth: { apiKey: OttoAPIKey } | { username: string; password: string }; + }; + +function getEnvVarsFromConfig( 
+ envNames: SingleConfig["envNames"], +): EnvVarsResult { + // Helper to get env name, treating empty strings as undefined + const getEnvName = (customName: string | undefined, defaultName: string) => + customName && customName.trim() !== "" ? customName : defaultName; + + // Resolve environment variables + const server = + process.env[getEnvName(envNames?.server, defaultEnvNames.server)]; + const db = process.env[getEnvName(envNames?.db, defaultEnvNames.db)]; + const apiKey = + (envNames?.auth && "apiKey" in envNames.auth + ? process.env[getEnvName(envNames.auth.apiKey, defaultEnvNames.apiKey)] + : undefined) ?? process.env[defaultEnvNames.apiKey]; + const username = + (envNames?.auth && "username" in envNames.auth + ? process.env[ + getEnvName(envNames.auth.username, defaultEnvNames.username) + ] + : undefined) ?? process.env[defaultEnvNames.username]; + const password = + (envNames?.auth && "password" in envNames.auth + ? process.env[ + getEnvName(envNames.auth.password, defaultEnvNames.password) + ] + : undefined) ?? process.env[defaultEnvNames.password]; + + // Validate required env vars + if (!server || !db || (!apiKey && !username)) { + console.error("Missing required environment variables", { + server, + db, + apiKey, + username, + }); + + // Build missing details object + const missingDetails: { + server?: boolean; + db?: boolean; + auth?: boolean; + password?: boolean; + } = { + server: !server, + db: !db, + auth: !apiKey && !username, + }; + + // Only report password as missing if server and db are both present, + // and username is set but password is missing. This ensures we don't + // incorrectly report password as missing when the actual error is about + // missing server or database. 
+ if (server && db && username && !password) { + missingDetails.password = true; + } + + return { + error: "Missing required environment variables", + statusCode: 400, + kind: "missing_env" as const, + details: { + missing: missingDetails, + }, + suspectedField: (!server + ? "server" + : !db + ? "db" + : !apiKey && !username + ? "auth" + : undefined) as "server" | "db" | "auth" | undefined, + message: !server + ? "Server URL environment variable is missing" + : !db + ? "Database name environment variable is missing" + : "Authentication credentials environment variable is missing", + }; + } + + // Validate password if username is provided + if (username && !password) { + return { + error: "Password is required when using username authentication", + statusCode: 400, + kind: "missing_env" as const, + details: { + missing: { + password: true, + }, + }, + suspectedField: "auth" as const, + message: "Password environment variable is missing", + }; + } + + return { + server, + db, + authType: (apiKey ? "apiKey" : "username") as "apiKey" | "username", + auth: apiKey + ? { apiKey: apiKey as OttoAPIKey } + : { username: username ?? "", password: password ?? 
"" }, + }; +} + +export interface OdataClientResult { + db: Database; + connection: FMServerConnection; + server: string; + dbName: string; + authType: "apiKey" | "username"; +} + +export interface OdataClientError { + error: string; + statusCode: number; + kind?: "missing_env" | "adapter_error" | "connection_error" | "unknown"; + suspectedField?: "server" | "db" | "auth"; +} + +export function createOdataClientFromConfig( + config: FmodataConfig, +): OdataClientResult | OdataClientError { + const result = getEnvVarsFromConfig(config.envNames); + if ("error" in result) { + return result; + } + const { server, db: dbName, authType, auth } = result; + + const connection = new FMServerConnection({ + serverUrl: server, + auth, + }); + + const db = connection.database(dbName); + + return { db, connection, server, dbName, authType }; +} + +/** + * Creates a DataApi client from an in-memory config object + * @param config The fmdapi config object + * @returns The client, server, and db, or an error object + */ +export function createClientFromConfig( + config: FmdapiConfig, +): Omit | CreateClientError { + const result = getEnvVarsFromConfig(config.envNames); + if ("error" in result) { + return result; + } + const { server, db, authType, auth } = result; + + // Determine which auth method will be used (prefer API key if available) + + // Create DataApi client with error handling for adapter construction + let client: ReturnType>; + try { + client = + "apiKey" in auth + ? DataApi({ + adapter: new OttoAdapter({ auth, server, db }), + layout: "", + }) + : DataApi({ + adapter: new FetchAdapter({ + auth: auth as any, + server, + db, + tokenStore: memoryStore(), + }), + layout: "", + }); + } catch (err) { + // Handle adapter construction errors (e.g., invalid API key format, empty username/password) + const errorMessage = + err instanceof Error ? 
err.message : "Failed to create adapter"; + return { + error: errorMessage, + statusCode: 400, + kind: "adapter_error", + suspectedField: "auth", + message: errorMessage, + }; + } + + return { + client, + server, + db, + authType, + }; +} + +/** + * Creates a DataApi client from a config index + * @param context The API context with cwd and configPath + * @param configIndex The index of the config to use + * @returns The client, config, server, and db, or an error object + */ +export function createDataApiClient( + context: ApiContext, + configIndex: number, +): CreateClientResult | CreateClientError { + // Read and parse config file + const fullPath = path.resolve(context.cwd, context.configPath); + + if (!fs.existsSync(fullPath)) { + return { + error: "Config file not found", + statusCode: 404, + }; + } + + let parsed; + try { + const raw = fs.readFileSync(fullPath, "utf8"); + const rawJson = parse(raw); + parsed = typegenConfig.parse(rawJson); + } catch (err) { + return { + error: err instanceof Error ? err.message : "Failed to parse config", + statusCode: 500, + }; + } + + // Get config at index + const configArray = Array.isArray(parsed.config) + ? 
parsed.config + : [parsed.config]; + const config = configArray[configIndex]; + + if (!config) { + return { + error: "Config not found", + statusCode: 404, + }; + } + + // Validate config type + if (config.type !== "fmdapi") { + return { + error: "Only fmdapi config type is supported", + statusCode: 400, + }; + } + + // Use the extracted helper function + const result = createClientFromConfig(config); + + // Check if result is an error + if ("error" in result) { + return result; + } + + return { + ...result, + config, + }; +} diff --git a/packages/typegen/src/server/index.ts b/packages/typegen/src/server/index.ts new file mode 100644 index 00000000..f41fd414 --- /dev/null +++ b/packages/typegen/src/server/index.ts @@ -0,0 +1,155 @@ +import { serve } from "@hono/node-server"; +import { readFileSync, existsSync } from "fs"; +import { join, dirname, resolve } from "path"; +import { fileURLToPath } from "url"; +import { createServer } from "net"; +import { Hono } from "hono"; +import { createApiApp } from "./app"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +// Resolve path to embedded web assets +// When compiled, this will be relative to dist/esm/server/index.js +// So we go up to dist/esm, then into dist/web +const WEB_DIR = resolve(__dirname, "../../web"); + +export interface ServerOptions { + port: number | null; + cwd: string; + configPath: string; +} + +export async function startServer(options: ServerOptions) { + const { port, cwd, configPath } = options; + + const app = new Hono(); + + // Mount API routes + const apiApp = createApiApp({ cwd, configPath }); + app.route("/", apiApp); + + // Serve static files (only for non-API routes) + app.get("*", async (c) => { + const url = new URL(c.req.url); + // Skip API routes + if (url.pathname.startsWith("/api/")) { + return c.notFound(); + } + + // Handle root path + // Remove leading slash from pathname to avoid path.join() ignoring WEB_DIR + const pathname = + 
url.pathname === "/" ? "index.html" : url.pathname.slice(1); + const filePath = join(WEB_DIR, pathname); + + try { + if (existsSync(filePath)) { + const content = readFileSync(filePath); + const ext = filePath.split(".").pop()?.toLowerCase(); + const contentType = getContentType(ext || ""); + return c.body(content, 200, { + "Content-Type": contentType, + }); + } + } catch (err) { + // Fall through to SPA fallback + } + + // SPA fallback - serve index.html for client-side routing + try { + const indexPath = join(WEB_DIR, "index.html"); + if (existsSync(indexPath)) { + const content = readFileSync(indexPath); + return c.html(content.toString()); + } + } catch (err) { + // If we can't even serve index.html, return 404 + } + + return c.text("Not found", 404); + }); + + // If port is null, try to find an available port starting from 3141 + // Try 3141 first, then 3142-3151 (next 10 ports) if needed + let actualPort: number; + if (port === null) { + actualPort = await findAvailablePort(3141, 11); + } else { + // If port is explicitly specified, use it as-is + actualPort = port; + } + + const server = serve({ + fetch: app.fetch, + port: actualPort, + }); + + // The serve function from @hono/node-server already starts listening + // We just need to return the server with a close method + return Promise.resolve({ + port: actualPort, + close: () => { + server.close(); + }, + }); +} + +async function findAvailablePort( + startPort: number, + maxAttempts: number, +): Promise { + for (let i = 0; i < maxAttempts; i++) { + const portToTry = startPort + i; + const isAvailable = await checkPortAvailable(portToTry); + if (isAvailable) { + return portToTry; + } + } + throw new Error( + `Could not find an available port in range ${startPort}-${startPort + maxAttempts - 1}`, + ); +} + +function checkPortAvailable(port: number): Promise { + return new Promise((resolve) => { + const server = createServer(); + + server.once("error", (err: NodeJS.ErrnoException) => { + if (err.code === 
"EADDRINUSE") { + resolve(false); + } else { + // For other errors, assume port is not available + resolve(false); + } + }); + + server.once("listening", () => { + server.close(); + resolve(true); + }); + + server.listen(port); + }); +} + +function getContentType(ext: string): string { + const types: Record = { + html: "text/html", + js: "application/javascript", + mjs: "application/javascript", + css: "text/css", + json: "application/json", + png: "image/png", + jpg: "image/jpeg", + jpeg: "image/jpeg", + gif: "image/gif", + svg: "image/svg+xml", + ico: "image/x-icon", + woff: "font/woff", + woff2: "font/woff2", + ttf: "font/ttf", + eot: "application/vnd.ms-fontobject", + }; + return types[ext] || "application/octet-stream"; +} diff --git a/packages/typegen/src/typegen.ts b/packages/typegen/src/typegen.ts index c86cb852..69122863 100644 --- a/packages/typegen/src/typegen.ts +++ b/packages/typegen/src/typegen.ts @@ -24,6 +24,8 @@ import { z } from "zod/v4"; import { formatAndSaveSourceFiles } from "./formatting"; import { type PackageJson } from "type-fest"; import semver from "semver"; +import { getEnvValues, validateAndLogEnvValues } from "./getEnvValues"; +import { generateODataTablesSingle } from "./fmodata/typegen"; export const generateTypedClients = async ( config: z.infer["config"], @@ -41,17 +43,23 @@ export const generateTypedClients = async ( return; } - if (Array.isArray(parsedConfig.data.config)) { - for (const option of parsedConfig.data.config) { - await generateTypedClientsSingle(option, options); + const configArray = Array.isArray(parsedConfig.data.config) + ? 
parsedConfig.data.config + : [parsedConfig.data.config]; + + for (const singleConfig of configArray) { + if (singleConfig.type === "fmdapi") { + await generateTypedClientsSingle(singleConfig, options); + } else if (singleConfig.type === "fmodata") { + await generateODataTablesSingle(singleConfig); + } else { + console.log(chalk.red("ERROR: Invalid config type")); } - } else { - await generateTypedClientsSingle(parsedConfig.data.config, options); } }; const generateTypedClientsSingle = async ( - config: z.infer, + config: Extract, { type: "fmdapi" }>, options?: { resetOverrides?: boolean; cwd?: string }, ) => { const { @@ -91,56 +99,19 @@ const generateTypedClientsSingle = async ( const project = new Project({}); - const server = process.env[envNames?.server ?? defaultEnvNames.server]; - const db = process.env[envNames?.db ?? defaultEnvNames.db]; - const apiKey = - (envNames?.auth && "apiKey" in envNames.auth - ? process.env[envNames.auth.apiKey ?? defaultEnvNames.apiKey] - : undefined) ?? process.env[defaultEnvNames.apiKey]; - const username = - (envNames?.auth && "username" in envNames.auth - ? process.env[envNames.auth.username ?? defaultEnvNames.username] - : undefined) ?? process.env[defaultEnvNames.username]; - const password = - (envNames?.auth && "password" in envNames.auth - ? process.env[envNames.auth.password ?? defaultEnvNames.password] - : undefined) ?? process.env[defaultEnvNames.password]; - - const auth: { apiKey: OttoAPIKey } | { username: string; password: string } = - apiKey - ? { apiKey: apiKey as OttoAPIKey } - : { username: username ?? "", password: password ?? "" }; - - if (!server || !db || (!apiKey && !username)) { - console.log(chalk.red("ERROR: Could not get all required config values")); - console.log("Ensure the following environment variables are set:"); - if (!server) console.log(`${envNames?.server ?? defaultEnvNames.server}`); - if (!db) console.log(`${envNames?.db ?? 
defaultEnvNames.db}`); - - if (!apiKey) { - // Determine the names to display in the error message - const apiKeyNameToLog = - envNames?.auth && "apiKey" in envNames.auth && envNames.auth.apiKey - ? envNames.auth.apiKey - : defaultEnvNames.apiKey; - const usernameNameToLog = - envNames?.auth && "username" in envNames.auth && envNames.auth.username - ? envNames.auth.username - : defaultEnvNames.username; - const passwordNameToLog = - envNames?.auth && "password" in envNames.auth && envNames.auth.password - ? envNames.auth.password - : defaultEnvNames.password; + const envValues = getEnvValues(envNames); + const validationResult = validateAndLogEnvValues(envValues, envNames); - console.log( - `${apiKeyNameToLog} (or ${usernameNameToLog} and ${passwordNameToLog})`, - ); - } - - console.log(); + if (!validationResult || !validationResult.success) { return; } + const { server, db, auth: validatedAuth } = validationResult; + const auth: { apiKey: OttoAPIKey } | { username: string; password: string } = + "apiKey" in validatedAuth + ? { apiKey: validatedAuth.apiKey as OttoAPIKey } + : validatedAuth; + await fs.ensureDir(rootDir); if (clearOldFiles) { fs.emptyDirSync(path.join(rootDir, "client")); @@ -192,24 +163,25 @@ const generateTypedClientsSingle = async ( ? validator : "ts", strictNumbers: item.strictNumbers, - webviewerScriptName: config.webviewerScriptName, + webviewerScriptName: + config?.type === "fmdapi" ? config.webviewerScriptName : undefined, envNames: { auth: "apiKey" in auth ? { apiKey: envNames?.auth && "apiKey" in envNames.auth - ? (envNames.auth.apiKey as OttoAPIKey) - : (defaultEnvNames.apiKey as OttoAPIKey), + ? (envNames.auth.apiKey ?? defaultEnvNames.apiKey) + : defaultEnvNames.apiKey, } : { username: envNames?.auth && "username" in envNames.auth - ? envNames.auth.username + ? (envNames.auth.username ?? defaultEnvNames.username) : defaultEnvNames.username, password: envNames?.auth && "password" in envNames.auth - ? envNames.auth.password + ? 
(envNames.auth.password ?? defaultEnvNames.password) : defaultEnvNames.password, }, db: envNames?.db ?? defaultEnvNames.db, diff --git a/packages/typegen/src/types.ts b/packages/typegen/src/types.ts index b3483752..ee2fe244 100644 --- a/packages/typegen/src/types.ts +++ b/packages/typegen/src/types.ts @@ -26,68 +26,196 @@ const layoutConfig = z.object({ const envNames = z .object({ - server: z.string(), - db: z.string(), + server: z + .string() + .optional() + .transform((val) => (val === "" ? undefined : val)), + db: z + .string() + .optional() + .transform((val) => (val === "" ? undefined : val)), auth: z.union([ z .object({ - apiKey: z.string(), + apiKey: z + .string() + .optional() + .transform((val) => (val === "" ? undefined : val)), }) - .partial(), + .optional() + .transform((val) => { + if (val && Object.values(val).every((v) => v === undefined)) { + return undefined; + } + return val ?? undefined; + }), z .object({ - username: z.string(), - password: z.string(), + username: z + .string() + .optional() + .transform((val) => (val === "" ? undefined : val)), + password: z + .string() + .optional() + .transform((val) => (val === "" ? undefined : val)), }) - .partial(), + .optional() + .transform((val) => { + if (val && Object.values(val).every((v) => v === undefined)) { + return undefined; + } + return val ?? undefined; + }), ]), }) - .partial() .optional() + .transform((val) => { + if (val && Object.values(val).every((v) => v === undefined)) { + return undefined; + } + return val ?? 
undefined; + }) .meta({ description: "If you're using other environment variables than the default, custom the NAMES of them here for the typegen to lookup their values when it runs.", }); -export const typegenConfigSingle = z.object({ - envNames, - layouts: z.array(layoutConfig), - path: z - .string() - .default("schema") - .optional() - .meta({ description: "The folder path to output the generated files" }), - clearOldFiles: z.boolean().default(false).optional().meta({ - description: - "If false, the path will not be cleared before the new files are written. Only the `client` and `generated` directories are cleared to allow for potential overrides to be kept.", +const path = z + .string() + .default("schema") + .optional() + .meta({ description: "The folder path to output the generated files" }); + +// Field-level override configuration +const fieldOverride = z.object({ + // Field name to apply override to + fieldName: z.string().meta({ + description: "The field name this override applies to", + }), + // Exclude this field from generation + exclude: z.boolean().optional().meta({ + description: "If true, this field will be excluded from generation", }), - validator: z - .union([z.enum(["zod", "zod/v4", "zod/v3"]), z.literal(false)]) - .default("zod/v4") + // Override the inferred type from metadata + typeOverride: z + .enum([ + "text", // textField() + "number", // numberField() + "boolean", // numberField().outputValidator(z.coerce.boolean()) + "fmBooleanNumber", // Same as boolean, explicit FileMaker 0/1 pattern + "date", // dateField() + "timestamp", // timestampField() + "container", // containerField() + ]) .optional() .meta({ description: - "If set to 'zod', 'zod/v4', or 'zod/v3', the validator will be generated using zod, otherwise it will generated Typescript types only and no runtime validation will be performed", + "Override the inferred field type from metadata. 
Options: text, number, boolean, fmBooleanNumber, date, timestamp, container", }), - clientSuffix: z.string().default("Layout").optional().meta({ - description: "The suffix to be added to the schema name for each layout", +}); + +// Table-level configuration (opt-in model) +const tableConfig = z.object({ + // Table name to generate + tableName: z.string().meta({ + description: + "The entity set name (table occurrence name) to generate. This table will be included in metadata download and type generation.", + }), + // Override the generated TypeScript variable name + // (original entity set name is still used for the path) + variableName: z.string().optional().meta({ + description: + "Override the generated TypeScript variable name. The original entity set name is still used for the OData path.", + }), + // Field-specific overrides as an array + fields: z.array(fieldOverride).optional().meta({ + description: "Field-specific overrides as an array", }), - generateClient: z.boolean().default(true).optional().meta({ + reduceMetadata: z.boolean().optional().meta({ description: - "If true, a layout-specific client will be generated for each layout provided, otherwise it will only generate the types. This option can be overridden for each layout individually.", + "If undefined, the top-level setting will be used. If true, reduced OData annotations will be requested from the server to reduce payload size. This will prevent comments, entity ids, and other properties from being generated.", }), - webviewerScriptName: z.string().optional().meta({ + alwaysOverrideFieldNames: z.boolean().optional().meta({ description: - "The name of the webviewer script to be used. If this key is set, the generated client will use the @proofkit/webviewer adapter instead of the OttoFMS or Fetch adapter, which will only work when loaded inside of a FileMaker webviewer.", + "If undefined, the top-level setting will be used. 
If true, field names will always be updated to match metadata, even when matching by entity ID. If false, existing field names are preserved when matching by entity ID.", }), }); +const typegenConfigSingleBase = z.discriminatedUnion("type", [ + z.object({ + type: z.literal("fmdapi"), + configName: z.string().optional(), + envNames, + layouts: z.array(layoutConfig).default([]), + path, + clearOldFiles: z.boolean().default(false).optional().meta({ + description: + "If false, the path will not be cleared before the new files are written. Only the `client` and `generated` directories are cleared to allow for potential overrides to be kept.", + }), + validator: z + .union([z.enum(["zod", "zod/v4", "zod/v3"]), z.literal(false)]) + .default("zod/v4") + .optional() + .meta({ + description: + "If set to 'zod', 'zod/v4', or 'zod/v3', the validator will be generated using zod, otherwise it will generated Typescript types only and no runtime validation will be performed", + }), + clientSuffix: z.string().default("Layout").optional().meta({ + description: "The suffix to be added to the schema name for each layout", + }), + generateClient: z.boolean().default(true).optional().meta({ + description: + "If true, a layout-specific client will be generated for each layout provided, otherwise it will only generate the types. This option can be overridden for each layout individually.", + }), + webviewerScriptName: z.string().optional().meta({ + description: + "The name of the webviewer script to be used. If this key is set, the generated client will use the @proofkit/webviewer adapter instead of the OttoFMS or Fetch adapter, which will only work when loaded inside of a FileMaker webviewer.", + }), + }), + z.object({ + type: z.literal("fmodata"), + configName: z.string().optional(), + envNames: z.optional(envNames), + path, + reduceMetadata: z.boolean().optional().meta({ + description: + "If true, reduced OData annotations will be requested from the server to reduce payload size. 
This will prevent comments, entity ids, and other properties from being generated.", + }), + clearOldFiles: z.boolean().default(false).optional().meta({ + description: + "If false, the path will not be cleared before the new files are written. Only the `client` and `generated` directories are cleared to allow for potential overrides to be kept.", + }), + alwaysOverrideFieldNames: z.boolean().default(true).optional().meta({ + description: + "If true (default), field names will always be updated to match metadata, even when matching by entity ID. If false, existing field names are preserved when matching by entity ID.", + }), + tables: z.array(tableConfig).default([]).meta({ + description: + "Required array of tables to generate. Only the tables specified here will be downloaded and generated. Each table can have field-level overrides for excluding fields, renaming variables, and overriding field types.", + }), + }), +]); + +// Add default "type" field for backwards compatibility +export const typegenConfigSingle = z.preprocess((data) => { + if (data && typeof data === "object" && !("type" in data)) { + return { ...data, type: "fmdapi" }; + } + return data; +}, typegenConfigSingleBase); + export const typegenConfig = z.object({ - config: z.union([typegenConfigSingle, z.array(typegenConfigSingle)]), + config: z.union([z.array(typegenConfigSingle), typegenConfigSingle]), }); export type TypegenConfig = z.infer; +export type FmodataConfig = Extract< + z.infer, + { type: "fmodata" } +>; + export type TSchema = { name: string; type: "string" | "fmnumber" | "valueList"; diff --git a/packages/typegen/tests/__snapshots__/zod-layout-client.snap.ts b/packages/typegen/tests/__snapshots__/zod-layout-client.snap.ts index e2fef985..050d9f46 100644 --- a/packages/typegen/tests/__snapshots__/zod-layout-client.snap.ts +++ b/packages/typegen/tests/__snapshots__/zod-layout-client.snap.ts @@ -3,7 +3,7 @@ * https://proofkit.dev/docs/typegen * DO NOT EDIT THIS FILE DIRECTLY. 
Changes may be overritten */ -import { z } from "zod"; +import { z } from "zod/v4"; import type { InferZodPortals } from "@proofkit/fmdapi"; // @generated diff --git a/packages/typegen/tests/fmodata-preserve-customizations.test.ts b/packages/typegen/tests/fmodata-preserve-customizations.test.ts new file mode 100644 index 00000000..d1c12b8e --- /dev/null +++ b/packages/typegen/tests/fmodata-preserve-customizations.test.ts @@ -0,0 +1,187 @@ +import { describe, expect, it } from "vitest"; +import fs from "node:fs/promises"; +import path from "node:path"; +import os from "node:os"; +import { generateODataTypes } from "../src/fmodata/generateODataTypes"; +import type { ParsedMetadata } from "../src/fmodata/parseMetadata"; + +function makeMetadata({ + entitySetName, + entityTypeName, + fields, +}: { + entitySetName: string; + entityTypeName: string; + fields: Array<{ name: string; type: string; fieldId: string }>; +}): ParsedMetadata { + const entityTypes = new Map(); + const entitySets = new Map(); + + const properties = new Map(); + for (const f of fields) { + properties.set(f.name, { + $Type: f.type, + $Nullable: true, + "@FieldID": f.fieldId, + }); + } + + entityTypes.set(entityTypeName, { + Name: entityTypeName.split(".").at(-1) ?? 
entityTypeName, + "@TableID": "T1", + Properties: properties, + NavigationProperties: [], + }); + + entitySets.set(entitySetName, { + Name: entitySetName, + EntityType: entityTypeName, + }); + + return { + entityTypes, + entitySets, + namespace: "NS", + }; +} + +describe("fmodata generateODataTypes preserves user customizations", () => { + it("preserves custom chained calls even when placed before standard methods", async () => { + const tmpDir = await fs.mkdtemp( + path.join(os.tmpdir(), "proofkit-fmodata-preserve-"), + ); + + try { + const entitySetName = "MyTable"; + const entityTypeName = "NS.MyTable"; + const metadata = makeMetadata({ + entitySetName, + entityTypeName, + fields: [{ name: "FieldA", type: "Edm.String", fieldId: "F1" }], + }); + + const existingFilePath = path.join(tmpDir, "MyTable.ts"); + await fs.writeFile( + existingFilePath, + [ + `import { fmTableOccurrence, textField } from "@proofkit/fmdapi";`, + `import { z } from "zod/v4";`, + ``, + `export const MyTable = fmTableOccurrence("MyTable", {`, + ` "FieldA": textField().inputValidator(z.string()).entityId("F1"),`, + `});`, + ``, + ].join("\n"), + "utf8", + ); + + await generateODataTypes(metadata, { + type: "fmodata", + path: tmpDir, + clearOldFiles: false, + tables: [{ tableName: "MyTable" }], + }); + + const regenerated = await fs.readFile(existingFilePath, "utf8"); + expect(regenerated).toContain( + `FieldA: textField().entityId("F1").inputValidator(z.string())`, + ); + } finally { + await fs.rm(tmpDir, { recursive: true, force: true }); + } + }); + + it("preserves custom chained calls when no standard methods exist", async () => { + const tmpDir = await fs.mkdtemp( + path.join(os.tmpdir(), "proofkit-fmodata-preserve-"), + ); + + try { + const entitySetName = "MyTable"; + const entityTypeName = "NS.MyTable"; + const metadata = makeMetadata({ + entitySetName, + entityTypeName, + // Simulate reduceMetadata: no FieldID => generator won't emit .entityId() + fields: [{ name: "FieldB", type: 
"Edm.String", fieldId: "" }], + }); + + const existingFilePath = path.join(tmpDir, "MyTable.ts"); + await fs.writeFile( + existingFilePath, + [ + `import { fmTableOccurrence, textField } from "@proofkit/fmdapi";`, + `import { z } from "zod/v4";`, + ``, + `export const MyTable = fmTableOccurrence("MyTable", {`, + ` "FieldB": textField().inputValidator(z.string()),`, + `});`, + ``, + ].join("\n"), + "utf8", + ); + + await generateODataTypes(metadata, { + type: "fmodata", + path: tmpDir, + clearOldFiles: false, + tables: [{ tableName: "MyTable" }], + }); + + const regenerated = await fs.readFile(existingFilePath, "utf8"); + expect(regenerated).toContain( + `FieldB: textField().inputValidator(z.string())`, + ); + } finally { + await fs.rm(tmpDir, { recursive: true, force: true }); + } + }); + + it("preserves aliased imports when regenerating files", async () => { + const tmpDir = await fs.mkdtemp( + path.join(os.tmpdir(), "proofkit-fmodata-preserve-"), + ); + + try { + const entitySetName = "MyTable"; + const entityTypeName = "NS.MyTable"; + const metadata = makeMetadata({ + entitySetName, + entityTypeName, + fields: [{ name: "FieldA", type: "Edm.String", fieldId: "F1" }], + }); + + const existingFilePath = path.join(tmpDir, "MyTable.ts"); + await fs.writeFile( + existingFilePath, + [ + `import { fmTableOccurrence, textField as tf } from "@proofkit/fmdapi";`, + `import { z as zod } from "zod/v4";`, + ``, + `export const MyTable = fmTableOccurrence("MyTable", {`, + ` "FieldA": tf().entityId("F1").inputValidator(zod.string()),`, + `});`, + ``, + ].join("\n"), + "utf8", + ); + + await generateODataTypes(metadata, { + type: "fmodata", + path: tmpDir, + clearOldFiles: false, + tables: [{ tableName: "MyTable" }], + }); + + const regenerated = await fs.readFile(existingFilePath, "utf8"); + // Verify aliased imports are preserved + expect(regenerated).toContain(`textField as tf`); + expect(regenerated).toContain(`z as zod`); + // Verify the code still uses the aliases + 
expect(regenerated).toContain(`tf().entityId("F1")`); + expect(regenerated).toContain(`zod.string()`); + } finally { + await fs.rm(tmpDir, { recursive: true, force: true }); + } + }); +}); diff --git a/packages/typegen/tests/typegen.test.ts b/packages/typegen/tests/typegen.test.ts index 1416364a..67185e24 100644 --- a/packages/typegen/tests/typegen.test.ts +++ b/packages/typegen/tests/typegen.test.ts @@ -1,13 +1,4 @@ -import { - describe, - expect, - it, - afterAll, - beforeAll, - vi, - beforeEach, - afterEach, -} from "vitest"; +import { describe, expect, it, beforeEach } from "vitest"; import { generateTypedClients } from "../src/typegen"; import { typegenConfigSingle } from "../src/types"; import { OttoAPIKey } from "../../fmdapi/src"; @@ -16,7 +7,6 @@ import fs from "fs/promises"; import path from "path"; import { execSync } from "child_process"; -import dotenv from "dotenv"; // // Load the correct .env.local relative to this test file's directory // dotenv.config({ path: path.resolve(__dirname, ".env.local") }); @@ -140,7 +130,11 @@ describe("typegen", () => { }); it("basic typegen with zod", async () => { - const config: z.infer = { + const config: Extract< + z.infer, + { type: "fmdapi" } + > = { + type: "fmdapi", layouts: [ { layoutName: "layout", @@ -163,7 +157,11 @@ describe("typegen", () => { it("basic typegen without zod", async () => { // Define baseGenPath within the scope or ensure it's accessible // Assuming baseGenPath is accessible from the describe block's scope - const config: z.infer = { + const config: Extract< + z.infer, + { type: "fmdapi" } + > = { + type: "fmdapi", layouts: [ // add your layouts and name schemas here { @@ -189,7 +187,11 @@ describe("typegen", () => { }, 30000); it("basic typegen with strict numbers", async () => { - const config: z.infer = { + const config: Extract< + z.infer, + { type: "fmdapi" } + > = { + type: "fmdapi", layouts: [ { layoutName: "layout", @@ -223,7 +225,11 @@ describe("typegen", () => { }, 30000); it("zod 
validator", async () => { - const config: z.infer = { + const config: Extract< + z.infer, + { type: "fmdapi" } + > = { + type: "fmdapi", layouts: [ { layoutName: "layout", diff --git a/packages/typegen/tsconfig.json b/packages/typegen/tsconfig.json index 02d040ae..1eefccbe 100644 --- a/packages/typegen/tsconfig.json +++ b/packages/typegen/tsconfig.json @@ -6,6 +6,8 @@ "moduleResolution": "Bundler", "outDir": "dist", "sourceMap": true, + /* Include ES2022 lib for ErrorOptions and Error.cause support */ + "lib": ["DOM", "DOM.Iterable", "ES2021", "ES2022"], /* AND if you're building for a library: */ "declaration": true, diff --git a/packages/typegen/vite.config.ts b/packages/typegen/vite.config.ts index 32a35a9f..a297b02e 100644 --- a/packages/typegen/vite.config.ts +++ b/packages/typegen/vite.config.ts @@ -8,7 +8,12 @@ const config = defineConfig({ export default mergeConfig( config, tanstackViteConfig({ - entry: ["./src/index.ts", "./src/cli.ts", "./src/types.ts"], + entry: [ + "./src/index.ts", + "./src/cli.ts", + "./src/types.ts", + "./src/server/app.ts", + ], srcDir: "./src", cjs: false, outDir: "./dist", diff --git a/packages/typegen/web/components.json b/packages/typegen/web/components.json new file mode 100644 index 00000000..2d677db9 --- /dev/null +++ b/packages/typegen/web/components.json @@ -0,0 +1,24 @@ +{ + "$schema": "https://ui.shadcn.com/schema.json", + "style": "new-york", + "rsc": false, + "tsx": true, + "tailwind": { + "config": "", + "css": "src/index.css", + "baseColor": "neutral", + "cssVariables": true, + "prefix": "" + }, + "iconLibrary": "lucide", + "aliases": { + "components": "@/components", + "utils": "@/lib/utils", + "ui": "@/components/ui", + "lib": "@/lib", + "hooks": "@/hooks" + }, + "registries": { + "@reui": "https://reui.io/r/{name}.json" + } +} diff --git a/packages/typegen/web/index.html b/packages/typegen/web/index.html new file mode 100644 index 00000000..1d0cdf73 --- /dev/null +++ b/packages/typegen/web/index.html @@ -0,0 +1,35 
@@ + + + + + + + Typegen Config Editor + + + +
+ + + diff --git a/packages/typegen/web/package.json b/packages/typegen/web/package.json new file mode 100644 index 00000000..2748e154 --- /dev/null +++ b/packages/typegen/web/package.json @@ -0,0 +1,54 @@ +{ + "name": "@proofkit/typegen-web", + "version": "0.0.0", + "private": true, + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview" + }, + "dependencies": { + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/modifiers": "^9.0.0", + "@dnd-kit/sortable": "^10.0.0", + "@dnd-kit/utilities": "^3.2.2", + "@fetchkit/ffetch": "^4.2.0", + "@headless-tree/core": "^1.6.0", + "@headless-tree/react": "^1.6.0", + "@hookform/resolvers": "^5.2.2", + "@proofkit/typegen": "workspace:*", + "@radix-ui/react-dialog": "^1.1.15", + "@radix-ui/react-direction": "^1.1.1", + "@radix-ui/react-slot": "^1.2.3", + "@radix-ui/react-tooltip": "^1.2.8", + "@remixicon/react": "^4.7.0", + "@tailwindcss/vite": "^4.1.18", + "@tanstack/react-query": "^5.90.12", + "@tanstack/react-table": "^8.21.3", + "@uidotdev/usehooks": "^2.4.1", + "class-variance-authority": "^0.7.1", + "clsx": "^2.1.1", + "cmdk": "^1.1.1", + "hono": "^4.9.0", + "lucide-react": "^0.511.0", + "radix-ui": "^1.4.3", + "react": "^19.2.3", + "react-dom": "^19.2.3", + "react-hook-form": "^7.68.0", + "sonner": "^2.0.7", + "tailwind-merge": "^3.3.1", + "tailwindcss": "^4.1.11", + "zod": "^4.1.13" + }, + "devDependencies": { + "@tanstack/react-query-devtools": "^5.91.1", + "@types/node": "^22.17.1", + "@types/react": "^19.1.10", + "@types/react-dom": "^19.1.7", + "@vitejs/plugin-react": "^4.3.4", + "tw-animate-css": "^1.3.6", + "typescript": "^5.9.3", + "vite": "^6.3.4" + } +} diff --git a/packages/typegen/web/public/proofkit-horiz.png b/packages/typegen/web/public/proofkit-horiz.png new file mode 100644 index 00000000..1e88602a Binary files /dev/null and b/packages/typegen/web/public/proofkit-horiz.png differ diff --git a/packages/typegen/web/src/App.tsx b/packages/typegen/web/src/App.tsx new file 
mode 100644 index 00000000..bd4cd394 --- /dev/null +++ b/packages/typegen/web/src/App.tsx @@ -0,0 +1,386 @@ +import { useEffect, useState } from "react"; +import { useMutation } from "@tanstack/react-query"; +import { useForm, useFieldArray } from "react-hook-form"; +import { client } from "./lib/api"; +import { + Accordion, + AccordionContent, + AccordionItem, + AccordionTrigger, +} from "@/components/ui/accordion"; +import { Button } from "./components/ui/button"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from "./components/ui/dropdown-menu"; +import { Loader2, PlayIcon, Plus } from "lucide-react"; +import { ConfigSummary } from "./components/ConfigSummary"; +import { type SingleConfig } from "./lib/config-utils"; +import { Form } from "./components/ui/form"; +import { useConfig } from "./hooks/useConfig"; +import { useHealthCheck } from "./hooks/useHealthCheck"; +import { Badge } from "./components/ui/badge"; +import { ConfigEditor } from "./components/ConfigEditor"; +import { EmptyState } from "./components/EmptyState"; +import { ConnectionWarning } from "./components/ConnectionWarning"; + +// Normalize config to always be an array +function normalizeConfig( + config: SingleConfig | SingleConfig[] | null, +): SingleConfig[] { + if (Array.isArray(config)) { + return config; + } + if (config && typeof config === "object") { + return [config]; + } + return []; +} + +// Create config objects for each type +function createFmdapiConfig(): SingleConfig { + return { + type: "fmdapi", + envNames: { + server: undefined, + db: undefined, + auth: undefined, + }, + layouts: [], + }; +} + +function createFmodataConfig(): SingleConfig { + return { + type: "fmodata", + envNames: { + server: undefined, + db: undefined, + auth: undefined, + }, + tables: [], + alwaysOverrideFieldNames: true, + }; +} + +function App() { + // Health check to detect if server is down + const { isHealthy } = useHealthCheck({ + enabled: true, + }); 
+ + // Load and save config using custom hook + const { + configDataResponse, + isError, + error, + refetch, + saveMutation, + isLoading, + isRetrying, + } = useConfig(); + + // Track active accordion item to preserve state + const [activeAccordionItem, setActiveAccordionItem] = useState(0); + + // Use React Hook Form to manage the configs array + type FormData = { config: SingleConfig[] }; + const form = useForm({}); + + useEffect(() => { + console.log("configData from useEffect", configDataResponse); + if (configDataResponse) { + const configData = configDataResponse?.config; + const serverConfigs = normalizeConfig(configData); + form.reset({ config: serverConfigs }); + } + }, [configDataResponse]); + + const { fields, append, remove } = useFieldArray({ + control: form.control, + name: "config", + }); + + // Get configs from form values for data access + const configs = form.watch("config"); + + // Extract exists and path from configDataResponse + const configExists = configDataResponse?.exists ?? false; + const configPath = configDataResponse?.path; + const fullPath = configDataResponse?.fullPath; + + // Determine empty state conditions + const isFileMissing = !configExists; + const isEmptyConfig = configExists && configs.length === 0; + const showEmptyState = isFileMissing || isEmptyConfig; + + // Unified handler for creating configs (works for both file creation and adding) + const handleAddConfig = async (type: "fmdapi" | "fmodata") => { + const newConfig = + type === "fmdapi" ? createFmdapiConfig() : createFmodataConfig(); + + // If file doesn't exist, create it with the new config + if (isFileMissing) { + try { + await saveMutation.mutateAsync([newConfig]); + await refetch(); + setTimeout(() => { + setActiveAccordionItem(0); + }, 100); + } catch (err) { + const apiType = type === "fmdapi" ? 
"Data API" : "OData"; + console.error(`Failed to create config file with ${apiType}:`, err); + } + } else { + // File exists, just append to form + append(newConfig); + setTimeout(() => { + setActiveAccordionItem(fields.length); + }, 1); + } + }; + + // Run typegen mutation + const runTypegenMutation = useMutation({ + mutationFn: async () => { + await client.api.run.$post({ + json: { config: configs }, + }); + }, + }); + + const handleSaveAll = form.handleSubmit(async (data) => { + try { + await saveMutation.mutateAsync(data.config); + // Reset the form with the current form state to clear dirty state + // Use getValues() to get the current state, preserving any changes made during the save request + // The accordion state is preserved because it's controlled and the component doesn't unmount + const currentConfigs = form.getValues("config"); + form.reset({ config: currentConfigs }); + } catch (err) { + // Error is handled by the mutation + console.error("Failed to save configs:", err); + } + }); + + const handleRunTypegen = async () => { + try { + // First save the config if there are changes + if (form.formState.isDirty) { + await handleSaveAll(); + } + // Then run typegen + await runTypegenMutation.mutateAsync(); + } catch (err) { + // Error is handled by the mutation + console.error("Failed to run typegen:", err); + } + }; + + return ( +
+
+ {/* Connection Warning Overlay - Shows when server is unreachable */} + {/* Only show if we've lost connection (not during initial load or retries) */} + {!isHealthy && !isLoading && !isRetrying && ( + refetch()} /> + )} + + {/* Loading Overlay - Preserves form state underneath */} + {isLoading && ( +
+
+ {isRetrying ? "Waiting for API server..." : "Loading config..."} +
+
+ )} + + {/* Error Overlay - Preserves form state underneath */} + {isError && !isRetrying && ( +
+
+

+ Error +

+

+ {error instanceof Error + ? error.message + : "Failed to load config"} +

+ +
+
+ )} + + {/* Main Content - Always rendered to preserve state */} +
+
+
+ ProofKit +

+
+ type + gen +
+ + UI + +

+
+ + {!isFileMissing && ( +
+ + +
+ )} +
+
+ +
+ + {!isLoading && showEmptyState ? ( +
+ handleAddConfig("fmdapi") + : undefined + } + onAddFmodata={ + isFileMissing || isEmptyConfig + ? () => handleAddConfig("fmodata") + : undefined + } + /> +
+ ) : ( + setActiveAccordionItem(Number(value))} + type="single" + variant="outline" + collapsible + className="w-full lg:w-[75%] mx-auto" + > + {fields.map((field, index) => { + const config = configs[index]; + return ( + + + + + + remove(index)} + /> + + + ); + })} + +
+ + + + + + handleAddConfig("fmdapi")} + > +
+

Data API

+ + Legacy + +
+
+ Reads/writes data using layout-specific context +
+
+ handleAddConfig("fmodata")} + > +
+

OData

+ + New + +
+
+ Reads/writes data directly to the database tables, + using the relationship graph as context +
+
+
+
+
+
+ )} +
+ +
+
+ ); +} + +export default App; diff --git a/packages/typegen/web/src/components/ConfigEditor.tsx b/packages/typegen/web/src/components/ConfigEditor.tsx new file mode 100644 index 00000000..c62e1769 --- /dev/null +++ b/packages/typegen/web/src/components/ConfigEditor.tsx @@ -0,0 +1,401 @@ +import { useFormContext, useWatch } from "react-hook-form"; +import { useState, useEffect, useId } from "react"; +import { Input, InputWrapper, InputGroup, InputAddon } from "./ui/input"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "./ui/select"; +import { SwitchField } from "./ui/switch-field"; +import { Switch, SwitchIndicator, SwitchWrapper } from "./ui/switch"; +import { + FormControl, + FormField, + FormItem, + FormLabel, + FormMessage, +} from "./ui/form"; +import { EnvVarDialog } from "./EnvVarDialog"; +import { SingleConfig } from "../lib/config-utils"; +import { InfoTooltip } from "./InfoTooltip"; +import { LayoutEditor } from "./LayoutEditor"; +import { MetadataTablesEditor } from "./MetadataTablesEditor"; +import { Button } from "./ui/button"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, +} from "./ui/dialog"; +import { PlayIcon, Trash2, Loader2 } from "lucide-react"; +import { useRunTypegen } from "../hooks/useRunTypegen"; + +interface ConfigEditorProps { + index: number; + onRemove: () => void; +} + +export function ConfigEditor({ index, onRemove }: ConfigEditorProps) { + const { + control, + formState: { errors }, + setValue, + watch, + trigger, + } = useFormContext<{ config: SingleConfig[] }>(); + + const hasMultipleConfigs = watch("config").length > 1; + + const baseId = useId(); + const generateClientSwitchId = `${baseId}-generate-client`; + const configType = watch(`config.${index}.type` as const); + const generateClient = useWatch({ + control, + name: `config.${index}.generateClient` as const, + }); + + const configErrors = errors.config?.[index]; + const 
webviewerScriptName = useWatch({ + control, + name: `config.${index}.webviewerScriptName` as const, + }); + const [usingWebviewer, setUsingWebviewer] = useState(!!webviewerScriptName); + const [showRemoveDialog, setShowRemoveDialog] = useState(false); + const { runTypegen, isRunning } = useRunTypegen(); + + // Get the current config value + const currentConfig = watch(`config.${index}` as const); + + useEffect(() => { + setUsingWebviewer(!!webviewerScriptName); + }, [webviewerScriptName]); + + const handleWebviewerToggle = (checked: boolean) => { + setUsingWebviewer(checked); + if (!checked) { + setValue(`config.${index}.webviewerScriptName` as const, ""); + } + }; + + const handleRunTypegen = async (e: React.MouseEvent) => { + e.preventDefault(); + e.stopPropagation(); + try { + await runTypegen(currentConfig); + } catch (err) { + console.error("Failed to run typegen:", err); + } + }; + + return ( +
+ {configErrors?.root && ( +
+ Error: {configErrors.root.message} +
+ )} + +
+ {/* General Settings */} +
+
+
+

General Settings

+
+ +
+ + {hasMultipleConfigs && ( + + )} + +
+
+
+ {/* First row: Display Name, Output Path, Clear Old Files */} +
+ ( + + + Display Name{" "} + + + + + + + + + )} + /> + + ( + + + Output Path{" "} + + + + + + + + + )} + /> + + ( + + + + + + + )} + /> +
+ + {/* Second row: Generate Client, Client Suffix, and Validator */} + {configType === "fmdapi" && ( +
+ ( + + Generate + +
+ + + + Full Client + + + Types Only + + +
+
+ +
+ )} + /> + + {generateClient && ( + ( + + Client Suffix + + + + + + )} + /> + )} + + {generateClient && ( + ( + + Validator + + + + + + )} + /> + )} +
+ )} + + {configType === "fmodata" && ( +
+ ( + + )} + /> + ( + + )} + /> +
+ )} + + {/* Final row: Using a Webviewer switch with script name inline */} + {configType === "fmdapi" && ( +
+
+ +
+ {usingWebviewer && ( +
+ ( + + + Webviewer Script Name{" "} + + + + + + + + )} + /> +
+ )} +
+ )} +
+
+ + {configType === "fmdapi" && } + {configType === "fmodata" && ( + + )} +
+ + {/* Remove Config Confirmation Dialog */} + + + + Remove Config + + Are you sure you want to remove this config? This action cannot be + undone. + + + + + + + + +
+ ); +} diff --git a/packages/typegen/web/src/components/ConfigList.css b/packages/typegen/web/src/components/ConfigList.css new file mode 100644 index 00000000..29ca5fd2 --- /dev/null +++ b/packages/typegen/web/src/components/ConfigList.css @@ -0,0 +1,89 @@ +.config-list { + display: flex; + flex-direction: column; + gap: 1rem; +} + +.config-list-header { + display: flex; + justify-content: space-between; + align-items: center; +} + +.config-list-header h2 { + font-size: 1.5rem; + font-weight: 600; + margin: 0; +} + +.add-button { + padding: 0.5rem 1.5rem; + background: #16a34a; + color: white; + border: none; + border-radius: 0.375rem; + font-weight: 500; + cursor: pointer; + transition: background 0.2s; +} + +.add-button:hover { + background: #15803d; +} + +.config-list-items { + display: flex; + flex-direction: column; + gap: 0.75rem; +} + +.config-list-item { + padding: 1rem; + background: #27272a; + border: 1px solid #3f3f46; + border-radius: 0.5rem; + cursor: pointer; + transition: all 0.2s; +} + +.config-list-item:hover { + background: #3f3f46; + border-color: #52525b; +} + +.config-list-item-content { + display: flex; + justify-content: space-between; + align-items: center; +} + +.config-list-item-label { + font-weight: 500; + color: #e4e4e7; +} + +.config-list-item-arrow { + color: #a1a1aa; + font-size: 1.25rem; +} + +.config-list-empty { + padding: 2rem; + text-align: center; + color: #a1a1aa; +} + +.config-list-empty p { + margin: 0; +} + + + + + + + + + + + diff --git a/packages/typegen/web/src/components/ConfigList.tsx b/packages/typegen/web/src/components/ConfigList.tsx new file mode 100644 index 00000000..fc123d74 --- /dev/null +++ b/packages/typegen/web/src/components/ConfigList.tsx @@ -0,0 +1,74 @@ +import "./ConfigList.css"; + +interface ConfigListProps { + configs: unknown[]; + onSelectConfig: (index: number) => void; + onAddConfig: () => void; +} + +export function ConfigList({ + configs, + onSelectConfig, + onAddConfig, +}: ConfigListProps) { 
+ const getConfigLabel = (config: unknown, index: number): string => { + if (typeof config === "object" && config !== null) { + const obj = config as Record; + // Try to find a meaningful label + if (obj.path && typeof obj.path === "string") { + return `Config ${index + 1} (${obj.path})`; + } + if (obj.layouts && Array.isArray(obj.layouts) && obj.layouts.length > 0) { + const firstLayout = obj.layouts[0] as Record; + if (firstLayout.layoutName && typeof firstLayout.layoutName === "string") { + return `Config ${index + 1} (${firstLayout.layoutName})`; + } + } + } + return `Config ${index + 1}`; + }; + + return ( +
+
+

Configurations

+ +
+
+ {configs.length === 0 ? ( +
+

No configurations found. Click "Add Config" to create one.

+
+ ) : ( + configs.map((config, index) => ( +
onSelectConfig(index)} + > +
+ + {getConfigLabel(config, index)} + + +
+
+ )) + )} +
+
+ ); +} + + + + + + + + + + + diff --git a/packages/typegen/web/src/components/ConfigSummary.tsx b/packages/typegen/web/src/components/ConfigSummary.tsx new file mode 100644 index 00000000..dffb10f8 --- /dev/null +++ b/packages/typegen/web/src/components/ConfigSummary.tsx @@ -0,0 +1,38 @@ +import { Folder } from "lucide-react"; +import { type SingleConfig } from "../lib/config-utils"; +import { Badge } from "./ui/badge"; + +export function ConfigSummary({ config }: { config: SingleConfig }) { + return ( +
+ + {config?.type.toUpperCase()} + + {config.configName ? ( + <> +

{config.configName}

+

+ + {config.path} +

+ + ) : ( +

+ {config.path} +

+ )} +
+ ); +} + +export default ConfigSummary; diff --git a/packages/typegen/web/src/components/ConnectionWarning.tsx b/packages/typegen/web/src/components/ConnectionWarning.tsx new file mode 100644 index 00000000..cc188a9b --- /dev/null +++ b/packages/typegen/web/src/components/ConnectionWarning.tsx @@ -0,0 +1,69 @@ +import { AlertTriangle, RefreshCw, X } from "lucide-react"; +import { Button } from "./ui/button"; + +interface ConnectionWarningProps { + onRefresh?: () => void; +} + +export function ConnectionWarning({ onRefresh }: ConnectionWarningProps) { + const handleRefresh = () => { + if (onRefresh) { + onRefresh(); + } else { + window.location.reload(); + } + }; + + return ( +
+
+
+ +
+

+ UI Server Unavailable +

+

+ Did you stop the @proofkit/ui command? +

+
+

+ To resolve this, you can: +

+
    +
  • Close this browser tab or window
  • +
  • Refresh the page if you plan to reconnect
  • +
  • Rerun the @proofkit/ui command
  • +
+
+
+ + +
+
+
+
+
+ ); +} diff --git a/packages/typegen/web/src/components/EmptyState.tsx b/packages/typegen/web/src/components/EmptyState.tsx new file mode 100644 index 00000000..89a545be --- /dev/null +++ b/packages/typegen/web/src/components/EmptyState.tsx @@ -0,0 +1,163 @@ +import { FileText, Database } from "lucide-react"; +import { Button } from "./ui/button"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from "./ui/dropdown-menu"; +import { Plus } from "lucide-react"; +import { Badge } from "./ui/badge"; +import { Tooltip, TooltipTrigger, TooltipContent } from "./ui/tooltip"; + +interface EmptyStateProps { + variant: "file-missing" | "empty-config"; + configPath?: string; + onAddFmdapi?: () => void; + onAddFmodata?: () => void; +} + +export function EmptyState({ + variant, + configPath, + onAddFmdapi, + onAddFmodata, +}: EmptyStateProps) { + if (variant === "file-missing") { + return ( +
+
+ +
+

No Config File Found

+

+ A config file will be created at the{" "} + + + + current working directory + + + +

+ {configPath || "proofkit-typegen.config.json"} +

+ + + . +

+

+ Want to create it somewhere else?
+ Restart the server with the{" "} + + --config + {" "} + option and specify a file path. +

+ {(onAddFmdapi || onAddFmodata) && ( + + + + + + {onAddFmdapi && ( + +
+

Data API

+ + Legacy + +
+
+ Reads/writes data using layout-specific context +
+
+ )} + {onAddFmodata && ( + +
+

OData

+ + New + +
+
+ Reads/writes data directly to the database tables, using the + relationship graph as context +
+
+ )} +
+
+ )} +
+ ); + } + + // Empty config state + return ( +
+
+ +
+

No Connections Yet

+

+ Add your first FileMaker connection to get started +

+ {(onAddFmdapi || onAddFmodata) && ( + + + + + + {onAddFmdapi && ( + +
+

Data API

+ + Legacy + +
+
+ Reads/writes data using layout-specific context +
+
+ )} + {onAddFmodata && ( + +
+

OData

+ + New + +
+
+ Reads/writes data directly to the database tables, using the + relationship graph as context +
+
+ )} +
+
+ )} +
+ ); +} diff --git a/packages/typegen/web/src/components/EnvVarDialog.tsx b/packages/typegen/web/src/components/EnvVarDialog.tsx new file mode 100644 index 00000000..816825f9 --- /dev/null +++ b/packages/typegen/web/src/components/EnvVarDialog.tsx @@ -0,0 +1,458 @@ +import { useEffect, useState } from "react"; +import { useFormContext, useWatch } from "react-hook-form"; +import { Button } from "./ui/button"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, + DialogTrigger, +} from "./ui/dialog"; +import { defaultEnvNames } from "../../../src/constants"; +import { EnvVarField } from "./EnvVarField"; +import { useEnvVarIndicator } from "./useEnvVarIndicator"; +import { useEnvValue } from "../lib/envValues"; +import { useTestConnection, setDialogOpen } from "../hooks/useTestConnection"; +import { Alert, AlertContent, AlertDescription, AlertIcon } from "./ui/alert"; +import { Card, CardContent, CardTitle } from "./ui/card"; +import { Separator } from "./ui/separator"; +import { + Loader2, + CheckCircle2, + XCircle, + AlertCircle, + AlertTriangle, + Server, + Info, +} from "lucide-react"; + +interface EnvVarDialogProps { + index: number; +} + +// Helper to safely extract error message from various error formats +function getErrorMessage(error: unknown): string { + if (typeof error === "string") { + return error; + } + if (error instanceof Error) { + return error.message; + } + if (error && typeof error === "object" && "message" in error) { + return String((error as { message: unknown }).message); + } + return ""; +} + +export function EnvVarDialog({ index }: EnvVarDialogProps) { + const { control, setValue, getValues } = useFormContext<{ + config: any[]; + }>(); + const [dialogOpen, setDialogOpenState] = useState(false); + + // Track dialog open state to pause background tests + useEffect(() => { + setDialogOpen(index, dialogOpen); + return () => { + setDialogOpen(index, false); + }; + }, [index, dialogOpen]); + + // Get indicator 
data + const { hasCustomValues, serverValue, serverLoading, dbValue, dbLoading } = + useEnvVarIndicator(index); + + // Watch the auth env names from the form + const envNamesAuth = useWatch({ + control, + name: `config.${index}.envNames.auth` as const, + }); + + // Determine the actual env names to use (from form or defaults) + const apiKeyEnvName = + envNamesAuth && + typeof envNamesAuth === "object" && + "apiKey" in envNamesAuth && + envNamesAuth.apiKey && + envNamesAuth.apiKey.trim() !== "" + ? envNamesAuth.apiKey + : defaultEnvNames.apiKey; + const usernameEnvName = + envNamesAuth && + typeof envNamesAuth === "object" && + "username" in envNamesAuth && + envNamesAuth.username && + envNamesAuth.username.trim() !== "" + ? envNamesAuth.username + : defaultEnvNames.username; + const passwordEnvName = + envNamesAuth && + typeof envNamesAuth === "object" && + "password" in envNamesAuth && + envNamesAuth.password && + envNamesAuth.password.trim() !== "" + ? envNamesAuth.password + : defaultEnvNames.password; + + // Resolve all three auth env values + const { data: apiKeyValue, isLoading: apiKeyLoading } = + useEnvValue(apiKeyEnvName); + const { data: usernameValue, isLoading: usernameLoading } = + useEnvValue(usernameEnvName); + const { data: passwordValue, isLoading: passwordLoading } = + useEnvValue(passwordEnvName); + + // Determine which authentication method will be used + // Default to API key if it resolves to a value, otherwise use username/password if both resolve + const activeAuthMethod = + !apiKeyLoading && + apiKeyValue !== undefined && + apiKeyValue !== null && + apiKeyValue !== "" + ? "apiKey" + : !usernameLoading && + !passwordLoading && + usernameValue !== undefined && + usernameValue !== null && + usernameValue !== "" && + passwordValue !== undefined && + passwordValue !== null && + passwordValue !== "" + ? 
"username" + : null; + + // Test connection hook - enable when dialog is closed, disable when open + // When dialog is open, it will only run when the retry button is clicked + const { + status: testStatus, + data: testData, + error: testError, + errorDetails, + run: runTest, + } = useTestConnection(index, { enabled: !dialogOpen }); + + // Check if any values resolve to undefined/null/empty (only check after loading completes) + // For auth, check that at least one complete auth method is configured (either API key OR username+password) + const hasApiKeyAuth = + !apiKeyLoading && + apiKeyValue !== undefined && + apiKeyValue !== null && + apiKeyValue !== ""; + const hasUsernamePasswordAuth = + !usernameLoading && + !passwordLoading && + usernameValue !== undefined && + usernameValue !== null && + usernameValue !== "" && + passwordValue !== undefined && + passwordValue !== null && + passwordValue !== ""; + const hasAuth = hasApiKeyAuth || hasUsernamePasswordAuth; + + const hasUndefinedValues = + (!serverLoading && + (serverValue === undefined || + serverValue === null || + serverValue === "")) || + (!dbLoading && + (dbValue === undefined || dbValue === null || dbValue === "")) || + (!apiKeyLoading && !usernameLoading && !passwordLoading && !hasAuth); + + // Initialize auth fields if not already set + useEffect(() => { + const currentAuth = getValues(`config.${index}.envNames.auth` as any); + if (!currentAuth) { + setValue(`config.${index}.envNames.auth` as const, { + apiKey: "", + username: "", + password: "", + }); + } else if (typeof currentAuth === "object") { + // Ensure all fields exist + setValue(`config.${index}.envNames.auth` as const, { + apiKey: currentAuth.apiKey || "", + username: currentAuth.username || "", + password: currentAuth.password || "", + }); + } + }, [setValue, getValues, index]); + + return ( + +
+ + + + {(hasUndefinedValues || hasCustomValues) && ( + + {hasUndefinedValues ? "!" : "•"} + + )} +
+ + + Custom Environment Variable Names + + Enter the names{" "} + of the environment variables below, not the values + + + +
+
+ + + + +
+ + + Authentication + {/* API Key on its own line */} + + + {/* OR Divider */} +
+ + + OR + + +
+ + {/* Username and Password on the same line */} +
+ + + +
+
+
+
+
+ + + + + + + + You will need to rerun the{" "} + + @proofkit/typegen ui + {" "} + command if you change any environment variables. + + + + + {/* Test Connection Section */} +
+
+

Connection Status

+ +
+ + {/* Test Results - Show automatically when available */} + {testStatus !== "idle" && ( +
+ {testStatus === "pending" && ( +
+ + Testing connection... +
+ )} + + {testStatus === "success" && testData && ( +
+
+ + Connection OK +
+
+
+ Server:{" "} + {testData.server} +
+
+ Database:{" "} + {testData.db} +
+
+ Auth Type:{" "} + {testData.authType === "apiKey" + ? "API Key" + : "Username/Password"} +
+
+
+ )} + + {testStatus === "error" && ( +
+
+ + Connection Failed +
+ {errorDetails && ( +
+
+ {errorDetails.message || + getErrorMessage(errorDetails.error as unknown) || + "Unknown error"} +
+ {errorDetails.details?.missing && ( +
+
+ Missing environment variables: +
+
    + {errorDetails.details.missing.server && ( +
  • + Server ( + {errorDetails.suspectedField === "server" && + "⚠️"} + ) +
  • + )} + {errorDetails.details.missing.db && ( +
  • + Database ( + {errorDetails.suspectedField === "db" && "⚠️"} + ) +
  • + )} + {errorDetails.details.missing.auth && ( +
  • + Authentication ( + {errorDetails.suspectedField === "auth" && + "⚠️"} + ) +
  • + )} + {errorDetails.details.missing.password && ( +
  • + Password ( + {errorDetails.suspectedField === "auth" && + "⚠️"} + ) +
  • + )} +
+
+ )} + {errorDetails.fmErrorCode && ( +
+ + FileMaker Error Code: + {" "} + {errorDetails.fmErrorCode} +
+ )} + {errorDetails.suspectedField && + !errorDetails.details?.missing && ( +
+ + + Suspected issue with:{" "} + {errorDetails.suspectedField === "server" + ? "Server URL" + : errorDetails.suspectedField === "db" + ? "Database name" + : "Credentials"} + +
+ )} +
+ )} + {testError && !errorDetails && ( +
+ {testError instanceof Error + ? testError.message + : typeof testError === "object" && + testError !== null && + "message" in testError + ? String( + (testError as { message: unknown }).message, + ) + : "Unknown error"} +
+ )} +
+ )} +
+ )} +
+
+
+
+ ); +} diff --git a/packages/typegen/web/src/components/EnvVarField.tsx b/packages/typegen/web/src/components/EnvVarField.tsx new file mode 100644 index 00000000..68d2b08d --- /dev/null +++ b/packages/typegen/web/src/components/EnvVarField.tsx @@ -0,0 +1,116 @@ +import { useMemo } from "react"; +import { useFormContext, useWatch, Path, PathValue } from "react-hook-form"; +import { z } from "zod"; +import { CircleCheck, CircleSlash, Loader } from "lucide-react"; +import { configSchema } from "../lib/schema"; +import { Input, InputWrapper } from "./ui/input"; +import { + FormControl, + FormField, + FormItem, + FormLabel, + FormMessage, +} from "./ui/form"; +import { useEnvValue } from "../lib/envValues"; +import { useDebounce } from "@uidotdev/usehooks"; +import { Tooltip, TooltipContent, TooltipTrigger } from "./ui/tooltip"; +import { cn } from "@/lib/utils"; + +type FormData = z.infer; +type FormConfig = { config: FormData[] }; + +interface EnvVarFieldProps> { + fieldName: TFieldName extends Path + ? PathValue extends string | undefined + ? TFieldName + : never + : never; + label: string; + placeholder: string; + defaultValue: string; + dimField?: boolean; +} + +export function EnvVarField>({ + fieldName, + label, + placeholder, + defaultValue, + dimField = false, +}: EnvVarFieldProps) { + const { control } = useFormContext(); + const envName = useWatch({ + control, + name: fieldName, + defaultValue: undefined, + }); + + const debouncedEnvName = useDebounce(envName, 300); + + // Get the resolved value from the server (using debounced value) + // Ensure debouncedEnvName is a string or undefined before passing to useEnvValue + // Handle nested paths where watch might return objects or other types + const envNameForQuery: string | undefined = (() => { + if (typeof debouncedEnvName === "string") { + return debouncedEnvName.trim() !== "" ? 
debouncedEnvName : undefined; + } + return undefined; + })(); + const { data: envValue, isLoading } = useEnvValue( + envNameForQuery ?? defaultValue, + ); + + const valueState: "loading" | "not-set" | "set" = useMemo(() => { + if (isLoading) return "loading"; + if (envValue === undefined || envValue === null || envValue === "") + return "not-set"; + return "set"; + }, [isLoading, envValue]); + + return ( + ( + + + {label}{" "} + {dimField ? ( + (not used) + ) : ( + "" + )} + + + + + {valueState === "set" ? ( + + + + + {envValue} + + ) : valueState === "loading" ? ( + + ) : ( + + + + + Not set + + )} + + + + + )} + /> + ); +} diff --git a/packages/typegen/web/src/components/ErrorBoundary.tsx b/packages/typegen/web/src/components/ErrorBoundary.tsx new file mode 100644 index 00000000..edfe4d11 --- /dev/null +++ b/packages/typegen/web/src/components/ErrorBoundary.tsx @@ -0,0 +1,115 @@ +import React, { Component, ErrorInfo, ReactNode } from "react"; +import { Button } from "./ui/button"; +import { AlertTriangle } from "lucide-react"; + +interface Props { + children: ReactNode; + fallback?: ReactNode; +} + +interface State { + hasError: boolean; + error: Error | null; + errorInfo: ErrorInfo | null; +} + +export class ErrorBoundary extends Component { + constructor(props: Props) { + super(props); + this.state = { + hasError: false, + error: null, + errorInfo: null, + }; + } + + static getDerivedStateFromError(error: Error): Partial { + return { + hasError: true, + error, + }; + } + + componentDidCatch(error: Error, errorInfo: ErrorInfo) { + console.error("ErrorBoundary caught an error:", error, errorInfo); + this.setState({ + error, + errorInfo, + }); + } + + handleReset = () => { + this.setState({ + hasError: false, + error: null, + errorInfo: null, + }); + }; + + render() { + if (this.state.hasError) { + if (this.props.fallback) { + return this.props.fallback; + } + + return ( +
+
+
+ +

+ Something went wrong +

+
+

+ An unexpected error occurred. Please try refreshing the page or + contact support if the problem persists. +

+ {this.state.error && ( +
+ + Error details + +
+
+ {this.state.error.name}: {this.state.error.message} +
+ {this.state.error.stack && ( +
+                      {this.state.error.stack}
+                    
+ )} + {this.state.errorInfo && ( +
+
+ Component Stack: +
+
+                        {this.state.errorInfo.componentStack}
+                      
+
+ )} +
+
+ )} +
+ + +
+
+
+ ); + } + + return this.props.children; + } +} + + diff --git a/packages/typegen/web/src/components/InfoTooltip.tsx b/packages/typegen/web/src/components/InfoTooltip.tsx new file mode 100644 index 00000000..89d96e5e --- /dev/null +++ b/packages/typegen/web/src/components/InfoTooltip.tsx @@ -0,0 +1,15 @@ +import { InfoIcon } from "lucide-react"; +import { Tooltip, TooltipTrigger, TooltipContent } from "./ui/tooltip"; + +export function InfoTooltip({ label }: { label: string }) { + return ( + + + + + + {label} + + + ); +} diff --git a/packages/typegen/web/src/components/LayoutEditor.tsx b/packages/typegen/web/src/components/LayoutEditor.tsx new file mode 100644 index 00000000..737253e0 --- /dev/null +++ b/packages/typegen/web/src/components/LayoutEditor.tsx @@ -0,0 +1,83 @@ +import { useFieldArray, useFormContext } from "react-hook-form"; +import { Button } from "./ui/button"; +import { SingleConfig } from "../lib/config-utils"; +import { LayoutItemEditor } from "./LayoutItemEditor"; +import { Plus, AlertTriangle } from "lucide-react"; +import { useTestConnection } from "../hooks/useTestConnection"; + +interface LayoutEditorProps { + configIndex: number; +} + +export function LayoutEditor({ configIndex }: LayoutEditorProps) { + const { control } = useFormContext<{ config: SingleConfig[] }>(); + const { fields, append, remove } = useFieldArray({ + control, + name: `config.${configIndex}.layouts` as const, + }); + + // Check connection test status + const { status: testStatus, errorDetails } = useTestConnection(configIndex); + // Only show warning if connection test failed + const showWarning = testStatus === "error"; + + return ( +
+ {showWarning && ( +
+
+ +
+
+
Connection test failed
+ {errorDetails?.message && ( +
+ {errorDetails.message} +
+ )} +
+ Fix the connection issue in the "Server Connection Settings" + dialog before adding layouts. +
+
+
+
+
+ )} + +

Layouts

+ + {fields.length === 0 && ( +

+ No layouts configured. Click "Add Layout" to add one. +

+ )} + + {fields.map((field, fieldIndex) => ( + remove(fieldIndex)} + /> + ))} + +
+ +
+
+ ); +} diff --git a/packages/typegen/web/src/components/LayoutItemEditor.tsx b/packages/typegen/web/src/components/LayoutItemEditor.tsx new file mode 100644 index 00000000..eafcee34 --- /dev/null +++ b/packages/typegen/web/src/components/LayoutItemEditor.tsx @@ -0,0 +1,210 @@ +import { useFormContext } from "react-hook-form"; +import { Button } from "./ui/button"; +import { + FormControl, + FormField, + FormItem, + FormLabel, + FormMessage, +} from "./ui/form"; +import { Input } from "./ui/input"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "./ui/select"; +import { SwitchField } from "./ui/switch-field"; +import { SingleConfig } from "../lib/config-utils"; +import { LayoutSelector } from "./LayoutSelector"; +import { InfoTooltip } from "./InfoTooltip"; +import { CircleMinus } from "lucide-react"; + +interface LayoutItemEditorProps { + configIndex: number; + layoutIndex: number; + onRemove: () => void; +} + +export function LayoutItemEditor({ + configIndex, + layoutIndex, + onRemove, +}: LayoutItemEditorProps) { + const { control, watch } = useFormContext<{ config: SingleConfig[] }>(); + const schemaName = watch( + `config.${configIndex}.layouts.${layoutIndex}.schemaName`, + ); + const layoutName = watch( + `config.${configIndex}.layouts.${layoutIndex}.layoutName`, + ); + + return ( +
+
+
+

+ {schemaName || `Layout ${layoutIndex + 1}`} +

+
+ {layoutName ? ( + layoutName + ) : ( + No layout selected + )} +
+
+ +
+ +
+
+ + + ( + + + Schema Name{" "} + + + + + + + + )} + /> +
+ +
+ ( + + + Value Lists{" "} + + + + + + + + )} + /> + + ( + + + + + + + )} + /> + + { + const isDefault = field.value === undefined; + return ( + + Generate Client + + + + + + ); + }} + /> +
+
+
+ ); +} diff --git a/packages/typegen/web/src/components/LayoutSelector.tsx b/packages/typegen/web/src/components/LayoutSelector.tsx new file mode 100644 index 00000000..947a3d01 --- /dev/null +++ b/packages/typegen/web/src/components/LayoutSelector.tsx @@ -0,0 +1,243 @@ +import * as React from "react"; +import { client } from "@/lib/api"; +import { useQuery } from "@tanstack/react-query"; +import { Path, useFormContext } from "react-hook-form"; +import { cn } from "@/lib/utils"; +import { Button, ButtonArrow } from "@/components/ui/button"; +import { + Command, + CommandCheck, + CommandEmpty, + CommandGroup, + CommandInput, + CommandItem, + CommandList, +} from "@/components/ui/command"; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from "@/components/ui/popover"; +import { + FormControl, + FormField, + FormItem, + FormLabel, + FormMessage, +} from "./ui/form"; +import { SingleConfig } from "@/lib/config-utils"; +import { InfoTooltip } from "./InfoTooltip"; + +type FormData = { config: SingleConfig[] }; + +export function LayoutSelector({ + configIndex, + path, +}: { + configIndex: number; + path: Path; +}) { + const { control, setValue, getValues } = useFormContext(); + const [open, setOpen] = React.useState(false); + + const { + data: layouts, + isLoading, + isError, + error, + } = useQuery({ + queryKey: ["layouts", configIndex], + queryFn: async () => { + const res = await client.api.layouts.$get({ + query: { configIndex: configIndex.toString() }, + }); + + const data = await res.json(); + if (!res.ok || "error" in data) { + // Parse error JSON to get detailed error information + const errorMessage = + "error" in data ? 
data.error : "Failed to fetch layouts"; + throw new Error(errorMessage); + } + return data.layouts; + }, + }); + + // Extract error details from the error object + const errorDetails = error && (error as any).details; + + // Transform layouts array into combobox format + const layoutOptions = React.useMemo(() => { + if (!layouts) return []; + return layouts.map((layout) => ({ + value: layout.name, + label: layout.name, + })); + }, [layouts]); + + return ( + ( + + + Layout Name{" "} + + + + + + + + + + + + {isLoading ? ( +
+ Loading layouts... +
+ ) : isError ? ( +
+
+ {error instanceof Error + ? error.message + : "Failed to load layouts"} +
+ {errorDetails && ( +
+ {errorDetails.missing && ( +
+
+ Missing environment variables: +
+
    + {errorDetails.missing.server && ( +
  • + Server + {errorDetails.suspectedField === + "server" && " ⚠️"} +
  • + )} + {errorDetails.missing.db && ( +
  • + Database + {errorDetails.suspectedField === "db" && + " ⚠️"} +
  • + )} + {errorDetails.missing.auth && ( +
  • + Authentication + {errorDetails.suspectedField === "auth" && + " ⚠️"} +
  • + )} + {errorDetails.missing.password && ( +
  • + Password + {errorDetails.suspectedField === "auth" && + " ⚠️"} +
  • + )} +
+
+ )} + {errorDetails.fmErrorCode && ( +
+ + FileMaker Error Code: + {" "} + {errorDetails.fmErrorCode} +
+ )} + {errorDetails.suspectedField && + !errorDetails.missing && ( +
+ Suspected issue with:{" "} + {errorDetails.suspectedField === "server" + ? "Server URL" + : errorDetails.suspectedField === "db" + ? "Database name" + : "Credentials"} +
+ )} +
+ )} +
+ Check your connection settings in "Configure + Environment Variables" +
+
+ ) : ( + <> + No layout found. + + {layoutOptions.map((layout) => ( + { + const newValue = + currentValue === field.value + ? "" + : currentValue; + field.onChange(newValue); + + // If schema name is undefined or empty, set it to the layout name + if (newValue) { + const schemaNamePath = path.replace( + ".layoutName", + ".schemaName", + ) as Path; + const currentSchemaName = + getValues(schemaNamePath); + if ( + currentSchemaName === undefined || + currentSchemaName === "" + ) { + setValue(schemaNamePath, newValue); + } + } + + setOpen(false); + }} + > + {layout.label} + {field.value === layout.value && } + + ))} + + + )} +
+
+
+
+
+ +
+ )} + /> + ); +} + +export default LayoutSelector; diff --git a/packages/typegen/web/src/components/MetadataFieldsDialog.tsx b/packages/typegen/web/src/components/MetadataFieldsDialog.tsx new file mode 100644 index 00000000..8803fc5c --- /dev/null +++ b/packages/typegen/web/src/components/MetadataFieldsDialog.tsx @@ -0,0 +1,1099 @@ +import { useMemo, useState, useCallback, useRef, useEffect } from "react"; +import { useFormContext, useWatch } from "react-hook-form"; +import { Search, Check, Key } from "lucide-react"; +import { + useReactTable, + getCoreRowModel, + getSortedRowModel, + getFilteredRowModel, + getPaginationRowModel, + type ColumnDef, +} from "@tanstack/react-table"; +import { DataGrid, DataGridContainer } from "./ui/data-grid"; +import { DataGridTable } from "./ui/data-grid-table"; +import { DataGridColumnHeader } from "./ui/data-grid-column-header"; +import { DataGridPagination } from "./ui/data-grid-pagination"; +import { Input, InputWrapper } from "./ui/input"; +import { Switch } from "./ui/switch"; +import { Skeleton } from "./ui/skeleton"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "./ui/select"; +import { DropdownMenuItem } from "./ui/dropdown-menu"; +import { + Dialog, + DialogContent, + DialogHeader, + DialogTitle, + DialogBody, +} from "./ui/dialog"; +import { useTableMetadata } from "../hooks/useTableMetadata"; +import type { SingleConfig } from "../lib/config-utils"; +import { InfoTooltip } from "./InfoTooltip"; +import { + FormControl, + FormField, + FormItem, + FormLabel, + FormMessage, +} from "./ui/form"; + +// Memoize model functions outside component to ensure stable references +const coreRowModel = getCoreRowModel(); +const sortedRowModel = getSortedRowModel(); +const filteredRowModel = getFilteredRowModel(); +const paginationRowModel = getPaginationRowModel(); + +// Stable empty array to prevent infinite re-renders +const EMPTY_FIELDS_CONFIG: any[] = []; + +/** + * Maps OData types to 
readable field type labels + * Based on the mappings in generateODataTypes.ts + */ +function mapODataTypeToReadableLabel(edmType: string): string { + switch (edmType) { + case "Edm.String": + return "Text"; + case "Edm.Decimal": + case "Edm.Int32": + case "Edm.Int64": + case "Edm.Double": + return "Number"; + case "Edm.Boolean": + return "Boolean"; + case "Edm.Date": + return "Date"; + case "Edm.DateTimeOffset": + return "Timestamp"; + case "Edm.Binary": + return "Container"; + default: + // For unknown types, show the original type + return edmType || "Unknown"; + } +} + +/** + * Reusable component for rendering boolean values in table cells + * Shows a green checkmark when true, dash when false/undefined + */ +function BooleanCell({ value }: { value: boolean | undefined }) { + return ( +
+ {value === true ? ( + + ) : ( + - + )} +
+ ); +} + +interface FieldRow { + fieldName: string; + fieldType: string; + nullable: boolean | undefined; + global: boolean | undefined; + readOnly: boolean; + isExcluded: boolean; + typeOverride?: string; + primaryKey: boolean; +} + +interface MetadataFieldsDialogProps { + open: boolean; + onOpenChange: (open: boolean) => void; + tableName: string | null; + configIndex: number; +} + +export function MetadataFieldsDialog({ + open, + onOpenChange, + tableName, + configIndex, +}: MetadataFieldsDialogProps) { + // Fetch metadata - query is paused when dialog is not open + const { + data: parsedMetadata, + isLoading, + isError, + error, + } = useTableMetadata( + configIndex, + tableName, + open, // enabled flag + ); + const { control, setValue } = useFormContext<{ config: SingleConfig[] }>(); + + const [globalFilter, setGlobalFilter] = useState(""); + + // Get the config type to validate we're working with fmodata + const configType = useWatch({ + control, + name: `config.${configIndex}.type` as const, + }); + + // Get the entire tables config - we'll extract the specific table's fields + const allTablesConfig = useWatch({ + control, + name: `config.${configIndex}.tables` as const, + }); + + // Use a ref to store the latest fieldsConfig to avoid unstable dependencies + const fieldsConfigRef = useRef(EMPTY_FIELDS_CONFIG); + + // Extract the specific table's config - use stable reference to prevent infinite re-renders + const tableConfig = useMemo(() => { + if (!tableName || !allTablesConfig || !Array.isArray(allTablesConfig)) { + return undefined; + } + return allTablesConfig.find((t) => t?.tableName === tableName); + }, [tableName, allTablesConfig]); + + // Compute the table index for use in form paths + const tableIndex = useMemo(() => { + if (!tableName || !allTablesConfig || !Array.isArray(allTablesConfig)) { + return -1; + } + return allTablesConfig.findIndex((t) => t?.tableName === tableName); + }, [tableName, allTablesConfig]); + + // Ensure table exists in 
config when dialog opens (if table is included) + // This ensures we have a stable index for useController + useEffect(() => { + if (!open || !tableName || configType !== "fmodata") return; + if (tableIndex < 0) { + // Table doesn't exist yet, but we need it to exist for the form fields + // Only create it if we're actually configuring it (it should be included) + const currentTables = Array.isArray(allTablesConfig) + ? allTablesConfig + : []; + setValue( + `config.${configIndex}.tables` as any, + [...currentTables, { tableName }], + { shouldDirty: false }, // Don't mark as dirty since this is just initialization + ); + } + }, [ + open, + tableName, + tableIndex, + configType, + configIndex, + allTablesConfig, + setValue, + ]); + + // Get the current table index - this will update after useEffect ensures table exists + const currentTableIndex = useMemo(() => { + if (!tableName || !allTablesConfig || !Array.isArray(allTablesConfig)) { + return -1; + } + return allTablesConfig.findIndex((t) => t?.tableName === tableName); + }, [tableName, allTablesConfig]); + + // Extract only the specific table's fields config - use stable reference to prevent infinite re-renders + const fieldsConfig = useMemo(() => { + if (!tableConfig) { + return EMPTY_FIELDS_CONFIG; + } + return (tableConfig.fields ?? EMPTY_FIELDS_CONFIG) as any[]; + }, [tableConfig]); + + // Keep ref in sync + fieldsConfigRef.current = fieldsConfig; + + // Helper to toggle field exclusion - use ref to avoid dependency on fieldsConfig + const toggleFieldExclude = useCallback( + (fieldName: string, exclude: boolean) => { + if (configType !== "fmodata" || !tableName) return; + + const currentTables = Array.isArray(allTablesConfig) + ? 
allTablesConfig + : []; + const tableIndex = currentTables.findIndex( + (t) => t?.tableName === tableName, + ); + + if (tableIndex < 0) { + // Table doesn't exist in config yet + if (exclude) { + // Add new table with field excluded + setValue( + `config.${configIndex}.tables` as any, + [ + ...currentTables, + { tableName, fields: [{ fieldName, exclude: true }] }, + ], + { shouldDirty: true }, + ); + } + return; + } + + const currentFields = currentTables[tableIndex]?.fields ?? []; + const fieldIndex = currentFields.findIndex( + (f) => f?.fieldName === fieldName, + ); + + if (exclude) { + // Set exclude to true + if (fieldIndex >= 0) { + // Update existing field entry + const newFields = [...currentFields]; + newFields[fieldIndex] = { ...newFields[fieldIndex]!, exclude: true }; + const newTables = [...currentTables]; + newTables[tableIndex] = { + ...newTables[tableIndex]!, + fields: newFields, + }; + setValue(`config.${configIndex}.tables` as any, newTables, { + shouldDirty: true, + }); + } else { + // Add new field entry + const newTables = [...currentTables]; + newTables[tableIndex] = { + ...newTables[tableIndex]!, + fields: [...currentFields, { fieldName, exclude: true }], + }; + setValue(`config.${configIndex}.tables` as any, newTables, { + shouldDirty: true, + }); + } + } else { + // Remove exclude (or remove entire entry if no other config) + if (fieldIndex >= 0) { + const fieldConfig = currentFields[fieldIndex]!; + const { exclude: _, ...rest } = fieldConfig; + + if (Object.keys(rest).length === 1 && rest.fieldName) { + // Only fieldName left, remove entire field entry + const newFields = currentFields.filter((_, i) => i !== fieldIndex); + const newTables = [...currentTables]; + + if ( + newFields.length === 0 && + Object.keys(newTables[tableIndex]!).length === 2 + ) { + // Only tableName and fields left, remove entire table entry + const filteredTables = currentTables.filter( + (_, i) => i !== tableIndex, + ); + setValue( + `config.${configIndex}.tables` as 
any, + filteredTables.length > 0 ? filteredTables : undefined, + { shouldDirty: true }, + ); + } else { + // Keep table but update fields + newTables[tableIndex] = { + ...newTables[tableIndex]!, + fields: newFields.length > 0 ? newFields : undefined, + }; + setValue(`config.${configIndex}.tables` as any, newTables, { + shouldDirty: true, + }); + } + } else { + // Keep other properties + const newFields = [...currentFields]; + newFields[fieldIndex] = rest as any; + const newTables = [...currentTables]; + newTables[tableIndex] = { + ...newTables[tableIndex]!, + fields: newFields, + }; + setValue(`config.${configIndex}.tables` as any, newTables, { + shouldDirty: true, + }); + } + } + } + }, + [configType, configIndex, tableName, allTablesConfig, setValue], + ); + + // Get the field name for variableName - table should exist due to ensuredTableIndex above + const variableNameFieldName = + `config.${configIndex}.tables.${currentTableIndex >= 0 ? currentTableIndex : 0}.variableName` as any; + + // Get the field name for reduceMetadata - table should exist due to ensuredTableIndex above + const reduceMetadataFieldName = + `config.${configIndex}.tables.${currentTableIndex >= 0 ? currentTableIndex : 0}.reduceMetadata` as any; + + // Get the field name for alwaysOverrideFieldNames - table should exist due to ensuredTableIndex above + const alwaysOverrideFieldNamesFieldName = + `config.${configIndex}.tables.${currentTableIndex >= 0 ? currentTableIndex : 0}.alwaysOverrideFieldNames` as any; + + // Helper to set field type override - use ref to avoid dependency on fieldsConfig + const setFieldTypeOverride = useCallback( + (fieldName: string, typeOverride: string | undefined) => { + if (configType !== "fmodata" || !tableName) return; + + const currentTables = Array.isArray(allTablesConfig) + ? 
allTablesConfig + : []; + const tableIndex = currentTables.findIndex( + (t) => t?.tableName === tableName, + ); + + if (tableIndex < 0) { + // Table doesn't exist in config yet + if (typeOverride) { + // Add new table with field type override + setValue( + `config.${configIndex}.tables` as any, + [ + ...currentTables, + { tableName, fields: [{ fieldName, typeOverride }] }, + ], + { shouldDirty: true }, + ); + } + return; + } + + const currentFields = currentTables[tableIndex]?.fields ?? []; + const fieldIndex = currentFields.findIndex( + (f) => f?.fieldName === fieldName, + ); + + if (typeOverride) { + // Set typeOverride + if (fieldIndex >= 0) { + // Update existing field entry + const newFields = [...currentFields]; + newFields[fieldIndex] = { + ...newFields[fieldIndex]!, + typeOverride, + } as any; + const newTables = [...currentTables]; + newTables[tableIndex] = { + ...newTables[tableIndex]!, + fields: newFields, + }; + setValue(`config.${configIndex}.tables` as any, newTables, { + shouldDirty: true, + }); + } else { + // Add new field entry + const newTables = [...currentTables]; + newTables[tableIndex] = { + ...newTables[tableIndex]!, + fields: [...currentFields, { fieldName, typeOverride } as any], + }; + setValue(`config.${configIndex}.tables` as any, newTables, { + shouldDirty: true, + }); + } + } else { + // Remove typeOverride (or remove entire entry if no other config) + if (fieldIndex >= 0) { + const fieldConfig = currentFields[fieldIndex]!; + const { typeOverride: _, ...rest } = fieldConfig; + + if (Object.keys(rest).length === 1 && rest.fieldName) { + // Only fieldName left, remove entire field entry + const newFields = currentFields.filter((_, i) => i !== fieldIndex); + const newTables = [...currentTables]; + + if ( + newFields.length === 0 && + Object.keys(newTables[tableIndex]!).length === 2 + ) { + // Only tableName and fields left, remove entire table entry + const filteredTables = currentTables.filter( + (_, i) => i !== tableIndex, + ); + 
setValue( + `config.${configIndex}.tables` as any, + filteredTables.length > 0 ? filteredTables : undefined, + { shouldDirty: true }, + ); + } else { + // Keep table but update fields + newTables[tableIndex] = { + ...newTables[tableIndex]!, + fields: newFields.length > 0 ? newFields : undefined, + }; + setValue(`config.${configIndex}.tables` as any, newTables, { + shouldDirty: true, + }); + } + } else { + // Keep other properties + const newFields = [...currentFields]; + newFields[fieldIndex] = rest as any; + const newTables = [...currentTables]; + newTables[tableIndex] = { + ...newTables[tableIndex]!, + fields: newFields, + }; + setValue(`config.${configIndex}.tables` as any, newTables, { + shouldDirty: true, + }); + } + } + } + }, + [configType, configIndex, tableName, allTablesConfig, setValue], + ); + + // Get fields for the selected table + const fieldsData = useMemo(() => { + if ( + !tableName || + !parsedMetadata?.entitySets || + !parsedMetadata?.entityTypes + ) { + return []; + } + + const entitySet = Object.values(parsedMetadata.entitySets).find( + (es) => es.Name === tableName, + ); + if (!entitySet) return []; + + const entityType = parsedMetadata.entityTypes[entitySet.EntityType]; + if (!entityType?.Properties) return []; + + const properties = entityType.Properties; + const keyFields = entityType.$Key || []; + const fields: FieldRow[] = []; + + // Handle both Map and object formats + if (properties instanceof Map) { + properties.forEach((fieldMetadata, fieldName) => { + const metadata = fieldMetadata as { + $Type?: string; + $Nullable?: boolean; + "@Calculation"?: boolean; + "@Global"?: boolean; + "@Org.OData.Core.V1.Permissions"?: string; + $DefaultValue?: string; + }; + // Determine if field is read-only based on generateODataTypes.ts logic + const isReadOnly = + metadata["@Calculation"] || + metadata["@Global"] || + metadata["@Org.OData.Core.V1.Permissions"]?.includes("Read") || + false; + + const fieldConfig = Array.isArray(fieldsConfig) + ? 
fieldsConfig.find((f) => f?.fieldName === fieldName) + : undefined; + const isExcluded = fieldConfig?.exclude === true; + const typeOverride = fieldConfig?.typeOverride; + const isPrimaryKey = keyFields.includes(fieldName); + + fields.push({ + fieldName, + fieldType: mapODataTypeToReadableLabel(metadata.$Type || ""), + nullable: metadata.$Nullable, + global: metadata["@Global"], + readOnly: isReadOnly, + isExcluded, + typeOverride, + primaryKey: isPrimaryKey, + }); + }); + } else if (typeof properties === "object") { + Object.entries(properties).forEach(([fieldName, fieldMetadata]) => { + const metadata = fieldMetadata as { + $Type?: string; + $Nullable?: boolean; + "@Calculation"?: boolean; + "@Global"?: boolean; + "@Org.OData.Core.V1.Permissions"?: string; + $DefaultValue?: string; + }; + // Determine if field is read-only based on generateODataTypes.ts logic + const isReadOnly = + metadata["@Calculation"] || + metadata["@Global"] || + metadata["@Org.OData.Core.V1.Permissions"]?.includes("Read") || + false; + + const fieldConfig = Array.isArray(fieldsConfig) + ? 
fieldsConfig.find((f) => f?.fieldName === fieldName) + : undefined; + const isExcluded = fieldConfig?.exclude === true; + const typeOverride = fieldConfig?.typeOverride; + const isPrimaryKey = keyFields.includes(fieldName); + + fields.push({ + fieldName, + fieldType: mapODataTypeToReadableLabel(metadata.$Type || ""), + nullable: metadata.$Nullable, + global: metadata["@Global"], + readOnly: isReadOnly, + isExcluded, + typeOverride, + primaryKey: isPrimaryKey, + }); + }); + } + + return fields; + }, [tableName, parsedMetadata, fieldsConfig]); + + // Check if all fields are included or excluded + const allFieldsIncluded = useMemo(() => { + return fieldsData.length > 0 && fieldsData.every((row) => !row.isExcluded); + }, [fieldsData]); + + const allFieldsExcluded = useMemo(() => { + return fieldsData.length > 0 && fieldsData.every((row) => row.isExcluded); + }, [fieldsData]); + + // Helper to include all fields + const includeAllFields = useCallback(() => { + if (configType !== "fmodata" || !tableName || !fieldsData.length) return; + + const currentTables = Array.isArray(allTablesConfig) ? allTablesConfig : []; + const tableIndex = currentTables.findIndex( + (t) => t?.tableName === tableName, + ); + + if (tableIndex < 0) { + // Table doesn't exist in config, nothing to do + return; + } + + const currentFields = currentTables[tableIndex]?.fields ?? []; + const allFieldNames = fieldsData.map((f) => f.fieldName); + + // Remove exclude flags from all fields + const newFields = currentFields + .map((fieldConfig) => { + const fieldName = fieldConfig?.fieldName; + if (fieldName && allFieldNames.includes(fieldName)) { + const { exclude: _, ...rest } = fieldConfig; + // If only fieldName is left, don't include it + if (Object.keys(rest).length === 1 && rest.fieldName) { + return null; + } + return Object.keys(rest).length > 1 ? 
rest : null; + } + return fieldConfig; + }) + .filter((f) => f !== null) as any[]; + + const newTables = [...currentTables]; + if (newFields.length === 0) { + // No fields left, remove fields array or entire table entry if only tableName and fields + if (Object.keys(newTables[tableIndex]!).length === 2) { + const filteredTables = currentTables.filter((_, i) => i !== tableIndex); + setValue( + `config.${configIndex}.tables` as any, + filteredTables.length > 0 ? filteredTables : undefined, + { shouldDirty: true }, + ); + } else { + newTables[tableIndex] = { + ...newTables[tableIndex]!, + fields: undefined, + }; + setValue(`config.${configIndex}.tables` as any, newTables, { + shouldDirty: true, + }); + } + } else { + newTables[tableIndex] = { + ...newTables[tableIndex]!, + fields: newFields, + }; + setValue(`config.${configIndex}.tables` as any, newTables, { + shouldDirty: true, + }); + } + }, [ + configType, + configIndex, + tableName, + allTablesConfig, + setValue, + fieldsData, + ]); + + // Helper to exclude all fields + const excludeAllFields = useCallback(() => { + if (configType !== "fmodata" || !tableName || !fieldsData.length) return; + + const currentTables = Array.isArray(allTablesConfig) ? allTablesConfig : []; + const tableIndex = currentTables.findIndex( + (t) => t?.tableName === tableName, + ); + + // Create a map of existing field configs + const fieldConfigMap = new Map( + tableIndex >= 0 + ? (currentTables[tableIndex]?.fields ?? 
[]).map((f) => [ + f?.fieldName, + f, + ]) + : [], + ); + + // Update or add exclude flag for all fields + const allFieldNames = fieldsData.map((f) => f.fieldName); + const newFields = allFieldNames.map((fieldName) => { + const existing = fieldConfigMap.get(fieldName); + if (existing) { + return { ...existing, exclude: true }; + } + return { fieldName, exclude: true }; + }); + + if (tableIndex < 0) { + // Table doesn't exist, add it with all fields excluded + setValue( + `config.${configIndex}.tables` as any, + [...currentTables, { tableName, fields: newFields }], + { shouldDirty: true }, + ); + } else { + // Update existing table + const newTables = [...currentTables]; + newTables[tableIndex] = { + ...newTables[tableIndex]!, + fields: newFields, + }; + setValue(`config.${configIndex}.tables` as any, newTables, { + shouldDirty: true, + }); + } + }, [ + configType, + configIndex, + tableName, + allTablesConfig, + setValue, + fieldsData, + ]); + + // Define columns for fields table + const fieldsColumns = useMemo[]>( + () => [ + { + accessorKey: "isExcluded", + header: ({ column }) => ( + + { + e.stopPropagation(); + includeAllFields(); + }} + disabled={allFieldsIncluded} + > + Include All + + { + e.stopPropagation(); + excludeAllFields(); + }} + disabled={allFieldsExcluded} + > + Exclude All + + + } + /> + ), + enableSorting: true, + size: 60, + minSize: 60, + maxSize: 60, + cell: (info) => { + const row = info.row.original; + const isExcluded = row.isExcluded; + return ( +
+ { + toggleFieldExclude(row.fieldName, !checked); + }} + /> +
+ ); + }, + meta: { + skeleton: , + }, + }, + { + accessorKey: "fieldName", + header: ({ column }) => ( + + ), + enableSorting: true, + cell: (info) => { + const row = info.row.original; + return ( +
+ {row.primaryKey && ( + + )} + + {info.getValue() as string} + +
+ ); + }, + meta: { + skeleton: , + }, + }, + { + accessorKey: "fieldType", + header: ({ column }) => ( + + ), + enableSorting: true, + cell: (info) => ( + + {info.getValue() as string} + + ), + meta: { + skeleton: , + }, + }, + { + id: "typeOverride", + header: ({ column }) => ( + + ), + enableSorting: false, + cell: (info) => { + const row = info.row.original; + return ( + + ); + }, + meta: { + skeleton: , + }, + }, + { + accessorKey: "nullable", + header: ({ column }) => ( + + ), + enableSorting: true, + cell: (info) => ( + + ), + meta: { + skeleton: , + }, + }, + { + accessorKey: "global", + header: ({ column }) => ( + + ), + enableSorting: true, + cell: (info) => ( + + ), + meta: { + skeleton: , + }, + }, + { + accessorKey: "readOnly", + header: ({ column }) => ( + + ), + enableSorting: true, + cell: (info) => , + meta: { + skeleton: , + }, + }, + ], + [ + toggleFieldExclude, + setFieldTypeOverride, + includeAllFields, + excludeAllFields, + allFieldsIncluded, + allFieldsExcluded, + ], + ); + + // Create fields table instance - use memoized model functions for stable references + const fieldsTable = useReactTable({ + data: fieldsData, + columns: fieldsColumns, + getCoreRowModel: coreRowModel, + getSortedRowModel: sortedRowModel, + getFilteredRowModel: filteredRowModel, + getPaginationRowModel: paginationRowModel, + globalFilterFn: "includesString", + state: { + globalFilter, + }, + onGlobalFilterChange: setGlobalFilter, + initialState: { + pagination: { + pageSize: 10, + }, + }, + }); + + // Calculate the number of included (non-excluded) fields + const selectedFieldsCount = useMemo(() => { + return fieldsData.filter((row) => !row.isExcluded).length; + }, [fieldsData]); + + return ( + + + + + Including {selectedFieldsCount} of {fieldsData.length} fields for{" "} + {tableName || "Table"} + + + +
+ + + setGlobalFilter(e.target.value)} + /> + +
+
+ {isError ? ( +
+
+
+ Failed to load fields +
+ {error instanceof Error && ( +
+ {error.message} +
+ )} +
+
+ ) : ( + + + +
+ +
+
+
+ )} +
+
+
+ ( + + + Variable Name Override + + + + { + const value = e.target.value.trim(); + field.onChange(value || undefined); + }} + /> + + + + )} + /> + { + const isDefault = field.value === undefined; + return ( + + + Always Update Field Names{" "} + + + + + + + + ); + }} + /> + { + const isDefault = field.value === undefined; + return ( + + + Reduce Metadata Annotations{" "} + + + + + + + + ); + }} + /> +
+
+
+
+
+ ); +} diff --git a/packages/typegen/web/src/components/MetadataTablesEditor.tsx b/packages/typegen/web/src/components/MetadataTablesEditor.tsx new file mode 100644 index 00000000..114b2259 --- /dev/null +++ b/packages/typegen/web/src/components/MetadataTablesEditor.tsx @@ -0,0 +1,531 @@ +import { useFormContext, useWatch } from "react-hook-form"; +import { Button } from "./ui/button"; +import { SingleConfig } from "../lib/config-utils"; +import { AlertTriangle, Loader2, Search, RefreshCw } from "lucide-react"; +import { useListTables } from "../hooks/useListTables"; +import { useTestConnection } from "../hooks/useTestConnection"; +import { Switch } from "./ui/switch"; +import { Input, InputWrapper } from "./ui/input"; +import { useMemo, useState, useCallback, useRef, useEffect } from "react"; +import { MetadataFieldsDialog } from "./MetadataFieldsDialog"; +import { useTableMetadata } from "../hooks/useTableMetadata"; +import { + useReactTable, + getCoreRowModel, + getSortedRowModel, + getFilteredRowModel, + getPaginationRowModel, + type ColumnDef, +} from "@tanstack/react-table"; +import { DataGrid, DataGridContainer } from "./ui/data-grid"; +import { DataGridTable } from "./ui/data-grid-table"; +import { DataGridColumnHeader } from "./ui/data-grid-column-header"; +import { DataGridPagination } from "./ui/data-grid-pagination"; +import { Skeleton } from "./ui/skeleton"; + +interface MetadataTablesEditorProps { + configIndex: number; +} + +interface TableRow { + tableName: string; + isIncluded: boolean; + fieldCount?: number; + includedFieldCount?: number; +} + +// Memoize model functions outside component to ensure stable references +const coreRowModel = getCoreRowModel(); +const sortedRowModel = getSortedRowModel(); +const filteredRowModel = getFilteredRowModel(); +const paginationRowModel = getPaginationRowModel(); + +// Helper component to fetch and display field count for a table +function FieldCountCell({ + tableName, + isIncluded, + configIndex, +}: { + 
tableName: string; + isIncluded: boolean; + configIndex: number; +}) { + const { control } = useFormContext<{ config: SingleConfig[] }>(); + const { data: parsedMetadata, isLoading } = useTableMetadata( + configIndex, + tableName, + isIncluded, // Only fetch when table is included + ); + + // Watch the tables config directly to ensure reactivity + const allTablesConfig = useWatch({ + control, + name: `config.${configIndex}.tables` as const, + }); + + const tableConfig = Array.isArray(allTablesConfig) + ? allTablesConfig.find((t) => t?.tableName === tableName) + : undefined; + const fieldsConfig = tableConfig?.fields ?? []; + + const fieldCount = useMemo(() => { + if (!parsedMetadata?.entitySets || !parsedMetadata?.entityTypes) { + return undefined; + } + + const entitySet = Object.values(parsedMetadata.entitySets).find( + (es) => es.Name === tableName, + ); + if (!entitySet) return undefined; + + const entityType = parsedMetadata.entityTypes[entitySet.EntityType]; + if (!entityType?.Properties) return undefined; + + const properties = entityType.Properties; + // Handle both Map and object formats + if (properties instanceof Map) { + return properties.size; + } else if (typeof properties === "object") { + return Object.keys(properties).length; + } + return undefined; + }, [parsedMetadata, tableName]); + + const includedFieldCount = useMemo(() => { + if (fieldCount === undefined) return undefined; + + // Count excluded fields + const excludedFields = fieldsConfig.filter( + (f) => f?.exclude === true, + ).length; + + // Total fields minus excluded fields + return fieldCount - excludedFields; + }, [fieldCount, fieldsConfig]); + + if (isLoading) { + return ; + } + + if (fieldCount === undefined) { + return -; + } + + // Show "included / total" if some fields are excluded, otherwise just total + if (includedFieldCount !== undefined && includedFieldCount < fieldCount) { + return ( + + {includedFieldCount} / {fieldCount} + + ); + } + + return {fieldCount}; +} + +export 
function MetadataTablesEditor({ + configIndex, +}: MetadataTablesEditorProps) { + const { control, setValue } = useFormContext<{ config: SingleConfig[] }>(); + const config = useWatch({ + control, + name: `config.${configIndex}` as const, + }); + + // Get tables config - memoize to prevent unnecessary recalculations + const tablesConfig = useMemo(() => { + if (config?.type === "fmodata" && "tables" in config) { + return config.tables ?? []; + } + return []; + }, [config]); + + // Local state to control whether to enable the query + // Initialize based on whether there are tables in the config + const [shouldLoadTables, setShouldLoadTables] = useState(() => { + if (config?.type === "fmodata" && "tables" in config) { + return (config.tables ?? []).length > 0; + } + return false; + }); + + // Update shouldLoadTables when tablesConfig changes (e.g., user adds tables manually) + useEffect(() => { + if (tablesConfig.length > 0 && !shouldLoadTables) { + setShouldLoadTables(true); + } + }, [tablesConfig.length, shouldLoadTables]); + + // Check connection test status + const { status: testStatus, errorDetails } = useTestConnection(configIndex); + const hasConnectionError = testStatus === "error"; + + const { + tables, + isLoading: isLoadingTables, + isError: isErrorTables, + error: errorTables, + refetch: refetchTables, + } = useListTables(configIndex, shouldLoadTables); + + const [selectedTableName, setSelectedTableName] = useState( + null, + ); + const [isDialogOpen, setIsDialogOpen] = useState(false); + const [searchFilter, setSearchFilter] = useState(""); + + // Use a ref to store the latest config to avoid unstable callback dependencies + const configRef = useRef(config); + configRef.current = config; + + // Helper to toggle table inclusion + const toggleTableInclude = useCallback( + (tableName: string, include: boolean) => { + const currentConfig = configRef.current; + if (currentConfig?.type !== "fmodata") return; + + const currentTables = currentConfig.tables ?? 
[]; + const tableIndex = currentTables.findIndex( + (t) => t?.tableName === tableName, + ); + + if (include) { + // Add table if not already present + if (tableIndex < 0) { + setValue( + `config.${configIndex}.tables` as any, + [...currentTables, { tableName }], + { shouldDirty: true }, + ); + } + } else { + // Remove table if present + if (tableIndex >= 0) { + const tableConfig = currentTables[tableIndex]!; + // If table has other config (like fields), we might want to keep it + // But for now, if it's just tableName, remove it + const { tableName: _, ...rest } = tableConfig; + if (Object.keys(rest).length === 0) { + // No other config, remove entirely + const newTables = currentTables.filter((_, i) => i !== tableIndex); + setValue( + `config.${configIndex}.tables` as any, + newTables.length > 0 ? newTables : undefined, + { shouldDirty: true }, + ); + } else { + // Has other config, but we're removing it anyway per user request + const newTables = currentTables.filter((_, i) => i !== tableIndex); + setValue( + `config.${configIndex}.tables` as any, + newTables.length > 0 ? newTables : undefined, + { shouldDirty: true }, + ); + } + } + } + }, + [configIndex, setValue], + ); + + // Convert tables to table rows (filtering will be handled by DataGrid) + const tableRows = useMemo(() => { + if (!tables) return []; + return tables.map((tableName) => ({ + tableName, + isIncluded: tablesConfig.some((t) => t?.tableName === tableName), + })); + }, [tables, tablesConfig]); + + // Define columns for tables table + const tablesColumns = useMemo[]>( + () => [ + { + accessorKey: "isIncluded", + header: ({ column }) => ( + + ), + enableSorting: true, + size: 100, + cell: (info) => { + const row = info.row.original; + return ( +
+ { + toggleTableInclude(row.tableName, checked); + }} + /> +
+ ); + }, + meta: { + skeleton: , + }, + }, + { + accessorKey: "tableName", + header: ({ column }) => ( + + ), + enableSorting: true, + cell: (info) => { + const row = info.row.original; + return ( + + {info.getValue() as string} + + ); + }, + meta: { + skeleton: , + }, + }, + { + id: "fieldCount", + header: ({ column }) => ( + + ), + enableSorting: false, + size: 100, + cell: (info) => { + const row = info.row.original; + if (!row.isIncluded) { + return null; + } + return ( + + ); + }, + meta: { + skeleton: , + }, + }, + { + id: "actions", + header: () => null, + enableSorting: false, + size: 150, + cell: (info) => { + const row = info.row.original; + return ( +
+ +
+ ); + }, + meta: { + skeleton: , + }, + }, + ], + [toggleTableInclude], + ); + + // Create tables table instance + const tablesTable = useReactTable({ + data: tableRows, + columns: tablesColumns, + getCoreRowModel: coreRowModel, + getSortedRowModel: sortedRowModel, + getFilteredRowModel: filteredRowModel, + getPaginationRowModel: paginationRowModel, + globalFilterFn: "includesString", + state: { + globalFilter: searchFilter, + }, + onGlobalFilterChange: setSearchFilter, + initialState: { + pagination: { + pageSize: 10, + }, + }, + }); + + // Show loading state only when actively loading + if (isLoadingTables && shouldLoadTables) { + return ( +
+

OData Tables

+
+ + Loading tables... +
+
+ ); + } + + // Show error state only if we attempted to load + if (isErrorTables && shouldLoadTables) { + return ( +
+

OData Tables

+
+
+ +
+
Failed to load tables
+ {errorTables instanceof Error && ( +
+ {errorTables.message} +
+ )} +
+
+
+
+ ); + } + + // Show button to load tables if not yet loaded + if (!shouldLoadTables) { + // Show connection warning if there are connection errors + if (hasConnectionError) { + return ( +
+

OData Tables

+
+
+ +
+
+
Connection test failed
+ {errorDetails?.message && ( +
+ {errorDetails.message} +
+ )} +
+ Fix the connection issue in the "Server Connection Settings" + dialog before loading tables. +
+
+
+
+
+
+ ); + } + + // Show button to load tables if connection is good + return ( +
+
+

OData Tables

+
+
+

+ Your connection looks good! Click the button below to pick the + tables you want to generate types for. +

+ +
+
+ ); + } + + // Show empty state only after loading + if (!tables || tables.length === 0) { + return ( +
+

OData Tables

+

+ No tables found in database. +

+
+ ); + } + + return ( + <> +
+
+

OData Tables

+ +
+ +
+ + + setSearchFilter(e.target.value)} + /> + + + + +
+ +
+
+ +
+
+
+
+
+ + + + ); +} diff --git a/packages/typegen/web/src/components/ServerEnvField.tsx b/packages/typegen/web/src/components/ServerEnvField.tsx new file mode 100644 index 00000000..cf3cf39e --- /dev/null +++ b/packages/typegen/web/src/components/ServerEnvField.tsx @@ -0,0 +1,70 @@ +import { useFormContext, useWatch, Path } from "react-hook-form"; +import { z } from "zod"; +import { configSchema } from "../lib/schema"; +import { Input } from "./ui/input"; +import { + FormControl, + FormField, + FormItem, + FormLabel, + FormMessage, +} from "./ui/form"; +import { useEnvValue } from "../lib/envValues"; + +type FormData = z.infer; + +interface EnvVarFieldProps { + index: number; + fieldName: Path<{ config: FormData[] }>; + label: string; + placeholder: string; + defaultValue: string; + type?: "text" | "password"; +} + +export function EnvVarField({ + index, + fieldName, + label, + placeholder, + defaultValue, + type = "text", +}: EnvVarFieldProps) { + const { control } = useFormContext<{ config: FormData[] }>(); + + // Watch the env name value to get the resolved env var + const envName = useWatch({ + control, + name: fieldName, + }); + + // Get the resolved value from the server + const { data: envValue, isLoading } = useEnvValue(envName || defaultValue); + + return ( + ( + + {label} + + + + {(envName || defaultValue) && ( +
+ {isLoading ? ( + Loading... + ) : envValue ? ( + Resolved: {envValue} + ) : ( + Not set + )} +
+ )} + +
+ )} + /> + ); +} diff --git a/packages/typegen/web/src/components/TableItemEditor.tsx b/packages/typegen/web/src/components/TableItemEditor.tsx new file mode 100644 index 00000000..95f263d9 --- /dev/null +++ b/packages/typegen/web/src/components/TableItemEditor.tsx @@ -0,0 +1,84 @@ +import { useFormContext, useWatch } from "react-hook-form"; +import { Button } from "./ui/button"; +import { + FormControl, + FormField, + FormItem, + FormMessage, +} from "./ui/form"; +import { SingleConfig } from "../lib/config-utils"; +import { TableSelectorCompact } from "./TableSelectorCompact"; +import { CircleMinus } from "lucide-react"; +import { MetadataFieldsDialog } from "./MetadataFieldsDialog"; +import { useState } from "react"; +import { useTableMetadata } from "../hooks/useTableMetadata"; + +interface TableItemEditorProps { + configIndex: number; + tableIndex: number; + onRemove: () => void; +} + +export function TableItemEditor({ + configIndex, + tableIndex, + onRemove, +}: TableItemEditorProps) { + const { watch } = useFormContext<{ config: SingleConfig[] }>(); + const tableName = watch( + `config.${configIndex}.tables.${tableIndex}.tableName`, + ); + + const [isDialogOpen, setIsDialogOpen] = useState(false); + + // Fetch metadata when dialog is opened + const { data: dialogTableMetadata } = useTableMetadata( + configIndex, + isDialogOpen ? tableName : null, + ); + + return ( + <> +
+
+ +
+
+ {tableName && ( + + )} + +
+
+ + + + ); +} + diff --git a/packages/typegen/web/src/components/TableSelector.tsx b/packages/typegen/web/src/components/TableSelector.tsx new file mode 100644 index 00000000..d180ffb1 --- /dev/null +++ b/packages/typegen/web/src/components/TableSelector.tsx @@ -0,0 +1,150 @@ +import * as React from "react"; +import { Path, useFormContext } from "react-hook-form"; +import { cn } from "@/lib/utils"; +import { Button, ButtonArrow } from "@/components/ui/button"; +import { + Command, + CommandCheck, + CommandEmpty, + CommandGroup, + CommandInput, + CommandItem, + CommandList, +} from "@/components/ui/command"; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from "@/components/ui/popover"; +import { + FormControl, + FormField, + FormItem, + FormLabel, + FormMessage, +} from "./ui/form"; +import { SingleConfig } from "@/lib/config-utils"; +import { InfoTooltip } from "./InfoTooltip"; +import { useListTables } from "../hooks/useListTables"; +import { Loader2, AlertTriangle } from "lucide-react"; + +type FormData = { config: SingleConfig[] }; + +export function TableSelector({ + configIndex, + path, +}: { + configIndex: number; + path: Path; +}) { + const { control } = useFormContext(); + const [open, setOpen] = React.useState(false); + + const { + tables, + isLoading, + isError, + error, + } = useListTables(configIndex); + + // Transform tables array into combobox format + const tableOptions = React.useMemo(() => { + if (!tables) return []; + return tables.map((table) => ({ + value: table, + label: table, + })); + }, [tables]); + + return ( + ( + + + Table Occurrence Name{" "} + + + + + + + + + + + + {isLoading ? ( +
+ + Loading tables... +
+ ) : isError ? ( +
+
+ +
+
+ {error instanceof Error + ? error.message + : "Failed to load tables"} +
+
+
+
+ ) : ( + <> + No table found. + + {tableOptions.map((table) => ( + { + const newValue = + currentValue === field.value + ? "" + : currentValue; + field.onChange(newValue); + setOpen(false); + }} + > + {table.label} + {field.value === table.value && } + + ))} + + + )} +
+
+
+
+
+ +
+ )} + /> + ); +} + +export default TableSelector; + diff --git a/packages/typegen/web/src/components/TableSelectorCompact.tsx b/packages/typegen/web/src/components/TableSelectorCompact.tsx new file mode 100644 index 00000000..5e81da7e --- /dev/null +++ b/packages/typegen/web/src/components/TableSelectorCompact.tsx @@ -0,0 +1,143 @@ +import * as React from "react"; +import { Path, useFormContext } from "react-hook-form"; +import { cn } from "@/lib/utils"; +import { Button, ButtonArrow } from "@/components/ui/button"; +import { + Command, + CommandCheck, + CommandEmpty, + CommandGroup, + CommandInput, + CommandItem, + CommandList, +} from "@/components/ui/command"; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from "@/components/ui/popover"; +import { + FormControl, + FormField, + FormItem, + FormMessage, +} from "./ui/form"; +import { SingleConfig } from "@/lib/config-utils"; +import { useListTables } from "../hooks/useListTables"; +import { Loader2, AlertTriangle } from "lucide-react"; + +type FormData = { config: SingleConfig[] }; + +export function TableSelectorCompact({ + configIndex, + path, +}: { + configIndex: number; + path: Path; +}) { + const { control } = useFormContext(); + const [open, setOpen] = React.useState(false); + + const { + tables, + isLoading, + isError, + error, + } = useListTables(configIndex); + + // Transform tables array into combobox format + const tableOptions = React.useMemo(() => { + if (!tables) return []; + return tables.map((table) => ({ + value: table, + label: table, + })); + }, [tables]); + + return ( + ( + + + + + + + + + + + {isLoading ? ( +
+ + Loading tables... +
+ ) : isError ? ( +
+
+ +
+
+ {error instanceof Error + ? error.message + : "Failed to load tables"} +
+
+
+
+ ) : ( + <> + No table found. + + {tableOptions.map((table) => ( + { + const newValue = + currentValue === field.value + ? "" + : currentValue; + field.onChange(newValue); + setOpen(false); + }} + > + {table.label} + {field.value === table.value && } + + ))} + + + )} +
+
+
+
+
+ +
+ )} + /> + ); +} + + diff --git a/packages/typegen/web/src/components/badge/circle.tsx b/packages/typegen/web/src/components/badge/circle.tsx new file mode 100644 index 00000000..3da418fa --- /dev/null +++ b/packages/typegen/web/src/components/badge/circle.tsx @@ -0,0 +1,59 @@ +import { Badge } from '@/components/ui/badge'; + +export default function Component() { + return ( +
+
+ + Primary + + + Success + + + Warning + + + Info + + + Destructive + +
+
+ + Primary + + + Success + + + Warning + + + Info + + + Destructive + +
+
+ + Primary + + + Success + + + Warning + + + Info + + + Destructive + +
+
+ ); +} diff --git a/packages/typegen/web/src/components/button/loading.tsx b/packages/typegen/web/src/components/button/loading.tsx new file mode 100644 index 00000000..685eb47c --- /dev/null +++ b/packages/typegen/web/src/components/button/loading.tsx @@ -0,0 +1,28 @@ +'use client'; + +import { useEffect, useState } from 'react'; +import { Button } from '@/components/ui/button'; +import { LoaderCircleIcon } from 'lucide-react'; + +export default function ButtonDemo() { + const [isDisabled, setIsDisabled] = useState(false); + + useEffect(() => { + // Automatically toggle button state every 4 seconds + const interval = setInterval(() => { + setIsDisabled((prev) => !prev); + }, 1000); + + // Cleanup interval on component unmount + return () => clearInterval(interval); + }, []); + + return ( +
+ +
+ ); +} diff --git a/packages/typegen/web/src/components/combobox/default.tsx b/packages/typegen/web/src/components/combobox/default.tsx new file mode 100644 index 00000000..d909f881 --- /dev/null +++ b/packages/typegen/web/src/components/combobox/default.tsx @@ -0,0 +1,90 @@ +'use client'; + +import * as React from 'react'; +import { cn } from '@/lib/utils'; +import { Button, ButtonArrow } from '@/components/ui/button'; +import { + Command, + CommandCheck, + CommandEmpty, + CommandGroup, + CommandInput, + CommandItem, + CommandList, +} from '@/components/ui/command'; +import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'; + +const topCities = [ + { + value: 'amsterdam', + label: 'Amsterdam, Netherlands', + }, + { + value: 'london', + label: 'London, UK', + }, + { + value: 'paris', + label: 'Paris, France', + }, + { + value: 'tokyo', + label: 'Tokyo, Japan', + }, + { + value: 'new_york', + label: 'New York, USA', + }, + { + value: 'dubai', + label: 'Dubai, UAE', + }, +]; + +export default function ComboboxDemo() { + const [open, setOpen] = React.useState(false); + const [value, setValue] = React.useState(''); + + return ( + + + + + + + + + No city found. + + {topCities.map((city) => ( + { + setValue(currentValue === value ? 
'' : currentValue); + setOpen(false); + }} + > + {city.label} + {value === city.value && } + + ))} + + + + + + ); +} diff --git a/packages/typegen/web/src/components/data-grid/skeleton.tsx b/packages/typegen/web/src/components/data-grid/skeleton.tsx new file mode 100644 index 00000000..c48f4c53 --- /dev/null +++ b/packages/typegen/web/src/components/data-grid/skeleton.tsx @@ -0,0 +1,434 @@ +import { useMemo, useState } from "react"; +import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar"; +import { Badge } from "@/components/ui/badge"; +import { Button } from "@/components/ui/button"; +import { + Card, + CardFooter, + CardHeader, + CardTable, + CardTitle, + CardToolbar, +} from "@/components/ui/card"; +import { DataGrid } from "@/components/ui/data-grid"; +import { DataGridColumnHeader } from "@/components/ui/data-grid-column-header"; +import { DataGridPagination } from "@/components/ui/data-grid-pagination"; +import { DataGridTable } from "@/components/ui/data-grid-table"; +import { ScrollArea, ScrollBar } from "@/components/ui/scroll-area"; +import { Skeleton } from "@/components/ui/skeleton"; +import { + ColumnDef, + getCoreRowModel, + getFilteredRowModel, + getPaginationRowModel, + getSortedRowModel, + PaginationState, + SortingState, + useReactTable, +} from "@tanstack/react-table"; + +interface IData { + id: string; + name: string; + availability: "online" | "away" | "busy" | "offline"; + avatar: string; + status: "active" | "inactive"; + flag: string; // Emoji flags + email: string; + company: string; + role: string; + joined: string; + location: string; + balance: number; +} + +const demoData: IData[] = [ + { + id: "1", + name: "Kathryn Campbell", + availability: "online", + avatar: "1.png", + status: "active", + flag: "🇺🇸", + email: "kathryn@apple.com", + company: "Apple", + role: "CEO", + joined: "2021-04-15", + location: "San Francisco, USA", + balance: 5143.03, + }, + { + id: "2", + name: "Robert Smith", + availability: "away", + 
avatar: "2.png", + status: "inactive", + flag: "🇬🇧", + email: "robert@openai.com", + company: "OpenAI", + role: "CTO", + joined: "2020-07-20", + location: "London, UK", + balance: 4321.87, + }, + { + id: "3", + name: "Sophia Johnson", + availability: "busy", + avatar: "3.png", + status: "active", + flag: "🇨🇦", + email: "sophia@meta.com", + company: "Meta", + role: "Designer", + joined: "2019-03-12", + location: "Toronto, Canada", + balance: 7654.98, + }, + { + id: "4", + name: "Lucas Walker", + availability: "offline", + avatar: "4.png", + status: "inactive", + flag: "🇦🇺", + email: "lucas@tesla.com", + company: "Tesla", + role: "Developer", + joined: "2022-01-18", + location: "Sydney, Australia", + balance: 3456.45, + }, + { + id: "5", + name: "Emily Davis", + availability: "online", + avatar: "5.png", + status: "active", + flag: "🇩🇪", + email: "emily@sap.com", + company: "SAP", + role: "Lawyer", + joined: "2023-05-23", + location: "Berlin, Germany", + balance: 9876.54, + }, + { + id: "6", + name: "James Lee", + availability: "away", + avatar: "6.png", + status: "active", + flag: "🇲🇾", + email: "james@keenthemes.com", + company: "Keenthemes", + role: "Director", + joined: "2018-11-30", + location: "Kuala Lumpur, MY", + balance: 6214.22, + }, + { + id: "7", + name: "Isabella Martinez", + availability: "busy", + avatar: "7.png", + status: "inactive", + flag: "🇪🇸", + email: "isabella@bbva.es", + company: "BBVA", + role: "Product Manager", + joined: "2021-06-14", + location: "Barcelona, Spain", + balance: 5321.77, + }, + { + id: "8", + name: "Benjamin Harris", + availability: "offline", + avatar: "8.png", + status: "active", + flag: "🇯🇵", + email: "benjamin@sony.jp", + company: "Sony", + role: "Marketing Lead", + joined: "2020-10-22", + location: "Tokyo, Japan", + balance: 8452.39, + }, + { + id: "9", + name: "Olivia Brown", + availability: "online", + avatar: "9.png", + status: "active", + flag: "🇫🇷", + email: "olivia@lvmh.fr", + company: "LVMH", + role: "Data 
Scientist", + joined: "2019-09-17", + location: "Paris, France", + balance: 7345.1, + }, + { + id: "10", + name: "Michael Clark", + availability: "away", + avatar: "10.png", + status: "inactive", + flag: "🇮🇹", + email: "michael@eni.it", + company: "ENI", + role: "Engineer", + joined: "2023-02-11", + location: "Milan, Italy", + balance: 5214.88, + }, + { + id: "11", + name: "Ava Wilson", + availability: "busy", + avatar: "11.png", + status: "active", + flag: "🇧🇷", + email: "ava@vale.br", + company: "Vale", + role: "Software Engineer", + joined: "2022-12-01", + location: "Rio de Janeiro, Brazil", + balance: 9421.5, + }, + { + id: "12", + name: "David Young", + availability: "offline", + avatar: "12.png", + status: "active", + flag: "🇮🇳", + email: "david@tata.in", + company: "Tata", + role: "Sales Manager", + joined: "2020-03-27", + location: "Mumbai, India", + balance: 4521.67, + }, +]; + +export default function DataGridDemo() { + const [pagination, setPagination] = useState({ + pageIndex: 0, + pageSize: 5, + }); + const [sorting, setSorting] = useState([ + { id: "name", desc: true }, + ]); + const [isLoading, setIsLoading] = useState(true); + + const handleToggleLoading = () => { + setIsLoading((prev) => !prev); + }; + + const columns = useMemo[]>( + () => [ + { + accessorKey: "name", + id: "name", + header: ({ column }) => ( + + ), + cell: ({ row }) => { + return ( +
+ + + N + +
+
+ {row.original.name} +
+
+ {row.original.email} +
+
+
+ ); + }, + meta: { + skeleton: ( +
+ +
+ + +
+
+ ), + }, + size: 200, + enableSorting: true, + enableHiding: false, + enableResizing: true, + }, + { + accessorKey: "email", + id: "email", + header: ({ column }) => ( + + ), + cell: (info) => ( + + {info.getValue() as string} + + ), + size: 150, + meta: { + headerClassName: "", + cellClassName: "text-left", + skeleton: , + }, + enableSorting: true, + enableHiding: true, + enableResizing: true, + }, + { + accessorKey: "location", + id: "location", + header: ({ column }) => ( + + ), + cell: ({ row }) => { + return ( +
+ {row.original.flag} +
+ {row.original.location} +
+
+ ); + }, + size: 160, + meta: { + headerClassName: "", + cellClassName: "text-start", + skeleton: , + }, + enableSorting: true, + enableHiding: true, + enableResizing: true, + }, + { + accessorKey: "status", + id: "status", + header: ({ column }) => ( + + ), + cell: ({ row }) => { + const status = row.original.status; + + if (status == "active") { + return ( + + Approved + + ); + } else { + return ( + + Pending + + ); + } + }, + meta: { + skeleton: , + }, + size: 100, + enableSorting: true, + enableHiding: true, + enableResizing: false, + }, + ], + [], + ); + + const [columnOrder, setColumnOrder] = useState( + columns.map((column) => column.id as string), + ); + + const table = useReactTable({ + columns, + data: demoData, + pageCount: Math.ceil((demoData?.length || 0) / pagination.pageSize), + getRowId: (row: IData) => row.id, + state: { + pagination, + sorting, + columnOrder, + }, + onPaginationChange: setPagination, + onSortingChange: setSorting, + onColumnOrderChange: setColumnOrder, + getCoreRowModel: getCoreRowModel(), + getFilteredRowModel: getFilteredRowModel(), + getPaginationRowModel: getPaginationRowModel(), + getSortedRowModel: getSortedRowModel(), + }); + + return ( + + + + Employees + + + + + + + + + + + + + + + + ); +} diff --git a/packages/typegen/web/src/components/dialog/default.tsx b/packages/typegen/web/src/components/dialog/default.tsx new file mode 100644 index 00000000..a96f5240 --- /dev/null +++ b/packages/typegen/web/src/components/dialog/default.tsx @@ -0,0 +1,92 @@ +import { useState } from 'react'; +import { Alert, AlertIcon, AlertTitle } from '@/components/ui/alert'; +import { Button } from '@/components/ui/button'; +import { + Dialog, + DialogBody, + DialogClose, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, + DialogTrigger, +} from '@/components/ui/dialog'; +import { Form, FormControl, FormDescription, FormField, FormItem, FormMessage } from '@/components/ui/form'; +import { Textarea } from 
'@/components/ui/textarea'; +import { zodResolver } from '@hookform/resolvers/zod'; +import { useDirection } from '@radix-ui/react-direction'; +import { RiCheckboxCircleFill } from '@remixicon/react'; +import { useForm } from 'react-hook-form'; +import { toast } from 'sonner'; +import { z } from 'zod'; + +export default function DialogDemo() { + const [open, setOpen] = useState(false); + const direction = useDirection(); + + const FormSchema = z.object({ + feedback: z.string().min(1, 'Feedback is required').max(200, 'Feedback cannot exceed 200 characters'), + }); + + const form = useForm>({ + resolver: zodResolver(FormSchema), + defaultValues: { feedback: '' }, + mode: 'onSubmit', + }); + + function onSubmit() { + toast.custom((t) => ( + toast.dismiss(t)}> + + + + Your feedback successfully submitted + + )); + + form.reset(); + setOpen(false); + } + + return ( + + + + + +
+ + + Suggest Idea + Describe your suggestion. + + + ( + + +