diff --git a/.cursor/plans/static-registry.md b/.cursor/plans/static-registry.md deleted file mode 100644 index 3c26a355..00000000 --- a/.cursor/plans/static-registry.md +++ /dev/null @@ -1,295 +0,0 @@ -This plan will give you a clear, step-by-step guide to building the static component registry within the existing "apps/docs" project. - ---- - -### **High-Level Plan: Phase 1 - Static Registry** - -The goal is to create a robust API for static components that is fully compatible with the `shadcn-cli` and can be tested thoroughly. - -### **1. The Data Layer: Defining the "Source of Truth"** - -This is the most critical part. A well-defined data structure will make the rest of the implementation smooth. - -#### **A. Directory Structure** - -The directory structure remains the same, providing a clean organization for your templates. - -``` -src/ -└── registry/ - ├── lib/ - │ ├── types.ts // NEW: Centralized type definitions - │ ├── validator.ts // Build-time validation script - │ └── utils.ts // File system and data transformation logic - └── templates/ - ├── button/ - │ ├── _meta.ts - │ └── button.tsx - └── icon/ - ├── _meta.ts - └── index.ts -``` - -#### **B. Type Definitions (`types.ts`)** - -Create a central file for your internal data types. This ensures consistency and provides excellent developer experience with TypeScript. - -```typescript -// src/registry/lib/types.ts -import { z } from "zod"; - -// Defines a single file within a template -export const templateFileSchema = z.object({ - sourceFileName: z.string(), - destinationPath: z.string(), -}); - -// Defines the metadata for a single template (_meta.ts) -export const templateMetadataSchema = z.object({ - name: z.string(), - type: z.literal("static"), // For Phase 1, we only allow 'static' - description: z.string(), - categories: z.array(z.enum(["component", "page", "utility", "hook"])), - files: z.array(templateFileSchema), -}); - -export type TemplateFile = z.infer; -export type TemplateMetadata = z.infer; -``` - -#### **C. Example Metadata (`_meta.ts`)** - -Here is how you would define a `button` component using the new types. - -```typescript -// src/registry/templates/button/_meta.ts -import type { TemplateMetadata } from "@/registry/lib/types"; - -export const meta: TemplateMetadata = { - name: "button", - type: "static", - description: "Displays a button or a link.", - categories: ["component"], - files: [ - { - // The name of the file within this directory - sourceFileName: "button.tsx", - // The path where the file will be placed in the user's project - destinationPath: "src/components/ui/button.tsx", - }, - ], -}; -``` - -### **2. The API Layer: Building the Registry with Next.js & Hono** - -This layer reads from your data source and exposes it in the Shadcn-compatible format. - -#### **A. API Route Handler (`route.ts`)** - -The Hono router remains the core of the API, providing flexibility for the future. - -```typescript -// src/app/api/registry/[...slug]/route.ts -import { Hono } from "hono"; -import { handle } from "hono/vercel"; -import { getRegistryIndex, getStaticComponent } from "@/registry/lib/utils"; - -export const runtime = "edge"; - -const app = new Hono().basePath("/api/registry"); - -// Serves the index of all available components -app.get("/index.json", async (c) => { - try { - const index = await getRegistryIndex(); - return c.json(index); - } catch (error) { - return c.json({ error: "Failed to fetch registry index." }, 500); - } -}); - -// Serves the data for a single component -// The :style param is part of the shadcn spec, we'll include it for compatibility -app.get("/:style/:name.json", async (c) => { - const { name } = c.req.param(); - try { - const component = await getStaticComponent(name); - if (!component) { - return c.json({ error: "Component not found." }, 404); - } - return c.json(component); - } catch (error) { - return c.json({ error: "Failed to fetch component." }, 500); - } -}); - -export const GET = handle(app); -``` - -#### **B. Registry Utilities (`utils.ts`)** - -These functions are updated to handle the new `sourceFileName` and `destinationPath` structure. - -```typescript -// src/registry/lib/utils.ts -import fs from "fs/promises"; -import path from "path"; -import type { TemplateMetadata } from "./types"; - -const templatesPath = path.join(process.cwd(), "src/registry/templates"); - -// Builds the index.json file -export async function getRegistryIndex() { - const componentDirs = await fs.readdir(templatesPath, { - withFileTypes: true, - }); - const index = []; - - for (const dir of componentDirs) { - if (dir.isDirectory()) { - const { meta }: { meta: TemplateMetadata } = await import( - `@/registry/templates/${dir.name}/_meta` - ); - index.push({ - name: meta.name, - type: meta.type, - categories: meta.categories, - files: meta.files.map((f) => f.destinationPath), // shadcn index uses the destination paths - }); - } - } - return index; -} - -// Builds the JSON for a single static component -export async function getStaticComponent(name: string) { - const { meta }: { meta: TemplateMetadata } = await import( - `@/registry/templates/${name}/_meta` - ); - - const componentFiles = await Promise.all( - meta.files.map(async (file) => { - const contentPath = path.join(templatesPath, name, file.sourceFileName); - const content = await fs.readFile(contentPath, "utf-8"); - return { - // The `name` key in the output should be the filename part of the destination - name: path.basename(file.destinationPath), - path: file.destinationPath, - content: content, // The critical content key - }; - }), - ); - - return { - name: meta.name, - type: meta.type, - files: componentFiles, - }; -} -``` - -#### **C. Build-Time Validation (`validator.ts`)** - -This script is crucial for preventing regressions. It should be run as part of your CI/CD pipeline or build process. - -```typescript -// src/registry/lib/validator.ts -import fs from "fs/promises"; -import path from "path"; -import { templateMetadataSchema } from "./types"; - -const templatesPath = path.join(process.cwd(), "src/registry/templates"); - -async function validateRegistry() { - console.log("🔍 Validating registry templates..."); - const componentDirs = await fs.readdir(templatesPath, { - withFileTypes: true, - }); - let errorCount = 0; - - for (const dir of componentDirs) { - if (dir.isDirectory()) { - const metaPath = path.join(templatesPath, dir.name, "_meta.ts"); - const { meta } = await import(metaPath); - - // 1. Validate metadata against Zod schema - const validationResult = templateMetadataSchema.safeParse(meta); - if (!validationResult.success) { - console.error(`❌ Invalid metadata in ${dir.name}/_meta.ts:`); - console.error(validationResult.error.flatten()); - errorCount++; - } - - // 2. Validate that all source files exist - for (const file of meta.files) { - const sourcePath = path.join( - templatesPath, - dir.name, - file.sourceFileName, - ); - try { - await fs.access(sourcePath); - } catch { - console.error( - `❌ Missing source file: ${file.sourceFileName} referenced in ${dir.name}/_meta.ts`, - ); - errorCount++; - } - } - } - } - - if (errorCount > 0) { - console.error(`\nValidation failed with ${errorCount} error(s).`); - process.exit(1); // Fail the build - } else { - console.log("✅ Registry validation successful!"); - } -} - -validateRegistry(); -``` - -To run this, add a script to your `package.json`: - -```json -{ - "scripts": { - "build": "npm run registry:validate && next build", - "registry:validate": "node src/registry/lib/validator.ts" - } -} -``` - -### **3. Testing with Vitest** - -Your tests should confirm that the API output adheres to the Shadcn spec. - -```typescript -// src/app/api/registry/route.test.ts -import { describe, it, expect, vi } from "vitest"; -// You will need to mock the `utils.ts` functions to test the API routes in isolation. - -vi.mock("@/registry/lib/utils", () => ({ - getRegistryIndex: vi.fn(), - getStaticComponent: vi.fn(), -})); - -describe("Registry API - Phase 1", () => { - it("GET /api/registry/index.json should return a valid index", async () => { - // Mock the return value of getRegistryIndex - // Make a request to the endpoint - // Assert that the response contains `name`, `type`, `categories`, and `files` (as an array of strings). - }); - - it("GET /api/registry/default/button.json should return a valid component", async () => { - // Mock the return value of getStaticComponent - // Make a request to the endpoint - // Assert that the top-level response has `name`, `type`, and `files`. - // Assert that each object in the `files` array has `name`, `path`, and `content`. - }); -}); -``` - -This detailed plan for Phase 1 provides a robust, testable, and scalable foundation. By focusing on data integrity and API compatibility first, you set yourself up for success when implementing dynamic components and authentication later. diff --git a/.vscode/last.sql b/.vscode/last.sql new file mode 100644 index 00000000..e69de29b diff --git a/package.json b/package.json index eceac07a..1f103314 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,7 @@ "version-packages": "changeset version", "sherif": "pnpm dlx sherif@latest", "sherif:fix": "pnpm sherif --fix", - "release": "pnpm build --filter={./packages/*} && changeset publish", + "release": "turbo run build --filter={./packages/*} && changeset publish", "test": "vitest", "knip": "knip" }, diff --git a/packages/cli/package.json b/packages/cli/package.json index 39edc231..14636c7d 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@proofkit/cli", - "version": "2.0.0-beta.4", + "version": "2.0.0-beta.6", "description": "Create web application with the ProofKit stack", "license": "MIT", "repository": { @@ -94,6 +94,7 @@ "@prisma/adapter-planetscale": "^5.14.0", "@prisma/client": "^5.14.0", "@proofkit/registry": "workspace:*", + "@rollup/plugin-replace": "^6.0.3", "@t3-oss/env-nextjs": "^0.10.1", "@tanstack/react-query": "^5.49.2", "@trpc/client": "11.0.0-rc.441", diff --git a/packages/cli/tsdown.config.ts b/packages/cli/tsdown.config.ts index 9bdf8362..2b3b22b6 100644 --- a/packages/cli/tsdown.config.ts +++ b/packages/cli/tsdown.config.ts @@ -1,8 +1,11 @@ import path from "path"; import { fileURLToPath } from "url"; +import replacePlugin from "@rollup/plugin-replace"; import fsExtra from "fs-extra"; import { defineConfig } from "tsdown"; +const replace = replacePlugin.default ?? replacePlugin; + const { readJSONSync } = fsExtra; const __dirname = path.dirname(fileURLToPath(import.meta.url)); @@ -38,12 +41,17 @@ export default defineConfig({ // Keep Node.js built-in module imports as-is for better compatibility nodeProtocol: false, // Inject package versions and registry URL at build time - define: { - __FMDAPI_VERSION__: JSON.stringify(FMDAPI_VERSION), - __BETTER_AUTH_VERSION__: JSON.stringify(BETTER_AUTH_VERSION), - __REGISTRY_URL__: JSON.stringify( - isDev ? "http://localhost:3005" : "https://proofkit.dev" - ), - }, + plugins: [ + replace({ + preventAssignment: true, + values: { + __FMDAPI_VERSION__: JSON.stringify(FMDAPI_VERSION), + __BETTER_AUTH_VERSION__: JSON.stringify(BETTER_AUTH_VERSION), + __REGISTRY_URL__: JSON.stringify( + isDev ? "http://localhost:3005" : "https://proofkit.dev" + ), + }, + }), + ], onSuccess: isDev ? "node dist/index.js" : undefined, }); diff --git a/packages/fmodata/CHANGELOG.md b/packages/fmodata/CHANGELOG.md new file mode 100644 index 00000000..53d16662 --- /dev/null +++ b/packages/fmodata/CHANGELOG.md @@ -0,0 +1,5 @@ +# @proofkit/fmodata + +## 0.0.0 + +Initial setup of the package. diff --git a/packages/fmodata/IMPLEMENTATION_SUMMARY.md b/packages/fmodata/IMPLEMENTATION_SUMMARY.md new file mode 100644 index 00000000..79798795 --- /dev/null +++ b/packages/fmodata/IMPLEMENTATION_SUMMARY.md @@ -0,0 +1,275 @@ +# ORM API Implementation Summary + +## Overview + +Successfully implemented a Drizzle-inspired ORM API for fmodata that provides enhanced type safety and developer experience while maintaining full compatibility with the existing API. + +## Completed Features + +### ✅ 1. Field Builder System (`src/orm/field-builders.ts`) + +Created a fluent field builder API with: + +- **Field Types**: `textField()`, `numberField()`, `dateField()`, `timeField()`, `timestampField()`, `containerField()`, `calcField()` +- **Chainable Methods**: + - `.primaryKey()` - Mark as primary key (auto read-only) + - `.notNull()` - Make non-nullable + - `.readOnly()` - Exclude from insert/update + - `.entityId(id)` - Assign FileMaker field ID + - `.readValidator(validator)` - Transform/validate data when reading from database + - `.writeValidator(validator)` - Transform/validate data when writing to database + +### ✅ 2. Column Reference System (`src/orm/column.ts`) + +Created `Column` class that: + +- Carries type information for TypeScript inference +- Stores field name, entity ID, table name, and table entity ID +- Provides methods to get identifiers (field/table) +- Supports both field names and entity IDs +- Includes `isColumn()` type guard + +### ✅ 3. Filter Operators (`src/orm/operators.ts`) + +Implemented comprehensive operator functions: + +**Comparison**: `eq()`, `ne()`, `gt()`, `gte()`, `lt()`, `lte()` +**String**: `contains()`, `startsWith()`, `endsWith()` +**Array**: `inArray()`, `notInArray()` +**Null**: `isNull()`, `isNotNull()` +**Logical**: `and()`, `or()`, `not()` +**OrderBy**: `asc()`, `desc()` - Create OrderByExpression for type-safe sorting + +Features: + +- Support column-to-value comparisons +- Support column-to-column comparisons (cross-table) +- Convert to OData filter syntax +- Handle entity ID transformation +- Proper SQL escaping (single quotes) + +### ✅ 4. Table Occurrence Factory (`src/orm/table.ts`) + +Created `fmTableOccurrence()` function that: + +- Takes field builders as input +- Generates Zod schema automatically (output and input schemas) +- Creates Column references for each field +- Extracts metadata (primary key, required, read-only, entity IDs) +- Supports `navigationPaths` for runtime validation of expand/navigate operations +- Supports `defaultSelect` option ("all", "schema", or function) for automatic field selection +- Returns object with both metadata (via Symbols) and column accessors + +### ✅ 5. Query Builder Updates (`src/client/query-builder.ts`) + +Enhanced QueryBuilder to support: + +**Select Method**: + +- Accepts object with Column references for type-safe field selection +- `.select({ id: users.id, name: users.name })` ✓ +- Supports field renaming: `.select({ userId: users.id, userName: users.name })` ✓ +- String-based select still supported via legacy API + +**Where Method**: + +- New `.where()` method accepts FilterExpression +- Converts operator expressions to OData syntax +- Respects `useEntityIds` setting + +**OrderBy Method**: + +- Accepts Column references, OrderByExpression, or strings +- `.orderBy(users.name)` ✓ (single column, ascending by default) +- `.orderBy([users.name, "asc"])` ✓ (single column with direction) +- `.orderBy(asc(users.name), desc(users.age))` ✓ (variadic with helpers) +- `.orderBy([[users.name, "asc"], [users.createdAt, "desc"]])` ✓ (array syntax) + +### ✅ 6. Navigation Validation (`src/client/builders/expand-builder.ts`, `src/client/entity-set.ts`, `src/client/record-builder.ts`) + +Added runtime validation for navigation operations: + +- Validates `expand()` operations using `getNavigationPaths()` helper +- Validates `navigate()` operations in EntitySet and RecordBuilder +- Checks if relation name is in table's `navigationPaths` array +- Throws descriptive error if invalid path is attempted +- Works with new ORM table occurrences +- Backward compatible with old API + +### ✅ 7. Default Select Feature (`src/client/entity-set.ts`, `src/client/builders/default-select.ts`) + +Implemented automatic field selection based on table configuration: + +- `defaultSelect: "all"` - Select all fields (default behavior) +- `defaultSelect: "schema"` - Select only fields defined in schema +- `defaultSelect: (columns) => {...}` - Custom function to select specific columns +- Automatically applied in `list()` and `get()` if no explicit `select()` is called + +### ✅ 8. Documentation + +Created comprehensive documentation: + +- **`docs/ORM_API.md`**: Complete API guide with examples +- **`scripts/dreams.ts`**: Updated with working examples +- **`tests/orm-api.test.ts`**: Test suite covering all features + +### ✅ 9. Exports (`src/index.ts`, `src/orm/index.ts`) + +Updated exports to include: + +- All field builder functions +- Column and operator types/functions +- fmTableOccurrence function +- Proper TypeScript types + +## Key Design Decisions + +### 1. Query Order: `from().select().where()` + +Kept the existing pattern (not Drizzle's `select().from()`) for consistency and single-table query ergonomics. + +### 2. Select Syntax + +Support both string-based and column-based selection: + +- String-based (legacy): `select("id", "name")` - variadic string arguments +- Column-based (new ORM): `select({ id: users.id, name: users.name })` - object with column references, supports field renaming + +### 3. Navigation Validation + +Simple `navigationPaths: string[]` array with runtime validation when expanding/navigating. Uses `getNavigationPaths()` helper to access paths from FMTable. Throws descriptive error if relation name is not in paths. + +### 4. Cross-Table Operations + +Operators support column-to-column comparisons: `eq(users.id, contacts.id_user)` + +### 5. Default Select + +Tables can define `defaultSelect` option to automatically select fields when `list()` or `get()` is called without explicit `select()`. Supports "all", "schema", or custom function. + +### 6. Backward Compatibility + +New API coexists with old API. Both exported from main package. No breaking changes. + +## File Structure + +``` +src/ +├── orm/ +│ ├── field-builders.ts # Field builder classes and factories +│ ├── column.ts # Column reference type +│ ├── operators.ts # Filter and OrderBy operator functions +│ ├── table.ts # fmTableOccurrence function and FMTable class +│ └── index.ts # Barrel exports +├── client/ +│ ├── query/ +│ │ └── query-builder.ts # Enhanced with Column/operator support +│ ├── builders/ +│ │ ├── expand-builder.ts # Expand logic with navigation validation +│ │ └── default-select.ts # Default select helper functions +│ ├── entity-set.ts # EntitySet with defaultSelect support +│ └── ... # Other existing files +└── index.ts # Main exports (old + new API) + +docs/ +└── ORM_API.md # Complete API documentation + +scripts/ +└── dreams.ts # Updated with working examples + +tests/ +└── orm-api.test.ts # Test suite for new API +``` + +## Usage Example + +```typescript +import { + fmTableOccurrence, + textField, + numberField, + timestampField, + eq, + and, + gt, + asc, + FMServerConnection, +} from "@proofkit/fmodata"; +import { z } from "zod/v4"; + +// Define table with field builders +const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().notNull().entityId("FMFID:2"), + age: numberField().entityId("FMFID:3"), + status: textField() + .readValidator(z.enum(["active", "pending", "inactive"])) + .entityId("FMFID:4"), + createdAt: timestampField().readOnly().entityId("FMFID:5"), + }, + { + entityId: "FMTID:100", + navigationPaths: ["contacts"], + }, +); + +// Connect +const connection = new FMServerConnection({ + serverUrl: "https://api.example.com", + auth: { apiKey: "key" }, +}); +const db = connection.database("MyDB.fmp12"); + +// Query with new API +const result = await db + .from(users) + .list() + .select({ + id: users.id, + name: users.name, + age: users.age, + }) + .where(and(eq(users.status, "active"), gt(users.age, 18))) + .orderBy(asc(users.name)) + .execute(); +``` + +## Type Safety Benefits + +1. **Enum Autocomplete**: `eq(users.status, "active")` - "active" autocompletes from enum validator +2. **Column Type Checking**: Operators validate value types against column types +3. **Select Field Validation**: Column references provide type-safe field selection with renaming support +4. **Cross-Table Safety**: Column references carry table information for validation +5. **Navigation Validation**: Runtime checks ensure valid expand/navigate paths +6. **Insert/Update Type Safety**: Read-only fields automatically excluded, required fields enforced +7. **Input/Output Transformation**: Separate validators for reading (readValidator) and writing (writeValidator) + +## Implementation Status + +✅ All core features completed: + +1. ✅ Field builders with read/write validators +2. ✅ Column references with type safety +3. ✅ Filter operators (comparison, string, array, null, logical) +4. ✅ OrderBy operators (asc, desc) +5. ✅ Table factory (fmTableOccurrence) with Symbol-based metadata +6. ✅ Query builder updates (select, where, orderBy) +7. ✅ Navigation validation (expand, navigate) +8. ✅ Default select feature + +✅ No linting errors +✅ Documentation complete +✅ Tests written +✅ Examples updated + +## Next Steps (Optional) + +Potential future enhancements: + +1. Add more operator types (between, like with wildcards, etc.) +2. Support for aggregate functions (count, sum, avg, etc.) +3. Type-safe joins (if OData supports them) +4. Schema migration helpers +5. Code generation from FileMaker metadata diff --git a/packages/fmodata/README.md b/packages/fmodata/README.md new file mode 100644 index 00000000..13950147 --- /dev/null +++ b/packages/fmodata/README.md @@ -0,0 +1,1647 @@ +# @proofkit/fmodata Documentation + +A strongly-typed FileMaker OData API client. + +⚠️ WARNING: This library is in "alpha" status. It's still in active development and the API is subject to change. Feedback is welcome on the [community forum](https://community.ottomatic.cloud/c/proofkit/13) or on [GitHub](https://github.com/proofgeist/proofkit/issues). + +Roadmap: + +- [ ] Crossjoin support +- [x] Batch operations + - [ ] Automatically chunk requests into smaller batches (e.g. max 512 inserts per batch) +- [x] Schema updates (add/update tables and fields) +- [ ] Proper docs at proofkit.dev +- [ ] @proofkit/typegen integration + +## Installation + +```bash +pnpm add @proofkit/fmodata@alpha +``` + +## Quick Start + +Here's a minimal example to get you started: + +```typescript +import { + FMServerConnection, + fmTableOccurrence, + textField, + numberField, + eq, +} from "@proofkit/fmodata"; +import { z } from "zod/v4"; + +// 1. Create a connection to the server +const connection = new FMServerConnection({ + serverUrl: "https://your-server.com", + auth: { + // OttoFMS API key + apiKey: "your-api-key", + + // or username and password + // username: "admin", + // password: "password", + }, +}); + +// 2. Define your table schema using field builders +const users = fmTableOccurrence("users", { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField().notNull(), + active: numberField() + .readValidator(z.coerce.boolean()) + .writeValidator(z.boolean().transform((v) => (v ? 1 : 0))), +}); + +// 3. Create a database instance +const db = connection.database("MyDatabase.fmp12"); + +// 4. Query your data +const { data, error } = await db.from(users).list().execute(); + +if (error) { + console.error(error); + return; +} + +if (data) { + console.log(data); // Array of users, properly typed +} +``` + +## Core Concepts + +This library relies heavily on the builder pattern for defining your queries and operations. Most operations require a final call to `execute()` to send the request to the server. The builder pattern allows you to build complex queries and also supports batch operations, allowing you to execute multiple operations in a single request as supported by the FileMaker OData API. It's also helpful for testing the library, as you can call `getQueryString()` to get the OData query string without executing the request. + +As such, there are layers to the library to help you build your queries and operations. + +- `FMServerConnection` - hold server connection details and authentication +- `FMTable` (created via `fmTableOccurrence()`) - defines the fields, validators, and metadata for a table occurrence +- `Database` - connects the table occurrences to the server connection + +### FileMaker Server prerequisites + +To use this library you need: + +- OData service enabled on your FileMaker server +- A FileMaker account with `fmodata` privilege enabled +- (if using OttoFMS) a Data API key setup for your FileMaker account with OData enabled + +A note on best practices: + +OData relies entirely on the table occurances in the relationship graph for data access. Relationships between table occurrences are also used, but maybe not as you expect (in short, only the simplest relationships are supported). Given these constraints, it may be best for you to have a seperate FileMaker file for your OData connection, using external data sources to link to your actual data file. We've found this especially helpful for larger projects that have very large graphs with lots of redundant table occurances compared to actual number of base tables. + +### Server Connection + +The client can authenticate using username/password or API key: + +```typescript +// Username and password authentication +const connection = new FMServerConnection({ + serverUrl: "https://api.example.com", + auth: { + username: "test", + password: "test", + }, +}); + +// API key authentication +const connection = new FMServerConnection({ + serverUrl: "https://api.example.com", + auth: { + apiKey: "your-api-key", + }, +}); +``` + +### Schema Definitions + +This library relies on a schema-first approach for good type-safety and optional runtime validation. Use **`fmTableOccurrence()`** with field builders to create your schemas. This provides full TypeScript type inference for field names in queries. + +#### Field Builders + +Field builders provide a fluent API for defining table fields with type-safe metadata. These field types map directly to the FileMaker field types + +- `textField()` +- `numberField()` +- `dateField()` +- `timeField()` +- `timestampField()` +- `containerField()` +- `calcField()` + +Each field builder supports chainable methods: + +- `.primaryKey()` - Mark as primary key (automatically read-only) +- `.notNull()` - Make field non-nullable (required for inserts) +- `.readOnly()` - Exclude from insert/update operations +- `.entityId(id)` - Assign FileMaker field ID (FMFID), allowing your API calls to survive FileMaker name changes +- `.readValidator(validator)` - Transform/validate data when reading from database +- `.writeValidator(validator)` - Transform/validate data when writing to database + +#### Defining Tables + +Use `fmTableOccurrence()` to define a table with field builders: + +```typescript +import { z } from "zod/v4"; +import { + fmTableOccurrence, + textField, + numberField, + timestampField, +} from "@proofkit/fmodata"; + +const contacts = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().notNull().entityId("FMFID:2"), + email: textField().notNull().entityId("FMFID:3"), + phone: textField().entityId("FMFID:4"), // Optional (nullable by default) + createdAt: timestampField().readOnly().entityId("FMFID:5"), + }, + { + entityId: "FMTID:100", // Optional: FileMaker table occurrence ID + defaultSelect: "schema", // Optional: "all", "schema", or function. Defaults to "schema". + navigationPaths: ["users"], // Optional: valid navigation targets to provide type-errors when navigating/expanding + }, +); +``` + +The function returns a table object that provides: + +- Column references for each field (e.g., `contacts.id`, `contacts.name`) +- Type-safe schema for queries and operations +- Metadata stored via Symbols (hidden from IDE autocomplete) + +#### Default Field Selection + +FileMaker will automatically return all non-container fields from a schema if you don't specify a $select parameter in your query. This library allows you to configure default field selection behavior using the `defaultSelect` option: + +```typescript +// Option 1 (default): "schema" - Select all fields from the schema +const users = fmTableOccurrence( + "users", + { + /* fields */ + }, + { + defaultSelect: "schema", // A $select parameter will always be added for only the fields defined in the schema + }, +); + +// Option 2: "all" - Select all fields (FileMaker default behavior) +const users = fmTableOccurrence( + "users", + { + /* fields */ + }, + { + defaultSelect: "all", // No $select parameter by default; FileMaker returns all non-container fields + }, +); + +// Option 3: Function - Select specific columns by default +const users = fmTableOccurrence( + "users", + { + /* fields */ + }, + { + defaultSelect: (cols) => ({ + username: cols.username, + email: cols.email, + }), // Only select these fields by default + }, +); + +// When you call list() or get(), the defaultSelect is applied automatically +const result = await db.from(users).list().execute(); +// If defaultSelect is a function returning { username, email }, result.data will only contain those fields + +// You can still override with explicit select() +const result = await db + .from(users) + .list() + .select({ username: users.username, email: users.email, age: users.age }) // Always overrides at the per-request level + .execute(); +``` + +## Querying Data + +### Basic Queries + +Use `list()` to retrieve multiple records: + +```typescript +// Get all users +const result = await db.from("users").list().execute(); + +if (result.data) { + result.data.forEach((user) => { + console.log(user.username); + }); +} +``` + +Get a specific record by ID: + +```typescript +const result = await db.from("users").get("user-123").execute(); + +if (result.data) { + console.log(result.data.username); +} +``` + +Get a single field value: + +```typescript +const result = await db + .from(users) + .get("user-123") + .getSingleField(users.email) + .execute(); + +if (result.data) { + console.log(result.data); // "user@example.com" +} +``` + +### Filtering + +fmodata provides type-safe filter operations that prevent common errors at compile time. You can use either the new ORM-style API with operators and column references, or the legacy filter API. + +#### New ORM-Style API (Recommended) + +Use the `where()` method with filter operators and column references for type-safe filtering: + +```typescript +import { eq, gt, and, or, contains } from "@proofkit/fmodata"; + +// Simple equality +const result = await db + .from(users) + .list() + .where(eq(users.active, true)) + .execute(); + +// Comparison operators +const result = await db.from(users).list().where(gt(users.age, 18)).execute(); + +// String operators +const result = await db + .from(users) + .list() + .where(contains(users.name, "John")) + .execute(); + +// Combine with AND +const result = await db + .from(users) + .list() + .where(and(eq(users.active, true), gt(users.age, 18))) + .execute(); + +// Combine with OR +const result = await db + .from(users) + .list() + .where(or(eq(users.role, "admin"), eq(users.role, "moderator"))) + .execute(); +``` + +Available operators: + +- **Comparison**: `eq()`, `ne()`, `gt()`, `gte()`, `lt()`, `lte()` +- **String**: `contains()`, `startsWith()`, `endsWith()` +- **Array**: `inArray()`, `notInArray()` +- **Null**: `isNull()`, `isNotNull()` +- **Logical**: `and()`, `or()`, `not()` + +### Sorting + +Sort results using `orderBy()`. The method supports both column references (new ORM API) and string field names (legacy API). + +#### Using Column References (New ORM API) + +```typescript +import { asc, desc } from "@proofkit/fmodata"; + +// Single field (ascending by default) +const result = await db.from(users).list().orderBy(users.name).execute(); + +// Single field with explicit direction +const result = await db.from(users).list().orderBy(asc(users.name)).execute(); +const result = await db.from(users).list().orderBy(desc(users.age)).execute(); + +// Multiple fields (variadic) +const result = await db + .from(users) + .list() + .orderBy(asc(users.lastName), desc(users.firstName)) + .execute(); + +// Multiple fields (array syntax) +const result = await db + .from(users) + .list() + .orderBy([ + [users.lastName, "asc"], + [users.firstName, "desc"], + ]) + .execute(); +``` + +#### Type Safety + +For typed databases, `orderBy()` provides full type safety: + +```typescript +// ✅ Valid - "name" is a field in the schema +db.from(users).list().orderBy(users.name); + +// ✅ Valid - tuple with field and direction +db.from(users).list().orderBy(asc(users.name)); +db.from(users).list().orderBy(desc(users.name)); + +// ✅ Valid - multiple fields +db.from(users).list().orderBy(asc(users.lastName), desc(users.firstName)); +``` + +### Pagination + +Control the number of records returned and pagination: + +```typescript +// Limit results +const result = await db.from(users).list().top(10).execute(); + +// Skip records (pagination) +const result = await db.from(users).list().top(10).skip(20).execute(); + +// Count total records +const result = await db.from(users).list().count().execute(); +``` + +### Selecting Fields + +Select specific fields to return. You can use either column references (new ORM API) or string field names (legacy API): + +```typescript +// New ORM API: Using column references (type-safe, supports renaming) +const result = await db + .from(users) + .list() + .select({ + username: users.username, + email: users.email, + userId: users.id, // Renamed from "id" to "userId" + }) + .execute(); + +// result.data[0] will only have username and email fields +``` + +### Single Records + +Use `single()` to ensure exactly one record is returned (returns an error if zero or multiple records are found): + +```typescript +const result = await db + .from(users) + .list() + .where(eq(users.email, "user@example.com")) + .single() + .execute(); + +if (result.data) { + // result.data is a single record, not an array + console.log(result.data.username); +} +``` + +Use `maybeSingle()` when you want at most one record (returns `null` if no record is found, returns an error if multiple records are found): + +```typescript +const result = await db + .from(users) + .list() + .where(eq(users.email, "user@example.com")) + .maybeSingle() + .execute(); + +if (result.data) { + // result.data is a single record or null + console.log(result.data?.username); +} else { + // No record found - result.data would be null + console.log("User not found"); +} +``` + +**Difference between `single()` and `maybeSingle()`:** + +- `single()` - Requires exactly one record. Returns an error if zero or multiple records are found. +- `maybeSingle()` - Allows zero or one record. Returns `null` if no record is found, returns an error only if multiple records are found. + +### Chaining Methods + +All query methods can be chained together: + +```typescript +// Using new ORM API +const result = await db + .from(users) + .list() + .select({ + username: users.username, + email: users.email, + age: users.age, + }) + .where(gt(users.age, 18)) + .orderBy(asc(users.username)) + .top(10) + .skip(0) + .execute(); +``` + +## CRUD Operations + +### Insert + +Insert new records with type-safe data: + +```typescript +// Insert a new user +const result = await db + .from(users) + .insert({ + username: "johndoe", + email: "john@example.com", + active: true, + }) + .execute(); + +if (result.data) { + console.log("Created user:", result.data); +} +``` + +Fields are automatically required for insert if they use `.notNull()`. Read-only fields (including primary keys) are automatically excluded: + +```typescript +const users = fmTableOccurrence("users", { + id: textField().primaryKey(), // Auto-required, but excluded from insert (primaryKey) + username: textField().notNull(), // Auto-required (notNull) + email: textField().notNull(), // Auto-required (notNull) + phone: textField(), // Optional by default (nullable) + createdAt: timestampField().readOnly(), // Excluded from insert/update +}); + +// TypeScript enforces: username and email are required +// TypeScript excludes: id and createdAt cannot be provided +const result = await db + .from(users) + .insert({ + username: "johndoe", + email: "john@example.com", + phone: "+1234567890", // Optional + }) + .execute(); +``` + +### Update + +Update records by ID or filter: + +```typescript +// Update by ID +const result = await db + .from(users) + .update({ username: "newname" }) + .byId("user-123") + .execute(); + +if (result.data) { + console.log(`Updated ${result.data.updatedCount} record(s)`); +} + +// Update by filter (using new ORM API) +import { lt, and, eq } from "@proofkit/fmodata"; + +const result = await db + .from(users) + .update({ active: false }) + .where(lt(users.lastLogin, "2023-01-01")) + .execute(); + +// Complex filter example +const result = await db + .from(users) + .update({ active: false }) + .where(and(eq(users.active, true), lt(users.count, 5))) + .execute(); + +// Update with additional query options (legacy filter API) +const result = await db + .from("users") + .update({ active: false }) + .where((q) => q.where(eq(users.active, true)).top(10)) + .execute(); +``` + +### Delete + +Delete records by ID or filter: + +```typescript +// Delete by ID +const result = await db.from(users).delete().byId("user-123").execute(); + +if (result.data) { + console.log(`Deleted ${result.data.deletedCount} record(s)`); +} + +// Delete by filter (using new ORM API) +import { eq, and, lt } from "@proofkit/fmodata"; + +const result = await db + .from(users) + .delete() + .where(eq(users.active, false)) + .execute(); + +// Delete with complex filters +const result = await db + .from(users) + .delete() + .where(and(eq(users.active, false), lt(users.lastLogin, "2023-01-01"))) + .execute(); +``` + +## Navigation & Relationships + +### Defining Navigation + +Define navigation relationships using the `navigationPaths` option when creating table occurrences: + +```typescript +import { fmTableOccurrence, textField } from "@proofkit/fmodata"; + +const contacts = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey(), + name: textField().notNull(), + userId: textField().notNull(), + }, + { + navigationPaths: ["users"], // Valid navigation targets + }, +); + +const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField().notNull(), + }, + { + navigationPaths: ["contacts"], // Valid navigation targets + }, +); + +// Use with your database +const db = connection.database("MyDB", { + occurrences: [contacts, users], +}); +``` + +The `navigationPaths` option: + +- Specifies which table occurrences can be navigated to from this table +- Enables runtime validation when using `expand()` or `navigate()` +- Throws descriptive errors if you try to navigate to an invalid path + +### Navigating Between Tables + +Navigate to related records: + +```typescript +// Navigate from a specific record (using column references) +const result = await db + .from(contacts) + .get("contact-123") + .navigate(users) + .select({ + username: users.username, + email: users.email, + }) + .execute(); + +// Navigate without specifying a record first +const result = await db.from(contacts).navigate(users).list().execute(); + +// Using legacy API with string field names +const result = await db + .from(contacts) + .get("contact-123") + .navigate(users) + .select({ username: users.username, email: users.email }) + .execute(); +``` + +### Expanding Related Records + +Use `expand()` to include related records in your query results. The library validates that the target table is in the source table's `navigationPaths`: + +```typescript +// Simple expand +const result = await db.from(contacts).list().expand(users).execute(); + +// Expand with field selection (using column references) +const result = await db + .from(contacts) + .list() + .expand(users, (b) => + b.select({ + username: users.username, + email: users.email, + }), + ) + .execute(); + +// Expand with filtering (using new ORM API) +import { eq } from "@proofkit/fmodata"; + +const result = await db + .from(contacts) + .list() + .expand(users, (b) => b.where(eq(users.active, true))) + .execute(); + +// Multiple expands +const result = await db + .from(contacts) + .list() + .expand(users, (b) => b.select({ username: users.username })) + .expand(orders, (b) => b.select({ total: orders.total }).top(5)) + .execute(); + +// Nested expands +const result = await db + .from(contacts) + .list() + .expand(users, (usersBuilder) => + usersBuilder + .select({ + username: users.username, + email: users.email, + }) + .expand(customers, (customerBuilder) => + customerBuilder.select({ + name: customers.name, + tier: customers.tier, + }), + ), + ) + .execute(); + +// Complex expand with multiple options +const result = await db + .from(contacts) + .list() + .expand(users, (b) => + b + .select({ + username: users.username, + email: users.email, + }) + .where(eq(users.active, true)) + .orderBy(asc(users.username)) + .top(10) + .expand(customers, (nested) => nested.select({ name: customers.name })), + ) + .execute(); +``` + +## Running Scripts + +Execute FileMaker scripts via OData: + +```typescript +// Simple script execution +const result = await db.runScript("MyScriptName"); + +console.log(result.resultCode); // Script result code +console.log(result.result); // Optional script result string + +// Pass parameters to script +const result = await db.runScript("MyScriptName", { + scriptParam: "some value", +}); + +// Script parameters can be strings, numbers, or objects +const result = await db.runScript("ProcessOrder", { + scriptParam: { + orderId: "12345", + action: "approve", + }, // Will be JSON stringified +}); + +// Validate script result with Zod schema +// NOTE: Your validator must be able to parse a string. +// See Zod codecs for how to build a jsonCodec function that does this +// https://zod.dev/codecs?id=jsonschema + +const schema = jsonCodec( + z.object({ + success: z.boolean(), + message: z.string(), + recordId: z.string(), + }), +); + +const result = await db.runScript("CreateRecord", { + resultSchema: schema, +}); + +// result.result is now typed based on your schema +console.log(result.result.recordId); +``` + +**Note:** OData doesn't support script names with special characters (e.g., `@`, `&`, `/`) or script names beginning with a number. TypeScript will catch these at compile time. + +## Batch Operations + +Batch operations allow you to execute multiple queries and operations together in a single request. All operations in a batch are executed atomically - they all succeed or all fail together. This is both more efficient (fewer network round-trips) and ensures data consistency across related operations. + +### Batch Result Structure + +Batch operations return a `BatchResult` object that contains individual results for each operation. Each result has its own `data`, `error`, and `status` properties, allowing you to handle success and failure on a per-operation basis: + +```typescript +type BatchItemResult = { + data: T | undefined; + error: FMODataErrorType | undefined; + status: number; // HTTP status code (0 for truncated operations) +}; + +type BatchResult = { + results: { [K in keyof T]: BatchItemResult }; + successCount: number; + errorCount: number; + truncated: boolean; // true if FileMaker stopped processing due to an error + firstErrorIndex: number | null; // Index of the first operation that failed +}; +``` + +### Basic Batch with Multiple Queries + +Execute multiple read operations in a single batch: + +```typescript +// Create query builders +const contactsQuery = db.from(contacts).list().top(5); +const usersQuery = db.from(users).list().top(5); + +// Execute both queries in a single batch +const result = await db.batch([contactsQuery, usersQuery]).execute(); + +// Access individual results +const [r1, r2] = result.results; + +if (r1.error) { + console.error("Contacts query failed:", r1.error); +} else { + console.log("Contacts:", r1.data); +} + +if (r2.error) { + console.error("Users query failed:", r2.error); +} else { + console.log("Users:", r2.data); +} + +// Check summary statistics +console.log(`Success: ${result.successCount}, Errors: ${result.errorCount}`); +``` + +### Mixed Operations (Reads and Writes) + +Combine queries, inserts, updates, and deletes in a single batch: + +```typescript +// Mix different operation types +const listQuery = db.from(contacts).list().top(10); +const insertOp = db.from(contacts).insert({ + name: "John Doe", + email: "john@example.com", +}); +const updateOp = db.from(users).update({ active: true }).byId("user-123"); + +// All operations execute atomically +const result = await db.batch([listQuery, insertOp, updateOp]).execute(); + +// Access individual results +const [r1, r2, r3] = result.results; + +if (r1.error) { + console.error("List query failed:", r1.error); +} else { + console.log("Fetched contacts:", r1.data); +} + +if (r2.error) { + console.error("Insert failed:", r2.error); +} else { + console.log("Inserted contact:", r2.data); +} + +if (r3.error) { + console.error("Update failed:", r3.error); +} else { + console.log("Updated user:", r3.data); +} +``` + +### Handling Errors in Batches + +When FileMaker encounters an error in a batch operation, it **stops processing** subsequent operations. Operations that were never executed due to an earlier error will have a `BatchTruncatedError`: + +```typescript +import { BatchTruncatedError, isBatchTruncatedError } from "@proofkit/fmodata"; + +const result = await db.batch([query1, query2, query3]).execute(); + +const [r1, r2, r3] = result.results; + +// First operation succeeded +if (r1.error) { + console.error("First query failed:", r1.error); +} else { + console.log("First query succeeded:", r1.data); +} + +// Second operation failed +if (r2.error) { + console.error("Second query failed:", r2.error); + console.log("HTTP Status:", r2.status); // e.g., 404 +} + +// Third operation was never executed (truncated) +if (r3.error && isBatchTruncatedError(r3.error)) { + console.log("Third operation was not executed"); + console.log(`Failed at operation ${r3.error.failedAtIndex}`); + console.log(`This operation index: ${r3.error.operationIndex}`); + console.log("Status:", r3.status); // 0 (never executed) +} + +// Check if batch was truncated +if (result.truncated) { + console.log(`Batch stopped early at index ${result.firstErrorIndex}`); +} +``` + +### Transactional Behavior + +Batch operations are transactional for write operations (inserts, updates, deletes). If any operation in the batch fails, all write operations are rolled back: + +```typescript +const result = await db + .batch([ + db.from(users).insert({ username: "alice", email: "alice@example.com" }), + db.from(users).insert({ username: "bob", email: "bob@example.com" }), + db.from(users).insert({ username: "charlie", email: "invalid" }), // This fails + ]) + .execute(); + +// Check individual results +const [r1, r2, r3] = result.results; + +if (r1.error || r2.error || r3.error) { + // All three inserts are rolled back - no users were created + console.error("Batch had errors:"); + if (r1.error) console.error("Operation 1:", r1.error); + if (r2.error) console.error("Operation 2:", r2.error); + if (r3.error) console.error("Operation 3:", r3.error); +} +``` + +### Important Notes + +- **FileMaker stops on first error**: When an error occurs, FileMaker stops processing subsequent operations in the batch. Truncated operations will have `BatchTruncatedError` with `status: 0`. +- **Insert operations in batches**: FileMaker ignores `Prefer: return=representation` in batch requests. Insert operations return `{}` or `{ ROWID?: number }` instead of the full created record. +- **All results are always defined**: Every operation in the batch will have a corresponding result in `result.results`, even if it was never executed (truncated operations). +- **Summary statistics**: Use `result.successCount`, `result.errorCount`, `result.truncated`, and `result.firstErrorIndex` for quick batch status checks. + +**Note:** Batch operations automatically group write operations (POST, PATCH, DELETE) into changesets for transactional behavior, while read operations (GET) are executed individually within the batch. + +## Schema Management + +The library provides methods for managing database schema through the `db.schema` property. You can create and delete tables, add and remove fields, and manage indexes. + +### Creating Tables + +Create a new table with field definitions: + +```typescript +import type { Field } from "@proofkit/fmodata"; + +const fields: Field[] = [ + { + name: "id", + type: "string", + primary: true, + maxLength: 36, + }, + { + name: "username", + type: "string", + nullable: false, + unique: true, + maxLength: 50, + }, + { + name: "email", + type: "string", + nullable: false, + maxLength: 255, + }, + { + name: "age", + type: "numeric", + nullable: true, + }, + { + name: "created_at", + type: "timestamp", + default: "CURRENT_TIMESTAMP", + }, +]; + +const tableDefinition = await db.schema.createTable("users", fields); +console.log(tableDefinition.tableName); // "users" +console.log(tableDefinition.fields); // Array of field definitions +``` + +### Field Types + +The library supports various field types: + +**String Fields:** + +```typescript +{ + name: "username", + type: "string", + maxLength: 100, // Optional: varchar(100) + nullable: true, + unique: true, + default: "USER" | "USERNAME" | "CURRENT_USER", // Optional + repetitions: 5, // Optional: for repeating fields +} +``` + +**Numeric Fields:** + +```typescript +{ + name: "age", + type: "numeric", + nullable: true, + primary: false, + unique: false, +} +``` + +**Date Fields:** + +```typescript +{ + name: "birth_date", + type: "date", + default: "CURRENT_DATE" | "CURDATE", // Optional + nullable: true, +} +``` + +**Time Fields:** + +```typescript +{ + name: "start_time", + type: "time", + default: "CURRENT_TIME" | "CURTIME", // Optional + nullable: true, +} +``` + +**Timestamp Fields:** + +```typescript +{ + name: "created_at", + type: "timestamp", + default: "CURRENT_TIMESTAMP" | "CURTIMESTAMP", // Optional + nullable: false, +} +``` + +**Container Fields:** + +```typescript +{ + name: "avatar", + type: "container", + externalSecurePath: "/secure/path", // Optional + nullable: true, +} +``` + +### Adding Fields to Existing Tables + +Add new fields to an existing table: + +```typescript +const newFields: Field[] = [ + { + name: "phone", + type: "string", + nullable: true, + maxLength: 20, + }, + { + name: "bio", + type: "string", + nullable: true, + maxLength: 1000, + }, +]; + +const updatedTable = await db.schema.addFields("users", newFields); +``` + +### Deleting Tables and Fields + +Delete an entire table: + +```typescript +await db.schema.deleteTable("old_table"); +``` + +Delete a specific field from a table: + +```typescript +await db.schema.deleteField("users", "old_field"); +``` + +### Managing Indexes + +Create an index on a field: + +```typescript +const index = await db.schema.createIndex("users", "email"); +console.log(index.indexName); // "email" +``` + +Delete an index: + +```typescript +await db.schema.deleteIndex("users", "email"); +``` + +### Complete Example + +Here's a complete example of creating a table with various field types: + +```typescript +const fields: Field[] = [ + // Primary key + { + name: "id", + type: "string", + primary: true, + maxLength: 36, + }, + + // String fields + { + name: "username", + type: "string", + nullable: false, + unique: true, + maxLength: 50, + }, + { + name: "email", + type: "string", + nullable: false, + maxLength: 255, + }, + + // Numeric field + { + name: "age", + type: "numeric", + nullable: true, + }, + + // Date/time fields + { + name: "birth_date", + type: "date", + nullable: true, + }, + { + name: "created_at", + type: "timestamp", + default: "CURRENT_TIMESTAMP", + nullable: false, + }, + + // Container field + { + name: "avatar", + type: "container", + nullable: true, + }, + + // Repeating field + { + name: "tags", + type: "string", + repetitions: 5, + maxLength: 50, + }, +]; + +// Create the table +const table = await db.schema.createTable("users", fields); + +// Later, add more fields +await db.schema.addFields("users", [ + { + name: "phone", + type: "string", + nullable: true, + }, +]); + +// Create an index on email +await db.schema.createIndex("users", "email"); +``` + +**Note:** Schema management operations require appropriate access privileges on your FileMaker account. Operations will throw errors if you don't have the necessary permissions. + +## Advanced Features + +### Required and Read-Only Fields + +The library automatically infers which fields are required based on field builder configuration: + +```typescript +const users = fmTableOccurrence("users", { + id: textField().primaryKey(), // Auto-required, auto-readOnly (primaryKey) + username: textField().notNull(), // Auto-required (notNull) + email: textField().notNull(), // Auto-required (notNull) + status: textField(), // Optional (nullable by default) + createdAt: timestampField().readOnly(), // Read-only system field + updatedAt: timestampField(), // Optional (nullable) +}); + +// Insert: username and email are required +// Insert: id and createdAt are excluded (cannot be provided - read-only) +db.from(users).insert({ + username: "john", + email: "john@example.com", + status: "active", // Optional + updatedAt: new Date().toISOString(), // Optional +}); + +// Update: all fields are optional except id and createdAt are excluded +db.from(users) + .update({ + status: "active", // Optional + // id and createdAt cannot be modified (read-only) + }) + .byId("user-123"); +``` + +**Key Features:** + +- **Auto-inference:** Fields with `.notNull()` are automatically required for insert +- **Primary keys:** Fields with `.primaryKey()` are automatically read-only +- **Read-only fields:** Use `.readOnly()` to exclude fields from insert/update (e.g., timestamps, calculated fields) +- **Update flexibility:** All fields are optional for updates (except read-only fields) + +### Prefer: fmodata.entity-ids + +This library supports using FileMaker's internal field identifiers (FMFID) and table occurrence identifiers (FMTID) instead of names. This protects your integration from both field and table occurrence name changes. + +To enable this feature, simply define your schema with entity IDs using the `.entityId()` method on field builders and the `entityId` option in `fmTableOccurrence()`. Behind the scenes, the library will transform your request and the response back to the names you specify in your schema. This is an all-or-nothing feature. For it to work properly, you must define all table occurrences passed to a `Database` with entity IDs (both field IDs via `.entityId()` and table ID via the `entityId` option). + +_Note for OttoFMS proxy: This feature requires version 4.14 or later of OttoFMS_ + +How do I find these ids? They can be found in the XML version of the `$metadata` endpoint for your database, or you can calculate them using these [custom functions](https://github.com/rwu2359/CFforID) from John Renfrew + +#### Basic Usage + +```typescript +import { + fmTableOccurrence, + textField, + timestampField, +} from "@proofkit/fmodata"; + +// Define a table with FileMaker field IDs and table occurrence ID +const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:12039485"), + username: textField().notNull().entityId("FMFID:34323433"), + email: textField().entityId("FMFID:12232424"), + createdAt: timestampField().readOnly().entityId("FMFID:43234355"), + }, + { + entityId: "FMTID:12432533", // FileMaker table occurrence ID + }, +); +``` + +### Error Handling + +All operations return a `Result` type with either `data` or `error`. The library provides rich error types that help you handle different error scenarios appropriately. + +#### Basic Error Checking + +```typescript +const result = await db.from(users).list().execute(); + +if (result.error) { + console.error("Query failed:", result.error.message); + return; +} + +if (result.data) { + console.log("Query succeeded:", result.data); +} +``` + +#### HTTP Errors + +Handle HTTP status codes (4xx, 5xx) with the `HTTPError` class: + +```typescript +import { HTTPError, isHTTPError } from "@proofkit/fmodata"; + +const result = await db.from(users).list().execute(); + +if (result.error) { + if (isHTTPError(result.error)) { + // TypeScript knows this is HTTPError + console.log("HTTP Status:", result.error.status); + + if (result.error.isNotFound()) { + console.log("Resource not found"); + } else if (result.error.isUnauthorized()) { + console.log("Authentication required"); + } else if (result.error.is5xx()) { + console.log("Server error - try again later"); + } else if (result.error.is4xx()) { + console.log("Client error:", result.error.statusText); + } + + // Access the response body if available + if (result.error.response) { + console.log("Error details:", result.error.response); + } + } +} +``` + +#### Network Errors + +Handle network-level errors (timeouts, connection issues, etc.): + +```typescript +import { + TimeoutError, + NetworkError, + RetryLimitError, + CircuitOpenError, +} from "@proofkit/fmodata"; + +const result = await db.from(users).list().execute(); + +if (result.error) { + if (result.error instanceof TimeoutError) { + console.log("Request timed out"); + // Show user-friendly timeout message + } else if (result.error instanceof NetworkError) { + console.log("Network connectivity issue"); + // Show offline message + } else if (result.error instanceof RetryLimitError) { + console.log("Request failed after retries"); + // Log the underlying error: result.error.cause + } else if (result.error instanceof CircuitOpenError) { + console.log("Service is currently unavailable"); + // Show maintenance message + } +} +``` + +#### Validation Errors + +When schema validation fails, you get a `ValidationError` with rich context: + +```typescript +import { ValidationError, isValidationError } from "@proofkit/fmodata"; + +const result = await db.from(users).list().execute(); + +if (result.error) { + if (isValidationError(result.error)) { + // Access validation issues (Standard Schema format) + console.log("Validation failed for field:", result.error.field); + console.log("Issues:", result.error.issues); + console.log("Failed value:", result.error.value); + } +} +``` + +**Validator-Agnostic Error Handling** + +The library uses [Standard Schema](https://github.com/standard-schema/standard-schema) to support any validation library (Zod, Valibot, ArkType, etc.). Following the same pattern as [uploadthing](https://github.com/pingdotgg/uploadthing), the `ValidationError.cause` property contains the normalized Standard Schema issues array: + +```typescript +import { ValidationError } from "@proofkit/fmodata"; + +const result = await db.from(users).list().execute(); + +if (result.error instanceof ValidationError) { + // The cause property (ES2022 Error.cause) contains the Standard Schema issues array + // This is validator-agnostic and works with Zod, Valibot, ArkType, etc. + console.log("Validation issues:", result.error.cause); + console.log("Issues are also available directly:", result.error.issues); + + // Both point to the same array + console.log(result.error.cause === result.error.issues); // true + + // Access additional context + console.log("Failed field:", result.error.field); + console.log("Failed value:", result.error.value); + + // Standard Schema issues have a normalized format + result.error.issues.forEach((issue) => { + console.log("Path:", issue.path); + console.log("Message:", issue.message); + }); +} +``` + +**Why Standard Schema Issues Instead of Original Validator Errors?** + +By using Standard Schema's normalized issue format in the `cause` property, the library remains truly validator-agnostic. All validation libraries that implement Standard Schema (Zod, Valibot, ArkType, etc.) produce the same issue structure, making error handling consistent regardless of which validator you choose. + +If you need validator-specific error formatting, you can still access your validator's methods during validation before the data reaches fmodata: + +```typescript +import { z } from "zod"; + +const userSchema = z.object({ + email: z.string().email(), + age: z.number().min(0).max(150), +}); + +// Validate early if you need Zod-specific error handling +const parseResult = userSchema.safeParse(userData); +if (!parseResult.success) { + // Use Zod's error formatting + const formatted = parseResult.error.flatten(); + console.log("Zod-specific formatting:", formatted); +} +``` + +#### OData Errors + +Handle OData-specific protocol errors: + +```typescript +import { ODataError, isODataError } from "@proofkit/fmodata"; + +const result = await db.from(users).list().execute(); + +if (result.error) { + if (isODataError(result.error)) { + console.log("OData Error Code:", result.error.code); + console.log("OData Error Message:", result.error.message); + console.log("OData Error Details:", result.error.details); + } +} +``` + +#### Error Handling Patterns + +**Pattern 1: Using instanceof (like ffetch):** + +```typescript +import { + HTTPError, + ValidationError, + TimeoutError, + NetworkError, +} from "@proofkit/fmodata"; + +const result = await db.from(users).list().execute(); + +if (result.error) { + if (result.error instanceof TimeoutError) { + showTimeoutMessage(); + } else if (result.error instanceof HTTPError) { + if (result.error.isNotFound()) { + showNotFoundMessage(); + } else if (result.error.is5xx()) { + showServerErrorMessage(); + } + } else if (result.error instanceof ValidationError) { + showValidationError(result.error.field, result.error.issues); + } else if (result.error instanceof NetworkError) { + showOfflineMessage(); + } +} +``` + +**Pattern 2: Using kind property (for exhaustive matching):** + +```typescript +const result = await db.from(users).list().execute(); + +if (result.error) { + switch (result.error.kind) { + case "TimeoutError": + showTimeoutMessage(); + break; + case "HTTPError": + handleHTTPError(result.error.status); + break; + case "ValidationError": + showValidationError(result.error.field, result.error.issues); + break; + case "NetworkError": + showOfflineMessage(); + break; + case "ODataError": + handleODataError(result.error.code); + break; + // TypeScript ensures exhaustive matching! + } +} +``` + +**Pattern 3: Using type guards:** + +```typescript +import { + isHTTPError, + isValidationError, + isODataError, + isNetworkError, +} from "@proofkit/fmodata"; + +const result = await db.from("users").list().execute(); + +if (result.error) { + if (isHTTPError(result.error)) { + // TypeScript knows this is HTTPError + console.log("Status:", result.error.status); + } else if (isValidationError(result.error)) { + // TypeScript knows this is ValidationError + console.log("Field:", result.error.field); + console.log("Issues:", result.error.issues); + } else if (isODataError(result.error)) { + // TypeScript knows this is ODataError + console.log("Code:", result.error.code); + } else if (isNetworkError(result.error)) { + // TypeScript knows this is NetworkError + console.log("Network issue:", result.error.cause); + } +} +``` + +#### Error Properties + +All errors include helpful metadata: + +```typescript +if (result.error) { + // All errors have a timestamp + console.log("Error occurred at:", result.error.timestamp); + + // All errors have a kind property for discriminated unions + console.log("Error kind:", result.error.kind); + + // All errors have a message + console.log("Error message:", result.error.message); +} +``` + +#### Available Error Types + +- **`HTTPError`** - HTTP status errors (4xx, 5xx) with helper methods (`is4xx()`, `is5xx()`, `isNotFound()`, etc.) +- **`ODataError`** - OData protocol errors with code and details +- **`ValidationError`** - Schema validation failures with issues, schema reference, and failed value +- **`ResponseStructureError`** - Malformed API responses +- **`RecordCountMismatchError`** - When `single()` or `maybeSingle()` expectations aren't met +- **`TimeoutError`** - Request timeout (from ffetch) +- **`NetworkError`** - Network connectivity issues (from ffetch) +- **`RetryLimitError`** - Request failed after retries (from ffetch) +- **`CircuitOpenError`** - Circuit breaker is open (from ffetch) +- **`AbortError`** - Request was aborted (from ffetch) + +### OData Annotations and Validation + +By default, the library automatically strips OData annotations fields (`@id` and `@editLink`) from responses. If you need these fields, you can include them by passing `includeODataAnnotations: true`: + +```typescript +const result = await db.from("users").list().execute({ + includeODataAnnotations: true, +}); +``` + +You can also skip runtime validation by passing `skipValidation: true`. + +```typescript +const result = await db.from("users").list().execute({ + skipValidation: true, +}); + +// Response is returned without schema validation +``` + +**Note:** Skipping validation means the response won't be validated OR transformed against your schema, so you lose runtime type safety guarantees. Use with caution. + +### Custom Fetch Handlers + +You can provide custom fetch handlers for testing or custom networking: + +```typescript +const customFetch = async (url, options) => { + console.log("Fetching:", url); + return fetch(url, options); +}; + +const result = await db.from("users").list().execute({ + fetchHandler: customFetch, +}); +``` + +## Testing + +The library supports testing with custom fetch handlers. You can create mock fetch functions to return test data: + +```typescript +const mockResponse = { + "@odata.context": "...", + value: [ + { id: "1", username: "john", email: "john@example.com" }, + { id: "2", username: "jane", email: "jane@example.com" }, + ], +}; + +const mockFetch = async () => { + return new Response(JSON.stringify(mockResponse), { + status: 200, + headers: { "content-type": "application/json" }, + }); +}; + +const result = await db.from("users").list().execute({ + fetchHandler: mockFetch, +}); + +expect(result.data).toHaveLength(2); +expect(result.data[0].username).toBe("john"); +``` + +You can also inspect query strings without executing: + +```typescript +const queryString = db + .from("users") + .list() + .select("username", "email") + .where(eq(users.active, true)) + .orderBy("username") + .top(10) + .getQueryString(); + +console.log(queryString); +// Output: "/users?$select=username,email&$filter=active eq true&$orderby=username&$top=10" +``` diff --git a/packages/fmodata/REFACTORING_SUMMARY.md b/packages/fmodata/REFACTORING_SUMMARY.md new file mode 100644 index 00000000..5a2ea7f6 --- /dev/null +++ b/packages/fmodata/REFACTORING_SUMMARY.md @@ -0,0 +1,323 @@ +# Refactoring Summary: Decoupling Table Schema from Client + +## Overview + +This refactoring separates table schema definitions from execution context (connection details), making the architecture more composable and suitable for code generation. + +## Key Changes + +### 1. New `ExecutionContext` Interface + +```typescript +export interface ExecutionContext { + _makeRequest(url: string, options?: RequestInit): Promise; +} +``` + +**Purpose**: Defines the contract for executing requests. `FileMakerOData` implements this interface. + +**Benefits**: + +- Enables dependency injection +- Makes testing easier (can create mock implementations) +- Allows different execution strategies + +### 2. New `TableDefinition` Class + +```typescript +export class TableDefinition { + public readonly name: string; + public readonly schema?: Schema; + + constructor(config: { name: string; schema?: Schema }) { + this.name = config.name; + this.schema = config.schema; + } +} +``` + +**Purpose**: Holds pure table metadata (name and schema) without any connection details. + +**Benefits**: + +- Completely decoupled from database name and connection +- Can be generated by code generators +- Reusable across different databases and servers +- Uses named arguments for clarity + +### 3. Refactored `Table` Class + +**Before**: + +```typescript +class Table { + constructor({ + databaseName, + name, + client, + schema, + }: { + databaseName: string; + name: string; + client: FileMakerOData; + schema?: Schema; + }) { + // ... + } +} +``` + +**After**: + +```typescript +export class Table { + constructor(config: { + definition: TableDefinition; + databaseName: string; + context: ExecutionContext; + }) { + // ... + } +} +``` + +**Changes**: + +- Now accepts a `TableDefinition` instead of raw name/schema +- Accepts `ExecutionContext` instead of concrete `FileMakerOData` client +- Uses named arguments pattern +- Exported for direct usage + +### 4. Refactored `QueryBuilder` and `RecordBuilder` + +**Before**: + +```typescript +constructor({ + databaseName, + tableName, + client, +}: { + databaseName: string; + tableName: string; + client: FileMakerOData; +}) +``` + +**After**: + +```typescript +constructor(config: { + definition: TableDefinition; + databaseName: string; + context: ExecutionContext; +}) +``` + +**Changes**: + +- Now use `TableDefinition` instead of string `tableName` +- Accept `ExecutionContext` instead of concrete client +- Access table name via `definition.name` +- Use named arguments pattern + +### 5. Updated `Database` Class + +**Before**: + +```typescript +table(name: string, config?: { schema: Schema }) { + return new Table({ + databaseName: this.name, + name, + client: this.client, + schema: config?.schema, + }); +} +``` + +**After**: + +```typescript +table(name: string, config?: { schema: Schema }) { + const definition = new TableDefinition({ name, schema: config?.schema }); + return new Table({ + definition, + databaseName: this.name, + context: this.context, + }); +} +``` + +**Changes**: + +- Creates `TableDefinition` instances internally +- Passes `ExecutionContext` instead of concrete client +- Database name is provided at runtime, not stored in definition + +## Architecture Diagram + +``` +Before: +┌─────────────────┐ +│ FileMakerOData │ +│ (Client) │ +└────────┬────────┘ + │ + ├──► Database ──► Table (holds client reference) + │ │ + │ ├──► QueryBuilder (holds client) + │ └──► RecordBuilder (holds client) + │ + └──► Tightly coupled to connection details + +After: +┌─────────────────┐ +│ TableDefinition │ ← Pure metadata (can be generated) +│ (name, schema) │ +└────────┬────────┘ + │ + │ ┌─────────────────┐ + │ │ ExecutionContext│ ← Interface for execution + │ │ (FileMaker │ + │ │ implements) │ + │ └────────┬────────┘ + │ │ + └─────┬───────┘ + │ + ┌────▼─────┐ + │ Table │ ← Composition of definition + context + │ │ + ├──► QueryBuilder (uses definition + context) + └──► RecordBuilder (uses definition + context) + +Decoupled: Schema definitions separate from execution +``` + +## Backward Compatibility + +✅ **The public API remains identical.** + +Existing code continues to work without changes: + +```typescript +const client = new FileMakerOData({ serverUrl, auth }); +const db = client.database("MyDB"); +const table = db.table("Users", { schema }); +const results = await table.select("id", "name").execute(); +``` + +## New Capabilities Enabled + +### 1. Direct Table Instantiation + +```typescript +const tableDef = new TableDefinition({ + name: "Users", + schema: mySchema, +}); + +const table = new Table({ + definition: tableDef, + databaseName: "MyDB", + context: client, +}); +``` + +### 2. Reuse Definitions Across Contexts + +```typescript +const tableDef = new TableDefinition({ name: "Users", schema }); + +const table1 = new Table({ + definition: tableDef, + databaseName: "DB1", + context: client1, +}); +const table2 = new Table({ + definition: tableDef, + databaseName: "DB2", + context: client2, +}); +``` + +### 3. Mock Testing + +```typescript +class MockContext implements ExecutionContext { + async _makeRequest(url: string) { + return { value: [{ id: 1, name: "Test" }] }; + } +} + +const table = new Table({ + definition: tableDef, + databaseName: "TestDB", + context: new MockContext(), +}); +``` + +### 4. Code Generation + +```typescript +// Generated code (no connection details) +export const UsersTable = new TableDefinition({ + name: "Users", + schema: z.object({ + id: z.number(), + name: z.string(), + email: z.string().email(), + }), +}); + +// Runtime usage +const table = new Table({ + definition: UsersTable, + databaseName: config.dbName, + context: client, +}); +``` + +## Files Modified + +- ✅ `/packages/fmodata/src/index.ts` - Core refactoring +- ✅ `/packages/fmodata/tests/basic.test.ts` - Added new tests + +## Files Created + +- ✅ `/packages/fmodata/CODEGEN_EXAMPLE.md` - Code generation examples +- ✅ `/packages/fmodata/REFACTORING_SUMMARY.md` - This file + +## Testing + +New tests added: + +1. ✅ Standalone `TableDefinition` creation +2. ✅ Using `TableDefinition` with `Table` class directly +3. ✅ Reusing definitions across different execution contexts + +All existing tests pass without modification (backward compatible). + +## Next Steps for Code Generation + +1. Create a code generator that reads FileMaker metadata +2. Generate `TableDefinition` instances with full Zod schemas +3. Export definitions from a generated file +4. Users import and use generated definitions with their clients + +## Benefits Summary + +| Aspect | Before | After | +| --------------- | ---------------------------------- | ------------------------------------ | +| **Coupling** | Table tightly coupled to client | Table uses interface abstraction | +| **Reusability** | Table bound to single connection | Table definition reusable everywhere | +| **Testing** | Hard to mock (needs real client) | Easy to mock (interface-based) | +| **Code Gen** | Must generate full Table instances | Generate pure TableDefinition | +| **Clarity** | Positional arguments | Named arguments | +| **Composition** | Monolithic | Composable layers | + +## Migration Impact + +✅ **Zero breaking changes** - All existing code continues to work as-is. + +The refactoring is a pure internal improvement that enables new use cases without affecting existing functionality. diff --git a/packages/fmodata/docs/ORM_API.md b/packages/fmodata/docs/ORM_API.md new file mode 100644 index 00000000..2e8fd651 --- /dev/null +++ b/packages/fmodata/docs/ORM_API.md @@ -0,0 +1,415 @@ +# New ORM API (Drizzle-Inspired) + +The new ORM API provides a Drizzle-inspired interface for defining tables and building queries with enhanced type safety and developer experience. + +## Key Features + +- **Field Builders**: Fluent API for defining fields with metadata (primary keys, entity IDs, validators) +- **Column References**: Type-safe column references for queries (`users.id`, `users.name`) +- **Filter Operators**: Standalone operator functions (`eq()`, `gt()`, `and()`, `or()`) +- **Support for Both Styles**: Works with both typed strings AND column references +- **Cross-Table Comparisons**: Compare columns across tables (`eq(users.id, contacts.id_user)`) +- **Runtime Navigation Validation**: Validates expand paths against `navigationPaths` + +## Table Definition + +### Basic Table + +```typescript +import { fmTableOccurrence, textField, numberField, timestampField } from "@proofkit/fmodata"; + +export const users = fmTableOccurrence("users", { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().notNull().entityId("FMFID:6"), + email: textField().entityId("FMFID:7"), + age: numberField().entityId("FMFID:8"), + CreationTimestamp: timestampField().readOnly().entityId("FMFID:2"), +}, { + entityId: "FMTID:100", + defaultSelect: "schema", + navigationPaths: ["contacts", "orders"], +}); +``` + +### Field Builder Methods + +All field builders support these chainable methods: + +- `.primaryKey()` - Mark as primary key (automatically read-only) +- `.notNull()` - Mark as non-nullable +- `.readOnly()` - Exclude from insert/update operations +- `.entityId(id)` - Assign FileMaker field ID (FMFID) +- `.outputValidator(validator)` - Transform data when reading (FM → your app) +- `.inputValidator(validator)` - Transform data when writing (your app → FM) + +### Available Field Types + +```typescript +textField() // string | null +numberField() // number | null +dateField() // string | null (ISO date) +timeField() // string | null (ISO time) +timestampField() // string | null (ISO 8601) +containerField() // string | null (base64) +calcField() // string | null (auto read-only) +``` + +### Custom Field Types with Validators + +```typescript +import { z } from "zod/v4"; + +// Boolean field (FM stores as 0/1) +const booleanField = () => + numberField() + .outputValidator(z.coerce.boolean()) + .inputValidator(z.boolean().transform(v => v ? 1 : 0)); + +// Enum field +const statusField = () => + textField() + .outputValidator(z.enum(["active", "pending", "inactive"])); + +// Use in table definition +const users = fmTableOccurrence("users", { + active: booleanField().entityId("FMFID:7"), + status: statusField().entityId("FMFID:8"), +}, { entityId: "FMTID:100" }); +``` + +## Querying + +### Select - Multiple Syntax Options + +```typescript +import { eq } from "@proofkit/fmodata"; + +// Option 1: Typed strings (original style) +db.from(users).select("id", "name", "email"); + +// Option 2: Column references (new capability) +db.from(users).select(users.id, users.name, users.email); + +// Option 3: Mix both styles +db.from(users).select(users.id, "name", users.email); +``` + +### Filter with Operators + +#### Comparison Operators + +```typescript +import { eq, ne, gt, gte, lt, lte } from "@proofkit/fmodata"; + +// Equal +db.from(users).where(eq(users.status, "active")); + +// Not equal +db.from(users).where(ne(users.status, "deleted")); + +// Greater than / Greater than or equal +db.from(users).where(gt(users.age, 18)); +db.from(users).where(gte(users.age, 18)); + +// Less than / Less than or equal +db.from(users).where(lt(users.age, 65)); +db.from(users).where(lte(users.age, 65)); +``` + +#### String Operators + +```typescript +import { contains, startsWith, endsWith } from "@proofkit/fmodata"; + +// Contains substring +db.from(users).where(contains(users.name, "John")); + +// Starts with prefix +db.from(users).where(startsWith(users.email, "admin")); + +// Ends with suffix +db.from(users).where(endsWith(users.email, "@example.com")); +``` + +#### Array Operators + +```typescript +import { inArray, notInArray } from "@proofkit/fmodata"; + +// Value in array +db.from(users).where(inArray(users.status, ["active", "pending"])); + +// Value not in array +db.from(users).where(notInArray(users.status, ["deleted", "banned"])); +``` + +#### Null Checks + +```typescript +import { isNull, isNotNull } from "@proofkit/fmodata"; + +// Is null +db.from(users).where(isNull(users.deletedAt)); + +// Is not null +db.from(users).where(isNotNull(users.email)); +``` + +#### Logical Operators + +```typescript +import { and, or, not, eq, gt } from "@proofkit/fmodata"; + +// AND - all conditions must be true +db.from(users).where( + and( + eq(users.active, true), + gt(users.age, 18) + ) +); + +// OR - at least one condition must be true +db.from(users).where( + or( + eq(users.role, "admin"), + eq(users.role, "moderator") + ) +); + +// NOT - negate a condition +db.from(users).where( + not(eq(users.status, "deleted")) +); + +// Complex combinations +db.from(users).where( + and( + eq(users.active, true), + or( + eq(users.role, "admin"), + and( + eq(users.role, "user"), + gt(users.age, 18) + ) + ) + ) +); +``` + +### Cross-Table Column Comparisons + +```typescript +// Compare columns from different tables +db.from(users) + .select(users.id, users.name) + .where(eq(users.id, contacts.id_user)); + +// Works with any comparison operator +db.from(orders) + .where(gt(orders.total, users.credit_limit)); +``` + +### Order By + +```typescript +// With strings +db.from(users).orderBy("name"); +db.from(users).orderBy(["name", "asc"]); +db.from(users).orderBy([["name", "asc"], ["createdAt", "desc"]]); + +// With Column references +db.from(users).orderBy(users.name); +db.from(users).orderBy([users.name, "asc"]); +db.from(users).orderBy([[users.name, "asc"], [users.createdAt, "desc"]]); +``` + +## Navigation & Expansion + +Navigation paths are defined in the table definition and validated at runtime: + +```typescript +const users = fmTableOccurrence("users", { + // ... fields +}, { + navigationPaths: ["contacts", "orders"], // Valid paths +}); + +// Valid expansion (contacts is in navigationPaths) +db.from(users) + .expand(contacts, (q) => q.select("name", "email")) + .execute(); + +// Error: "Cannot expand to 'invoices'. Valid navigation paths: contacts, orders" +db.from(users) + .expand(invoices, (q) => q.select("id")) // Runtime error! + .execute(); +``` + +## Type Inference + +The new API provides excellent type inference: + +```typescript +// users.id is Column +type UserId = typeof users.id; + +// users.hobby is Column<"reading" | "writing" | "coding", "hobby"> +// (inferred from the enum validator) +type UserHobby = typeof users.hobby; + +// Filter values are type-checked +eq(users.hobby, "reading") // ✓ Valid - "reading" is in enum +eq(users.hobby, "invalid") // ✗ Type error - not in enum + +// Select fields are type-checked +db.from(users).select("id", "name") // ✓ Valid +db.from(users).select("invalid") // ✗ Type error +``` + +## Migration from Old API + +The new ORM API coexists with the old API. Both are exported from `@proofkit/fmodata`: + +```typescript +// Old API (still works) +import { defineBaseTable, defineTableOccurrence } from "@proofkit/fmodata"; + +// New API +import { fmTableOccurrence, textField, eq } from "@proofkit/fmodata"; +``` + +### Key Differences + +| Feature | Old API | New API | +|---------|---------|---------| +| Table Definition | `defineBaseTable` + `defineTableOccurrence` | `fmTableOccurrence` | +| Schema | Zod schemas in separate object | Field builders inline | +| Metadata | Separate `required`, `readOnly` arrays | Chainable methods on fields | +| Filters | Object syntax or typed strings | Operator functions | +| Select | Typed strings only | Typed strings OR column references | +| Navigation | Type-safe via `buildOccurrences()` | Runtime validation via `navigationPaths` | + +### Migration Example + +**Old API:** +```typescript +const usersBase = defineBaseTable({ + schema: { + id: z.string(), + name: z.string().nullable(), + active: z.coerce.boolean(), + }, + idField: "id", + readOnly: ["CreationTimestamp"], + fmfIds: { id: "FMFID:1", name: "FMFID:6" }, +}); + +const users = defineTableOccurrence({ + name: "users", + baseTable: usersBase, +}); + +// Query +db.from(users) + .select("id", "name") + .filter({ active: { eq: true } }); +``` + +**New API:** +```typescript +const users = fmTableOccurrence("users", { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:6"), + active: numberField() + .outputValidator(z.coerce.boolean()) + .inputValidator(z.boolean().transform(v => v ? 1 : 0)), + CreationTimestamp: timestampField().readOnly(), +}); + +// Query +db.from(users) + .select(users.id, users.name) + .where(eq(users.active, true)); +``` + +## Best Practices + +1. **Use Column References for Clarity**: `users.name` is more explicit than `"name"` +2. **Define Reusable Field Builders**: Extract common patterns like `booleanField()` +3. **Leverage Type Inference**: Let TypeScript infer types from validators +4. **Use Logical Operators**: Prefer `and()` / `or()` over nested objects +5. **Validate Navigation Paths**: Always define `navigationPaths` for type safety +6. **Combine Old and New APIs**: Use whichever feels better for each use case + +## Complete Example + +```typescript +import { + fmTableOccurrence, + textField, + numberField, + timestampField, + FMServerConnection, + eq, + and, + or, + gt, + contains, +} from "@proofkit/fmodata"; +import { z } from "zod/v4"; + +// Define tables +const users = fmTableOccurrence("users", { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().notNull().entityId("FMFID:2"), + email: textField().entityId("FMFID:3"), + age: numberField().entityId("FMFID:4"), + status: textField() + .outputValidator(z.enum(["active", "pending", "inactive"])) + .entityId("FMFID:5"), + createdAt: timestampField().readOnly().entityId("FMFID:6"), +}, { + entityId: "FMTID:100", + navigationPaths: ["orders"], +}); + +const orders = fmTableOccurrence("orders", { + id: textField().primaryKey().entityId("FMFID:10"), + user_id: textField().entityId("FMFID:11"), + total: numberField().entityId("FMFID:12"), + status: textField().entityId("FMFID:13"), +}, { + entityId: "FMTID:101", + navigationPaths: ["users"], +}); + +// Connect +const connection = new FMServerConnection({ + serverUrl: "https://api.example.com", + auth: { apiKey: "test-api-key" }, +}); +const db = connection.database("MyDatabase.fmp12"); + +// Query with new API +const result = await db + .from(users) + .select(users.id, users.name, users.email) + .where( + and( + or( + eq(users.status, "active"), + eq(users.status, "pending") + ), + gt(users.age, 18), + contains(users.email, "@example.com") + ) + ) + .orderBy([[users.name, "asc"], [users.createdAt, "desc"]]) + .top(50) + .execute(); + +if (result.data) { + console.log(`Found ${result.data.length} users`); +} +``` + diff --git a/packages/fmodata/op.env b/packages/fmodata/op.env new file mode 100644 index 00000000..09dca379 --- /dev/null +++ b/packages/fmodata/op.env @@ -0,0 +1,15 @@ +# __ _____ _ +# /_ | __ \ | | +# | | |__) |_ _ ___ _____ _____ _ __ __| | ___ _ ____ __ +# | | ___/ _` / __/ __\ \ /\ / / _ \| '__/ _` | / _ \ '_ \ \ / / +# | | | | (_| \__ \__ \\ V V / (_) | | | (_| || __/ | | \ V / +# |_|_| \__,_|___/___/ \_/\_/ \___/|_| \__,_(_)___|_| |_|\_/ +# +# This file is intentionally commited to source control. +# It should only reference secrets in 1Password + +FMODATA_SERVER_URL="op://xrs5sehh2gm36me62rlfpmsyde/fmdapi_test/1Password env Values/FM_SERVER" +FMODATA_DATABASE="op://xrs5sehh2gm36me62rlfpmsyde/fmdapi_test/1Password env Values/FM_DATABASE" +FMODATA_API_KEY="op://xrs5sehh2gm36me62rlfpmsyde/fmdapi_test/1Password env Values/OTTO_API_KEY" +FMODATA_USERNAME="op://xrs5sehh2gm36me62rlfpmsyde/fmdapi_test/username" +FMODATA_PASSWORD="op://xrs5sehh2gm36me62rlfpmsyde/fmdapi_test/password" \ No newline at end of file diff --git a/packages/fmodata/package.json b/packages/fmodata/package.json index adfe9c53..72e875e0 100644 --- a/packages/fmodata/package.json +++ b/packages/fmodata/package.json @@ -1,5 +1,83 @@ { - "name": "fmodata", - "version": "0.0.0", - "private": true -} \ No newline at end of file + "name": "@proofkit/fmodata", + "version": "0.1.0-alpha.19", + "description": "FileMaker OData API client", + "repository": "git@github.com:proofgeist/proofkit.git", + "author": "Eric <37158449+eluce2@users.noreply.github.com>", + "license": "MIT", + "private": false, + "type": "module", + "main": "./dist/esm/index.js", + "types": "./dist/esm/index.d.ts", + "exports": { + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + } + }, + "./package.json": "./package.json" + }, + "scripts": { + "build": "tsc && vite build && publint --strict", + "build:watch": "tsc && vite build --watch", + "check-format": "prettier --check .", + "format": "prettier --write .", + "dev": "tsc --watch", + "test": "vitest run --typecheck", + "tsc": "tsc --noEmit", + "test:typecheck": "vitest run --typecheck", + "test:watch": "vitest --typecheck", + "test:build": "pnpm build && TEST_BUILD=true vitest run --typecheck", + "test:watch:build": "TEST_BUILD=true vitest --typecheck", + "test:e2e": "op inject -i op.env -o .env.local -f && vitest run tests/e2e.test.ts", + "capture": "op inject -i op.env -o .env.local -f && tsx scripts/capture-responses.ts", + "knip": "knip", + "pub:alpha": "bun run scripts/publish-alpha.ts", + "global:link": "pnpm link --global" + }, + "dependencies": { + "@fetchkit/ffetch": "^4.2.0", + "dotenv": "^16.5.0", + "es-toolkit": "^1.38.0", + "neverthrow": "^8.2.0", + "odata-query": "^8.0.4" + }, + "peerDependencies": { + "zod": ">=4.0.0" + }, + "peerDependenciesMeta": { + "zod": { + "optional": true + } + }, + "devDependencies": { + "@standard-schema/spec": "^1.0.0", + "@tanstack/vite-config": "^0.2.0", + "@types/node": "^22.17.1", + "fast-xml-parser": "^5.3.2", + "prettier": "^3.5.3", + "publint": "^0.3.12", + "tsx": "^4.19.2", + "typescript": "^5.9.3", + "vite": "^6.3.4", + "vite-plugin-dts": "^4.5.4", + "vitest": "^4.0.7", + "zod": "4.1.12" + }, + "engines": { + "node": ">=18.0.0" + }, + "files": [ + "src", + "dist" + ], + "keywords": [ + "filemaker", + "fms", + "fm", + "odata", + "proofgeist", + "proofkit" + ] +} diff --git a/packages/fmodata/scripts/capture-responses.ts b/packages/fmodata/scripts/capture-responses.ts new file mode 100644 index 00000000..7fa66266 --- /dev/null +++ b/packages/fmodata/scripts/capture-responses.ts @@ -0,0 +1,681 @@ +/** + * Response Capture Script + * + * This script executes real queries against a live FileMaker OData server + * and captures the responses for use in mock tests. + * + * This script uses ffetch directly (not our library) to ensure raw API + * responses are captured without any transformations or processing. + * + * Setup: + * - Ensure you have a `.env.local` file with: + * - FMODATA_SERVER_URL + * - FMODATA_API_KEY + * - FMODATA_DATABASE + * + * Usage: + * pnpm capture + * + * How to add new queries to capture: + * 1. Add a new entry to the `queriesToCapture` array below + * 2. Each entry should have: + * - name: A descriptive name (used as the key in the fixtures file) + * - execute: A function that calls the client with a relative path (e.g., "/contacts?$top=5") + * 3. Run `pnpm capture` + * 4. The captured response will be automatically added to tests/fixtures/responses.ts + * + * Query names should be descriptive and follow a pattern like: + * - "list-basic" - Basic list query + * - "list-with-select" - List with $select + * - "filter-by-status" - List with $filter + * - "single-by-id" - Single record query + */ + +import path from "path"; +import { fileURLToPath } from "url"; +import { config } from "dotenv"; +import { writeFileSync } from "fs"; +import createClient from "@fetchkit/ffetch"; +import { MOCK_SERVER_URL } from "../tests/utils/mock-server-url"; + +// Get __dirname equivalent in ES modules +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// Load environment variables +config({ path: path.resolve(__dirname, "../.env.local") }); + +const serverUrl = process.env.FMODATA_SERVER_URL; +const apiKey = process.env.FMODATA_API_KEY; +const database = process.env.FMODATA_DATABASE; + +if (!serverUrl) { + throw new Error("FMODATA_SERVER_URL environment variable is required"); +} + +if (!apiKey) { + throw new Error("FMODATA_API_KEY environment variable is required"); +} + +if (!database) { + throw new Error("FMODATA_DATABASE environment variable is required"); +} + +// Type for captured response +type CapturedResponse = { + url: string; + method: string; + status: number; + headers?: { + "content-type"?: string; + location?: string; + }; + response: any; +}; + +// Storage for captured responses - maps query name to response +const capturedResponses: Record = {}; + +/** + * Sanitizes URLs by replacing the actual server domain with the mock server URL + * This ensures we don't store actual test server names in fixtures + */ +function sanitizeUrl(url: string, actualServerUrl: string): string { + try { + // Extract domain from serverUrl (handle both with and without protocol) + const serverUrlObj = new URL( + actualServerUrl.startsWith("http") + ? actualServerUrl + : `https://${actualServerUrl}`, + ); + const actualDomain = serverUrlObj.hostname; + + // Replace all occurrences of the actual domain with the mock domain + return url.replace( + new RegExp(actualDomain.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), "g"), + MOCK_SERVER_URL, + ); + } catch (e) { + // If URL parsing fails, return original + return url; + } +} + +/** + * Recursively sanitizes all URLs in a response object + */ +function sanitizeResponseData(data: any, actualServerUrl: string): any { + if (typeof data === "string") { + // Check if it's a URL and sanitize it + if (data.startsWith("http://") || data.startsWith("https://")) { + return sanitizeUrl(data, actualServerUrl); + } + return data; + } + + if (Array.isArray(data)) { + return data.map((item) => sanitizeResponseData(item, actualServerUrl)); + } + + if (data && typeof data === "object") { + const sanitized: any = {}; + for (const [key, value] of Object.entries(data)) { + sanitized[key] = sanitizeResponseData(value, actualServerUrl); + } + return sanitized; + } + + return data; +} + +/** + * Extended RequestInit that allows any body type (will be JSON stringified if object) + */ +type ClientRequestInit = Omit & { + body?: any; +}; + +/** + * Creates an ffetch client with Authorization header, baseUrl, and database configured + */ +function createAuthenticatedClient( + baseUrl: string, + database: string, + apiKey: string, +) { + const client = createClient(); + // Ensure baseUrl has no trailing slash + const cleanBaseUrl = baseUrl.replace(/\/+$/, ""); + // FileMaker OData API path: /otto/fmi/odata/v4/{database} when using API key auth + const basePath = `${cleanBaseUrl}/otto/fmi/odata/v4/${encodeURIComponent(database)}`; + return (path: string, init?: ClientRequestInit): Promise => { + // Ensure path starts with / + const fullPath = path.startsWith("/") ? path : `/${path}`; + const fullUrl = `${basePath}${fullPath}`; + + // Merge headers, ensuring Authorization is always present + const headers = { + Authorization: `Bearer ${apiKey}`, + ...(init?.headers || {}), + }; + + // If body is an object, stringify it and set Content-Type + let body: BodyInit | undefined = init?.body; + if (body && typeof body === "object" && !(body instanceof FormData)) { + body = JSON.stringify(body); + if (!init?.headers || !("Content-Type" in init.headers)) { + headers["Content-Type"] = "application/json"; + } + } + + return client(fullUrl, { + ...init, + headers, + body, + }); + }; +} + +/** + * Query definitions to capture + * + * Each query should: + * - Have a descriptive name (used as the fixture key) + * - Execute ffetch directly with the URL + */ +const queriesToCapture: { + name: string; + description: string; + expectError?: boolean; + execute: ( + client: ReturnType, + ) => Promise<{ url: string; response: Response }>; +}[] = [ + { + name: "list-basic", + description: "Basic list query without filters or options", + execute: async (client) => { + const path = "/contacts$top=10"; + const response = await client(path); + // Get the full URL from the response + const url = response.url; + return { url, response }; + }, + }, + { + name: "list-with-select", + description: "List query with $select to limit fields", + execute: async (client) => { + const path = "/contacts?$select=name,PrimaryKey&$top=10"; + const response = await client(path); + const url = response.url; + return { url, response }; + }, + }, + { + name: "list-with-orderby", + description: "List query with $orderby for sorting", + execute: async (client) => { + const path = "/contacts?$orderby=name&$top=5"; + const response = await client(path); + const url = response.url; + return { url, response }; + }, + }, + { + name: "list-with-pagination", + description: "List query with $top and $skip for pagination", + execute: async (client) => { + const path = "/contacts?$top=2&$skip=2"; + const response = await client(path); + const url = response.url; + return { url, response }; + }, + }, + + { + name: "insert-return-minimal", + description: "Insert query with return=minimal", + execute: async (client) => { + const path = "/contacts"; + const response = await client(path, { + method: "POST", + headers: { + Prefer: "return=minimal", + }, + body: { + name: "Capture test (minimal)", + }, + }); + const url = response.url; + return { url, response }; + }, + }, + + { + name: "insert", + description: "Insert query with return=representation (default)", + execute: async (client) => { + const path = "/contacts"; + const response = await client(path, { + method: "POST", + + body: { + name: "Capture test", + }, + }); + + const url = response.url; + return { url, response }; + }, + }, + { + name: "single-record", + description: "Single record query using get()", + execute: async (client) => { + // First get a list to find an ID + const listPath = "/contacts?$top=1"; + const listResponse = await client(listPath); + + // Check if response is JSON before parsing + const contentType = listResponse.headers.get("content-type") || ""; + let listData: any = {}; + + if (contentType.includes("application/json") && listResponse.ok) { + try { + listData = await listResponse.json(); + } catch (e) { + // If JSON parsing fails, use fallback ID + } + } + + let recordId = "B5BFBC89-03E0-47FC-ABB6-D51401730227"; // fallback + if (listData.value && listData.value.length > 0) { + const firstId = + listData.value[0].ContactID || + listData.value[0].id || + listData.value[0].PrimaryKey; + if (firstId) { + recordId = String(firstId); + } + } + + // OData requires GUIDs to be wrapped in single quotes + const path = `/contacts('${recordId}')`; + const response = await client(path); + const url = response.url; + return { url, response }; + }, + }, + // Error cases - intentionally invalid queries to capture error responses + { + name: "error-invalid-field-select", + description: "Error response for invalid field in $select", + expectError: true, + execute: async (client) => { + const path = "/contacts?$select=InvalidFieldName"; + const response = await client(path); + const url = response.url; + return { url, response }; + }, + }, + { + name: "error-invalid-field-orderby", + description: "Error response for invalid field in $orderby", + expectError: true, + execute: async (client) => { + const path = "/contacts?$orderby=InvalidFieldName"; + const response = await client(path); + const url = response.url; + return { url, response }; + }, + }, + { + name: "error-invalid-record-id", + description: "Error response for invalid record ID in get()", + expectError: true, + execute: async (client) => { + // OData requires GUIDs to be wrapped in single quotes + const path = "/contacts('00000000-0000-0000-0000-000000000000')"; + const response = await client(path); + const url = response.url; + return { url, response }; + }, + }, + { + name: "single-field", + description: "Single field query using getSingleField()", + execute: async (client) => { + // OData requires GUIDs to be wrapped in single quotes + const path = "/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')/name"; + const response = await client(path); + const url = response.url; + return { url, response }; + }, + }, + { + name: "simple-navigation", + description: "Simple navigation query", + execute: async (client) => { + // OData requires GUIDs to be wrapped in single quotes + const path = "/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')/users"; + const response = await client(path); + const url = response.url; + return { url, response }; + }, + }, + { + name: "list with invalid expand", + description: "List query with expand to include related records", + execute: async (client) => { + const path = "/contacts?$expand=users($select=not_real_field)"; + const response = await client(path); + const url = response.url; + return { url, response }; + }, + }, + { + name: "get with expand", + description: "Get query with expand to include related records", + execute: async (client) => { + const path = + "/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')?$expand=users"; + const response = await client(path); + const url = response.url; + return { url, response }; + }, + }, + { + name: "deep nested expand", + description: "Deep nested expand query", + execute: async (client) => { + const path = + "/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')?$expand=users($expand=user_customer($select=name))"; + const response = await client(path); + const url = response.url; + return { url, response }; + }, + }, + { + name: "list with nested expand", + description: "List query with deeply nested expand and selected fields", + execute: async (client) => { + const path = `/contacts?$top=2&$expand=users($expand=user_customer($select=name))`; + const response = await client(path); + const url = response.url; + return { url, response }; + }, + }, +]; + +/** + * Formats a JavaScript object as a TypeScript-compatible string with proper indentation + */ +function formatObject(obj: any, indent = 2): string { + const spaces = " ".repeat(indent); + if (obj === null) return "null"; + if (obj === undefined) return "undefined"; + if (typeof obj === "string") { + // Escape quotes and newlines using JSON.stringify + return JSON.stringify(obj); + } + if (typeof obj === "number" || typeof obj === "boolean") { + return String(obj); + } + if (Array.isArray(obj)) { + if (obj.length === 0) return "[]"; + const items = obj + .map((item) => `${spaces}${formatObject(item, indent + 2)},`) + .join("\n"); + return `[\n${items}\n${" ".repeat(indent - 2)}]`; + } + if (typeof obj === "object") { + const keys = Object.keys(obj); + if (keys.length === 0) return "{}"; + const entries = keys + .map((key) => { + const value = formatObject(obj[key], indent + 2); + return `${spaces}${JSON.stringify(key)}: ${value}`; + }) + .join(",\n"); + return `{\n${entries}\n${" ".repeat(indent - 2)}}`; + } + return String(obj); +} + +/** + * Generates TypeScript code for the responses file + */ +function generateResponsesFile( + responses: Record, +): string { + const entries = Object.entries(responses) + .map(([key, response]) => { + const urlStr = JSON.stringify(response.url); + const methodStr = JSON.stringify(response.method); + const statusStr = response.status; + const responseStr = formatObject(response.response); + + const headersLine = response.headers + ? `\n headers: ${formatObject(response.headers, 4)},` + : ""; + + return ` "${key}": { + url: ${urlStr}, + method: ${methodStr}, + status: ${statusStr},${headersLine} + response: ${responseStr}, + },`; + }) + .join("\n\n"); + + return `/** + * Mock Response Fixtures + * + * This file contains captured responses from real FileMaker OData API calls. + * These responses are used by the mock fetch implementation to replay API responses + * in tests without requiring a live server connection. + * + * Format: + * - Each response is keyed by a descriptive query name + * - Each response object contains: + * - url: The full request URL (for matching) + * - method: HTTP method (typically "GET") + * - status: Response status code + * - response: The actual response data (JSON-parsed) + * + * To add new mock responses: + * 1. Add a query definition to scripts/capture-responses.ts + * 2. Run: pnpm capture + * 3. The captured response will be added to this file automatically + * + * You can manually edit responses here if you need to modify test data. + */ + +export type MockResponse = { + url: string; + method: string; + status: number; + headers?: { + "content-type"?: string; + "location"?: string; + }; + response: any; +}; + +export type MockResponses = Record; + +/** + * Captured mock responses from FileMaker OData API + * + * These responses are used in tests by passing them to createMockFetch() at the + * per-execution level. Each test explicitly declares which response it expects. + */ +export const mockResponses = { +${entries} +} satisfies MockResponses; +`; +} + +async function main() { + console.log("Starting response capture...\n"); + + if (!database) { + throw new Error("FMODATA_DATABASE environment variable is required"); + } + if (!serverUrl) { + throw new Error("FMODATA_SERVER_URL environment variable is required"); + } + if (!apiKey) { + throw new Error("FMODATA_API_KEY environment variable is required"); + } + + // Create authenticated client with baseUrl and database configured + const client = createAuthenticatedClient(serverUrl, database, apiKey); + + // Execute each query and capture responses + for (const queryDef of queriesToCapture) { + try { + console.log(`Capturing: ${queryDef.name} - ${queryDef.description}`); + + // Execute the query directly with ffetch + const { url, response } = await queryDef.execute(client); + + // Capture the response data (even for error status codes) + const status = response.status; + const contentType = response.headers.get("content-type") || ""; + const location = response.headers.get("location") || undefined; + let responseData: any; + + if (contentType.includes("application/json")) { + try { + // Clone response to read without consuming + const clonedResponse = response.clone(); + responseData = await clonedResponse.json(); + } catch (e) { + responseData = null; + } + } else { + const clonedResponse = response.clone(); + responseData = await clonedResponse.text(); + } + + // Sanitize URLs before storing + const sanitizedUrl = sanitizeUrl(url, serverUrl); + const sanitizedResponse = sanitizeResponseData(responseData, serverUrl); + + // Store captured response (including error responses) + capturedResponses[queryDef.name] = { + url: sanitizedUrl, + method: "GET", + status, + headers: + contentType || location + ? { + ...(contentType && { "content-type": contentType }), + ...(location && { location }), + } + : undefined, + response: sanitizedResponse, + }; + + if (status >= 400 && !queryDef.expectError) { + console.log( + ` ⚠ Captured error response for ${queryDef.name} (status: ${status})`, + ); + } else { + console.log(` ✓ Captured response for ${queryDef.name}`); + } + } catch (error) { + // Only log errors if they're not expected + if (!queryDef.expectError) { + console.error(` ✗ Failed to capture ${queryDef.name}:`, error); + if (error instanceof Error) { + console.error(` ${error.message}`); + } + } else { + // For expected errors, try to capture the error response + // ffetch might throw, but we can check if we got a response + if (error && typeof error === "object" && "response" in error) { + const errorResponse = (error as any).response; + if (errorResponse) { + const url = errorResponse.url || ""; + const status = errorResponse.status || 500; + const contentType = + errorResponse.headers?.get("content-type") || ""; + const location = + errorResponse.headers?.get("location") || undefined; + let responseData: any; + + try { + const clonedResponse = errorResponse.clone(); + if (contentType.includes("application/json")) { + responseData = await clonedResponse.json(); + } else { + responseData = await clonedResponse.text(); + } + } catch (e) { + responseData = null; + } + + // Sanitize URLs before storing + const sanitizedUrl = sanitizeUrl(url, serverUrl); + const sanitizedResponse = sanitizeResponseData( + responseData, + serverUrl, + ); + + capturedResponses[queryDef.name] = { + url: sanitizedUrl, + method: "GET", + status, + headers: + contentType || location + ? { + ...(contentType && { "content-type": contentType }), + ...(location && { location }), + } + : undefined, + response: sanitizedResponse, + }; + console.log(` ✓ Captured error response for ${queryDef.name}`); + } else { + console.warn( + ` ⚠ Expected error for ${queryDef.name} but response was not captured`, + ); + } + } else { + console.warn( + ` ⚠ Expected error for ${queryDef.name} but response was not captured`, + ); + } + } + } + } + + console.log("\nCapture complete!"); + console.log(`Captured ${Object.keys(capturedResponses).length} responses`); + + if (Object.keys(capturedResponses).length === 0) { + console.warn( + "Warning: No responses were captured. Check your queries and server connection.", + ); + return; + } + + // Generate and write the responses file + const fixturesPath = path.resolve( + __dirname, + "../tests/fixtures/responses.ts", + ); + const fileContent = generateResponsesFile(capturedResponses); + writeFileSync(fixturesPath, fileContent, "utf-8"); + + console.log(`\nResponses written to: ${fixturesPath}`); + console.log("\nYou can now use these mocks in your tests!"); +} + +main().catch((error) => { + console.error("Capture script failed:", error); + process.exit(1); +}); diff --git a/packages/fmodata/scripts/download-metadata.ts b/packages/fmodata/scripts/download-metadata.ts new file mode 100644 index 00000000..3e5d8168 --- /dev/null +++ b/packages/fmodata/scripts/download-metadata.ts @@ -0,0 +1,102 @@ +#!/usr/bin/env bun + +/** + * OData Metadata Downloader + * + * This script downloads OData metadata from a FileMaker server and saves it + * to a JSON file. The metadata can then be used with typegen-starter.ts to + * generate TypeScript table occurrence definitions. + * + * Usage: + * bun scripts/download-metadata.ts + * + * For now, authentication details are hardcoded in the script. + * Later, this will support command-line arguments for: + * - username and password, OR + * - API key and server URL + */ + +import { FMServerConnection } from "../src/client/filemaker-odata"; +import { writeFile } from "node:fs/promises"; +import { resolve, dirname } from "node:path"; +import { fileURLToPath } from "node:url"; +import dotenv from "dotenv"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); +dotenv.config({ path: resolve(__dirname, ".env.local") }); + +// ============================================================================ +// HARDCODED CONFIGURATION - Replace these with your actual values +// ============================================================================ + +const SERVER_URL = process.env.FM_SERVER; // Replace with your server URL +const DATABASE_NAME = process.env.FM_DATABASE; // Replace with your database name + +const USERNAME = process.env.FM_USERNAME; // Replace with your username +const PASSWORD = process.env.FM_PASSWORD; // Replace with your password + +// Output file path (relative to the scripts directory) +const OUTPUT_FILE = "../tests/fixtures/metadata.xml"; // Adjust as needed + +// ============================================================================ +// END CONFIGURATION +// ============================================================================ + +// Get __dirname equivalent in ES modules + +async function downloadMetadata(): Promise { + console.log("Connecting to FileMaker server..."); + console.log(`Server URL: ${SERVER_URL}`); + console.log(`Database: ${DATABASE_NAME}`); + + if (!SERVER_URL || !DATABASE_NAME || !USERNAME || !PASSWORD) { + throw new Error("Missing required configuration values"); + } + + // Create connection based on authentication method + const connection = new FMServerConnection({ + serverUrl: SERVER_URL, + auth: { username: USERNAME, password: PASSWORD }, + fetchClientOptions: { + timeout: 15000, // 10 seconds + retries: 2, + }, + }); + + const db = connection.database(DATABASE_NAME); + + console.log("Downloading metadata..."); + + try { + const fullMetadata = await db.getMetadata({ format: "xml" }); + + // Resolve output path + const outputPath = resolve(__dirname, OUTPUT_FILE); + + console.log(`Writing metadata to: ${outputPath}`); + + // Write metadata to file + await writeFile(outputPath, fullMetadata, "utf-8"); + + console.log("✓ Metadata downloaded successfully!"); + console.log( + `\nYou can now use this metadata file with typegen-starter.ts:`, + ); + console.log( + ` bun scripts/typegen-starter.ts ${OUTPUT_FILE} output/occurrences.ts`, + ); + } catch (error) { + console.error("Error downloading metadata:", error); + if (error instanceof Error) { + console.error("Error message:", error.message); + } + process.exit(1); + } +} + +// Run the script +downloadMetadata().catch((error) => { + console.error("Fatal error:", error); + process.exit(1); +}); diff --git a/packages/fmodata/scripts/dreams.ts b/packages/fmodata/scripts/dreams.ts new file mode 100644 index 00000000..ecda67b3 --- /dev/null +++ b/packages/fmodata/scripts/dreams.ts @@ -0,0 +1,162 @@ +// Example of the new ORM-style API for fmodata +// This demonstrates the Drizzle-inspired syntax with field builders and operators + +import { + fmTableOccurrence, + textField, + numberField, + dateField, + timeField, + timestampField, + containerField, + calcField, + eq, + gt, + and, + or, + contains, +} from "../src/orm"; +import { FMServerConnection } from "../src"; +import { z } from "zod/v4"; + +// Helper function for boolean fields (FileMaker stores as 0/1) +const booleanField = () => + numberField() + // Parses the number to a boolean when reading from the database + .outputValidator(z.coerce.boolean()) + // Allows the user to pass a boolean when inserting or updating, converting it back to number + .inputValidator(z.boolean().transform((val) => (val ? 1 : 0))); + +// Define table with field builders +// All fields nullable by default, unless primary key or "notNull" is set +export const users = fmTableOccurrence( + "users", // table name on the graph + { + id: textField().primaryKey().entityId("FMFID:1"), + CreationTimestamp: timestampField().readOnly().entityId("FMFID:2"), + CreatedBy: textField().readOnly().entityId("FMFID:3"), + ModificationTimestamp: timestampField().readOnly().entityId("FMFID:4"), + ModifiedBy: textField().readOnly().entityId("FMFID:5"), + name: textField().notNull().entityId("FMFID:6"), + active: booleanField().entityId("FMFID:7"), + id_customer: textField().entityId("FMFID:8"), + hobby: textField() + .outputValidator(z.enum(["reading", "writing", "coding"])) + .entityId("FMFID:9"), + }, + { + entityId: "FMTID:100", + defaultSelect: "schema", + navigationPaths: ["contacts"], // Runtime validation when expanding + }, +); + +// @ts-expect-error should not be able to see property +users._entityId; + +// @ts-expect-error should not be able to see symbols +users[FMTableBaseTableConfig]; + +// Example contacts table +export const contacts = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey().entityId("FMFID:10"), + name: textField().notNull().entityId("FMFID:11"), + email: textField().entityId("FMFID:12"), + id_user: textField().entityId("FMFID:13"), + }, + { + entityId: "FMTID:101", + defaultSelect: "schema", + navigationPaths: ["users"], + }, +); + +const connection = new FMServerConnection({ + serverUrl: "https://api.example.com", + auth: { apiKey: "test-api-key" }, +}); + +const db = connection.database("MyDatabase.fmp12"); + +// Expand with FMTable object (validated against navigationPaths) +db.from(users).list().expand(contacts); + +// Navigate with FMTable object (validated against navigationPaths) +db.from(users).navigate(contacts).list(); + +// ============================================================================ +// Query Examples - New ORM-style API +// ============================================================================ + +// Select with typed strings (original style) +db.from(users).list().select(users.id); + +// Select with Column references (new capability) +db.from(users) + .list() + .select({ id: users.id, name: users.name, hobby: users.hobby }); + +// Filter with operators - "reading" autocompletes based on enum +db.from(users) + .list() + .select(users.id, users.name) + .where(eq(users.hobby, "reading")); + +// Complex filters with logical operators +db.from(users) + .list() + .select("id", "name") + .where( + and( + eq(users.active, true), + or(eq(users.hobby, "reading"), eq(users.hobby, "coding")), + ), + ); + +// String operators +db.from(users) + .list() + .select("name", "email") + .where(contains(users.name, "John")); + +// // Cross-table column comparison +// db.from(users).select("id", "name").where(eq(users.id, contacts.id_user)); + +// OrderBy with Column references +db.from(users).list().select("id", "name").orderBy([users.name, "asc"]); + +// OrderBy with strings (still supported) +db.from(users) + .list() + .select(users.id, users.name) + .orderBy([ + ["name", "asc"], + ["CreationTimestamp", "desc"], + ]); + +// ============================================================================ +// Note: Insert/Update/Delete APIs remain unchanged +// ============================================================================ + +// Insert (existing API) +// db.from(users).insert({ name: "John", hobby: "reading" }); + +// Update (existing API) +// db.from(users).update({ name: "Jane" }).where(eq(users.id, "123")); + +// Delete (existing API) +// db.from(users).delete().where(eq(users.id, "123")); + +// ============================================================================ +// Type inference examples +// ============================================================================ + +// users.id is Column +// users.name is Column +// users.hobby is Column<"reading" | "writing" | "coding", "hobby"> +// users.active is Column + +type UserId = typeof users.id; // Column +type UserHobby = typeof users.hobby; // Column<"reading" | "writing" | "coding", "hobby"> diff --git a/packages/fmodata/scripts/experiment-batch.ts b/packages/fmodata/scripts/experiment-batch.ts new file mode 100644 index 00000000..44174f20 --- /dev/null +++ b/packages/fmodata/scripts/experiment-batch.ts @@ -0,0 +1,614 @@ +/** + * Batch Operations Experiment Script + * + * This script experiments with batch operations containing inserts, updates, + * and deletes to understand how FileMaker handles them, especially when + * some operations fail. + * + * Usage: + * cd packages/fmodata && pnpm tsx scripts/experiment-batch.ts + */ + +import { config } from "dotenv"; +import path from "path"; +import { fileURLToPath } from "url"; +import { z } from "zod/v4"; +import { + FMServerConnection, + fmTableOccurrence, + textField, + timestampField, + eq, +} from "../src/index"; + +// Get __dirname equivalent in ES modules +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// Load environment variables +config({ path: path.resolve(__dirname, "../.env.local") }); + +const serverUrl = process.env.FMODATA_SERVER_URL; +const username = process.env.FMODATA_USERNAME; +const password = process.env.FMODATA_PASSWORD; +const database = process.env.FMODATA_DATABASE; + +if (!serverUrl || !username || !password || !database) { + throw new Error( + "Environment variables required: FMODATA_SERVER_URL, FMODATA_USERNAME, FMODATA_PASSWORD, FMODATA_DATABASE", + ); +} + +// Define schemas +const contactsTO = fmTableOccurrence("contacts", { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField(), + id_user: textField(), +}); + +// Create connection +const connection = new FMServerConnection({ + serverUrl, + auth: { username, password }, +}); + +const db = connection.database(database, { + occurrences: [contactsTO], +}); + +// Track created records for cleanup +const createdRecordIds: string[] = []; + +async function cleanup() { + console.log("\n🧹 Cleaning up created records..."); + for (const id of createdRecordIds) { + try { + await db.from("contacts").delete().byId(id).execute(); + console.log(` Deleted: ${id}`); + } catch (error) { + console.log(` Failed to delete ${id}:`, error); + } + } +} + +async function experiment1_MultipleInserts() { + console.log("\n" + "=".repeat(60)); + console.log("EXPERIMENT 1: Multiple Inserts in a Batch"); + console.log("=".repeat(60)); + + const timestamp = Date.now(); + const insert1 = db.from("contacts").insert({ + name: `Batch Insert 1 - ${timestamp}`, + hobby: "Insert Test", + }); + + const insert2 = db.from("contacts").insert({ + name: `Batch Insert 2 - ${timestamp}`, + hobby: "Insert Test", + }); + + const insert3 = db.from("contacts").insert({ + name: `Batch Insert 3 - ${timestamp}`, + hobby: "Insert Test", + }); + + console.log("\nExecuting batch with 3 insert operations..."); + + const result = await db.batch([insert1, insert2, insert3]).execute(); + + console.log("\nResult:"); + console.log(JSON.stringify(result, null, 2)); + + if (result.data) { + // Track for cleanup + for (const item of result.data) { + if (item && typeof item === "object" && "PrimaryKey" in item) { + createdRecordIds.push(item.PrimaryKey as string); + } + } + } + + return result; +} + +async function experiment2_MixedOperations() { + console.log("\n" + "=".repeat(60)); + console.log( + "EXPERIMENT 2: Mixed Operations (GET + INSERT + UPDATE + DELETE)", + ); + console.log("=".repeat(60)); + + // First, create a record we can update/delete + const timestamp = Date.now(); + const setupResult = await db + .from("contacts") + .insert({ + name: `Setup Record - ${timestamp}`, + hobby: "Will be updated", + }) + .execute(); + + if (setupResult.error || !setupResult.data) { + console.log("Failed to create setup record:", setupResult.error); + return; + } + + const setupRecordId = setupResult.data.PrimaryKey; + console.log(`\nCreated setup record: ${setupRecordId}`); + + // Now create a batch with mixed operations + const listQuery = db.from("contacts").list().top(2); + + const insertOp = db.from("contacts").insert({ + name: `Mixed Batch Insert - ${timestamp}`, + hobby: "Mixed Test", + }); + + const updateOp = db + .from("contacts") + .update({ hobby: "Updated via batch" }) + .byId(setupRecordId); + + const deleteOp = db.from("contacts").delete().byId(setupRecordId); + + console.log("\nExecuting batch with: GET, INSERT, UPDATE, DELETE..."); + + const result = await db + .batch([listQuery, insertOp, updateOp, deleteOp]) + .execute(); + + console.log("\nResult:"); + console.log(JSON.stringify(result, null, 2)); + + if (result.data) { + // Track insert result for cleanup + const insertResult = result.data[1]; + if ( + insertResult && + typeof insertResult === "object" && + "PrimaryKey" in insertResult + ) { + createdRecordIds.push(insertResult.PrimaryKey as string); + } + } + + return result; +} + +async function experiment3_FailingOperation() { + console.log("\n" + "=".repeat(60)); + console.log("EXPERIMENT 3: Batch with a Failing Operation in the Middle"); + console.log("=".repeat(60)); + + const timestamp = Date.now(); + + // Create a valid insert + const insert1 = db.from("contacts").insert({ + name: `Before Failure - ${timestamp}`, + hobby: "Should succeed", + }); + + // Try to update a non-existent record (should fail) + const failingUpdate = db + .from("contacts") + .update({ hobby: "This should fail" }) + .byId("00000000-0000-0000-0000-000000000000"); + + // Another valid insert (should this succeed or fail?) + const insert2 = db.from("contacts").insert({ + name: `After Failure - ${timestamp}`, + hobby: "Should this succeed?", + }); + + console.log( + "\nExecuting batch with: INSERT (valid), UPDATE (invalid ID), INSERT (valid)...", + ); + console.log( + "Question: What happens to the third operation when the second fails?", + ); + + const result = await db.batch([insert1, failingUpdate, insert2]).execute(); + + console.log("\nResult:"); + console.log(JSON.stringify(result, null, 2)); + + if (result.data) { + for (const item of result.data) { + if (item && typeof item === "object" && "PrimaryKey" in item) { + createdRecordIds.push(item.PrimaryKey as string); + } + } + } + + return result; +} + +async function experiment4_FailingDelete() { + console.log("\n" + "=".repeat(60)); + console.log("EXPERIMENT 4: Batch with a Failing Delete"); + console.log("=".repeat(60)); + + const timestamp = Date.now(); + + // Create a valid insert + const insert1 = db.from("contacts").insert({ + name: `Before Delete Fail - ${timestamp}`, + hobby: "Should succeed", + }); + + // Try to delete a non-existent record + const failingDelete = db + .from("contacts") + .delete() + .byId("00000000-0000-0000-0000-000000000000"); + + // Another valid insert + const insert2 = db.from("contacts").insert({ + name: `After Delete Fail - ${timestamp}`, + hobby: "Should this succeed?", + }); + + console.log("\nExecuting batch with: INSERT, DELETE (invalid ID), INSERT..."); + + const result = await db.batch([insert1, failingDelete, insert2]).execute(); + + console.log("\nResult:"); + console.log(JSON.stringify(result, null, 2)); + + if (result.data) { + for (const item of result.data) { + if (item && typeof item === "object" && "PrimaryKey" in item) { + createdRecordIds.push(item.PrimaryKey as string); + } + } + } + + return result; +} + +async function experiment5_AllGetWithOneFailure() { + console.log("\n" + "=".repeat(60)); + console.log( + "EXPERIMENT 5: Multiple GETs with One Filter that Returns Nothing", + ); + console.log("=".repeat(60)); + + // Query that should return results + const query1 = db.from("contacts").list().top(2); + + // Query with a filter that returns empty (not an error, just no results) + const query2 = db + .from(contactsTO) + .list() + .where(eq(contactsTO.name, "THIS_NAME_DEFINITELY_DOES_NOT_EXIST_12345")); + + // Another query that should return results + const query3 = db.from("contacts").list().top(1); + + console.log( + "\nExecuting batch with: GET (valid), GET (empty filter), GET (valid)...", + ); + + const result = await db.batch([query1, query2, query3]).execute(); + + console.log("\nResult:"); + console.log(JSON.stringify(result, null, 2)); + + return result; +} + +async function experiment6_RawResponseInspection() { + console.log("\n" + "=".repeat(60)); + console.log("EXPERIMENT 6: Raw Response Inspection - Direct Fetch"); + console.log("=".repeat(60)); + + // Make a direct batch request to see raw response + const timestamp = Date.now(); + const boundary = "batch_direct_test_123"; + + const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; + const batchUrl = `${baseUrl}/$batch`; + + // Build a simple batch body with one GET + const batchBody = [ + `--${boundary}`, + "Content-Type: application/http", + "Content-Transfer-Encoding: binary", + "", + `GET ${baseUrl}/contacts?$top=1 HTTP/1.1`, + "", + "", + `--${boundary}--`, + ].join("\r\n"); + + console.log("\n--- Sending Request ---"); + console.log("URL:", batchUrl); + console.log("Body:", batchBody); + + const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`; + + const response = await fetch(batchUrl, { + method: "POST", + headers: { + Authorization: authHeader, + "Content-Type": `multipart/mixed; boundary=${boundary}`, + "OData-Version": "4.0", + }, + body: batchBody, + }); + + console.log("\n--- Response Info ---"); + console.log("Status:", response.status, response.statusText); + console.log("Content-Type:", response.headers.get("content-type")); + + const responseText = await response.text(); + console.log("\n--- Raw Response Body ---"); + console.log(responseText); + console.log("--- End Raw Response ---"); +} + +async function experiment7_RawResponseWithInsert() { + console.log("\n" + "=".repeat(60)); + console.log("EXPERIMENT 7: Raw Response - Insert with Prefer header"); + console.log("=".repeat(60)); + + const timestamp = Date.now(); + const boundary = "batch_insert_test_456"; + const changesetBoundary = "changeset_insert_789"; + + const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; + const batchUrl = `${baseUrl}/$batch`; + + const insertBody = JSON.stringify({ + name: `Direct Insert Test - ${timestamp}`, + hobby: "Testing", + }); + + // Build a batch with INSERT using return=representation + const batchBody = [ + `--${boundary}`, + `Content-Type: multipart/mixed; boundary=${changesetBoundary}`, + "", + `--${changesetBoundary}`, + "Content-Type: application/http", + "Content-Transfer-Encoding: binary", + "", + `POST ${baseUrl}/contacts HTTP/1.1`, + "Content-Type: application/json", + "Prefer: return=representation", + `Content-Length: ${insertBody.length}`, + "", + insertBody, + `--${changesetBoundary}--`, + `--${boundary}--`, + ].join("\r\n"); + + console.log("\n--- Sending Insert Request ---"); + console.log("Body:\n", batchBody); + + const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`; + + const response = await fetch(batchUrl, { + method: "POST", + headers: { + Authorization: authHeader, + "Content-Type": `multipart/mixed; boundary=${boundary}`, + "OData-Version": "4.0", + }, + body: batchBody, + }); + + console.log("\n--- Response Info ---"); + console.log("Status:", response.status, response.statusText); + console.log("Content-Type:", response.headers.get("content-type")); + + const responseText = await response.text(); + console.log("\n--- Raw Response Body ---"); + console.log(responseText); + console.log("--- End Raw Response ---"); + + // Try to extract created record ID for cleanup + const pkMatch = responseText.match(/"PrimaryKey":\s*"([^"]+)"/); + if (pkMatch && pkMatch[1]) { + createdRecordIds.push(pkMatch[1]); + console.log("\nCreated record ID:", pkMatch[1]); + } +} + +async function experiment8_TrueError() { + console.log("\n" + "=".repeat(60)); + console.log("EXPERIMENT 8: Raw Response - Query Non-Existent Table"); + console.log("=".repeat(60)); + + const boundary = "batch_error_test"; + const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; + const batchUrl = `${baseUrl}/$batch`; + + // Build: GET (valid), GET (non-existent table), GET (valid) + const batchBody = [ + `--${boundary}`, + "Content-Type: application/http", + "Content-Transfer-Encoding: binary", + "", + `GET ${baseUrl}/contacts?$top=1 HTTP/1.1`, + "", + "", + `--${boundary}`, + "Content-Type: application/http", + "Content-Transfer-Encoding: binary", + "", + `GET ${baseUrl}/THIS_TABLE_DOES_NOT_EXIST?$top=1 HTTP/1.1`, + "", + "", + `--${boundary}`, + "Content-Type: application/http", + "Content-Transfer-Encoding: binary", + "", + `GET ${baseUrl}/contacts?$top=2 HTTP/1.1`, + "", + "", + `--${boundary}--`, + ].join("\r\n"); + + console.log("\n--- Sending Request with Non-Existent Table ---"); + + const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`; + + const response = await fetch(batchUrl, { + method: "POST", + headers: { + Authorization: authHeader, + "Content-Type": `multipart/mixed; boundary=${boundary}`, + "OData-Version": "4.0", + }, + body: batchBody, + }); + + console.log("\n--- Response Info ---"); + console.log("Status:", response.status, response.statusText); + + const responseText = await response.text(); + console.log("\n--- Raw Response Body ---"); + console.log(responseText); + console.log("--- End Raw Response ---"); +} + +async function experiment9_RawResponseWithFailure() { + console.log("\n" + "=".repeat(60)); + console.log("EXPERIMENT 9: Raw Response - Mixed with Failure"); + console.log("=".repeat(60)); + + const timestamp = Date.now(); + const boundary = "batch_fail_test"; + const cs1 = "changeset_1"; + const cs2 = "changeset_2"; + + const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; + const batchUrl = `${baseUrl}/$batch`; + + const insertBody1 = JSON.stringify({ + name: `Before Fail - ${timestamp}`, + hobby: "Test", + }); + const updateBody = JSON.stringify({ hobby: "Should fail" }); + const insertBody2 = JSON.stringify({ + name: `After Fail - ${timestamp}`, + hobby: "Test", + }); + + // Build: INSERT (valid), UPDATE (invalid ID), INSERT (valid) + const batchBody = [ + // First changeset: valid insert + `--${boundary}`, + `Content-Type: multipart/mixed; boundary=${cs1}`, + "", + `--${cs1}`, + "Content-Type: application/http", + "Content-Transfer-Encoding: binary", + "", + `POST ${baseUrl}/contacts HTTP/1.1`, + "Content-Type: application/json", + "Prefer: return=representation", + `Content-Length: ${insertBody1.length}`, + "", + insertBody1, + `--${cs1}--`, + // Second changeset: invalid update + `--${boundary}`, + `Content-Type: multipart/mixed; boundary=${cs2}`, + "", + `--${cs2}`, + "Content-Type: application/http", + "Content-Transfer-Encoding: binary", + "", + `PATCH ${baseUrl}/contacts('00000000-0000-0000-0000-000000000000') HTTP/1.1`, + "Content-Type: application/json", + `Content-Length: ${updateBody.length}`, + "", + updateBody, + `--${cs2}--`, + // Third changeset: valid insert + `--${boundary}`, + `Content-Type: multipart/mixed; boundary=changeset_3`, + "", + `--changeset_3`, + "Content-Type: application/http", + "Content-Transfer-Encoding: binary", + "", + `POST ${baseUrl}/contacts HTTP/1.1`, + "Content-Type: application/json", + "Prefer: return=representation", + `Content-Length: ${insertBody2.length}`, + "", + insertBody2, + `--changeset_3--`, + `--${boundary}--`, + ].join("\r\n"); + + console.log("\n--- Sending Mixed Request with Invalid Update ---"); + + const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`; + + const response = await fetch(batchUrl, { + method: "POST", + headers: { + Authorization: authHeader, + "Content-Type": `multipart/mixed; boundary=${boundary}`, + "OData-Version": "4.0", + }, + body: batchBody, + }); + + console.log("\n--- Response Info ---"); + console.log("Status:", response.status, response.statusText); + + const responseText = await response.text(); + console.log("\n--- Raw Response Body ---"); + console.log(responseText); + console.log("--- End Raw Response ---"); + + // Extract created record IDs for cleanup + const pkMatches = responseText.matchAll(/"PrimaryKey":\s*"([^"]+)"/g); + for (const match of pkMatches) { + if (match[1]) { + createdRecordIds.push(match[1]); + console.log("Created record ID:", match[1]); + } + } +} + +async function main() { + console.log("🔬 Batch Operations Experiment"); + console.log("================================"); + console.log(`Server: ${serverUrl}`); + console.log(`Database: ${database}`); + console.log(""); + + try { + // Run experiments + await experiment1_MultipleInserts(); + await experiment2_MixedOperations(); + await experiment3_FailingOperation(); + await experiment4_FailingDelete(); + await experiment5_AllGetWithOneFailure(); + await experiment6_RawResponseInspection(); + await experiment7_RawResponseWithInsert(); + await experiment8_TrueError(); + await experiment9_RawResponseWithFailure(); + + console.log("\n" + "=".repeat(60)); + console.log("ALL EXPERIMENTS COMPLETE"); + console.log("=".repeat(60)); + } catch (error) { + console.error("\n❌ Experiment failed with error:", error); + } finally { + await cleanup(); + } +} + +main().catch(console.error); diff --git a/packages/fmodata/scripts/publish-alpha.ts b/packages/fmodata/scripts/publish-alpha.ts new file mode 100644 index 00000000..79899953 --- /dev/null +++ b/packages/fmodata/scripts/publish-alpha.ts @@ -0,0 +1,435 @@ +/** + * Publish Alpha Script + * + * Builds and publishes the package to npm with the "alpha" tag. + * Checks npm for existing version and git hash, automatically bumps patch version + * if git hashes differ or version needs to be incremented. + * Prompts for confirmation before publishing. + * + * Usage: + * bun run scripts/publish-alpha.ts + */ + +import { readFileSync, writeFileSync } from "fs"; +import { resolve, dirname } from "path"; +import { fileURLToPath } from "url"; +import { execSync } from "child_process"; +import readline from "readline"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +// Read package.json +const packagePath = resolve(__dirname, "../package.json"); +let packageJson = JSON.parse(readFileSync(packagePath, "utf-8")); +const packageName = packageJson.name; +let version = packageJson.version; + +// Create readline interface for user input +const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, +}); + +function question(query: string): Promise { + return new Promise((resolve) => rl.question(query, resolve)); +} + +async function getPublishedVersion( + packageName: string, +): Promise<{ version: string; gitHead?: string } | null> { + try { + const registryUrl = `https://registry.npmjs.org/${packageName}`; + const response = await fetch(registryUrl); + + if (!response.ok) { + if (response.status === 404) { + return null; // Package doesn't exist yet + } + throw new Error(`Failed to fetch package info: ${response.statusText}`); + } + + const data = await response.json(); + + let version: string | undefined; + let gitHead: string | undefined; + + // Check for alpha tagged versions first + if (data["dist-tags"]?.alpha) { + version = data["dist-tags"].alpha; + if (version && data.versions?.[version]) { + gitHead = data.versions[version].gitHead; + } + } else if (data["dist-tags"]?.latest) { + // Fall back to latest if no alpha tag exists + version = data["dist-tags"].latest; + if (version && data.versions?.[version]) { + gitHead = data.versions[version].gitHead; + } + } + + if (version) { + return { version, gitHead }; + } + + return null; + } catch (error) { + console.warn(`⚠️ Could not check npm registry: ${error}`); + return null; + } +} + +function getLocalGitHash(): string | null { + try { + const hash = execSync("git rev-parse HEAD", { + cwd: resolve(__dirname, "../.."), + encoding: "utf-8", + }).trim(); + return hash; + } catch (error) { + console.warn(`⚠️ Could not get local git hash: ${error}`); + return null; + } +} + +function hasUncommittedChanges(): { hasChanges: boolean; details: string } { + try { + // Check for staged changes + const staged = execSync("git diff --cached --name-only", { + cwd: resolve(__dirname, "../.."), + encoding: "utf-8", + }).trim(); + + // Check for unstaged changes + const unstaged = execSync("git diff --name-only", { + cwd: resolve(__dirname, "../.."), + encoding: "utf-8", + }).trim(); + + // Check for untracked files in this package + const untracked = execSync( + "git ls-files --others --exclude-standard packages/fmodata/", + { + cwd: resolve(__dirname, "../.."), + encoding: "utf-8", + }, + ).trim(); + + const changes: string[] = []; + if (staged) changes.push(`staged: ${staged.split("\n").length} file(s)`); + if (unstaged) + changes.push(`unstaged: ${unstaged.split("\n").length} file(s)`); + if (untracked) + changes.push(`untracked: ${untracked.split("\n").length} file(s)`); + + return { + hasChanges: changes.length > 0, + details: changes.join(", "), + }; + } catch (error) { + console.warn(`⚠️ Could not check git status: ${error}`); + return { hasChanges: false, details: "" }; + } +} + +function commitChanges(message: string): void { + const repoRoot = resolve(__dirname, "../.."); + try { + // Check if there are any changes to commit + const status = execSync("git status --porcelain", { + cwd: repoRoot, + encoding: "utf-8", + }).trim(); + + if (!status) { + console.log(`ℹ️ No changes to commit`); + return; + } + + // Stage all changes + execSync("git add -A", { + cwd: repoRoot, + encoding: "utf-8", + }); + + // Commit with the provided message + execSync(`git commit -m "${message.replace(/"/g, '\\"')}"`, { + cwd: repoRoot, + encoding: "utf-8", + }); + + console.log(`✅ Changes committed: "${message}"`); + } catch (error) { + throw new Error(`Failed to commit changes: ${error}`); + } +} + +function compareVersions(v1: string, v2: string): number { + // Parse semantic versions (handles pre-release versions like alpha) + const parseVersion = (v: string) => { + const match = v.match(/^(\d+)\.(\d+)\.(\d+)(?:-(.+))?$/); + if (!match) { + // Fallback for non-standard versions + const parts = v.split(/[.-]/).map(Number); + return { + major: parts[0] || 0, + minor: parts[1] || 0, + patch: parts[2] || 0, + prerelease: null, + }; + } + return { + major: parseInt(match[1]), + minor: parseInt(match[2]), + patch: parseInt(match[3]), + prerelease: match[4] || null, + }; + }; + + const ver1 = parseVersion(v1); + const ver2 = parseVersion(v2); + + // Compare major, minor, patch + if (ver1.major !== ver2.major) { + return ver1.major > ver2.major ? 1 : -1; + } + if (ver1.minor !== ver2.minor) { + return ver1.minor > ver2.minor ? 1 : -1; + } + if (ver1.patch !== ver2.patch) { + return ver1.patch > ver2.patch ? 1 : -1; + } + + // If versions are equal, pre-release versions are considered lower + if (ver1.prerelease && !ver2.prerelease) return -1; + if (!ver1.prerelease && ver2.prerelease) return 1; + if (ver1.prerelease && ver2.prerelease) { + // Compare prerelease strings (e.g., "alpha.0" vs "alpha.1") + return ver1.prerelease.localeCompare(ver2.prerelease); + } + + return 0; +} + +function bumpVersion( + currentVersion: string, + type: "patch" | "minor" | "major", +): string { + const parts = currentVersion.split(/[.-]/); + const major = parseInt(parts[0]) || 0; + const minor = parseInt(parts[1]) || 0; + const patch = parseInt(parts[2]) || 0; + + if (type === "major") { + return `${major + 1}.0.0-alpha.0`; + } else if (type === "minor") { + return `${major}.${minor + 1}.0-alpha.0`; + } else { + // patch - increment the alpha number if it exists, otherwise start at alpha.0 + const alphaMatch = currentVersion.match(/alpha\.(\d+)$/); + if (alphaMatch) { + const alphaNum = parseInt(alphaMatch[1]); + return currentVersion.replace(/alpha\.\d+$/, `alpha.${alphaNum + 1}`); + } + return `${major}.${minor}.${patch + 1}-alpha.0`; + } +} + +function autoBumpPatch(fromVersion?: string): string { + return bumpVersion(fromVersion ?? version, "patch"); +} + +function checkNpmAuth(): boolean { + try { + execSync("npm whoami", { + cwd: resolve(__dirname, ".."), + stdio: "pipe", + }); + return true; + } catch (error) { + return false; + } +} + +async function ensureNpmAuth(): Promise { + if (checkNpmAuth()) { + console.log("✅ Authenticated with npm"); + return; + } + + console.log("\n⚠️ Not authenticated with npm"); + console.log(" You need to log in to npm before publishing.\n"); + + const answer = await question("Would you like to log in now? (y/n): "); + + if (answer.toLowerCase() !== "y" && answer.toLowerCase() !== "yes") { + console.log("❌ Publish cancelled - npm authentication required."); + rl.close(); + process.exit(0); + } + + console.log("\n🔐 Opening npm login..."); + console.log(" (This will open your browser for authentication)\n"); + + try { + execSync("npm login", { + cwd: resolve(__dirname, ".."), + stdio: "inherit", + }); + console.log("\n✅ Successfully logged in to npm"); + } catch (error) { + console.error("\n❌ Failed to log in to npm"); + rl.close(); + process.exit(1); + } +} + +async function updateVersion(newVersion: string) { + packageJson.version = newVersion; + writeFileSync( + packagePath, + JSON.stringify(packageJson, null, 2) + "\n", + "utf-8", + ); + version = newVersion; + console.log(`✅ Version updated to ${newVersion}`); +} + +async function main() { + try { + console.log(`\n📦 Checking npm registry for ${packageName}...`); + + // Check npm for published version + const publishedInfo = await getPublishedVersion(packageName); + const localGitHash = getLocalGitHash(); + + if (publishedInfo) { + const publishedVersion = publishedInfo.version; + const publishedGitHash = publishedInfo.gitHead; + + console.log(` Published version: ${publishedVersion}`); + if (publishedGitHash) { + console.log( + ` Published git hash: ${publishedGitHash.substring(0, 7)}`, + ); + } + console.log(` Local version: ${version}`); + if (localGitHash) { + console.log(` Local git hash: ${localGitHash.substring(0, 7)}`); + } + + const gitHashesMatch = + publishedGitHash && localGitHash && publishedGitHash === localGitHash; + + // Only check for uncommitted changes if git hashes match + // If hashes match but there are uncommitted changes, that's fine - we'll commit later + // If hashes match and there are NO uncommitted changes, prevent republishing same code + if (gitHashesMatch) { + const gitStatus = hasUncommittedChanges(); + if (!gitStatus.hasChanges) { + console.log( + `\n⚠️ Git hashes match and there are no uncommitted changes.`, + ); + console.log( + "❌ Cannot republish the exact same code that's already on npm.", + ); + rl.close(); + process.exit(0); + } + // If hashes match but there are uncommitted changes, proceed (will commit later) + } + + const comparison = compareVersions(version, publishedVersion); + + if (comparison <= 0) { + // Version needs to be bumped + if (gitHashesMatch) { + // Git hashes match but we have uncommitted changes (already checked above) + // Auto-bump patch version from the HIGHER version + const versionToBumpFrom = comparison < 0 ? publishedVersion : version; + console.log( + `\n🔄 Git hashes match but you have uncommitted changes - automatically bumping from ${versionToBumpFrom}...`, + ); + const newVersion = autoBumpPatch(versionToBumpFrom); + await updateVersion(newVersion); + } else { + // Git hashes differ, auto-bump patch version from the HIGHER version + // (usually the published version when local is behind) + const versionToBumpFrom = comparison < 0 ? publishedVersion : version; + console.log( + `\n🔄 Git hashes differ - automatically bumping from ${versionToBumpFrom}...`, + ); + const newVersion = autoBumpPatch(versionToBumpFrom); + await updateVersion(newVersion); + } + } else { + // Local version is greater than published version (not yet published) + console.log(`✅ Local version is newer than published version`); + if (gitHashesMatch) { + console.log(`✅ Git hashes match`); + } + } + } else { + console.log(` No published version found (first publish)`); + // If version hasn't been published, ensure we have a valid version + // The current version should be fine, but we could bump if needed + console.log(` Using current version: ${version}`); + } + + console.log(`\n📦 Ready to publish:`); + console.log(` Package: ${packageName}`); + console.log(` Version: ${version}`); + console.log(` Tag: alpha\n`); + + // Build the package + console.log("🔨 Building package..."); + process.env.NODE_ENV = "production"; + execSync("pnpm build", { + cwd: resolve(__dirname, ".."), + stdio: "inherit", + }); + console.log("✅ Build complete!\n"); + + // Prompt for confirmation + const answer = await question( + `Continue with publish of ${packageName}@${version}? (y/n): `, + ); + + if (answer.toLowerCase() !== "y" && answer.toLowerCase() !== "yes") { + console.log("❌ Publish cancelled."); + rl.close(); + process.exit(0); + } + + // Check and ensure npm authentication + await ensureNpmAuth(); + + // Publish with npm (will prompt for 2FA interactively if needed) + console.log("\n🚀 Publishing to npm with tag 'alpha'..."); + execSync("npm publish --tag alpha --access public --", { + cwd: resolve(__dirname, ".."), + stdio: "inherit", + }); + + // Also update the 'latest' tag since there's no production version yet + console.log("\n🏷️ Updating 'latest' tag..."); + execSync(`npm dist-tag add ${packageName}@${version} latest`, { + cwd: resolve(__dirname, ".."), + stdio: "inherit", + }); + + console.log("\n✅ Successfully published!"); + + // Commit the version change with the version number as the commit message + console.log(`\n📝 Committing version change...`); + commitChanges(version); + } catch (error) { + console.error("\n❌ Error:", error); + rl.close(); + process.exit(1); + } finally { + rl.close(); + } +} + +main(); diff --git a/packages/fmodata/scripts/test-batch.ts b/packages/fmodata/scripts/test-batch.ts new file mode 100644 index 00000000..9d8824c6 --- /dev/null +++ b/packages/fmodata/scripts/test-batch.ts @@ -0,0 +1,255 @@ +/** + * Batch Request Test Script + * + * This script tests batch requests directly against FileMaker Server + * to understand the exact format expected and returned. + * + * Usage: + * bun run scripts/test-batch.ts + */ + +import { config } from "dotenv"; +import path from "path"; +import { fileURLToPath } from "url"; + +// Get __dirname equivalent in ES modules +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// Load environment variables +config({ path: path.resolve(__dirname, "../.env.local") }); + +const serverUrl = process.env.FMODATA_SERVER_URL; +const username = process.env.FMODATA_USERNAME; +const password = process.env.FMODATA_PASSWORD; +const database = process.env.FMODATA_DATABASE; + +if (!serverUrl) { + throw new Error("FMODATA_SERVER_URL environment variable is required"); +} + +if (!username || !password) { + throw new Error( + "FMODATA_USERNAME and FMODATA_PASSWORD environment variables are required", + ); +} + +if (!database) { + throw new Error("FMODATA_DATABASE environment variable is required"); +} + +// Generate a random boundary +function generateBoundary(prefix: string): string { + const randomHex = Array.from({ length: 32 }, () => + Math.floor(Math.random() * 16).toString(16), + ).join(""); + return `${prefix}${randomHex}`; +} + +async function testSimpleBatch() { + console.log("=== Testing Simple Batch Request ===\n"); + + // Construct base URL + const cleanUrl = serverUrl!.replace(/\/+$/, ""); + const baseUrl = `${cleanUrl}/fmi/odata/v4`; + const fullBaseUrl = `${baseUrl}/${database}`; + const batchUrl = `${fullBaseUrl}/$batch`; + + console.log("Batch URL:", batchUrl); + + // Generate boundary + const batchBoundary = generateBoundary("batch_"); + console.log("Batch Boundary:", batchBoundary); + + // Construct batch request body according to Claris docs + // Note: After the HTTP request line, there should be a blank line, then immediately the next boundary + const batchBody = [ + `--${batchBoundary}`, + "Content-Type: application/http", + "Content-Transfer-Encoding: binary", + "", + `GET ${fullBaseUrl}/contacts?$top=2 HTTP/1.1`, + "", // Blank line after HTTP request (required even with no body) + "", // Empty line before boundary + `--${batchBoundary}`, + "Content-Type: application/http", + "Content-Transfer-Encoding: binary", + "", + `GET ${fullBaseUrl}/users?$top=2 HTTP/1.1`, + "", // Blank line after HTTP request + "", // Empty line before boundary + `--${batchBoundary}--`, + ].join("\r\n"); + + console.log("\n=== Request Body ==="); + console.log(batchBody); + console.log("\n=== End Request Body ===\n"); + + // Create Basic Auth header + const authString = `${username}:${password}`; + const authHeader = `Basic ${Buffer.from(authString).toString("base64")}`; + + console.log("Authorization:", authHeader.substring(0, 20) + "..."); + + // Make the request + try { + console.log("\nSending request...\n"); + + const response = await fetch(batchUrl, { + method: "POST", + headers: { + Authorization: authHeader, + "Content-Type": `multipart/mixed; boundary=${batchBoundary}`, + "OData-Version": "4.0", + }, + body: batchBody, + }); + + console.log("Response Status:", response.status); + console.log("Response Status Text:", response.statusText); + console.log("\nResponse Headers:"); + response.headers.forEach((value, key) => { + console.log(` ${key}: ${value}`); + }); + + const responseText = await response.text(); + console.log("\n=== Response Body ==="); + console.log(responseText); + console.log("=== End Response Body ===\n"); + + if (!response.ok) { + console.error("\n❌ Request failed!"); + // Try to parse as JSON error + try { + const errorData = JSON.parse(responseText); + console.error("Error details:", JSON.stringify(errorData, null, 2)); + } catch { + // Not JSON, already printed above + } + } else { + console.log("\n✅ Request succeeded!"); + } + } catch (error) { + console.error("\n❌ Request threw error:"); + console.error(error); + } +} + +async function testBatchWithChangeset() { + console.log("\n\n=== Testing Batch with Changeset ===\n"); + + const cleanUrl = serverUrl!.replace(/\/+$/, ""); + const baseUrl = `${cleanUrl}/fmi/odata/v4`; + const fullBaseUrl = `${baseUrl}/${database}`; + const batchUrl = `${fullBaseUrl}/$batch`; + + // Generate boundaries + const batchBoundary = generateBoundary("batch_"); + const changesetBoundary = generateBoundary("changeset_"); + + console.log("Batch Boundary:", batchBoundary); + console.log("Changeset Boundary:", changesetBoundary); + + // Construct batch with changeset (based on Claris example) + // Key formatting rules discovered: + // - GET requests (no body): request line → blank → blank → boundary + // - POST/PATCH (with body): request line → headers (incl Content-Length) → blank → body → boundary (NO blank!) + // - No blank line between closing changeset and closing batch boundaries + + const postBody = JSON.stringify({ + name: `Test Batch ${Date.now()}`, + hobby: "Testing", + }); + + const batchBody = [ + `--${batchBoundary}`, + "Content-Type: application/http", + "Content-Transfer-Encoding: binary", + "", + `GET ${fullBaseUrl}/contacts?$top=1 HTTP/1.1`, + "", // Blank line after HTTP request (no body) + "", // Second blank before boundary (for requests without body) + `--${batchBoundary}`, + `Content-Type: multipart/mixed; boundary=${changesetBoundary}`, + "", + `--${changesetBoundary}`, + "Content-Type: application/http", + "Content-Transfer-Encoding: binary", + "Content-ID: 1", + "", + `POST ${fullBaseUrl}/contacts HTTP/1.1`, + "Content-Type: application/json", + `Content-Length: ${postBody.length}`, + "", // Blank line separating headers from body + postBody, + // NO blank line after body - boundary comes immediately + `--${changesetBoundary}--`, + `--${batchBoundary}--`, + ].join("\r\n"); + + console.log("\n=== Request Body ==="); + console.log(batchBody); + console.log("\n=== End Request Body ===\n"); + + const authString = `${username}:${password}`; + const authHeader = `Basic ${Buffer.from(authString).toString("base64")}`; + + try { + console.log("Sending request...\n"); + + const response = await fetch(batchUrl, { + method: "POST", + headers: { + Authorization: authHeader, + "Content-Type": `multipart/mixed; boundary=${batchBoundary}`, + "OData-Version": "4.0", + }, + body: batchBody, + }); + + console.log("Response Status:", response.status); + console.log("Response Status Text:", response.statusText); + console.log("\nResponse Headers:"); + response.headers.forEach((value, key) => { + console.log(` ${key}: ${value}`); + }); + + const responseText = await response.text(); + console.log("\n=== Response Body ==="); + console.log(responseText); + console.log("=== End Response Body ===\n"); + + if (!response.ok) { + console.error("\n❌ Request failed!"); + try { + const errorData = JSON.parse(responseText); + console.error("Error details:", JSON.stringify(errorData, null, 2)); + } catch { + // Not JSON, already printed above + } + } else { + console.log("\n✅ Request succeeded!"); + } + } catch (error) { + console.error("\n❌ Request threw error:"); + console.error(error); + } +} + +async function main() { + console.log("FileMaker OData Batch Request Test"); + console.log("===================================\n"); + + // Test 1: Simple batch with two GET requests + await testSimpleBatch(); + + // Test 2: Batch with changeset + await testBatchWithChangeset(); + + console.log("\n\nTests complete!"); +} + +main().catch((error) => { + console.error("Test script failed:", error); + process.exit(1); +}); diff --git a/packages/fmodata/scripts/typegen-starter.ts b/packages/fmodata/scripts/typegen-starter.ts new file mode 100755 index 00000000..77e7e7fc --- /dev/null +++ b/packages/fmodata/scripts/typegen-starter.ts @@ -0,0 +1,585 @@ +#!/usr/bin/env bun + +/** + * OData Metadata to TypeScript Table Occurrence Generator + * + * This script parses OData metadata XML files and generates TypeScript code + * with fmTableOccurrence definitions using navigationPaths + * for use with the fmodata package. + * + * Usage: + * bun scripts/typegen-starter.ts + * + * Example: + * bun scripts/typegen-starter.ts tests/fixtures/metadata.xml output/ + * + * Features: + * - Automatically maps OData types (Edm.String, Edm.Decimal, etc.) to Zod types + * - Identifies key fields from Key elements and ensures they're non-nullable + * - Marks calculation fields as readOnly + * - Handles nullable fields with .nullable() + * - Extracts FileMaker field IDs (FMFID) and table IDs (FMTID) + * - Smart ID field detection (prioritizes @AutoGenerated fields or fields with "id" in name) + * - Generates navigation relationships via navigationPaths with type-safe string refs + * - Outputs one file per table with dynamic imports based on used field builders + */ + +import { readFile, writeFile, mkdir } from "node:fs/promises"; +import { resolve, join, basename } from "node:path"; +import { XMLParser } from "fast-xml-parser"; + +// Map OData types to field builder functions +function mapODataTypeToFieldBuilder(edmType: string): string { + switch (edmType) { + case "Edm.String": + return "textField()"; + case "Edm.Decimal": + case "Edm.Int32": + case "Edm.Int64": + case "Edm.Double": + return "numberField()"; + case "Edm.Boolean": + return "numberField().outputValidator(z.coerce.boolean())"; + case "Edm.Date": + return "dateField()"; // ISO date string + case "Edm.DateTimeOffset": + return "timestampField()"; // ISO datetime string + case "Edm.Binary": + return "containerField()"; // base64 encoded + default: + return "textField()"; // Default to textField for unknown types + } +} + +interface FieldMetadata { + $Type: string; + $Nullable?: boolean; + "@FieldID": string; + "@Calculation"?: boolean; + "@Global"?: boolean; + "@Org.OData.Core.V1.Permissions"?: string; + $DefaultValue?: string; + "@AutoGenerated"?: boolean; + "@Index"?: boolean; + "@VersionID"?: boolean; +} + +interface NavigationProperty { + Name: string; + Type: string; // e.g., "Collection(com.filemaker.odata.WebData.fmp12.Work_Orders_)" +} + +interface EntityType { + Name: string; + "@TableID": string; + $Key?: string[]; + Properties: Map; + NavigationProperties: NavigationProperty[]; +} + +interface EntitySet { + Name: string; + EntityType: string; // Full type name like "com.filemaker.odata.WebData.fmp12.Addresses_" +} + +interface GeneratedTO { + varName: string; + code: string; + navigation: string[]; // Array of target TO names + usedFieldBuilders: Set; // Track which field builders are used + needsZod: boolean; // Whether z.coerce.boolean() is used +} + +function extractEntityTypeNameFromType(typeString: string): string | null { + // Extract entity type name from Type like "Collection(com.filemaker.odata.WebData.fmp12.Work_Orders_)" + // Returns "Work_Orders_" + // Pattern: Collection(namespace.EntityTypeName) -> extract EntityTypeName + const collectionMatch = typeString.match(/Collection\(([^)]+)\)/); + if (collectionMatch) { + const fullType = collectionMatch[1]; + // Extract the last part after the last dot (e.g., "com.filemaker.odata.WebData.fmp12.Work_Orders_" -> "Work_Orders_") + const parts = fullType.split("."); + return parts[parts.length - 1] || null; + } + // Try without Collection wrapper - extract last part after last dot + const parts = typeString.split("."); + return parts.length > 0 ? parts[parts.length - 1] : null; +} + +function generateTableOccurrence( + entitySetName: string, + entityType: EntityType, + entityTypeToSetMap: Map, +): GeneratedTO { + const fmtId = entityType["@TableID"]; + const keyFields = entityType.$Key || []; + const fields = entityType.Properties; + const readOnlyFields: string[] = []; + const navigationTargets: string[] = []; + const usedFieldBuilders = new Set(); + let needsZod = false; + + // Process navigation properties + for (const navProp of entityType.NavigationProperties) { + const targetEntityTypeName = extractEntityTypeNameFromType(navProp.Type); + if (targetEntityTypeName) { + const targetEntitySet = entityTypeToSetMap.get(targetEntityTypeName); + if (targetEntitySet) { + navigationTargets.push(targetEntitySet); + } + } + } + + // Determine read-only fields + for (const [fieldName, metadata] of fields.entries()) { + if ( + metadata["@Calculation"] || + metadata["@Global"] || + metadata["@Org.OData.Core.V1.Permissions"]?.includes("Read") + ) { + readOnlyFields.push(fieldName); + } + } + + // Determine the id field + let idField: string; + if (keyFields.length > 0) { + // Use the first key field + idField = keyFields[0]; + } else { + // Find a suitable ID field: look for auto-generated fields or fields with "id" in the name + const fieldNames = Array.from(fields.keys()); + const autoGenField = fieldNames.find( + (name) => fields.get(name)?.["@AutoGenerated"], + ); + const idFieldName = fieldNames.find( + (name) => + name.toLowerCase().includes("_id") || + name.toLowerCase().endsWith("id") || + name.toLowerCase() === "id", + ); + idField = autoGenField || idFieldName || fieldNames[0]; + } + + // Generate field builder definitions + const fieldLines: string[] = []; + const fieldEntries = Array.from(fields.entries()); + for (let i = 0; i < fieldEntries.length; i++) { + const [fieldName, metadata] = fieldEntries[i]; + const fieldBuilder = mapODataTypeToFieldBuilder(metadata.$Type); + + // Track which field builders are used + if (fieldBuilder.includes("textField()")) { + usedFieldBuilders.add("textField"); + } else if (fieldBuilder.includes("numberField()")) { + usedFieldBuilders.add("numberField"); + } else if (fieldBuilder.includes("dateField()")) { + usedFieldBuilders.add("dateField"); + } else if (fieldBuilder.includes("timestampField()")) { + usedFieldBuilders.add("timestampField"); + } else if (fieldBuilder.includes("containerField()")) { + usedFieldBuilders.add("containerField"); + } + + // Track if z.coerce.boolean() is used + if (fieldBuilder.includes("z.coerce.boolean()")) { + needsZod = true; + } + + const isKeyField = keyFields.includes(fieldName); + // Only add .notNull() if explicitly marked as Nullable="false" in XML + // metadata.$Nullable is false only if Nullable="false" was in XML, otherwise it's true (nullable by default) + const isExplicitlyNotNullable = metadata.$Nullable === false; + const isReadOnly = readOnlyFields.includes(fieldName); + const isLastField = i === fieldEntries.length - 1; + + let line = ` ${JSON.stringify(fieldName)}: ${fieldBuilder}`; + + // Chain methods: primaryKey, readOnly, notNull, entityId + if (isKeyField) { + line += ".primaryKey()"; + } + if (isReadOnly) { + line += ".readOnly()"; + } + // Only add .notNull() if explicitly marked as Nullable="false" in XML + // Key fields are handled by primaryKey() which already makes them not null + if (isExplicitlyNotNullable && !isKeyField) { + line += ".notNull()"; + } + if (metadata["@FieldID"]) { + line += `.entityId(${JSON.stringify(metadata["@FieldID"])})`; + } + + // Add comma if not the last field + if (!isLastField) { + line += ","; + } + + fieldLines.push(line); + } + + const varName = entitySetName.replace(/[^a-zA-Z0-9_]/g, "_"); + + // Build options object + const optionsParts: string[] = []; + if (fmtId) { + optionsParts.push(`entityId: ${JSON.stringify(fmtId)}`); + } + // Always include navigationPaths, even if empty + const navPaths = navigationTargets.map((n) => JSON.stringify(n)).join(", "); + optionsParts.push(`navigationPaths: [${navPaths}]`); + + const optionsSection = + optionsParts.length > 0 + ? `, {\n${optionsParts.map((p) => ` ${p}`).join(",\n")}\n}` + : ""; + + const code = `export const ${varName} = fmTableOccurrence(${JSON.stringify(entitySetName)}, { +${fieldLines.join("\n")} +}${optionsSection});`; + + return { + varName, + code, + navigation: navigationTargets, + usedFieldBuilders, + needsZod, + }; +} + +function ensureArray(value: T | T[] | undefined): T[] { + if (!value) return []; + return Array.isArray(value) ? value : [value]; +} + +async function parseXMLMetadata(xmlContent: string): Promise<{ + entityTypes: Map; + entitySets: Map; + namespace: string; +}> { + const entityTypes = new Map(); + const entitySets = new Map(); + let namespace = ""; + + // Parse XML using fast-xml-parser + const parser = new XMLParser({ + ignoreAttributes: false, + attributeNamePrefix: "@_", + textNodeName: "#text", + parseAttributeValue: true, + trimValues: true, + parseTrueNumberOnly: false, + arrayMode: false, + }); + + const parsed = parser.parse(xmlContent); + + // Navigate to Schema element + const edmx = parsed["edmx:Edmx"] || parsed.Edmx; + if (!edmx) { + throw new Error("No Edmx element found in XML"); + } + + const dataServices = edmx["edmx:DataServices"] || edmx.DataServices; + if (!dataServices) { + throw new Error("No DataServices element found in XML"); + } + + const schema = ensureArray(dataServices.Schema)[0]; + if (!schema) { + throw new Error("No Schema element found in XML"); + } + + namespace = schema["@_Namespace"] || schema.Namespace || ""; + + // Extract EntityTypes + const entityTypeList = ensureArray(schema.EntityType); + for (const entityTypeEl of entityTypeList) { + const entityTypeName = entityTypeEl["@_Name"] || entityTypeEl.Name; + if (!entityTypeName) continue; + + // Get TableID from Annotation + let tableId = ""; + const annotations = ensureArray(entityTypeEl.Annotation); + for (const ann of annotations) { + const term = ann["@_Term"] || ann.Term; + if (term === "com.filemaker.odata.TableID") { + tableId = ann["@_String"] || ann.String || ""; + break; + } + } + + // Get Key fields + const keyFields: string[] = []; + if (entityTypeEl.Key) { + const propertyRefs = ensureArray(entityTypeEl.Key.PropertyRef); + for (const propRef of propertyRefs) { + const name = propRef["@_Name"] || propRef.Name; + if (name) keyFields.push(name); + } + } + + // Extract Properties + const properties = new Map(); + const propertyList = ensureArray(entityTypeEl.Property); + for (const propEl of propertyList) { + const propName = propEl["@_Name"] || propEl.Name; + if (!propName) continue; + + const propType = propEl["@_Type"] || propEl.Type || ""; + // Nullable is false only if explicitly set to "false", otherwise assume nullable + // The parser converts "false" to boolean false, so check for both + const nullableAttr = propEl["@_Nullable"] ?? propEl.Nullable; + const isExplicitlyNotNullable = + nullableAttr === "false" || nullableAttr === false; + const defaultValue = + propEl["@_DefaultValue"] || propEl.DefaultValue || undefined; + + // Get annotations + let fieldId = ""; + let isCalculation = false; + let isGlobal = false; + let isAutoGenerated = false; + let hasIndex = false; + let isVersionId = false; + let permissions: string | undefined; + + const propAnnotations = ensureArray(propEl.Annotation); + for (const ann of propAnnotations) { + const term = ann["@_Term"] || ann.Term; + if (term === "com.filemaker.odata.FieldID") { + fieldId = ann["@_String"] || ann.String || ""; + } else if (term === "com.filemaker.odata.Calculation") { + isCalculation = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.Global") { + isGlobal = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.AutoGenerated") { + isAutoGenerated = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.Index") { + hasIndex = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.VersionID") { + isVersionId = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "Org.OData.Core.V1.Permissions") { + const enumMember = ann.EnumMember; + if (enumMember) { + permissions = + typeof enumMember === "string" + ? enumMember + : enumMember["#text"] || undefined; + } + } + } + + properties.set(propName, { + $Type: propType, + $Nullable: !isExplicitlyNotNullable, // true if not explicitly set to false + "@FieldID": fieldId, + "@Calculation": isCalculation, + "@Global": isGlobal, + "@Org.OData.Core.V1.Permissions": permissions, + $DefaultValue: defaultValue, + "@AutoGenerated": isAutoGenerated, + "@Index": hasIndex, + "@VersionID": isVersionId, + }); + } + + // Extract NavigationProperties + const navigationProperties: NavigationProperty[] = []; + if (entityTypeEl.NavigationProperty) { + const navPropList = ensureArray(entityTypeEl.NavigationProperty); + for (const navPropEl of navPropList) { + const navName = navPropEl["@_Name"] || navPropEl.Name; + const navType = navPropEl["@_Type"] || navPropEl.Type; + if (navName && navType) { + navigationProperties.push({ + Name: navName, + Type: navType, + }); + } + } + } + + entityTypes.set(entityTypeName, { + Name: entityTypeName, + "@TableID": tableId, + $Key: keyFields, + Properties: properties, + NavigationProperties: navigationProperties, + }); + } + + // Extract EntitySets from EntityContainer + const entityContainer = ensureArray(schema.EntityContainer)[0]; + if (entityContainer) { + const entitySetList = ensureArray(entityContainer.EntitySet); + for (const entitySetEl of entitySetList) { + const setName = entitySetEl["@_Name"] || entitySetEl.Name; + const entityType = entitySetEl["@_EntityType"] || entitySetEl.EntityType; + if (setName && entityType) { + // Extract just the entity type name from the full type string + // e.g., "com.filemaker.odata.WebData.fmp12.Addresses_" -> "Addresses_" + const typeNameMatch = entityType.match(/\.([^.]+)$/); + const entityTypeName = typeNameMatch ? typeNameMatch[1] : entityType; + + entitySets.set(setName, { + Name: setName, + EntityType: entityTypeName, + }); + } + } + } + + return { entityTypes, entitySets, namespace }; +} + +function generateImports( + usedFieldBuilders: Set, + needsZod: boolean, +): string { + const fieldBuilderImports: string[] = []; + + // Always need fmTableOccurrence + fieldBuilderImports.push("fmTableOccurrence"); + + // Add only the field builders that are actually used + if (usedFieldBuilders.has("textField")) { + fieldBuilderImports.push("textField"); + } + if (usedFieldBuilders.has("numberField")) { + fieldBuilderImports.push("numberField"); + } + if (usedFieldBuilders.has("dateField")) { + fieldBuilderImports.push("dateField"); + } + if (usedFieldBuilders.has("timestampField")) { + fieldBuilderImports.push("timestampField"); + } + if (usedFieldBuilders.has("containerField")) { + fieldBuilderImports.push("containerField"); + } + + const imports = [ + `import { ${fieldBuilderImports.join(", ")} } from "@proofkit/fmodata"`, + ]; + + if (needsZod) { + imports.push(`import { z } from "zod/v4"`); + } + + return imports.join(";\n") + ";\n"; +} + +function sanitizeFileName(name: string): string { + // Convert to a safe filename + return name.replace(/[^a-zA-Z0-9_]/g, "_"); +} + +async function generateFromMetadata( + inputPath: string, + outputFolder: string, +): Promise { + console.log(`Reading metadata from: ${inputPath}`); + + // Read and parse the metadata XML + const xmlContent = await readFile(inputPath, "utf-8"); + const { entityTypes, entitySets, namespace } = + await parseXMLMetadata(xmlContent); + + // Build a map from entity type name to entity set name + const entityTypeToSetMap = new Map(); + for (const [entitySetName, entitySet] of entitySets.entries()) { + entityTypeToSetMap.set(entitySet.EntityType, entitySetName); + } + + // Generate table occurrences for entity sets + const generatedTOs: GeneratedTO[] = []; + + console.log(`\nFound ${entitySets.size} entity sets:`); + for (const [entitySetName, entitySet] of entitySets.entries()) { + const entityType = entityTypes.get(entitySet.EntityType); + if (entityType) { + const generated = generateTableOccurrence( + entitySetName, + entityType, + entityTypeToSetMap, + ); + + const navInfo = + generated.navigation.length > 0 + ? ` [nav: ${generated.navigation.join(", ")}]` + : ""; + console.log(` - ${entitySetName} (${entitySet.EntityType})${navInfo}`); + + generatedTOs.push(generated); + } + } + + // Create output directory + console.log(`\nCreating output directory: ${outputFolder}`); + await mkdir(outputFolder, { recursive: true }); + + // Generate one file per table occurrence + const exportStatements: string[] = []; + + for (const generated of generatedTOs) { + const fileName = `${sanitizeFileName(generated.varName)}.ts`; + const filePath = join(outputFolder, fileName); + + // Generate imports based on what's actually used in this file + const imports = generateImports( + generated.usedFieldBuilders, + generated.needsZod, + ); + + const fileContent = `${imports} +// ============================================================================ +// Table Occurrence: ${generated.varName} +// ============================================================================ + +${generated.code} +`; + + await writeFile(filePath, fileContent, "utf-8"); + console.log(` ✓ Generated ${fileName}`); + + // Collect export statement for index file + exportStatements.push( + `export { ${generated.varName} } from "./${sanitizeFileName(generated.varName)}";`, + ); + } + + // Generate index.ts file that exports all table occurrences + const indexContent = `// ============================================================================ +// Auto-generated index file - exports all table occurrences +// ============================================================================ + +${exportStatements.join("\n")} +`; + + const indexPath = join(outputFolder, "index.ts"); + await writeFile(indexPath, indexContent, "utf-8"); + console.log(` ✓ Generated index.ts`); + + console.log( + `\n✓ Generation complete! Generated ${generatedTOs.length} table occurrence files.`, + ); +} + +// Main execution +const args = process.argv.slice(2); + +if (args.length < 2) { + console.error("Usage: bun typegen-starter.ts "); + console.error("\nExample: bun typegen-starter.ts metadata.xml output/"); + process.exit(1); +} + +const inputPath = resolve(args[0]); +const outputFolder = resolve(args[1]); + +generateFromMetadata(inputPath, outputFolder).catch((error) => { + console.error("Error:", error); + process.exit(1); +}); diff --git a/packages/fmodata/src/client/batch-builder.ts b/packages/fmodata/src/client/batch-builder.ts new file mode 100644 index 00000000..8446f9bc --- /dev/null +++ b/packages/fmodata/src/client/batch-builder.ts @@ -0,0 +1,334 @@ +import type { + ExecutableBuilder, + ExecutionContext, + Result, + ExecuteOptions, + BatchResult, + BatchItemResult, + ExecuteMethodOptions, +} from "../types"; +import { BatchTruncatedError } from "../errors"; +import { type FFetchOptions } from "@fetchkit/ffetch"; +import { + formatBatchRequestFromNative, + parseBatchResponse, + type ParsedBatchResponse, +} from "./batch-request"; + +/** + * Helper type to extract result types from a tuple of ExecutableBuilders. + * Uses a mapped type which TypeScript 4.1+ can handle for tuples. + */ +type ExtractTupleTypes[]> = { + [K in keyof T]: T[K] extends ExecutableBuilder ? U : never; +}; + +/** + * Converts a ParsedBatchResponse to a native Response object + * @param parsed - The parsed batch response + * @returns A native Response object + */ +function parsedToResponse(parsed: ParsedBatchResponse): Response { + const headers = new Headers(parsed.headers); + + // Handle null body + if (parsed.body === null || parsed.body === undefined) { + return new Response(null, { + status: parsed.status, + statusText: parsed.statusText, + headers, + }); + } + + // Convert body to string if it's not already + const bodyString = + typeof parsed.body === "string" ? parsed.body : JSON.stringify(parsed.body); + + // Handle 204 No Content status - it cannot have a body per HTTP spec + // If FileMaker returns 204 with a body, treat it as 200 + let status = parsed.status; + if (status === 204 && bodyString && bodyString.trim() !== "") { + status = 200; + } + + return new Response(status === 204 ? null : bodyString, { + status: status, + statusText: parsed.statusText, + headers, + }); +} + +/** + * Builder for batch operations that allows multiple queries to be executed together + * in a single transactional request. + * + * Note: BatchBuilder does not implement ExecutableBuilder because execute() returns + * BatchResult instead of Result, which is a different return type structure. + */ +export class BatchBuilder[]> { + private builders: ExecutableBuilder[]; + private readonly originalBuilders: Builders; + + constructor( + builders: Builders, + private readonly databaseName: string, + private readonly context: ExecutionContext, + ) { + // Convert readonly tuple to mutable array for dynamic additions + this.builders = [...builders]; + // Store original tuple for type preservation + this.originalBuilders = builders; + } + + /** + * Add a request to the batch dynamically. + * This allows building up batch operations programmatically. + * + * @param builder - An executable builder to add to the batch + * @returns This BatchBuilder for method chaining + * @example + * ```ts + * const batch = db.batch([]); + * batch.addRequest(db.from('contacts').list()); + * batch.addRequest(db.from('users').list()); + * const result = await batch.execute(); + * ``` + */ + addRequest(builder: ExecutableBuilder): this { + this.builders.push(builder); + return this; + } + + /** + * Get the request configuration for this batch operation. + * This is used internally by the execution system. + */ + getRequestConfig(): { method: string; url: string; body?: any } { + // Note: This method is kept for compatibility but batch operations + // should use execute() directly which handles the full Request/Response flow + return { + method: "POST", + url: `/${this.databaseName}/$batch`, + body: undefined, // Body is constructed in execute() + }; + } + + toRequest(baseUrl: string, options?: ExecuteOptions): Request { + // Batch operations are not designed to be nested, but we provide + // a basic implementation for interface compliance + const fullUrl = `${baseUrl}/${this.databaseName}/$batch`; + return new Request(fullUrl, { + method: "POST", + headers: { + "Content-Type": "multipart/mixed", + "OData-Version": "4.0", + }, + }); + } + + async processResponse( + response: Response, + options?: ExecuteOptions, + ): Promise> { + // This should not typically be called for batch operations + // as they handle their own response processing + return { + data: undefined, + error: { + name: "NotImplementedError", + message: "Batch operations handle response processing internally", + timestamp: new Date(), + } as any, + }; + } + + /** + * Execute the batch operation. + * + * @param options - Optional fetch options and batch-specific options (includes beforeRequest hook) + * @returns A BatchResult containing individual results for each operation + */ + async execute( + options?: ExecuteMethodOptions, + ): Promise>> { + const baseUrl = this.context._getBaseUrl?.(); + if (!baseUrl) { + // Return BatchResult with all operations marked as failed + const errorCount = this.builders.length; + const results: BatchItemResult[] = this.builders.map((_, i) => ({ + data: undefined, + error: { + name: "ConfigurationError", + message: + "Base URL not available - execution context must implement _getBaseUrl()", + timestamp: new Date(), + } as any, + status: 0, + })); + + return { + results: results as any, + successCount: 0, + errorCount, + truncated: false, + firstErrorIndex: 0, + }; + } + + try { + // Convert builders to native Request objects + const requests: Request[] = this.builders.map((builder) => + builder.toRequest(baseUrl, options), + ); + + // Format batch request (automatically groups mutations into changesets) + const { body, boundary } = await formatBatchRequestFromNative( + requests, + baseUrl, + ); + + // Execute the batch request + const response = await this.context._makeRequest( + `/${this.databaseName}/$batch`, + { + ...options, + method: "POST", + headers: { + ...options?.headers, + "Content-Type": `multipart/mixed; boundary=${boundary}`, + "OData-Version": "4.0", + }, + body, + }, + ); + + if (response.error) { + // Return BatchResult with all operations marked as failed + const errorCount = this.builders.length; + const results: BatchItemResult[] = this.builders.map((_, i) => ({ + data: undefined, + error: response.error, + status: 0, + })); + + return { + results: results as any, + successCount: 0, + errorCount, + truncated: false, + firstErrorIndex: 0, + }; + } + + // Extract the actual boundary from the response + // FileMaker uses its own boundary, not the one we sent + const firstLine = + response.data.split("\r\n")[0] || response.data.split("\n")[0] || ""; + const actualBoundary = firstLine.startsWith("--") + ? firstLine.substring(2) + : boundary; + + // Parse the multipart response + const contentTypeHeader = `multipart/mixed; boundary=${actualBoundary}`; + const parsedResponses = parseBatchResponse( + response.data, + contentTypeHeader, + ); + + // Process each response using the corresponding builder + // Build BatchResult with per-item results + type ResultTuple = ExtractTupleTypes; + + const results: BatchItemResult[] = []; + let successCount = 0; + let errorCount = 0; + let firstErrorIndex: number | null = null; + const truncated = parsedResponses.length < this.builders.length; + + // Process builders sequentially to preserve tuple order + for (let i = 0; i < this.builders.length; i++) { + const builder = this.builders[i]; + const parsed = parsedResponses[i]; + + if (!parsed) { + // Truncated - operation never executed + const failedAtIndex = firstErrorIndex ?? i; + results.push({ + data: undefined, + error: new BatchTruncatedError(i, failedAtIndex), + status: 0, + }); + errorCount++; + continue; + } + + if (!builder) { + // Should not happen, but handle gracefully + results.push({ + data: undefined, + error: { + name: "BatchError", + message: `Builder at index ${i} is undefined`, + timestamp: new Date(), + } as any, + status: parsed.status, + }); + errorCount++; + if (firstErrorIndex === null) firstErrorIndex = i; + continue; + } + + // Convert parsed response to native Response + const nativeResponse = parsedToResponse(parsed); + + // Let the builder process its own response + const result = await builder.processResponse(nativeResponse, options); + + if (result.error) { + results.push({ + data: undefined, + error: result.error, + status: parsed.status, + }); + errorCount++; + if (firstErrorIndex === null) firstErrorIndex = i; + } else { + results.push({ + data: result.data, + error: undefined, + status: parsed.status, + }); + successCount++; + } + } + + return { + results: results as any, + successCount, + errorCount, + truncated, + firstErrorIndex, + }; + } catch (err) { + // On exception, return a BatchResult with all operations marked as failed + const errorCount = this.builders.length; + const results: BatchItemResult[] = this.builders.map((_, i) => ({ + data: undefined, + error: { + name: "BatchError", + message: err instanceof Error ? err.message : "Unknown error", + timestamp: new Date(), + } as any, + status: 0, + })); + + return { + results: results as any, + successCount: 0, + errorCount, + truncated: false, + firstErrorIndex: 0, + }; + } + } +} diff --git a/packages/fmodata/src/client/batch-request.ts b/packages/fmodata/src/client/batch-request.ts new file mode 100644 index 00000000..d82e868b --- /dev/null +++ b/packages/fmodata/src/client/batch-request.ts @@ -0,0 +1,485 @@ +/** + * Batch Request Utilities + * + * Utilities for formatting and parsing OData batch requests using multipart/mixed format. + * OData batch requests allow bundling multiple operations into a single HTTP request, + * with support for transactional changesets. + */ + +export interface RequestConfig { + method: string; + url: string; + body?: string; + headers?: Record; +} + +export interface ParsedBatchResponse { + status: number; + statusText: string; + headers: Record; + body: any; +} + +/** + * Generates a random boundary string for multipart requests + * @param prefix - Prefix for the boundary (e.g., "batch_" or "changeset_") + * @returns A boundary string with the prefix and 32 random hex characters + */ +export function generateBoundary(prefix: string = "batch_"): string { + const randomHex = Array.from({ length: 32 }, () => + Math.floor(Math.random() * 16).toString(16), + ).join(""); + return `${prefix}${randomHex}`; +} + +/** + * Converts a native Request object to RequestConfig + * @param request - Native Request object + * @returns RequestConfig object + */ +async function requestToConfig(request: Request): Promise { + const headers: Record = {}; + request.headers.forEach((value, key) => { + headers[key] = value; + }); + + let body: string | undefined; + if (request.body) { + // Clone the request to read the body without consuming it + const clonedRequest = request.clone(); + body = await clonedRequest.text(); + } + + return { + method: request.method, + url: request.url, + body, + headers, + }; +} + +/** + * Formats a single HTTP request for inclusion in a batch + * @param request - The request configuration + * @param baseUrl - The base URL to prepend to relative URLs + * @returns Formatted request string with CRLF line endings + * + * Formatting rules for FileMaker OData: + * - GET (no body): request line → blank → blank + * - POST/PATCH (with body): request line → headers → blank → body (NO blank after!) + */ +function formatSubRequest(request: RequestConfig, baseUrl: string): string { + const lines: string[] = []; + + // Add required headers for sub-request + lines.push("Content-Type: application/http"); + lines.push("Content-Transfer-Encoding: binary"); + lines.push(""); // Empty line after multipart headers + + // Construct full URL (convert relative to absolute) + const fullUrl = request.url.startsWith("http") + ? request.url + : `${baseUrl}${request.url}`; + + // Add HTTP request line + lines.push(`${request.method} ${fullUrl} HTTP/1.1`); + + // For requests with body, add headers + if (request.body) { + // Add request headers (excluding Authorization - it's in the outer request) + if (request.headers) { + for (const [key, value] of Object.entries(request.headers)) { + if (key.toLowerCase() !== "authorization") { + lines.push(`${key}: ${value}`); + } + } + } + + // Check if Content-Type is already set + const hasContentType = + request.headers && + Object.keys(request.headers).some( + (k) => k.toLowerCase() === "content-type", + ); + + if (!hasContentType) { + lines.push("Content-Type: application/json"); + } + + // Add Content-Length (required for FileMaker to read the body) + const hasContentLength = + request.headers && + Object.keys(request.headers).some( + (k) => k.toLowerCase() === "content-length", + ); + + if (!hasContentLength) { + lines.push(`Content-Length: ${request.body.length}`); + } + + lines.push(""); // Empty line between headers and body + lines.push(request.body); + // NO blank line after body - the boundary comes immediately + } else { + // For GET requests (no body), add TWO blank lines + lines.push(""); // First blank + lines.push(""); // Second blank + } + + return lines.join("\r\n"); +} + +/** + * Formats a changeset containing multiple non-GET operations + * @param requests - Array of request configurations (should be non-GET) + * @param baseUrl - The base URL to prepend to relative URLs + * @param changesetBoundary - Boundary string for the changeset + * @returns Formatted changeset string with CRLF line endings + */ +function formatChangeset( + requests: RequestConfig[], + baseUrl: string, + changesetBoundary: string, +): string { + const lines: string[] = []; + + lines.push(`Content-Type: multipart/mixed; boundary=${changesetBoundary}`); + lines.push(""); // Empty line after headers + + // Add each request in the changeset + for (const request of requests) { + lines.push(`--${changesetBoundary}`); + lines.push(formatSubRequest(request, baseUrl)); + } + + // Close the changeset + lines.push(`--${changesetBoundary}--`); + + return lines.join("\r\n"); +} + +/** + * Formats multiple requests into a batch request body + * @param requests - Array of request configurations + * @param baseUrl - The base URL to prepend to relative URLs + * @param batchBoundary - Optional boundary string for the batch (generated if not provided) + * @returns Object containing the formatted body and boundary + */ +export function formatBatchRequest( + requests: RequestConfig[], + baseUrl: string, + batchBoundary?: string, +): { body: string; boundary: string } { + const boundary = batchBoundary || generateBoundary("batch_"); + const lines: string[] = []; + + // Group requests: consecutive non-GET operations go into changesets + let currentChangeset: RequestConfig[] | null = null; + + for (const request of requests) { + if (request.method === "GET") { + // GET operations break changesets and are added individually + if (currentChangeset) { + // Close and add the current changeset + const changesetBoundary = generateBoundary("changeset_"); + lines.push(`--${boundary}`); + lines.push( + formatChangeset(currentChangeset, baseUrl, changesetBoundary), + ); + currentChangeset = null; + } + + // Add GET request + lines.push(`--${boundary}`); + lines.push(formatSubRequest(request, baseUrl)); + } else { + // Non-GET operations: add to current changeset or create new one + if (!currentChangeset) { + currentChangeset = []; + } + currentChangeset.push(request); + } + } + + // Add any remaining changeset + if (currentChangeset) { + const changesetBoundary = generateBoundary("changeset_"); + lines.push(`--${boundary}`); + lines.push(formatChangeset(currentChangeset, baseUrl, changesetBoundary)); + } + + // Close the batch + lines.push(`--${boundary}--`); + + return { + body: lines.join("\r\n"), + boundary, + }; +} + +/** + * Formats multiple Request objects into a batch request body + * Supports explicit changesets via Request arrays + * @param requests - Array of Request objects or Request arrays (for explicit changesets) + * @param baseUrl - The base URL to prepend to relative URLs + * @param batchBoundary - Optional boundary string for the batch (generated if not provided) + * @returns Promise resolving to object containing the formatted body and boundary + */ +export async function formatBatchRequestFromNative( + requests: Array, + baseUrl: string, + batchBoundary?: string, +): Promise<{ body: string; boundary: string }> { + const boundary = batchBoundary || generateBoundary("batch_"); + const lines: string[] = []; + + for (const item of requests) { + if (Array.isArray(item)) { + // Explicit changeset - array of Requests + const changesetBoundary = generateBoundary("changeset_"); + const changesetConfigs: RequestConfig[] = []; + + for (const request of item) { + changesetConfigs.push(await requestToConfig(request)); + } + + lines.push(`--${boundary}`); + lines.push(formatChangeset(changesetConfigs, baseUrl, changesetBoundary)); + } else { + // Single request + const config = await requestToConfig(item); + + if (config.method === "GET") { + // GET requests are always individual + lines.push(`--${boundary}`); + lines.push(formatSubRequest(config, baseUrl)); + } else { + // Non-GET operations wrapped in a changeset + const changesetBoundary = generateBoundary("changeset_"); + lines.push(`--${boundary}`); + lines.push(formatChangeset([config], baseUrl, changesetBoundary)); + } + } + } + + // Close the batch + lines.push(`--${boundary}--`); + + return { + body: lines.join("\r\n"), + boundary, + }; +} + +/** + * Extracts the boundary from a Content-Type header + * @param contentType - The Content-Type header value + * @returns The boundary string, or null if not found + */ +export function extractBoundary(contentType: string): string | null { + const match = contentType.match(/boundary=([^;]+)/); + return match && match[1] ? match[1].trim() : null; +} + +/** + * Parses an HTTP response line (status line) + * @param line - The HTTP status line (e.g., "HTTP/1.1 200 OK") + * @returns Object containing status code and status text + */ +function parseStatusLine(line: string): { + status: number; + statusText: string; +} { + const match = line.match(/HTTP\/\d\.\d\s+(\d+)\s*(.*)/); + if (!match || !match[1]) { + return { status: 0, statusText: "" }; + } + return { + status: parseInt(match[1], 10), + statusText: match[2]?.trim() || "", + }; +} + +/** + * Parses headers from an array of header lines + * @param lines - Array of header lines + * @returns Object containing parsed headers + */ +function parseHeaders(lines: string[]): Record { + const headers: Record = {}; + for (const line of lines) { + const colonIndex = line.indexOf(":"); + if (colonIndex > 0) { + const key = line.substring(0, colonIndex).trim(); + const value = line.substring(colonIndex + 1).trim(); + headers[key.toLowerCase()] = value; + } + } + return headers; +} + +/** + * Parses a single HTTP response from a batch part + * @param part - The raw HTTP response string + * @returns Parsed response object + */ +function parseHttpResponse(part: string): ParsedBatchResponse { + const lines = part.split(/\r\n/); + + // Find the HTTP status line (skip multipart headers) + let statusLineIndex = -1; + for (let i = 0; i < lines.length; i++) { + const line = lines[i]; + if (line && line.startsWith("HTTP/")) { + statusLineIndex = i; + break; + } + } + + if (statusLineIndex === -1) { + return { + status: 0, + statusText: "Invalid response", + headers: {}, + body: null, + }; + } + + const statusLine = lines[statusLineIndex]; + if (!statusLine) { + return { + status: 0, + statusText: "Invalid response", + headers: {}, + body: null, + }; + } + + const { status, statusText } = parseStatusLine(statusLine); + + // Parse headers (between status line and empty line) + const headerLines: string[] = []; + let bodyStartIndex = lines.length; // Default to end of lines (no body) + let foundEmptyLine = false; + + for (let i = statusLineIndex + 1; i < lines.length; i++) { + const line = lines[i]; + if (line === "") { + bodyStartIndex = i + 1; + foundEmptyLine = true; + break; + } + // Stop at boundary markers (for responses without bodies like 204) + if (line && line.startsWith("--")) { + break; + } + if (line) { + headerLines.push(line); + } + } + + const headers = parseHeaders(headerLines); + + // Parse body (everything after the empty line, if there was one) + let bodyText = ""; + if (foundEmptyLine && bodyStartIndex < lines.length) { + const bodyLines = lines.slice(bodyStartIndex); + // Stop at boundary markers + const bodyLinesFiltered: string[] = []; + for (const line of bodyLines) { + if (line.startsWith("--")) { + break; + } + bodyLinesFiltered.push(line); + } + bodyText = bodyLinesFiltered.join("\r\n").trim(); + } + + let body: any = null; + if (bodyText) { + try { + body = JSON.parse(bodyText); + } catch { + // If not JSON, return as text + body = bodyText; + } + } + + return { + status, + statusText, + headers, + body, + }; +} + +/** + * Parses a batch response into individual responses + * @param responseText - The raw batch response text + * @param contentType - The Content-Type header from the response + * @returns Array of parsed responses in the same order as the request + */ +export function parseBatchResponse( + responseText: string, + contentType: string, +): ParsedBatchResponse[] { + const boundary = extractBoundary(contentType); + if (!boundary) { + throw new Error("Could not extract boundary from Content-Type header"); + } + + const results: ParsedBatchResponse[] = []; + + // Split by boundary (handle both --boundary and --boundary--) + const boundaryPattern = `--${boundary}`; + const parts = responseText.split(boundaryPattern); + + for (const part of parts) { + const trimmedPart = part.trim(); + + // Skip empty parts and the closing boundary marker + if (!trimmedPart || trimmedPart === "--") { + continue; + } + + // Check if this part is a changeset (nested multipart) + if (trimmedPart.includes("Content-Type: multipart/mixed")) { + // Extract the changeset boundary + const changesetContentTypeMatch = trimmedPart.match( + /Content-Type: multipart\/mixed;\s*boundary=([^\r\n]+)/, + ); + if (changesetContentTypeMatch) { + const changesetBoundary = changesetContentTypeMatch?.[1]?.trim(); + const changesetPattern = `--${changesetBoundary}`; + const changesetParts = trimmedPart.split(changesetPattern); + + for (const changesetPart of changesetParts) { + const trimmedChangesetPart = changesetPart.trim(); + if (!trimmedChangesetPart || trimmedChangesetPart === "--") { + continue; + } + + // Skip the changeset header + if ( + trimmedChangesetPart.startsWith("Content-Type: multipart/mixed") + ) { + continue; + } + + const response = parseHttpResponse(trimmedChangesetPart); + if (response.status > 0) { + results.push(response); + } + } + } + } else { + // Regular response (not a changeset) + const response = parseHttpResponse(trimmedPart); + if (response.status > 0) { + results.push(response); + } + } + } + + return results; +} diff --git a/packages/fmodata/src/client/builders/default-select.ts b/packages/fmodata/src/client/builders/default-select.ts new file mode 100644 index 00000000..0256db05 --- /dev/null +++ b/packages/fmodata/src/client/builders/default-select.ts @@ -0,0 +1,69 @@ +import type { FMTable } from "../../orm/table"; +import { FMTable as FMTableClass } from "../../orm/table"; +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import { getBaseTableConfig } from "../../orm/table"; +import { isColumn } from "../../orm/column"; + +/** + * Helper function to get container field names from a table. + * Container fields cannot be selected via $select in FileMaker OData API. + */ +function getContainerFieldNames(table: FMTable): string[] { + const baseTableConfig = getBaseTableConfig(table); + if (!baseTableConfig || !baseTableConfig.containerFields) { + return []; + } + return baseTableConfig.containerFields as string[]; +} + +/** + * Gets default select fields from a table definition. + * Returns undefined if defaultSelect is "all". + * Automatically filters out container fields since they cannot be selected via $select. + */ +export function getDefaultSelectFields( + table: FMTable | undefined, +): string[] | undefined { + if (!table) return undefined; + + const defaultSelect = (table as any)[FMTableClass.Symbol.DefaultSelect]; + const containerFields = getContainerFieldNames(table); + + if (defaultSelect === "schema") { + const baseTableConfig = getBaseTableConfig(table); + const allFields = Object.keys(baseTableConfig.schema); + // Filter out container fields + return [...new Set(allFields.filter((f) => !containerFields.includes(f)))]; + } + + if (Array.isArray(defaultSelect)) { + // Filter out container fields + return [ + ...new Set(defaultSelect.filter((f) => !containerFields.includes(f))), + ]; + } + + // Check if defaultSelect is a Record (resolved from function) + if ( + typeof defaultSelect === "object" && + defaultSelect !== null && + !Array.isArray(defaultSelect) + ) { + // Extract field names from Column instances + const fieldNames: string[] = []; + for (const value of Object.values(defaultSelect)) { + if (isColumn(value)) { + fieldNames.push(value.fieldName); + } + } + if (fieldNames.length > 0) { + // Filter out container fields + return [ + ...new Set(fieldNames.filter((f) => !containerFields.includes(f))), + ]; + } + } + + // defaultSelect is "all" or undefined + return undefined; +} diff --git a/packages/fmodata/src/client/builders/expand-builder.ts b/packages/fmodata/src/client/builders/expand-builder.ts new file mode 100644 index 00000000..89d5ae20 --- /dev/null +++ b/packages/fmodata/src/client/builders/expand-builder.ts @@ -0,0 +1,245 @@ +import { QueryOptions } from "odata-query"; +import buildQuery from "odata-query"; +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import { FMTable } from "../../orm/table"; +import { + getBaseTableConfig, + getTableName, + getNavigationPaths, +} from "../../orm/table"; +import type { ExpandValidationConfig } from "../../validation"; +import type { ExpandConfig } from "./shared-types"; +import { formatSelectFields } from "./select-utils"; +import { getDefaultSelectFields } from "./default-select"; +import { InternalLogger } from "../../logger"; + +/** + * Builds OData expand query strings and validation configs. + * Handles nested expands recursively and transforms relation names to FMTIDs + * when using entity IDs. + */ +export class ExpandBuilder { + constructor( + private useEntityIds: boolean, + private logger: InternalLogger, + ) {} + + /** + * Builds OData $expand query string from expand configurations. + */ + buildExpandString(configs: ExpandConfig[]): string { + if (configs.length === 0) return ""; + + return configs.map((config) => this.buildSingleExpand(config)).join(","); + } + + /** + * Builds validation configs for expanded navigation properties. + */ + buildValidationConfigs(configs: ExpandConfig[]): ExpandValidationConfig[] { + return configs.map((config) => { + const targetTable = config.targetTable; + + let targetSchema: Record | undefined; + if (targetTable) { + const baseTableConfig = getBaseTableConfig(targetTable); + const containerFields = baseTableConfig.containerFields || []; + + // Filter out container fields from schema + const schema = { ...baseTableConfig.schema }; + for (const containerField of containerFields) { + delete schema[containerField as string]; + } + + targetSchema = schema; + } + + const selectedFields = config.options?.select + ? Array.isArray(config.options.select) + ? config.options.select.map(String) + : [String(config.options.select)] + : undefined; + + // Recursively build validation configs for nested expands + const nestedExpands = config.nestedExpandConfigs + ? this.buildValidationConfigs(config.nestedExpandConfigs) + : undefined; + + return { + relation: config.relation, + targetSchema, + targetTable, + table: targetTable, + selectedFields, + nestedExpands, + }; + }); + } + + /** + * Process an expand() call and return the expand config. + * Used by both QueryBuilder and RecordBuilder to eliminate duplication. + * + * @param targetTable - The target table to expand to + * @param sourceTable - The source table (for validation) + * @param callback - Optional callback to configure the expand query + * @param builderFactory - Function that creates a QueryBuilder for the target table + * @returns ExpandConfig to add to the builder's expandConfigs array + */ + processExpand, Builder = any>( + targetTable: TargetTable, + sourceTable: FMTable | undefined, + callback?: (builder: Builder) => Builder, + builderFactory?: () => Builder, + ): ExpandConfig { + // Extract name and validate + const relationName = getTableName(targetTable); + + // Runtime validation: Check if relation name is in navigationPaths + if (sourceTable) { + const navigationPaths = getNavigationPaths(sourceTable); + if (navigationPaths && !navigationPaths.includes(relationName)) { + this.logger.warn( + `Cannot expand to "${relationName}". Valid navigation paths: ${navigationPaths.length > 0 ? navigationPaths.join(", ") : "none"}`, + ); + } + } + + if (callback && builderFactory) { + // Create a new QueryBuilder for the target table + const targetBuilder = builderFactory(); + + // Pass to callback and get configured builder + const configuredBuilder = callback(targetBuilder); + + // Extract the builder's query options + const expandOptions: Partial> = { + ...(configuredBuilder as any).queryOptions, + }; + + // If callback didn't provide select, apply defaultSelect from target table + if (!expandOptions.select) { + const defaultFields = getDefaultSelectFields(targetTable); + if (defaultFields) { + expandOptions.select = defaultFields; + } + } + + // If the configured builder has nested expands, we need to include them + const nestedExpandConfigs = (configuredBuilder as any).expandConfigs; + if (nestedExpandConfigs?.length > 0) { + // Build nested expand string from the configured builder's expand configs + const nestedExpandString = this.buildExpandString(nestedExpandConfigs); + if (nestedExpandString) { + // Add nested expand to options + expandOptions.expand = nestedExpandString as any; + } + } + + return { + relation: relationName, + options: expandOptions, + targetTable, + nestedExpandConfigs: nestedExpandConfigs?.length > 0 ? nestedExpandConfigs : undefined, + }; + } else { + // Simple expand without callback - apply defaultSelect if available + const defaultFields = getDefaultSelectFields(targetTable); + if (defaultFields) { + return { + relation: relationName, + options: { select: defaultFields }, + targetTable, + }; + } else { + return { + relation: relationName, + targetTable, + }; + } + } + } + + /** + * Builds a single expand string with its options. + */ + private buildSingleExpand(config: ExpandConfig): string { + const relationName = this.resolveRelationName(config); + const parts = this.buildExpandParts(config); + + if (parts.length === 0) { + return relationName; + } + + return `${relationName}(${parts.join(";")})`; + } + + /** + * Resolves relation name, using FMTID if entity IDs are enabled. + */ + private resolveRelationName(config: ExpandConfig): string { + if (!this.useEntityIds) { + return config.relation; + } + + const targetTable = config.targetTable; + if (targetTable && FMTable.Symbol.EntityId in targetTable) { + const tableId = (targetTable as any)[FMTable.Symbol.EntityId] as + | `FMTID:${string}` + | undefined; + if (tableId) { + return tableId; + } + } + + return config.relation; + } + + /** + * Builds expand parts (select, filter, orderBy, etc.) for a single expand. + */ + private buildExpandParts(config: ExpandConfig): string[] { + if (!config.options || Object.keys(config.options).length === 0) { + return []; + } + + const parts: string[] = []; + const opts = config.options; + + if (opts.select) { + const selectArray = Array.isArray(opts.select) + ? opts.select.map(String) + : [String(opts.select)]; + const selectFields = formatSelectFields( + selectArray, + config.targetTable, + this.useEntityIds, + ); + parts.push(`$select=${selectFields}`); + } + + if (opts.filter) { + const filterQuery = buildQuery({ filter: opts.filter }); + const match = filterQuery.match(/\$filter=([^&]+)/); + if (match) parts.push(`$filter=${match[1]}`); + } + + if (opts.orderBy) { + const orderByValue = Array.isArray(opts.orderBy) + ? opts.orderBy.join(",") + : String(opts.orderBy); + parts.push(`$orderby=${orderByValue}`); + } + + if (opts.top !== undefined) parts.push(`$top=${opts.top}`); + if (opts.skip !== undefined) parts.push(`$skip=${opts.skip}`); + + if (opts.expand) { + if (typeof opts.expand === "string") { + parts.push(`$expand=${opts.expand}`); + } + } + + return parts; + } +} diff --git a/packages/fmodata/src/client/builders/index.ts b/packages/fmodata/src/client/builders/index.ts new file mode 100644 index 00000000..385ad688 --- /dev/null +++ b/packages/fmodata/src/client/builders/index.ts @@ -0,0 +1,11 @@ +// Re-export all shared builder utilities +export * from "./shared-types"; +export * from "./table-utils"; +export * from "./select-utils"; +export * from "./select-mixin"; +export * from "./expand-builder"; +export * from "./response-processor"; +export * from "./default-select"; +export * from "./query-string-builder"; + + diff --git a/packages/fmodata/src/client/builders/query-string-builder.ts b/packages/fmodata/src/client/builders/query-string-builder.ts new file mode 100644 index 00000000..a9fb68df --- /dev/null +++ b/packages/fmodata/src/client/builders/query-string-builder.ts @@ -0,0 +1,43 @@ +import type { FMTable } from "../../orm/table"; +import { ExpandBuilder } from "./expand-builder"; +import type { ExpandConfig } from "./shared-types"; +import { formatSelectFields } from "./select-utils"; +import { InternalLogger } from "../../logger"; + +/** + * Builds OData query string for $select and $expand parameters. + * Used by both QueryBuilder and RecordBuilder to eliminate duplication. + * + * @param config - Configuration object + * @returns Query string starting with ? or empty string if no parameters + */ +export function buildSelectExpandQueryString(config: { + selectedFields?: string[]; + expandConfigs: ExpandConfig[]; + table?: FMTable; + useEntityIds: boolean; + logger: InternalLogger; +}): string { + const parts: string[] = []; + const expandBuilder = new ExpandBuilder(config.useEntityIds, config.logger); + + // Build $select + if (config.selectedFields && config.selectedFields.length > 0) { + const selectString = formatSelectFields( + config.selectedFields, + config.table, + config.useEntityIds, + ); + if (selectString) { + parts.push(`$select=${selectString}`); + } + } + + // Build $expand + const expandString = expandBuilder.buildExpandString(config.expandConfigs); + if (expandString) { + parts.push(`$expand=${expandString}`); + } + + return parts.length > 0 ? `?${parts.join("&")}` : ""; +} diff --git a/packages/fmodata/src/client/builders/response-processor.ts b/packages/fmodata/src/client/builders/response-processor.ts new file mode 100644 index 00000000..783b1a72 --- /dev/null +++ b/packages/fmodata/src/client/builders/response-processor.ts @@ -0,0 +1,276 @@ +import type { FMTable } from "../../orm/table"; +import type { Result } from "../../types"; +import type { ExpandValidationConfig } from "../../validation"; +import { validateSingleResponse, validateListResponse } from "../../validation"; +import { transformResponseFields } from "../../transform"; +import { RecordCountMismatchError } from "../../errors"; +import { getBaseTableConfig } from "../../orm/table"; +import { ExpandBuilder } from "./expand-builder"; +import type { ExpandConfig } from "./shared-types"; +import { InternalLogger } from "../../logger"; + +export interface ProcessResponseConfig { + table?: FMTable; + schema?: Record; + singleMode: "exact" | "maybe" | false; + selectedFields?: string[]; + expandValidationConfigs?: ExpandValidationConfig[]; + skipValidation?: boolean; + useEntityIds?: boolean; + // Mapping from field names to output keys (for renamed fields in select) + fieldMapping?: Record; +} + +/** + * Processes OData response with transformation and validation. + * Shared by QueryBuilder and RecordBuilder. + */ +export async function processODataResponse( + rawResponse: any, + config: ProcessResponseConfig, +): Promise> { + const { + table, + schema, + singleMode, + selectedFields, + expandValidationConfigs, + skipValidation, + useEntityIds, + fieldMapping, + } = config; + + // Transform field IDs back to names if using entity IDs + let response = rawResponse; + if (table && useEntityIds) { + response = transformResponseFields( + response, + table, + expandValidationConfigs, + ); + } + + // Fast path: skip validation + if (skipValidation) { + const result = extractRecords(response, singleMode); + // Rename fields AFTER extraction (but before returning) + if (result.data && fieldMapping && Object.keys(fieldMapping).length > 0) { + if (result.error) { + return { data: undefined, error: result.error } as Result; + } + return { + data: renameFieldsInResponse(result.data, fieldMapping) as T, + error: undefined, + }; + } + return result as Result; + } + + // Validation path + if (singleMode !== false) { + const validation = await validateSingleResponse( + response, + schema, + selectedFields as any, + expandValidationConfigs, + singleMode, + ); + + if (!validation.valid) { + return { data: undefined, error: validation.error }; + } + + // Rename fields AFTER validation completes + if (fieldMapping && Object.keys(fieldMapping).length > 0) { + return { + data: renameFieldsInResponse(validation.data, fieldMapping) as T, + error: undefined, + }; + } + + return { data: validation.data as T, error: undefined }; + } + + const validation = await validateListResponse( + response, + schema, + selectedFields as any, + expandValidationConfigs, + ); + + if (!validation.valid) { + return { data: undefined, error: validation.error }; + } + + // Rename fields AFTER validation completes + if (fieldMapping && Object.keys(fieldMapping).length > 0) { + return { + data: renameFieldsInResponse(validation.data, fieldMapping) as T, + error: undefined, + }; + } + + return { data: validation.data as T, error: undefined }; +} + +/** + * Extracts records from response without validation. + */ +function extractRecords( + response: any, + singleMode: "exact" | "maybe" | false, +): Result { + if (singleMode === false) { + const records = response.value ?? []; + return { data: records as T, error: undefined }; + } + + const records = response.value ?? [response]; + const count = Array.isArray(records) ? records.length : 1; + + if (count > 1) { + return { + data: undefined, + error: new RecordCountMismatchError( + singleMode === "exact" ? "one" : "at-most-one", + count, + ), + }; + } + + if (count === 0) { + if (singleMode === "exact") { + return { data: undefined, error: new RecordCountMismatchError("one", 0) }; + } + return { data: null as T, error: undefined }; + } + + const record = Array.isArray(records) ? records[0] : records; + return { data: record as T, error: undefined }; +} + +/** + * Gets schema from a table occurrence, excluding container fields. + * Container fields are never returned in regular responses (only via getSingleField). + */ +export function getSchemaFromTable( + table: FMTable | undefined, +): Record | undefined { + if (!table) return undefined; + const baseTableConfig = getBaseTableConfig(table); + const containerFields = baseTableConfig.containerFields || []; + + // Filter out container fields from schema + const schema = { ...baseTableConfig.schema }; + for (const containerField of containerFields) { + delete schema[containerField as string]; + } + + return schema; +} + +/** + * Renames fields in response data according to the field mapping. + * Used when select() is called with renamed fields (e.g., { userEmail: users.email }). + */ +function renameFieldsInResponse( + data: any, + fieldMapping: Record, +): any { + if (!data || typeof data !== "object") { + return data; + } + + // Handle array responses + if (Array.isArray(data)) { + return data.map((item) => renameFieldsInResponse(item, fieldMapping)); + } + + // Handle OData list response structure + if ("value" in data && Array.isArray(data.value)) { + return { + ...data, + value: data.value.map((item: any) => + renameFieldsInResponse(item, fieldMapping), + ), + }; + } + + // Handle single record + const renamed: Record = {}; + for (const [key, value] of Object.entries(data)) { + // Check if this field should be renamed + const outputKey = fieldMapping[key]; + if (outputKey) { + renamed[outputKey] = value; + } else { + renamed[key] = value; + } + } + return renamed; +} + +/** + * Processes query response with expand configs. + * This is a convenience wrapper that builds validation configs from expand configs. + */ +export async function processQueryResponse( + response: any, + config: { + occurrence?: FMTable; + singleMode: "exact" | "maybe" | false; + queryOptions: { select?: (keyof T)[] | string[] }; + expandConfigs: ExpandConfig[]; + skipValidation?: boolean; + useEntityIds?: boolean; + // Mapping from field names to output keys (for renamed fields in select) + fieldMapping?: Record; + logger: InternalLogger; + }, +): Promise> { + const { + occurrence, + singleMode, + queryOptions, + expandConfigs, + skipValidation, + useEntityIds, + fieldMapping, + logger, + } = config; + + const expandBuilder = new ExpandBuilder(useEntityIds ?? false, logger); + const expandValidationConfigs = + expandBuilder.buildValidationConfigs(expandConfigs); + + const selectedFields = queryOptions.select + ? Array.isArray(queryOptions.select) + ? queryOptions.select.map(String) + : [String(queryOptions.select)] + : undefined; + + // Process the response first + let processedResponse = await processODataResponse(response, { + table: occurrence, + schema: getSchemaFromTable(occurrence), + singleMode, + selectedFields, + expandValidationConfigs, + skipValidation, + useEntityIds, + }); + + // Rename fields if field mapping is provided (for renamed fields in select) + if ( + processedResponse.data && + fieldMapping && + Object.keys(fieldMapping).length > 0 + ) { + processedResponse = { + ...processedResponse, + data: renameFieldsInResponse(processedResponse.data, fieldMapping), + }; + } + + return processedResponse; +} diff --git a/packages/fmodata/src/client/builders/select-mixin.ts b/packages/fmodata/src/client/builders/select-mixin.ts new file mode 100644 index 00000000..0b491cb1 --- /dev/null +++ b/packages/fmodata/src/client/builders/select-mixin.ts @@ -0,0 +1,75 @@ +import { InternalLogger } from "../../logger"; +import { isColumn, type Column } from "../../orm/column"; + +/** + * Utility function for processing select() calls. + * Used by both QueryBuilder and RecordBuilder to eliminate duplication. + * + * @param fields - Field names or Column references + * @returns Object with selectedFields array + */ +export function processSelectFields( + ...fields: (string | Column)[] +): { selectedFields: string[] } { + const fieldNames = fields.map((field) => { + if (isColumn(field)) { + return field.fieldName as string; + } + return String(field); + }); + return { selectedFields: [...new Set(fieldNames)] }; +} + +/** + * Processes select() calls with field renaming support. + * Validates columns belong to the correct table and builds field mapping for renamed fields. + * Used by both QueryBuilder and RecordBuilder to eliminate duplication. + * + * @param fields - Object mapping output keys to column references + * @param tableName - Expected table name for validation + * @returns Object with selectedFields array and fieldMapping for renamed fields + */ +export function processSelectWithRenames( + fields: Record>, + tableName: string, + logger: InternalLogger, +): { selectedFields: string[]; fieldMapping: Record } { + const selectedFields: string[] = []; + const fieldMapping: Record = {}; + + for (const [outputKey, column] of Object.entries(fields)) { + if (!isColumn(column)) { + throw new Error( + `select() expects column references, but got: ${typeof column}`, + ); + } + + // Warn (not throw) on table mismatch for consistency + if (column.tableName !== tableName) { + logger.warn( + `Column ${column.toString()} is from table "${column.tableName}", but query is for table "${tableName}"`, + ); + } + + const fieldName = column.fieldName; + selectedFields.push(fieldName); + + // Build mapping from field name to output key (only if renamed) + if (fieldName !== outputKey) { + fieldMapping[fieldName] = outputKey; + } + } + + return { + selectedFields, + fieldMapping: Object.keys(fieldMapping).length > 0 ? fieldMapping : {}, + }; +} + +/** + * Legacy class name for backward compatibility. + * @deprecated Use processSelectFields function instead + */ +export class SelectMixin { + static processSelect = processSelectFields; +} diff --git a/packages/fmodata/src/client/builders/select-utils.ts b/packages/fmodata/src/client/builders/select-utils.ts new file mode 100644 index 00000000..360cf025 --- /dev/null +++ b/packages/fmodata/src/client/builders/select-utils.ts @@ -0,0 +1,56 @@ +import type { FMTable } from "../../orm/table"; +import { transformFieldNamesArray } from "../../transform"; + +/** + * Determines if a field name needs to be quoted in OData queries. + * Per FileMaker docs: field names with special characters (spaces, underscores, etc.) must be quoted. + * Also quotes "id" as it's an OData reserved word. + * Entity IDs (FMFID:*, FMTID:*) are not quoted as they're identifiers, not field names. + * + * @param fieldName - The field name or identifier to check + * @returns true if the field name should be quoted in OData queries + */ +export function needsFieldQuoting(fieldName: string): boolean { + // Entity IDs are identifiers and don't need quoting + if (fieldName.startsWith("FMFID:") || fieldName.startsWith("FMTID:")) { + return false; + } + // Always quote "id" as it's an OData reserved word + if (fieldName === "id") return true; + // Quote if field name contains spaces, underscores, or other special characters + return ( + fieldName.includes(" ") || + fieldName.includes("_") || + !/^[a-zA-Z][a-zA-Z0-9]*$/.test(fieldName) + ); +} + +/** + * Formats select fields for use in OData query strings. + * - Transforms field names to FMFIDs if using entity IDs + * - Wraps "id" fields in double quotes (OData reserved) + * - URL-encodes special characters but preserves spaces + */ +export function formatSelectFields( + select: string[] | readonly string[] | undefined, + table?: FMTable, + useEntityIds?: boolean, +): string { + if (!select || select.length === 0) return ""; + + const selectArray = Array.isArray(select) ? select : [select]; + + // Transform to field IDs if using entity IDs + const transformedFields = + table && useEntityIds + ? transformFieldNamesArray(selectArray.map(String), table) + : selectArray.map(String); + + return transformedFields + .map((field) => { + if (needsFieldQuoting(field)) return `"${field}"`; + const encoded = encodeURIComponent(field); + return encoded.replace(/%20/g, " "); + }) + .join(","); +} diff --git a/packages/fmodata/src/client/builders/shared-types.ts b/packages/fmodata/src/client/builders/shared-types.ts new file mode 100644 index 00000000..ffc5ed4e --- /dev/null +++ b/packages/fmodata/src/client/builders/shared-types.ts @@ -0,0 +1,42 @@ +import type { QueryOptions } from "odata-query"; +import type { ExecutionContext } from "../../types"; +import type { FMTable } from "../../orm/table"; + +/** + * Expand configuration used by both QueryBuilder and RecordBuilder + */ +export type ExpandConfig = { + relation: string; + options?: Partial>; + targetTable?: FMTable; + nestedExpandConfigs?: ExpandConfig[]; +}; + +/** + * Type to represent expanded relations in return types + */ +export type ExpandedRelations = Record; + +/** + * Navigation context shared between builders + */ +export interface NavigationContext { + isNavigate?: boolean; + navigateRecordId?: string | number; + navigateRelation?: string; + navigateSourceTableName?: string; + navigateBaseRelation?: string; + navigateBasePath?: string; +} + +/** + * Common builder configuration + */ +export interface BuilderConfig | undefined> { + occurrence?: Occ; + tableName: string; + databaseName: string; + context: ExecutionContext; + databaseUseEntityIds?: boolean; +} + diff --git a/packages/fmodata/src/client/builders/table-utils.ts b/packages/fmodata/src/client/builders/table-utils.ts new file mode 100644 index 00000000..91e1a11b --- /dev/null +++ b/packages/fmodata/src/client/builders/table-utils.ts @@ -0,0 +1,87 @@ +import type { ExecutionContext } from "../../types"; +import { getAcceptHeader } from "../../types"; +import type { FMTable } from "../../orm/table"; +import { + getTableName, + getTableId as getTableIdHelper, + isUsingEntityIds, +} from "../../orm/table"; +import type { FFetchOptions } from "@fetchkit/ffetch"; +import type { ExecuteOptions } from "../../types"; + +/** + * Resolves table identifier based on entity ID settings. + * Used by both QueryBuilder and RecordBuilder. + */ +export function resolveTableId( + table: FMTable | undefined, + fallbackTableName: string, + context: ExecutionContext, + useEntityIdsOverride?: boolean, +): string { + if (!table) { + return fallbackTableName; + } + + const contextDefault = context._getUseEntityIds?.() ?? false; + const shouldUseIds = useEntityIdsOverride ?? contextDefault; + + if (shouldUseIds) { + if (!isUsingEntityIds(table)) { + throw new Error( + `useEntityIds is true but table "${getTableName(table)}" does not have entity IDs configured`, + ); + } + return getTableIdHelper(table); + } + + return getTableName(table); +} + +/** + * Merges database-level useEntityIds with per-request options. + */ +export function mergeEntityIdOptions>( + options: T | undefined, + databaseDefault: boolean, +): T & { useEntityIds?: boolean } { + return { + ...options, + useEntityIds: (options as any)?.useEntityIds ?? databaseDefault, + } as T & { useEntityIds?: boolean }; +} + +/** + * Type-safe helper for merging execute options with entity ID settings + */ +export function mergeExecuteOptions( + options: (RequestInit & FFetchOptions & ExecuteOptions) | undefined, + databaseUseEntityIds: boolean, +): RequestInit & FFetchOptions & { useEntityIds?: boolean } { + return mergeEntityIdOptions(options, databaseUseEntityIds); +} + +/** + * Creates an OData Request object with proper headers. + * Used by both QueryBuilder and RecordBuilder to eliminate duplication. + * + * @param baseUrl - Base URL for the request + * @param config - Request configuration with method and url + * @param options - Optional execution options + * @returns Request object ready to use + */ +export function createODataRequest( + baseUrl: string, + config: { method: string; url: string }, + options?: { includeODataAnnotations?: boolean }, +): Request { + const fullUrl = `${baseUrl}${config.url}`; + + return new Request(fullUrl, { + method: config.method, + headers: { + "Content-Type": "application/json", + Accept: getAcceptHeader(options?.includeODataAnnotations), + }, + }); +} diff --git a/packages/fmodata/src/client/database.ts b/packages/fmodata/src/client/database.ts new file mode 100644 index 00000000..b4eba64d --- /dev/null +++ b/packages/fmodata/src/client/database.ts @@ -0,0 +1,193 @@ +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import type { ExecutionContext, ExecutableBuilder, Metadata } from "../types"; +import { EntitySet } from "./entity-set"; +import { BatchBuilder } from "./batch-builder"; +import { SchemaManager } from "./schema-manager"; +import { FMTable } from "../orm/table"; + +export class Database { + private _useEntityIds: boolean = false; + public readonly schema: SchemaManager; + + constructor( + private readonly databaseName: string, + private readonly context: ExecutionContext, + config?: { + /** + * Whether to use entity IDs instead of field names in the actual requests to the server + * Defaults to true if all occurrences use entity IDs, false otherwise + * If set to false but some occurrences do not use entity IDs, an error will be thrown + */ + useEntityIds?: boolean; + }, + ) { + // Initialize schema manager + this.schema = new SchemaManager(this.databaseName, this.context); + this._useEntityIds = config?.useEntityIds ?? false; + } + + from>(table: T): EntitySet { + // Only override database-level useEntityIds if table explicitly sets it + // (not if it's undefined, which would override the database setting) + if ( + Object.prototype.hasOwnProperty.call(table, FMTable.Symbol.UseEntityIds) + ) { + const tableUseEntityIds = (table as any)[FMTable.Symbol.UseEntityIds]; + if (typeof tableUseEntityIds === "boolean") { + this._useEntityIds = tableUseEntityIds; + } + } + return new EntitySet({ + occurrence: table as T, + databaseName: this.databaseName, + context: this.context, + database: this, + }); + } + + /** + * Retrieves the OData metadata for this database. + * @param args Optional configuration object + * @param args.format The format to retrieve metadata in. Defaults to "json". + * @returns The metadata in the specified format + */ + async getMetadata(args: { format: "xml" }): Promise; + async getMetadata(args?: { format?: "json" }): Promise; + async getMetadata(args?: { + format?: "xml" | "json"; + }): Promise { + const result = await this.context._makeRequest< + Record | string + >(`/${this.databaseName}/$metadata`, { + headers: { + Accept: args?.format === "xml" ? "application/xml" : "application/json", + }, + }); + if (result.error) { + throw result.error; + } + + if (args?.format === "json") { + const data = result.data as Record; + const metadata = data[this.databaseName]; + if (!metadata) { + throw new Error( + `Metadata for database "${this.databaseName}" not found in response`, + ); + } + return metadata; + } + return result.data as string; + } + + /** + * Lists all available tables (entity sets) in this database. + * @returns Promise resolving to an array of table names + */ + async listTableNames(): Promise { + const result = await this.context._makeRequest<{ + value?: Array<{ name: string }>; + }>(`/${this.databaseName}`); + if (result.error) { + throw result.error; + } + if (result.data.value && Array.isArray(result.data.value)) { + return result.data.value.map((item) => item.name); + } + return []; + } + + /** + * Executes a FileMaker script. + * @param scriptName - The name of the script to execute (must be valid according to OData rules) + * @param options - Optional script parameter and result schema + * @returns Promise resolving to script execution result + */ + async runScript = never>( + scriptName: string, + options?: { + scriptParam?: string | number | Record; + resultSchema?: ResultSchema; + }, + ): Promise< + [ResultSchema] extends [never] + ? { resultCode: number; result?: string } + : ResultSchema extends StandardSchemaV1 + ? { resultCode: number; result: Output } + : { resultCode: number; result?: string } + > { + const body: { scriptParameterValue?: unknown } = {}; + if (options?.scriptParam !== undefined) { + body.scriptParameterValue = options.scriptParam; + } + + const result = await this.context._makeRequest<{ + scriptResult: { + code: number; + resultParameter?: string; + }; + }>(`/${this.databaseName}/Script.${scriptName}`, { + method: "POST", + body: Object.keys(body).length > 0 ? JSON.stringify(body) : undefined, + }); + + if (result.error) { + throw result.error; + } + + const response = result.data; + + // If resultSchema is provided, validate the result through it + if (options?.resultSchema && response.scriptResult !== undefined) { + const validationResult = options.resultSchema["~standard"].validate( + response.scriptResult.resultParameter, + ); + // Handle both sync and async validation + const result = + validationResult instanceof Promise + ? await validationResult + : validationResult; + + if (result.issues) { + throw new Error( + `Script result validation failed: ${JSON.stringify(result.issues)}`, + ); + } + + return { + resultCode: response.scriptResult.code, + result: result.value, + } as any; + } + + return { + resultCode: response.scriptResult.code, + result: response.scriptResult.resultParameter, + } as any; + } + + /** + * Create a batch operation builder that allows multiple queries to be executed together + * in a single atomic request. All operations succeed or fail together (transactional). + * + * @param builders - Array of executable query builders to batch + * @returns A BatchBuilder that can be executed + * @example + * ```ts + * const result = await db.batch([ + * db.from('contacts').list().top(5), + * db.from('users').list().top(5), + * db.from('contacts').insert({ name: 'John' }) + * ]).execute(); + * + * if (result.data) { + * const [contacts, users, insertResult] = result.data; + * } + * ``` + */ + batch[]>( + builders: Builders, + ): BatchBuilder { + return new BatchBuilder(builders, this.databaseName, this.context); + } +} diff --git a/packages/fmodata/src/client/delete-builder.ts b/packages/fmodata/src/client/delete-builder.ts new file mode 100644 index 00000000..0df96248 --- /dev/null +++ b/packages/fmodata/src/client/delete-builder.ts @@ -0,0 +1,291 @@ +import type { + ExecutionContext, + ExecutableBuilder, + Result, + WithSystemFields, + ExecuteOptions, + ExecuteMethodOptions, +} from "../types"; +import { getAcceptHeader } from "../types"; +import type { FMTable, InferSchemaOutputFromFMTable } from "../orm/table"; +import { + getTableName, + getTableId as getTableIdHelper, + isUsingEntityIds, +} from "../orm/table"; +import { QueryBuilder } from "./query-builder"; +import { type FFetchOptions } from "@fetchkit/ffetch"; +import { parseErrorResponse } from "./error-parser"; + +/** + * Initial delete builder returned from EntitySet.delete() + * Requires calling .byId() or .where() before .execute() is available + */ +export class DeleteBuilder> { + private databaseName: string; + private context: ExecutionContext; + private table: Occ; + private databaseUseEntityIds: boolean; + + constructor(config: { + occurrence: Occ; + databaseName: string; + context: ExecutionContext; + databaseUseEntityIds?: boolean; + }) { + this.table = config.occurrence; + this.databaseName = config.databaseName; + this.context = config.context; + this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; + } + + /** + * Delete a single record by ID + */ + byId(id: string | number): ExecutableDeleteBuilder { + return new ExecutableDeleteBuilder({ + occurrence: this.table, + databaseName: this.databaseName, + context: this.context, + mode: "byId", + recordId: id, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + } + + /** + * Delete records matching a filter query + * @param fn Callback that receives a QueryBuilder for building the filter + */ + where( + fn: (q: QueryBuilder) => QueryBuilder, + ): ExecutableDeleteBuilder { + // Create a QueryBuilder for the user to configure + const queryBuilder = new QueryBuilder({ + occurrence: this.table, + databaseName: this.databaseName, + context: this.context, + }); + + // Let the user configure it + const configuredBuilder = fn(queryBuilder); + + return new ExecutableDeleteBuilder({ + occurrence: this.table, + databaseName: this.databaseName, + context: this.context, + mode: "byFilter", + queryBuilder: configuredBuilder, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + } +} + +/** + * Executable delete builder - has execute() method + * Returned after calling .byId() or .where() + */ +export class ExecutableDeleteBuilder> + implements ExecutableBuilder<{ deletedCount: number }> +{ + private databaseName: string; + private context: ExecutionContext; + private table: Occ; + private mode: "byId" | "byFilter"; + private recordId?: string | number; + private queryBuilder?: QueryBuilder; + private databaseUseEntityIds: boolean; + + constructor(config: { + occurrence: Occ; + databaseName: string; + context: ExecutionContext; + mode: "byId" | "byFilter"; + recordId?: string | number; + queryBuilder?: QueryBuilder; + databaseUseEntityIds?: boolean; + }) { + this.table = config.occurrence; + this.databaseName = config.databaseName; + this.context = config.context; + this.mode = config.mode; + this.recordId = config.recordId; + this.queryBuilder = config.queryBuilder; + this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; + } + + /** + * Helper to merge database-level useEntityIds with per-request options + */ + private mergeExecuteOptions( + options?: RequestInit & FFetchOptions & ExecuteOptions, + ): RequestInit & FFetchOptions & { useEntityIds?: boolean } { + // If useEntityIds is not set in options, use the database-level setting + return { + ...options, + useEntityIds: options?.useEntityIds ?? this.databaseUseEntityIds, + }; + } + + /** + * Gets the table ID (FMTID) if using entity IDs, otherwise returns the table name + * @param useEntityIds - Optional override for entity ID usage + */ + private getTableId(useEntityIds?: boolean): string { + const contextDefault = this.context._getUseEntityIds?.() ?? false; + const shouldUseIds = useEntityIds ?? contextDefault; + + if (shouldUseIds) { + if (!isUsingEntityIds(this.table)) { + throw new Error( + `useEntityIds is true but table "${getTableName(this.table)}" does not have entity IDs configured`, + ); + } + return getTableIdHelper(this.table); + } + + return getTableName(this.table); + } + + async execute( + options?: ExecuteMethodOptions, + ): Promise> { + // Merge database-level useEntityIds with per-request options + const mergedOptions = this.mergeExecuteOptions(options); + + // Get table identifier with override support + const tableId = this.getTableId(mergedOptions.useEntityIds); + + let url: string; + + if (this.mode === "byId") { + // Delete single record by ID: DELETE /{database}/{table}('id') + url = `/${this.databaseName}/${tableId}('${this.recordId}')`; + } else { + // Delete by filter: DELETE /{database}/{table}?$filter=... + if (!this.queryBuilder) { + throw new Error("Query builder is required for filter-based delete"); + } + + // Get the query string from the configured QueryBuilder + const queryString = this.queryBuilder.getQueryString(); + // Remove the leading "/" and table name from the query string as we'll build our own URL + const tableName = getTableName(this.table); + const queryParams = queryString.startsWith(`/${tableId}`) + ? queryString.slice(`/${tableId}`.length) + : queryString.startsWith(`/${tableName}`) + ? queryString.slice(`/${tableName}`.length) + : queryString; + + url = `/${this.databaseName}/${tableId}${queryParams}`; + } + + // Make DELETE request + const result = await this.context._makeRequest(url, { + method: "DELETE", + ...mergedOptions, + }); + + if (result.error) { + return { data: undefined, error: result.error }; + } + + const response = result.data; + + // OData returns 204 No Content with fmodata.affected_rows header + // The _makeRequest should handle extracting the header value + // For now, we'll check if response contains the count + let deletedCount = 0; + + if (typeof response === "number") { + deletedCount = response; + } else if (response && typeof response === "object") { + // Check if the response has a count property (fallback) + deletedCount = (response as any).deletedCount || 0; + } + + return { data: { deletedCount }, error: undefined }; + } + + getRequestConfig(): { method: string; url: string; body?: any } { + // For batch operations, use database-level setting (no per-request override available here) + const tableId = this.getTableId(this.databaseUseEntityIds); + + let url: string; + + if (this.mode === "byId") { + url = `/${this.databaseName}/${tableId}('${this.recordId}')`; + } else { + if (!this.queryBuilder) { + throw new Error("Query builder is required for filter-based delete"); + } + + const queryString = this.queryBuilder.getQueryString(); + const tableName = getTableName(this.table); + const queryParams = queryString.startsWith(`/${tableId}`) + ? queryString.slice(`/${tableId}`.length) + : queryString.startsWith(`/${tableName}`) + ? queryString.slice(`/${tableName}`.length) + : queryString; + + url = `/${this.databaseName}/${tableId}${queryParams}`; + } + + return { + method: "DELETE", + url, + }; + } + + toRequest(baseUrl: string, options?: ExecuteOptions): Request { + const config = this.getRequestConfig(); + const fullUrl = `${baseUrl}${config.url}`; + + return new Request(fullUrl, { + method: config.method, + headers: { + Accept: getAcceptHeader(options?.includeODataAnnotations), + }, + }); + } + + async processResponse( + response: Response, + options?: ExecuteOptions, + ): Promise> { + // Check for error responses (important for batch operations) + if (!response.ok) { + const tableName = getTableName(this.table); + const error = await parseErrorResponse( + response, + response.url || `/${this.databaseName}/${tableName}`, + ); + return { data: undefined, error }; + } + + // Check for empty response (204 No Content) + const text = await response.text(); + if (!text || text.trim() === "") { + // For 204 No Content, check the fmodata.affected_rows header + const affectedRows = response.headers.get("fmodata.affected_rows"); + const deletedCount = affectedRows ? parseInt(affectedRows, 10) : 1; + return { data: { deletedCount }, error: undefined }; + } + + const rawResponse = JSON.parse(text); + + // OData returns 204 No Content with fmodata.affected_rows header + // The _makeRequest should handle extracting the header value + // For now, we'll check if response contains the count + let deletedCount = 0; + + if (typeof rawResponse === "number") { + deletedCount = rawResponse; + } else if (rawResponse && typeof rawResponse === "object") { + // Check if the response has a count property (fallback) + deletedCount = (rawResponse as any).deletedCount || 0; + } + + return { data: { deletedCount }, error: undefined }; + } +} diff --git a/packages/fmodata/src/client/entity-set.ts b/packages/fmodata/src/client/entity-set.ts new file mode 100644 index 00000000..fb03d177 --- /dev/null +++ b/packages/fmodata/src/client/entity-set.ts @@ -0,0 +1,393 @@ +import type { ExecutionContext } from "../types"; +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import { QueryBuilder } from "./query/index"; +import { RecordBuilder } from "./record-builder"; +import { InsertBuilder } from "./insert-builder"; +import { DeleteBuilder } from "./delete-builder"; +import { UpdateBuilder } from "./update-builder"; +import { Database } from "./database"; +import type { + FMTable, + InferSchemaOutputFromFMTable, + InsertDataFromFMTable, + UpdateDataFromFMTable, + ValidExpandTarget, + ColumnMap, +} from "../orm/table"; +import { + FMTable as FMTableClass, + getDefaultSelect, + getTableName, + getTableColumns, +} from "../orm/table"; +import type { FieldBuilder } from "../orm/field-builders"; +import { createLogger, InternalLogger } from "../logger"; + +// Helper type to extract defaultSelect from an FMTable +// Since TypeScript can't extract Symbol-indexed properties at the type level, +// we simplify to return keyof InferSchemaFromFMTable when O is an FMTable. +// The actual defaultSelect logic is handled at runtime. +type ExtractDefaultSelect = + O extends FMTable ? keyof InferSchemaOutputFromFMTable : never; + +/** + * Helper type to extract properly-typed columns from an FMTable. + * This preserves the specific column types instead of widening to `any`. + */ +type ExtractColumnsFromOcc = + T extends FMTable + ? TFields extends Record> + ? ColumnMap + : never + : never; + +export class EntitySet> { + private occurrence: Occ; + private databaseName: string; + private context: ExecutionContext; + private database: Database; // Database instance for accessing occurrences + private isNavigateFromEntitySet?: boolean; + private navigateRelation?: string; + private navigateSourceTableName?: string; + private navigateBasePath?: string; // Full base path for chained navigations + private databaseUseEntityIds: boolean; + private logger: InternalLogger; + + constructor(config: { + occurrence: Occ; + databaseName: string; + context: ExecutionContext; + database?: any; + }) { + this.occurrence = config.occurrence; + this.databaseName = config.databaseName; + this.context = config.context; + this.database = config.database; + // Get useEntityIds from database if available, otherwise default to false + this.databaseUseEntityIds = + (config.database as any)?._useEntityIds ?? false; + this.logger = config.context?._getLogger?.() ?? createLogger(); + } + + // Type-only method to help TypeScript infer the schema from table + static create>(config: { + occurrence: Occ; + databaseName: string; + context: ExecutionContext; + database: Database; + }): EntitySet { + return new EntitySet({ + occurrence: config.occurrence, + databaseName: config.databaseName, + context: config.context, + database: config.database, + }); + } + + list(): QueryBuilder< + Occ, + keyof InferSchemaOutputFromFMTable, + false, + false, + {} + > { + const builder = new QueryBuilder({ + occurrence: this.occurrence as Occ, + databaseName: this.databaseName, + context: this.context, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + + // Apply defaultSelect if occurrence exists and select hasn't been called + if (this.occurrence) { + // FMTable - access via helper functions + const defaultSelectValue = getDefaultSelect(this.occurrence); + const tableSchema = (this.occurrence as any)[FMTableClass.Symbol.Schema]; + let schema: Record | undefined; + + if (tableSchema) { + // Extract schema from StandardSchemaV1 + const zodSchema = tableSchema["~standard"]?.schema; + if ( + zodSchema && + typeof zodSchema === "object" && + "shape" in zodSchema + ) { + schema = zodSchema.shape as Record; + } + } + + if (defaultSelectValue === "schema") { + // Use getTableColumns to get all columns and select them + // This is equivalent to select(getTableColumns(occurrence)) + // Cast to the declared return type - runtime behavior handles the actual selection + const allColumns = getTableColumns( + this.occurrence, + ) as ExtractColumnsFromOcc; + return builder.select(allColumns).top(1000) as QueryBuilder< + Occ, + keyof InferSchemaOutputFromFMTable, + false, + false, + {} + >; + } else if (typeof defaultSelectValue === "object") { + // defaultSelectValue is a select object (Record) + // Cast to the declared return type - runtime behavior handles the actual selection + return builder + .select(defaultSelectValue as ExtractColumnsFromOcc) + .top(1000) as QueryBuilder< + Occ, + keyof InferSchemaOutputFromFMTable, + false, + false, + {} + >; + } + // If defaultSelect is "all", no changes needed (current behavior) + } + + // Propagate navigation context if present + if ( + this.isNavigateFromEntitySet && + this.navigateRelation && + this.navigateSourceTableName + ) { + (builder as any).navigation = { + relation: this.navigateRelation, + sourceTableName: this.navigateSourceTableName, + basePath: this.navigateBasePath, + // recordId is intentionally not set (undefined) to indicate navigation from EntitySet + }; + } + + // Apply default pagination limit of 1000 records to prevent stack overflow + // with large datasets. Users can override with .top() if needed. + return builder.top(1000); + } + + get( + id: string | number, + ): RecordBuilder< + Occ, + false, + undefined, + keyof InferSchemaOutputFromFMTable, + {} + > { + const builder = new RecordBuilder({ + occurrence: this.occurrence, + databaseName: this.databaseName, + context: this.context, + recordId: id, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + + // Apply defaultSelect if occurrence exists + if (this.occurrence) { + // FMTable - access via helper functions + const defaultSelectValue = getDefaultSelect(this.occurrence); + const tableSchema = (this.occurrence as any)[FMTableClass.Symbol.Schema]; + let schema: Record | undefined; + + if (tableSchema) { + // Extract schema from StandardSchemaV1 + const zodSchema = tableSchema["~standard"]?.schema; + if ( + zodSchema && + typeof zodSchema === "object" && + "shape" in zodSchema + ) { + schema = zodSchema.shape as Record; + } + } + + if (defaultSelectValue === "schema") { + // Use getTableColumns to get all columns and select them + // This is equivalent to select(getTableColumns(occurrence)) + // Use ExtractColumnsFromOcc to preserve the properly-typed column types + const allColumns = getTableColumns( + this.occurrence as any, + ) as ExtractColumnsFromOcc; + const selectedBuilder = builder.select(allColumns); + // Propagate navigation context if present + if ( + this.isNavigateFromEntitySet && + this.navigateRelation && + this.navigateSourceTableName + ) { + (selectedBuilder as any).navigation = { + relation: this.navigateRelation, + sourceTableName: this.navigateSourceTableName, + basePath: this.navigateBasePath, + }; + } + return selectedBuilder as any; + } else if ( + typeof defaultSelectValue === "object" && + defaultSelectValue !== null && + !Array.isArray(defaultSelectValue) + ) { + // defaultSelectValue is a select object (Record) + // Use it directly with select() + // Use ExtractColumnsFromOcc to preserve the properly-typed column types + const selectedBuilder = builder.select( + defaultSelectValue as ExtractColumnsFromOcc, + ); + // Propagate navigation context if present + if ( + this.isNavigateFromEntitySet && + this.navigateRelation && + this.navigateSourceTableName + ) { + (selectedBuilder as any).navigation = { + relation: this.navigateRelation, + sourceTableName: this.navigateSourceTableName, + basePath: this.navigateBasePath, + }; + } + return selectedBuilder as any; + } + // If defaultSelect is "all", no changes needed (current behavior) + } + + // Propagate navigation context if present + if ( + this.isNavigateFromEntitySet && + this.navigateRelation && + this.navigateSourceTableName + ) { + (builder as any).navigation = { + relation: this.navigateRelation, + sourceTableName: this.navigateSourceTableName, + basePath: this.navigateBasePath, + }; + } + return builder as any; + } + + // Overload: when returnFullRecord is false + insert( + data: InsertDataFromFMTable, + options: { returnFullRecord: false }, + ): InsertBuilder; + + // Overload: when returnFullRecord is true or omitted (default) + insert( + data: InsertDataFromFMTable, + options?: { returnFullRecord?: true }, + ): InsertBuilder; + + // Implementation + insert( + data: InsertDataFromFMTable, + options?: { returnFullRecord?: boolean }, + ): InsertBuilder { + const returnPreference = + options?.returnFullRecord === false ? "minimal" : "representation"; + + return new InsertBuilder({ + occurrence: this.occurrence, + databaseName: this.databaseName, + context: this.context, + data: data as any, // Input type is validated/transformed at runtime + returnPreference: returnPreference as any, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + } + + // Overload: when returnFullRecord is explicitly true + update( + data: UpdateDataFromFMTable, + options: { returnFullRecord: true }, + ): UpdateBuilder; + + // Overload: when returnFullRecord is false or omitted (default) + update( + data: UpdateDataFromFMTable, + options?: { returnFullRecord?: false }, + ): UpdateBuilder; + + // Implementation + update( + data: UpdateDataFromFMTable, + options?: { returnFullRecord?: boolean }, + ): UpdateBuilder { + const returnPreference = + options?.returnFullRecord === true ? "representation" : "minimal"; + + return new UpdateBuilder({ + occurrence: this.occurrence, + databaseName: this.databaseName, + context: this.context, + data: data as any, // Input type is validated/transformed at runtime + returnPreference: returnPreference as any, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + } + + delete(): DeleteBuilder { + return new DeleteBuilder({ + occurrence: this.occurrence, + databaseName: this.databaseName, + context: this.context, + databaseUseEntityIds: this.databaseUseEntityIds, + }) as any; + } + + // Implementation + navigate>( + targetTable: ValidExpandTarget, + ): EntitySet ? TargetTable : never> { + // Check if it's an FMTable object or a string + let relationName: string; + + // FMTable object - extract name and validate + relationName = getTableName(targetTable); + + // Runtime validation: Check if relation name is in navigationPaths + if ( + this.occurrence && + FMTableClass.Symbol.NavigationPaths in this.occurrence + ) { + const navigationPaths = (this.occurrence as any)[ + FMTableClass.Symbol.NavigationPaths + ] as readonly string[]; + if (navigationPaths && !navigationPaths.includes(relationName)) { + this.logger.warn( + `Cannot navigate to "${relationName}". Valid navigation paths: ${navigationPaths.length > 0 ? navigationPaths.join(", ") : "none"}`, + ); + } + } + + // Create EntitySet with target table + const entitySet = new EntitySet({ + occurrence: targetTable, + databaseName: this.databaseName, + context: this.context, + database: this.database, + }); + // Store the navigation info in the EntitySet + (entitySet as any).isNavigateFromEntitySet = true; + (entitySet as any).navigateRelation = relationName; + + // Build the full base path for chained navigations + if (this.isNavigateFromEntitySet && this.navigateBasePath) { + // Already have a base path from previous navigation - extend it with current relation + (entitySet as any).navigateBasePath = + `${this.navigateBasePath}/${this.navigateRelation}`; + (entitySet as any).navigateSourceTableName = this.navigateSourceTableName; + } else if (this.isNavigateFromEntitySet && this.navigateRelation) { + // First chained navigation - create base path from source/relation + (entitySet as any).navigateBasePath = + `${this.navigateSourceTableName}/${this.navigateRelation}`; + (entitySet as any).navigateSourceTableName = this.navigateSourceTableName; + } else { + // Initial navigation - source is just the table name + (entitySet as any).navigateSourceTableName = getTableName( + this.occurrence, + ); + } + return entitySet; + } +} diff --git a/packages/fmodata/src/client/error-parser.ts b/packages/fmodata/src/client/error-parser.ts new file mode 100644 index 00000000..fd31d12e --- /dev/null +++ b/packages/fmodata/src/client/error-parser.ts @@ -0,0 +1,56 @@ +import { + HTTPError, + ODataError, + SchemaLockedError, + FMODataErrorType, +} from "../errors"; +import { safeJsonParse } from "./sanitize-json"; + +/** + * Parses an error response and returns an appropriate error object. + * This helper is used by builder processResponse methods to handle error responses + * consistently, particularly important for batch operations where errors need to be + * properly parsed from the response body. + * + * @param response - The Response object (may be from batch or direct request) + * @param url - The URL that was requested (for error context) + * @returns An appropriate error object (ODataError, SchemaLockedError, or HTTPError) + */ +export async function parseErrorResponse( + response: Response, + url: string, +): Promise { + // Try to parse error body if it's JSON + let errorBody: + | { error?: { code?: string | number; message?: string } } + | undefined; + + try { + if (response.headers.get("content-type")?.includes("application/json")) { + errorBody = await safeJsonParse(response); + } + } catch { + // Ignore JSON parse errors - we'll fall back to HTTPError + } + + // Check if it's an OData error response + if (errorBody?.error) { + const errorCode = errorBody.error.code; + const errorMessage = errorBody.error.message || response.statusText; + + // Check for schema locked error (code 303) + if (errorCode === "303" || errorCode === 303) { + return new SchemaLockedError(url, errorMessage, errorBody.error); + } + + return new ODataError( + url, + errorMessage, + String(errorCode), + errorBody.error, + ); + } + + // Fall back to generic HTTPError + return new HTTPError(url, response.status, response.statusText, errorBody); +} diff --git a/packages/fmodata/src/client/filemaker-odata.ts b/packages/fmodata/src/client/filemaker-odata.ts new file mode 100644 index 00000000..a82233c6 --- /dev/null +++ b/packages/fmodata/src/client/filemaker-odata.ts @@ -0,0 +1,299 @@ +import createClient, { + FFetchOptions, + TimeoutError, + AbortError, + NetworkError, + RetryLimitError, + CircuitOpenError, +} from "@fetchkit/ffetch"; +import type { Auth, ExecutionContext, Result } from "../types"; +import { getAcceptHeader } from "../types"; +import { + HTTPError, + ODataError, + SchemaLockedError, + ResponseParseError, +} from "../errors"; +import { Database } from "./database"; +import { safeJsonParse } from "./sanitize-json"; +import { get } from "es-toolkit/compat"; +import { createLogger, type Logger, type InternalLogger } from "../logger"; + +export class FMServerConnection implements ExecutionContext { + private fetchClient: ReturnType; + private serverUrl: string; + private auth: Auth; + private useEntityIds: boolean = false; + private logger: InternalLogger; + constructor(config: { + serverUrl: string; + auth: Auth; + fetchClientOptions?: FFetchOptions; + logger?: Logger; + }) { + this.logger = createLogger(config.logger); + this.fetchClient = createClient({ + retries: 0, + ...config.fetchClientOptions, + }); + // Ensure the URL uses https://, is valid, and has no trailing slash + const url = new URL(config.serverUrl); + if (url.protocol !== "https:") { + url.protocol = "https:"; + } + // Remove any trailing slash from pathname + url.pathname = url.pathname.replace(/\/+$/, ""); + this.serverUrl = url.toString().replace(/\/+$/, ""); + this.auth = config.auth; + } + + /** + * @internal + * Sets whether to use FileMaker entity IDs (FMFID/FMTID) in requests + */ + _setUseEntityIds(useEntityIds: boolean): void { + this.useEntityIds = useEntityIds; + } + + /** + * @internal + * Gets whether to use FileMaker entity IDs (FMFID/FMTID) in requests + */ + _getUseEntityIds(): boolean { + return this.useEntityIds; + } + + /** + * @internal + * Gets the base URL for OData requests + */ + _getBaseUrl(): string { + return `${this.serverUrl}${"apiKey" in this.auth ? `/otto` : ""}/fmi/odata/v4`; + } + + /** + * @internal + * Gets the logger instance + */ + _getLogger(): InternalLogger { + return this.logger; + } + + /** + * @internal + */ + async _makeRequest( + url: string, + options?: RequestInit & FFetchOptions & { useEntityIds?: boolean }, + ): Promise> { + const logger = this._getLogger(); + const baseUrl = `${this.serverUrl}${"apiKey" in this.auth ? `/otto` : ""}/fmi/odata/v4`; + const fullUrl = baseUrl + url; + + // Use per-request override if provided, otherwise use the database-level setting + const useEntityIds = options?.useEntityIds ?? this.useEntityIds; + + // Get includeODataAnnotations from options (it's passed through from execute options) + const includeODataAnnotations = (options as any)?.includeODataAnnotations; + + const headers = { + Authorization: + "apiKey" in this.auth + ? `Bearer ${this.auth.apiKey}` + : `Basic ${btoa(`${this.auth.username}:${this.auth.password}`)}`, + "Content-Type": "application/json", + Accept: getAcceptHeader(includeODataAnnotations), + ...(useEntityIds ? { Prefer: "fmodata.entity-ids" } : {}), + ...(options?.headers || {}), + }; + + // Prepare loggableHeaders by omitting the Authorization key + const { Authorization, ...loggableHeaders } = headers; + logger.debug("Request headers:", loggableHeaders); + + // TEMPORARY WORKAROUND: Hopefully this feature will be fixed in the ffetch library + // Extract fetchHandler and headers separately, only for tests where we're overriding the fetch handler per-request + const fetchHandler = options?.fetchHandler; + const { + headers: _headers, + fetchHandler: _fetchHandler, + ...restOptions + } = options || {}; + + // If fetchHandler is provided, create a temporary client with it + // Otherwise use the existing client + const clientToUse = fetchHandler + ? createClient({ retries: 0, fetchHandler }) + : this.fetchClient; + + try { + const finalOptions = { + ...restOptions, + headers, + }; + + const resp = await clientToUse(fullUrl, finalOptions); + logger.debug(`${finalOptions.method ?? "GET"} ${resp.status} ${fullUrl}`); + + // Handle HTTP errors + if (!resp.ok) { + // Try to parse error body if it's JSON + let errorBody: + | { error?: { code?: string | number; message?: string } } + | undefined; + try { + if (resp.headers.get("content-type")?.includes("application/json")) { + errorBody = await safeJsonParse(resp); + } + } catch { + // Ignore JSON parse errors + } + + // Check if it's an OData error response + if (errorBody?.error) { + const errorCode = errorBody.error.code; + const errorMessage = errorBody.error.message || resp.statusText; + + // Check for schema locked error (code 303) + if (errorCode === "303" || errorCode === 303) { + return { + data: undefined, + error: new SchemaLockedError( + fullUrl, + errorMessage, + errorBody.error, + ), + }; + } + + return { + data: undefined, + error: new ODataError( + fullUrl, + errorMessage, + String(errorCode), + errorBody.error, + ), + }; + } + + return { + data: undefined, + error: new HTTPError( + fullUrl, + resp.status, + resp.statusText, + errorBody, + ), + }; + } + + // Check for affected rows header (for DELETE and bulk PATCH operations) + // FileMaker may return this with 204 No Content or 200 OK + const affectedRows = resp.headers.get("fmodata.affected_rows"); + if (affectedRows !== null) { + return { data: parseInt(affectedRows, 10) as T, error: undefined }; + } + + // Handle 204 No Content with no body + if (resp.status === 204) { + // Check for Location header (used for insert with return=minimal) + // Use optional chaining for safety with mocks that might not have proper headers + const locationHeader = + resp.headers?.get?.("Location") || resp.headers?.get?.("location"); + if (locationHeader) { + // Return the location header so InsertBuilder can extract ROWID + return { data: { _location: locationHeader } as T, error: undefined }; + } + return { data: 0 as T, error: undefined }; + } + + // Parse response + if (resp.headers.get("content-type")?.includes("application/json")) { + const data = await safeJsonParse< + T & { error?: { code?: string | number; message?: string } } + >(resp); + + // Check for embedded OData errors + if (get(data, "error", null)) { + const errorCode = get(data, "error.code", null); + const errorMessage = get( + data, + "error.message", + "Unknown OData error", + ); + + // Check for schema locked error (code 303) + if (errorCode === "303" || errorCode === 303) { + return { + data: undefined, + error: new SchemaLockedError(fullUrl, errorMessage, data.error), + }; + } + + return { + data: undefined, + error: new ODataError( + fullUrl, + errorMessage, + String(errorCode), + data.error, + ), + }; + } + + return { data: data as T, error: undefined }; + } + + return { data: (await resp.text()) as T, error: undefined }; + } catch (err) { + // Map ffetch errors - return them directly (no re-wrapping) + if ( + err instanceof TimeoutError || + err instanceof AbortError || + err instanceof NetworkError || + err instanceof RetryLimitError || + err instanceof CircuitOpenError + ) { + return { data: undefined, error: err }; + } + + // Handle JSON parse errors (ResponseParseError from safeJsonParse) + if (err instanceof ResponseParseError) { + return { data: undefined, error: err }; + } + + // Unknown error - wrap it as NetworkError + return { + data: undefined, + error: new NetworkError(fullUrl, err), + }; + } + } + + database( + name: string, + config?: { + useEntityIds?: boolean; + }, + ): Database { + return new Database(name, this, config); + } + + /** + * Lists all available databases from the FileMaker OData service. + * @returns Promise resolving to an array of database names + */ + async listDatabaseNames(): Promise { + const result = await this._makeRequest<{ + value?: Array<{ name: string }>; + }>("/"); + if (result.error) { + throw result.error; + } + if (result.data.value && Array.isArray(result.data.value)) { + return result.data.value.map((item) => item.name); + } + return []; + } +} diff --git a/packages/fmodata/src/client/insert-builder.ts b/packages/fmodata/src/client/insert-builder.ts new file mode 100644 index 00000000..01b74113 --- /dev/null +++ b/packages/fmodata/src/client/insert-builder.ts @@ -0,0 +1,460 @@ +import type { + ExecutionContext, + ExecutableBuilder, + Result, + ODataRecordMetadata, + InferSchemaType, + ExecuteOptions, + ConditionallyWithODataAnnotations, + ExecuteMethodOptions, +} from "../types"; +import { getAcceptHeader } from "../types"; +import type { FMTable } from "../orm/table"; +import { + getBaseTableConfig, + getTableName, + getTableId as getTableIdHelper, + isUsingEntityIds, +} from "../orm/table"; +import { + validateSingleResponse, + validateAndTransformInput, +} from "../validation"; +import { type FFetchOptions } from "@fetchkit/ffetch"; +import { + transformFieldNamesToIds, + transformResponseFields, +} from "../transform"; +import { InvalidLocationHeaderError } from "../errors"; +import { safeJsonParse } from "./sanitize-json"; +import { parseErrorResponse } from "./error-parser"; + +export type InsertOptions = { + return?: "minimal" | "representation"; +}; + +import type { InferSchemaOutputFromFMTable } from "../orm/table"; + +export class InsertBuilder< + Occ extends FMTable | undefined = undefined, + ReturnPreference extends "minimal" | "representation" = "representation", +> implements + ExecutableBuilder< + ReturnPreference extends "minimal" + ? { ROWID: number } + : InferSchemaOutputFromFMTable> + > +{ + private table?: Occ; + private databaseName: string; + private context: ExecutionContext; + private data: Partial>>; + private returnPreference: ReturnPreference; + + private databaseUseEntityIds: boolean; + + constructor(config: { + occurrence?: Occ; + databaseName: string; + context: ExecutionContext; + data: Partial>>; + returnPreference?: ReturnPreference; + databaseUseEntityIds?: boolean; + }) { + this.table = config.occurrence; + this.databaseName = config.databaseName; + this.context = config.context; + this.data = config.data; + this.returnPreference = (config.returnPreference || + "representation") as ReturnPreference; + this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; + } + + /** + * Helper to merge database-level useEntityIds with per-request options + */ + private mergeExecuteOptions( + options?: RequestInit & FFetchOptions & ExecuteOptions, + ): RequestInit & FFetchOptions & { useEntityIds?: boolean } { + // If useEntityIds is not set in options, use the database-level setting + return { + ...options, + useEntityIds: options?.useEntityIds ?? this.databaseUseEntityIds, + }; + } + + /** + * Parse ROWID from Location header + * Expected formats: + * - contacts(ROWID=4583) + * - contacts('some-uuid') + */ + private parseLocationHeader(locationHeader: string | undefined): number { + if (!locationHeader) { + throw new InvalidLocationHeaderError( + "Location header is required but was not provided", + ); + } + + // Try to match ROWID=number pattern + const rowidMatch = locationHeader.match(/ROWID=(\d+)/); + if (rowidMatch && rowidMatch[1]) { + return parseInt(rowidMatch[1], 10); + } + + // Try to extract value from parentheses and parse as number + const parenMatch = locationHeader.match(/\(['"]?([^'"]+)['"]?\)/); + if (parenMatch && parenMatch[1]) { + const value = parenMatch[1]; + const numValue = parseInt(value, 10); + if (!isNaN(numValue)) { + return numValue; + } + } + + throw new InvalidLocationHeaderError( + `Could not extract ROWID from Location header: ${locationHeader}`, + locationHeader, + ); + } + + /** + * Gets the table ID (FMTID) if using entity IDs, otherwise returns the table name + * @param useEntityIds - Optional override for entity ID usage + */ + private getTableId(useEntityIds?: boolean): string { + if (!this.table) { + throw new Error("Table occurrence is required"); + } + + const contextDefault = this.context._getUseEntityIds?.() ?? false; + const shouldUseIds = useEntityIds ?? contextDefault; + + if (shouldUseIds) { + if (!isUsingEntityIds(this.table)) { + throw new Error( + `useEntityIds is true but table "${getTableName(this.table)}" does not have entity IDs configured`, + ); + } + return getTableIdHelper(this.table); + } + + return getTableName(this.table); + } + + async execute( + options?: ExecuteMethodOptions, + ): Promise< + Result< + ReturnPreference extends "minimal" + ? { ROWID: number } + : ConditionallyWithODataAnnotations< + InferSchemaOutputFromFMTable>, + EO["includeODataAnnotations"] extends true ? true : false + > + > + > { + // Merge database-level useEntityIds with per-request options + const mergedOptions = this.mergeExecuteOptions(options); + + // Get table identifier with override support + const tableId = this.getTableId(mergedOptions.useEntityIds); + const url = `/${this.databaseName}/${tableId}`; + + // Validate and transform input data using input validators (writeValidators) + let validatedData = this.data; + if (this.table) { + const baseTableConfig = getBaseTableConfig(this.table); + const inputSchema = baseTableConfig.inputSchema; + + try { + validatedData = await validateAndTransformInput(this.data, inputSchema); + } catch (error) { + // If validation fails, return error immediately + return { + data: undefined, + error: error instanceof Error ? error : new Error(String(error)), + } as any; + } + } + + // Transform field names to FMFIDs if using entity IDs + // Only transform if useEntityIds resolves to true (respects per-request override) + const shouldUseIds = mergedOptions.useEntityIds ?? false; + + const transformedData = + this.table && shouldUseIds + ? transformFieldNamesToIds(validatedData, this.table) + : validatedData; + + // Set Prefer header based on return preference + const preferHeader = + this.returnPreference === "minimal" + ? "return=minimal" + : "return=representation"; + + // Make POST request with JSON body + const result = await this.context._makeRequest(url, { + method: "POST", + headers: { + "Content-Type": "application/json", + Prefer: preferHeader, + ...((mergedOptions as any)?.headers || {}), + }, + body: JSON.stringify(transformedData), + ...mergedOptions, + }); + + if (result.error) { + return { data: undefined, error: result.error }; + } + + // Handle return=minimal case + if (this.returnPreference === "minimal") { + // The response should be empty (204 No Content) + // _makeRequest will return { _location: string } when there's a Location header + const responseData = result.data as any; + + if (!responseData || !responseData._location) { + throw new InvalidLocationHeaderError( + "Location header is required when using return=minimal but was not found in response", + ); + } + + const rowid = this.parseLocationHeader(responseData._location); + return { data: { ROWID: rowid } as any, error: undefined }; + } + + let response = result.data; + + // Transform response field IDs back to names if using entity IDs + // Only transform if useEntityIds resolves to true (respects per-request override) + if (this.table && shouldUseIds) { + response = transformResponseFields( + response, + this.table, + undefined, // No expand configs for insert + ); + } + + // Get schema from table if available, excluding container fields + let schema: Record | undefined; + if (this.table) { + const baseTableConfig = getBaseTableConfig(this.table); + const containerFields = baseTableConfig.containerFields || []; + + // Filter out container fields from schema + schema = { ...baseTableConfig.schema }; + for (const containerField of containerFields) { + delete schema[containerField as string]; + } + } + + // Validate the response (FileMaker returns the created record) + const validation = await validateSingleResponse< + InferSchemaOutputFromFMTable> + >( + response, + schema, + undefined, // No selected fields for insert + undefined, // No expand configs + "exact", // Expect exactly one record + ); + + if (!validation.valid) { + return { data: undefined, error: validation.error }; + } + + // Handle null response (shouldn't happen for insert, but handle it) + if (validation.data === null) { + return { + data: undefined, + error: new Error("Insert operation returned null response"), + }; + } + + return { data: validation.data as any, error: undefined }; + } + + getRequestConfig(): { method: string; url: string; body?: any } { + // For batch operations, use database-level setting (no per-request override available here) + // Note: Input validation happens in execute() and processResponse() for batch operations + const tableId = this.getTableId(this.databaseUseEntityIds); + + // Transform field names to FMFIDs if using entity IDs + const transformedData = + this.table && this.databaseUseEntityIds + ? transformFieldNamesToIds(this.data, this.table) + : this.data; + + return { + method: "POST", + url: `/${this.databaseName}/${tableId}`, + body: JSON.stringify(transformedData), + }; + } + + toRequest(baseUrl: string, options?: ExecuteOptions): Request { + const config = this.getRequestConfig(); + const fullUrl = `${baseUrl}${config.url}`; + + // Set Prefer header based on return preference + const preferHeader = + this.returnPreference === "minimal" + ? "return=minimal" + : "return=representation"; + + return new Request(fullUrl, { + method: config.method, + headers: { + "Content-Type": "application/json", + Accept: getAcceptHeader(options?.includeODataAnnotations), + Prefer: preferHeader, + }, + body: config.body, + }); + } + + async processResponse( + response: Response, + options?: ExecuteOptions, + ): Promise< + Result< + ReturnPreference extends "minimal" + ? { ROWID: number } + : InferSchemaOutputFromFMTable> + > + > { + // Check for error responses (important for batch operations) + if (!response.ok) { + const tableName = this.table ? getTableName(this.table) : "unknown"; + const error = await parseErrorResponse( + response, + response.url || `/${this.databaseName}/${tableName}`, + ); + return { data: undefined, error }; + } + + // Handle 204 No Content (common in batch/changeset operations) + // FileMaker uses return=minimal for changeset operations regardless of Prefer header + if (response.status === 204) { + // Check for Location header (for return=minimal) + if (this.returnPreference === "minimal") { + const locationHeader = + response.headers.get("Location") || response.headers.get("location"); + if (locationHeader) { + const rowid = this.parseLocationHeader(locationHeader); + return { data: { ROWID: rowid } as any, error: undefined }; + } + throw new InvalidLocationHeaderError( + "Location header is required when using return=minimal but was not found in response", + ); + } + + // For 204 responses without return=minimal, FileMaker doesn't return the created entity + // This is valid OData behavior for changeset operations + // We return a success indicator but no actual data + return { + data: {} as any, + error: undefined, + }; + } + + // If we expected return=minimal but got a body, that's unexpected + if (this.returnPreference === "minimal") { + throw new InvalidLocationHeaderError( + "Expected 204 No Content for return=minimal, but received response with body", + ); + } + + // Use safeJsonParse to handle FileMaker's invalid JSON with unquoted ? values + let rawResponse; + try { + rawResponse = await safeJsonParse(response); + } catch (err) { + // If parsing fails with 204, handle it gracefully + if (response.status === 204) { + return { + data: {} as any, + error: undefined, + }; + } + return { + data: undefined, + error: { + name: "ResponseParseError", + message: `Failed to parse response JSON: ${err instanceof Error ? err.message : "Unknown error"}`, + timestamp: new Date(), + } as any, + }; + } + + // Validate and transform input data using input validators (writeValidators) + // This is needed for processResponse because it's called from batch operations + // where the data hasn't been validated yet + let validatedData = this.data; + if (this.table) { + const baseTableConfig = getBaseTableConfig(this.table); + const inputSchema = baseTableConfig.inputSchema; + try { + validatedData = await validateAndTransformInput(this.data, inputSchema); + } catch (error) { + return { + data: undefined, + error: error instanceof Error ? error : new Error(String(error)), + } as any; + } + } + + // Transform response field IDs back to names if using entity IDs + // Only transform if useEntityIds resolves to true (respects per-request override) + const shouldUseIds = options?.useEntityIds ?? this.databaseUseEntityIds; + + let transformedResponse = rawResponse; + if (this.table && shouldUseIds) { + transformedResponse = transformResponseFields( + rawResponse, + this.table, + undefined, // No expand configs for insert + ); + } + + // Get schema from table if available, excluding container fields + let schema: Record | undefined; + if (this.table) { + const baseTableConfig = getBaseTableConfig(this.table); + const containerFields = baseTableConfig.containerFields || []; + + // Filter out container fields from schema + schema = { ...baseTableConfig.schema }; + for (const containerField of containerFields) { + delete schema[containerField as string]; + } + } + + // Validate the response (FileMaker returns the created record) + const validation = await validateSingleResponse< + InferSchemaOutputFromFMTable> + >( + transformedResponse, + schema, + undefined, // No selected fields for insert + undefined, // No expand configs + "exact", // Expect exactly one record + ); + + if (!validation.valid) { + return { data: undefined, error: validation.error }; + } + + // Handle null response (shouldn't happen for insert, but handle it) + if (validation.data === null) { + return { + data: undefined, + error: new Error("Insert operation returned null response"), + }; + } + + return { data: validation.data as any, error: undefined }; + } +} diff --git a/packages/fmodata/src/client/query-builder.ts b/packages/fmodata/src/client/query-builder.ts new file mode 100644 index 00000000..713db643 --- /dev/null +++ b/packages/fmodata/src/client/query-builder.ts @@ -0,0 +1,8 @@ +// Re-export QueryBuilder and types from the new modular location +// This maintains backward compatibility for existing imports +export { + QueryBuilder, + type TypeSafeOrderBy, + type ExpandedRelations, + type QueryReturnType, +} from "./query/index"; diff --git a/packages/fmodata/src/client/query/expand-builder.ts b/packages/fmodata/src/client/query/expand-builder.ts new file mode 100644 index 00000000..0e459e3d --- /dev/null +++ b/packages/fmodata/src/client/query/expand-builder.ts @@ -0,0 +1,164 @@ +import { QueryOptions } from "odata-query"; +import buildQuery from "odata-query"; +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import { FMTable } from "../../orm/table"; +import type { ExpandValidationConfig } from "../../validation"; +import { formatSelectFields } from "../builders/select-utils"; + +/** + * Internal type for expand configuration + */ +export type ExpandConfig = { + relation: string; + options?: Partial>; + targetTable?: FMTable; +}; + +/** + * Builds OData expand query strings and validation configs. + * Handles nested expands recursively and transforms relation names to FMTIDs + * when using entity IDs. + */ +export class ExpandBuilder { + constructor(private useEntityIds: boolean) {} + + /** + * Builds OData expand query string from expand configurations. + * Handles nested expands recursively. + * Transforms relation names to FMTIDs if using entity IDs. + */ + buildExpandString(configs: ExpandConfig[]): string { + if (configs.length === 0) { + return ""; + } + + return configs.map((config) => this.buildSingleExpand(config)).join(","); + } + + /** + * Builds a single expand string with its options. + */ + private buildSingleExpand(config: ExpandConfig): string { + // Get target table/occurrence from config (stored during expand call) + const targetTable = config.targetTable; + + // When using entity IDs, use the target table's FMTID in the expand parameter + // FileMaker expects FMTID in $expand when Prefer header is set + // Only use FMTID if databaseUseEntityIds is enabled + let relationName = config.relation; + if (this.useEntityIds) { + if (targetTable && FMTable.Symbol.EntityId in targetTable) { + const tableId = (targetTable as any)[FMTable.Symbol.EntityId] as + | `FMTID:${string}` + | undefined; + if (tableId) { + relationName = tableId; + } + } + } + + if (!config.options || Object.keys(config.options).length === 0) { + // Simple expand without options + return relationName; + } + + // Build query options for this expand + const parts: string[] = []; + + if (config.options.select) { + // Use shared formatSelectFields function for consistent id field quoting + const selectArray = Array.isArray(config.options.select) + ? config.options.select.map(String) + : [String(config.options.select)]; + const selectFields = formatSelectFields( + selectArray, + targetTable, + this.useEntityIds, + ); + parts.push(`$select=${selectFields}`); + } + + if (config.options.filter) { + // Filter should already be transformed by the nested builder + // Use odata-query to build filter string + const filterQuery = buildQuery({ filter: config.options.filter }); + const filterMatch = filterQuery.match(/\$filter=([^&]+)/); + if (filterMatch) { + parts.push(`$filter=${filterMatch[1]}`); + } + } + + if (config.options.orderBy) { + // OrderBy should already be transformed by the nested builder + const orderByValue = Array.isArray(config.options.orderBy) + ? config.options.orderBy.join(",") + : config.options.orderBy; + parts.push(`$orderby=${String(orderByValue)}`); + } + + if (config.options.top !== undefined) { + parts.push(`$top=${config.options.top}`); + } + + if (config.options.skip !== undefined) { + parts.push(`$skip=${config.options.skip}`); + } + + // Handle nested expands (from expand configs) + if (config.options.expand) { + // If expand is a string, it's already been built + if (typeof config.options.expand === "string") { + parts.push(`$expand=${config.options.expand}`); + } + } + + if (parts.length === 0) { + return relationName; + } + + return `${relationName}(${parts.join(";")})`; + } + + /** + * Builds expand validation configs from internal expand configurations. + * These are used to validate expanded navigation properties. + */ + buildValidationConfigs(configs: ExpandConfig[]): ExpandValidationConfig[] { + return configs.map((config) => { + // Get target table/occurrence from config (stored during expand call) + const targetTable = config.targetTable; + + // Extract schema from target table/occurrence + let targetSchema: Record | undefined; + if (targetTable) { + const tableSchema = (targetTable as any)[FMTable.Symbol.Schema]; + if (tableSchema) { + const zodSchema = tableSchema["~standard"]?.schema; + if ( + zodSchema && + typeof zodSchema === "object" && + "shape" in zodSchema + ) { + targetSchema = zodSchema.shape as Record; + } + } + } + + // Extract selected fields from options + const selectedFields = config.options?.select + ? Array.isArray(config.options.select) + ? config.options.select.map((f) => String(f)) + : [String(config.options.select)] + : undefined; + + return { + relation: config.relation, + targetSchema: targetSchema, + targetTable: targetTable, + table: targetTable, // For transformation + selectedFields: selectedFields, + nestedExpands: undefined, // TODO: Handle nested expands if needed + }; + }); + } +} diff --git a/packages/fmodata/src/client/query/index.ts b/packages/fmodata/src/client/query/index.ts new file mode 100644 index 00000000..094a4d89 --- /dev/null +++ b/packages/fmodata/src/client/query/index.ts @@ -0,0 +1,13 @@ +// Re-export QueryBuilder as the main export +export { QueryBuilder } from "./query-builder"; + +// Export types +export type { + TypeSafeOrderBy, + ExpandedRelations, + QueryReturnType, +} from "./query-builder"; + +// Export ExpandConfig from expand-builder +export type { ExpandConfig } from "./expand-builder"; + diff --git a/packages/fmodata/src/client/query/query-builder.ts b/packages/fmodata/src/client/query/query-builder.ts new file mode 100644 index 00000000..90b12d61 --- /dev/null +++ b/packages/fmodata/src/client/query/query-builder.ts @@ -0,0 +1,742 @@ +import { QueryOptions } from "odata-query"; +import buildQuery from "odata-query"; +import type { + ExecutionContext, + ExecutableBuilder, + Result, + ExecuteOptions, + ConditionallyWithODataAnnotations, + ExtractSchemaFromOccurrence, + ExecuteMethodOptions, +} from "../../types"; +import { RecordCountMismatchError } from "../../errors"; +import { type FFetchOptions } from "@fetchkit/ffetch"; +import { + transformFieldNamesArray, + transformOrderByField, +} from "../../transform"; +import { safeJsonParse } from "../sanitize-json"; +import { parseErrorResponse } from "../error-parser"; +import { isColumn, type Column } from "../../orm/column"; +import { + FilterExpression, + OrderByExpression, + isOrderByExpression, +} from "../../orm/operators"; +import { + FMTable, + type InferSchemaOutputFromFMTable, + type ValidExpandTarget, + type ExtractTableName, + type ValidateNoContainerFields, + getTableName, +} from "../../orm/table"; +import { + ExpandBuilder, + type ExpandConfig, + type ExpandedRelations, + resolveTableId, + mergeExecuteOptions, + formatSelectFields, + processQueryResponse, + processSelectWithRenames, + buildSelectExpandQueryString, + createODataRequest, +} from "../builders/index"; +import { QueryUrlBuilder, type NavigationConfig } from "./url-builder"; +import type { TypeSafeOrderBy, QueryReturnType } from "./types"; +import { createLogger, InternalLogger } from "../../logger"; + +// Re-export QueryReturnType for backward compatibility +export type { QueryReturnType }; + +/** + * Default maximum number of records to return in a list query. + * This prevents stack overflow issues with large datasets while still + * allowing substantial data retrieval. Users can override with .top(). + */ +const DEFAULT_TOP = 1000; + +export type { TypeSafeOrderBy, ExpandedRelations }; + +export class QueryBuilder< + Occ extends FMTable, + Selected extends + | keyof InferSchemaOutputFromFMTable + | Record< + string, + Column> + > = keyof InferSchemaOutputFromFMTable, + SingleMode extends "exact" | "maybe" | false = false, + IsCount extends boolean = false, + Expands extends ExpandedRelations = {}, +> implements + ExecutableBuilder< + QueryReturnType< + InferSchemaOutputFromFMTable, + Selected, + SingleMode, + IsCount, + Expands + > + > +{ + private queryOptions: Partial< + QueryOptions> + > = {}; + private expandConfigs: ExpandConfig[] = []; + private singleMode: SingleMode = false as SingleMode; + private isCountMode = false as IsCount; + private occurrence: Occ; + private databaseName: string; + private context: ExecutionContext; + private navigation?: NavigationConfig; + private databaseUseEntityIds: boolean; + private expandBuilder: ExpandBuilder; + private urlBuilder: QueryUrlBuilder; + // Mapping from field names to output keys (for renamed fields in select) + private fieldMapping?: Record; + private logger: InternalLogger; + + constructor(config: { + occurrence: Occ; + databaseName: string; + context: ExecutionContext; + databaseUseEntityIds?: boolean; + }) { + this.occurrence = config.occurrence; + this.databaseName = config.databaseName; + this.context = config.context; + this.logger = config.context?._getLogger?.() ?? createLogger(); + this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; + this.expandBuilder = new ExpandBuilder( + this.databaseUseEntityIds, + this.logger, + ); + this.urlBuilder = new QueryUrlBuilder( + this.databaseName, + this.occurrence, + this.context, + ); + } + + /** + * Helper to merge database-level useEntityIds with per-request options + */ + private mergeExecuteOptions( + options?: RequestInit & FFetchOptions & ExecuteOptions, + ): RequestInit & FFetchOptions & { useEntityIds?: boolean } { + return mergeExecuteOptions(options, this.databaseUseEntityIds); + } + + /** + * Gets the FMTable instance + */ + private getTable(): FMTable | undefined { + return this.occurrence; + } + + /** + * Gets the table ID (FMTID) if using entity IDs, otherwise returns the table name + * @param useEntityIds - Optional override for entity ID usage + */ + private getTableIdOrName(useEntityIds?: boolean): string { + return resolveTableId( + this.occurrence, + getTableName(this.occurrence), + this.context, + useEntityIds, + ); + } + + /** + * Creates a new QueryBuilder with modified configuration. + * Used by single(), maybeSingle(), count(), and select() to create new instances. + */ + private cloneWithChanges< + NewSelected extends + | keyof InferSchemaOutputFromFMTable + | Record>> = Selected, + NewSingle extends "exact" | "maybe" | false = SingleMode, + NewCount extends boolean = IsCount, + >(changes: { + selectedFields?: NewSelected; + singleMode?: NewSingle; + isCountMode?: NewCount; + queryOptions?: Partial>>; + fieldMapping?: Record; + }): QueryBuilder { + const newBuilder = new QueryBuilder< + Occ, + NewSelected, + NewSingle, + NewCount, + Expands + >({ + occurrence: this.occurrence, + databaseName: this.databaseName, + context: this.context, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + newBuilder.queryOptions = { + ...this.queryOptions, + ...changes.queryOptions, + }; + newBuilder.expandConfigs = [...this.expandConfigs]; + newBuilder.singleMode = (changes.singleMode ?? this.singleMode) as any; + newBuilder.isCountMode = (changes.isCountMode ?? this.isCountMode) as any; + newBuilder.fieldMapping = changes.fieldMapping ?? this.fieldMapping; + // Copy navigation metadata + newBuilder.navigation = this.navigation; + newBuilder.urlBuilder = new QueryUrlBuilder( + this.databaseName, + this.occurrence, + this.context, + ); + return newBuilder; + } + + /** + * Select fields using column references. + * Allows renaming fields by using different keys in the object. + * Container fields cannot be selected and will cause a type error. + * + * @example + * db.from(users).list().select({ + * name: users.name, + * userEmail: users.email // renamed! + * }) + * + * @param fields - Object mapping output keys to column references (container fields excluded) + * @returns QueryBuilder with updated selected fields + */ + select< + TSelect extends Record< + string, + Column, false> + >, + >(fields: TSelect): QueryBuilder { + const tableName = getTableName(this.occurrence); + const { selectedFields, fieldMapping } = processSelectWithRenames( + fields, + tableName, + this.logger, + ); + + return this.cloneWithChanges({ + selectedFields: fields as any, + queryOptions: { + select: selectedFields, + }, + fieldMapping: + Object.keys(fieldMapping).length > 0 ? fieldMapping : undefined, + }); + } + + /** + * Filter results using operator expressions (new ORM-style API). + * Supports eq, gt, lt, and, or, etc. operators with Column references. + * Also supports raw OData filter strings as an escape hatch. + * + * @example + * .where(eq(users.hobby, "reading")) + * .where(and(eq(users.active, true), gt(users.age, 18))) + * .where("status eq 'active'") // Raw OData string escape hatch + */ + where( + expression: FilterExpression | string, + ): QueryBuilder { + // Handle raw string filters (escape hatch) + if (typeof expression === "string") { + this.queryOptions.filter = expression; + return this; + } + // Convert FilterExpression to OData filter string + const filterString = expression.toODataFilter(this.databaseUseEntityIds); + this.queryOptions.filter = filterString; + return this; + } + + /** + * Specify the sort order for query results. + * + * @example Single field (ascending by default) + * ```ts + * .orderBy("name") + * .orderBy(users.name) // Column reference + * .orderBy(asc(users.name)) // Explicit ascending + * ``` + * + * @example Single field with explicit direction + * ```ts + * .orderBy(["name", "desc"]) + * .orderBy([users.name, "desc"]) // Column reference + * .orderBy(desc(users.name)) // Explicit descending + * ``` + * + * @example Multiple fields with directions + * ```ts + * .orderBy([["name", "asc"], ["createdAt", "desc"]]) + * .orderBy([[users.name, "asc"], [users.createdAt, "desc"]]) // Column references + * .orderBy(users.name, desc(users.age)) // Variadic with helpers + * ``` + */ + orderBy( + ...orderByArgs: + | [ + | TypeSafeOrderBy> + | Column> + | OrderByExpression>, + ] + | [ + Column>, + ...Array< + | Column> + | OrderByExpression> + >, + ] + ): QueryBuilder { + const tableName = getTableName(this.occurrence); + + // Handle variadic arguments (multiple fields) + if (orderByArgs.length > 1) { + const orderByParts = orderByArgs.map((arg) => { + if (isOrderByExpression(arg)) { + // Validate table match + if (arg.column.tableName !== tableName) { + this.logger.warn( + `Column ${arg.column.toString()} is from table "${arg.column.tableName}", but query is for table "${tableName}"`, + ); + } + const fieldName = arg.column.fieldName; + const transformedField = this.occurrence + ? transformOrderByField(fieldName, this.occurrence) + : fieldName; + return `${transformedField} ${arg.direction}`; + } else if (isColumn(arg)) { + // Validate table match + if (arg.tableName !== tableName) { + this.logger.warn( + `Column ${arg.toString()} is from table "${arg.tableName}", but query is for table "${tableName}"`, + ); + } + const fieldName = arg.fieldName; + const transformedField = this.occurrence + ? transformOrderByField(fieldName, this.occurrence) + : fieldName; + return transformedField; // Default to ascending + } else { + throw new Error( + "Variadic orderBy() only accepts Column or OrderByExpression arguments", + ); + } + }); + this.queryOptions.orderBy = orderByParts; + return this; + } + + // Handle single argument + const orderBy = orderByArgs[0]; + + // Handle OrderByExpression + if (isOrderByExpression(orderBy)) { + // Validate table match + if (orderBy.column.tableName !== tableName) { + this.logger.warn( + `Column ${orderBy.column.toString()} is from table "${orderBy.column.tableName}", but query is for table "${tableName}"`, + ); + } + const fieldName = orderBy.column.fieldName; + const transformedField = this.occurrence + ? transformOrderByField(fieldName, this.occurrence) + : fieldName; + this.queryOptions.orderBy = `${transformedField} ${orderBy.direction}`; + return this; + } + + // Handle Column references + if (isColumn(orderBy)) { + // Validate table match + if (orderBy.tableName !== tableName) { + this.logger.warn( + `Column ${orderBy.toString()} is from table "${orderBy.tableName}", but query is for table "${tableName}"`, + ); + } + // Single Column reference without direction (defaults to ascending) + const fieldName = orderBy.fieldName; + this.queryOptions.orderBy = this.occurrence + ? transformOrderByField(fieldName, this.occurrence) + : fieldName; + return this; + } + // Transform field names to FMFIDs if using entity IDs + if (this.occurrence && orderBy) { + if (Array.isArray(orderBy)) { + // Check if it's a single tuple [field, direction] or array of tuples + if ( + orderBy.length === 2 && + (typeof orderBy[0] === "string" || isColumn(orderBy[0])) && + (orderBy[1] === "asc" || orderBy[1] === "desc") + ) { + // Single tuple: [field, direction] or [column, direction] + const field = isColumn(orderBy[0]) + ? orderBy[0].fieldName + : orderBy[0]; + const direction = orderBy[1] as "asc" | "desc"; + this.queryOptions.orderBy = `${transformOrderByField(field, this.occurrence)} ${direction}`; + } else { + // Array of tuples: [[field, dir], [field, dir], ...] + this.queryOptions.orderBy = ( + orderBy as Array<[any, "asc" | "desc"]> + ).map(([fieldOrCol, direction]) => { + const field = isColumn(fieldOrCol) + ? fieldOrCol.fieldName + : String(fieldOrCol); + const transformedField = transformOrderByField( + field, + this.occurrence!, + ); + return `${transformedField} ${direction}`; + }); + } + } else { + // Single field name (string) + this.queryOptions.orderBy = transformOrderByField( + String(orderBy), + this.occurrence, + ); + } + } else { + // No occurrence/baseTable - pass through as-is + if (Array.isArray(orderBy)) { + if ( + orderBy.length === 2 && + (typeof orderBy[0] === "string" || isColumn(orderBy[0])) && + (orderBy[1] === "asc" || orderBy[1] === "desc") + ) { + // Single tuple: [field, direction] or [column, direction] + const field = isColumn(orderBy[0]) + ? orderBy[0].fieldName + : orderBy[0]; + const direction = orderBy[1] as "asc" | "desc"; + this.queryOptions.orderBy = `${field} ${direction}`; + } else { + // Array of tuples + this.queryOptions.orderBy = ( + orderBy as Array<[any, "asc" | "desc"]> + ).map(([fieldOrCol, direction]) => { + const field = isColumn(fieldOrCol) + ? fieldOrCol.fieldName + : String(fieldOrCol); + return `${field} ${direction}`; + }); + } + } else { + this.queryOptions.orderBy = orderBy; + } + } + return this; + } + + top( + count: number, + ): QueryBuilder { + this.queryOptions.top = count; + return this; + } + + skip( + count: number, + ): QueryBuilder { + this.queryOptions.skip = count; + return this; + } + + expand< + TargetTable extends FMTable, + TSelected extends + | keyof InferSchemaOutputFromFMTable + | Record< + string, + Column> + > = keyof InferSchemaOutputFromFMTable, + TNestedExpands extends ExpandedRelations = {}, + >( + targetTable: ValidExpandTarget, + callback?: ( + builder: QueryBuilder< + TargetTable, + keyof InferSchemaOutputFromFMTable, + false, + false, + {} + >, + ) => QueryBuilder, + ): QueryBuilder< + Occ, + Selected, + SingleMode, + IsCount, + Expands & { + [K in ExtractTableName]: { + schema: InferSchemaOutputFromFMTable; + selected: TSelected; + nested: TNestedExpands; + }; + } + > { + // Use ExpandBuilder.processExpand to handle the expand logic + type TargetBuilder = QueryBuilder< + TargetTable, + keyof InferSchemaOutputFromFMTable, + false, + false, + {} + >; + const expandConfig = this.expandBuilder.processExpand< + TargetTable, + TargetBuilder + >( + targetTable, + this.occurrence, + callback as ((builder: TargetBuilder) => TargetBuilder) | undefined, + () => + new QueryBuilder({ + occurrence: targetTable, + databaseName: this.databaseName, + context: this.context, + databaseUseEntityIds: this.databaseUseEntityIds, + }), + ); + + this.expandConfigs.push(expandConfig); + return this as any; + } + + single(): QueryBuilder { + return this.cloneWithChanges({ singleMode: "exact" as const }); + } + + maybeSingle(): QueryBuilder { + return this.cloneWithChanges({ singleMode: "maybe" as const }); + } + + count(): QueryBuilder { + return this.cloneWithChanges({ + isCountMode: true as const, + queryOptions: { count: true }, + }); + } + + /** + * Builds the OData query string from current query options and expand configs. + */ + private buildQueryString(): string { + // Build query without expand and select (we'll add them manually if using entity IDs) + const queryOptionsWithoutExpandAndSelect = { ...this.queryOptions }; + const originalSelect = queryOptionsWithoutExpandAndSelect.select; + delete queryOptionsWithoutExpandAndSelect.expand; + delete queryOptionsWithoutExpandAndSelect.select; + + let queryString = buildQuery(queryOptionsWithoutExpandAndSelect); + + // Use shared helper for select/expand portion + const selectArray = originalSelect + ? Array.isArray(originalSelect) + ? originalSelect.map(String) + : [String(originalSelect)] + : undefined; + + const selectExpandString = buildSelectExpandQueryString({ + selectedFields: selectArray, + expandConfigs: this.expandConfigs, + table: this.occurrence, + useEntityIds: this.databaseUseEntityIds, + logger: this.logger, + }); + + // Append select/expand to existing query string + if (selectExpandString) { + // Strip leading ? from helper result and append with appropriate separator + const params = selectExpandString.startsWith("?") + ? selectExpandString.slice(1) + : selectExpandString; + const separator = queryString.includes("?") ? "&" : "?"; + queryString = `${queryString}${separator}${params}`; + } + + return queryString; + } + + async execute( + options?: ExecuteMethodOptions, + ): Promise< + Result< + ConditionallyWithODataAnnotations< + QueryReturnType< + InferSchemaOutputFromFMTable, + Selected, + SingleMode, + IsCount, + Expands + >, + EO["includeODataAnnotations"] extends true ? true : false + > + > + > { + const mergedOptions = this.mergeExecuteOptions(options); + const queryString = this.buildQueryString(); + + // Handle $count endpoint + if (this.isCountMode) { + const url = this.urlBuilder.build(queryString, { + isCount: true, + useEntityIds: mergedOptions.useEntityIds, + navigation: this.navigation, + }); + const result = await this.context._makeRequest(url, mergedOptions); + + if (result.error) { + return { data: undefined, error: result.error }; + } + + // OData returns count as a string, convert to number + const count = + typeof result.data === "string" ? Number(result.data) : result.data; + return { data: count as number, error: undefined } as any; + } + + const url = this.urlBuilder.build(queryString, { + isCount: this.isCountMode, + useEntityIds: mergedOptions.useEntityIds, + navigation: this.navigation, + }); + + const result = await this.context._makeRequest(url, mergedOptions); + + if (result.error) { + return { data: undefined, error: result.error }; + } + + return processQueryResponse(result.data, { + occurrence: this.occurrence, + singleMode: this.singleMode, + queryOptions: this.queryOptions as any, + expandConfigs: this.expandConfigs, + skipValidation: options?.skipValidation, + useEntityIds: mergedOptions.useEntityIds, + fieldMapping: this.fieldMapping, + logger: this.logger, + }); + } + + getQueryString(): string { + const queryString = this.buildQueryString(); + return this.urlBuilder.buildPath(queryString, { + useEntityIds: this.databaseUseEntityIds, + navigation: this.navigation, + }); + } + + getRequestConfig(): { method: string; url: string; body?: any } { + const queryString = this.buildQueryString(); + const url = this.urlBuilder.build(queryString, { + isCount: this.isCountMode, + useEntityIds: this.databaseUseEntityIds, + navigation: this.navigation, + }); + + return { + method: "GET", + url, + }; + } + + toRequest(baseUrl: string, options?: ExecuteOptions): Request { + const config = this.getRequestConfig(); + return createODataRequest(baseUrl, config, options); + } + + async processResponse( + response: Response, + options?: ExecuteOptions, + ): Promise< + Result< + QueryReturnType< + InferSchemaOutputFromFMTable, + Selected, + SingleMode, + IsCount, + Expands + > + > + > { + // Check for error responses (important for batch operations) + if (!response.ok) { + const error = await parseErrorResponse( + response, + response.url || + `/${this.databaseName}/${getTableName(this.occurrence)}`, + ); + return { data: undefined, error }; + } + + // Handle 204 No Content (shouldn't happen for queries, but handle it gracefully) + if (response.status === 204) { + // Return empty list for list queries, null for single queries + if (this.singleMode !== false) { + if (this.singleMode === "maybe") { + return { data: null as any, error: undefined }; + } + return { + data: undefined, + error: new RecordCountMismatchError("one", 0), + }; + } + return { data: [] as any, error: undefined }; + } + + // Parse the response body (using safeJsonParse to handle FileMaker's invalid JSON with unquoted ? values) + let rawData; + try { + rawData = await safeJsonParse(response); + } catch (err) { + // Check if it's an empty body error (common with 204 responses) + if (err instanceof SyntaxError && response.status === 204) { + // Handled above, but just in case + return { data: [] as any, error: undefined }; + } + return { + data: undefined, + error: { + name: "ResponseParseError", + message: `Failed to parse response JSON: ${err instanceof Error ? err.message : "Unknown error"}`, + timestamp: new Date(), + } as any, + }; + } + + if (!rawData) { + return { + data: undefined, + error: { + name: "ResponseError", + message: "Response body was empty or null", + timestamp: new Date(), + } as any, + }; + } + + const mergedOptions = this.mergeExecuteOptions(options); + return processQueryResponse(rawData, { + occurrence: this.occurrence, + singleMode: this.singleMode, + queryOptions: this.queryOptions as any, + expandConfigs: this.expandConfigs, + skipValidation: options?.skipValidation, + useEntityIds: mergedOptions.useEntityIds, + fieldMapping: this.fieldMapping, + logger: this.logger, + }); + } +} diff --git a/packages/fmodata/src/client/query/response-processor.ts b/packages/fmodata/src/client/query/response-processor.ts new file mode 100644 index 00000000..c3140601 --- /dev/null +++ b/packages/fmodata/src/client/query/response-processor.ts @@ -0,0 +1,246 @@ +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import type { QueryOptions } from "odata-query"; +import type { FMTable } from "../../orm/table"; +import type { Result } from "../../types"; +import { RecordCountMismatchError } from "../../errors"; +import { transformResponseFields } from "../../transform"; +import { validateListResponse, validateSingleResponse } from "../../validation"; +import type { ExpandValidationConfig } from "../../validation"; +import type { ExpandConfig } from "./expand-builder"; +import { FMTable as FMTableClass } from "../../orm/table"; +import { InternalLogger } from "../../logger"; + +/** + * Configuration for processing query responses + */ +export interface ProcessQueryResponseConfig { + occurrence?: FMTable; + singleMode: "exact" | "maybe" | false; + queryOptions: Partial>; + expandConfigs: ExpandConfig[]; + skipValidation?: boolean; + useEntityIds?: boolean; + // Mapping from field names to output keys (for renamed fields in select) + fieldMapping?: Record; + logger: InternalLogger; +} + +/** + * Builds expand validation configs from internal expand configurations. + * These are used to validate expanded navigation properties. + */ +function buildExpandValidationConfigs( + configs: ExpandConfig[], +): ExpandValidationConfig[] { + return configs.map((config) => { + // Get target table/occurrence from config (stored during expand call) + const targetTable = config.targetTable; + + // Extract schema from target table/occurrence + let targetSchema: Record | undefined; + if (targetTable) { + const tableSchema = (targetTable as any)[FMTableClass.Symbol.Schema]; + if (tableSchema) { + const zodSchema = tableSchema["~standard"]?.schema; + if ( + zodSchema && + typeof zodSchema === "object" && + "shape" in zodSchema + ) { + targetSchema = zodSchema.shape as Record; + } + } + } + + // Extract selected fields from options + const selectedFields = config.options?.select + ? Array.isArray(config.options.select) + ? config.options.select.map((f) => String(f)) + : [String(config.options.select)] + : undefined; + + return { + relation: config.relation, + targetSchema: targetSchema, + targetTable: targetTable, + table: targetTable, // For transformation + selectedFields: selectedFields, + nestedExpands: undefined, // TODO: Handle nested expands if needed + }; + }); +} + +/** + * Extracts records from response data without validation. + * Handles both single and list responses. + */ +function extractRecords( + data: any, + singleMode: "exact" | "maybe" | false, +): Result { + const resp = data as any; + if (singleMode !== false) { + const records = resp.value ?? [resp]; + const count = Array.isArray(records) ? records.length : 1; + + if (count > 1) { + return { + data: undefined, + error: new RecordCountMismatchError( + singleMode === "exact" ? "one" : "at-most-one", + count, + ), + }; + } + + if (count === 0) { + if (singleMode === "exact") { + return { + data: undefined, + error: new RecordCountMismatchError("one", 0), + }; + } + return { data: null as any, error: undefined }; + } + + const record = Array.isArray(records) ? records[0] : records; + return { data: record as any, error: undefined }; + } else { + // Handle list response structure + const records = resp.value ?? []; + return { data: records as any, error: undefined }; + } +} + +/** + * Renames fields in response data according to the field mapping. + * Used when select() is called with renamed fields (e.g., { userEmail: users.email }). + */ +function renameFieldsInResponse( + data: any, + fieldMapping: Record, +): any { + if (!data || typeof data !== "object") { + return data; + } + + // Handle array responses + if (Array.isArray(data)) { + return data.map((item) => renameFieldsInResponse(item, fieldMapping)); + } + + // Handle OData list response structure + if ("value" in data && Array.isArray(data.value)) { + return { + ...data, + value: data.value.map((item: any) => + renameFieldsInResponse(item, fieldMapping), + ), + }; + } + + // Handle single record + const renamed: Record = {}; + for (const [key, value] of Object.entries(data)) { + // Check if this field should be renamed + const outputKey = fieldMapping[key]; + if (outputKey) { + renamed[outputKey] = value; + } else { + renamed[key] = value; + } + } + return renamed; +} + +/** + * Processes a query response by transforming field IDs and validating the data. + * This function consolidates the response processing logic that was duplicated + * across multiple navigation branches in QueryBuilder.execute(). + */ +export async function processQueryResponse( + response: any, + config: ProcessQueryResponseConfig, +): Promise> { + const { occurrence, singleMode, skipValidation, useEntityIds, fieldMapping } = + config; + + // Transform response if needed + let data = response; + if (occurrence && useEntityIds) { + const expandValidationConfigs = buildExpandValidationConfigs( + config.expandConfigs, + ); + data = transformResponseFields( + response, + occurrence, + expandValidationConfigs, + ); + } + + // Skip validation path + if (skipValidation) { + const result = extractRecords(data, singleMode); + // Rename fields AFTER extraction (but before returning) + if (result.data && fieldMapping && Object.keys(fieldMapping).length > 0) { + return { + ...result, + data: renameFieldsInResponse(result.data, fieldMapping), + }; + } + return result; + } + + // Validation path + // Get schema from occurrence if available + let schema: Record | undefined; + if (occurrence) { + const tableSchema = (occurrence as any)[FMTableClass.Symbol.Schema]; + if (tableSchema) { + const zodSchema = tableSchema["~standard"]?.schema; + if (zodSchema && typeof zodSchema === "object" && "shape" in zodSchema) { + schema = zodSchema.shape as Record; + } + } + } + + const selectedFields = config.queryOptions.select + ? ((Array.isArray(config.queryOptions.select) + ? config.queryOptions.select.map((f) => String(f)) + : [String(config.queryOptions.select)]) as (keyof T)[]) + : undefined; + const expandValidationConfigs = buildExpandValidationConfigs( + config.expandConfigs, + ); + + // Validate with original field names + const validationResult = + singleMode !== false + ? await validateSingleResponse( + data, + schema, + selectedFields as string[] | undefined, + expandValidationConfigs, + singleMode, + ) + : await validateListResponse( + data, + schema, + selectedFields as string[] | undefined, + expandValidationConfigs, + ); + + if (!validationResult.valid) { + return { data: undefined, error: validationResult.error }; + } + + // Rename fields AFTER validation completes + if (fieldMapping && Object.keys(fieldMapping).length > 0) { + return { + data: renameFieldsInResponse(validationResult.data, fieldMapping), + error: undefined, + }; + } + + return { data: validationResult.data as any, error: undefined }; +} diff --git a/packages/fmodata/src/client/query/types.ts b/packages/fmodata/src/client/query/types.ts new file mode 100644 index 00000000..a3b81441 --- /dev/null +++ b/packages/fmodata/src/client/query/types.ts @@ -0,0 +1,99 @@ +import type { Column } from "../../orm/column"; + +/** + * Type-safe orderBy type that provides better DX than odata-query's default. + * + * Supported forms: + * - `keyof T` - single field name (defaults to ascending) + * - `[keyof T, 'asc' | 'desc']` - single field with explicit direction + * - `Array<[keyof T, 'asc' | 'desc']>` - multiple fields with directions + * + * This type intentionally EXCLUDES `Array` to avoid ambiguity + * between [field1, field2] and [field, direction]. + */ +export type TypeSafeOrderBy = + | (keyof T & string) // Single field name + | [keyof T & string, "asc" | "desc"] // Single field with direction + | Array<[keyof T & string, "asc" | "desc"]>; // Multiple fields with directions + +// Internal type for expand configuration +export type ExpandConfig = { + relation: string; + options?: Partial>; + targetTable?: import("../../orm/table").FMTable; +}; + +// Type to represent expanded relations +export type ExpandedRelations = Record< + string, + { schema: any; selected: any; nested?: ExpandedRelations } +>; + +/** + * Extract the value type from a Column. + * This uses the phantom type stored in Column to get the actual value type (output type for reading). + */ +type ExtractColumnType = + C extends Column ? T : never; + +/** + * Map a select object to its return type. + * For each key in the select object, extract the type from the corresponding Column. + */ +type MapSelectToReturnType< + TSelect extends Record>, + TSchema extends Record, +> = { + [K in keyof TSelect]: ExtractColumnType; +}; + +/** + * Helper: Resolve a single expand's return type, including nested expands + */ +export type ResolveExpandType< + Exp extends { schema: any; selected: any; nested?: ExpandedRelations }, +> = // Handle the selected fields +(Exp["selected"] extends Record> + ? MapSelectToReturnType + : Exp["selected"] extends keyof Exp["schema"] + ? Pick + : Exp["schema"]) & + // Recursively handle nested expands + (Exp["nested"] extends ExpandedRelations + ? ResolveExpandedRelations + : {}); + +/** + * Helper: Resolve all expanded relations recursively + */ +export type ResolveExpandedRelations = { + [K in keyof Exps]: ResolveExpandType[]; +}; + +export type QueryReturnType< + T extends Record, + Selected extends keyof T | Record>, + SingleMode extends "exact" | "maybe" | false, + IsCount extends boolean, + Expands extends ExpandedRelations, +> = IsCount extends true + ? number + : // Use tuple wrapping [Selected] extends [...] to prevent distribution over unions + [Selected] extends [Record>] + ? SingleMode extends "exact" + ? MapSelectToReturnType & ResolveExpandedRelations + : SingleMode extends "maybe" + ? + | (MapSelectToReturnType & + ResolveExpandedRelations) + | null + : (MapSelectToReturnType & + ResolveExpandedRelations)[] + : // Use tuple wrapping to prevent distribution over union of keys + [Selected] extends [keyof T] + ? SingleMode extends "exact" + ? Pick & ResolveExpandedRelations + : SingleMode extends "maybe" + ? (Pick & ResolveExpandedRelations) | null + : (Pick & ResolveExpandedRelations)[] + : never; diff --git a/packages/fmodata/src/client/query/url-builder.ts b/packages/fmodata/src/client/query/url-builder.ts new file mode 100644 index 00000000..f9b466d4 --- /dev/null +++ b/packages/fmodata/src/client/query/url-builder.ts @@ -0,0 +1,179 @@ +import type { FMTable } from "../../orm/table"; +import { getTableName } from "../../orm/table"; +import { resolveTableId } from "../builders/table-utils"; +import type { ExecutionContext } from "../../types"; + +/** + * Configuration for navigation from RecordBuilder or EntitySet + */ +export interface NavigationConfig { + recordId?: string | number; + relation: string; + sourceTableName: string; + baseRelation?: string; // For chained navigations from navigated EntitySets + basePath?: string; // Full base path for chained entity set navigations +} + +/** + * Builds OData query URLs for different navigation modes. + * Handles: + * - Record navigation: /database/sourceTable('recordId')/relation + * - Entity set navigation: /database/sourceTable/relation + * - Count endpoint: /database/tableId/$count + * - Standard queries: /database/tableId + */ +export class QueryUrlBuilder { + constructor( + private databaseName: string, + private occurrence: FMTable, + private context: ExecutionContext, + ) {} + + /** + * Builds the full URL for a query request. + * + * @param queryString - The OData query string (e.g., "?$filter=...&$select=...") + * @param options - Options including whether this is a count query, useEntityIds override, and navigation config + */ + build( + queryString: string, + options: { + isCount?: boolean; + useEntityIds?: boolean; + navigation?: NavigationConfig; + }, + ): string { + const tableId = resolveTableId( + this.occurrence, + getTableName(this.occurrence), + this.context, + options.useEntityIds, + ); + + const navigation = options.navigation; + if (navigation?.recordId && navigation?.relation) { + return this.buildRecordNavigation(queryString, tableId, navigation); + } + if (navigation?.relation) { + return this.buildEntitySetNavigation(queryString, tableId, navigation); + } + if (options.isCount) { + return `/${this.databaseName}/${tableId}/$count${queryString}`; + } + return `/${this.databaseName}/${tableId}${queryString}`; + } + + /** + * Builds URL for record navigation: /database/sourceTable('recordId')/relation + * or /database/sourceTable/baseRelation('recordId')/relation for chained navigations + */ + private buildRecordNavigation( + queryString: string, + tableId: string, + navigation: NavigationConfig, + ): string { + const { sourceTableName, baseRelation, recordId, relation } = navigation; + const base = baseRelation + ? `${sourceTableName}/${baseRelation}('${recordId}')` + : `${sourceTableName}('${recordId}')`; + return `/${this.databaseName}/${base}/${relation}${queryString}`; + } + + /** + * Builds URL for entity set navigation: /database/sourceTable/relation + * or /database/basePath/relation for chained navigations + */ + private buildEntitySetNavigation( + queryString: string, + tableId: string, + navigation: NavigationConfig, + ): string { + const { sourceTableName, basePath, relation } = navigation; + const base = basePath || sourceTableName; + return `/${this.databaseName}/${base}/${relation}${queryString}`; + } + + /** + * Builds a query string path (without database prefix) for getQueryString(). + * Used when the full URL is not needed. + */ + buildPath( + queryString: string, + options?: { useEntityIds?: boolean; navigation?: NavigationConfig }, + ): string { + const useEntityIds = options?.useEntityIds; + const navigation = options?.navigation; + const tableId = resolveTableId( + this.occurrence, + getTableName(this.occurrence), + this.context, + useEntityIds, + ); + + if (navigation?.recordId && navigation?.relation) { + const { sourceTableName, baseRelation, recordId, relation } = navigation; + const base = baseRelation + ? `${sourceTableName}/${baseRelation}('${recordId}')` + : `${sourceTableName}('${recordId}')`; + return queryString + ? `/${base}/${relation}${queryString}` + : `/${base}/${relation}`; + } + if (navigation?.relation) { + const { sourceTableName, basePath, relation } = navigation; + const base = basePath || sourceTableName; + return queryString + ? `/${base}/${relation}${queryString}` + : `/${base}/${relation}`; + } + return queryString ? `/${tableId}${queryString}` : `/${tableId}`; + } + + /** + * Build URL for record operations (single record by ID). + * Used by RecordBuilder to build URLs like /database/table('id'). + * + * @param recordId - The record ID + * @param queryString - The OData query string (e.g., "?$select=...") + * @param options - Options including operation type and useEntityIds override + */ + buildRecordUrl( + recordId: string | number, + queryString: string, + options?: { + operation?: "getSingleField"; + operationParam?: string; + useEntityIds?: boolean; + isNavigateFromEntitySet?: boolean; + navigateSourceTableName?: string; + navigateRelation?: string; + }, + ): string { + const tableId = resolveTableId( + this.occurrence, + getTableName(this.occurrence), + this.context, + options?.useEntityIds, + ); + + // Build the base URL depending on whether this came from a navigated EntitySet + let url: string; + if ( + options?.isNavigateFromEntitySet && + options.navigateSourceTableName && + options.navigateRelation + ) { + // From navigated EntitySet: /sourceTable/relation('recordId') + url = `/${this.databaseName}/${options.navigateSourceTableName}/${options.navigateRelation}('${recordId}')`; + } else { + // Normal record: /tableName('recordId') - use FMTID if configured + url = `/${this.databaseName}/${tableId}('${recordId}')`; + } + + if (options?.operation === "getSingleField" && options.operationParam) { + url += `/${options.operationParam}`; + } + + return url + queryString; + } +} diff --git a/packages/fmodata/src/client/record-builder.ts b/packages/fmodata/src/client/record-builder.ts new file mode 100644 index 00000000..48f66b8f --- /dev/null +++ b/packages/fmodata/src/client/record-builder.ts @@ -0,0 +1,678 @@ +import type { + ExecutionContext, + ExecutableBuilder, + Result, + ODataFieldResponse, + ExecuteOptions, + ConditionallyWithODataAnnotations, + ExecuteMethodOptions, +} from "../types"; +import type { + FMTable, + InferSchemaOutputFromFMTable, + ValidExpandTarget, + ExtractTableName, + ValidateNoContainerFields, +} from "../orm/table"; +import { getTableName, getNavigationPaths } from "../orm/table"; +import { safeJsonParse } from "./sanitize-json"; +import { parseErrorResponse } from "./error-parser"; +import { QueryBuilder } from "./query-builder"; +import { type FFetchOptions } from "@fetchkit/ffetch"; +import { isColumn, type Column } from "../orm/column"; +import { + type ExpandConfig, + type ExpandedRelations, + ExpandBuilder, + resolveTableId, + mergeExecuteOptions, + processODataResponse, + getSchemaFromTable, + processSelectWithRenames, + buildSelectExpandQueryString, + createODataRequest, +} from "./builders/index"; +import { + type ResolveExpandedRelations, + type ResolveExpandType, +} from "./query/types"; +import { createLogger, InternalLogger, Logger } from "../logger"; + +/** + * Extract the value type from a Column. + * This uses the phantom type stored in Column to get the actual value type. + */ +type ExtractColumnType = C extends Column ? T : never; + +/** + * Map a select object to its return type. + * For each key in the select object, extract the type from the corresponding Column. + */ +type MapSelectToReturnType< + TSelect extends Record>, + TSchema extends Record, +> = { + [K in keyof TSelect]: ExtractColumnType; +}; + +// Return type for RecordBuilder execute +export type RecordReturnType< + Schema extends Record, + IsSingleField extends boolean, + FieldColumn extends Column | undefined, + Selected extends + | keyof Schema + | Record>>>, + Expands extends ExpandedRelations, +> = IsSingleField extends true + ? FieldColumn extends Column + ? TOutput + : never + : // Use tuple wrapping [Selected] extends [...] to prevent distribution over unions + [Selected] extends [Record>] + ? MapSelectToReturnType & + ResolveExpandedRelations + : // Use tuple wrapping to prevent distribution over union of keys + [Selected] extends [keyof Schema] + ? Pick & ResolveExpandedRelations + : never; + +export class RecordBuilder< + Occ extends FMTable = FMTable, + IsSingleField extends boolean = false, + FieldColumn extends Column | undefined = undefined, + Selected extends + | keyof InferSchemaOutputFromFMTable> + | Record< + string, + Column>> + > = keyof InferSchemaOutputFromFMTable>, + Expands extends ExpandedRelations = {}, +> implements + ExecutableBuilder< + RecordReturnType< + InferSchemaOutputFromFMTable>, + IsSingleField, + FieldColumn, + Selected, + Expands + > + > +{ + private table: Occ; + private databaseName: string; + private context: ExecutionContext; + private recordId: string | number; + private operation?: "getSingleField" | "navigate"; + private operationParam?: string; + private operationColumn?: Column; + private isNavigateFromEntitySet?: boolean; + private navigateRelation?: string; + private navigateSourceTableName?: string; + + private databaseUseEntityIds: boolean; + + // Properties for select/expand support + private selectedFields?: string[]; + private expandConfigs: ExpandConfig[] = []; + // Mapping from field names to output keys (for renamed fields in select) + private fieldMapping?: Record; + + private logger: InternalLogger; + + constructor(config: { + occurrence: Occ; + databaseName: string; + context: ExecutionContext; + recordId: string | number; + databaseUseEntityIds?: boolean; + }) { + this.table = config.occurrence; + this.databaseName = config.databaseName; + this.context = config.context; + this.recordId = config.recordId; + this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; + this.logger = config.context?._getLogger?.() ?? createLogger(); + } + + /** + * Helper to merge database-level useEntityIds with per-request options + */ + private mergeExecuteOptions( + options?: RequestInit & FFetchOptions & ExecuteOptions, + ): RequestInit & FFetchOptions & { useEntityIds?: boolean } { + return mergeExecuteOptions(options, this.databaseUseEntityIds); + } + + /** + * Gets the table ID (FMTID) if using entity IDs, otherwise returns the table name + * @param useEntityIds - Optional override for entity ID usage + */ + private getTableId(useEntityIds?: boolean): string { + if (!this.table) { + throw new Error("Table occurrence is required"); + } + return resolveTableId( + this.table, + getTableName(this.table), + this.context, + useEntityIds, + ); + } + + /** + * Creates a new RecordBuilder with modified configuration. + * Used by select() to create new instances. + */ + private cloneWithChanges< + NewSelected extends + | keyof InferSchemaOutputFromFMTable> + | Record< + string, + Column>> + > = Selected, + >(changes: { + selectedFields?: string[]; + fieldMapping?: Record; + }): RecordBuilder { + const newBuilder = new RecordBuilder< + Occ, + false, + FieldColumn, + NewSelected, + Expands + >({ + occurrence: this.table, + databaseName: this.databaseName, + context: this.context, + recordId: this.recordId, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + newBuilder.selectedFields = changes.selectedFields ?? this.selectedFields; + newBuilder.fieldMapping = changes.fieldMapping ?? this.fieldMapping; + newBuilder.expandConfigs = [...this.expandConfigs]; + // Preserve navigation context + newBuilder.isNavigateFromEntitySet = this.isNavigateFromEntitySet; + newBuilder.navigateRelation = this.navigateRelation; + newBuilder.navigateSourceTableName = this.navigateSourceTableName; + newBuilder.operationColumn = this.operationColumn; + return newBuilder; + } + + getSingleField< + TColumn extends Column>, any>, + >( + column: TColumn, + ): RecordBuilder< + Occ, + true, + TColumn, + keyof InferSchemaOutputFromFMTable>, + {} + > { + // Runtime validation: ensure column is from the correct table + const tableName = getTableName(this.table); + if (!column.isFromTable(tableName)) { + throw new Error( + `Column ${column.toString()} is not from table ${tableName}`, + ); + } + + const newBuilder = new RecordBuilder< + Occ, + true, + TColumn, + keyof InferSchemaOutputFromFMTable>, + {} + >({ + occurrence: this.table, + databaseName: this.databaseName, + context: this.context, + recordId: this.recordId, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + newBuilder.operation = "getSingleField"; + newBuilder.operationColumn = column; + newBuilder.operationParam = column.getFieldIdentifier( + this.databaseUseEntityIds, + ); + // Preserve navigation context + newBuilder.isNavigateFromEntitySet = this.isNavigateFromEntitySet; + newBuilder.navigateRelation = this.navigateRelation; + newBuilder.navigateSourceTableName = this.navigateSourceTableName; + return newBuilder; + } + + /** + * Select fields using column references. + * Allows renaming fields by using different keys in the object. + * Container fields cannot be selected and will cause a type error. + * + * @example + * db.from(contacts).get("uuid").select({ + * name: contacts.name, + * userEmail: contacts.email // renamed! + * }) + * + * @param fields - Object mapping output keys to column references (container fields excluded) + * @returns RecordBuilder with updated selected fields + */ + select< + TSelect extends Record< + string, + Column, false> + >, + >(fields: TSelect): RecordBuilder { + const tableName = getTableName(this.table); + const { selectedFields, fieldMapping } = processSelectWithRenames( + fields, + tableName, + this.logger, + ); + + return this.cloneWithChanges({ + selectedFields, + fieldMapping: + Object.keys(fieldMapping).length > 0 ? fieldMapping : undefined, + }) as any; + } + + /** + * Expand a navigation property to include related records. + * Supports nested select, filter, orderBy, and expand operations. + * + * @example + * ```typescript + * // Simple expand with FMTable object + * const contact = await db.from(contacts).get("uuid").expand(users).execute(); + * + * // Expand with select + * const contact = await db.from(contacts).get("uuid") + * .expand(users, b => b.select({ username: users.username, email: users.email })) + * .execute(); + * ``` + */ + expand< + TargetTable extends FMTable, + TSelected extends + | keyof InferSchemaOutputFromFMTable + | Record< + string, + Column> + > = keyof InferSchemaOutputFromFMTable, + TNestedExpands extends ExpandedRelations = {}, + >( + targetTable: ValidExpandTarget, + callback?: ( + builder: QueryBuilder< + TargetTable, + keyof InferSchemaOutputFromFMTable, + false, + false, + {} + >, + ) => QueryBuilder, + ): RecordBuilder< + Occ, + false, + FieldColumn, + Selected, + Expands & { + [K in ExtractTableName]: { + schema: InferSchemaOutputFromFMTable; + selected: TSelected; + nested: TNestedExpands; + }; + } + > { + // Create new builder with updated types + const newBuilder = new RecordBuilder< + Occ, + false, + FieldColumn, + Selected, + any + >({ + occurrence: this.table, + databaseName: this.databaseName, + context: this.context, + recordId: this.recordId, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + + // Copy existing state + newBuilder.selectedFields = this.selectedFields; + newBuilder.fieldMapping = this.fieldMapping; + newBuilder.expandConfigs = [...this.expandConfigs]; + newBuilder.isNavigateFromEntitySet = this.isNavigateFromEntitySet; + newBuilder.navigateRelation = this.navigateRelation; + newBuilder.navigateSourceTableName = this.navigateSourceTableName; + newBuilder.operationColumn = this.operationColumn; + + // Use ExpandBuilder.processExpand to handle the expand logic + const expandBuilder = new ExpandBuilder( + this.databaseUseEntityIds, + this.logger, + ); + type TargetBuilder = QueryBuilder< + TargetTable, + keyof InferSchemaOutputFromFMTable, + false, + false, + {} + >; + const expandConfig = expandBuilder.processExpand< + TargetTable, + TargetBuilder + >( + targetTable, + this.table ?? undefined, + callback as ((builder: TargetBuilder) => TargetBuilder) | undefined, + () => + new QueryBuilder({ + occurrence: targetTable, + databaseName: this.databaseName, + context: this.context, + databaseUseEntityIds: this.databaseUseEntityIds, + }), + ); + + newBuilder.expandConfigs.push(expandConfig); + return newBuilder as any; + } + + navigate>( + targetTable: ValidExpandTarget, + ): QueryBuilder< + TargetTable, + keyof InferSchemaOutputFromFMTable, + false, + false + > { + // Extract name and validate + const relationName = getTableName(targetTable); + + // Runtime validation: Check if relation name is in navigationPaths + if (this.table) { + const navigationPaths = getNavigationPaths(this.table); + if (navigationPaths && !navigationPaths.includes(relationName)) { + this.logger.warn( + `Cannot navigate to "${relationName}". Valid navigation paths: ${navigationPaths.length > 0 ? navigationPaths.join(", ") : "none"}`, + ); + } + } + + // Create QueryBuilder with target table + const builder = new QueryBuilder({ + occurrence: targetTable, + databaseName: this.databaseName, + context: this.context, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + + // Store the navigation info - we'll use it in execute + // Use relation name as-is (entity ID handling is done in QueryBuilder) + const relationId = relationName; + + // If this RecordBuilder came from a navigated EntitySet, we need to preserve that base path + let sourceTableName: string; + let baseRelation: string | undefined; + if ( + this.isNavigateFromEntitySet && + this.navigateSourceTableName && + this.navigateRelation + ) { + // Build the base path: /sourceTable/relation('recordId')/newRelation + sourceTableName = this.navigateSourceTableName; + baseRelation = this.navigateRelation; + } else { + // Normal record navigation: /tableName('recordId')/relation + // Use table ID if available, otherwise table name + if (!this.table) { + throw new Error("Table occurrence is required for navigation"); + } + sourceTableName = resolveTableId( + this.table, + getTableName(this.table), + this.context, + this.databaseUseEntityIds, + ); + } + + (builder as any).navigation = { + recordId: this.recordId, + relation: relationId, + sourceTableName, + baseRelation, + }; + + return builder; + } + + /** + * Builds the complete query string including $select and $expand parameters. + */ + private buildQueryString(): string { + return buildSelectExpandQueryString({ + selectedFields: this.selectedFields, + expandConfigs: this.expandConfigs, + table: this.table, + useEntityIds: this.databaseUseEntityIds, + logger: this.logger, + }); + } + + async execute( + options?: ExecuteMethodOptions, + ): Promise< + Result< + ConditionallyWithODataAnnotations< + RecordReturnType< + InferSchemaOutputFromFMTable>, + IsSingleField, + FieldColumn, + Selected, + Expands + >, + EO["includeODataAnnotations"] extends true ? true : false + > + > + > { + let url: string; + + // Build the base URL depending on whether this came from a navigated EntitySet + if ( + this.isNavigateFromEntitySet && + this.navigateSourceTableName && + this.navigateRelation + ) { + // From navigated EntitySet: /sourceTable/relation('recordId') + url = `/${this.databaseName}/${this.navigateSourceTableName}/${this.navigateRelation}('${this.recordId}')`; + } else { + // Normal record: /tableName('recordId') - use FMTID if configured + const tableId = this.getTableId( + options?.useEntityIds ?? this.databaseUseEntityIds, + ); + url = `/${this.databaseName}/${tableId}('${this.recordId}')`; + } + + if (this.operation === "getSingleField" && this.operationParam) { + url += `/${this.operationParam}`; + } else { + // Add query string for select/expand (only when not getting a single field) + const queryString = this.buildQueryString(); + url += queryString; + } + + const mergedOptions = this.mergeExecuteOptions(options); + const result = await this.context._makeRequest(url, mergedOptions); + + if (result.error) { + return { data: undefined, error: result.error }; + } + + let response = result.data; + + // Handle single field operation + if (this.operation === "getSingleField") { + // Single field returns a JSON object with @context and value + // The type is extracted from the Column stored in FieldColumn generic + const fieldResponse = response as ODataFieldResponse; + return { data: fieldResponse.value as any, error: undefined }; + } + + // Use shared response processor + const expandBuilder = new ExpandBuilder( + mergedOptions.useEntityIds ?? false, + this.logger, + ); + const expandValidationConfigs = expandBuilder.buildValidationConfigs( + this.expandConfigs, + ); + + return processODataResponse(response, { + table: this.table, + schema: getSchemaFromTable(this.table), + singleMode: "exact", + selectedFields: this.selectedFields, + expandValidationConfigs, + skipValidation: options?.skipValidation, + useEntityIds: mergedOptions.useEntityIds, + fieldMapping: this.fieldMapping, + }); + } + + getRequestConfig(): { method: string; url: string; body?: any } { + let url: string; + + // Build the base URL depending on whether this came from a navigated EntitySet + if ( + this.isNavigateFromEntitySet && + this.navigateSourceTableName && + this.navigateRelation + ) { + // From navigated EntitySet: /sourceTable/relation('recordId') + url = `/${this.databaseName}/${this.navigateSourceTableName}/${this.navigateRelation}('${this.recordId}')`; + } else { + // For batch operations, use database-level setting (no per-request override available here) + const tableId = this.getTableId(this.databaseUseEntityIds); + url = `/${this.databaseName}/${tableId}('${this.recordId}')`; + } + + if (this.operation === "getSingleField" && this.operationColumn) { + // Use the column's getFieldIdentifier to support entity IDs + url += `/${this.operationColumn.getFieldIdentifier( + this.databaseUseEntityIds, + )}`; + } else if (this.operation === "getSingleField" && this.operationParam) { + // Fallback for backwards compatibility (shouldn't happen in normal flow) + url += `/${this.operationParam}`; + } else { + // Add query string for select/expand (only when not getting a single field) + const queryString = this.buildQueryString(); + url += queryString; + } + + return { + method: "GET", + url, + }; + } + + /** + * Returns the query string for this record builder (for testing purposes). + */ + getQueryString(): string { + let path: string; + + // Build the path depending on navigation context + if ( + this.isNavigateFromEntitySet && + this.navigateSourceTableName && + this.navigateRelation + ) { + path = `/${this.navigateSourceTableName}/${this.navigateRelation}('${this.recordId}')`; + } else { + // Use getTableId to respect entity ID settings (same as getRequestConfig) + const tableId = this.getTableId(this.databaseUseEntityIds); + path = `/${tableId}('${this.recordId}')`; + } + + if (this.operation === "getSingleField" && this.operationColumn) { + return `${path}/${this.operationColumn.getFieldIdentifier( + this.databaseUseEntityIds, + )}`; + } else if (this.operation === "getSingleField" && this.operationParam) { + // Fallback for backwards compatibility (shouldn't happen in normal flow) + return `${path}/${this.operationParam}`; + } + + const queryString = this.buildQueryString(); + return `${path}${queryString}`; + } + + toRequest(baseUrl: string, options?: ExecuteOptions): Request { + const config = this.getRequestConfig(); + return createODataRequest(baseUrl, config, options); + } + + async processResponse( + response: Response, + options?: ExecuteOptions, + ): Promise< + Result< + RecordReturnType< + InferSchemaOutputFromFMTable>, + IsSingleField, + FieldColumn, + Selected, + Expands + > + > + > { + // Check for error responses (important for batch operations) + if (!response.ok) { + const tableName = this.table ? getTableName(this.table) : "unknown"; + const error = await parseErrorResponse( + response, + response.url || `/${this.databaseName}/${tableName}`, + ); + return { data: undefined, error }; + } + + // Use safeJsonParse to handle FileMaker's invalid JSON with unquoted ? values + const rawResponse = await safeJsonParse(response); + + // Handle single field operation + if (this.operation === "getSingleField") { + // Single field returns a JSON object with @context and value + // The type is extracted from the Column stored in FieldColumn generic + const fieldResponse = rawResponse as ODataFieldResponse; + return { data: fieldResponse.value as any, error: undefined }; + } + + // Use shared response processor + const mergedOptions = mergeExecuteOptions( + options, + this.databaseUseEntityIds, + ); + const expandBuilder = new ExpandBuilder( + mergedOptions.useEntityIds ?? false, + this.logger, + ); + const expandValidationConfigs = expandBuilder.buildValidationConfigs( + this.expandConfigs, + ); + + return processODataResponse(rawResponse, { + table: this.table, + schema: getSchemaFromTable(this.table), + singleMode: "exact", + selectedFields: this.selectedFields, + expandValidationConfigs, + skipValidation: options?.skipValidation, + useEntityIds: mergedOptions.useEntityIds, + fieldMapping: this.fieldMapping, + }); + } +} diff --git a/packages/fmodata/src/client/response-processor.ts b/packages/fmodata/src/client/response-processor.ts new file mode 100644 index 00000000..92b6ad42 --- /dev/null +++ b/packages/fmodata/src/client/response-processor.ts @@ -0,0 +1,89 @@ +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import type { FMTable } from "../orm/table"; +import type { ExecuteOptions } from "../types"; +import type { ExpandValidationConfig } from "../validation"; +import { ValidationError, ResponseStructureError } from "../errors"; +import { transformResponseFields } from "../transform"; +import { validateListResponse, validateRecord } from "../validation"; + +// Type for raw OData responses +export type ODataResponse = T & { + "@odata.context"?: string; + "@odata.count"?: number; +}; + +export type ODataListResponse = ODataResponse<{ + value: T[]; +}>; + +export type ODataRecordResponse = ODataResponse< + T & { + "@id"?: string; + "@editLink"?: string; + } +>; + +/** + * Transform field IDs back to names using the table configuration + */ +export function applyFieldTransformation>( + response: ODataResponse | ODataListResponse, + table: FMTable, + expandConfigs?: ExpandValidationConfig[], +): ODataResponse | ODataListResponse { + return transformResponseFields(response, table, expandConfigs) as + | ODataResponse + | ODataListResponse; +} + +/** + * Apply schema validation and transformation to data + */ +export async function applyValidation>( + data: T | T[], + schema?: Record, + selectedFields?: (keyof T)[], + expandConfigs?: ExpandValidationConfig[], +): Promise< + | { valid: true; data: T | T[] } + | { valid: false; error: ValidationError | ResponseStructureError } +> { + if (Array.isArray(data)) { + // Validate as a list + const validation = await validateListResponse( + { value: data }, + schema, + selectedFields as string[] | undefined, + expandConfigs, + ); + if (!validation.valid) { + return { valid: false, error: validation.error }; + } + return { valid: true, data: validation.data }; + } else { + // Validate as a single record + const validation = await validateRecord( + data, + schema, + selectedFields, + expandConfigs, + ); + if (!validation.valid) { + return { valid: false, error: validation.error }; + } + return { valid: true, data: validation.data }; + } +} + +/** + * Extract value array from OData list response, or wrap single record in array + */ +export function extractListValue( + response: ODataListResponse | ODataRecordResponse, +): T[] { + if ("value" in response && Array.isArray(response.value)) { + return response.value; + } + // Single record responses return the record directly + return [response as T]; +} diff --git a/packages/fmodata/src/client/sanitize-json.ts b/packages/fmodata/src/client/sanitize-json.ts new file mode 100644 index 00000000..96b9ebc7 --- /dev/null +++ b/packages/fmodata/src/client/sanitize-json.ts @@ -0,0 +1,66 @@ +/** + * FileMaker OData API sometimes returns invalid JSON containing unquoted `?` + * characters as field values (e.g., `"fieldName": ?`), which causes JSON.parse() + * to fail. This module provides utilities to sanitize such responses before parsing. + */ + +import { ResponseParseError } from "../errors"; + +/** + * Sanitizes FileMaker OData JSON responses by replacing unquoted `?` values with `null`. + * + * FileMaker uses `?` to represent undefined/null values in its OData responses, + * but this is not valid JSON. This function converts those to proper `null` values. + * + * The regex uses two patterns: + * 1. `/:\s*\?(?=\s*[,}\]])/g` - for values in objects (after `:`) + * 2. `/(?<=[\[,])\s*\?(?=\s*[,\]])/g` - for values in arrays (after `[` or `,`) + * + * @param text - The raw response text from FileMaker OData API + * @returns Sanitized JSON string with `?` values replaced by `null` + * + * @example + * sanitizeFileMakerJson('{"field1": "valid", "field2": ?, "field3": null}') + * // Returns: '{"field1": "valid", "field2": null, "field3": null}' + */ +export function sanitizeFileMakerJson(text: string): string { + // Replace unquoted ? values in objects (after colon) + // Also handles arrays when the array is a value in an object + let result = text.replace(/:\s*\?(?=\s*[,}\]])/g, ": null"); + + // Replace unquoted ? values directly in arrays (not after colon) + // e.g., [1, ?, 3] -> [1, null, 3] + result = result.replace(/(?<=[\[,])\s*\?(?=\s*[,\]])/g, " null"); + + return result; +} + +/** + * Safely parses a Response body as JSON, handling FileMaker's invalid JSON responses. + * + * This function reads the response as text first, sanitizes any invalid `?` values, + * and then parses the sanitized JSON. This approach handles the case where FileMaker + * returns a Content-Type of application/json but the body contains invalid JSON. + * + * @param response - The fetch Response object + * @returns Parsed JSON data + * @throws ResponseParseError if the JSON is still invalid after sanitization (includes sanitized text for debugging) + */ +export async function safeJsonParse( + response: Response, +): Promise { + const text = await response.text(); + const sanitized = sanitizeFileMakerJson(text); + try { + return JSON.parse(sanitized) as T; + } catch (err) { + throw new ResponseParseError( + response.url, + `Failed to parse response as JSON: ${err instanceof Error ? err.message : "Unknown error"}`, + { + rawText: sanitized, + cause: err instanceof Error ? err : undefined, + }, + ); + } +} diff --git a/packages/fmodata/src/client/schema-manager.ts b/packages/fmodata/src/client/schema-manager.ts new file mode 100644 index 00000000..7e5a2dcb --- /dev/null +++ b/packages/fmodata/src/client/schema-manager.ts @@ -0,0 +1,246 @@ +import type { FFetchOptions } from "@fetchkit/ffetch"; +import type { ExecutionContext } from "../types"; + +type GenericField = { + name: string; + nullable?: boolean; + primary?: boolean; + unique?: boolean; + global?: boolean; + repetitions?: number; +}; + +type StringField = GenericField & { + type: "string"; + maxLength?: number; + default?: "USER" | "USERNAME" | "CURRENT_USER"; +}; + +type NumericField = GenericField & { + type: "numeric"; +}; + +type DateField = GenericField & { + type: "date"; + default?: "CURRENT_DATE" | "CURDATE"; +}; + +type TimeField = GenericField & { + type: "time"; + default?: "CURRENT_TIME" | "CURTIME"; +}; + +type TimestampField = GenericField & { + type: "timestamp"; + default?: "CURRENT_TIMESTAMP" | "CURTIMESTAMP"; +}; + +type ContainerField = GenericField & { + type: "container"; + externalSecurePath?: string; +}; + +export type Field = + | StringField + | NumericField + | DateField + | TimeField + | TimestampField + | ContainerField; + +export type { + StringField, + NumericField, + DateField, + TimeField, + TimestampField, + ContainerField, +}; + +type FileMakerField = Omit & { + type: string; +}; + +type TableDefinition = { + tableName: string; + fields: FileMakerField[]; +}; + +export class SchemaManager { + public constructor( + private readonly databaseName: string, + private readonly context: ExecutionContext, + ) {} + + public async createTable( + tableName: string, + fields: Field[], + options?: RequestInit & FFetchOptions, + ): Promise { + const result = await this.context._makeRequest( + `/${this.databaseName}/FileMaker_Tables`, + { + method: "POST", + body: JSON.stringify({ + tableName, + fields: fields.map(SchemaManager.compileFieldDefinition), + }), + ...options, + }, + ); + + if (result.error) { + throw result.error; + } + + return result.data; + } + + public async addFields( + tableName: string, + fields: Field[], + options?: RequestInit & FFetchOptions, + ): Promise { + const result = await this.context._makeRequest( + `/${this.databaseName}/FileMaker_Tables/${tableName}`, + { + method: "PATCH", + body: JSON.stringify({ + fields: fields.map(SchemaManager.compileFieldDefinition), + }), + ...options, + }, + ); + + if (result.error) { + throw result.error; + } + + return result.data; + } + + public async deleteTable( + tableName: string, + options?: RequestInit & FFetchOptions, + ): Promise { + const result = await this.context._makeRequest( + `/${this.databaseName}/FileMaker_Tables/${tableName}`, + { method: "DELETE", ...options }, + ); + + if (result.error) { + throw result.error; + } + } + + public async deleteField( + tableName: string, + fieldName: string, + options?: RequestInit & FFetchOptions, + ): Promise { + const result = await this.context._makeRequest( + `/${this.databaseName}/FileMaker_Tables/${tableName}/${fieldName}`, + { + method: "DELETE", + ...options, + }, + ); + + if (result.error) { + throw result.error; + } + } + + public async createIndex( + tableName: string, + fieldName: string, + options?: RequestInit & FFetchOptions, + ): Promise<{ indexName: string }> { + const result = await this.context._makeRequest<{ indexName: string }>( + `/${this.databaseName}/FileMaker_Indexes/${tableName}`, + { + method: "POST", + body: JSON.stringify({ indexName: fieldName }), + ...options, + }, + ); + + if (result.error) { + throw result.error; + } + + return result.data; + } + + public async deleteIndex( + tableName: string, + fieldName: string, + options?: RequestInit & FFetchOptions, + ): Promise { + const result = await this.context._makeRequest( + `/${this.databaseName}/FileMaker_Indexes/${tableName}/${fieldName}`, + { + method: "DELETE", + ...options, + }, + ); + + if (result.error) { + throw result.error; + } + } + + private static compileFieldDefinition(field: Field): FileMakerField { + let type: string = field.type; + const repetitions = field.repetitions; + + // Handle string fields - convert to varchar and add maxLength if present + if (field.type === "string") { + type = "varchar"; + const stringField = field as StringField; + if (stringField.maxLength !== undefined) { + type += `(${stringField.maxLength})`; + } + } + + // Add repetitions suffix if present + if (repetitions !== undefined) { + type += `[${repetitions}]`; + } + + // Build the result object, excluding type, maxLength, and repetitions + const result: any = { + name: field.name, + type, + }; + + // Add optional properties that FileMaker expects + if (field.nullable !== undefined) result.nullable = field.nullable; + if (field.primary !== undefined) result.primary = field.primary; + if (field.unique !== undefined) result.unique = field.unique; + if (field.global !== undefined) result.global = field.global; + + // Add type-specific properties + if (field.type === "string") { + const stringField = field as StringField; + if (stringField.default !== undefined) + result.default = stringField.default; + } else if (field.type === "date") { + const dateField = field as DateField; + if (dateField.default !== undefined) result.default = dateField.default; + } else if (field.type === "time") { + const timeField = field as TimeField; + if (timeField.default !== undefined) result.default = timeField.default; + } else if (field.type === "timestamp") { + const timestampField = field as TimestampField; + if (timestampField.default !== undefined) + result.default = timestampField.default; + } else if (field.type === "container") { + const containerField = field as ContainerField; + if (containerField.externalSecurePath !== undefined) + result.externalSecurePath = containerField.externalSecurePath; + } + + return result as FileMakerField; + } +} diff --git a/packages/fmodata/src/client/update-builder.ts b/packages/fmodata/src/client/update-builder.ts new file mode 100644 index 00000000..a2b2292b --- /dev/null +++ b/packages/fmodata/src/client/update-builder.ts @@ -0,0 +1,437 @@ +import type { + ExecutionContext, + ExecutableBuilder, + Result, + WithSystemFields, + ExecuteOptions, + ExecuteMethodOptions, +} from "../types"; +import { getAcceptHeader } from "../types"; +import type { FMTable, InferSchemaOutputFromFMTable } from "../orm/table"; +import { + getTableName, + getTableId as getTableIdHelper, + getBaseTableConfig, + isUsingEntityIds, +} from "../orm/table"; +import { QueryBuilder } from "./query-builder"; +import { type FFetchOptions } from "@fetchkit/ffetch"; +import { transformFieldNamesToIds } from "../transform"; +import { parseErrorResponse } from "./error-parser"; +import { validateAndTransformInput } from "../validation"; + +/** + * Initial update builder returned from EntitySet.update(data) + * Requires calling .byId() or .where() before .execute() is available + */ +export class UpdateBuilder< + Occ extends FMTable, + ReturnPreference extends "minimal" | "representation" = "minimal", +> { + private databaseName: string; + private context: ExecutionContext; + private table: Occ; + private data: Partial>; + private returnPreference: ReturnPreference; + + private databaseUseEntityIds: boolean; + + constructor(config: { + occurrence: Occ; + databaseName: string; + context: ExecutionContext; + data: Partial>; + returnPreference: ReturnPreference; + databaseUseEntityIds?: boolean; + }) { + this.table = config.occurrence; + this.databaseName = config.databaseName; + this.context = config.context; + this.data = config.data; + this.returnPreference = config.returnPreference; + this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; + } + + /** + * Update a single record by ID + * Returns updated count by default, or full record if returnFullRecord was set to true + */ + byId( + id: string | number, + ): ExecutableUpdateBuilder { + return new ExecutableUpdateBuilder({ + occurrence: this.table, + databaseName: this.databaseName, + context: this.context, + data: this.data, + mode: "byId", + recordId: id, + returnPreference: this.returnPreference, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + } + + /** + * Update records matching a filter query + * Returns updated count by default, or full record if returnFullRecord was set to true + * @param fn Callback that receives a QueryBuilder for building the filter + */ + where( + fn: (q: QueryBuilder) => QueryBuilder, + ): ExecutableUpdateBuilder { + // Create a QueryBuilder for the user to configure + const queryBuilder = new QueryBuilder({ + occurrence: this.table, + databaseName: this.databaseName, + context: this.context, + }); + + // Let the user configure it + const configuredBuilder = fn(queryBuilder); + + return new ExecutableUpdateBuilder({ + occurrence: this.table, + databaseName: this.databaseName, + context: this.context, + data: this.data, + mode: "byFilter", + queryBuilder: configuredBuilder, + returnPreference: this.returnPreference, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + } +} + +/** + * Executable update builder - has execute() method + * Returned after calling .byId() or .where() + * Can return either updated count or full record based on returnFullRecord option + */ +export class ExecutableUpdateBuilder< + Occ extends FMTable, + IsByFilter extends boolean, + ReturnPreference extends "minimal" | "representation" = "minimal", +> implements + ExecutableBuilder< + ReturnPreference extends "minimal" + ? { updatedCount: number } + : InferSchemaOutputFromFMTable + > +{ + private databaseName: string; + private context: ExecutionContext; + private table: Occ; + private data: Partial>; + private mode: "byId" | "byFilter"; + private recordId?: string | number; + private queryBuilder?: QueryBuilder; + private returnPreference: ReturnPreference; + private databaseUseEntityIds: boolean; + + constructor(config: { + occurrence: Occ; + databaseName: string; + context: ExecutionContext; + data: Partial>; + mode: "byId" | "byFilter"; + recordId?: string | number; + queryBuilder?: QueryBuilder; + returnPreference: ReturnPreference; + databaseUseEntityIds?: boolean; + }) { + this.table = config.occurrence; + this.databaseName = config.databaseName; + this.context = config.context; + this.data = config.data; + this.mode = config.mode; + this.recordId = config.recordId; + this.queryBuilder = config.queryBuilder; + this.returnPreference = config.returnPreference; + this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; + } + + /** + * Helper to merge database-level useEntityIds with per-request options + */ + private mergeExecuteOptions( + options?: RequestInit & FFetchOptions & ExecuteOptions, + ): RequestInit & FFetchOptions & { useEntityIds?: boolean } { + // If useEntityIds is not set in options, use the database-level setting + return { + ...options, + useEntityIds: options?.useEntityIds ?? this.databaseUseEntityIds, + }; + } + + /** + * Gets the table ID (FMTID) if using entity IDs, otherwise returns the table name + * @param useEntityIds - Optional override for entity ID usage + */ + private getTableId(useEntityIds?: boolean): string { + const contextDefault = this.context._getUseEntityIds?.() ?? false; + const shouldUseIds = useEntityIds ?? contextDefault; + + if (shouldUseIds) { + if (!isUsingEntityIds(this.table)) { + throw new Error( + `useEntityIds is true but table "${getTableName(this.table)}" does not have entity IDs configured`, + ); + } + return getTableIdHelper(this.table); + } + + return getTableName(this.table); + } + + async execute( + options?: ExecuteMethodOptions, + ): Promise< + Result< + ReturnPreference extends "minimal" + ? { updatedCount: number } + : InferSchemaOutputFromFMTable + > + > { + // Merge database-level useEntityIds with per-request options + const mergedOptions = this.mergeExecuteOptions(options); + + // Get table identifier with override support + const tableId = this.getTableId(mergedOptions.useEntityIds); + + // Validate and transform input data using input validators (writeValidators) + let validatedData = this.data; + if (this.table) { + const baseTableConfig = getBaseTableConfig(this.table); + const inputSchema = baseTableConfig.inputSchema; + + try { + validatedData = await validateAndTransformInput(this.data, inputSchema); + } catch (error) { + // If validation fails, return error immediately + return { + data: undefined, + error: error instanceof Error ? error : new Error(String(error)), + } as any; + } + } + + // Transform field names to FMFIDs if using entity IDs + // Only transform if useEntityIds resolves to true (respects per-request override) + const shouldUseIds = mergedOptions.useEntityIds ?? false; + + const transformedData = + this.table && shouldUseIds + ? transformFieldNamesToIds(validatedData, this.table) + : validatedData; + + let url: string; + + if (this.mode === "byId") { + // Update single record by ID: PATCH /{database}/{table}('id') + url = `/${this.databaseName}/${tableId}('${this.recordId}')`; + } else { + // Update by filter: PATCH /{database}/{table}?$filter=... + if (!this.queryBuilder) { + throw new Error("Query builder is required for filter-based update"); + } + + // Get the query string from the configured QueryBuilder + const queryString = this.queryBuilder.getQueryString(); + // The query string will have the tableId already transformed by QueryBuilder + // Remove the leading "/" and table name from the query string as we'll build our own URL + const tableName = getTableName(this.table); + const queryParams = queryString.startsWith(`/${tableId}`) + ? queryString.slice(`/${tableId}`.length) + : queryString.startsWith(`/${tableName}`) + ? queryString.slice(`/${tableName}`.length) + : queryString; + + url = `/${this.databaseName}/${tableId}${queryParams}`; + } + + // Set Prefer header based on returnPreference + const headers: Record = { + "Content-Type": "application/json", + }; + + if (this.returnPreference === "representation") { + headers["Prefer"] = "return=representation"; + } + + // Make PATCH request with JSON body + const result = await this.context._makeRequest(url, { + method: "PATCH", + headers, + body: JSON.stringify(transformedData), + ...mergedOptions, + }); + + if (result.error) { + return { data: undefined, error: result.error }; + } + + const response = result.data; + + // Handle based on return preference + if (this.returnPreference === "representation") { + // Return the full updated record + return { + data: response as ReturnPreference extends "minimal" + ? { updatedCount: number } + : InferSchemaOutputFromFMTable, + error: undefined, + }; + } else { + // Return updated count (minimal) + let updatedCount = 0; + + if (typeof response === "number") { + updatedCount = response; + } else if (response && typeof response === "object") { + // Check if the response has a count property (fallback) + updatedCount = (response as any).updatedCount || 0; + } + + return { + data: { updatedCount } as ReturnPreference extends "minimal" + ? { updatedCount: number } + : InferSchemaOutputFromFMTable, + error: undefined, + }; + } + } + + getRequestConfig(): { method: string; url: string; body?: any } { + // For batch operations, use database-level setting (no per-request override available here) + // Note: Input validation happens in execute() and processResponse() for batch operations + const tableId = this.getTableId(this.databaseUseEntityIds); + + // Transform field names to FMFIDs if using entity IDs + const transformedData = + this.table && this.databaseUseEntityIds + ? transformFieldNamesToIds(this.data, this.table) + : this.data; + + let url: string; + + if (this.mode === "byId") { + url = `/${this.databaseName}/${tableId}('${this.recordId}')`; + } else { + if (!this.queryBuilder) { + throw new Error("Query builder is required for filter-based update"); + } + + const queryString = this.queryBuilder.getQueryString(); + const tableName = getTableName(this.table); + const queryParams = queryString.startsWith(`/${tableId}`) + ? queryString.slice(`/${tableId}`.length) + : queryString.startsWith(`/${tableName}`) + ? queryString.slice(`/${tableName}`.length) + : queryString; + + url = `/${this.databaseName}/${tableId}${queryParams}`; + } + + return { + method: "PATCH", + url, + body: JSON.stringify(transformedData), + }; + } + + toRequest(baseUrl: string, options?: ExecuteOptions): Request { + const config = this.getRequestConfig(); + const fullUrl = `${baseUrl}${config.url}`; + + return new Request(fullUrl, { + method: config.method, + headers: { + "Content-Type": "application/json", + Accept: getAcceptHeader(options?.includeODataAnnotations), + }, + body: config.body, + }); + } + + async processResponse( + response: Response, + options?: ExecuteOptions, + ): Promise< + Result< + ReturnPreference extends "minimal" + ? { updatedCount: number } + : InferSchemaOutputFromFMTable + > + > { + // Check for error responses (important for batch operations) + if (!response.ok) { + const tableName = getTableName(this.table); + const error = await parseErrorResponse( + response, + response.url || `/${this.databaseName}/${tableName}`, + ); + return { data: undefined, error }; + } + + // Check for empty response (204 No Content) + const text = await response.text(); + if (!text || text.trim() === "") { + // For 204 No Content, check the fmodata.affected_rows header + const affectedRows = response.headers.get("fmodata.affected_rows"); + const updatedCount = affectedRows ? parseInt(affectedRows, 10) : 1; + return { + data: { updatedCount } as ReturnPreference extends "minimal" + ? { updatedCount: number } + : InferSchemaOutputFromFMTable, + error: undefined, + }; + } + + const rawResponse = JSON.parse(text); + + // Validate and transform input data using input validators (writeValidators) + // This is needed for processResponse because it's called from batch operations + // where the data hasn't been validated yet + let validatedData = this.data; + if (this.table) { + const baseTableConfig = getBaseTableConfig(this.table); + const inputSchema = baseTableConfig.inputSchema; + try { + validatedData = await validateAndTransformInput(this.data, inputSchema); + } catch (error) { + return { + data: undefined, + error: error instanceof Error ? error : new Error(String(error)), + } as any; + } + } + + // Handle based on return preference + if (this.returnPreference === "representation") { + // Return the full updated record + return { + data: rawResponse as ReturnPreference extends "minimal" + ? { updatedCount: number } + : InferSchemaOutputFromFMTable, + error: undefined, + }; + } else { + // Return updated count (minimal) + let updatedCount = 0; + + if (typeof rawResponse === "number") { + updatedCount = rawResponse; + } else if (rawResponse && typeof rawResponse === "object") { + // Check if the response has a count property (fallback) + updatedCount = (rawResponse as any).updatedCount || 0; + } + + return { + data: { updatedCount } as ReturnPreference extends "minimal" + ? { updatedCount: number } + : InferSchemaOutputFromFMTable, + error: undefined, + }; + } + } +} diff --git a/packages/fmodata/src/errors.ts b/packages/fmodata/src/errors.ts new file mode 100644 index 00000000..8f85b98b --- /dev/null +++ b/packages/fmodata/src/errors.ts @@ -0,0 +1,261 @@ +import type { StandardSchemaV1 } from "@standard-schema/spec"; + +/** + * Base class for all fmodata errors + */ +export abstract class FMODataError extends Error { + abstract readonly kind: string; + readonly timestamp: Date; + + constructor(message: string, options?: ErrorOptions) { + super(message, options); + this.name = this.constructor.name; + this.timestamp = new Date(); + } +} + +// ============================================ +// HTTP Errors (with status codes) +// ============================================ + +export class HTTPError extends FMODataError { + readonly kind = "HTTPError" as const; + readonly url: string; + readonly status: number; + readonly statusText: string; + readonly response?: any; + + constructor(url: string, status: number, statusText: string, response?: any) { + super(`HTTP ${status} ${statusText} for ${url}`); + this.url = url; + this.status = status; + this.statusText = statusText; + this.response = response; + } + + // Helper methods for common status checks + is4xx(): boolean { + return this.status >= 400 && this.status < 500; + } + + is5xx(): boolean { + return this.status >= 500 && this.status < 600; + } + + isNotFound(): boolean { + return this.status === 404; + } + + isUnauthorized(): boolean { + return this.status === 401; + } + + isForbidden(): boolean { + return this.status === 403; + } +} + +// ============================================ +// OData Specific Errors +// ============================================ + +export class ODataError extends FMODataError { + readonly kind = "ODataError" as const; + readonly url: string; + readonly code?: string; + readonly details?: any; + + constructor(url: string, message: string, code?: string, details?: any) { + super(`OData error: ${message}`); + this.url = url; + this.code = code; + this.details = details; + } +} + +export class SchemaLockedError extends FMODataError { + readonly kind = "SchemaLockedError" as const; + readonly url: string; + readonly code: string; + readonly details?: any; + + constructor(url: string, message: string, details?: any) { + super(`OData error: ${message}`); + this.url = url; + this.code = "303"; + this.details = details; + } +} + +// ============================================ +// Validation Errors +// ============================================ + +export class ValidationError extends FMODataError { + readonly kind = "ValidationError" as const; + readonly field?: string; + readonly issues: readonly StandardSchemaV1.Issue[]; + readonly value?: unknown; + + constructor( + message: string, + issues: readonly StandardSchemaV1.Issue[], + options?: { + field?: string; + value?: unknown; + cause?: Error["cause"]; + }, + ) { + super( + message, + options?.cause !== undefined ? { cause: options.cause } : undefined, + ); + this.field = options?.field; + this.issues = issues; + this.value = options?.value; + } +} + +export class ResponseStructureError extends FMODataError { + readonly kind = "ResponseStructureError" as const; + readonly expected: string; + readonly received: any; + + constructor(expected: string, received: any) { + super(`Invalid response structure: expected ${expected}`); + this.expected = expected; + this.received = received; + } +} + +export class RecordCountMismatchError extends FMODataError { + readonly kind = "RecordCountMismatchError" as const; + readonly expected: number | "one" | "at-most-one"; + readonly received: number; + + constructor(expected: number | "one" | "at-most-one", received: number) { + const expectedStr = typeof expected === "number" ? expected : expected; + super(`Expected ${expectedStr} record(s), but received ${received}`); + this.expected = expected; + this.received = received; + } +} + +export class InvalidLocationHeaderError extends FMODataError { + readonly kind = "InvalidLocationHeaderError" as const; + readonly locationHeader?: string; + + constructor(message: string, locationHeader?: string) { + super(message); + this.locationHeader = locationHeader; + } +} + +export class ResponseParseError extends FMODataError { + readonly kind = "ResponseParseError" as const; + readonly url: string; + readonly rawText?: string; + + constructor( + url: string, + message: string, + options?: { rawText?: string; cause?: Error }, + ) { + super(message, options?.cause ? { cause: options.cause } : undefined); + this.url = url; + this.rawText = options?.rawText; + } +} + +export class BatchTruncatedError extends FMODataError { + readonly kind = "BatchTruncatedError" as const; + readonly operationIndex: number; + readonly failedAtIndex: number; + + constructor(operationIndex: number, failedAtIndex: number) { + super( + `Operation ${operationIndex} was not executed because operation ${failedAtIndex} failed`, + ); + this.operationIndex = operationIndex; + this.failedAtIndex = failedAtIndex; + } +} + +// ============================================ +// Type Guards +// ============================================ + +export function isHTTPError(error: unknown): error is HTTPError { + return error instanceof HTTPError; +} + +export function isValidationError(error: unknown): error is ValidationError { + return error instanceof ValidationError; +} + +export function isODataError(error: unknown): error is ODataError { + return error instanceof ODataError; +} + +export function isSchemaLockedError( + error: unknown, +): error is SchemaLockedError { + return error instanceof SchemaLockedError; +} + +export function isResponseStructureError( + error: unknown, +): error is ResponseStructureError { + return error instanceof ResponseStructureError; +} + +export function isRecordCountMismatchError( + error: unknown, +): error is RecordCountMismatchError { + return error instanceof RecordCountMismatchError; +} + +export function isResponseParseError( + error: unknown, +): error is ResponseParseError { + return error instanceof ResponseParseError; +} + +export function isBatchTruncatedError( + error: unknown, +): error is BatchTruncatedError { + return error instanceof BatchTruncatedError; +} + +export function isFMODataError(error: unknown): error is FMODataError { + return error instanceof FMODataError; +} + +// ============================================ +// Union type for all possible errors +// ============================================ + +// Re-export ffetch errors (they'll be imported from @fetchkit/ffetch) +export type { + TimeoutError, + AbortError, + NetworkError, + RetryLimitError, + CircuitOpenError, +} from "@fetchkit/ffetch"; + +export type FMODataErrorType = + | import("@fetchkit/ffetch").TimeoutError + | import("@fetchkit/ffetch").AbortError + | import("@fetchkit/ffetch").NetworkError + | import("@fetchkit/ffetch").RetryLimitError + | import("@fetchkit/ffetch").CircuitOpenError + | HTTPError + | ODataError + | SchemaLockedError + | ValidationError + | ResponseStructureError + | RecordCountMismatchError + | InvalidLocationHeaderError + | ResponseParseError + | BatchTruncatedError; diff --git a/packages/fmodata/src/index.ts b/packages/fmodata/src/index.ts new file mode 100644 index 00000000..745d4f99 --- /dev/null +++ b/packages/fmodata/src/index.ts @@ -0,0 +1,119 @@ +// Barrel file - exports all public API from the client folder + +// Main API - use these functions to create tables and occurrences +export { FMServerConnection } from "./client/filemaker-odata"; + +// NEW ORM API - Drizzle-inspired field builders and operators +export { + // Field builders + textField, + numberField, + dateField, + timeField, + timestampField, + containerField, + calcField, + type FieldBuilder, + // Table definition + fmTableOccurrence, + FMTable, + type FMTableWithColumns as TableOccurrenceResult, + type InferTableSchema, + // Table helper functions + // getTableFields, + // getDefaultSelect, + // getBaseTableConfig, + // getFieldId, + // getFieldName, + // getTableId, + getTableColumns, + // Column references + type Column, + isColumn, + // Filter operators + type FilterExpression, + eq, + ne, + gt, + gte, + lt, + lte, + contains, + startsWith, + endsWith, + inArray, + notInArray, + isNull, + isNotNull, + and, + or, + not, + // OrderBy operators + type OrderByExpression, + asc, + desc, +} from "./orm/index"; + +// Type-only exports - for type annotations only, not direct instantiation +export type { Database } from "./client/database"; +export type { EntitySet } from "./client/entity-set"; +export type { + SchemaManager, + Field, + StringField, + NumericField, + DateField, + TimeField, + TimestampField, + ContainerField, +} from "./client/schema-manager"; + +// Utility types for type annotations +export type { + Result, + BatchResult, + BatchItemResult, + InferSchemaType, + ODataRecordMetadata, + Metadata, + FetchHandler, + ExecuteMethodOptions, + ExecuteOptions, +} from "./types"; + +// Re-export ffetch errors and types +export { + TimeoutError, + AbortError, + NetworkError, + RetryLimitError, + CircuitOpenError, +} from "@fetchkit/ffetch"; + +export type { FFetchOptions } from "@fetchkit/ffetch"; + +// Export our errors +export { + FMODataError, + HTTPError, + ODataError, + SchemaLockedError, + ValidationError, + ResponseStructureError, + RecordCountMismatchError, + ResponseParseError, + BatchTruncatedError, + isHTTPError, + isValidationError, + isODataError, + isSchemaLockedError, + isResponseStructureError, + isRecordCountMismatchError, + isResponseParseError, + isBatchTruncatedError, + isFMODataError, +} from "./errors"; + +export type { FMODataErrorType } from "./errors"; + +export type { Logger } from "./logger"; diff --git a/packages/fmodata/src/logger.test.ts b/packages/fmodata/src/logger.test.ts new file mode 100644 index 00000000..05c89be4 --- /dev/null +++ b/packages/fmodata/src/logger.test.ts @@ -0,0 +1,34 @@ +import { describe, expect, it } from "vitest"; +import type { LogLevel } from "./logger"; +import { shouldPublishLog } from "./logger"; + +describe("shouldPublishLog", () => { + const testCases: { + currentLogLevel: LogLevel; + logLevel: LogLevel; + expected: boolean; + }[] = [ + { currentLogLevel: "debug", logLevel: "debug", expected: true }, + { currentLogLevel: "debug", logLevel: "info", expected: true }, + { currentLogLevel: "debug", logLevel: "warn", expected: true }, + { currentLogLevel: "debug", logLevel: "error", expected: true }, + { currentLogLevel: "info", logLevel: "debug", expected: false }, + { currentLogLevel: "info", logLevel: "info", expected: true }, + { currentLogLevel: "info", logLevel: "warn", expected: true }, + { currentLogLevel: "info", logLevel: "error", expected: true }, + { currentLogLevel: "warn", logLevel: "debug", expected: false }, + { currentLogLevel: "warn", logLevel: "info", expected: false }, + { currentLogLevel: "warn", logLevel: "warn", expected: true }, + { currentLogLevel: "warn", logLevel: "error", expected: true }, + { currentLogLevel: "error", logLevel: "debug", expected: false }, + { currentLogLevel: "error", logLevel: "info", expected: false }, + { currentLogLevel: "error", logLevel: "warn", expected: false }, + { currentLogLevel: "error", logLevel: "error", expected: true }, + ]; + + testCases.forEach(({ currentLogLevel, logLevel, expected }) => { + it(`should return "${expected}" when currentLogLevel is "${currentLogLevel}" and logLevel is "${logLevel}"`, () => { + expect(shouldPublishLog(currentLogLevel, logLevel)).toBe(expected); + }); + }); +}); diff --git a/packages/fmodata/src/logger.ts b/packages/fmodata/src/logger.ts new file mode 100644 index 00000000..41841920 --- /dev/null +++ b/packages/fmodata/src/logger.ts @@ -0,0 +1,140 @@ +export const TTY_COLORS = { + reset: "\x1b[0m", + bright: "\x1b[1m", + dim: "\x1b[2m", + undim: "\x1b[22m", + underscore: "\x1b[4m", + blink: "\x1b[5m", + reverse: "\x1b[7m", + hidden: "\x1b[8m", + fg: { + black: "\x1b[30m", + red: "\x1b[31m", + green: "\x1b[32m", + yellow: "\x1b[33m", + blue: "\x1b[34m", + magenta: "\x1b[35m", + cyan: "\x1b[36m", + white: "\x1b[37m", + }, + bg: { + black: "\x1b[40m", + red: "\x1b[41m", + green: "\x1b[42m", + yellow: "\x1b[43m", + blue: "\x1b[44m", + magenta: "\x1b[45m", + cyan: "\x1b[46m", + white: "\x1b[47m", + }, +} as const; + +export type LogLevel = "debug" | "info" | "success" | "warn" | "error"; + +export const levels = ["debug", "info", "success", "warn", "error"] as const; + +export function shouldPublishLog( + currentLogLevel: LogLevel, + logLevel: LogLevel, +): boolean { + return levels.indexOf(logLevel) >= levels.indexOf(currentLogLevel); +} + +export interface Logger { + disabled?: boolean | undefined; + disableColors?: boolean | undefined; + level?: Exclude | undefined; + log?: + | (( + level: Exclude, + message: string, + ...args: any[] + ) => void) + | undefined; +} + +export type LogHandlerParams = + Parameters> extends [LogLevel, ...infer Rest] + ? Rest + : never; + +const levelColors: Record = { + info: TTY_COLORS.fg.blue, + success: TTY_COLORS.fg.green, + warn: TTY_COLORS.fg.yellow, + error: TTY_COLORS.fg.red, + debug: TTY_COLORS.fg.magenta, +}; + +const formatMessage = ( + level: LogLevel, + message: string, + colorsEnabled: boolean, +): string => { + const timestamp = new Date().toISOString(); + + if (colorsEnabled) { + return `${TTY_COLORS.dim}${timestamp}${TTY_COLORS.reset} ${ + levelColors[level] + }${level.toUpperCase()}${TTY_COLORS.reset} ${TTY_COLORS.bright}[FMODATA]:${ + TTY_COLORS.reset + } ${message}`; + } + + return `${timestamp} ${level.toUpperCase()} [FMODATA]: ${message}`; +}; + +export type InternalLogger = { + [K in LogLevel]: (...params: LogHandlerParams) => void; +} & { + get level(): LogLevel; +}; + +export const createLogger = (options?: Logger | undefined): InternalLogger => { + const enabled = options?.disabled !== true; + const logLevel = options?.level ?? "error"; + + const colorsEnabled = options?.disableColors !== true; + + const LogFunc = ( + level: LogLevel, + message: string, + args: any[] = [], + ): void => { + if (!enabled || !shouldPublishLog(logLevel, level)) { + return; + } + + const formattedMessage = formatMessage(level, message, colorsEnabled); + + if (!options || typeof options.log !== "function") { + if (level === "error") { + console.error(formattedMessage, ...args); + } else if (level === "warn") { + console.warn(formattedMessage, ...args); + } else { + console.log(formattedMessage, ...args); + } + return; + } + + options.log(level === "success" ? "info" : level, message, ...args); + }; + + const logger = Object.fromEntries( + levels.map((level) => [ + level, + (...[message, ...args]: LogHandlerParams) => + LogFunc(level, message, args), + ]), + ) as Record void>; + + return { + ...logger, + get level() { + return logLevel; + }, + }; +}; + +export const logger = createLogger(); diff --git a/packages/fmodata/src/orm/column.ts b/packages/fmodata/src/orm/column.ts new file mode 100644 index 00000000..3baf7b05 --- /dev/null +++ b/packages/fmodata/src/orm/column.ts @@ -0,0 +1,106 @@ +import type { StandardSchemaV1 } from "@standard-schema/spec"; + +/** + * Column represents a type-safe reference to a table field. + * Used in queries, filters, and operators to provide autocomplete and type checking. + * + * @template TOutput - The TypeScript type when reading from the database (output type) + * @template TInput - The TypeScript type when writing to the database (input type, for filters) + * @template TableName - The table name as a string literal type (for validation) + * @template IsContainer - Whether this column represents a container field (cannot be selected) + */ +export class Column< + TOutput = any, + TInput = TOutput, + TableName extends string = string, + IsContainer extends boolean = false, +> { + readonly fieldName: string; + readonly entityId?: `FMFID:${string}`; + readonly tableName: TableName; + readonly tableEntityId?: `FMTID:${string}`; + readonly inputValidator?: StandardSchemaV1; + + // Phantom types for TypeScript inference - never actually hold values + readonly _phantomOutput!: TOutput; + readonly _phantomInput!: TInput; + readonly _isContainer!: IsContainer; + + constructor(config: { + fieldName: string; + entityId?: `FMFID:${string}`; + tableName: TableName; + tableEntityId?: `FMTID:${string}`; + inputValidator?: StandardSchemaV1; + }) { + this.fieldName = config.fieldName; + this.entityId = config.entityId; + this.tableName = config.tableName; + this.tableEntityId = config.tableEntityId; + this.inputValidator = config.inputValidator; + } + + /** + * Get the field identifier (entity ID if available, otherwise field name). + * Used when building OData queries. + */ + getFieldIdentifier(useEntityIds?: boolean): string { + if (useEntityIds && this.entityId) { + return this.entityId; + } + return this.fieldName; + } + + /** + * Get the table identifier (entity ID if available, otherwise table name). + * Used when building OData queries. + */ + getTableIdentifier(useEntityIds?: boolean): string { + if (useEntityIds && this.tableEntityId) { + return this.tableEntityId; + } + return this.tableName; + } + + /** + * Check if this column is from a specific table. + * Useful for validation in cross-table operations. + */ + isFromTable(tableName: string): boolean { + return this.tableName === tableName; + } + + /** + * Create a string representation for debugging. + */ + toString(): string { + return `${this.tableName}.${this.fieldName}`; + } +} + +/** + * Type guard to check if a value is a Column instance. + */ +export function isColumn(value: any): value is Column { + return value instanceof Column; +} + +/** + * Create a Column with proper type inference from the inputValidator. + * This helper ensures TypeScript can infer TInput from the validator's input type. + * @internal + */ +export function createColumn< + TOutput, + TInput, + TName extends string, + IsContainer extends boolean = false, +>(config: { + fieldName: string; + entityId?: `FMFID:${string}`; + tableName: TName; + tableEntityId?: `FMTID:${string}`; + inputValidator?: StandardSchemaV1; +}): Column { + return new Column(config) as Column; +} diff --git a/packages/fmodata/src/orm/field-builders.ts b/packages/fmodata/src/orm/field-builders.ts new file mode 100644 index 00000000..d7acbaa7 --- /dev/null +++ b/packages/fmodata/src/orm/field-builders.ts @@ -0,0 +1,296 @@ +import type { StandardSchemaV1 } from "@standard-schema/spec"; + +/** + * Branded type for container field's database type. + * This allows TypeScript to distinguish container fields from regular string fields + * at the type level, enabling compile-time exclusion from select operations. + */ +export type ContainerDbType = string & { readonly __container: true }; + +/** + * FieldBuilder provides a fluent API for defining table fields with type-safe metadata. + * Supports chaining methods to configure primary keys, nullability, read-only status, entity IDs, and validators. + * + * @template TOutput - The output type after applying outputValidator (what you get when reading) + * @template TInput - The input type after applying inputValidator (what you pass when writing) + * @template TDbType - The database type (what FileMaker stores/expects) + * @template TReadOnly - Whether this field is read-only (for type-level exclusion from insert/update) + */ +export class FieldBuilder< + TOutput = any, + TInput = TOutput, + TDbType = TOutput, + TReadOnly extends boolean = false, +> { + private _primaryKey = false; + private _notNull = false; + private _readOnly = false; + private _entityId?: `FMFID:${string}`; + private _outputValidator?: StandardSchemaV1; + private _inputValidator?: StandardSchemaV1; + private _fieldType: string; + + constructor(fieldType: string) { + this._fieldType = fieldType; + } + + /** + * Mark this field as the primary key for the table. + * Primary keys are automatically read-only. + */ + primaryKey(): FieldBuilder { + const builder = this._clone() as any; + builder._primaryKey = true; + builder._readOnly = true; // Primary keys are automatically read-only + return builder; + } + + /** + * Mark this field as non-nullable. + * Updates the type to exclude null/undefined. + */ + notNull(): FieldBuilder< + NonNullable, + NonNullable, + NonNullable, + TReadOnly + > { + const builder = this._clone() as any; + builder._notNull = true; + return builder; + } + + /** + * Mark this field as read-only. + * Read-only fields are excluded from insert and update operations. + */ + readOnly(): FieldBuilder { + const builder = this._clone() as any; + builder._readOnly = true; + return builder; + } + + /** + * Assign a FileMaker field ID (FMFID) to this field. + * When useEntityIds is enabled, this ID will be used in API requests instead of the field name. + */ + entityId( + id: `FMFID:${string}`, + ): FieldBuilder { + const builder = this._clone(); + builder._entityId = id; + return builder; + } + + /** + * Set a validator for the output (reading from database). + * The output validator transforms/validates data coming FROM the database in list or get operations. + * + * @example + * numberField().readValidator(z.coerce.boolean()) + * // FileMaker returns 0/1, you get true/false + */ + readValidator( + validator: StandardSchemaV1, + ): FieldBuilder { + const builder = this._clone() as any; + builder._outputValidator = validator; + return builder; + } + + /** + * Set a validator for the input (writing to database). + * The input validator transforms/validates data going TO the database in insert, update, and filter operations. + * + * @example + * numberField().writeValidator(z.boolean().transform(v => v ? 1 : 0)) + * // You pass true/false, FileMaker gets 1/0 + */ + writeValidator( + validator: StandardSchemaV1, + ): FieldBuilder { + const builder = this._clone() as any; + builder._inputValidator = validator; + return builder; + } + + /** + * Get the metadata configuration for this field. + * @internal Used by fmTableOccurrence to extract field configuration + */ + _getConfig() { + return { + fieldType: this._fieldType, + primaryKey: this._primaryKey, + notNull: this._notNull, + readOnly: this._readOnly, + entityId: this._entityId, + outputValidator: this._outputValidator, + inputValidator: this._inputValidator, + }; + } + + /** + * Clone this builder to allow immutable chaining. + * @private + */ + private _clone(): FieldBuilder { + const builder = new FieldBuilder( + this._fieldType, + ); + builder._primaryKey = this._primaryKey; + builder._notNull = this._notNull; + builder._readOnly = this._readOnly; + builder._entityId = this._entityId; + builder._outputValidator = this._outputValidator; + builder._inputValidator = this._inputValidator; + return builder; + } +} + +/** + * Create a text field (Edm.String in FileMaker OData). + * By default, text fields are nullable. + * + * @example + * textField() // string | null + * textField().notNull() // string + * textField().entityId("FMFID:1") // with entity ID + */ +export function textField(): FieldBuilder< + string | null, + string | null, + string | null, + false +> { + return new FieldBuilder( + "text", + ); +} + +/** + * Create a number field (Edm.Decimal in FileMaker OData). + * By default, number fields are nullable. + * + * @example + * numberField() // number | null + * numberField().notNull() // number + * numberField().outputValidator(z.coerce.boolean()) // transform to boolean on read + */ +export function numberField(): FieldBuilder< + number | null, + number | null, + number | null, + false +> { + return new FieldBuilder( + "number", + ); +} + +/** + * Create a date field (Edm.Date in FileMaker OData). + * By default, date fields are nullable and represented as ISO date strings (YYYY-MM-DD). + * + * @example + * dateField() // string | null (ISO date format) + * dateField().notNull() // string + */ +export function dateField(): FieldBuilder< + string | null, + string | null, + string | null, + false +> { + return new FieldBuilder( + "date", + ); +} + +/** + * Create a time field (Edm.TimeOfDay in FileMaker OData). + * By default, time fields are nullable and represented as ISO time strings (HH:mm:ss). + * + * @example + * timeField() // string | null (ISO time format) + * timeField().notNull() // string + */ +export function timeField(): FieldBuilder< + string | null, + string | null, + string | null, + false +> { + return new FieldBuilder( + "time", + ); +} + +/** + * Create a timestamp field (Edm.DateTimeOffset in FileMaker OData). + * By default, timestamp fields are nullable and represented as ISO 8601 strings. + * + * @example + * timestampField() // string | null (ISO 8601 format) + * timestampField().notNull() // string + * timestampField().readOnly() // typical for CreationTimestamp + */ +export function timestampField(): FieldBuilder< + string | null, + string | null, + string | null, + false +> { + return new FieldBuilder( + "timestamp", + ); +} + +/** + * Create a container field (Edm.Stream in FileMaker OData). + * Container fields store binary data and are represented as base64 strings in the API. + * By default, container fields are nullable. + * + * Note: Container fields cannot be selected via .select() - they can only be accessed + * via .getSingleField() due to FileMaker OData API limitations. + * + * @example + * containerField() // string | null (base64 encoded) + * containerField().notNull() // string + */ +export function containerField(): FieldBuilder< + string | null, + string | null, + ContainerDbType | null, + false +> { + return new FieldBuilder< + string | null, + string | null, + ContainerDbType | null, + false + >("container"); +} + +/** + * Create a calculated field (read-only field computed by FileMaker). + * Calculated fields are automatically marked as read-only. + * + * @example + * calcField() // string | null + * calcField().notNull() // string + */ +export function calcField(): FieldBuilder< + string | null, + string | null, + string | null, + true +> { + const builder = new FieldBuilder< + string | null, + string | null, + string | null, + false + >("calculated"); + return builder.readOnly(); +} diff --git a/packages/fmodata/src/orm/index.ts b/packages/fmodata/src/orm/index.ts new file mode 100644 index 00000000..9138b31a --- /dev/null +++ b/packages/fmodata/src/orm/index.ts @@ -0,0 +1,60 @@ +// Field builders - main API for defining table schemas +export { + FieldBuilder, + textField, + numberField, + dateField, + timeField, + timestampField, + containerField, + calcField, + type ContainerDbType, +} from "./field-builders"; + +// Column references - used in queries and filters +export { Column, isColumn } from "./column"; + +// Filter operators - eq, gt, lt, and, or, etc. +export { + FilterExpression, + eq, + ne, + gt, + gte, + lt, + lte, + contains, + startsWith, + endsWith, + inArray, + notInArray, + isNull, + isNotNull, + and, + or, + not, + // OrderBy operators + OrderByExpression, + isOrderByExpression, + asc, + desc, +} from "./operators"; + +// Table definition - fmTableOccurrence function +export { + fmTableOccurrence, + FMTable, + type FMTableWithColumns, + type InferTableSchema, + // Helper functions for accessing FMTable internals + getTableName, + getTableEntityId, + // getTableFields, + getDefaultSelect, + getBaseTableConfig, + isUsingEntityIds, + getFieldId, + getFieldName, + getTableId, + getTableColumns, +} from "./table"; diff --git a/packages/fmodata/src/orm/operators.ts b/packages/fmodata/src/orm/operators.ts new file mode 100644 index 00000000..6cf2c7ac --- /dev/null +++ b/packages/fmodata/src/orm/operators.ts @@ -0,0 +1,487 @@ +import type { Column } from "./column"; +import { isColumn } from "./column"; +import { needsFieldQuoting } from "../client/builders/select-utils"; + +/** + * FilterExpression represents a filter condition that can be used in where() clauses. + * Internal representation of operator expressions that get converted to OData filter syntax. + */ +export class FilterExpression { + constructor( + public readonly operator: string, + public readonly operands: (Column | any | FilterExpression)[], + ) {} + + /** + * Convert this expression to OData filter syntax. + * @internal Used by QueryBuilder + */ + toODataFilter(useEntityIds?: boolean): string { + switch (this.operator) { + // Comparison operators + case "eq": + return this._binaryOp("eq", useEntityIds); + case "ne": + return this._binaryOp("ne", useEntityIds); + case "gt": + return this._binaryOp("gt", useEntityIds); + case "gte": + return this._binaryOp("ge", useEntityIds); + case "lt": + return this._binaryOp("lt", useEntityIds); + case "lte": + return this._binaryOp("le", useEntityIds); + case "in": + return this._inOp(useEntityIds); + case "notIn": + return this._notInOp(useEntityIds); + + // String operators + case "contains": + return this._functionOp("contains", useEntityIds); + case "startsWith": + return this._functionOp("startswith", useEntityIds); + case "endsWith": + return this._functionOp("endswith", useEntityIds); + + // Null checks + case "isNull": + return this._isNullOp(useEntityIds); + case "isNotNull": + return this._isNotNullOp(useEntityIds); + + // Logical operators + case "and": + return this._logicalOp("and", useEntityIds); + case "or": + return this._logicalOp("or", useEntityIds); + case "not": + return this._notOp(useEntityIds); + + default: + throw new Error(`Unknown operator: ${this.operator}`); + } + } + + private _binaryOp(op: string, useEntityIds?: boolean): string { + const [left, right] = this.operands; + // For binary ops, the column is typically the first operand and value is the second + // But we also support column-to-column comparisons, so check both + const columnForValue = + isColumn(left) && !isColumn(right) + ? left + : isColumn(right) && !isColumn(left) + ? right + : undefined; + const leftStr = this._operandToString(left, useEntityIds, columnForValue); + const rightStr = this._operandToString(right, useEntityIds, columnForValue); + return `${leftStr} ${op} ${rightStr}`; + } + + private _functionOp(fnName: string, useEntityIds?: boolean): string { + const [column, value] = this.operands; + const columnInstance = isColumn(column) ? column : undefined; + const columnStr = this._operandToString(column, useEntityIds); + const valueStr = this._operandToString(value, useEntityIds, columnInstance); + return `${fnName}(${columnStr}, ${valueStr})`; + } + + private _inOp(useEntityIds?: boolean): string { + const [column, values] = this.operands; + const columnInstance = isColumn(column) ? column : undefined; + const columnStr = this._operandToString(column, useEntityIds); + const valuesStr = (values as any[]) + .map((v) => this._operandToString(v, useEntityIds, columnInstance)) + .join(", "); + return `${columnStr} in (${valuesStr})`; + } + + private _notInOp(useEntityIds?: boolean): string { + const [column, values] = this.operands; + const columnInstance = isColumn(column) ? column : undefined; + const columnStr = this._operandToString(column, useEntityIds); + const valuesStr = (values as any[]) + .map((v) => this._operandToString(v, useEntityIds, columnInstance)) + .join(", "); + return `not (${columnStr} in (${valuesStr}))`; + } + + private _isNullOp(useEntityIds?: boolean): string { + const [column] = this.operands; + const columnStr = this._operandToString(column, useEntityIds); + return `${columnStr} eq null`; + } + + private _isNotNullOp(useEntityIds?: boolean): string { + const [column] = this.operands; + const columnStr = this._operandToString(column, useEntityIds); + return `${columnStr} ne null`; + } + + private _logicalOp(op: string, useEntityIds?: boolean): string { + const expressions = this.operands.map((expr) => { + if (expr instanceof FilterExpression) { + const innerExpr = expr.toODataFilter(useEntityIds); + // Wrap in parens if it's a logical expression to ensure precedence + if (expr.operator === "and" || expr.operator === "or") { + return `(${innerExpr})`; + } + return innerExpr; + } + throw new Error("Logical operators require FilterExpression operands"); + }); + return expressions.join(` ${op} `); + } + + private _notOp(useEntityIds?: boolean): string { + const [expr] = this.operands; + if (expr instanceof FilterExpression) { + return `not (${expr.toODataFilter(useEntityIds)})`; + } + throw new Error("NOT operator requires a FilterExpression operand"); + } + + private _operandToString( + operand: any, + useEntityIds?: boolean, + column?: Column, + ): string { + if (isColumn(operand)) { + const fieldIdentifier = operand.getFieldIdentifier(useEntityIds); + // Quote field names in OData filters per FileMaker OData API requirements + return needsFieldQuoting(fieldIdentifier) + ? `"${fieldIdentifier}"` + : fieldIdentifier; + } + + // If we have a column with an input validator, apply it to transform the value + let value = operand; + if (column?.inputValidator) { + try { + const result = column.inputValidator["~standard"].validate(value); + // Handle async validators (though they shouldn't be async for filters) + if (result instanceof Promise) { + // For filters, we can't use async validators, so skip transformation + // This is a limitation - async validators won't work in filters + value = operand; + } else if ("issues" in result && result.issues) { + // Validation failed, use original value + value = operand; + } else if ("value" in result) { + // Validation succeeded, use transformed value + value = result.value; + } + } catch (error) { + // If validation throws, use the original value (will likely cause a query error) + // This maintains backward compatibility and allows the server to handle validation + value = operand; + } + } + + if (typeof value === "string") { + return `'${value.replace(/'/g, "''")}'`; // Escape single quotes + } + if (value === null || value === undefined) { + return "null"; + } + if (value instanceof Date) { + return value.toISOString(); + } + return String(value); + } +} + +// ============================================================================ +// Comparison Operators +// ============================================================================ + +/** + * Equal operator - checks if column equals a value or another column. + * + * @example + * eq(users.name, "John") // name equals "John" + * eq(users.id, contacts.id_user) // cross-table comparison + */ +export function eq( + column: Column, + value: NoInfer, +): FilterExpression; +export function eq( + column1: Column, + column2: Column, +): FilterExpression; +export function eq(column: Column, value: any): FilterExpression { + return new FilterExpression("eq", [column, value]); +} + +/** + * Not equal operator - checks if column does not equal a value or another column. + * + * @example + * ne(users.status, "inactive") // status not equal to "inactive" + * ne(users.id, contacts.id_user) // cross-table comparison + */ +export function ne( + column: Column, + value: NoInfer, +): FilterExpression; +export function ne( + column1: Column, + column2: Column, +): FilterExpression; +export function ne(column: Column, value: any): FilterExpression { + return new FilterExpression("ne", [column, value]); +} + +/** + * Greater than operator - checks if column is greater than a value. + * + * @example + * gt(users.age, 18) // age greater than 18 + */ +export function gt( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("gt", [column, value]); +} + +/** + * Greater than or equal operator - checks if column is >= a value. + * + * @example + * gte(users.age, 18) // age >= 18 + */ +export function gte( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("gte", [column, value]); +} + +/** + * Less than operator - checks if column is less than a value. + * + * @example + * lt(users.age, 65) // age less than 65 + */ +export function lt( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("lt", [column, value]); +} + +/** + * Less than or equal operator - checks if column is <= a value. + * + * @example + * lte(users.age, 65) // age <= 65 + */ +export function lte( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("lte", [column, value]); +} + +// ============================================================================ +// String Operators +// ============================================================================ + +/** + * Contains operator - checks if a string column contains a substring. + * + * @example + * contains(users.name, "John") // name contains "John" + */ +export function contains( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("contains", [column, value]); +} + +/** + * Starts with operator - checks if a string column starts with a prefix. + * + * @example + * startsWith(users.email, "admin") // email starts with "admin" + */ +export function startsWith( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("startsWith", [column, value]); +} + +/** + * Ends with operator - checks if a string column ends with a suffix. + * + * @example + * endsWith(users.email, "@example.com") // email ends with "@example.com" + */ +export function endsWith( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("endsWith", [column, value]); +} + +// ============================================================================ +// Array Operators +// ============================================================================ + +/** + * In array operator - checks if column value is in an array of values. + * + * @example + * inArray(users.status, ["active", "pending"]) // status is "active" or "pending" + */ +export function inArray( + column: Column, + values: NoInfer[], +): FilterExpression { + return new FilterExpression("in", [column, values]); +} + +/** + * Not in array operator - checks if column value is not in an array of values. + * + * @example + * notInArray(users.status, ["deleted", "banned"]) // status is neither "deleted" nor "banned" + */ +export function notInArray( + column: Column, + values: NoInfer[], +): FilterExpression { + return new FilterExpression("notIn", [column, values]); +} + +// ============================================================================ +// Null Check Operators +// ============================================================================ + +/** + * Is null operator - checks if column value is null. + * + * @example + * isNull(users.deletedAt) // deletedAt is null + */ +export function isNull( + column: Column, +): FilterExpression { + return new FilterExpression("isNull", [column]); +} + +/** + * Is not null operator - checks if column value is not null. + * + * @example + * isNotNull(users.email) // email is not null + */ +export function isNotNull( + column: Column, +): FilterExpression { + return new FilterExpression("isNotNull", [column]); +} + +// ============================================================================ +// Logical Operators +// ============================================================================ + +/** + * AND operator - combines multiple filter expressions with logical AND. + * All expressions must be true for the record to match. + * + * @example + * and( + * eq(users.active, true), + * gt(users.age, 18) + * ) // active is true AND age > 18 + */ +export function and(...expressions: FilterExpression[]): FilterExpression { + if (expressions.length === 0) { + throw new Error("AND operator requires at least one expression"); + } + if (expressions.length === 1 && expressions[0] !== undefined) { + return expressions[0]; + } + return new FilterExpression("and", expressions); +} + +/** + * OR operator - combines multiple filter expressions with logical OR. + * At least one expression must be true for the record to match. + * + * @example + * or( + * eq(users.role, "admin"), + * eq(users.role, "moderator") + * ) // role is "admin" OR "moderator" + */ +export function or(...expressions: FilterExpression[]): FilterExpression { + if (expressions.length === 0) { + throw new Error("OR operator requires at least one expression"); + } + if (expressions.length === 1 && expressions[0] !== undefined) { + return expressions[0]; + } + return new FilterExpression("or", expressions); +} + +/** + * NOT operator - negates a filter expression. + * + * @example + * not(eq(users.status, "deleted")) // status is NOT "deleted" + */ +export function not(expression: FilterExpression): FilterExpression { + return new FilterExpression("not", [expression]); +} + +// ============================================================================ +// OrderBy Operators +// ============================================================================ + +/** + * OrderByExpression represents a sort order specification for a column. + * Used in orderBy() clauses to provide type-safe sorting with direction. + */ +export class OrderByExpression { + constructor( + public readonly column: Column, + public readonly direction: "asc" | "desc", + ) {} +} + +/** + * Type guard to check if a value is an OrderByExpression instance. + */ +export function isOrderByExpression(value: any): value is OrderByExpression { + return value instanceof OrderByExpression; +} + +/** + * Ascending order operator - sorts a column in ascending order. + * + * @example + * asc(users.name) // Sort by name ascending + */ +export function asc( + column: Column, +): OrderByExpression { + return new OrderByExpression(column, "asc"); +} + +/** + * Descending order operator - sorts a column in descending order. + * + * @example + * desc(users.age) // Sort by age descending + */ +export function desc( + column: Column, +): OrderByExpression { + return new OrderByExpression(column, "desc"); +} diff --git a/packages/fmodata/src/orm/table.ts b/packages/fmodata/src/orm/table.ts new file mode 100644 index 00000000..b61d9d73 --- /dev/null +++ b/packages/fmodata/src/orm/table.ts @@ -0,0 +1,767 @@ +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import { FieldBuilder, type ContainerDbType } from "./field-builders"; +import type { FieldBuilder as FieldBuilderType } from "./field-builders"; +import { Column, createColumn } from "./column"; +import { z } from "zod/v4"; + +/** + * Extract the output type from a FieldBuilder. + * This is what you get when reading from the database. + * + * This type extracts the TOutput type parameter, which is set by readValidator() + * and represents the transformed/validated output type. + */ +export type InferFieldOutput = + F extends FieldBuilder ? TOutput : never; + +/** + * Extract the input type from a FieldBuilder. + * This is what you pass when writing to the database. + * + * This type extracts the TInput type parameter, which is set by writeValidator() + * and represents the transformed/validated input type. + */ +type InferFieldInput = + F extends FieldBuilder ? TInput : never; + +/** + * Build a schema type from field builders (output/read types). + */ +type InferSchemaFromFields< + TFields extends Record>, +> = { + [K in keyof TFields]: InferFieldOutput; +}; + +/** + * Build an input schema type from field builders (input/write types). + * Used for insert and update operations. + */ +type InferInputSchemaFromFields< + TFields extends Record>, +> = { + [K in keyof TFields]: InferFieldInput; +}; + +/** + * Check if a field is a container field by inspecting its TDbType. + * Container fields have a branded TDbType that extends ContainerDbType. + */ +type IsContainerField = + F extends FieldBuilder + ? NonNullable extends ContainerDbType + ? true + : false + : false; + +/** + * Extract only selectable (non-container) field keys from a fields record. + * Container fields are excluded because they cannot be selected via $select in FileMaker OData. + */ +type SelectableFieldKeys< + TFields extends Record>, +> = { + [K in keyof TFields]: IsContainerField extends true ? never : K; +}[keyof TFields]; + +/** + * Build a schema type excluding container fields (for query return types). + * This is used to ensure container fields don't appear in the return type + * when using defaultSelect: "schema" or "all". + */ +type InferSelectableSchemaFromFields< + TFields extends Record>, +> = { + [K in SelectableFieldKeys]: InferFieldOutput; +}; + +/** + * Internal Symbols for table properties (hidden from IDE autocomplete). + * These are used to store internal configuration that shouldn't be visible + * when users access table columns. + * @internal - Not exported from public API, only accessible via FMTable.Symbol + */ +const FMTableName = Symbol.for("fmodata:FMTableName"); +const FMTableEntityId = Symbol.for("fmodata:FMTableEntityId"); +const FMTableSchema = Symbol.for("fmodata:FMTableSchema"); +const FMTableFields = Symbol.for("fmodata:FMTableFields"); +const FMTableNavigationPaths = Symbol.for("fmodata:FMTableNavigationPaths"); +const FMTableDefaultSelect = Symbol.for("fmodata:FMTableDefaultSelect"); +const FMTableBaseTableConfig = Symbol.for("fmodata:FMTableBaseTableConfig"); +const FMTableUseEntityIds = Symbol.for("fmodata:FMTableUseEntityIds"); + +/** + * Base table class with Symbol-based internal properties. + * This follows the Drizzle ORM pattern where internal configuration + * is stored via Symbols, keeping it hidden from IDE autocomplete. + */ +export class FMTable< + TFields extends Record> = any, + TName extends string = string, + TNavigationPaths extends readonly string[] = readonly string[], +> { + /** + * Internal Symbols for accessing table metadata. + * @internal - Not intended for public use. Access table properties via columns instead. + */ + static readonly Symbol = { + Name: FMTableName, + EntityId: FMTableEntityId, + UseEntityIds: FMTableUseEntityIds, + Schema: FMTableSchema, + Fields: FMTableFields, + NavigationPaths: FMTableNavigationPaths, + DefaultSelect: FMTableDefaultSelect, + BaseTableConfig: FMTableBaseTableConfig, + }; + + /** @internal */ + [FMTableName]: TName; + + /** @internal */ + [FMTableEntityId]?: `FMTID:${string}`; + + /** @internal */ + [FMTableUseEntityIds]?: boolean; + + /** @internal */ + [FMTableSchema]: StandardSchemaV1>; + + /** @internal */ + [FMTableFields]: TFields; + + /** @internal */ + [FMTableNavigationPaths]: TNavigationPaths; + + /** @internal */ + [FMTableDefaultSelect]: + | "all" + | "schema" + | Record>; + + /** @internal */ + [FMTableBaseTableConfig]: { + schema: Record; + inputSchema?: Record; + idField?: keyof TFields; + required: readonly (keyof TFields)[]; + readOnly: readonly (keyof TFields)[]; + containerFields: readonly (keyof TFields)[]; + fmfIds?: Record; + }; + + constructor(config: { + name: TName; + entityId?: `FMTID:${string}`; + useEntityIds?: boolean; + schema: StandardSchemaV1>; + fields: TFields; + navigationPaths: TNavigationPaths; + defaultSelect: "all" | "schema" | Record>; + baseTableConfig: { + schema: Record; + inputSchema?: Record; + idField?: keyof TFields; + required: readonly (keyof TFields)[]; + readOnly: readonly (keyof TFields)[]; + containerFields: readonly (keyof TFields)[]; + fmfIds?: Record; + }; + }) { + this[FMTableName] = config.name; + this[FMTableEntityId] = config.entityId; + this[FMTableUseEntityIds] = config.useEntityIds; + this[FMTableSchema] = config.schema; + this[FMTableFields] = config.fields; + this[FMTableNavigationPaths] = config.navigationPaths; + this[FMTableDefaultSelect] = config.defaultSelect; + this[FMTableBaseTableConfig] = config.baseTableConfig; + } +} + +/** + * Type helper to extract the column map from fields. + * Table name is baked into each column type for validation. + * Container fields are marked with IsContainer=true. + * Columns include both output type (for reading) and input type (for writing/filtering). + */ +export type ColumnMap< + TFields extends Record>, + TName extends string, +> = { + [K in keyof TFields]: Column< + InferFieldOutput, + InferFieldInput, + TName, + IsContainerField + >; +}; + +/** + * Extract only selectable (non-container) columns from a table. + * This is used to prevent selecting container fields in queries. + */ +export type SelectableColumnMap< + TFields extends Record>, + TName extends string, +> = { + [K in SelectableFieldKeys]: Column< + InferFieldOutput, + InferFieldInput, + TName, + false + >; +}; + +/** + * Validates that a select object doesn't contain container field columns. + * Returns never if any container fields are found, otherwise returns the original type. + */ +export type ValidateNoContainerFields< + TSelect extends Record>, +> = { + [K in keyof TSelect]: TSelect[K] extends Column + ? never + : TSelect[K]; +} extends TSelect + ? TSelect + : { + [K in keyof TSelect]: TSelect[K] extends Column + ? "❌ Container fields cannot be selected. Use .getSingleField() instead." + : TSelect[K]; + }; + +/** + * Extract the keys from a defaultSelect function's return type. + * Used to infer which fields are selected by default for type narrowing. + */ +type ExtractDefaultSelectKeys< + TDefaultSelect, + TFields extends Record>, + TName extends string, +> = TDefaultSelect extends (columns: ColumnMap) => infer R + ? keyof R + : TDefaultSelect extends "schema" + ? keyof TFields + : keyof TFields; // "all" defaults to all keys + +/** + * Complete table type with both metadata (via Symbols) and column accessors. + * This is the return type of fmTableOccurrence - users see columns directly, + * but internal config is hidden via Symbols. + */ +export type FMTableWithColumns< + TFields extends Record>, + TName extends string, + TNavigationPaths extends readonly string[] = readonly string[], +> = FMTable & ColumnMap; + +/** + * Options for fmTableOccurrence function. + * Provides autocomplete-friendly typing while preserving inference for navigationPaths. + */ +export interface FMTableOccurrenceOptions< + TFields extends Record>, + TName extends string, +> { + /** The entity ID (FMTID) for this table occurrence */ + entityId?: `FMTID:${string}`; + + /** + * Default select behavior: + * - "all": Select all fields (including related tables) + * - "schema": Select only schema-defined fields (default) + * - function: Custom selection from columns + */ + defaultSelect?: + | "all" + | "schema" + | (( + columns: ColumnMap, + ) => Record>); + + /** Navigation paths available from this table (for expand operations) */ + navigationPaths?: readonly string[]; + + /** Whether to use entity IDs (FMTID/FMFID) instead of names in queries */ + useEntityIds?: boolean; +} + +/** + * Create a table occurrence with field builders. + * This is the main API for defining tables in the new ORM style. + * + * @example + * const users = fmTableOccurrence("users", { + * id: textField().primaryKey().entityId("FMFID:1"), + * name: textField().notNull().entityId("FMFID:6"), + * active: numberField() + * .outputValidator(z.coerce.boolean()) + * .inputValidator(z.boolean().transform(v => v ? 1 : 0)) + * .entityId("FMFID:7"), + * }, { + * entityId: "FMTID:100", + * defaultSelect: "schema", + * navigationPaths: ["contacts"], + * }); + * + * // Access columns + * users.id // Column + * users.name // Column + * + * // Use in queries + * db.from(users).select("id", "name").where(eq(users.active, true)) + */ +export function fmTableOccurrence< + const TName extends string, + const TFields extends Record>, + const TNavPaths extends readonly string[] = readonly [], +>( + name: TName, + fields: TFields, + options?: FMTableOccurrenceOptions & { + /** Navigation paths available from this table (for expand operations) */ + navigationPaths?: TNavPaths; + }, +): FMTableWithColumns { + // Extract configuration from field builders + const fieldConfigs = Object.entries(fields).map(([fieldName, builder]) => ({ + fieldName, + config: (builder as any)._getConfig(), + })); + + // Find primary key field + const primaryKeyField = fieldConfigs.find((f) => f.config.primaryKey); + const idField = primaryKeyField?.fieldName; + + // Collect required fields (notNull fields) + const required = fieldConfigs + .filter((f) => f.config.notNull) + .map((f) => f.fieldName); + + // Collect read-only fields + const readOnly = fieldConfigs + .filter((f) => f.config.readOnly) + .map((f) => f.fieldName); + + // Collect container fields (cannot be selected via $select) + const containerFields = fieldConfigs + .filter((f) => f.config.fieldType === "container") + .map((f) => f.fieldName); + + // Collect entity IDs + const fmfIds: Record = {}; + for (const { fieldName, config } of fieldConfigs) { + if (config.entityId) { + fmfIds[fieldName] = config.entityId; + } + } + + // Build Zod schema from field builders (output/read validators) + const zodSchema: Record = {}; + // Build input schema from field builders (input/write validators) + const inputSchema: Record = {}; + + for (const { fieldName, config } of fieldConfigs) { + // Use outputValidator if provided, otherwise create a basic validator + if (config.outputValidator) { + zodSchema[fieldName] = config.outputValidator; + } else { + // Create a default validator based on field type and nullability + let validator: any; + switch (config.fieldType) { + case "text": + case "date": + case "time": + case "timestamp": + case "container": + case "calculated": + validator = z.string(); + break; + case "number": + validator = z.number(); + break; + default: + validator = z.unknown(); + } + + // Add nullability if not marked as notNull + if (!config.notNull) { + validator = validator.nullable(); + } + + zodSchema[fieldName] = validator; + } + + // Store inputValidator if provided (for write operations) + if (config.inputValidator) { + inputSchema[fieldName] = config.inputValidator; + } + } + + // Create a schema validator for the entire table + const tableSchema = z.object(zodSchema) as unknown as StandardSchemaV1< + any, + InferSchemaFromFields + >; + + // Build BaseTable-compatible config + const baseTableConfig = { + schema: zodSchema as Record, + inputSchema: (Object.keys(inputSchema).length > 0 + ? inputSchema + : undefined) as Record | undefined, + idField: idField as keyof TFields | undefined, + required: required as readonly (keyof TFields)[], + readOnly: readOnly as readonly (keyof TFields)[], + containerFields: containerFields as readonly (keyof TFields)[], + fmfIds: (Object.keys(fmfIds).length > 0 ? fmfIds : undefined) as + | Record + | undefined, + }; + + // Create column instances + const columns = {} as ColumnMap; + for (const [fieldName, builder] of Object.entries(fields)) { + const config = (builder as any)._getConfig(); + (columns as any)[fieldName] = new Column({ + fieldName: String(fieldName), + entityId: config.entityId, + tableName: name, + tableEntityId: options?.entityId, + inputValidator: config.inputValidator, + }); + } + + // Resolve defaultSelect: if it's a function, call it with columns; otherwise use as-is + const defaultSelectOption = options?.defaultSelect ?? "schema"; + const resolvedDefaultSelect: + | "all" + | "schema" + | Record> = + typeof defaultSelectOption === "function" + ? defaultSelectOption(columns as ColumnMap) + : defaultSelectOption; + + // Create the FMTable instance with Symbol-based internal properties + const navigationPaths = (options?.navigationPaths ?? []) as TNavPaths; + const table = new FMTable({ + name, + entityId: options?.entityId, + useEntityIds: options?.useEntityIds, + schema: tableSchema, + fields, + navigationPaths, + defaultSelect: resolvedDefaultSelect, + baseTableConfig, + }); + + // Assign columns to the table instance (making them accessible directly) + Object.assign(table, columns); + + return table as FMTableWithColumns; +} + +// /** +// * Type guard to check if a value is a TableOccurrence or FMTable. +// * Supports both Symbol-based (new) and underscore-prefixed (legacy) formats. +// */ +// function isTableOccurrence(value: any): value is TableOccurrence { +// if (!value || typeof value !== "object") { +// return false; +// } + +// // Check for Symbol-based format (new FMTable class) +// if ( +// FMTableName in value && +// FMTableSchema in value && +// FMTableFields in value +// ) { +// return typeof value[FMTableName] === "string"; +// } + +// // Check for underscore-prefixed format (legacy interface) +// if ("_name" in value && "_schema" in value && "_fields" in value) { +// return typeof value._name === "string"; +// } + +// return false; +// } + +/** + * Helper to extract the schema type from a TableOccurrence or FMTable. + */ +export type InferTableSchema = + T extends FMTable + ? InferSchemaFromFields + : never; + +/** + * Extract the schema type from an FMTable instance. + * This is used to infer the schema from table objects passed to db.from(), expand(), etc. + */ +export type InferSchemaOutputFromFMTable> = + T extends FMTable + ? InferSchemaFromFields + : never; + +/** + * Extract the input schema type from an FMTable instance. + * This is used for insert and update operations where we need write types. + */ +export type InferInputSchemaFromFMTable> = + T extends FMTable + ? InferInputSchemaFromFields + : never; + +/** + * Helper type to check if a FieldBuilder's input type excludes null and undefined. + * This checks the TInput type parameter, which preserves nullability from notNull(). + */ +type FieldInputExcludesNullish = + F extends FieldBuilder + ? null extends TInput + ? false + : undefined extends TInput + ? false + : true + : false; + +/** + * Check if a FieldBuilder is readOnly at the type level + */ +type IsFieldReadOnly = + F extends FieldBuilderType + ? ReadOnly extends true + ? true + : false + : false; + +/** + * Compute insert data type from FMTable, making notNull fields required. + * Fields are required if their FieldBuilder's TInput type excludes null/undefined. + * All other fields are optional (can be omitted). + * readOnly fields are excluded (including primaryKey/idField since they're automatically readOnly). + */ +export type InsertDataFromFMTable> = + T extends FMTable + ? { + [K in keyof TFields as IsFieldReadOnly extends true + ? never + : FieldInputExcludesNullish extends true + ? K + : never]: InferFieldInput; + } & { + [K in keyof TFields as IsFieldReadOnly extends true + ? never + : FieldInputExcludesNullish extends true + ? never + : K]?: InferFieldInput; + } + : never; + +/** + * Compute update data type from FMTable. + * All fields are optional, but readOnly fields are excluded (including primaryKey/idField). + */ +export type UpdateDataFromFMTable> = + T extends FMTable + ? { + [K in keyof TFields as IsFieldReadOnly extends true + ? never + : K]?: InferFieldInput; + } + : never; + +/** + * Extract the table name type from an FMTable. + * This is a workaround since we can't directly index Symbols in types. + */ +export type ExtractTableName> = + T extends FMTable ? Name : never; + +/** + * Validates that a target table's name matches one of the source table's navigationPaths. + * Used to ensure type-safe expand/navigate operations. + */ +export type ValidExpandTarget< + SourceTable extends FMTable | undefined, + TargetTable extends FMTable, +> = + SourceTable extends FMTable + ? ExtractTableName extends SourceNavPaths[number] + ? TargetTable + : never + : TargetTable; + +// ============================================================================ +// Helper Functions for Accessing FMTable Internal Properties +// ============================================================================ + +/** + * Get the table name from an FMTable instance. + * @param table - FMTable instance + * @returns The table name + */ +export function getTableName>(table: T): string { + return table[FMTableName]; +} + +/** + * Get the entity ID (FMTID) from an FMTable instance. + * @param table - FMTable instance + * @returns The entity ID or undefined if not using entity IDs + */ +export function getTableEntityId>( + table: T, +): string | undefined { + return table[FMTableEntityId]; +} + +/** + * Get the schema validator from an FMTable instance. + * @param table - FMTable instance + * @returns The StandardSchemaV1 validator + */ +export function getTableSchema>( + table: T, +): StandardSchemaV1 { + return table[FMTableSchema]; +} + +/** + * Get the fields from an FMTable instance. + * @param table - FMTable instance + * @returns The fields record + */ +export function getTableFields>(table: T) { + return table[FMTableFields]; +} + +/** + * Get the navigation paths from an FMTable instance. + * @param table - FMTable instance + * @returns Array of navigation path names + */ +export function getNavigationPaths>( + table: T, +): readonly string[] { + return table[FMTableNavigationPaths]; +} + +/** + * Get the default select configuration from an FMTable instance. + * @param table - FMTable instance + * @returns Default select configuration + */ +export function getDefaultSelect>(table: T) { + return table[FMTableDefaultSelect]; +} + +/** + * Get the base table configuration from an FMTable instance. + * This provides access to schema, idField, required fields, readOnly fields, and field IDs. + * @param table - FMTable instance + * @returns Base table configuration object + */ +export function getBaseTableConfig>(table: T) { + return table[FMTableBaseTableConfig]; +} + +/** + * Check if an FMTable instance is using entity IDs (both FMTID and FMFIDs). + * @param table - FMTable instance + * @returns True if using entity IDs, false otherwise + */ +export function isUsingEntityIds>( + table: T, +): boolean { + return ( + table[FMTableEntityId] !== undefined && + table[FMTableBaseTableConfig].fmfIds !== undefined + ); +} + +/** + * Get the field ID (FMFID) for a given field name, or the field name itself if not using IDs. + * @param table - FMTable instance + * @param fieldName - Field name to get the ID for + * @returns The FMFID string or the original field name + */ +export function getFieldId>( + table: T, + fieldName: string, +): string { + const config = table[FMTableBaseTableConfig]; + if (config.fmfIds && fieldName in config.fmfIds) { + const fieldId = config.fmfIds[fieldName]; + if (fieldId) { + return fieldId; + } + } + return fieldName; +} + +/** + * Get the field name for a given field ID (FMFID), or the ID itself if not found. + * @param table - FMTable instance + * @param fieldId - The FMFID to get the field name for + * @returns The field name or the original ID + */ +export function getFieldName>( + table: T, + fieldId: string, +): string { + const config = table[FMTableBaseTableConfig]; + if (config.fmfIds) { + for (const [fieldName, fmfId] of Object.entries(config.fmfIds)) { + if (fmfId === fieldId) { + return fieldName; + } + } + } + return fieldId; +} +/** + * Get the table ID (FMTID or name) from an FMTable instance. + * Returns the FMTID if available, otherwise returns the table name. + * @param table - FMTable instance + * @returns The FMTID string or the table name + */ +export function getTableId>(table: T): string { + return table[FMTableEntityId] ?? table[FMTableName]; +} + +/** + * Get all columns from a table as an object. + * Useful for selecting all fields except some using destructuring. + * + * @example + * const { password, ...cols } = getTableColumns(users) + * db.from(users).list().select(cols) + * + * @param table - FMTable instance + * @returns Object with all columns from the table + */ +export function getTableColumns>( + table: T, +): ColumnMap> { + const fields = table[FMTableFields]; + const tableName = table[FMTableName]; + const tableEntityId = table[FMTableEntityId]; + const baseConfig = table[FMTableBaseTableConfig]; + + const columns = {} as ColumnMap>; + for (const [fieldName, builder] of Object.entries(fields)) { + const config = (builder as any)._getConfig(); + (columns as any)[fieldName] = new Column({ + fieldName: String(fieldName), + entityId: baseConfig.fmfIds?.[fieldName], + tableName: tableName, + tableEntityId: tableEntityId, + inputValidator: config.inputValidator, + }); + } + + return columns; +} diff --git a/packages/fmodata/src/transform.ts b/packages/fmodata/src/transform.ts new file mode 100644 index 00000000..64e8db7e --- /dev/null +++ b/packages/fmodata/src/transform.ts @@ -0,0 +1,263 @@ +import type { FMTable } from "./orm/table"; +import { + getBaseTableConfig, + getFieldId, + getFieldName, + getTableId, + getTableName, + isUsingEntityIds, +} from "./orm/table"; +import type { StandardSchemaV1 } from "@standard-schema/spec"; + +/** + * Transforms field names to FileMaker field IDs (FMFID) in an object + * @param data - Object with field names as keys + * @param table - FMTable instance to get field IDs from + * @returns Object with FMFID keys instead of field names + */ +export function transformFieldNamesToIds>( + data: T, + table: FMTable, +): Record { + const config = getBaseTableConfig(table); + if (!config.fmfIds) { + return data; + } + + const transformed: Record = {}; + for (const [fieldName, value] of Object.entries(data)) { + const fieldId = getFieldId(table, fieldName); + transformed[fieldId] = value; + } + return transformed; +} + +/** + * Transforms FileMaker field IDs (FMFID) to field names in an object + * @param data - Object with FMFID keys + * @param table - FMTable instance to get field names from + * @returns Object with field names as keys instead of FMFIDs + */ +export function transformFieldIdsToNames>( + data: T, + table: FMTable, +): Record { + const config = getBaseTableConfig(table); + if (!config.fmfIds) { + return data; + } + + const transformed: Record = {}; + for (const [key, value] of Object.entries(data)) { + // Check if this is an OData metadata field (starts with @) + if (key.startsWith("@")) { + transformed[key] = value; + continue; + } + + const fieldName = getFieldName(table, key); + transformed[fieldName] = value; + } + return transformed; +} + +/** + * Transforms a field name to FMFID or returns the field name if not using IDs + * @param fieldName - The field name to transform + * @param table - FMTable instance to get field ID from + * @returns The FMFID or field name + */ +export function transformFieldName( + fieldName: string, + table: FMTable, +): string { + return getFieldId(table, fieldName); +} + +/** + * Transforms a table name to FMTID or returns the name if not using IDs + * @param table - FMTable instance to get table ID from + * @returns The FMTID or table name + */ +export function transformTableName(table: FMTable): string { + return getTableId(table); +} + +/** + * Gets both table name and ID from a table + * @param table - FMTable instance + * @returns Object with name (always present) and id (may be undefined if not using IDs) + */ +export function getTableIdentifiers( + table: FMTable, +): { name: string; id: string | undefined } { + return { + name: getTableName(table), + id: isUsingEntityIds(table) ? getTableId(table) : undefined, + }; +} + +/** + * Transforms response data by converting field IDs back to field names recursively. + * Handles both single records and arrays of records, as well as nested expand relationships. + * + * @param data - Response data from FileMaker (can be single record, array, or wrapped in value property) + * @param table - FMTable instance for the main table + * @param expandConfigs - Configuration for expanded relations (optional) + * @returns Transformed data with field names instead of IDs + */ +export function transformResponseFields( + data: any, + table: FMTable, + expandConfigs?: Array<{ + relation: string; + table?: FMTable; + }>, +): any { + const config = getBaseTableConfig(table); + if (!config.fmfIds) { + return data; + } + + // Handle null/undefined + if (data === null || data === undefined) { + return data; + } + + // Handle OData list response with value array + if (data.value && Array.isArray(data.value)) { + return { + ...data, + value: data.value.map((record: any) => + transformSingleRecord(record, table, expandConfigs), + ), + }; + } + + // Handle array of records + if (Array.isArray(data)) { + return data.map((record) => + transformSingleRecord(record, table, expandConfigs), + ); + } + + // Handle single record + return transformSingleRecord(data, table, expandConfigs); +} + +/** + * Transforms a single record, converting field IDs to names and handling nested expands + */ +function transformSingleRecord( + record: any, + table: FMTable, + expandConfigs?: Array<{ + relation: string; + table?: FMTable; + }>, +): any { + if (!record || typeof record !== "object") { + return record; + } + + const transformed: Record = {}; + + for (const [key, value] of Object.entries(record)) { + // Preserve OData metadata fields + if (key.startsWith("@")) { + transformed[key] = value; + continue; + } + + // Check if this is an expanded relation (by relation name) + let expandConfig = expandConfigs?.find((ec) => ec.relation === key); + + // If not found by relation name, check if this key is a FMTID + // (FileMaker returns expanded relations with FMTID keys when using entity IDs) + if (!expandConfig && key.startsWith("FMTID:")) { + expandConfig = expandConfigs?.find( + (ec) => + ec.table && isUsingEntityIds(ec.table) && getTableId(ec.table) === key, + ); + } + + if (expandConfig && expandConfig.table) { + // Transform the expanded relation data recursively + // Use the relation name (not the FMTID) as the key + const relationKey = expandConfig.relation; + + if (Array.isArray(value)) { + transformed[relationKey] = value.map((nestedRecord) => + transformSingleRecord( + nestedRecord, + expandConfig.table!, + undefined, // Don't pass nested expand configs for now + ), + ); + } else if (value && typeof value === "object") { + transformed[relationKey] = transformSingleRecord( + value, + expandConfig.table, + undefined, + ); + } else { + transformed[relationKey] = value; + } + continue; + } + + // Transform field ID to field name + const fieldName = getFieldName(table, key); + transformed[fieldName] = value; + } + + return transformed; +} + +/** + * Transforms an array of field names to FMFIDs + * @param fieldNames - Array of field names + * @param table - FMTable instance to get field IDs from + * @returns Array of FMFIDs or field names + */ +export function transformFieldNamesArray( + fieldNames: string[], + table: FMTable, +): string[] { + const config = getBaseTableConfig(table); + if (!config.fmfIds) { + return fieldNames; + } + + return fieldNames.map((fieldName) => getFieldId(table, fieldName)); +} + +/** + * Transforms a field name in an orderBy string (e.g., "name desc" -> "FMFID:1 desc") + * @param orderByString - The orderBy string (field name with optional asc/desc) + * @param table - FMTable instance to get field ID from + * @returns Transformed orderBy string with FMFID + */ +export function transformOrderByField( + orderByString: string, + table: FMTable | undefined, +): string { + if (!table) { + return orderByString; + } + const config = getBaseTableConfig(table); + if (!config || !config.fmfIds) { + return orderByString; + } + + // Parse the orderBy string to extract field name and direction + const parts = orderByString.trim().split(/\s+/); + const fieldName = parts[0]; + if (!fieldName) { + return orderByString; + } + const direction = parts[1]; // "asc" or "desc" or undefined + + const fieldId = getFieldId(table, fieldName); + return direction ? `${fieldId} ${direction}` : fieldId; +} diff --git a/packages/fmodata/src/types.ts b/packages/fmodata/src/types.ts new file mode 100644 index 00000000..4c310d0d --- /dev/null +++ b/packages/fmodata/src/types.ts @@ -0,0 +1,287 @@ +import { type FFetchOptions } from "@fetchkit/ffetch"; +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import type { InternalLogger } from "./logger"; + +export type Auth = { username: string; password: string } | { apiKey: string }; + +export interface ExecutableBuilder { + execute(): Promise>; + getRequestConfig(): { method: string; url: string; body?: any }; + + /** + * Convert this builder to a native Request object for batch processing. + * @param baseUrl - The base URL for the OData service + * @param options - Optional execution options (e.g., includeODataAnnotations) + * @returns A native Request object + */ + toRequest(baseUrl: string, options?: ExecuteOptions): Request; + + /** + * Process a raw Response object into a typed Result. + * This allows builders to apply their own validation and transformation logic. + * @param response - The native Response object from the batch operation + * @param options - Optional execution options (e.g., skipValidation, includeODataAnnotations) + * @returns A typed Result with the builder's expected return type + */ + processResponse( + response: Response, + options?: ExecuteOptions, + ): Promise>; +} + +export interface ExecutionContext { + _makeRequest( + url: string, + options?: RequestInit & FFetchOptions & { useEntityIds?: boolean }, + ): Promise>; + _setUseEntityIds?(useEntityIds: boolean): void; + _getUseEntityIds?(): boolean; + _getBaseUrl?(): string; + _getLogger?(): InternalLogger; +} + +export type InferSchemaType> = { + [K in keyof Schema]: Schema[K] extends StandardSchemaV1 + ? Output + : never; +}; + +export type WithSystemFields = + T extends Record + ? T & { + ROWID: number; + ROWMODID: number; + } + : never; + +// Helper type to exclude system fields from a union of keys +export type ExcludeSystemFields = Exclude< + T, + "ROWID" | "ROWMODID" +>; + +// Helper type to omit system fields from an object type +export type OmitSystemFields = Omit; + +// OData record metadata fields (present on each record) +export type ODataRecordMetadata = { + "@id": string; + "@editLink": string; +}; + +// OData response wrapper (top-level, internal use only) +export type ODataListResponse = { + "@context": string; + value: (T & ODataRecordMetadata)[]; +}; + +export type ODataSingleResponse = T & + ODataRecordMetadata & { + "@context": string; + }; + +// OData response for single field values +export type ODataFieldResponse = { + "@context": string; + value: T; +}; + +// Result pattern for execute responses +export type Result = + | { data: T; error: undefined } + | { data: undefined; error: E }; + +// Batch operation result types +export type BatchItemResult = { + data: T | undefined; + error: import("./errors").FMODataErrorType | undefined; + status: number; // HTTP status code (0 for truncated) +}; + +export type BatchResult = { + results: { [K in keyof T]: BatchItemResult }; + successCount: number; + errorCount: number; + truncated: boolean; + firstErrorIndex: number | null; +}; + +// Make specific keys required, rest optional +export type MakeFieldsRequired = Partial & + Required>; + +// Extract keys from schema where validator doesn't allow null/undefined (auto-required fields) +export type AutoRequiredKeys> = + { + [K in keyof Schema]: Extract< + StandardSchemaV1.InferOutput, + null | undefined + > extends never + ? K + : never; + }[keyof Schema]; + +// Helper type to compute excluded fields (readOnly fields + idField) +export type ExcludedFields< + IdField extends keyof any | undefined, + ReadOnly extends readonly any[], +> = IdField extends keyof any ? IdField | ReadOnly[number] : ReadOnly[number]; + +// Helper type for InsertData computation +type ComputeInsertData< + Schema extends Record, + IdField extends keyof Schema | undefined, + Required extends readonly any[], + ReadOnly extends readonly any[], +> = [Required[number]] extends [keyof InferSchemaType] + ? Required extends readonly (keyof InferSchemaType)[] + ? MakeFieldsRequired< + Omit, ExcludedFields>, + Exclude< + AutoRequiredKeys | Required[number], + ExcludedFields + > + > + : MakeFieldsRequired< + Omit, ExcludedFields>, + Exclude, ExcludedFields> + > + : MakeFieldsRequired< + Omit, ExcludedFields>, + Exclude, ExcludedFields> + >; + +export type ExecuteOptions = { + includeODataAnnotations?: boolean; + skipValidation?: boolean; + /** + * Overrides the default behavior of the database to use entity IDs (rather than field names) in THIS REQUEST ONLY + */ + useEntityIds?: boolean; +}; + +/** + * Type for the fetchHandler callback function. + * This is a convenience type export that matches the fetchHandler signature in FFetchOptions. + * + * @example + * ```typescript + * import type { FetchHandler } from '@proofkit/fmodata'; + * + * const myFetchHandler: FetchHandler = (input, init) => { + * console.log('Custom fetch:', input); + * return fetch(input, init); + * }; + * + * await query.execute({ + * fetchHandler: myFetchHandler + * }); + * ``` + */ +export type FetchHandler = ( + input: RequestInfo | URL, + init?: RequestInit, +) => Promise; + +/** + * Combined type for execute() method options. + * + * Uses FFetchOptions from @fetchkit/ffetch to ensure proper type inference. + * FFetchOptions is re-exported in the package to ensure type availability in consuming packages. + */ +export type ExecuteMethodOptions = + RequestInit & FFetchOptions & ExecuteOptions & EO; + +/** + * Get the Accept header value based on includeODataAnnotations option + * @param includeODataAnnotations - Whether to include OData annotations + * @returns Accept header value + */ +export function getAcceptHeader(includeODataAnnotations?: boolean): string { + return includeODataAnnotations === true + ? "application/json" + : "application/json;odata.metadata=none"; +} + +export type ConditionallyWithODataAnnotations< + T, + IncludeODataAnnotations extends boolean, +> = IncludeODataAnnotations extends true + ? T & { + "@id": string; + "@editLink": string; + } + : T; + +// Helper type to extract schema from a FMTable +export type ExtractSchemaFromOccurrence = Occ extends { + baseTable: { schema: infer S }; +} + ? S extends Record + ? S + : Record + : Record; + +export type GenericFieldMetadata = { + $Nullable?: boolean; + "@Index"?: boolean; + "@Calculation"?: boolean; + "@Summary"?: boolean; + "@Global"?: boolean; + "@Org.OData.Core.V1.Permissions"?: "Org.OData.Core.V1.Permission@Read"; +}; + +export type StringFieldMetadata = GenericFieldMetadata & { + $Type: "Edm.String"; + $DefaultValue?: "USER" | "USERNAME" | "CURRENT_USER"; + $MaxLength?: number; +}; + +export type DecimalFieldMetadata = GenericFieldMetadata & { + $Type: "Edm.Decimal"; + "@AutoGenerated"?: boolean; +}; + +export type DateFieldMetadata = GenericFieldMetadata & { + $Type: "Edm.Date"; + $DefaultValue?: "CURDATE" | "CURRENT_DATE"; +}; + +export type TimeOfDayFieldMetadata = GenericFieldMetadata & { + $Type: "Edm.TimeOfDay"; + $DefaultValue?: "CURTIME" | "CURRENT_TIME"; +}; + +export type DateTimeOffsetFieldMetadata = GenericFieldMetadata & { + $Type: "Edm.Date"; + $DefaultValue?: "CURTIMESTAMP" | "CURRENT_TIMESTAMP"; + "@VersionId"?: boolean; +}; + +export type StreamFieldMetadata = { + $Type: "Edm.Stream"; + $Nullable?: boolean; + "@EnclosedPath": string; + "@ExternalOpenPath": string; + "@ExternalSecurePath"?: string; +}; + +export type FieldMetadata = + | StringFieldMetadata + | DecimalFieldMetadata + | DateFieldMetadata + | TimeOfDayFieldMetadata + | DateTimeOffsetFieldMetadata + | StreamFieldMetadata; + +export type EntityType = { + $Kind: "EntityType"; + $Key: string[]; +} & Record; + +export type EntitySet = { + $Kind: "EntitySet"; + $Type: string; +}; + +export type Metadata = Record; diff --git a/packages/fmodata/src/validation.ts b/packages/fmodata/src/validation.ts new file mode 100644 index 00000000..116ba375 --- /dev/null +++ b/packages/fmodata/src/validation.ts @@ -0,0 +1,552 @@ +import type { ODataRecordMetadata } from "./types"; +import { StandardSchemaV1 } from "@standard-schema/spec"; +import type { FMTable } from "./orm/table"; +import { + ValidationError, + ResponseStructureError, + RecordCountMismatchError, +} from "./errors"; + +/** + * Validates and transforms input data for insert/update operations. + * Applies input validators (writeValidators) to transform user input to database format. + * Fields without input validators are passed through unchanged. + * + * @param data - The input data to validate and transform + * @param inputSchema - Optional schema containing input validators for each field + * @returns Transformed data ready to send to the server + * @throws ValidationError if any field fails validation + */ +export async function validateAndTransformInput>( + data: Partial, + inputSchema?: Record, +): Promise> { + // If no input schema, return data as-is + if (!inputSchema) { + return data; + } + + const transformedData: Record = { ...data }; + + // Process each field that has an input validator + for (const [fieldName, fieldSchema] of Object.entries(inputSchema)) { + // Only process fields that are present in the input data + if (fieldName in data) { + const inputValue = data[fieldName]; + + try { + // Run the input validator to transform the value + let result = fieldSchema["~standard"].validate(inputValue); + if (result instanceof Promise) { + result = await result; + } + + // Check for validation errors + if (result.issues) { + throw new ValidationError( + `Input validation failed for field '${fieldName}'`, + result.issues, + { + field: fieldName, + value: inputValue, + cause: result.issues, + }, + ); + } + + // Store the transformed value + transformedData[fieldName] = result.value; + } catch (error) { + // If it's already a ValidationError, re-throw it + if (error instanceof ValidationError) { + throw error; + } + + // Otherwise, wrap the error + throw new ValidationError( + `Input validation failed for field '${fieldName}'`, + [], + { + field: fieldName, + value: inputValue, + cause: error, + }, + ); + } + } + } + + // Fields without input validators are already in transformedData (passed through) + return transformedData as Partial; +} + +// Type for expand validation configuration +export type ExpandValidationConfig = { + relation: string; + targetSchema?: Record; + targetTable?: FMTable; + table?: FMTable; // For transformation + selectedFields?: string[]; + nestedExpands?: ExpandValidationConfig[]; +}; + +/** + * Validates a single record against a schema, only validating selected fields. + * Also validates expanded relations if expandConfigs are provided. + */ +export async function validateRecord>( + record: any, + schema: Record | undefined, + selectedFields?: (keyof T)[], + expandConfigs?: ExpandValidationConfig[], +): Promise< + | { valid: true; data: T & ODataRecordMetadata } + | { valid: false; error: ValidationError } +> { + // Extract OData metadata fields (don't validate them - include if present) + const { "@id": id, "@editLink": editLink, ...rest } = record; + + // Only include metadata fields if they actually exist and have values + const metadata: Partial = {}; + if (id) metadata["@id"] = id; + if (editLink) metadata["@editLink"] = editLink; + + // If no schema, just return the data with metadata + if (!schema) { + return { + valid: true, + data: { ...rest, ...metadata } as T & ODataRecordMetadata, + }; + } + + // Filter out FileMaker system fields that shouldn't be in responses by default + const { ROWID, ROWMODID, ...restWithoutSystemFields } = rest; + + // If selected fields are specified, validate only those fields + if (selectedFields && selectedFields.length > 0) { + const validatedRecord: Record = {}; + + for (const field of selectedFields) { + const fieldName = String(field); + const fieldSchema = schema[fieldName]; + + if (fieldSchema) { + const input = rest[fieldName]; + try { + let result = fieldSchema["~standard"].validate(input); + if (result instanceof Promise) result = await result; + + // if the `issues` field exists, the validation failed + if (result.issues) { + return { + valid: false, + error: new ValidationError( + `Validation failed for field '${fieldName}'`, + result.issues, + { + field: fieldName, + value: input, + cause: result.issues, + }, + ), + }; + } + + validatedRecord[fieldName] = result.value; + } catch (originalError) { + // If the validator throws directly, wrap it + return { + valid: false, + error: new ValidationError( + `Validation failed for field '${fieldName}'`, + [], + { + field: fieldName, + value: input, + cause: originalError, + }, + ), + }; + } + } else { + // For fields not in schema (like when explicitly selecting ROWID/ROWMODID) + // include them from the original response + validatedRecord[fieldName] = rest[fieldName]; + } + } + + // Validate expanded relations + if (expandConfigs && expandConfigs.length > 0) { + for (const expandConfig of expandConfigs) { + const expandValue = rest[expandConfig.relation]; + + // Check if expand field is missing + if (expandValue === undefined) { + // Check for inline error array (FileMaker returns errors inline when expand fails) + if (Array.isArray(rest.error) && rest.error.length > 0) { + // Extract error message from inline error + const errorDetail = rest.error[0]?.error; + if (errorDetail?.message) { + const errorMessage = errorDetail.message; + // Check if the error is related to this expand by checking if: + // 1. The error mentions the relation name, OR + // 2. The error mentions any of the selected fields + const isRelatedToExpand = + errorMessage + .toLowerCase() + .includes(expandConfig.relation.toLowerCase()) || + (expandConfig.selectedFields && + expandConfig.selectedFields.some((field) => + errorMessage.toLowerCase().includes(field.toLowerCase()), + )); + + if (isRelatedToExpand) { + return { + valid: false, + error: new ValidationError( + `Validation failed for expanded relation '${expandConfig.relation}': ${errorMessage}`, + [], + { + field: expandConfig.relation, + }, + ), + }; + } + } + } + // If no inline error but expand was expected, that's also an issue + // However, this might be a legitimate case (e.g., no related records) + // So we'll only fail if there's an explicit error array + } else { + // Original validation logic for when expand exists + if (Array.isArray(expandValue)) { + // Validate each item in the expanded array + const validatedExpandedItems: any[] = []; + for (let i = 0; i < expandValue.length; i++) { + const item = expandValue[i]; + const itemValidation = await validateRecord( + item, + expandConfig.targetSchema, + expandConfig.selectedFields as string[] | undefined, + expandConfig.nestedExpands, + ); + if (!itemValidation.valid) { + return { + valid: false, + error: new ValidationError( + `Validation failed for expanded relation '${expandConfig.relation}' at index ${i}: ${itemValidation.error.message}`, + itemValidation.error.issues, + { + field: expandConfig.relation, + cause: itemValidation.error.cause, + }, + ), + }; + } + validatedExpandedItems.push(itemValidation.data); + } + validatedRecord[expandConfig.relation] = validatedExpandedItems; + } else { + // Single expanded item (shouldn't happen in OData, but handle it) + const itemValidation = await validateRecord( + expandValue, + expandConfig.targetSchema, + expandConfig.selectedFields as string[] | undefined, + expandConfig.nestedExpands, + ); + if (!itemValidation.valid) { + return { + valid: false, + error: new ValidationError( + `Validation failed for expanded relation '${expandConfig.relation}': ${itemValidation.error.message}`, + itemValidation.error.issues, + { + field: expandConfig.relation, + cause: itemValidation.error.cause, + }, + ), + }; + } + validatedRecord[expandConfig.relation] = itemValidation.data; + } + } + } + } + + // Merge validated data with metadata + return { + valid: true, + data: { ...validatedRecord, ...metadata } as T & ODataRecordMetadata, + }; + } + + // Validate all fields in schema, but exclude ROWID/ROWMODID by default + const validatedRecord: Record = { ...restWithoutSystemFields }; + + for (const [fieldName, fieldSchema] of Object.entries(schema)) { + const input = rest[fieldName]; + try { + let result = fieldSchema["~standard"].validate(input); + if (result instanceof Promise) result = await result; + + // if the `issues` field exists, the validation failed + if (result.issues) { + return { + valid: false, + error: new ValidationError( + `Validation failed for field '${fieldName}'`, + result.issues, + { + field: fieldName, + value: input, + cause: result.issues, + }, + ), + }; + } + + validatedRecord[fieldName] = result.value; + } catch (originalError) { + // If the validator throws an error directly, catch and wrap it + // This preserves the original error instance for instanceof checks + return { + valid: false, + error: new ValidationError( + `Validation failed for field '${fieldName}'`, + [], + { + field: fieldName, + value: input, + cause: originalError, + }, + ), + }; + } + } + + // Validate expanded relations even when not using selected fields + if (expandConfigs && expandConfigs.length > 0) { + for (const expandConfig of expandConfigs) { + const expandValue = rest[expandConfig.relation]; + + // Check if expand field is missing + if (expandValue === undefined) { + // Check for inline error array (FileMaker returns errors inline when expand fails) + if (Array.isArray(rest.error) && rest.error.length > 0) { + // Extract error message from inline error + const errorDetail = rest.error[0]?.error; + if (errorDetail?.message) { + const errorMessage = errorDetail.message; + // Check if the error is related to this expand by checking if: + // 1. The error mentions the relation name, OR + // 2. The error mentions any of the selected fields + const isRelatedToExpand = + errorMessage + .toLowerCase() + .includes(expandConfig.relation.toLowerCase()) || + (expandConfig.selectedFields && + expandConfig.selectedFields.some((field) => + errorMessage.toLowerCase().includes(field.toLowerCase()), + )); + + if (isRelatedToExpand) { + return { + valid: false, + error: new ValidationError( + `Validation failed for expanded relation '${expandConfig.relation}': ${errorMessage}`, + [], + { + field: expandConfig.relation, + }, + ), + }; + } + } + } + // If no inline error but expand was expected, that's also an issue + // However, this might be a legitimate case (e.g., no related records) + // So we'll only fail if there's an explicit error array + } else { + // Original validation logic for when expand exists + if (Array.isArray(expandValue)) { + // Validate each item in the expanded array + const validatedExpandedItems: any[] = []; + for (let i = 0; i < expandValue.length; i++) { + const item = expandValue[i]; + const itemValidation = await validateRecord( + item, + expandConfig.targetSchema, + expandConfig.selectedFields as string[] | undefined, + expandConfig.nestedExpands, + ); + if (!itemValidation.valid) { + return { + valid: false, + error: new ValidationError( + `Validation failed for expanded relation '${expandConfig.relation}' at index ${i}: ${itemValidation.error.message}`, + itemValidation.error.issues, + { + field: expandConfig.relation, + cause: itemValidation.error.cause, + }, + ), + }; + } + validatedExpandedItems.push(itemValidation.data); + } + validatedRecord[expandConfig.relation] = validatedExpandedItems; + } else { + // Single expanded item (shouldn't happen in OData, but handle it) + const itemValidation = await validateRecord( + expandValue, + expandConfig.targetSchema, + expandConfig.selectedFields as string[] | undefined, + expandConfig.nestedExpands, + ); + if (!itemValidation.valid) { + return { + valid: false, + error: new ValidationError( + `Validation failed for expanded relation '${expandConfig.relation}': ${itemValidation.error.message}`, + itemValidation.error.issues, + { + field: expandConfig.relation, + cause: itemValidation.error.cause, + }, + ), + }; + } + validatedRecord[expandConfig.relation] = itemValidation.data; + } + } + } + } + + return { + valid: true, + data: { ...validatedRecord, ...metadata } as T & ODataRecordMetadata, + }; +} + +/** + * Validates a list response against a schema. + */ +export async function validateListResponse>( + response: any, + schema: Record | undefined, + selectedFields?: (keyof T)[], + expandConfigs?: ExpandValidationConfig[], +): Promise< + | { valid: true; data: (T & ODataRecordMetadata)[] } + | { valid: false; error: ResponseStructureError | ValidationError } +> { + // Check if response has the expected structure + if (!response || typeof response !== "object") { + return { + valid: false, + error: new ResponseStructureError("an object", response), + }; + } + + // Extract @context (for internal validation, but we won't return it) + const { "@context": context, value, ...rest } = response; + + if (!Array.isArray(value)) { + return { + valid: false, + error: new ResponseStructureError( + "'value' property to be an array", + value, + ), + }; + } + + // Validate each record in the array + const validatedRecords: (T & ODataRecordMetadata)[] = []; + + for (let i = 0; i < value.length; i++) { + const record = value[i]; + const validation = await validateRecord( + record, + schema, + selectedFields, + expandConfigs, + ); + + if (!validation.valid) { + return { + valid: false, + error: validation.error, + }; + } + + validatedRecords.push(validation.data); + } + + return { + valid: true, + data: validatedRecords, + }; +} + +/** + * Validates a single record response against a schema. + */ +export async function validateSingleResponse>( + response: any, + schema: Record | undefined, + selectedFields?: (keyof T)[], + expandConfigs?: ExpandValidationConfig[], + mode: "exact" | "maybe" = "maybe", +): Promise< + | { valid: true; data: (T & ODataRecordMetadata) | null } + | { valid: false; error: RecordCountMismatchError | ValidationError } +> { + // Check for multiple records (error in both modes) + if ( + response.value && + Array.isArray(response.value) && + response.value.length > 1 + ) { + return { + valid: false, + error: new RecordCountMismatchError( + mode === "exact" ? "one" : "at-most-one", + response.value.length, + ), + }; + } + + // Handle empty responses + if (!response || (response.value && response.value.length === 0)) { + if (mode === "exact") { + return { + valid: false, + error: new RecordCountMismatchError("one", 0), + }; + } + // mode === "maybe" - return null for empty + return { + valid: true, + data: null, + }; + } + + // Single record validation + const record = response.value?.[0] ?? response; + const validation = await validateRecord( + record, + schema, + selectedFields, + expandConfigs, + ); + + if (!validation.valid) { + return validation as { valid: false; error: ValidationError }; + } + + return { + valid: true, + data: validation.data, + }; +} diff --git a/packages/fmodata/tests/batch-error-messages.test.ts b/packages/fmodata/tests/batch-error-messages.test.ts new file mode 100644 index 00000000..32734e7e --- /dev/null +++ b/packages/fmodata/tests/batch-error-messages.test.ts @@ -0,0 +1,194 @@ +/** + * Batch Error Messages Test + * + * This test demonstrates that batch operations now properly parse and return + * FileMaker error responses instead of vague validation errors. + * + * BEFORE: "Invalid response structure: expected 'value' property to be an array" + * AFTER: "OData error: Table 'Purchase_Orders' not defined in database" with code "-1020" + */ + +import { describe, it, expect } from "vitest"; +import { z } from "zod/v4"; +import { + fmTableOccurrence, + textField, + isODataError, + isResponseStructureError, +} from "@proofkit/fmodata"; +import { createMockClient } from "./utils/test-setup"; + +/** + * Creates a mock fetch handler that returns a multipart batch response + */ +function createBatchMockFetch(batchResponseBody: string): typeof fetch { + return async ( + input: RequestInfo | URL, + init?: RequestInit, + ): Promise => { + // Extract boundary from the batch response body (first line starts with --) + const firstLine = + batchResponseBody.split("\r\n")[0] || + batchResponseBody.split("\n")[0] || + ""; + const boundary = firstLine.startsWith("--") + ? firstLine.substring(2) + : "batch_test"; + + return new Response(batchResponseBody, { + status: 200, + statusText: "OK", + headers: { + "Content-Type": `multipart/mixed; boundary=${boundary}`, + }, + }); + }; +} + +describe("Batch Error Messages - Improved Error Parsing", () => { + const client = createMockClient(); + + // Define simple schemas for batch testing + const addressesTO = fmTableOccurrence("addresses", { + id: textField().primaryKey(), + street: textField(), + }); + + const db = client.database("test_db"); + + it("should return ODataError with helpful message instead of vague ResponseStructureError", async () => { + // This simulates the exact scenario from the user's error: + // A batch with multiple queries where one uses a bad table name + const mockBatchResponse = [ + "--batch_boundary", + "Content-Type: application/http", + "", + "HTTP/1.1 200 Ok", + "Content-Type: application/json;charset=utf-8", + "", + JSON.stringify({ + "@odata.context": "test/$metadata#addresses", + value: [ + { + "@odata.id": "addresses('addr-1')", + id: "addr-1", + street: "123 Main St", + }, + ], + }), + "--batch_boundary", + "Content-Type: application/http", + "", + "HTTP/1.1 404 Not Found", + "Content-Type: application/json;charset=utf-8", + "", + JSON.stringify({ + error: { + code: "-1020", + message: "Table 'Purchase_Orders' not defined in database", + }, + }), + "--batch_boundary", + "Content-Type: application/http", + "", + "HTTP/1.1 200 Ok", + "Content-Type: application/json;charset=utf-8", + "", + JSON.stringify({ + "@odata.context": "test/$metadata#addresses", + value: [], + }), + "--batch_boundary--", + ].join("\r\n"); + + // Create three queries (simulating user's punchlistQuery, purchaseOrdersQuery, ticketsQuery) + const query1 = db.from(addressesTO).list(); + const query2 = db.from(addressesTO).list(); // Will fail with 404 in mock + const query3 = db.from(addressesTO).list(); + + // Execute batch with mock + const result = await db.batch([query1, query2, query3]).execute({ + fetchHandler: createBatchMockFetch(mockBatchResponse), + }); + + // Verify we got results + expect(result.results).toBeDefined(); + expect(result.results.length).toBe(3); + + const [r1, r2, r3] = result.results; + + // First query succeeded + expect(r1.error).toBeUndefined(); + expect(r1.data).toBeDefined(); + + // Second query failed with a HELPFUL error message + expect(r2.error).toBeDefined(); + expect(r2.data).toBeUndefined(); + + // ✅ BEFORE: This would be ResponseStructureError with vague message + // ✅ AFTER: This is now ODataError with the actual FileMaker error + expect(isResponseStructureError(r2.error)).toBe(false); // NOT a validation error + expect(isODataError(r2.error)).toBe(true); // IS an OData error + + if (isODataError(r2.error)) { + // The error now contains the actual FileMaker error details + expect(r2.error.code).toBe("-1020"); + expect(r2.error.message).toContain("Table 'Purchase_Orders' not defined"); + expect(r2.error.kind).toBe("ODataError"); + + // The error message is now helpful instead of: + // "Invalid response structure: expected 'value' property to be an array" + console.log("\n✅ Fixed Error Message:"); + console.log(` Code: ${r2.error.code}`); + console.log(` Message: ${r2.error.message}`); + console.log(` Kind: ${r2.error.kind}\n`); + } + + // Third query succeeded (not truncated in this mock) + expect(r3.error).toBeUndefined(); + expect(r3.data).toBeDefined(); + }); + + it("should handle error when table doesn't exist - the original use case", async () => { + // This is the exact scenario from the user's error message: + // They're querying a table that doesn't exist (Purchase_Orders with underscore instead of space) + const mockBatchResponse = [ + "--batch_boundary", + "Content-Type: application/http", + "", + "HTTP/1.1 404 Not Found", + "Content-Type: application/json;charset=utf-8", + "", + JSON.stringify({ + error: { + code: "-1020", + message: "Table 'Purchase_Orders' not defined in database", + }, + }), + "--batch_boundary--", + ].join("\r\n"); + + const badQuery = db.from(addressesTO).list(); + + const result = await db.batch([badQuery]).execute({ + fetchHandler: createBatchMockFetch(mockBatchResponse), + }); + + const [r1] = result.results; + + // Error should be an ODataError, not ResponseStructureError + expect(r1.error).toBeDefined(); + expect(isODataError(r1.error)).toBe(true); + + if (isODataError(r1.error)) { + // Verify we get the actual FileMaker error code and message + expect(r1.error.code).toBe("-1020"); + expect(r1.error.message).toBe( + "OData error: Table 'Purchase_Orders' not defined in database", + ); + + // This is much more helpful than: + // "Invalid response structure: expected 'value' property to be an array" + } + }); +}); diff --git a/packages/fmodata/tests/batch.test.ts b/packages/fmodata/tests/batch.test.ts new file mode 100644 index 00000000..486dcf75 --- /dev/null +++ b/packages/fmodata/tests/batch.test.ts @@ -0,0 +1,335 @@ +/** + * Batch Operations Tests + * + * Tests for batch operation parsing and error handling using mocked responses. + * These tests don't require a live server connection. + */ + +import { describe, it, expect } from "vitest"; +import { z } from "zod/v4"; +import { + fmTableOccurrence, + textField, + BatchTruncatedError, + isBatchTruncatedError, + isODataError, + ODataError, + eq, + isNotNull, +} from "@proofkit/fmodata"; +import { createMockClient } from "./utils/test-setup"; + +/** + * Creates a mock fetch handler that returns a multipart batch response + */ +function createBatchMockFetch(batchResponseBody: string): typeof fetch { + return async ( + input: RequestInfo | URL, + init?: RequestInit, + ): Promise => { + // Extract boundary from the batch response body (first line starts with --) + const firstLine = + batchResponseBody.split("\r\n")[0] || + batchResponseBody.split("\n")[0] || + ""; + const boundary = firstLine.startsWith("--") + ? firstLine.substring(2) + : "batch_test"; + + return new Response(batchResponseBody, { + status: 200, + statusText: "OK", + headers: { + "content-type": `multipart/mixed; boundary=${boundary}`, + }, + }); + }; +} + +describe("Batch Operations - Mock Tests", () => { + const client = createMockClient(); + + // Define simple schemas for batch testing + const contactsTO = fmTableOccurrence("contacts", { + PrimaryKey: textField().primaryKey(), + name: textField(), + hobby: textField(), + }); + + const usersTO = fmTableOccurrence("users", { + id: textField().primaryKey(), + name: textField(), + }); + + const db = client.database("test_db"); + + describe("Mixed success/failure responses", () => { + it("should handle batch response where first succeeds, second fails (404), and third is truncated", async () => { + // This mock response simulates a real FileMaker batch response where: + // 1. First query succeeds with data + // 2. Second query fails with 404 - table not found + // 3. Third query is never executed (truncated) because FileMaker stops on error + const mockBatchResponse = [ + "--b_test_boundary", + "Content-Type: application/http", + "", + "HTTP/1.1 200 Ok", + "Content-Type: application/json;charset=utf-8", + "Content-Length: 200", + "", + JSON.stringify({ + "@odata.context": "test/$metadata#contacts", + value: [ + { + "@odata.id": "contacts('id-1')", + PrimaryKey: "id-1", + name: "First Success Record", + hobby: "Testing", + }, + ], + }), + "--b_test_boundary", + "Content-Type: application/http", + "", + "HTTP/1.1 404 Not Found", + "Content-Type: application/json;charset=utf-8", + "Content-Length: 89", + "", + JSON.stringify({ + error: { + code: "-1020", + message: "Table 'Purchase_Orders' not defined in database", + }, + }), + "--b_test_boundary--", + ].join("\r\n"); + + // Create three queries + const query1 = db + .from(contactsTO) + .list() + .where(eq(contactsTO.hobby, "Testing")); + const query2 = db + .from(usersTO) + .list() + .where(eq(usersTO.name, "NonExistent")); + const query3 = db + .from(contactsTO) + .list() + .where(isNotNull(contactsTO.name)); + + // Execute batch with mock + const result = await db.batch([query1, query2, query3]).execute({ + fetchHandler: createBatchMockFetch(mockBatchResponse), + }); + + // Verify we got a BatchResult + expect(result).toBeDefined(); + expect(result.results).toBeDefined(); + expect(result.results.length).toBe(3); + + const [r1, r2, r3] = result.results; + + // First result should be successful + expect(r1.error).toBeUndefined(); + expect(r1.data).toBeDefined(); + expect(Array.isArray(r1.data)).toBe(true); + expect((r1.data as any[]).length).toBeGreaterThan(0); + expect(r1.status).toBe(200); + + // Second result should have an error (404) + expect(r2.error).toBeDefined(); + expect(r2.data).toBeUndefined(); + expect(r2.status).toBe(404); + + // Verify the error is an ODataError with proper details + expect(isODataError(r2.error)).toBe(true); + if (isODataError(r2.error)) { + expect(r2.error.code).toBe("-1020"); + expect(r2.error.message).toContain( + "Table 'Purchase_Orders' not defined", + ); + expect(r2.error.kind).toBe("ODataError"); + } + + // Third result should be truncated (never executed due to error in second) + expect(r3.error).toBeDefined(); + expect(r3.data).toBeUndefined(); + expect(r3.status).toBe(0); + if (r3.error && isBatchTruncatedError(r3.error)) { + expect(r3.error.operationIndex).toBe(2); + expect(r3.error.failedAtIndex).toBe(1); + } + expect(isBatchTruncatedError(r3.error)).toBe(true); + + // Verify summary statistics + expect(result.successCount).toBe(1); + expect(result.errorCount).toBe(2); + expect(result.truncated).toBe(true); + expect(result.firstErrorIndex).toBe(1); + }); + + it("should handle batch response where all three queries succeed", async () => { + const mockBatchResponse = [ + "--b_success_boundary", + "Content-Type: application/http", + "", + "HTTP/1.1 200 Ok", + "Content-Type: application/json;charset=utf-8", + "Content-Length: 200", + "", + JSON.stringify({ + "@odata.context": "test/$metadata#contacts", + value: [ + { PrimaryKey: "id-1", name: "Contact 1", hobby: "Reading" }, + { PrimaryKey: "id-2", name: "Contact 2", hobby: "Writing" }, + ], + }), + "--b_success_boundary", + "Content-Type: application/http", + "", + "HTTP/1.1 200 Ok", + "Content-Type: application/json;charset=utf-8", + "Content-Length: 150", + "", + JSON.stringify({ + "@odata.context": "test/$metadata#users", + value: [{ id: "user-1", name: "User 1" }], + }), + "--b_success_boundary", + "Content-Type: application/http", + "", + "HTTP/1.1 200 Ok", + "Content-Type: application/json;charset=utf-8", + "Content-Length: 180", + "", + JSON.stringify({ + "@odata.context": "test/$metadata#contacts", + value: [{ PrimaryKey: "id-3", name: "Contact 3", hobby: "Gaming" }], + }), + "--b_success_boundary--", + ].join("\r\n"); + + const query1 = db + .from(contactsTO) + .list() + .where(eq(contactsTO.hobby, "Reading")); + const query2 = db.from(usersTO).list().top(1); + const query3 = db + .from(contactsTO) + .list() + .where(eq(contactsTO.hobby, "Gaming")); + + const result = await db.batch([query1, query2, query3]).execute({ + fetchHandler: createBatchMockFetch(mockBatchResponse), + }); + + expect(result.results).toBeDefined(); + expect(result.results.length).toBe(3); + + const [r1, r2, r3] = result.results; + + // First query: contacts with hobby=Reading + expect(r1.error).toBeUndefined(); + expect(Array.isArray(r1.data)).toBe(true); + expect((r1.data as any[]).length).toBe(2); + expect(r1.status).toBe(200); + + // Second query: users + expect(r2.error).toBeUndefined(); + expect(Array.isArray(r2.data)).toBe(true); + expect((r2.data as any[]).length).toBe(1); + expect(r2.status).toBe(200); + + // Third query: contacts with hobby=Gaming + expect(r3.error).toBeUndefined(); + expect(Array.isArray(r3.data)).toBe(true); + expect((r3.data as any[]).length).toBe(1); + expect(r3.status).toBe(200); + + // Verify summary statistics + expect(result.successCount).toBe(3); + expect(result.errorCount).toBe(0); + expect(result.truncated).toBe(false); + expect(result.firstErrorIndex).toBeNull(); + }); + + it("should handle batch response where middle query fails with empty result set", async () => { + // This simulates when a filter returns no results (not an error, just empty) + const mockBatchResponse = [ + "--b_empty_boundary", + "Content-Type: application/http", + "", + "HTTP/1.1 200 Ok", + "Content-Type: application/json;charset=utf-8", + "Content-Length: 100", + "", + JSON.stringify({ + "@odata.context": "test/$metadata#contacts", + value: [{ PrimaryKey: "id-1", name: "Found Record", hobby: null }], + }), + "--b_empty_boundary", + "Content-Type: application/http", + "", + "HTTP/1.1 200 Ok", + "Content-Type: application/json;charset=utf-8", + "Content-Length: 50", + "", + JSON.stringify({ + "@odata.context": "test/$metadata#users", + value: [], // Empty result set + }), + "--b_empty_boundary", + "Content-Type: application/http", + "", + "HTTP/1.1 200 Ok", + "Content-Type: application/json;charset=utf-8", + "Content-Length: 100", + "", + JSON.stringify({ + "@odata.context": "test/$metadata#contacts", + value: [{ PrimaryKey: "id-2", name: "Another Record", hobby: null }], + }), + "--b_empty_boundary--", + ].join("\r\n"); + + const query1 = db.from(contactsTO).list().top(1); + const query2 = db + .from(usersTO) + .list() + .where(eq(usersTO.name, "NonExistent")); + const query3 = db.from(contactsTO).list().top(1); + + const result = await db.batch([query1, query2, query3]).execute({ + fetchHandler: createBatchMockFetch(mockBatchResponse), + }); + + expect(result.results).toBeDefined(); + expect(result.results.length).toBe(3); + + const [r1, r2, r3] = result.results; + + expect(r1.error).toBeUndefined(); + expect(Array.isArray(r1.data)).toBe(true); + expect((r1.data as any[]).length).toBe(1); + expect(r1.status).toBe(200); + + // Empty result set should still be a valid empty array + expect(r2.error).toBeUndefined(); + expect(Array.isArray(r2.data)).toBe(true); + expect((r2.data as any[]).length).toBe(0); + expect(r2.status).toBe(200); + + expect(r3.error).toBeUndefined(); + expect(Array.isArray(r3.data)).toBe(true); + expect((r3.data as any[]).length).toBe(1); + expect(r3.status).toBe(200); + + // Verify summary statistics + expect(result.successCount).toBe(3); + expect(result.errorCount).toBe(0); + expect(result.truncated).toBe(false); + expect(result.firstErrorIndex).toBeNull(); + }); + }); +}); diff --git a/packages/fmodata/tests/delete.test.ts b/packages/fmodata/tests/delete.test.ts new file mode 100644 index 00000000..6c236bc1 --- /dev/null +++ b/packages/fmodata/tests/delete.test.ts @@ -0,0 +1,243 @@ +/** + * Delete Tests + * + * Tests for the delete() method on EntitySet instances. + * This validates type safety, builder pattern, and operation modes. + */ + +import { describe, it, expect, expectTypeOf, vi } from "vitest"; +import { z } from "zod/v4"; +import { + fmTableOccurrence, + textField, + numberField, + type InferTableSchema, + eq, + and, + lt, +} from "@proofkit/fmodata"; +import { DeleteBuilder } from "@proofkit/fmodata/client/delete-builder"; +import { ExecutableDeleteBuilder } from "@proofkit/fmodata/client/delete-builder"; +import { simpleMock } from "./utils/mock-fetch"; +import { createMockClient } from "./utils/test-setup"; + +describe("delete method", () => { + const client = createMockClient(); + + const usersTO = fmTableOccurrence("users", { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField().notNull(), + active: numberField().readValidator(z.coerce.boolean()).notNull(), + lastLogin: textField(), + }); + + type UserSchema = InferTableSchema; + + describe("builder pattern", () => { + it("should return DeleteBuilder when delete() is called", () => { + const db = client.database("test_db"); + + const result = db.from(usersTO).delete(); + expect(result).toBeInstanceOf(DeleteBuilder); + }); + + it("should not have execute() on initial DeleteBuilder", () => { + const db = client.database("test_db"); + + const deleteBuilder = db.from(usersTO).delete(); + + // Type check: execute should not exist on DeleteBuilder + expectTypeOf(deleteBuilder).not.toHaveProperty("execute"); + }); + + it("should return ExecutableDeleteBuilder after byId()", () => { + const db = client.database("test_db"); + + const result = db.from(usersTO).delete().byId("user-123"); + expect(result).toBeInstanceOf(ExecutableDeleteBuilder); + }); + + it("should return ExecutableDeleteBuilder after where()", () => { + const db = client.database("test_db"); + + const result = db + .from(usersTO) + .delete() + .where((q) => q.where(eq(usersTO.active, 0))); + expect(result).toBeInstanceOf(ExecutableDeleteBuilder); + }); + + it("should have execute() on ExecutableDeleteBuilder", () => { + const db = client.database("test_db"); + + const executableBuilder = db.from(usersTO).delete().byId("user-123"); + + // Type check: execute should exist + expectTypeOf(executableBuilder).toHaveProperty("execute"); + }); + }); + + describe("delete by ID", () => { + it("should generate correct URL for delete by ID", () => { + const db = client.database("test_db"); + + const deleteBuilder = db.from(usersTO).delete().byId("user-123"); + const config = deleteBuilder.getRequestConfig(); + + expect(config.method).toBe("DELETE"); + expect(config.url).toBe("/test_db/users('user-123')"); + }); + + it("should return deletedCount result type", async () => { + const db = client.database("test_db"); + + db.from(usersTO).delete().byId("user-123"); + }); + + it("should execute delete by ID and return count", async () => { + // Mock the fetch to return a count + const mockFetch = simpleMock({ + status: 204, + headers: { "fmodata.affected_rows": "1" }, + body: null, + }); + + const db = client.database("test_db"); + + const result = await db + .from(usersTO) + .delete() + .byId("user-123") + .execute({ fetchHandler: mockFetch }); + + expect(result.error).toBeUndefined(); + expect(result.data).toEqual({ deletedCount: 1 }); + }); + }); + + describe("delete by filter", () => { + it("should generate correct URL for delete by filter", () => { + const db = client.database("test_db"); + + const deleteBuilder = db + .from(usersTO) + .delete() + .where((q) => q.where(eq(usersTO.active, 0))); + + const config = deleteBuilder.getRequestConfig(); + + expect(config.method).toBe("DELETE"); + expect(config.url).toContain("/test_db/users"); + expect(config.url).toContain("$filter"); + expect(config.url).toContain("active"); + }); + + it("should support complex filters with QueryBuilder", () => { + const db = client.database("test_db"); + + const deleteBuilder = db + .from(usersTO) + .delete() + .where((q) => + q.where( + and(eq(usersTO.active, 0), lt(usersTO.lastLogin, "2023-01-01")), + ), + ); + + const config = deleteBuilder.getRequestConfig(); + + expect(config.method).toBe("DELETE"); + expect(config.url).toContain("$filter"); + }); + + it("should support QueryBuilder chaining in where callback", () => { + const db = client.database("test_db"); + + const deleteBuilder = db + .from(usersTO) + .delete() + .where((q) => q.where(eq(usersTO.active, 0)).top(10)); + + const config = deleteBuilder.getRequestConfig(); + + expect(config.method).toBe("DELETE"); + expect(config.url).toContain("$filter"); + expect(config.url).toContain("$top"); + }); + + it("should return deletedCount result type for filter-based delete", async () => { + const db = client.database("test_db"); + db.from(usersTO); + + db.from(usersTO) + .delete() + .where((q) => q.where(eq(usersTO.active, 0))); + }); + + it("should execute delete by filter and return count", async () => { + // Mock the fetch to return a count + const mockFetch = simpleMock({ + status: 204, + headers: { "fmodata.affected_rows": "5" }, + body: null, + }); + + const db = client.database("test_db"); + + const result = await db + .from(usersTO) + .delete() + .where((q) => q.where(eq(usersTO.active, 0))) + .execute({ fetchHandler: mockFetch }); + + expect(result.error).toBeUndefined(); + expect(result.data).toEqual({ deletedCount: 5 }); + }); + }); + + describe("type safety", () => { + it("should enforce type-safe filter properties", () => { + const db = client.database("test_db"); + + // This should work - valid property + db.from(usersTO) + .delete() + .where((q) => q.where(eq(usersTO.active, 0))); + }); + + it("should provide type-safe QueryBuilder in where callback", () => { + const db = client.database("test_db"); + + db.from(usersTO) + .delete() + .where((q) => { + // Type check: q should have where, orderBy, top, skip methods + expectTypeOf(q).toHaveProperty("where"); + expectTypeOf(q).toHaveProperty("orderBy"); + expectTypeOf(q).toHaveProperty("top"); + expectTypeOf(q).toHaveProperty("skip"); + + return q.where(eq(usersTO.active, 0)); + }); + }); + }); + + describe("error handling", () => { + it("should return error on failed delete", async () => { + const mockFetch = vi.fn().mockRejectedValue(new Error("Network error")); + + const db = client.database("test_db"); + + const result = await db + .from(usersTO) + .delete() + .byId("user-123") + .execute({ fetchHandler: mockFetch as any }); + + expect(result.data).toBeUndefined(); + expect(result.error).toBeInstanceOf(Error); + expect(result.error?.message).toBe("Network error"); + }); + }); +}); diff --git a/packages/fmodata/tests/e2e.test.ts b/packages/fmodata/tests/e2e.test.ts new file mode 100644 index 00000000..5431c46e --- /dev/null +++ b/packages/fmodata/tests/e2e.test.ts @@ -0,0 +1,922 @@ +/** + * End-to-End Tests + * + * Comprehensive E2E tests against a live FileMaker OData server. + * Tests basic operations, entity IDs, and batch operations. + */ + +import { describe, it, afterEach, expect, assert, expectTypeOf } from "vitest"; +import { + FMServerConnection, + fmTableOccurrence, + Metadata, + textField, + contains, + eq, + isNotNull, +} from "@proofkit/fmodata"; +import { jsonCodec } from "./utils/helpers"; +import { z } from "zod/v4"; +import { mockResponses } from "./fixtures/responses"; +import { createMockFetch, simpleMock } from "./utils/mock-fetch"; +import { + serverUrl, + username, + password, + apiKey, + database, + contacts, + users, + contactsTOWithIds, +} from "./e2e/setup"; + +if (!serverUrl) { + throw new Error("FMODATA_SERVER_URL environment variable is required"); +} + +if (!database) { + throw new Error("FMODATA_DATABASE environment variable is required"); +} + +// Track records created during tests for cleanup +const createdRecordIds: string[] = []; +const createdMarkers: string[] = []; + +afterEach(async () => { + if (!apiKey) return; // Skip cleanup if not running basic operations tests + + const connection = new FMServerConnection({ + serverUrl: serverUrl!, + auth: { apiKey }, + }); + const db = connection.database(database!); + + const entitySet = db.from(contacts); + + // Delete records by ID + for (const recordId of createdRecordIds) { + try { + await entitySet.delete().byId(recordId).execute(); + } catch (error) { + // Ignore errors - record may have already been deleted + console.warn(`Failed to delete record ${recordId}:`, error); + } + } + createdRecordIds.length = 0; + + // Delete records by marker/name pattern + for (const marker of createdMarkers) { + try { + await entitySet + .delete() + .where((q) => q.where(contains(contacts.name, marker))) + .execute(); + } catch (error) { + // Ignore errors - records may have already been deleted + console.warn(`Failed to delete records with marker ${marker}:`, error); + } + } + createdMarkers.length = 0; +}); + +describe("Basic E2E Operations", () => { + if (!apiKey) { + it.skip("API key required for basic operations tests", () => {}); + return; + } + + const connection = new FMServerConnection({ + serverUrl: serverUrl!, + auth: { apiKey }, + }); + const db = connection.database(database!); + + it("should connect to the server and list records", async () => { + const entitySet = db.from(contacts); + + // Test basic list query (limit to 10 records to avoid timeout) + const result = await entitySet.list().top(10).execute(); + if (!result.data) { + console.log(result.error); + throw new Error("Expected data to be defined"); + } + + assert(result.data, "Expected data to be defined"); + + // Verify we got a response + expect(Array.isArray(result.data)).toBe(true); + }); + + it("should run a script and get back result", async () => { + const { resultCode, result } = await db.runScript("return-input", { + scriptParam: "hello world", + }); + + expect(resultCode).toBe(0); + expect(result).toBe("hello world"); + + const randomNumber = Math.floor(10000 + Math.random() * 90000); + const { resultCode: resultCode2, result: result2 } = await db.runScript( + "return-input", + { + scriptParam: randomNumber, + }, + ); + expect(resultCode2).toBe(0); + expect(result2).toBe(randomNumber.toString()); + }); + + it("should transform the script result if a schema is provided", async () => { + const { resultCode, result } = await db.runScript("return-input", { + scriptParam: { hello: "world" }, + resultSchema: jsonCodec( + z + .object({ hello: z.string() }) + .transform((data) => ({ ...data, world: "world" })), + ), + }); + expect(resultCode).toBe(0); + expect(result).toStrictEqual({ hello: "world", world: "world" }); + }); + + it("should insert a record and verify count increased", async () => { + const entitySet = db.from(contacts); + + // Get initial count + const initialCountResult = await entitySet.list().count().execute(); + assert(initialCountResult.data, "Expected data to be defined"); + const initialCount = initialCountResult.data; + + // Insert a new record with unique name to avoid conflicts + const uniqueName = `Test User ${Date.now()}`; + const insertResult = await entitySet + .insert({ + name: uniqueName, + }) + .execute(); + + assert(insertResult.data, "Expected data to be defined"); + + const insertedRecord = insertResult.data; + + // Track record ID for cleanup (use PrimaryKey from the schema) + const recordId = insertedRecord.PrimaryKey; + if (recordId) { + createdRecordIds.push(recordId); + } + + // Verify the record was inserted with correct data + expect(insertedRecord.name).toBe(uniqueName); + + // Get count after insert + const newCountResult = await entitySet.list().count().execute(); + assert(newCountResult.data, "Expected data to be defined"); + const newCount = newCountResult.data; + + // Verify count increased by 1 + expect(newCount).toBe(initialCount + 1); + }); + + it("should update a record by ID and return count", async () => { + const entitySet = db.from(contacts); + + // First, insert a record to update + const uniqueName = `Update Test ${Date.now()}`; + const insertResult = await entitySet + .insert({ + name: uniqueName, + }) + .execute(); + + assert(insertResult.data, "Expected insert data to be defined"); + const primaryKey = insertResult.data.PrimaryKey; + assert(primaryKey, "Expected PrimaryKey to be defined"); + + // Track record ID for cleanup + createdRecordIds.push(primaryKey); + + // Update the record + const updatedName = `${uniqueName} Updated`; + const updateResult = await entitySet + .update({ name: updatedName }) + .byId(primaryKey) + .execute(); + + assert(updateResult.data, "Expected update data to be defined"); + expect(updateResult.error).toBeUndefined(); + expect(updateResult.data.updatedCount).toBe(1); + }); + + it("should update multiple records by filter and return count", async () => { + const entitySet = db.from(contacts); + + // Insert multiple records with a unique marker + const marker = `Bulk Update ${Date.now()}`; + await entitySet.insert({ name: `${marker} - 1` }).execute(); + await entitySet.insert({ name: `${marker} - 2` }).execute(); + await entitySet.insert({ name: `${marker} - 3` }).execute(); + + // Track marker for cleanup + createdMarkers.push(marker); + + // Update all records with the marker + const updateResult = await entitySet + .update({ hobby: "Updated Hobby" }) + .where((q) => q.where(contains(contacts.name, marker))) + .execute(); + + assert(updateResult.data, "Expected update data to be defined"); + expect(updateResult.error).toBeUndefined(); + expect(updateResult.data.updatedCount).toBeGreaterThanOrEqual(3); + }); + + it("should delete a record by ID and return count", async () => { + const entitySet = db.from(contacts); + + // First, insert a record to delete + const uniqueName = `Delete Test ${Date.now()}`; + const insertResult = await entitySet + .insert({ + name: uniqueName, + }) + .execute(); + + assert(insertResult.data, "Expected insert data to be defined"); + const recordId = insertResult.data.PrimaryKey; + assert(recordId, "Expected PrimaryKey to be defined"); + + // Get count before delete + const beforeCount = await entitySet.list().count().execute(); + assert(beforeCount.data, "Expected count data to be defined"); + + // Delete the record + const deleteQuery = entitySet.delete().byId(recordId); + const deleteResult = await deleteQuery.execute(); + + assert(deleteResult.data, "Expected delete data to be defined"); + expect(deleteResult.error).toBeUndefined(); + expect(deleteResult.data.deletedCount).toBe(1); + + // Verify count decreased + const afterCount = await entitySet.list().count().execute(); + assert(afterCount.data, "Expected count data to be defined"); + expect(afterCount.data).toBe(beforeCount.data - 1); + }); + + it("should delete multiple records by filter and return count", async () => { + const entitySet = db.from(contacts); + + // Insert multiple records with a unique marker + const marker = `Bulk Delete ${Date.now()}`; + await entitySet.insert({ name: `${marker} - 1` }).execute(); + await entitySet.insert({ name: `${marker} - 2` }).execute(); + await entitySet.insert({ name: `${marker} - 3` }).execute(); + + // Get count before delete + const beforeCount = await entitySet.list().count().execute(); + assert(beforeCount.data, "Expected count data to be defined"); + + // Delete all records with the marker + const deleteResult = await entitySet + .delete() + .where((q) => q.where(contains(contacts.name, marker))) + .execute(); + + assert(deleteResult.data, "Expected delete data to be defined"); + expect(deleteResult.error).toBeUndefined(); + expect(deleteResult.data.deletedCount).toBeGreaterThanOrEqual(3); + + // Verify count decreased + const afterCount = await entitySet.list().count().execute(); + assert(afterCount.data, "Expected count data to be defined"); + expect(afterCount.data).toBeLessThanOrEqual( + beforeCount.data - deleteResult.data.deletedCount, + ); + }); + + it("should properly type and validate expanded properties", async () => { + const entitySet = db.from(contacts); + + // Test expand with type safety + const result = await entitySet + .list() + .expand(users, (b: any) => b.select({ name: users.name })) + .execute(); + + // Verify we got a response + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + if (!result.data) throw new Error("Expected result.data to be defined"); + expect(Array.isArray(result.data)).toBe(true); + + const firstRecord = result.data[0]; + assert(firstRecord, "Should have a first record"); + + expect(firstRecord.users).toBeDefined(); + expect(firstRecord.users.length).toBeGreaterThan(0); + }); + + it("the server should validate all fields in the expand are valid", async () => { + const notRealUsers = fmTableOccurrence("users", { + not_real_field: textField(), + }); + const result = await db + .from(contacts) + .list() + .expand(users, (b: any) => { + return b.select({ notReal: notRealUsers.not_real_field }); + }) + .execute({ + fetchHandler: createMockFetch( + mockResponses["list with invalid expand"], + ), + }); + + expect(result.error).toBeDefined(); + expect(result.error?.message).toContain("not_real_field"); + }); + + describe("Metadata", () => { + it("should retrieve database metadata in JSON format by default", async () => { + const metadata = await db.getMetadata(); + + // Type checks: default is JSON (Metadata type) + expectTypeOf(metadata).not.toBeString(); + expectTypeOf(metadata).not.toBeUnknown(); + expectTypeOf(metadata).not.toBeAny(); + expectTypeOf(metadata).toEqualTypeOf(); + + // Runtime checks + expect(metadata).toBeDefined(); + expect(typeof metadata).toBe("object"); + }); + + it("should retrieve database metadata in JSON format when explicitly specified", async () => { + const metadata = await db.getMetadata({ format: "json" }); + + // Type checks: explicit JSON (Metadata type) + expectTypeOf(metadata).not.toBeString(); + expectTypeOf(metadata).not.toBeUnknown(); + expectTypeOf(metadata).not.toBeAny(); + expectTypeOf(metadata).toEqualTypeOf(); + + // Runtime checks + expect(metadata).toBeDefined(); + expect(typeof metadata).toBe("object"); + + expect(metadata).toHaveProperty("contacts"); + }); + + it("should retrieve database metadata in XML format", async () => { + const metadata = await db.getMetadata({ format: "xml" }); + + // Type checks: XML format returns string + expectTypeOf(metadata).not.toBeUnknown(); + expectTypeOf(metadata).not.toBeAny(); + expectTypeOf(metadata).toEqualTypeOf(); + + // Runtime checks + expect(metadata).toBeDefined(); + expect(typeof metadata).toBe("string"); + expect(metadata).toContain(" { + if (!username || !password) { + it.skip("Username and password required for entity IDs tests", () => {}); + return; + } + + const connection = new FMServerConnection({ + serverUrl: serverUrl!, + auth: { username, password }, + }); + + const db = connection.database(database!, { useEntityIds: true }); + + const dbWithoutIds = connection.database(database!, { + useEntityIds: false, + }); + + it("should not use entity IDs in the queryString if useEntityIds is false", async () => { + const query = dbWithoutIds + .from(contactsTOWithIds) + .list() + .select({ + name_renamed: contactsTOWithIds.name_renamed, + hobby: contactsTOWithIds.hobby, + }) + .expand(users) + .where(eq(contactsTOWithIds.hobby, "Testing")) + .top(1); + const queryString = query.getQueryString(); + console.log(queryString); + expect(queryString).not.toContain("FMFID"); + expect(queryString).not.toContain("FMTID"); + }); + + it("should replace field names in select statements with entity IDs", async () => { + const query = db + .from(contactsTOWithIds) + .list() + .select({ + name_renamed: contactsTOWithIds.name_renamed, + hobby: contactsTOWithIds.hobby, + }) + .top(1); + + const queryString = query.getQueryString(); + expect(queryString).toContain("25770868870"); + expect(queryString).toContain("30065836166"); + expect(queryString).not.toContain("name_renamed"); + expect(queryString).not.toContain("hobby"); + }); + + it("should list records with entity IDs", async () => { + let rawResponseData: any; + + let capturedPreferHeader: string | null = null; + db.from(contactsTOWithIds) + .list() + .top(1) + .execute({ + hooks: { + before: async (req: any) => { + const headers = req.headers; + capturedPreferHeader = headers.get("Prefer"); + return; + }, + }, + fetchHandler: simpleMock({ status: 200, body: { value: [{}] } }), + }); + expect(capturedPreferHeader).toBe("fmodata.entity-ids"); + + const result = await db + .from(contactsTOWithIds) + .list() + .top(1) + .execute({ + hooks: { + after: async (req: any, res: any) => { + // Clone the response so we can read it without consuming the original + const clonedRes = res.clone(); + rawResponseData = await clonedRes.json(); + }, + }, + }); + + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + if (!result.data) throw new Error("Expected result.data to be defined"); + expect(Array.isArray(result.data)).toBe(true); + + const firstRecord = result.data[0]; + assert(firstRecord, "Should have a first record"); + + // Verify the raw response contains field IDs (FMFID:xxx format) + expect(rawResponseData).toBeDefined(); + expect(rawResponseData.value).toBeDefined(); + expect(Array.isArray(rawResponseData.value)).toBe(true); + + // Check that the raw response uses field IDs (not field names) + const rawFirstRecord = rawResponseData.value[0]; + const rawFieldKeys = Object.keys(rawFirstRecord); + + // Assert that raw response has FMFIDs and NOT field names + expect(rawFieldKeys).toContain("FMFID:25770868870"); // should be "name" + expect(rawFieldKeys).not.toContain("name"); + expect(rawFieldKeys).toContain("FMFID:30065836166"); // should be "hobby" + expect(rawFieldKeys).not.toContain("hobby"); + expect(rawFieldKeys).toContain("FMFID:38655770758"); // should be "id_user" + expect(rawFieldKeys).not.toContain("id_user"); + expect(rawFieldKeys).toContain("FMFID:4296032390"); // should be "PrimaryKey" + expect(rawFieldKeys).not.toContain("PrimaryKey"); + expect(rawFieldKeys).toContain("FMFID:8590999686"); // should be "CreationTimestamp" + expect(rawFieldKeys).not.toContain("CreationTimestamp"); + + // Verify that the transformed data uses field names (not IDs) + const transformedFieldKeys = Object.keys(firstRecord); + expect(transformedFieldKeys).toContain("name_renamed"); + expect(transformedFieldKeys).toContain("hobby"); + expect(transformedFieldKeys).toContain("id_user"); + expect(transformedFieldKeys).toContain("PrimaryKey"); + expect(transformedFieldKeys).toContain("CreationTimestamp"); + expect(transformedFieldKeys).not.toContain("FMFID:25770868870"); + }); + + it("should not transform if the feature is disabled (even if ids are provided)", async () => { + let rawResponseData: any; + + const query = dbWithoutIds + .from(contacts) + .list() + .select({ hobby: contacts.hobby }) + .top(1); + + // should not use ids when useEntityIds is false + expect(query.getQueryString()).toContain("contacts"); + expect(query.getQueryString()).not.toContain("FMFID:"); + expect(query.getQueryString()).not.toContain("FMTID:"); + + const result = await query.execute({ + hooks: { + after: async (req: any, res: any) => { + // Clone the response so we can read it without consuming the original + const clonedRes = res.clone(); + rawResponseData = await clonedRes.json(); + }, + }, + }); + + if (result.error) { + console.error(result.error); + } + + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + if (!result.data) throw new Error("Expected result.data to be defined"); + expect(Array.isArray(result.data)).toBe(true); + + const firstRecord = result.data[0]; + assert(firstRecord, "Should have a first record"); + + // Verify the raw response contains field IDs (FMFID:xxx format) + expect(rawResponseData).toBeDefined(); + expect(rawResponseData.value).toBeDefined(); + expect(Array.isArray(rawResponseData.value)).toBe(true); + + // Check that the raw response uses field IDs (not field names) + const rawFirstRecord = rawResponseData.value[0]; + const rawFieldKeys = Object.keys(rawFirstRecord); + + // Assert that raw response has field names and NOT FMFIDs (since useEntityIds is false) + expect(rawFieldKeys).not.toContain("FMFID:"); // should NOT have FMFIDs + expect(rawFieldKeys).toContain("hobby"); + + // Verify that the transformed data uses field names (not IDs) + const transformedFieldKeys = Object.keys(firstRecord); + expect(transformedFieldKeys).toContain("hobby"); + expect(transformedFieldKeys).not.toContain("FMFID:"); + }); + + it("should properly type and validate expanded properties with entity IDs", async () => { + // get the first record + const result = await db + .from(contactsTOWithIds) + .list() + .top(1) + .select({ PrimaryKey: contactsTOWithIds.PrimaryKey }) + .execute(); + + const firstRecord = result.data?.[0]; + assert(firstRecord, "Should have a first record"); + if (!firstRecord.PrimaryKey) { + throw new Error("Expected PrimaryKey to be defined"); + } + + // now expand the users property + const expandedResult = await db + .from(contactsTOWithIds) + .get(firstRecord.PrimaryKey) + .expand(users); + + // should use the table id in the query string + expect(expandedResult.getQueryString()).not.toContain("/contacts("); + }); +}); + +describe("Batch Operations", () => { + if (!username || !password) { + it.skip("Username and password required for batch operations tests", () => {}); + return; + } + + const connection = new FMServerConnection({ + serverUrl: serverUrl!, + auth: { username, password }, + }); + + const db = connection.database(database!); + + const batchCreatedRecordIds: string[] = []; + + afterEach(async () => { + const entitySet = db.from(contacts); + + // Delete records by ID + for (const recordId of batchCreatedRecordIds) { + try { + await entitySet.delete().byId(recordId).execute(); + } catch (error) { + // Ignore errors - record may have already been deleted + console.warn(`Failed to delete record ${recordId}:`, error); + } + } + batchCreatedRecordIds.length = 0; + }); + + it("should execute simple batch with two GET queries", async () => { + // Create two different query builders + const query1 = db.from(contacts).list().top(2); + const query2 = db.from(users).list().top(2); + + // Execute batch + const result = await db.batch([query1, query2]).execute(); + + // Verify we got results + expect(result.results).toBeDefined(); + expect(result.results.length).toBe(2); + + // Verify first result (contacts) + const [r1, r2] = result.results; + expect(r1.error).toBeUndefined(); + expect(r2.error).toBeUndefined(); + + const contactsResult = r1.data; + const usersResult = r2.data; + + if (!contactsResult) { + throw new Error("Expected contactsResult to be defined"); + } + + // Contacts should be an array + expect(Array.isArray(contactsResult)).toBe(true); + const firstContact = contactsResult[0]!; + expect(firstContact).toBeDefined(); + expect(firstContact).not.toHaveProperty("@odata.id"); + expect(firstContact).not.toHaveProperty("@odata.editLink"); + expect(firstContact.hobby).toBe("Board games"); + }); + + it("should allow adding to a batch after it has been created", async () => { + const batch = db.batch([]); + batch.addRequest(db.from(contacts).list().top(2)); + const result = await batch.execute(); + + expect(result.results).toBeDefined(); + expect(result.results.length).toBe(1); + const r1 = (result.results as unknown as any[])[0]; + if (!r1) { + throw new Error("Expected result at index 0"); + } + expect(r1.error).toBeUndefined(); + expect(r1.data).toBeDefined(); + }); + + it("should execute batch with mixed operations (GET + POST)", async () => { + // Create a GET query and a POST insert + const listQuery = db.from(contacts).list().top(2); + const insertQuery = db.from(contacts).insert({ + name: "Batch Test User", + hobby: "Testing", + }); + + // Execute batch with mixed operations + const result = await db.batch([listQuery, insertQuery]).execute(); + + // Verify we got results + expect(result.results).toBeDefined(); + expect(result.results.length).toBe(2); + + const [r1, r2] = result.results; + expect(r1.error).toBeUndefined(); + expect(r2.error).toBeUndefined(); + + const listResult = r1.data; + const insertResult = r2.data; + + if (!listResult) { + throw new Error("Expected listResult to be defined"); + } + + // Verify list result is an array + expect(Array.isArray(listResult)).toBe(true); + expect(listResult.length).toBeGreaterThan(0); + + // Verify insert result + expect(insertResult).toBeDefined(); + expect(typeof insertResult).toBe("object"); + }); + + it("should execute batch with multiple POST operations in a changeset", async () => { + // Create multiple insert operations + const insert1 = db.from(contacts).insert({ + name: "Batch User 1", + hobby: "Reading", + }); + + const insert2 = db.from(contacts).insert({ + name: "Batch User 2", + hobby: "Writing", + }); + + const insert3 = db.from(contacts).insert({ + name: "Batch User 3", + hobby: "Gaming", + }); + + // Execute batch with multiple POST operations + const result = await db.batch([insert1, insert2, insert3]).execute(); + + // Verify we got results + expect(result.results).toBeDefined(); + expect(result.results.length).toBe(3); + + const [r1, r2, r3] = result.results; + expect(r1.error).toBeUndefined(); + expect(r2.error).toBeUndefined(); + expect(r3.error).toBeUndefined(); + + // All inserts should return empty objects (204 No Content in batch) + expect(r1.data).toBeDefined(); + expect(typeof r1.data).toBe("object"); + expect(r2.data).toBeDefined(); + expect(typeof r2.data).toBe("object"); + expect(r3.data).toBeDefined(); + expect(typeof r3.data).toBe("object"); + }); + + it("should execute complex batch with multiple operation types", async () => { + // First, create a record we can update/delete + const setupInsert = await db + .from(contacts) + .insert({ + name: "Test Record for Batch", + hobby: "Testing", + }) + .execute(); + + expect(setupInsert.error).toBeUndefined(); + const testRecordId = setupInsert.data?.PrimaryKey; + if (!testRecordId) { + throw new Error("Failed to create test record"); + } + batchCreatedRecordIds.push(testRecordId); + + // Create a complex batch with multiple operation types + const listQuery = db.from(contacts).list().top(1); + const insertOp = db.from(contacts).insert({ + name: "Complex Batch Insert", + hobby: "Batch Testing", + }); + const updateOp = db + .from(contacts) + .update({ + name: "Updated via Batch", + }) + .byId(testRecordId); + const deleteOp = db.from(contacts).delete().byId(testRecordId); + + // Execute the complex batch + const result = await db + .batch([listQuery, insertOp, updateOp, deleteOp]) + .execute(); + + // Verify we got results + expect(result.results).toBeDefined(); + expect(result.results.length).toBe(4); + + const [r1, r2, r3, r4] = result.results; + expect(r1.error).toBeUndefined(); + expect(r2.error).toBeUndefined(); + expect(r3.error).toBeUndefined(); + expect(r4.error).toBeUndefined(); + + const listResult = r1.data; + const insertResult = r2.data; + const updateResult = r3.data; + const deleteResult = r4.data; + + if (!listResult) { + throw new Error("Expected listResult to be defined"); + } + + // Verify list result + expect(Array.isArray(listResult)).toBe(true); + expect(listResult.length).toBe(1); + + // Verify insert result (204 No Content in batch) + expect(insertResult).toBeDefined(); + expect(typeof insertResult).toBe("object"); + + // Verify update result + expect(updateResult).toBeDefined(); + expect(typeof updateResult).toBe("object"); + expect((updateResult as any).updatedCount).toBeDefined(); + + // Verify delete result + expect(deleteResult).toBeDefined(); + expect(typeof deleteResult).toBe("object"); + expect((deleteResult as any).deletedCount).toBeDefined(); + }); + + it("should correctly infer tuple types for batch results", async () => { + // Create a batch with different operation types + const query1 = db.from(contacts).list().top(1); + const query2 = db.from(users).list().top(1); + const insert = db.from(contacts).insert({ + name: "Type Test User", + hobby: "Testing Types", + }); + + const result = await db.batch([query1, query2, insert]).execute(); + + expect(result.results).toBeDefined(); + expect(result.results.length).toBe(3); + + const [r1, r2, r3] = result.results; + expect(r1.error).toBeUndefined(); + expect(r2.error).toBeUndefined(); + expect(r3.error).toBeUndefined(); + + if (!r1.data || !r2.data || !r3.data) { + throw new Error("Expected all results to have data"); + } + + expectTypeOf(result.results).not.toBeAny(); + + const contactsData = r1.data; + const usersData = r2.data; + const insertedContact = r3.data; + expectTypeOf(contactsData).not.toBeAny(); + expectTypeOf(usersData).not.toBeAny(); + expectTypeOf(insertedContact).not.toBeAny(); + + // Verify types are correctly inferred + expect(Array.isArray(contactsData)).toBe(true); + expect(Array.isArray(usersData)).toBe(true); + expect(typeof insertedContact).toBe("object"); + + const firstContact = contactsData[0]!; + expect(firstContact).toBeDefined(); + + const hobby: string | null = firstContact.hobby; + expect(typeof hobby).toBe("string"); + + const firstUser = usersData[0]!; + expect(firstUser).toBeDefined(); + + expectTypeOf(firstUser.name).not.toBeAny(); + + // Clean up + if (insertedContact.PrimaryKey) { + batchCreatedRecordIds.push(insertedContact.PrimaryKey); + } + }); + + it("should execute batch with 3 GET operations each with a filter", async () => { + // Create three GET queries with different filters + const query1 = db + .from(contacts) + .list() + .where(eq(contacts.hobby, "static-value")); + const query2 = db + .from(contacts) + .list() + .where(eq(contacts.id_user, "never")); + const query3 = db.from(users).list().where(isNotNull(users.name)); + + let flag = 1; + // Execute batch + const result = await db.batch([query1, query2, query3]).execute({ + hooks: { + after: () => { + flag = 2; + }, + }, + }); + + // ensure the hook was called + expect(flag).toBe(2); + + // Verify we got results + expect(result.results).toBeDefined(); + expect(result.results.length).toBe(3); + + const [r1, r2, r3] = result.results; + expect(r1.error).toBeUndefined(); + expect(r2.error).toBeUndefined(); + expect(r3.error).toBeUndefined(); + + const result1 = r1.data; + const result2 = r2.data; + const result3 = r3.data; + + // Verify first result (contacts filtered by hobby) + expect(Array.isArray(result1)).toBe(true); + if (result1 && result1.length > 0) { + const firstContact = result1[0]!; + expect(firstContact).toBeDefined(); + expect(firstContact.hobby).toBe("static-value"); + } + + // Verify second result (contacts filtered by name not null) + expect(Array.isArray(result2)).toBe(true); + + // Verify third result (users filtered by name not null) + expect(Array.isArray(result3)).toBe(true); + }); +}); diff --git a/packages/fmodata/tests/e2e/setup.ts b/packages/fmodata/tests/e2e/setup.ts new file mode 100644 index 00000000..51e17c62 --- /dev/null +++ b/packages/fmodata/tests/e2e/setup.ts @@ -0,0 +1,125 @@ +/** + * Shared setup for E2E tests + * + * Provides schemas, table occurrences, and connection setup + * used across all E2E test files. + */ + +import path from "path"; +import { config } from "dotenv"; +import { + fmTableOccurrence, + textField, + timestampField, +} from "@proofkit/fmodata"; +import { z } from "zod/v4"; + +config({ path: path.resolve(__dirname, "../../.env.local") }); + +// Load environment variables +export const serverUrl = process.env.FMODATA_SERVER_URL; +export const apiKey = process.env.FMODATA_API_KEY; +export const username = process.env.FMODATA_USERNAME; +export const password = process.env.FMODATA_PASSWORD; +export const database = process.env.FMODATA_DATABASE; + +// Define TOs with navigationPaths +export const contacts = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField(), + id_user: textField(), + }, + { + navigationPaths: ["users"], + }, +); + +export const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + id_customer: textField(), + }, + { + navigationPaths: ["contacts"], + }, +); + +// Define TOs with entity IDs and navigationPaths +export const contactsTOWithIds = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey().entityId("FMFID:4296032390"), + CreationTimestamp: timestampField().readOnly().entityId("FMFID:8590999686"), + CreatedBy: textField().readOnly().entityId("FMFID:12885966982"), + ModificationTimestamp: timestampField() + .readOnly() + .entityId("FMFID:17180934278"), + ModifiedBy: textField().readOnly().entityId("FMFID:21475901574"), + name_renamed: textField().entityId("FMFID:25770868870"), // in FM: "name" + hobby: textField().entityId("FMFID:30065836166"), + id_user: textField().entityId("FMFID:38655770758"), + }, + { + entityId: "FMTID:1065094", + navigationPaths: ["users"], + }, +); + +export const usersTOWithIds = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:4296032389"), + CreationTimestamp: timestampField().readOnly().entityId("FMFID:8590999685"), + CreatedBy: textField().readOnly().entityId("FMFID:12885966981"), + ModificationTimestamp: timestampField() + .readOnly() + .entityId("FMFID:17180934277"), + ModifiedBy: textField().readOnly().entityId("FMFID:21475901573"), + name: textField().entityId("FMFID:25770868869"), + id_customer: textField().entityId("FMFID:30065836165"), + }, + { + entityId: "FMTID:1065093", + navigationPaths: ["contacts"], + }, +); + +// Export occurrences array for backward compatibility +export const occurrencesWithIds = [contactsTOWithIds, usersTOWithIds] as const; + +// Schema for batch operations tests +export const contactsTOForBatch = fmTableOccurrence("contacts", { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField().readValidator( + z.string().transform((val) => "static-value"), + ), + id_user: textField(), +}); + +export const usersTOForBatch = fmTableOccurrence("users", { + id: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + id_customer: textField(), +}); diff --git a/packages/fmodata/tests/errors.test.ts b/packages/fmodata/tests/errors.test.ts new file mode 100644 index 00000000..10e01f40 --- /dev/null +++ b/packages/fmodata/tests/errors.test.ts @@ -0,0 +1,636 @@ +/** + * Error Handling Tests + * + * Tests for rich error handling in the library, including: + * - HTTP errors (4xx, 5xx) + * - Network errors (timeout, abort, retry limit, circuit open) + * - Validation errors with library-specific formatting (Zod example) + * - OData errors + * - Response structure errors + * - Type guards and error detection + */ + +import { describe, it, expect, assert } from "vitest"; +import { z, ZodError } from "zod/v4"; +import { + fmTableOccurrence, + textField, + numberField, + HTTPError, + ODataError, + SchemaLockedError, + ValidationError, + ResponseStructureError, + RecordCountMismatchError, + isHTTPError, + isValidationError, + isODataError, + isSchemaLockedError, + isResponseStructureError, + isRecordCountMismatchError, +} from "@proofkit/fmodata"; +import { createMockClient } from "./utils/test-setup"; +import { simpleMock, createMockFetch } from "./utils/mock-fetch"; +import { validateHeaderValue } from "http"; + +describe("Error Handling", () => { + const client = createMockClient(); + + const users = fmTableOccurrence("users", { + id: textField().primaryKey(), + username: textField(), + email: textField().readValidator(z.string().email()), + active: numberField().readValidator(z.coerce.boolean()), + age: numberField().readValidator(z.number().int().min(0).max(150)), + }); + + describe("HTTP Errors", () => { + it("should return HTTPError for 404 Not Found", async () => { + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: simpleMock({ status: 404 }), + }); + + expect(result.error).toBeDefined(); + expect(result.data).toBeUndefined(); + expect(result.error).toBeInstanceOf(HTTPError); + + const httpError = result.error as HTTPError; + expect(httpError.status).toBe(404); + expect(httpError.isNotFound()).toBe(true); + expect(httpError.is4xx()).toBe(true); + expect(httpError.is5xx()).toBe(false); + }); + + it("should return HTTPError for 401 Unauthorized", async () => { + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: simpleMock({ status: 401 }), + }); + + expect(result.error).toBeDefined(); + expect(result.error).toBeInstanceOf(HTTPError); + + const httpError = result.error as HTTPError; + expect(httpError.status).toBe(401); + expect(httpError.isUnauthorized()).toBe(true); + }); + + it("should return HTTPError for 500 Server Error", async () => { + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: simpleMock({ status: 500 }), + }); + + expect(result.error).toBeDefined(); + expect(result.error).toBeInstanceOf(HTTPError); + + const httpError = result.error as HTTPError; + expect(httpError.status).toBe(500); + expect(httpError.is5xx()).toBe(true); + expect(httpError.is4xx()).toBe(false); + }); + + it("should include response body in HTTPError", async () => { + const errorBody = { message: "Custom error message" }; + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: simpleMock({ + status: 400, + body: errorBody, + }), + }); + + expect(result.error).toBeInstanceOf(HTTPError); + const httpError = result.error as HTTPError; + expect(httpError.response).toEqual(errorBody); + }); + }); + + describe("OData Errors", () => { + it("should return ODataError for OData error responses", async () => { + const odataError = { + error: { + code: "INVALID_REQUEST", + message: "Invalid OData query", + target: "$filter", + }, + }; + + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: createMockFetch({ + url: "https://api.example.com", + method: "GET", + status: 400, + response: odataError, + headers: { "content-type": "application/json" }, + }), + }); + + expect(result.error).toBeDefined(); + expect(result.error).toBeInstanceOf(ODataError); + + const odataErr = result.error as ODataError; + expect(odataErr.code).toBe("INVALID_REQUEST"); + expect(odataErr.details).toEqual(odataError.error); + }); + + it("should return SchemaLockedError for database schema locked error (code 303)", async () => { + const schemaLockedError = { + error: { + code: "303", + message: "Database schema is locked by another user", + }, + }; + + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: createMockFetch({ + url: "https://api.example.com", + method: "GET", + status: 400, + response: schemaLockedError, + headers: { "content-type": "application/json" }, + }), + }); + + expect(result.error).toBeDefined(); + expect(result.error).toBeInstanceOf(SchemaLockedError); + + const schemaError = result.error as SchemaLockedError; + expect(schemaError.code).toBe("303"); + expect(schemaError.message).toContain("Database schema is locked"); + expect(schemaError.details).toEqual(schemaLockedError.error); + expect(schemaError.kind).toBe("SchemaLockedError"); + }); + + it("should return SchemaLockedError when error code is numeric 303", async () => { + const schemaLockedError = { + error: { + code: 303, + message: "Database schema is locked by another user", + }, + }; + + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: createMockFetch({ + url: "https://api.example.com", + method: "GET", + status: 400, + response: schemaLockedError, + headers: { "content-type": "application/json" }, + }), + }); + + expect(result.error).toBeDefined(); + expect(result.error).toBeInstanceOf(SchemaLockedError); + }); + }); + + describe("Validation Errors", () => { + it("should return ValidationError when schema validation fails", async () => { + const db = client.database("testdb"); + + // Return data that doesn't match schema (email is invalid, age is out of range) + const invalidData = [ + { + id: "1", + username: "testuser", + email: "not-an-email", // Invalid email + active: true, + age: 200, // Out of range (max 150) + }, + ]; + + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: createMockFetch(invalidData), + }); + + expect(result.error).toBeDefined(); + expect(result.error).toBeInstanceOf(ValidationError); + + const validationError = result.error as ValidationError; + expect(validationError.issues).toBeDefined(); + expect(Array.isArray(validationError.issues)).toBe(true); + expect(validationError.issues.length).toBeGreaterThan(0); + expect(validationError.value).toBeDefined(); + }); + + it("should preserve Standard Schema issues in cause property", async () => { + const db = client.database("testdb"); + + const invalidData = [ + { + id: "1", + username: "testuser", + email: "not-an-email", + active: true, + age: 200, + }, + ]; + + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: createMockFetch(invalidData), + }); + + expect(result.error).toBeInstanceOf(ValidationError); + const validationError = result.error as ValidationError; + + // The cause property (ES2022 Error.cause) contains the Standard Schema issues array + // This follows the same pattern as uploadthing and is validator-agnostic + assert(validationError.cause, "Cause is not defined"); + + // The cause should be the Standard Schema issues array + expect(Array.isArray(validationError.cause)).toBe(true); + expect(validationError.cause).toBe(validationError.issues); + + // The issues array is always available + expect(validationError.issues).toBeDefined(); + expect(Array.isArray(validationError.issues)).toBe(true); + expect(validationError.issues.length).toBeGreaterThan(0); + + // ensure the end user can pass this back to zod + expect(z.prettifyError(validationError)).toBeDefined(); + }); + + it("should include field name in ValidationError", async () => { + const db = client.database("testdb"); + + const invalidData = [ + { + id: "1", + username: "testuser", + email: "not-an-email", + active: true, + age: 25, + }, + ]; + + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: createMockFetch(invalidData), + }); + + expect(result.error).toBeInstanceOf(ValidationError); + const validationError = result.error as ValidationError; + + // The error should mention which field failed + expect(validationError.message).toContain("email"); + }); + }); + + describe("Response Structure Errors", () => { + it("should return ResponseStructureError for invalid response structure", async () => { + const db = client.database("testdb"); + + // Return invalid structure (not an object) + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: createMockFetch({ + url: "https://api.example.com", + method: "GET", + status: 200, + response: "not an object", // Invalid - should be object with value array + headers: { "content-type": "application/json" }, + }), + }); + + expect(result.error).toBeDefined(); + expect(result.error).toBeInstanceOf(ResponseStructureError); + + const structureError = result.error as ResponseStructureError; + expect(structureError.expected).toContain("object"); + }); + + it("should return ResponseStructureError when value is not an array", async () => { + const db = client.database("testdb"); + + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: createMockFetch({ + url: "https://api.example.com", + method: "GET", + status: 200, + response: { value: "not an array" }, // Invalid - value should be array + headers: { "content-type": "application/json" }, + }), + }); + + expect(result.error).toBeDefined(); + expect(result.error).toBeInstanceOf(ResponseStructureError); + }); + }); + + describe("Record Count Mismatch Errors", () => { + it("should return RecordCountMismatchError for single() when multiple records found", async () => { + const db = client.database("testdb"); + + const multipleRecords = [ + { + id: "1", + username: "user1", + email: "user1@test.com", + active: true, + age: 25, + }, + { + id: "2", + username: "user2", + email: "user2@test.com", + active: true, + age: 30, + }, + ]; + + const result = await db + .from(users) + .list() + .single() + .execute({ + fetchHandler: createMockFetch(multipleRecords), + }); + + expect(result.error).toBeDefined(); + expect(result.error).toBeInstanceOf(RecordCountMismatchError); + + const countError = result.error as RecordCountMismatchError; + expect(countError.expected).toBe("one"); + expect(countError.received).toBe(2); + }); + + it("should return RecordCountMismatchError for single() when no records found", async () => { + const db = client.database("testdb"); + + const result = await db + .from(users) + .list() + .single() + .execute({ + fetchHandler: createMockFetch([]), + }); + + expect(result.error).toBeDefined(); + expect(result.error).toBeInstanceOf(RecordCountMismatchError); + + const countError = result.error as RecordCountMismatchError; + expect(countError.expected).toBe("one"); + expect(countError.received).toBe(0); + }); + }); + + describe("Type Guards", () => { + it("should correctly identify HTTPError using type guard", async () => { + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: simpleMock({ status: 404 }), + }); + + expect(result.error).toBeDefined(); + expect(isHTTPError(result.error)).toBe(true); + + if (isHTTPError(result.error)) { + // TypeScript should know this is HTTPError + expect(result.error.status).toBe(404); + } + }); + + it("should correctly identify ValidationError using type guard", async () => { + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: createMockFetch([ + { + id: "1", + username: "test", + email: "invalid-email", + active: true, + age: 25, + }, + ]), + }); + + expect(result.error).toBeDefined(); + expect(isValidationError(result.error)).toBe(true); + + if (isValidationError(result.error)) { + // TypeScript should know this is ValidationError + expect(result.error.issues).toBeDefined(); + } + }); + + it("should correctly identify ODataError using type guard", async () => { + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: createMockFetch({ + url: "https://api.example.com", + method: "GET", + status: 400, + response: { error: { code: "ERROR", message: "Test" } }, + headers: { "content-type": "application/json" }, + }), + }); + + expect(result.error).toBeDefined(); + expect(isODataError(result.error)).toBe(true); + + if (isODataError(result.error)) { + // TypeScript should know this is ODataError + expect(result.error.code).toBeDefined(); + } + }); + + it("should correctly identify SchemaLockedError using type guard", async () => { + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: createMockFetch({ + url: "https://api.example.com", + method: "GET", + status: 400, + response: { + error: { code: "303", message: "Database schema is locked" }, + }, + headers: { "content-type": "application/json" }, + }), + }); + + expect(result.error).toBeDefined(); + expect(isSchemaLockedError(result.error)).toBe(true); + + if (isSchemaLockedError(result.error)) { + // TypeScript should know this is SchemaLockedError + expect(result.error.code).toBe("303"); + expect(result.error.kind).toBe("SchemaLockedError"); + } + }); + + it("should correctly identify ResponseStructureError using type guard", async () => { + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: createMockFetch({ + url: "https://api.example.com", + method: "GET", + status: 200, + response: "invalid", + headers: { "content-type": "application/json" }, + }), + }); + + expect(result.error).toBeDefined(); + expect(isResponseStructureError(result.error)).toBe(true); + }); + + it("should correctly identify RecordCountMismatchError using type guard", async () => { + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .single() + .execute({ + fetchHandler: createMockFetch([ + { + id: "1", + username: "user1", + email: "user1@test.com", + active: true, + age: 25, + }, + { + id: "2", + username: "user2", + email: "user2@test.com", + active: true, + age: 30, + }, + ]), + }); + + expect(result.error).toBeDefined(); + expect(isRecordCountMismatchError(result.error)).toBe(true); + }); + }); + + describe("Error Properties", () => { + it("should include timestamp in all errors", async () => { + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: simpleMock({ status: 404 }), + }); + + expect(result.error).toBeDefined(); + if (result.error && "timestamp" in result.error) { + expect(result.error.timestamp).toBeInstanceOf(Date); + } + }); + + it("should include kind property for discriminated unions", async () => { + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: simpleMock({ status: 404 }), + }); + + expect(result.error).toBeDefined(); + if (result.error && "kind" in result.error) { + expect(result.error.kind).toBe("HTTPError"); + } + }); + }); + + describe("Error Handling Patterns", () => { + it("should allow instanceof checks (like ffetch pattern)", async () => { + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: simpleMock({ status: 404 }), + }); + + if (result.error) { + if (result.error instanceof HTTPError) { + expect(result.error.status).toBe(404); + } else { + throw new Error("Expected HTTPError"); + } + } + }); + + it("should allow switch statement on kind property", async () => { + const db = client.database("testdb"); + const result = await db + .from(users) + .list() + .execute({ + fetchHandler: simpleMock({ status: 404 }), + }); + + if (result.error && "kind" in result.error) { + switch (result.error.kind) { + case "HTTPError": + expect((result.error as HTTPError).status).toBe(404); + break; + case "ValidationError": + throw new Error("Unexpected ValidationError"); + case "ODataError": + throw new Error("Unexpected ODataError"); + default: + throw new Error("Unexpected error kind"); + } + } + }); + }); +}); diff --git a/packages/fmodata/tests/expands.test.ts b/packages/fmodata/tests/expands.test.ts new file mode 100644 index 00000000..d5de7a22 --- /dev/null +++ b/packages/fmodata/tests/expands.test.ts @@ -0,0 +1,591 @@ +/** + * Expand API Specification Tests + * + * These tests define the expected TypeScript behavior for the expand() API. + * They use expectTypeOf to validate strict typing at compile time. + * + * DO NOT RUN THESE TESTS YET - they define the API we want to build. + */ + +import { describe, it, expect, expectTypeOf, assert } from "vitest"; +import { z } from "zod/v4"; +import { + fmTableOccurrence, + textField, + numberField, + eq, +} from "@proofkit/fmodata"; +import { createMockClient, users, contacts } from "./utils/test-setup"; +import { first } from "es-toolkit/compat"; +import { simpleMock } from "./utils/mock-fetch"; +import { mockResponses } from "./fixtures/responses"; + +describe("Expand API Specification", () => { + // Spec test table definitions (simplified for type testing) + const userCustomer = fmTableOccurrence( + "user_customer", + { + id: textField().primaryKey(), + name: textField().notNull(), + address: textField(), + tier: textField().notNull(), + }, + { + defaultSelect: "all", + }, + ); + + const contacts = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey(), + name: textField().notNull(), + hobby: textField(), + id_user: textField().notNull(), + }, + { + defaultSelect: "all", + navigationPaths: ["users", "other_users"], + }, + ); + + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField().notNull(), + active: numberField().readValidator(z.coerce.boolean()).notNull(), + id_customer: textField(), + }, + { + defaultSelect: "all", + navigationPaths: ["user_customer", "contacts"], + }, + ); + + const otherUsers = fmTableOccurrence( + "other_users", + { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField().notNull(), + active: numberField().readValidator(z.coerce.boolean()).notNull(), + id_customer: textField().notNull(), + }, + { + defaultSelect: "all", + }, + ); + + // Real server schema table definitions (for validation tests that use captured responses) + const contactsReal = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: textField(), + CreatedBy: textField(), + ModificationTimestamp: textField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField(), + id_user: textField(), + my_calc: textField(), + }, + { + defaultSelect: "all", + navigationPaths: ["users"], + }, + ); + + const usersReal = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + CreationTimestamp: textField(), + CreatedBy: textField(), + ModificationTimestamp: textField(), + ModifiedBy: textField(), + name: textField(), + id_customer: textField(), + }, + { + defaultSelect: "all", + navigationPaths: ["user_customer", "contacts"], + }, + ); + + const client = createMockClient(); + + // type UserFieldNames = keyof InferTableSchema; + // type CustomerFieldNames = keyof InferTableSchema; + + const db = client.database("test_db"); + + describe("Simple expand (no callback)", () => { + it("should generate query string for simple expand", () => { + const queryString = db + .from(contacts) + .list() + .expand(users) + .getQueryString(); + expect(queryString).toBe("/contacts?$top=1000&$expand=users"); + }); + + it("should not allow arbitrary string relations", () => { + db.from(contacts) + .list() + // @ts-expect-error - arbitrary string relation + .expand("arbitrary_relation") + .getQueryString(); + }); + }); + + describe("Expand with callback - select", () => { + it("should type callback builder to target table schema", () => { + db.from(contacts) + .list() + .expand(users, (builder) => { + // builder.select should only accept fields from users table + expectTypeOf(builder.select).parameter(0).not.toEqualTypeOf(); + + return builder.select({ + username: users.username, + email: users.email, + }); + }); + }); + + it("should have a properly typed response", async () => { + async () => { + // checking types only, don't actually make a request + const result = await db + .from(contacts) + .list() + .expand(users, (b) => + b.select({ username: users.username, email: users.email }), + ) + .execute(); + + const firstRecord = result.data![0]!; + + // runtime tests to ensure the fields are not present + // @ts-expect-error - these fields should not be present + expect(firstRecord.ROWID).toBeUndefined(); + // @ts-expect-error - these fields should not be present + expect(firstRecord.ROWMODID).toBeUndefined(); + + // ROWID and MODID weren't selected, it's not returned by default + expectTypeOf(firstRecord).not.toHaveProperty("ROWID"); + expectTypeOf(firstRecord).not.toHaveProperty("ROWMODID"); + + // no select was called, so all fields are returned + expectTypeOf(firstRecord).toHaveProperty("name"); + + // users was expanded, so it will be an array in the response + expectTypeOf(firstRecord).toHaveProperty("users"); + expectTypeOf(firstRecord.users).toBeArray(); + const firstUser = firstRecord.users[0]!; + }; + }); + + it("should generate query string with $select", () => { + const queryString = db + .from(contacts) + .list() + .expand(users, (b) => + b.select({ username: users.username, email: users.email }), + ) + .getQueryString(); + + expect(queryString).toBe( + "/contacts?$top=1000&$expand=users($select=username,email)", + ); + }); + + it("should enforce callback returns builder", () => { + db.from(contacts) + .list() + .expand(users, (b) => { + // Must return the builder + return b.select({ username: users.username }); + }); + }); + }); + + describe("Expand with callback - filter", () => { + it("should generate query string with $filter", () => { + const queryString = db + .from(contacts) + .list() + .expand(users, (b) => b.where(eq(users.active, 1))) + .getQueryString(); + + expect(queryString).toContain("$expand=users($filter=active"); + }); + }); + + describe("Expand with callback - orderBy", () => { + it("should generate query string with $orderby", () => { + const queryString = db + .from(contacts) + .list() + .expand(users, (b) => b.orderBy("username")) + .getQueryString(); + + expect(queryString).toContain("$expand=users($orderby=username"); + }); + }); + + describe("Expand with callback - top and skip", () => { + it("should generate query string with $top", () => { + const queryString = db + .from(contacts) + .list() + .expand(users, (b) => b.top(5)) + .getQueryString(); + + expect(queryString).toContain("$expand=users($top=5"); + }); + + it("should generate query string with $skip", () => { + const queryString = db + .from(contacts) + .list() + .expand(users, (b) => b.skip(10)) + .getQueryString(); + + expect(queryString).toContain("$expand=users($skip=10"); + }); + }); + + describe("Multiple expands (chaining)", () => { + it("should allow chaining multiple expand calls", () => { + const queryString = db + .from(contacts) + .list() + .expand(users, (b) => b.select({ username: users.username })) + .expand(otherUsers) + .getQueryString(); + + expect(queryString).toBe( + "/contacts?$top=1000&$expand=users($select=username),other_users", + ); + }); + + it("should type each expand callback independently", () => { + db.from(contacts) + .list() + .expand(users, (builder) => { + // First callback typed to users + expectTypeOf(builder.select).parameter(0).not.toEqualTypeOf(); + + return builder.select({ username: users.username }); + }) + .expand(otherUsers, (builder) => { + // Second callback - arbitrary relation so accepts any + return builder.select({ email: otherUsers.email }); + }); + }); + }); + + describe("Nested expands", () => { + it("should type nested expand callback to nested target schema", () => { + const query = db + .from(contacts) + .list() + .expand(users, (usersBuilder) => { + return usersBuilder + .select({ username: users.username, email: users.email }) + .expand(userCustomer, (customerBuilder) => { + // customerBuilder should be typed to customer schema + // Verify it accepts valid fields + return customerBuilder.select({ + name: userCustomer.name, + tier: userCustomer.tier, + }); + }); + }); + + // type tests, don't run this code + async () => { + const result = await query.execute(); + + const firstRecord = result.data![0]!; + + const firstUser = firstRecord.users[0]!; + + // @ts-expect-error - this field was not selected, so it shouldn't be in the type + firstUser.id_customer; + expectTypeOf(firstUser).not.toHaveProperty("id_customer"); + expectTypeOf(firstUser).toHaveProperty("username"); + }; + }); + + it("should validate nested expands on single record", async () => { + // This test uses real server schema (contactsReal, usersReal) to match captured responses + const mockData = mockResponses["deep nested expand"]; + const result = await db + .from(contactsReal) + .get("B5BFBC89-03E0-47FC-ABB6-D51401730227") + .expand(usersReal, (usersBuilder) => { + return usersBuilder + .select({ name: usersReal.name, id: usersReal.id }) + .expand(userCustomer, (customerBuilder) => { + return customerBuilder.select({ name: userCustomer.name }); + }); + }) + .execute({ + fetchHandler: simpleMock({ + status: mockData.status, + body: mockData.response, + headers: mockData.headers, + }), + }); + + assert(result.data, "Result data should be defined"); + expect(result.data.name).toBe("Eric"); + expect(result.data.hobby).toBe("Board games"); + expect(result.data.users).toBeDefined(); + + // Type check: verify that only selected fields are typed correctly + const firstUser = result.data.users?.[0]; + assert(firstUser, "First user should be defined"); + expectTypeOf(firstUser).toHaveProperty("name"); + expectTypeOf(firstUser).toHaveProperty("id"); + expectTypeOf(firstUser).toHaveProperty("user_customer"); + // @ts-expect-error - id_customer was not selected, should not be in type + expectTypeOf(firstUser.id_customer).toBeNever(); + + // Verify nested expand structure exists + expect(firstUser.id).toBe("1A269FA3-82E6-465A-94FA-39EE3F2F9B5D"); + expect(firstUser.name).toBe("Test User"); + expect(firstUser.user_customer).toBeDefined(); + expect(Array.isArray(firstUser.user_customer)).toBe(true); + expect(firstUser.user_customer.length).toBe(1); + + // Verify nested customer data + const firstCustomer = firstUser.user_customer?.[0]; + assert(firstCustomer, "First customer should be defined"); + + expectTypeOf(firstCustomer).toHaveProperty("name"); + // @ts-expect-error - other fields were not selected + expectTypeOf(firstCustomer.address).toBeNever(); + // @ts-expect-error - tier was not selected + expectTypeOf(firstCustomer.tier).toBeNever(); + + expect(firstCustomer.name).toBe("test"); + }); + + it("should validate nested expands on list query", async () => { + // This test uses real server schema (contactsReal, usersReal) to match captured responses + const mockData = mockResponses["list with nested expand"]; + const result = await db + .from(contactsReal) + .list() + .expand(usersReal, (usersBuilder) => { + // No select on users - all fields should be returned + return usersBuilder.expand(userCustomer, (customerBuilder) => { + return customerBuilder.select({ name: userCustomer.name }); + }); + }) + .execute({ + fetchHandler: simpleMock({ + status: mockData.status, + body: mockData.response, + headers: mockData.headers, + }), + }); + + expect(result.data).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + expect(result.data?.length).toBe(2); + + // Type check: verify list results are properly typed + const firstContact = result.data?.[0]; + if (firstContact) { + // Contact should have all its fields (no select was called on contacts) + expectTypeOf(firstContact).toHaveProperty("name"); + expectTypeOf(firstContact).toHaveProperty("PrimaryKey"); + expectTypeOf(firstContact).toHaveProperty("hobby"); + + // Verify users expand exists and is typed correctly + expectTypeOf(firstContact).toHaveProperty("users"); + expectTypeOf(firstContact.users).toBeArray(); + + // Verify runtime data (note: response has 'name' not 'id' due to real server schema) + expect(firstContact.name).toBe("Eric"); + expect(firstContact.hobby).toBe("Board games"); + expect(firstContact.users).toBeDefined(); + expect(Array.isArray(firstContact.users)).toBe(true); + expect(firstContact.users.length).toBe(1); + + const firstUser = firstContact.users?.[0]; + if (firstUser) { + // All user fields should be present (no select was used) + expectTypeOf(firstUser).toHaveProperty("id"); + expectTypeOf(firstUser).toHaveProperty("name"); + expectTypeOf(firstUser).toHaveProperty("id_customer"); + expectTypeOf(firstUser).toHaveProperty("user_customer"); + + // Verify runtime data exists + expect(firstUser.id).toBe("1A269FA3-82E6-465A-94FA-39EE3F2F9B5D"); + expect(firstUser.name).toBe("Test User"); + expect(firstUser.id_customer).toBe( + "3026B56E-0C6E-4F31-B666-EE8AC5B36542", + ); + expect(firstUser.user_customer).toBeDefined(); + expect(Array.isArray(firstUser.user_customer)).toBe(true); + expect(firstUser.user_customer.length).toBe(1); + + // Verify nested customer data with selected fields only + const firstCustomer = firstUser.user_customer?.[0]; + if (firstCustomer) { + // Only 'name' was selected in nested expand + expectTypeOf(firstCustomer).toHaveProperty("name"); + // @ts-expect-error - address was not selected, should not be in type + expectTypeOf(firstCustomer.address).toBeNever(); + // @ts-expect-error - tier was not selected, should not be in type + expectTypeOf(firstCustomer.tier).toBeNever(); + + expect(firstCustomer.name).toBe("test"); + } + } + + // Check second contact which has a different user structure + const secondContact = result.data?.[1]; + if (secondContact) { + expect(secondContact.name).toBe("Adam"); + expect(secondContact.hobby).toBe("trees"); + expect(secondContact.users).toBeDefined(); + expect(secondContact.users.length).toBe(1); + + const secondUser = secondContact.users?.[0]; + if (secondUser) { + expect(secondUser.id).toBe("53D36C9A-8F90-4C21-A38F-F278D4F77718"); + expect(secondUser.name).toBe("adam user"); + expect(secondUser.id_customer).toBeNull(); + // This user has no customer, should be empty array + expect(secondUser.user_customer).toEqual([]); + } + } + } + }); + + it("should generate query string with nested $expand", () => { + const queryString = db + .from(contacts) + .list() + .expand(users, (b) => + b + .select({ username: users.username }) + .expand(userCustomer, (nested) => + nested.select({ name: userCustomer.name }), + ), + ) + .getQueryString(); + + expect(queryString).toBe( + "/contacts?$top=1000&$expand=users($select=username;$expand=user_customer($select=name))", + ); + }); + + it("should support deeply nested expands (3 levels)", () => { + const queryString = db + .from(contacts) + .list() + .expand(users, (b) => + b.expand(userCustomer, (nested) => + // If customer had relations, we could expand further + nested.select({ name: userCustomer.name }), + ), + ) + .getQueryString(); + + expect(queryString).toContain("$expand=user_customer($select=name)"); + }); + }); + + describe("Complex combinations", () => { + it("should support select + filter + orderBy + nested expand", () => { + const queryString = db + .from(contacts) + .list() + .expand(users, (b) => + b + .select({ username: users.username, email: users.email }) + .where(eq(users.active, 1)) + .orderBy("username") + .top(10) + .expand(userCustomer, (nested) => + nested.select({ name: userCustomer.name }), + ), + ) + .getQueryString(); + + // Should contain all query options + expect(queryString).toContain("$select=username,email"); + expect(queryString).toContain("$filter=active"); + expect(queryString).toContain("$orderby=username"); + expect(queryString).toContain("$top=10"); + expect(queryString).toContain("$expand=user_customer($select=name)"); + }); + + it("should support multiple expands with different options", () => { + const queryString = db + .from(contacts) + .list() + .expand(users, (b) => + b.select({ username: users.username }).where(eq(users.active, 1)), + ) + .expand(otherUsers, (b) => b.select({ email: otherUsers.email }).top(5)) + .getQueryString(); + + expect(queryString).toBe( + "/contacts?$top=1000&$expand=users($select=username;$filter=active eq 1),other_users($select=email;$top=5)", + ); + }); + }); + + describe("Integration with existing query methods", () => { + it("should work with select on parent query", () => { + const queryString = db + .from(contacts) + .list() + .select({ name: contacts.name, hobby: contacts.hobby }) + .expand(users, (b) => b.select({ username: users.username })) + .getQueryString(); + + expect(queryString).toContain("$select=name,hobby"); + expect(queryString).toContain("$expand=users($select=username)"); + }); + + it("should work with filter on parent query", () => { + const queryString = db + .from(contacts) + .list() + .where(eq(contacts.name, "Eric")) + .expand(users) + .getQueryString(); + + expect(queryString).toContain("$filter=name eq"); + expect(queryString).toContain("$expand=users"); + }); + + it("should work with orderBy, top, skip on parent query", () => { + const queryString = db + .from(contacts) + .list() + .orderBy("name") + .top(20) + .skip(10) + .expand(users, (b) => b.select({ username: users.username })) + .getQueryString(); + + expect(queryString).toContain("$orderby=name"); + expect(queryString).toContain("$top=20"); + expect(queryString).toContain("$skip=10"); + expect(queryString).toContain("$expand=users($select=username)"); + }); + }); +}); diff --git a/packages/fmodata/tests/field-id-transforms.test.ts b/packages/fmodata/tests/field-id-transforms.test.ts new file mode 100644 index 00000000..aaf9d9ae --- /dev/null +++ b/packages/fmodata/tests/field-id-transforms.test.ts @@ -0,0 +1,551 @@ +/** + * Field ID Transformation Tests + * + * Tests that field names are transparently transformed to/from FileMaker field IDs (FMFIDs) + * and table occurrence IDs (FMTIDs) when using BaseTableWithIds and TableOccurrenceWithIds. + * + * Uses mock responses to verify: + * 1. Requests are sent with FMFIDs and FMTIDs + * 2. Responses with FMFID keys are transformed back to field names + * 3. User experience remains unchanged (uses field names throughout) + */ + +import { describe, it, expect, beforeEach } from "vitest"; +import { + createMockClient, + contactsTOWithIds, + usersTOWithIds, +} from "./utils/test-setup"; +import { simpleMock } from "./utils/mock-fetch"; +import { eq } from "@proofkit/fmodata"; + +describe("Field ID Transformation", () => { + let capturedRequests: Array<{ url: string; options: any }> = []; + + beforeEach(() => { + capturedRequests = []; + }); + + describe("Query with Select", () => { + it("should send request with FMFIDs and FMTID", async () => { + const connection = createMockClient(); + const db = connection.database("test.fmp12", { + useEntityIds: true, + }); + + const mockResponse = { + "@context": "https://api.example.com/$metadata#users", + value: [ + { + "@id": + "https://api.example.com/users('550e8400-e29b-41d4-a716-446655440001')", + "@editLink": "users('550e8400-e29b-41d4-a716-446655440001')", + "FMFID:1": "550e8400-e29b-41d4-a716-446655440001", + "FMFID:6": "Alice", + "FMFID:7": true, + }, + ], + }; + + await db + .from(usersTOWithIds) + .list() + .select({ + id: usersTOWithIds.id, + name: usersTOWithIds.name, + active: usersTOWithIds.active, + }) + .execute({ + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const url = input instanceof Request ? input.url : input.toString(); + capturedRequests.push({ url, options: init }); + return simpleMock({ body: mockResponse, status: 200 })(input, init); + }, + }); + + // Verify the request used FMTIDs for table and FMFIDs for fields + expect(capturedRequests).toHaveLength(1); + const request = capturedRequests[0]!; + expect(request.url).toContain("FMTID:1065093"); // Table ID + // FMFIDs are URL-encoded in the query string + expect(decodeURIComponent(request.url)).toContain("FMFID:1"); // id field + expect(decodeURIComponent(request.url)).toContain("FMFID:6"); // name field + expect(decodeURIComponent(request.url)).toContain("FMFID:7"); // active field + }); + + it("should transform FMFID response keys back to field names", async () => { + const connection = createMockClient(); + const db = connection.database("test.fmp12"); + + const mockResponse = { + "@context": "https://api.example.com/$metadata#users", + value: [ + { + "@id": + "https://api.example.com/users('550e8400-e29b-41d4-a716-446655440001')", + "@editLink": "users('550e8400-e29b-41d4-a716-446655440001')", + "FMFID:1": "550e8400-e29b-41d4-a716-446655440001", + "FMFID:6": "Alice", + "FMFID:7": true, + }, + { + "@id": + "https://api.example.com/users('550e8400-e29b-41d4-a716-446655440002')", + "@editLink": "users('550e8400-e29b-41d4-a716-446655440002')", + "FMFID:1": "550e8400-e29b-41d4-a716-446655440002", + "FMFID:6": "Bob", + "FMFID:7": false, + }, + ], + }; + + const result = await db + .from(usersTOWithIds) + .list() + .select({ + id: usersTOWithIds.id, + name: usersTOWithIds.name, + active: usersTOWithIds.active, + }) + .execute({ + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const url = input instanceof Request ? input.url : input.toString(); + capturedRequests.push({ url, options: init }); + return simpleMock({ body: mockResponse, status: 200 })(input, init); + }, + }); + + // User should receive data with field names, not FMFIDs + expect(result.data).toHaveLength(2); + expect(result.data![0]).toMatchObject({ + id: "550e8400-e29b-41d4-a716-446655440001", + name: "Alice", + active: true, + }); + expect(result.data![1]).toMatchObject({ + id: "550e8400-e29b-41d4-a716-446655440002", + name: "Bob", + active: false, + }); + }); + }); + + describe("Filter Operations", () => { + it("should transform field names to FMFIDs in filter", async () => { + const connection = createMockClient(); + const db = connection.database("test.fmp12", { + useEntityIds: true, + }); + + const mockResponse = { value: [] }; + + await db + .from(usersTOWithIds) + .list() + .select({ id: usersTOWithIds.id, name: usersTOWithIds.name }) + .where(eq(usersTOWithIds.active, true)) + .execute({ + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const url = input instanceof Request ? input.url : input.toString(); + capturedRequests.push({ url, options: init }); + return simpleMock({ body: mockResponse, status: 200 })(input, init); + }, + }); + + // Verify filter uses FMFID for the field name + const request = capturedRequests[0]!; + expect(decodeURIComponent(request.url)).toContain("FMFID:7"); // active field in filter + expect(request.url).toContain("eq%201"); + }); + }); + + describe("OrderBy Operations", () => { + it("should transform field names to FMFIDs in orderBy", async () => { + const connection = createMockClient(); + const db = connection.database("test.fmp12", { + useEntityIds: true, + }); + + const mockResponse = { value: [] }; + + await db + .from(usersTOWithIds) + .list() + .select({ id: usersTOWithIds.id, name: usersTOWithIds.name }) + .orderBy(["name", "desc"]) + .execute({ + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const url = input instanceof Request ? input.url : input.toString(); + capturedRequests.push({ url, options: init }); + return simpleMock({ body: mockResponse, status: 200 })(input, init); + }, + }); + + // Verify orderBy uses FMFID + const request = capturedRequests[0]!; + expect(decodeURIComponent(request.url)).toContain("FMFID:6"); // name field in orderBy + }); + }); + + describe("Get by ID", () => { + it("should use FMTID in URL", async () => { + const connection = createMockClient(); + const db = connection.database("test.fmp12", { + useEntityIds: true, + }); + + const mockResponse = { + "@id": + "https://api.example.com/users('550e8400-e29b-41d4-a716-446655440001')", + "@editLink": "users('550e8400-e29b-41d4-a716-446655440001')", + "FMFID:1": "550e8400-e29b-41d4-a716-446655440001", + "FMFID:6": "Alice", + }; + + await db + .from(usersTOWithIds) + .get("550e8400-e29b-41d4-a716-446655440001") + .execute({ + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const url = input instanceof Request ? input.url : input.toString(); + capturedRequests.push({ url, options: init }); + return simpleMock({ body: mockResponse, status: 200 })(input, init); + }, + }); + + const request = capturedRequests[0]!; + // For GET operations, the table name should NOT be FMTID (it's in the path, not the entity key) + expect(request.url).toContain( + "FMTID:1065093('550e8400-e29b-41d4-a716-446655440001')", + ); + }); + + it("should transform response field IDs back to names", async () => { + const connection = createMockClient(); + const db = connection.database("test.fmp12", { + useEntityIds: true, + }); + + const mockResponse = { + "@id": + "https://api.example.com/users('550e8400-e29b-41d4-a716-446655440001')", + "@editLink": "users('550e8400-e29b-41d4-a716-446655440001')", + "FMFID:1": "550e8400-e29b-41d4-a716-446655440001", + "FMFID:2": "2024-01-01T00:00:00Z", + "FMFID:3": "admin", + "FMFID:4": "2024-01-02T00:00:00Z", + "FMFID:5": "admin", + "FMFID:6": "Alice", + "FMFID:7": true, + "FMFID:8": "test", + "FMFID:9": "customer-1", + }; + + const result = await db + .from(usersTOWithIds) + .get("550e8400-e29b-41d4-a716-446655440001") + .execute({ + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const url = input instanceof Request ? input.url : input.toString(); + capturedRequests.push({ url, options: init }); + return simpleMock({ body: mockResponse, status: 200 })(input, init); + }, + }); + + expect(result.data).toMatchObject({ + id: "550e8400-e29b-41d4-a716-446655440001", + name: "Alice", + active: true, + id_customer: "customer-1", + }); + }); + }); + + describe("Insert Operations", () => { + it("should transform field names to FMFIDs in request body", async () => { + const connection = createMockClient(); + const db = connection.database("test.fmp12", { + useEntityIds: true, + }); + + const mockResponse = { + "@id": "https://api.example.com/users('new-user')", + "@editLink": "users('new-user')", + "FMFID:1": "new-user", + "FMFID:2": "2024-01-01T00:00:00Z", + "FMFID:3": "admin", + "FMFID:4": "2024-01-02T00:00:00Z", + "FMFID:5": "admin", + "FMFID:6": "Charlie", + "FMFID:7": true, + "FMFID:8": "test", + "FMFID:9": null, + }; + + let capturedBody: any; + const result = await db + .from(usersTOWithIds) + .insert({ + name: "Charlie", + active: true, + fake_field: "test", + }) + .execute({ + fetchHandler: async (input, init) => { + let url = input instanceof Request ? input.url : input.toString(); + // Capture body - it might be in the Request object itself + let bodyText: string | null = null; + if (input instanceof Request && input.body) { + bodyText = await input.text(); + } else if (init?.body) { + bodyText = init.body as string; + } + capturedBody = bodyText ? JSON.parse(bodyText) : {}; + capturedRequests.push({ url, options: init || {} }); + return simpleMock({ body: mockResponse, status: 201 })(url, init); + }, + }); + + expect(capturedRequests).toHaveLength(1); + const request = capturedRequests[0]!; + expect(request.url).toContain("FMTID:1065093"); // Table ID + + // Check that the body has FMFIDs (not field names) + expect(capturedBody).toMatchObject({ + "FMFID:6": "Charlie", // name + "FMFID:7": 1, // active (number field, 1 = true) + "FMFID:8": "test", // fake_field + }); + }); + + it("should transform response field IDs back to names", async () => { + const connection = createMockClient(); + const db = connection.database("test.fmp12", { + useEntityIds: true, + }); + + const mockResponse = { + "@id": + "https://api.example.com/users('550e8400-e29b-41d4-a716-446655440003')", + "@editLink": "users('550e8400-e29b-41d4-a716-446655440003')", + "FMFID:1": "550e8400-e29b-41d4-a716-446655440003", + "FMFID:2": "2024-01-01T00:00:00Z", + "FMFID:3": "admin", + "FMFID:4": "2024-01-02T00:00:00Z", + "FMFID:5": "admin", + "FMFID:6": "Charlie", + "FMFID:7": true, + "FMFID:8": "test", + "FMFID:9": null, + }; + + const result = await db + .from(usersTOWithIds) + .insert({ + name: "Charlie", + active: true, + fake_field: "test", + }) + .execute({ + fetchHandler: async (input, init) => { + const url = input instanceof Request ? input.url : input.toString(); + capturedRequests.push({ url, options: init || {} }); + return simpleMock({ body: mockResponse, status: 201 })(input, init); + }, + }); + + expect(result.data).toMatchObject({ + id: "550e8400-e29b-41d4-a716-446655440003", + name: "Charlie", + active: true, + }); + }); + }); + + describe("Update Operations", () => { + it("should transform field names to FMFIDs in update body", async () => { + const connection = createMockClient(); + const db = connection.database("test.fmp12", { + useEntityIds: true, + }); + + let capturedBody: any; + await db + .from(usersTOWithIds) + .update({ + name: "Alice Updated", + active: false, + }) + .byId("550e8400-e29b-41d4-a716-446655440001") + .execute({ + fetchHandler: async (input, init) => { + let url = input instanceof Request ? input.url : input.toString(); + // Capture body - it might be in the Request object itself + let bodyText: string | null = null; + if (input instanceof Request && input.body) { + bodyText = await input.text(); + } else if (init?.body) { + bodyText = init.body as string; + } + capturedBody = bodyText ? JSON.parse(bodyText) : {}; + capturedRequests.push({ url, options: init || {} }); + return simpleMock({ body: 1, status: 200 })(url, init); + }, + }); + + expect(capturedRequests).toHaveLength(1); + const request = capturedRequests[0]!; + expect(request.url).toContain("FMTID:1065093"); // Table ID + + // Check that the body has FMFIDs (not field names) + expect(capturedBody).toMatchObject({ + "FMFID:6": "Alice Updated", // name + "FMFID:7": 0, // active (number field, 0 = false) + }); + }); + }); + + describe("Expand Operations", () => { + it("should use FMFIDs for expanded relation fields", async () => { + const connection = createMockClient(); + const db = connection.database("test.fmp12", { + useEntityIds: true, + }); + + const mockResponse = { value: [] }; + + await db + .from(contactsTOWithIds) + .list() + .expand(usersTOWithIds, (b: any) => + b.select({ id: usersTOWithIds.id, name: usersTOWithIds.name }), + ) + .execute({ + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const url = input instanceof Request ? input.url : input.toString(); + capturedRequests.push({ url, options: init }); + return simpleMock({ body: mockResponse, status: 200 })(input, init); + }, + }); + + const request = capturedRequests[0]!; + expect(request.url).toContain("FMTID:200"); // contacts table + expect(request.url).toContain("$expand=FMTID:1065093"); // relation name preserved + expect(decodeURIComponent(request.url)).toContain("FMFID:1"); // id field in expand + expect(decodeURIComponent(request.url)).toContain("FMFID:6"); // name field in expand + }); + + it("should transform expanded relation response fields back to names", async () => { + const connection = createMockClient(); + const db = connection.database("test.fmp12", { + useEntityIds: true, + }); + + const mockResponse = { + "@context": "https://api.example.com/$metadata#contacts", + value: [ + { + "@id": "https://api.example.com/contacts('contact-1')", + "@editLink": "contacts('contact-1')", + "FMFID:10": "contact-1", + "FMFID:11": null, + "FMFID:12": null, + "FMFID:13": null, + "FMFID:14": null, + "FMFID:15": "Contact One", + "FMFID:16": null, + "FMFID:17": "550e8400-e29b-41d4-a716-446655440001", + "FMTID:1065093": [ + { + "@id": + "https://api.example.com/FMTID:1065093('550e8400-e29b-41d4-a716-446655440001')", + "@editLink": "users('550e8400-e29b-41d4-a716-446655440001')", + "FMFID:1": "550e8400-e29b-41d4-a716-446655440001", + "FMFID:2": "2024-01-01T00:00:00Z", + "FMFID:3": "admin", + "FMFID:4": "2024-01-02T00:00:00Z", + "FMFID:5": "admin", + "FMFID:6": "Alice", + "FMFID:7": true, + "FMFID:8": "test", + "FMFID:9": null, + }, + ], + }, + ], + }; + + const result = await db + .from(contactsTOWithIds) + .list() + .expand(usersTOWithIds, (b: any) => + b.select({ id: usersTOWithIds.id, name: usersTOWithIds.name }), + ) + .execute({ + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const url = input instanceof Request ? input.url : input.toString(); + capturedRequests.push({ url, options: init }); + return simpleMock({ body: mockResponse, status: 200 })(input, init); + }, + }); + + // For this test, we'll skip full validation since expanded relations + // add dynamic fields not in the schema. Just verify the transformation happened. + if (result.error) { + // If validation failed, check raw response to ensure transformation occurred + console.log( + "Note: Validation failed for expanded data (expected - dynamic fields)", + ); + } else { + expect(result.data).toBeDefined(); + expect(result.data).toHaveLength(1); + if (result.data && result.data[0]) { + const contact = result.data[0]; + expect(contact).toMatchObject({ + PrimaryKey: "contact-1", + name: "Contact One", + id_user: "550e8400-e29b-41d4-a716-446655440001", + }); + // Check expanded relation was transformed + expect(contact.users).toHaveLength(1); + expect(contact.users[0]).toMatchObject({ + id: "550e8400-e29b-41d4-a716-446655440001", + name: "Alice", + }); + } + } + }); + }); + + describe("Prefer Header", () => { + it("should include 'Prefer: fmodata.entity-ids' header when using entity IDs", async () => { + const connection = createMockClient(); + const db = connection.database("test.fmp12", { + useEntityIds: true, + }); + + const mockResponse = { value: [] }; + + await db + .from(usersTOWithIds) + .list() + .select({ id: usersTOWithIds.id, name: usersTOWithIds.name }) + .execute({ + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const url = input instanceof Request ? input.url : input.toString(); + const headers = (init as RequestInit)?.headers as Record< + string, + string + >; + capturedRequests.push({ url, options: { ...init, headers } }); + + // Verify the Prefer header is present + expect(headers).toBeDefined(); + expect(headers.Prefer).toBe("fmodata.entity-ids"); + + return simpleMock({ body: mockResponse, status: 200 })(input, init); + }, + }); + }); + }); +}); diff --git a/packages/fmodata/tests/filters.test.ts b/packages/fmodata/tests/filters.test.ts new file mode 100644 index 00000000..2e5d439e --- /dev/null +++ b/packages/fmodata/tests/filters.test.ts @@ -0,0 +1,550 @@ +/** + * Mock Fetch Tests + * + * These tests use captured responses from real FileMaker OData API calls + * to test the client without requiring a live server connection. + * + * The mock responses are stored in tests/fixtures/responses.ts and are + * captured using the capture script: pnpm capture + * + * To add new tests: + * 1. First, ensure you have a corresponding mock response captured + * 2. Create a test that uses the same query pattern + * 3. The mock fetch will automatically match the request URL to the stored response + */ + +import { describe, it, expect, expectTypeOf } from "vitest"; +import { + eq, + ne, + gt, + gte, + lt, + lte, + contains, + startsWith, + endsWith, + inArray, + notInArray, + and, + or, + isNull, + isNotNull, + fmTableOccurrence, + textField, +} from "@proofkit/fmodata"; +import { + createMockClient, + users, + contacts, + usersTOWithIds, +} from "./utils/test-setup"; +import { z } from "zod/v4"; + +describe("Filter Tests", () => { + const client = createMockClient(); + const db = client.database("fmdapi_test.fmp12"); + + it("should enforce correct operator types for each field type", () => { + // ✅ String operators + const stringQuery = db + .from(contacts) + .list() + .where(eq(contacts.name, "John")); + expect(stringQuery.getQueryString()).toBe( + "/contacts?$filter=name eq 'John'&$top=1000", + ); + + // ✅ Boolean operators + // Note: active field has a writeValidator that converts boolean to number (1/0) + const boolQuery = db.from(users).list().where(eq(users.active, true)); + expect(boolQuery.getQueryString()).toBe( + "/users?$filter=active eq 1&$top=1000", + ); + }); + + it("should support equality operator", () => { + const query = db.from(contacts).list().where(eq(contacts.name, "John")); + expect(query.getQueryString()).toBe( + "/contacts?$filter=name eq 'John'&$top=1000", + ); + }); + + it("should support multiple conditions with AND", () => { + const query = db + .from(contacts) + .list() + .where(and(eq(contacts.name, "John"), ne(contacts.name, "Jane"))); + expect(query.getQueryString()).toContain("name eq 'John'"); + expect(query.getQueryString()).toContain("and"); + }); + + it("should support string operators", () => { + // Contains operator + const containsQuery = db + .from(contacts) + .list() + .where(contains(contacts.name, "John")); + expect(containsQuery.getQueryString()).toContain("contains"); + + // Starts with operator + const startsWithQuery = db + .from(contacts) + .list() + .where(startsWith(contacts.name, "J")); + expect(startsWithQuery.getQueryString()).toContain("startswith"); + + // Ends with operator + const endsWithQuery = db + .from(contacts) + .list() + .where(endsWith(contacts.name, "n")); + expect(endsWithQuery.getQueryString()).toContain("endswith"); + }); + + it("should support logical operators", () => { + const query = db + .from(users) + .list() + .where(and(contains(users.name, "John"), eq(users.active, true))); + expect(query.getQueryString()).toContain("contains"); + expect(query.getQueryString()).toContain("and"); + }); + + it("should support or operator", () => { + const query = db + .from(users) + .list() + .where(or(eq(users.name, "John"), eq(users.name, "Jane"))); + expect(query.getQueryString()).toContain("or"); + }); + + it("should support in operator", () => { + const query = db + .from(contacts) + .list() + .where(inArray(contacts.name, ["John", "Jane", "Bob"])); + + const queryString = query.getQueryString(); + expect(queryString).toContain("in"); + expect(queryString).toContain("$filter=name in ('John', 'Jane', 'Bob')"); + + const specialTable = fmTableOccurrence( + "special_table", + { + id: textField().primaryKey(), + name: textField(), + }, + { defaultSelect: "all" }, + ); + + const query2 = db + .from(specialTable) + .list() + .where(inArray(specialTable.id, ["John", "Jane", "Bob"])); + + const queryString2 = query2.getQueryString(); + expect(queryString2).toContain("in"); + expect(queryString2).toContain(`$filter="id" in ('John', 'Jane', 'Bob')`); + }); + + it("should support null values", () => { + const query = db.from(users).list().where(isNull(users.name)); + expect(query.getQueryString()).toContain("null"); + }); + + it("should properly escape or quote field names in filters", () => { + /** + * From the FileMaker docs: + * Enclose field names that include special characters, such as spaces or underscores, in double-quotation marks. + */ + const weirdTable = fmTableOccurrence( + "weird_table", + { + id: textField().primaryKey(), + "name with spaces": textField(), + }, + { defaultSelect: "all" }, + ); + const query = db + .from(weirdTable) + .list() + .where(eq(weirdTable["name with spaces"], "John")); + expect(query.getQueryString()).toContain( + "$filter=\"name with spaces\" eq 'John'", + ); + + const query2 = db.from(weirdTable).list().where(eq(weirdTable.id, "John")); + expect(query2.getQueryString()).toContain(`$filter="id" eq 'John'`); + }); + + it("should support complex nested filters", () => { + const query = db + .from(users) + .list() + .where( + and( + or(eq(users.name, "John"), eq(users.name, "Jane")), + eq(users.active, true), + ), + ); + expect(query.getQueryString()).toContain("or"); + expect(query.getQueryString()).toContain("and"); + }); + + it("should combine $count with filter", () => { + const queryString = db + .from(users) + .list() + .where(eq(users.active, true)) + .count() + .getQueryString(); + + expect(queryString).toContain("$count"); + expect(queryString).toContain("$filter"); + }); + + it("should combine $select and $filter", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name, id: users.id }) + .where(eq(users.active, true)) + .getQueryString(); + + expect(queryString).toContain("$select"); + expect(queryString).toContain("$filter"); + expect(queryString).toContain("name"); + expect(queryString).toContain("active"); + }); + + it("should combine $select, $filter, and $orderby", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name, id: users.id }) + .where(eq(users.active, true)) + .orderBy("name") + .getQueryString(); + + expect(queryString).toContain("$select"); + expect(queryString).toContain("$filter"); + expect(queryString).toContain("$orderby"); + }); + + it("should combine multiple query parameters", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name, id: users.id }) + .where(eq(users.active, true)) + .orderBy("name") + .top(10) + .skip(0) + .getQueryString(); + + expect(queryString).toContain("$select"); + expect(queryString).toContain("$filter"); + expect(queryString).toContain("$orderby"); + expect(queryString).toContain("$top"); + expect(queryString).toContain("$skip"); + }); + + it("should combine $select, $filter, $orderby, $top, and $expand", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name, id: users.id }) + .where(eq(users.active, true)) + .orderBy("name") + .top(25) + .expand(contacts) + .getQueryString(); + + expect(queryString).toContain("$select"); + expect(queryString).toContain("$filter"); + expect(queryString).toContain("$orderby"); + expect(queryString).toContain("$top"); + expect(queryString).toContain("$expand"); + }); + + it("should generate query string with single() and filter", () => { + const queryString = db + .from(users) + .list() + .where(eq(users.id, "123")) + .single() + .getQueryString(); + + expect(queryString).toContain("$filter"); + expect(queryString).toContain("id"); + }); + + it("should use & to separate multiple parameters", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name }) + .where(eq(users.active, true)) + .top(10) + .getQueryString(); + + // Should have & between parameters + const matches = queryString.match(/&/g); + expect(matches?.length).toBeGreaterThan(0); + }); + + it("should URL encode special characters in filter values", () => { + const queryString = db + .from(contacts) + .list() + .where(eq(contacts.name, "John & Jane")) + .getQueryString(); + + expect(queryString).toContain("$filter"); + // Special characters should be properly encoded + expect(queryString).toBeDefined(); + }); + + it("should use entity IDs when enabled", () => { + const queryString = db + .from(usersTOWithIds) + .list() + .where(eq(usersTOWithIds.id, "123")) + .getQueryString(); + + expect(queryString).toContain("$filter"); + expect(queryString).toContain("FMFID"); + + const dbWithIds = createMockClient().database("fmdapi_test.fmp12", { + useEntityIds: true, + }); + + const queryStringWithIds = dbWithIds + .from(usersTOWithIds) + .list() + .where(eq(usersTOWithIds.id, "123")) + .getQueryString(); + + expect(queryStringWithIds).toContain("$filter"); + expect(queryStringWithIds).toContain("FMFID"); + }); + + // it("should not allow filter on the wrong table", ()=>{}) + + it("should use the write validator for all operations", () => { + const testTable = fmTableOccurrence( + "test", + { + text: textField().primaryKey(), + textNumber: textField().writeValidator(z.number().transform(toString)), + enum: textField().writeValidator(z.enum(["a", "b", "c"])), + transform: textField().writeValidator( + z.string().transform(() => "static-value"), + ), + }, + { useEntityIds: false }, + ); + + // ------------------ Test eq (equal) operator ------------------ + // @ts-expect-error - should not allow number + eq(testTable.text, 1); // text field + + // @ts-expect-error - should not allow string + eq(testTable.textNumber, "1"); // text field + eq(testTable.textNumber, 1); // number field + + eq(testTable.enum, "a"); // enum field + // @ts-expect-error - should not allow invalid enum value + eq(testTable.enum, "d"); + + // ------------------ Test ne (not equal) operator ------------------ + // @ts-expect-error - should not allow number + ne(testTable.text, 1); + // @ts-expect-error - should not allow string + ne(testTable.textNumber, "1"); + ne(testTable.textNumber, 1); + ne(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + ne(testTable.enum, "d"); + + // ------------------ Test gt (greater than) operator ------------------ + // @ts-expect-error - should not allow number + gt(testTable.text, 1); + // @ts-expect-error - should not allow string + gt(testTable.textNumber, "1"); + gt(testTable.textNumber, 1); + gt(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + gt(testTable.enum, "d"); + + // ------------------ Test gte (greater than or equal) operator ------------------ + // @ts-expect-error - should not allow number + gte(testTable.text, 1); + // @ts-expect-error - should not allow string + gte(testTable.textNumber, "1"); + gte(testTable.textNumber, 1); + gte(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + gte(testTable.enum, "d"); + + // ------------------ Test lt (less than) operator ------------------ + // @ts-expect-error - should not allow number + lt(testTable.text, 1); + // @ts-expect-error - should not allow string + lt(testTable.textNumber, "1"); + lt(testTable.textNumber, 1); + lt(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + lt(testTable.enum, "d"); + + // ------------------ Test lte (less than or equal) operator ------------------ + // @ts-expect-error - should not allow number + lte(testTable.text, 1); + // @ts-expect-error - should not allow string + lte(testTable.textNumber, "1"); + lte(testTable.textNumber, 1); + lte(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + lte(testTable.enum, "d"); + + // ------------------ Test contains operator ------------------ + // @ts-expect-error - should not allow number + contains(testTable.text, 1); + // @ts-expect-error - should not allow string + contains(testTable.textNumber, "1"); + contains(testTable.textNumber, 1); + contains(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + contains(testTable.enum, "d"); + + // ------------------ Test startsWith operator ------------------ + // @ts-expect-error - should not allow number + startsWith(testTable.text, 1); + // @ts-expect-error - should not allow string + startsWith(testTable.textNumber, "1"); + startsWith(testTable.textNumber, 1); + startsWith(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + startsWith(testTable.enum, "d"); + + // ------------------ Test endsWith operator ------------------ + // @ts-expect-error - should not allow number + endsWith(testTable.text, 1); + // @ts-expect-error - should not allow string + endsWith(testTable.textNumber, "1"); + endsWith(testTable.textNumber, 1); + endsWith(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + endsWith(testTable.enum, "d"); + + // ------------------ Test inArray operator ------------------ + // @ts-expect-error - should not allow number array + inArray(testTable.text, [1, 2]); + // @ts-expect-error - should not allow string array + inArray(testTable.textNumber, ["1", "2"]); + inArray(testTable.textNumber, [1, 2]); + inArray(testTable.enum, ["a", "b"]); + // @ts-expect-error - should not allow invalid enum values + inArray(testTable.enum, ["d", "e"]); + + // ------------------ Test notInArray operator ------------------ + // @ts-expect-error - should not allow number array + notInArray(testTable.text, [1, 2]); + // @ts-expect-error - should not allow string array + notInArray(testTable.textNumber, ["1", "2"]); + notInArray(testTable.textNumber, [1, 2]); + notInArray(testTable.enum, ["a", "b"]); + // @ts-expect-error - should not allow invalid enum values + notInArray(testTable.enum, ["d", "e"]); + + // Test that write validators are used for all operators + const queryStringEq = db + .from(testTable) + .list() + .where(eq(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringEq).toContain("$filter"); + expect(queryStringEq).toContain("static-value"); + + const queryStringNe = db + .from(testTable) + .list() + .where(ne(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringNe).toContain("$filter"); + expect(queryStringNe).toContain("static-value"); + + const queryStringGt = db + .from(testTable) + .list() + .where(gt(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringGt).toContain("$filter"); + expect(queryStringGt).toContain("static-value"); + + const queryStringGte = db + .from(testTable) + .list() + .where(gte(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringGte).toContain("$filter"); + expect(queryStringGte).toContain("static-value"); + + const queryStringLt = db + .from(testTable) + .list() + .where(lt(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringLt).toContain("$filter"); + expect(queryStringLt).toContain("static-value"); + + const queryStringLte = db + .from(testTable) + .list() + .where(lte(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringLte).toContain("$filter"); + expect(queryStringLte).toContain("static-value"); + + const queryStringContains = db + .from(testTable) + .list() + .where(contains(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringContains).toContain("$filter"); + expect(queryStringContains).toContain("static-value"); + + const queryStringStartsWith = db + .from(testTable) + .list() + .where(startsWith(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringStartsWith).toContain("$filter"); + expect(queryStringStartsWith).toContain("static-value"); + + const queryStringEndsWith = db + .from(testTable) + .list() + .where(endsWith(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringEndsWith).toContain("$filter"); + expect(queryStringEndsWith).toContain("static-value"); + + const queryStringInArray = db + .from(testTable) + .list() + .where(inArray(testTable.transform, ["anything"])) + .getQueryString(); + expect(queryStringInArray).toContain("$filter"); + expect(queryStringInArray).toContain("static-value"); + + const queryStringNotInArray = db + .from(testTable) + .list() + .where(notInArray(testTable.transform, ["anything"])) + .getQueryString(); + expect(queryStringNotInArray).toContain("$filter"); + expect(queryStringNotInArray).toContain("static-value"); + }); +}); diff --git a/packages/fmodata/tests/fixtures/metadata.xml b/packages/fmodata/tests/fixtures/metadata.xml new file mode 100644 index 00000000..e01fee57 --- /dev/null +++ b/packages/fmodata/tests/fixtures/metadata.xml @@ -0,0 +1,19965 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Capabilities.V1.ConformanceLevelType/Intermediate + + + + + application/json;metadata=full;IEEE754Compatible=false;streaming=true + + application/json;metadata=minimal;IEEE754Compatible=false;streaming=true + + application/json;metadata=none;IEEE754Compatible=false;streaming=true + + + + + + + + contains + endswith + startswith + length + indexof + substring + tolower + toupper + trim + concat + year + month + day + hour + minute + second + fractionalseconds + date + time + totaloffsetminutes + totalseconds + now + mindatetime + maxdatetime + round + floor + ceiling + cast + + + + + + + Org.OData.Aggregation.V1.Aggregate + Org.OData.Aggregation.V1.GroupBy + + + + + + + \ No newline at end of file diff --git a/packages/fmodata/tests/fixtures/responses.ts b/packages/fmodata/tests/fixtures/responses.ts new file mode 100644 index 00000000..9b9aa9b5 --- /dev/null +++ b/packages/fmodata/tests/fixtures/responses.ts @@ -0,0 +1,669 @@ +/** + * Mock Response Fixtures + * + * This file contains captured responses from real FileMaker OData API calls. + * These responses are used by the mock fetch implementation to replay API responses + * in tests without requiring a live server connection. + * + * Format: + * - Each response is keyed by a descriptive query name + * - Each response object contains: + * - url: The full request URL (for matching) + * - method: HTTP method (typically "GET") + * - status: Response status code + * - response: The actual response data (JSON-parsed) + * + * To add new mock responses: + * 1. Add a query definition to scripts/capture-responses.ts + * 2. Run: pnpm capture + * 3. The captured response will be added to this file automatically + * + * You can manually edit responses here if you need to modify test data. + */ + +export type MockResponse = { + url: string; + method: string; + status: number; + headers?: { + "content-type"?: string; + location?: string; + }; + response: any; +}; + +export type MockResponses = Record; + +/** + * Captured mock responses from FileMaker OData API + * + * These responses are used in tests by passing them to createMockFetch() at the + * per-execution level. Each test explicitly declares which response it expects. + */ +export const mockResponses = { + "list-basic": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts$top=10", + method: "GET", + status: 400, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + error: { + code: "-1002", + message: "Error: syntax error in URL at: '$top'", + }, + }, + }, + + "list-with-select": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts?$select=name,PrimaryKey&$top=10", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + 'https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts("name","PrimaryKey")', + value: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + name: "Eric", + PrimaryKey: "B5BFBC89-03E0-47FC-ABB6-D51401730227", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('D61B338B-B06E-4985-ABFD-CB3B2EF4F4C4')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('D61B338B-B06E-4985-ABFD-CB3B2EF4F4C4')", + name: "Adam", + PrimaryKey: "D61B338B-B06E-4985-ABFD-CB3B2EF4F4C4", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('8EE70436-18A8-4FF5-96F0-4DCE721496B2')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('8EE70436-18A8-4FF5-96F0-4DCE721496B2')", + name: "Ben", + PrimaryKey: "8EE70436-18A8-4FF5-96F0-4DCE721496B2", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('A16D1D68-6A97-44C9-95FD-70A3206E6B69')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('A16D1D68-6A97-44C9-95FD-70A3206E6B69')", + name: "Carter", + PrimaryKey: "A16D1D68-6A97-44C9-95FD-70A3206E6B69", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('35B60054-E7FC-423A-92BD-3FFE5E48C42D')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('35B60054-E7FC-423A-92BD-3FFE5E48C42D')", + name: "Vance", + PrimaryKey: "35B60054-E7FC-423A-92BD-3FFE5E48C42D", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('4244DDF7-59E1-4C21-9795-CF0603F4B87F')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('4244DDF7-59E1-4C21-9795-CF0603F4B87F')", + name: "test2", + PrimaryKey: "4244DDF7-59E1-4C21-9795-CF0603F4B87F", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('7E0E56EA-DC0C-4C96-89B1-600188F3AC63')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('7E0E56EA-DC0C-4C96-89B1-600188F3AC63')", + name: "Test User 1762703536689", + PrimaryKey: "7E0E56EA-DC0C-4C96-89B1-600188F3AC63", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('AD888459-A733-4839-AAB4-3BAEA0CC9BDA')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('AD888459-A733-4839-AAB4-3BAEA0CC9BDA')", + name: "Update Test 1762703536876 Updated", + PrimaryKey: "AD888459-A733-4839-AAB4-3BAEA0CC9BDA", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('3AAAF90A-70D8-42FF-910E-AFF5C65FE49B')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('3AAAF90A-70D8-42FF-910E-AFF5C65FE49B')", + name: "Bulk Update 1762703537073 - 1", + PrimaryKey: "3AAAF90A-70D8-42FF-910E-AFF5C65FE49B", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('AA35F00A-57F7-46FD-8CAA-C879032E551E')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('AA35F00A-57F7-46FD-8CAA-C879032E551E')", + name: "Bulk Update 1762703537073 - 2", + PrimaryKey: "AA35F00A-57F7-46FD-8CAA-C879032E551E", + }, + ], + }, + }, + + "list-with-orderby": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts?$orderby=name&$top=5", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts", + value: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('00000000-0000-0000-0000-000000000000')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('00000000-0000-0000-0000-000000000000')", + PrimaryKey: "00000000-0000-0000-0000-000000000000", + CreationTimestamp: "2025-12-05T16:36:53Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-12-05T16:36:53Z", + ModifiedBy: "admin", + name: null, + hobby: "Should fail", + id_user: null, + my_calc: "you betcha", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('D61B338B-B06E-4985-ABFD-CB3B2EF4F4C4')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('D61B338B-B06E-4985-ABFD-CB3B2EF4F4C4')", + PrimaryKey: "D61B338B-B06E-4985-ABFD-CB3B2EF4F4C4", + CreationTimestamp: "2025-10-31T11:13:13Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T15:56:07Z", + ModifiedBy: "admin", + name: "Adam", + hobby: "trees", + id_user: "53D36C9A-8F90-4C21-A38F-F278D4F77718", + my_calc: "you betcha", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('1FE5EFB1-E42D-4AC1-94BF-9AA6AD11F9CE')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('1FE5EFB1-E42D-4AC1-94BF-9AA6AD11F9CE')", + PrimaryKey: "1FE5EFB1-E42D-4AC1-94BF-9AA6AD11F9CE", + CreationTimestamp: "2025-12-05T16:35:10Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-12-05T16:35:10Z", + ModifiedBy: "admin", + name: "After Delete Fail - 1764974109900", + hobby: "Should this succeed?", + id_user: null, + my_calc: "you betcha", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('D17802D1-7A37-494E-BE57-408129E0B251')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('D17802D1-7A37-494E-BE57-408129E0B251')", + PrimaryKey: "D17802D1-7A37-494E-BE57-408129E0B251", + CreationTimestamp: "2025-12-05T16:36:21Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-12-05T16:36:21Z", + ModifiedBy: "admin", + name: "After Delete Fail - 1764974181090", + hobby: "Should this succeed?", + id_user: null, + my_calc: "you betcha", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('593F9FCC-D71C-42A9-B9DF-AAF1B36C7D84')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('593F9FCC-D71C-42A9-B9DF-AAF1B36C7D84')", + PrimaryKey: "593F9FCC-D71C-42A9-B9DF-AAF1B36C7D84", + CreationTimestamp: "2025-12-05T16:36:53Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-12-05T16:36:53Z", + ModifiedBy: "admin", + name: "After Delete Fail - 1764974213190", + hobby: "Should this succeed?", + id_user: null, + my_calc: "you betcha", + }, + ], + }, + }, + + "list-with-pagination": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts?$top=2&$skip=2", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts", + value: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('8EE70436-18A8-4FF5-96F0-4DCE721496B2')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('8EE70436-18A8-4FF5-96F0-4DCE721496B2')", + PrimaryKey: "8EE70436-18A8-4FF5-96F0-4DCE721496B2", + CreationTimestamp: "2025-10-31T11:13:16Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T15:56:24Z", + ModifiedBy: "admin", + name: "Ben", + hobby: "zoo", + id_user: "D1B49B69-DE29-49BC-9BE8-35E0A47D843F", + my_calc: "you betcha", + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('A16D1D68-6A97-44C9-95FD-70A3206E6B69')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('A16D1D68-6A97-44C9-95FD-70A3206E6B69')", + PrimaryKey: "A16D1D68-6A97-44C9-95FD-70A3206E6B69", + CreationTimestamp: "2025-10-31T11:13:23Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T11:27:32Z", + ModifiedBy: "admin", + name: "Carter", + hobby: "Cooking", + id_user: null, + my_calc: "you betcha", + }, + ], + }, + }, + + "insert-return-minimal": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts", + method: "GET", + status: 204, + headers: { + "content-type": "application/json;charset=utf-8", + location: + "https://acme-dev.ottomatic.cloud/fmi/odata/v4/fmdapi_test.fmp12/contacts(ROWID=11073)", + }, + response: null, + }, + + insert: { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts", + method: "GET", + status: 201, + headers: { + "content-type": "application/json;charset=utf-8", + location: + "https://acme-dev.ottomatic.cloud/fmi/odata/v4/fmdapi_test.fmp12/contacts('F88124B8-53D1-482D-9EF9-08BA79702DA5')", + }, + response: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts/$entity", + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('F88124B8-53D1-482D-9EF9-08BA79702DA5')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('F88124B8-53D1-482D-9EF9-08BA79702DA5')", + PrimaryKey: "F88124B8-53D1-482D-9EF9-08BA79702DA5", + CreationTimestamp: "2025-12-15T11:32:53Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-12-15T11:32:53Z", + ModifiedBy: "admin", + name: "Capture test", + hobby: null, + id_user: null, + my_calc: "you betcha", + }, + }, + + "single-record": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts/$entity", + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + PrimaryKey: "B5BFBC89-03E0-47FC-ABB6-D51401730227", + CreationTimestamp: "2025-10-31T10:03:27Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T15:55:53Z", + ModifiedBy: "admin", + name: "Eric", + hobby: "Board games", + id_user: "1A269FA3-82E6-465A-94FA-39EE3F2F9B5D", + my_calc: "you betcha", + }, + }, + + "error-invalid-field-select": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts?$select=InvalidFieldName", + method: "GET", + status: 400, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + error: { + code: "8309", + message: + "The field named 'InvalidFieldName' does not exist in a specified table (9)", + }, + }, + }, + + "error-invalid-field-orderby": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts?$orderby=InvalidFieldName", + method: "GET", + status: 400, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + error: { + code: "8309", + message: + "The field named 'InvalidFieldName' does not exist in a specified table (9)", + }, + }, + }, + + "error-invalid-record-id": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts('00000000-0000-0000-0000-000000000000')", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts/$entity", + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('00000000-0000-0000-0000-000000000000')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('00000000-0000-0000-0000-000000000000')", + PrimaryKey: "00000000-0000-0000-0000-000000000000", + CreationTimestamp: "2025-12-05T16:36:53Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-12-05T16:36:53Z", + ModifiedBy: "admin", + name: null, + hobby: "Should fail", + id_user: null, + my_calc: "you betcha", + }, + }, + + "single-field": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')/name", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')/name", + value: "Eric", + }, + }, + + "simple-navigation": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')/users", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#users", + value: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('1A269FA3-82E6-465A-94FA-39EE3F2F9B5D')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('1A269FA3-82E6-465A-94FA-39EE3F2F9B5D')", + id: "1A269FA3-82E6-465A-94FA-39EE3F2F9B5D", + CreationTimestamp: "2025-08-03T11:38:20Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-11-03T12:34:42Z", + ModifiedBy: "admin", + name: "Test User", + id_customer: "3026B56E-0C6E-4F31-B666-EE8AC5B36542", + }, + ], + }, + }, + + "list with invalid expand": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts?$expand=users($select=not_real_field)", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts", + value: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + PrimaryKey: "B5BFBC89-03E0-47FC-ABB6-D51401730227", + CreationTimestamp: "2025-10-31T10:03:27Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T15:55:53Z", + ModifiedBy: "admin", + name: "Eric", + hobby: "Board games", + id_user: "1A269FA3-82E6-465A-94FA-39EE3F2F9B5D", + my_calc: "you betcha", + error: [ + { + error: { + code: "8309", + message: + 'FQL0009/(1:20): The column named "not_real_field" does not exist in table "users".', + }, + }, + ], + }, + ], + }, + }, + + "get with expand": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')?$expand=users", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts/$entity", + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + PrimaryKey: "B5BFBC89-03E0-47FC-ABB6-D51401730227", + CreationTimestamp: "2025-10-31T10:03:27Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T15:55:53Z", + ModifiedBy: "admin", + name: "Eric", + hobby: "Board games", + id_user: "1A269FA3-82E6-465A-94FA-39EE3F2F9B5D", + my_calc: "you betcha", + users: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('1A269FA3-82E6-465A-94FA-39EE3F2F9B5D')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('1A269FA3-82E6-465A-94FA-39EE3F2F9B5D')", + id: "1A269FA3-82E6-465A-94FA-39EE3F2F9B5D", + CreationTimestamp: "2025-08-03T11:38:20Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-11-03T12:34:42Z", + ModifiedBy: "admin", + name: "Test User", + id_customer: "3026B56E-0C6E-4F31-B666-EE8AC5B36542", + }, + ], + }, + }, + + "deep nested expand": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')?$expand=users($expand=user_customer($select=name))", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts/$entity", + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + PrimaryKey: "B5BFBC89-03E0-47FC-ABB6-D51401730227", + CreationTimestamp: "2025-10-31T10:03:27Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T15:55:53Z", + ModifiedBy: "admin", + name: "Eric", + hobby: "Board games", + id_user: "1A269FA3-82E6-465A-94FA-39EE3F2F9B5D", + my_calc: "you betcha", + users: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('1A269FA3-82E6-465A-94FA-39EE3F2F9B5D')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('1A269FA3-82E6-465A-94FA-39EE3F2F9B5D')", + id: "1A269FA3-82E6-465A-94FA-39EE3F2F9B5D", + CreationTimestamp: "2025-08-03T11:38:20Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-11-03T12:34:42Z", + ModifiedBy: "admin", + name: "Test User", + id_customer: "3026B56E-0C6E-4F31-B666-EE8AC5B36542", + user_customer: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/user_customer('3026B56E-0C6E-4F31-B666-EE8AC5B36542')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/user_customer('3026B56E-0C6E-4F31-B666-EE8AC5B36542')", + name: "test", + }, + ], + }, + ], + }, + }, + + "list with nested expand": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts?$top=2&$expand=users($expand=user_customer($select=name))", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts", + value: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + PrimaryKey: "B5BFBC89-03E0-47FC-ABB6-D51401730227", + CreationTimestamp: "2025-10-31T10:03:27Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T15:55:53Z", + ModifiedBy: "admin", + name: "Eric", + hobby: "Board games", + id_user: "1A269FA3-82E6-465A-94FA-39EE3F2F9B5D", + my_calc: "you betcha", + users: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('1A269FA3-82E6-465A-94FA-39EE3F2F9B5D')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('1A269FA3-82E6-465A-94FA-39EE3F2F9B5D')", + id: "1A269FA3-82E6-465A-94FA-39EE3F2F9B5D", + CreationTimestamp: "2025-08-03T11:38:20Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-11-03T12:34:42Z", + ModifiedBy: "admin", + name: "Test User", + id_customer: "3026B56E-0C6E-4F31-B666-EE8AC5B36542", + user_customer: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/user_customer('3026B56E-0C6E-4F31-B666-EE8AC5B36542')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/user_customer('3026B56E-0C6E-4F31-B666-EE8AC5B36542')", + name: "test", + }, + ], + }, + ], + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('D61B338B-B06E-4985-ABFD-CB3B2EF4F4C4')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('D61B338B-B06E-4985-ABFD-CB3B2EF4F4C4')", + PrimaryKey: "D61B338B-B06E-4985-ABFD-CB3B2EF4F4C4", + CreationTimestamp: "2025-10-31T11:13:13Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T15:56:07Z", + ModifiedBy: "admin", + name: "Adam", + hobby: "trees", + id_user: "53D36C9A-8F90-4C21-A38F-F278D4F77718", + my_calc: "you betcha", + users: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('53D36C9A-8F90-4C21-A38F-F278D4F77718')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('53D36C9A-8F90-4C21-A38F-F278D4F77718')", + id: "53D36C9A-8F90-4C21-A38F-F278D4F77718", + CreationTimestamp: "2025-10-31T15:55:56Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T15:56:03Z", + ModifiedBy: "admin", + name: "adam user", + id_customer: null, + user_customer: [], + }, + ], + }, + ], + }, + }, +} satisfies MockResponses; diff --git a/packages/fmodata/tests/fixtures/sample-metadata.json b/packages/fmodata/tests/fixtures/sample-metadata.json new file mode 100644 index 00000000..07fa27c7 --- /dev/null +++ b/packages/fmodata/tests/fixtures/sample-metadata.json @@ -0,0 +1,217 @@ +{ + "$Version": "4.01", + "@ServerVersion": "OData Engine 22.0.2", + "WebData.fmp12": { + "@Org.OData.Core.V1.SchemaVersion": "56947de023c1618", + "Addresses_": { + "$Kind": "EntityType", + "@TableID": "FMTID:1065109", + "$Key": ["ADDRESS address"], + "ADDRESS code": { + "@FieldID": "FMFID:4296032405", + "$Nullable": true, + "@AutoGenerated": true, + "@Index": true, + "$Type": "Edm.Decimal" + }, + "ADDRESS name": { + "@FieldID": "FMFID:12885966997", + "$Nullable": true, + "$Type": "Edm.String" + }, + "ADDRESS address": { + "@FieldID": "FMFID:17180934293", + "$Nullable": true, + "$Type": "Edm.String" + }, + "ADDRESS city": { + "@FieldID": "FMFID:25770868885", + "$Nullable": true, + "@Index": true, + "@Calculation": true, + "@Org.OData.Core.V1.Permissions": "Org.OData.Core.V1.Permission@Read", + "$Type": "Edm.String" + }, + "ADDRESS state": { + "@FieldID": "FMFID:30065836181", + "$Nullable": true, + "$DefaultValue": "TX", + "@Index": true, + "$Type": "Edm.String" + }, + "ADDRESS zip": { + "@FieldID": "FMFID:34360803477", + "$Nullable": true, + "@Index": true, + "$Type": "Edm.String" + }, + "full_address": { + "@FieldID": "FMFID:38655770773", + "$Nullable": true, + "@Calculation": true, + "@Org.OData.Core.V1.Permissions": "Org.OData.Core.V1.Permission@Read", + "$Type": "Edm.String" + }, + "search_address": { + "@FieldID": "FMFID:51540672661", + "$Nullable": true, + "@Global": true, + "@Org.OData.Core.V1.Permissions": "Org.OData.Core.V1.Permission@Read", + "$Type": "Edm.String" + }, + "created_date": { + "@FieldID": "FMFID:120260149397", + "$Nullable": true, + "@Index": true, + "$Type": "Edm.Date" + }, + "modified_date": { + "@FieldID": "FMFID:124555116693", + "$Type": "Edm.Date" + }, + "Tickets": { + "$Kind": "NavigationProperty", + "$Collection": true, + "$Type": "WebData.fmp12.Tickets_" + } + }, + "Builder_Contacts_": { + "$Kind": "EntityType", + "@TableID": "FMTID:1065107", + "$Key": ["__pk_builder_contacts_id"], + "__pk_builder_contacts_id": { + "@FieldID": "FMFID:4296032403", + "$Type": "Edm.String" + }, + "CreationTimestamp": { + "@FieldID": "FMFID:8590999699", + "$DefaultValue": "CURRENT_TIMESTAMP", + "$Type": "Edm.DateTimeOffset" + }, + "CreatedBy": { + "@FieldID": "FMFID:12885966995", + "$DefaultValue": "USER", + "$Type": "Edm.String" + }, + "ModificationTimestamp": { + "@FieldID": "FMFID:17180934291", + "$DefaultValue": "CURRENT_TIMESTAMP", + "@VersionID": true, + "$Type": "Edm.DateTimeOffset" + }, + "ModifiedBy": { + "@FieldID": "FMFID:21475901587", + "$DefaultValue": "USER", + "$Type": "Edm.String" + }, + "_fk_builder_id": { + "@FieldID": "FMFID:25770868883", + "$Nullable": true, + "@Index": true, + "$Type": "Edm.String" + }, + "First_name": { + "@FieldID": "FMFID:30065836179", + "$Nullable": true, + "@Index": true, + "$Type": "Edm.String" + }, + "Last_name": { + "@FieldID": "FMFID:34360803475", + "$Nullable": true, + "@Index": true, + "$Type": "Edm.String" + }, + "Email": { + "@FieldID": "FMFID:47245705363", + "$Nullable": true, + "@Index": true, + "$Type": "Edm.String" + }, + "web_portal_access": { + "@FieldID": "FMFID:55835639955", + "$Nullable": true, + "@Index": true, + "$Type": "Edm.String" + }, + "Addresses": { + "$Kind": "NavigationProperty", + "$Type": "WebData.fmp12.Addresses_" + } + }, + "Tickets_": { + "$Kind": "EntityType", + "@TableID": "FMTID:1065110", + "STATIC_1": { + "@FieldID": "FMFID:4296032406", + "$Nullable": true, + "$DefaultValue": "1", + "@Index": true, + "$Type": "Edm.Decimal" + }, + "ticket_id": { + "@FieldID": "FMFID:8590999702", + "$Nullable": true, + "@AutoGenerated": true, + "@Index": true, + "$Type": "Edm.Decimal" + }, + "work_order_id": { + "@FieldID": "FMFID:12885966998", + "$Nullable": true, + "@Index": true, + "$Type": "Edm.String" + }, + "ticket_status": { + "@FieldID": "FMFID:17180934294", + "$Nullable": true, + "@Index": true, + "$Type": "Edm.String" + }, + "description": { + "@FieldID": "FMFID:21475901590", + "$Nullable": true, + "$Type": "Edm.String" + }, + "priority": { + "@FieldID": "FMFID:25770868886", + "$Nullable": true, + "@Index": true, + "$Type": "Edm.Decimal" + }, + "due_date": { + "@FieldID": "FMFID:30065836182", + "$Nullable": true, + "$Type": "Edm.Date" + }, + "photo": { + "@FieldID": "FMFID:34360803478", + "$Nullable": true, + "@ExternalSecurePath": "WebData.fmp12/", + "$Type": "Edm.Binary" + }, + "created_timestamp": { + "@FieldID": "FMFID:38655770774", + "$Nullable": true, + "$DefaultValue": "CURRENT_TIMESTAMP", + "$Type": "Edm.DateTimeOffset" + }, + "Address": { + "$Kind": "NavigationProperty", + "$Type": "WebData.fmp12.Addresses_" + } + }, + "Addresses": { + "$Kind": "EntitySet", + "$Type": "WebData.fmp12.Addresses_" + }, + "Builder_Contacts": { + "$Kind": "EntitySet", + "$Type": "WebData.fmp12.Builder_Contacts_" + }, + "Tickets": { + "$Kind": "EntitySet", + "$Type": "WebData.fmp12.Tickets_" + } + } +} diff --git a/packages/fmodata/tests/fixtures/sample-occurances.ts b/packages/fmodata/tests/fixtures/sample-occurances.ts new file mode 100644 index 00000000..b556b595 --- /dev/null +++ b/packages/fmodata/tests/fixtures/sample-occurances.ts @@ -0,0 +1,79 @@ +import { + fmTableOccurrence, + textField, + numberField, + dateField, + containerField, + timestampField, +} from "@proofkit/fmodata"; +import { z } from "zod/v4"; + +// ============================================================================ +// Define all TableOccurrences with navigationPaths +// ============================================================================ + +export const Addresses = fmTableOccurrence( + "Addresses", + { + "ADDRESS code": numberField().entityId("FMFID:4296032405"), + "ADDRESS name": textField().entityId("FMFID:12885966997"), + "ADDRESS address": textField().primaryKey().entityId("FMFID:17180934293"), // Key field - never null + "ADDRESS city": textField().readOnly().entityId("FMFID:25770868885"), + "ADDRESS state": textField().entityId("FMFID:30065836181"), + "ADDRESS zip": textField().entityId("FMFID:34360803477"), + full_address: textField().readOnly().entityId("FMFID:38655770773"), + search_address: textField().readOnly().entityId("FMFID:51540672661"), + created_date: dateField().entityId("FMFID:120260149397"), // Edm.Date + modified_date: dateField().notNull().entityId("FMFID:124555116693"), // Not marked as nullable in metadata + }, + { + entityId: "FMTID:1065109", + navigationPaths: ["Tickets"], + }, +); + +export const Builder_Contacts = fmTableOccurrence( + "Builder_Contacts", + { + __pk_builder_contacts_id: textField() + .primaryKey() + .entityId("FMFID:4296032403"), // Key field - never null + CreationTimestamp: timestampField().notNull().entityId("FMFID:8590999699"), // DateTimeOffset, not nullable + CreatedBy: textField().notNull().entityId("FMFID:12885966995"), // Not nullable + ModificationTimestamp: timestampField() + .notNull() + .entityId("FMFID:17180934291"), // DateTimeOffset, not nullable + ModifiedBy: textField().notNull().entityId("FMFID:21475901587"), // Not nullable + _fk_builder_id: textField().entityId("FMFID:25770868883"), + First_name: textField().entityId("FMFID:30065836179"), + Last_name: textField().entityId("FMFID:34360803475"), + Email: textField().entityId("FMFID:47245705363"), + web_portal_access: textField().entityId("FMFID:55835639955"), + }, + { + entityId: "FMTID:1065107", + navigationPaths: ["Addresses"], + }, +); + +export const Tickets = fmTableOccurrence( + "Tickets", + { + STATIC_1: numberField().entityId("FMFID:4296032406"), + ticket_id: numberField().primaryKey().entityId("FMFID:8590999702"), + work_order_id: textField().entityId("FMFID:12885966998"), + ticket_status: textField().entityId("FMFID:17180934294"), + description: textField().entityId("FMFID:21475901590"), + priority: numberField().entityId("FMFID:25770868886"), + due_date: dateField().entityId("FMFID:30065836182"), // Edm.Date + photo: containerField().entityId("FMFID:34360803478"), // Edm.Binary (base64 string) + created_timestamp: timestampField().entityId("FMFID:38655770774"), // DateTimeOffset + }, + { + entityId: "FMTID:1065110", + navigationPaths: ["Addresses"], + }, +); + +// Export as array for use with database() +export const occurrences = [Addresses, Builder_Contacts, Tickets]; diff --git a/packages/fmodata/tests/fmids-validation.test.ts b/packages/fmodata/tests/fmids-validation.test.ts new file mode 100644 index 00000000..6b87319c --- /dev/null +++ b/packages/fmodata/tests/fmids-validation.test.ts @@ -0,0 +1,234 @@ +/** + * Tests for BaseTable and TableOccurrence with entity IDs + * + * These tests verify: + * 1. Successful instantiation with entity IDs using setup functions + * 2. Entity ID functionality works correctly + * 3. Backward compatibility of regular classes + */ + +import { describe, it, expect } from "vitest"; +import { z } from "zod/v4"; +import { fmTableOccurrence, textField, FMTable } from "@proofkit/fmodata"; +import { createMockClient, users, contacts } from "./utils/test-setup"; + +describe("BaseTable with entity IDs", () => { + it("should create a table with fmfIds using fmTableOccurrence", () => { + const table = fmTableOccurrence("test_table", { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + email: textField().entityId("FMFID:3"), + }); + + expect(table).toBeInstanceOf(FMTable); + const fmfIds = (table as any)[FMTable.Symbol.BaseTableConfig]?.fmfIds; + expect(fmfIds).toBeDefined(); + expect(fmfIds?.id).toBe("FMFID:1"); + expect(fmfIds?.name).toBe("FMFID:2"); + expect(fmfIds?.email).toBe("FMFID:3"); + expect(fmfIds !== undefined).toBe(true); + }); + + it("should enforce fmfIds format with template literal type", () => { + // This should work + const table = fmTableOccurrence("test_table", { + id: textField().primaryKey().entityId("FMFID:123"), + name: textField().entityId("FMFID:abc"), + }); + + expect(table.id.entityId).toBe("FMFID:123"); + }); + + it("should inherit all table functionality", () => { + const table = fmTableOccurrence("test_table", { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().readOnly().entityId("FMFID:2"), + email: textField().entityId("FMFID:3"), + }); + + expect((table as any)[FMTable.Symbol.Schema]).toBeDefined(); + expect((table as any)[FMTable.Symbol.BaseTableConfig].idField).toBe("id"); + expect((table as any)[FMTable.Symbol.BaseTableConfig].readOnly).toContain( + "name", + ); + }); +}); + +describe("TableOccurrence with entity IDs", () => { + it("should create a table with entityId using fmTableOccurrence", () => { + const tableOcc = fmTableOccurrence( + "test_table", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + }, + { + entityId: "FMTID:100", + }, + ); + + expect(tableOcc).toBeInstanceOf(FMTable); + expect((tableOcc as any)[FMTable.Symbol.EntityId]).toBe("FMTID:100"); + expect((tableOcc as any)[FMTable.Symbol.Name]).toBe("test_table"); + expect((tableOcc as any)[FMTable.Symbol.EntityId] !== undefined).toBe(true); + }); + + it("should work with fmTableOccurrence helper", () => { + const tableOcc = fmTableOccurrence( + "test_table", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + }, + { + entityId: "FMTID:100", + }, + ); + + expect((tableOcc as any)[FMTable.Symbol.EntityId]).toBe("FMTID:100"); + expect((tableOcc as any)[FMTable.Symbol.EntityId] !== undefined).toBe(true); + }); + + it("should inherit all table functionality", () => { + const tableOcc = fmTableOccurrence( + "test_table", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + email: textField().entityId("FMFID:3"), + }, + { + entityId: "FMTID:100", + defaultSelect: "all", + }, + ); + + expect((tableOcc as any)[FMTable.Symbol.DefaultSelect]).toBe("all"); + expect((tableOcc as any)[FMTable.Symbol.NavigationPaths]).toBeDefined(); + }); +}); + +describe("Type enforcement (compile-time)", () => { + it("should allow tables with and without entity IDs", () => { + const regularTableOcc = fmTableOccurrence("test", { + id: textField().primaryKey(), + name: textField(), + }); + + const withIdsTableOcc = fmTableOccurrence( + "test", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + }, + { + entityId: "FMTID:100", + }, + ); + + expect(regularTableOcc).toBeDefined(); + expect(withIdsTableOcc).toBeDefined(); + expect( + (withIdsTableOcc as any)[FMTable.Symbol.BaseTableConfig].fmfIds, + ).toBeDefined(); + }); + + it("should not allow mixture of occurrences when creating a database", () => { + const regularTableOcc = fmTableOccurrence("regular", { + id: textField().primaryKey(), + name: textField(), + }); + + const withIdsTableOcc = fmTableOccurrence( + "withIds", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + }, + { + entityId: "FMTID:100", + }, + ); + + // Note: The new ORM pattern doesn't have the same mixing restriction + // Both tables can be used together regardless of entity IDs + expect(() => { + createMockClient().database("test"); + }).not.toThrow(); + + // Should not throw when mixed if useEntityIds is set to false + expect(() => { + createMockClient().database("test", { + useEntityIds: false, + }); + }).not.toThrow(); + + // Note: The new ORM pattern handles entity IDs differently + // This test may need adjustment based on actual behavior + }); + + it("should create table without entity IDs", () => { + const tableOcc = fmTableOccurrence("test", { + id: textField().primaryKey(), + name: textField(), + }); + + expect(tableOcc).toBeInstanceOf(FMTable); + }); +}); + +describe("Navigation type validation", () => { + it("should allow navigation with any table", () => { + // Navigation can use any table - unified classes allow mixing + const relatedTO = fmTableOccurrence( + "related", + { + id: textField().primaryKey().entityId("FMFID:3"), + }, + { + entityId: "FMTID:200", + }, + ); + + const mainTO = fmTableOccurrence( + "main", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + }, + { + entityId: "FMTID:100", + navigationPaths: ["related"], + }, + ); + + expect(mainTO).toBeDefined(); + expect((relatedTO as any)[FMTable.Symbol.EntityId]).toBe("FMTID:200"); + }); +}); + +describe("Helper functions", () => { + it("should create table with fmTableOccurrence helper", () => { + const to = fmTableOccurrence("test", { + id: textField().primaryKey(), + }); + + expect(to).toBeInstanceOf(FMTable); + expect((to as any)[FMTable.Symbol.Name]).toBe("test"); + }); + + it("should create table with entity IDs using fmTableOccurrence helper", () => { + const to = fmTableOccurrence( + "test", + { + id: textField().primaryKey().entityId("FMFID:1"), + }, + { + entityId: "FMTID:100", + }, + ); + + expect(to).toBeInstanceOf(FMTable); + expect((to as any)[FMTable.Symbol.EntityId]).toBe("FMTID:100"); + }); +}); diff --git a/packages/fmodata/tests/insert.test.ts b/packages/fmodata/tests/insert.test.ts new file mode 100644 index 00000000..dac78d18 --- /dev/null +++ b/packages/fmodata/tests/insert.test.ts @@ -0,0 +1,138 @@ +/** + * Insert and Update Tests + * + * Tests for the insert() and update() methods with returnFullRecord option. + */ + +import { describe, it, expect, expectTypeOf } from "vitest"; +import { createMockFetch } from "./utils/mock-fetch"; +import { mockResponses } from "./fixtures/responses"; +import { createMockClient, contacts, users } from "./utils/test-setup"; + +describe("insert and update operations with returnFullRecord", () => { + const client = createMockClient(); + + it("should insert a record and return the created record with metadata", async () => { + const db = client.database("fmdapi_test.fmp12", {}); + + const result = await db + .from(contacts) + .insert({ + name: "Capture test", + }) + .execute({ + fetchHandler: createMockFetch(mockResponses["insert"]!), + }); + + // Verify no errors + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + // @ts-expect-error - should not return odata annotations + result.data?.["@editLink"]; + // @ts-expect-error - should not return odata annotations + result.data?.["@id"]; + + expect(result.data).not.toHaveProperty("@editLink"); + expect(result.data).not.toHaveProperty("@id"); + + // Verify the inserted record has expected structure (not specific values that change with captures) + expect(result.data).toHaveProperty("PrimaryKey"); + expect(typeof result.data?.PrimaryKey).toBe("string"); + expect(result.data?.PrimaryKey).toMatch( + /^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$/i, + ); + + // Check fields that should have stable values + expect(result.data).toMatchObject({ + name: "Capture test", + hobby: null, + id_user: null, + my_calc: "you betcha", + }); + }); + + it("should allow returnFullRecord=false to get just ROWID", async () => { + const db = client.database("fmdapi_test.fmp12"); + + const result = await db + .from(contacts) + .insert( + { + name: "Capture test", + }, + // Set returnFullRecord to false to get just the ROWID + { returnFullRecord: false }, + ) + .execute({ + fetchHandler: createMockFetch(mockResponses["insert-return-minimal"]!), + }); + + // Type check: when returnFullRecord is false, result should only have ROWID + expectTypeOf(result.data).toEqualTypeOf<{ ROWID: number } | undefined>(); + + // Type check: when returnFullRecord is true or omitted, result should have full record + const fullResult = await db + .from(contacts) + .insert( + { + name: "anything", + }, + { returnFullRecord: true }, + ) + .execute({ + fetchHandler: createMockFetch(mockResponses["insert"]!), + }); + + expectTypeOf(fullResult.data).not.toEqualTypeOf< + { ROWID: number } | undefined + >(); + + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + + // when returnFullRecord=false, the library should extract the ROWID from the location header + expect(result.data).toHaveProperty("ROWID"); + expect(typeof result.data?.ROWID).toBe("number"); + expect(result.data?.ROWID).toBeGreaterThan(0); + }); + + it("should allow returnFullRecord=true for update to get full record", async () => { + const db = client.database("fmdapi_test.fmp12"); + + // Test with returnFullRecord=true + const result = await db + .from(contacts) + .update({ name: "Updated name" }, { returnFullRecord: true }) + .byId("331F5862-2ABF-4FB6-AA24-A00F7359BDDA") + .execute({ + fetchHandler: createMockFetch(mockResponses["insert"]!), // Reuse insert mock, same structure + }); + + // Type check: when returnFullRecord is true, result should have full record + expectTypeOf(result.data).not.toEqualTypeOf< + { updatedCount: number } | undefined + >(); + + // Test without returnFullRecord (default - returns count) + const countResult = await db + .from(contacts) + .update({ name: "Updated name" }) + .byId("331F5862-2ABF-4FB6-AA24-A00F7359BDDA") + .execute({ + fetchHandler: createMockFetch(mockResponses["insert"]!), + }); + + // Type check: default should return count + expectTypeOf(countResult.data).toEqualTypeOf< + { updatedCount: number } | undefined + >(); + + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + + // when returnFullRecord=true, should return the full updated record + expect(result.data).toHaveProperty("PrimaryKey"); + expect(typeof result.data?.PrimaryKey).toBe("string"); + expect(result.data?.name).toBe("Capture test"); // From mock response + }); +}); diff --git a/packages/fmodata/tests/list-methods.test.ts b/packages/fmodata/tests/list-methods.test.ts new file mode 100644 index 00000000..13aec45a --- /dev/null +++ b/packages/fmodata/tests/list-methods.test.ts @@ -0,0 +1,15 @@ +import { describe, it } from "vitest"; +import { createMockClient, users } from "./utils/test-setup"; + +const client = createMockClient(); +const db = client.database("test_db"); + +describe("list methods", () => { + it("should not run query unless you await the method", async () => { + const { data, error } = await db + .from(users) + .list() + .select({ CreatedBy: users.CreatedBy, active: users.active }) + .execute(); + }); +}); diff --git a/packages/fmodata/tests/mock.test.ts b/packages/fmodata/tests/mock.test.ts new file mode 100644 index 00000000..89cc44fd --- /dev/null +++ b/packages/fmodata/tests/mock.test.ts @@ -0,0 +1,245 @@ +/** + * Mock Fetch Tests + * + * These tests use captured responses from real FileMaker OData API calls + * to test the client without requiring a live server connection. + * + * The mock responses are stored in tests/fixtures/responses.ts and are + * captured using the capture script: pnpm capture + * + * To add new tests: + * 1. First, ensure you have a corresponding mock response captured + * 2. Create a test that uses the same query pattern + * 3. The mock fetch will automatically match the request URL to the stored response + */ + +import { describe, it, expect, expectTypeOf } from "vitest"; +import { createMockFetch, simpleMock } from "./utils/mock-fetch"; +import { mockResponses } from "./fixtures/responses"; +import { createMockClient, contacts } from "./utils/test-setup"; +import { eq } from "@proofkit/fmodata"; +import { assert } from "console"; + +describe("Mock Fetch Tests", () => { + const client = createMockClient(); + const db = client.database("fmdapi_test.fmp12"); + + describe("List queries", () => { + it("should execute a basic list query using mocked response", async () => { + const result = await db + .from(contacts) + .list() + .execute({ + fetchHandler: createMockFetch(mockResponses["list-with-pagination"]!), + }); + + expect(result).toBeDefined(); + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + if (!result.data) throw new Error("Expected result.data to be defined"); + expect(Array.isArray(result.data)).toBe(true); + + const firstRecord = result.data[0]; + expect(firstRecord).not.toHaveProperty("@id"); + expect(firstRecord).not.toHaveProperty("@editLink"); + }); + + it("should return odata annotations if requested", async () => { + const result = await db + .from(contacts) + .list() + .execute({ + fetchHandler: createMockFetch(mockResponses["list-with-pagination"]!), + includeODataAnnotations: true, + }); + + expect(result).toBeDefined(); + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + if (!result.data) throw new Error("Expected result.data to be defined"); + expect(Array.isArray(result.data)).toBe(true); + + const firstRecord = result.data[0]; + expect(firstRecord).toHaveProperty("@id"); + expect(firstRecord).toHaveProperty("@editLink"); + }); + + it("should execute a list query with $select using mocked response", async () => { + const result = await db + .from(contacts) + .list() + .select({ name: contacts.name, PrimaryKey: contacts.PrimaryKey }) + .execute({ + fetchHandler: createMockFetch(mockResponses["list-with-pagination"]!), + }); + + expect(result).toBeDefined(); + if (result.error) { + console.log(result.error); + } + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + if (!result.data) throw new Error("Expected result.data to be defined"); + if (result.data.length > 0) { + const firstRecord = result.data[0] as any; + // Verify selected fields are present (if captured response has them) + expect(firstRecord).toBeDefined(); + } + }); + + it("should execute a list query with $top using mocked response", async () => { + const result = await db + .from(contacts) + .list() + .top(5) + .execute({ + fetchHandler: createMockFetch(mockResponses["list-with-orderby"]!), + }); + + expect(result).toBeDefined(); + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + if (!result.data) throw new Error("Expected result.data to be defined"); + // If the mock response limits results, verify we got limited results + if (result.data.length > 0) { + expect(result.data.length).toBeLessThanOrEqual(5); + } + }); + + it("should execute a list query with $orderby using mocked response", async () => { + const result = await db + .from(contacts) + .list() + .orderBy("name") + .top(5) + .execute({ + fetchHandler: createMockFetch(mockResponses["list-with-orderby"]!), + }); + + expect(result).toBeDefined(); + expect(result.data).toBeDefined(); + expect(result.error).toBeUndefined(); + expect(Array.isArray(result.data)).toBe(true); + }); + + it("should error if more than 1 record is returned in single mode", async () => { + const result = await db + .from(contacts) + .list() + .single() + .execute({ + fetchHandler: createMockFetch(mockResponses["list-with-orderby"]!), + }); + + expect(result).toBeDefined(); + expect(result.data).toBeUndefined(); + expect(result.error).toBeDefined(); + }); + it("should not error if no records are returned in maybeSingle mode", async () => { + const result = await db + .from(contacts) + .list() + .maybeSingle() + .execute({ + fetchHandler: simpleMock({ status: 200, body: { value: [] } }), + }); + + expect(result.data).toBeNull(); + expect(result.error).toBeUndefined(); + + assert(!result.error, "Expected no error"); + expectTypeOf(result.data).toBeNullable(); + }); + it("should error if more than 1 record is returned in maybeSingle mode", async () => { + const result = await db + .from(contacts) + .list() + .maybeSingle() + .execute({ + // TODO: add better mock data + fetchHandler: simpleMock({ status: 200, body: { value: [{}, {}] } }), + }); + + expect(result.data).toBeUndefined(); + expect(result.error).toBeDefined(); + }); + + it("should execute a list query with pagination using mocked response", async () => { + const result = await db + .from(contacts) + .list() + .top(2) + .skip(2) + .execute({ + fetchHandler: createMockFetch(mockResponses["list-with-pagination"]!), + }); + + expect(result).toBeDefined(); + expect(result.data).toBeDefined(); + expect(result.error).toBeUndefined(); + expect(Array.isArray(result.data)).toBe(true); + }); + }); + + describe("Single record queries", () => { + it("should execute a single record query using mocked response", async () => { + const result = await db + .from(contacts) + .get("B5BFBC89-03E0-47FC-ABB6-D51401730227") + .execute({ + fetchHandler: createMockFetch(mockResponses["single-record"]!), + }); + + expect(result).toBeDefined(); + expect(result.data).toBeDefined(); + expect(result.error).toBeUndefined(); + + // Single record queries return the record directly, not wrapped in { value: [...] } + expect(typeof result.data).toBe("object"); + }); + + it("should execute a single field query using mocked response", async () => { + // Note: Type errors for wrong columns are now caught at compile time + // We can't easily test this with @ts-expect-error since we'd need a wrong table's column + + const result = await db + .from(contacts) + .get("B5BFBC89-03E0-47FC-ABB6-D51401730227") + .getSingleField(contacts.name) + .execute({ + fetchHandler: createMockFetch(mockResponses["single-field"]!), + }); + + expect(result).toBeDefined(); + expect(result.data).toBeDefined(); + expect(result.error).toBeUndefined(); + + if (result.data) { + expectTypeOf(result.data).toEqualTypeOf(); + } + + // Single field queries return the field value directly + expect(result.data).toBe("Eric"); + }); + }); + + describe("Query builder methods", () => { + it("should generate correct query strings even with mocks", () => { + const queryString = db + .from(contacts) + .list() + .select({ name: contacts.name, hobby: contacts.hobby }) + .where(eq(contacts.name, "John")) + .orderBy("name") + .top(10) + .getQueryString(); + + expect(queryString).toContain("$select"); + expect(queryString).toContain("name"); + expect(queryString).toContain("hobby"); + expect(queryString).toContain("$filter"); + expect(queryString).toContain("$orderby"); + expect(queryString).toContain("$top"); + }); + }); +}); diff --git a/packages/fmodata/tests/navigate.test.ts b/packages/fmodata/tests/navigate.test.ts new file mode 100644 index 00000000..e4714d8e --- /dev/null +++ b/packages/fmodata/tests/navigate.test.ts @@ -0,0 +1,183 @@ +/** + * Navigation Tests + * + * Tests for the navigate() function on RecordBuilder instances. + * This validates that navigation properties can be accessed from record instances. + */ + +import { describe, it, expect, expectTypeOf } from "vitest"; +import { + createMockClient, + users, + invoices, + contacts, + lineItems, + arbitraryTable, +} from "./utils/test-setup"; + +describe("navigate", () => { + const client = createMockClient(); + + it("should not allow navigation to an invalid relation", () => { + const db = client.database("test_db"); + const record = db.from(users).get("test-id"); + + // @ts-expect-error - arbitraryTable is not a valid navigation target + record.navigate(arbitraryTable); + + const entitySet = db.from(contacts); + + // @ts-expect-error - bad is not a valid navigation target + const entityQueryBuilder = entitySet.navigate("bad"); + + // expect( + // entityQueryBuilder + // .list() + // // this won't error because the table is already invalid, so we've gotten back to any state + // .select({ arbitrary_field: arbitraryTable.name }) + // .getQueryString(), + // ).toBe("/contacts/bad?$select=name&$top=1000"); + + // this one should work + entitySet.navigate(users); + + // @ts-expect-error - arbitraryTable is not a valid expand target + record.expand(arbitraryTable); + }); + + it("should return a QueryBuilder when navigating to a valid relation", () => { + const db = client.database("test_db"); + const record = db.from(contacts).get("test-id"); + + const queryBuilder = record.navigate(users); + + expectTypeOf(queryBuilder.select).parameter(0).not.toEqualTypeOf(); + + // Use actual fields from usersBase schema + expect( + queryBuilder + .select({ name: users.name, active: users.active }) + .getQueryString(), + ).toBe("/contacts('test-id')/users?$select=name,active"); + }); + + it("should navigate w/o needing to get a record first", () => { + const db = client.database("test_db"); + const queryBuilder = db.from(contacts).navigate(users).list(); + + const queryString = queryBuilder.getQueryString(); + + expect(queryString).toBe("/contacts/users?$top=1000"); + }); + + it("should handle expands", () => { + const db = client.database("test_db"); + expect( + db + .from(contacts) + .navigate(users) + .list() + .expand(contacts) + .getQueryString(), + ).toBe("/contacts/users?$top=1000&$expand=contacts"); + + const entitySet = db.from(users).list(); + expectTypeOf(entitySet.expand).parameter(0).not.toEqualTypeOf(); + + expect(db.from(users).list().expand(contacts).getQueryString()).toBe( + "/users?$top=1000&$expand=contacts", + ); + }); + + it("should provide type-safe navigation with invoices and lineItems", () => { + const db = client.database("test_db"); + + // contacts -> invoices navigation + const invoiceQuery = db.from(contacts).navigate(invoices).list(); + expectTypeOf(invoiceQuery.select).parameter(0).not.toEqualTypeOf(); + invoiceQuery.select({ + invoiceNumber: invoices.invoiceNumber, + total: invoices.total, + // @ts-expect-error - not valid since we navigated to invoices, not contacts + other: contacts.name, + }); + + // invoices -> lineItems navigation + const lineItemsQuery = db.from(invoices).navigate(lineItems).list(); + expectTypeOf(lineItemsQuery.select) + .parameter(0) + .not.toEqualTypeOf(); + + // Should allow valid fields from lineItems schema + lineItemsQuery.select({ + description: lineItems.description, + quantity: lineItems.quantity, + }); + + expect(lineItemsQuery.getQueryString()).toBe( + "/invoices/lineItems?$top=1000", + ); + }); + + it("should support multi-hop navigation patterns", async () => { + const db = client.database("test_db"); + + const query = db + .from(contacts) + .navigate(invoices) + .navigate(lineItems) + .list(); + expect(query.getQueryString()).toBe( + "/contacts/invoices/lineItems?$top=1000", + ); + + // Navigate from a specific contact to their invoices + const contactInvoices = db + .from(contacts) + .get("contact-123") + .navigate(invoices); + + expect( + contactInvoices + .select({ + invoiceNumber: invoices.invoiceNumber, + status: invoices.status, + }) + .getQueryString(), + ).toBe("/contacts('contact-123')/invoices?$select=invoiceNumber,status"); + + // Navigate from a specific invoice to its line items + const invoiceLineItems = db.from(invoices).get("inv-456").expand(lineItems); + + expect( + invoiceLineItems + .select({ + invoiceNumber: invoices.invoiceNumber, + total: invoices.total, + }) + .getQueryString(), + ).toBe( + "/invoices('inv-456')?$select=invoiceNumber,total&$expand=lineItems", + ); + + const nestedExpand = db + .from(contacts) + .get("contact-123") + .expand(invoices, (b: any) => + b.expand(lineItems, (b: any) => + b.select({ + description: lineItems.description, + quantity: lineItems.quantity, + }), + ), + ); + + expect(nestedExpand.getQueryString()).toBe( + "/contacts('contact-123')?$expand=invoices($expand=lineItems($select=description,quantity))", + ); + + // await nestedExpand.execute({ + // fetchHandler: simpleMock({ status: 200, body: {} }), + // }); + }); +}); diff --git a/packages/fmodata/tests/orm-api.test.ts b/packages/fmodata/tests/orm-api.test.ts new file mode 100644 index 00000000..1ac7c939 --- /dev/null +++ b/packages/fmodata/tests/orm-api.test.ts @@ -0,0 +1,312 @@ +import { describe, it, expect } from "vitest"; +import { + fmTableOccurrence, + textField, + numberField, + timestampField, + eq, + gt, + and, + or, + contains, + isColumn, + type Column, + FMTable, +} from "@proofkit/fmodata"; +import { z } from "zod/v4"; + +describe("ORM API", () => { + describe("Field Builders", () => { + it("should create a text field", () => { + const field = textField(); + + const config = field._getConfig(); + expect(config.fieldType).toBe("text"); + expect(config.notNull).toBe(false); + expect(config.primaryKey).toBe(false); + }); + + it("should chain methods correctly", () => { + const field = textField().notNull().entityId("FMFID:1"); + const config = field._getConfig(); + expect(config.notNull).toBe(true); + expect(config.entityId).toBe("FMFID:1"); + }); + + it("should mark primary key as read-only", () => { + const field = textField().primaryKey(); + const config = field._getConfig(); + expect(config.primaryKey).toBe(true); + expect(config.readOnly).toBe(true); + }); + + it("should support output validator", () => { + const validator = z.enum(["a", "b", "c"]); + const field = textField().readValidator(validator); + const config = field._getConfig(); + expect(config.outputValidator).toBe(validator); + }); + + it("should support input validator", () => { + const validator = z.boolean().transform((v) => (v ? 1 : 0)); + const field = numberField().writeValidator(validator); + const config = field._getConfig(); + expect(config.inputValidator).toBe(validator); + }); + + it("should support both read and write validators", () => { + const readValidator = z.coerce.boolean(); + const writeValidator = z.boolean().transform((v) => (v ? 1 : 0)); + const field = numberField() + .readValidator(readValidator) + .writeValidator(writeValidator); + const config = field._getConfig(); + expect(config.outputValidator).toBe(readValidator); + expect(config.inputValidator).toBe(writeValidator); + }); + }); + + describe("Table Definition", () => { + it("should create a table occurrence", () => { + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().notNull().entityId("FMFID:2"), + email: textField().entityId("FMFID:3"), + }, + { + entityId: "FMTID:100", + defaultSelect: "schema", + navigationPaths: ["contacts"], + }, + ); + + expect((users as any)[FMTable.Symbol.Name]).toBe("users"); + expect((users as any)[FMTable.Symbol.EntityId]).toBe("FMTID:100"); + expect((users as any)[FMTable.Symbol.NavigationPaths]).toEqual([ + "contacts", + ]); + }); + + it("should create column references", () => { + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().notNull().entityId("FMFID:2"), + }, + { entityId: "FMTID:100" }, + ); + + expect(isColumn(users.id)).toBe(true); + expect(users.id.fieldName).toBe("id"); + expect(users.id.entityId).toBe("FMFID:1"); + expect(users.id.tableName).toBe("users"); + expect(users.id.tableEntityId).toBe("FMTID:100"); + }); + + it("should extract metadata correctly", () => { + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().notNull().entityId("FMFID:2"), + email: textField().entityId("FMFID:3"), + createdAt: timestampField().readOnly().entityId("FMFID:4"), + }, + { entityId: "FMTID:100" }, + ); + + const config = (users as any)[FMTable.Symbol.BaseTableConfig]; + expect(config.idField).toBe("id"); + expect(config.required).toContain("name"); + expect(config.readOnly).toContain("id"); // primary key + expect(config.readOnly).toContain("createdAt"); + expect(config.fmfIds).toEqual({ + id: "FMFID:1", + name: "FMFID:2", + email: "FMFID:3", + createdAt: "FMFID:4", + }); + }); + + it("should store inputSchema when writeValidators are present", () => { + const writeValidator = z.boolean().transform((v) => (v ? 1 : 0)); + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + active: numberField().writeValidator(writeValidator), + name: textField(), + }, + {}, + ); + + const config = (users as any)[FMTable.Symbol.BaseTableConfig]; + expect(config.inputSchema).toBeDefined(); + expect(config.inputSchema?.active).toBe(writeValidator); + expect(config.inputSchema?.name).toBeUndefined(); // No writeValidator for name + }); + + it("should not store inputSchema when no writeValidators are present", () => { + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + name: textField(), + }, + {}, + ); + + const config = (users as any)[FMTable.Symbol.BaseTableConfig]; + expect(config.inputSchema).toBeUndefined(); + }); + }); + + describe("Column References", () => { + it("should identify columns", () => { + const users = fmTableOccurrence( + "users", + { id: textField(), name: textField() }, + {}, + ); + + expect(isColumn(users.id)).toBe(true); + expect(isColumn(users.name)).toBe(true); + expect(isColumn("not a column")).toBe(false); + }); + + it("should get field identifier", () => { + const users = fmTableOccurrence( + "users", + { id: textField().entityId("FMFID:1") }, + {}, + ); + + expect(users.id.getFieldIdentifier(false)).toBe("id"); + expect(users.id.getFieldIdentifier(true)).toBe("FMFID:1"); + }); + + it("should check table membership", () => { + const users = fmTableOccurrence("users", { id: textField() }, {}); + + expect(users.id.isFromTable("users")).toBe(true); + expect(users.id.isFromTable("contacts")).toBe(false); + }); + }); + + describe("Filter Operators", () => { + const users = fmTableOccurrence( + "users", + { + id: textField().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + age: numberField().entityId("FMFID:3"), + }, + { entityId: "FMTID:100" }, + ); + + it("should create eq operator", () => { + const expr = eq(users.name, "John"); + expect(expr.operator).toBe("eq"); + expect(expr.toODataFilter(false)).toBe("name eq 'John'"); + }); + + it("should create gt operator", () => { + const expr = gt(users.age, 18); + expect(expr.operator).toBe("gt"); + expect(expr.toODataFilter(false)).toBe("age gt 18"); + }); + + it("should create contains operator", () => { + const expr = contains(users.name, "John"); + expect(expr.operator).toBe("contains"); + expect(expr.toODataFilter(false)).toBe("contains(name, 'John')"); + }); + + it("should support column-to-column comparison", () => { + const contacts = fmTableOccurrence( + "contacts", + { id_user: textField() }, + {}, + ); + const expr = eq(users.id, contacts.id_user); + expect(expr.toODataFilter(false)).toBe('"id" eq "id_user"'); + }); + + it("should use entity IDs when enabled", () => { + const expr = eq(users.name, "John"); + expect(expr.toODataFilter(true)).toBe("FMFID:2 eq 'John'"); + }); + + it("should create and operator", () => { + const expr = and(eq(users.name, "John"), gt(users.age, 18)); + expect(expr.operator).toBe("and"); + expect(expr.toODataFilter(false)).toBe("name eq 'John' and age gt 18"); + }); + + it("should create or operator", () => { + const expr = or(eq(users.name, "John"), eq(users.name, "Jane")); + expect(expr.operator).toBe("or"); + expect(expr.toODataFilter(false)).toBe( + "name eq 'John' or name eq 'Jane'", + ); + }); + + it("should handle nested logical operators", () => { + const expr = and( + eq(users.name, "John"), + or(gt(users.age, 18), eq(users.age, 18)), + ); + expect(expr.toODataFilter(false)).toBe( + "name eq 'John' and (age gt 18 or age eq 18)", + ); + }); + + it("should escape single quotes in strings", () => { + const expr = eq(users.name, "O'Brien"); + expect(expr.toODataFilter(false)).toBe("name eq 'O''Brien'"); + }); + }); + + describe("Type Safety", () => { + it("should infer output types from validators", () => { + const users = fmTableOccurrence( + "users", + { + status: textField().readValidator( + z.enum(["active", "pending", "inactive"]), + ), + }, + {}, + ); + + // Type test - the column type matches the validator output type + // Since the field is nullable by default, the type includes null + const col: Column<"active" | "pending" | "inactive" | null, "status"> = + users.status as any; // Type assertion needed due to nullable field inference + expect(col.fieldName).toBe("status"); + }); + + it("should handle nullable fields", () => { + const users = fmTableOccurrence( + "users", + { + email: textField(), // nullable by default + name: textField().notNull(), // not null + }, + {}, + ); + + // Type test + const emailCol: Column = + users.email; + const nameCol: Column = + users.name; + + expect(emailCol.fieldName).toBe("email"); + expect(nameCol.fieldName).toBe("name"); + }); + }); +}); diff --git a/packages/fmodata/tests/query-strings.test.ts b/packages/fmodata/tests/query-strings.test.ts new file mode 100644 index 00000000..22cfdedd --- /dev/null +++ b/packages/fmodata/tests/query-strings.test.ts @@ -0,0 +1,537 @@ +/** + * OData Query String Generation Tests + * + * This test file validates that the query builder correctly generates OData + * query strings. These tests focus on ensuring that: + * + * - Query options ($select, $filter, $orderby, $top, $skip, $expand, $count) + * are correctly formatted according to OData v4 specification + * - Query string encoding and escaping is handled properly + * - Method chaining produces correct combined query strings + * - Edge cases and special characters are handled correctly + * + * These tests do NOT: + * - Execute actual HTTP requests (.execute() is never called) + * - Test network behavior or responses + * - Require a mock fetch implementation + * + * They serve to ensure the query builder generates valid OData query strings + * that will be correctly parsed by OData endpoints. + */ + +import { describe, expect, expectTypeOf, it } from "vitest"; +import { createMockClient } from "./utils/test-setup"; +import { + numberField, + textField, + fmTableOccurrence, + asc, + desc, + eq, + gt, + and, + or, + isNull, +} from "@proofkit/fmodata"; + +const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + name: textField(), + "name with spaces": textField(), + "special%char": textField(), + "special&char": textField(), + email: textField(), + age: numberField(), + }, + { navigationPaths: ["contacts"] }, +); +const contacts = fmTableOccurrence("contacts", { + PrimaryKey: textField().primaryKey(), + name: textField(), + "name with spaces": textField(), + "special%char": textField(), + "special&char": textField(), +}); + +describe("OData Query String Generation", () => { + const client = createMockClient(); + const db = client.database("TestDB"); + + describe("$select", () => { + it("should generate $select query for single field", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name }) + .getQueryString(); + + expect(queryString).toContain("$select"); + expect(queryString).toContain("name"); + }); + it("should auto quote fields with special characters", () => { + const base = db.from(users).list(); + + const queryString = base.select({ id: users.id }).getQueryString(); + expect(queryString).toContain('$select="id"'); + expect(queryString).toContain("$top=1000"); + const queryString2 = base + .select({ name: users["name with spaces"] }) + .getQueryString(); + expect(queryString2).toContain('$select="name with spaces"'); + expect(queryString2).toContain("$top=1000"); + + const queryString3 = base + .select({ test: users["special%char"] }) + .getQueryString(); + expect(queryString3).toContain("$top=1000"); + expect( + queryString3.includes('$select="special%char"') || + queryString3.includes('$select="special%char"'), + ).toBe(true); + + const queryString4 = base + .select({ test: users["special&char"] }) + .getQueryString(); + expect(queryString4).toContain('$select="special&char"'); + expect(queryString4).toContain("$top=1000"); + + const queryString5 = base + .select({ name: users.name }) + .expand(contacts, (b: any) => b.select({ id: contacts.PrimaryKey })) + .getQueryString(); + expect(queryString5).toContain("$select=name"); + expect(queryString5).toContain("$top=1000"); + expect(queryString5).toContain("$expand=contacts($select=PrimaryKey)"); + const queryString7 = db + .from(users) + .list() + .select({ name: users.name }) + .expand(contacts, (b: any) => + b.select({ name: contacts["name with spaces"] }), + ) + .getQueryString(); + expect(queryString7).toContain("$select=name"); + expect(queryString7).toContain("$top=1000"); + expect( + queryString7.includes('$expand=contacts($select="name with spaces")') || + queryString7.includes('$expand=contacts($select="name with spaces")'), + ).toBe(true); + + const queryString8 = db + .from(users) + .list() + .select({ name: users.name }) + .expand(contacts, (b: any) => + b.select({ test: contacts["special%char"] }), + ) + .getQueryString(); + expect(queryString8).toContain("$select=name"); + expect(queryString8).toContain("$top=1000"); + expect( + queryString8.includes('$expand=contacts($select="special%char")') || + queryString8.includes('$expand=contacts($select="special%char")'), + ).toBe(true); + + const queryString9 = db + .from(users) + .list() + .select({ name: users.name }) + .expand(contacts, (b: any) => + b.select({ test: contacts["special&char"] }), + ) + .getQueryString(); + expect(queryString9).toContain("$select=name"); + expect(queryString9).toContain("$top=1000"); + expect(queryString9).toContain( + '$expand=contacts($select="special&char")', + ); + }); + + it("should generate $select query for multiple fields", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name, email: users.email, age: users.age }) + .getQueryString(); + + expect(queryString).toContain("$select"); + expect(queryString).toContain("name"); + expect(queryString).toContain("email"); + expect(queryString).toContain("age"); + }); + + it("should generate $select with comma-separated fields", () => { + const queryString = db + .from(users) + .list() + .select({ id: users.id, name: users.name }) + .getQueryString(); + + // OData format: $select=id,name + const selectPart = queryString.match(/\$select=([^&]+)/)?.[1]; + expect(selectPart).toBeDefined(); + + expect(selectPart?.split(",")).toContain("name"); + }); + }); + + describe("$filter", () => { + it("should generate $filter with equality operator", () => { + const queryString = db + .from(users) + .list() + .where(eq(users.name, "John")) + .getQueryString(); + + expect(queryString).toContain("$filter"); + expect(queryString).toContain("name"); + expect(queryString).toContain("eq"); + expect(queryString).toContain("John"); + expect(queryString).not.toContain("operands"); + expect(queryString).toBe( + `/users?$filter=name eq 'John'&$top=1000&$select=\"id\",name,"name with spaces","special%char","special&char",email,age`, + ); + }); + + it("should generate $filter with numeric comparison", () => { + const queryString = db + .from(users) + .list() + .where(gt(users.age, 18)) + .getQueryString(); + + expect(queryString).toContain("$filter"); + expect(queryString).toContain("age"); + expect(queryString).toContain("gt"); + }); + + it("should generate $filter with multiple conditions using AND", () => { + const queryString = db + .from(users) + .list() + .where(and(eq(users.name, "John"), gt(users.age, 18))) + .getQueryString(); + + expect(queryString).toContain("$filter"); + expect(queryString).toContain("name"); + expect(queryString).toContain("age"); + }); + + it("should generate $filter with OR conditions", () => { + // Note: This test assumes users table has a status field + // If not, we may need to adjust the test + const queryString = db + .from(users) + .list() + .where(or(eq(users.name, "active"), eq(users.name, "pending"))) + .getQueryString(); + + expect(queryString).toContain("$filter"); + expect(queryString).toContain("name"); + }); + + it("should handle string values with quotes in filter", () => { + const queryString = db + .from(users) + .list() + .where(eq(users.name, "John O'Connor")) + .getQueryString(); + + expect(queryString).toContain("$filter"); + // OData should properly escape quotes + expect(queryString).toContain("John"); + }); + + it("should handle null values in filter", () => { + const queryString = db + .from(users) + .list() + .where(isNull(users.name)) + .getQueryString(); + + expect(queryString).toContain("$filter"); + expect(queryString).toContain("null"); + }); + }); + + describe("$orderby", () => { + it("should generate $orderby for ascending order", () => { + const queryString = db + .from(users) + .list() + .orderBy(asc(users.name)) + .getQueryString(); + + expect(queryString).toContain("$orderby"); + expect(queryString).toContain("name"); + + // without asc should also work, as it's the default + const queryString2 = db + .from(users) + .list() + .orderBy(users.name) + .getQueryString(); + }); + + it("should generate $orderby for descending order", () => { + const queryString = db + .from(users) + .list() + .orderBy(desc(users.name)) + .getQueryString(); + + expect(queryString).toContain("$orderby"); + expect(queryString).toContain("name"); + expect(queryString).toContain("desc"); + }); + + it("should allow order by with multiple fields", () => { + const queryString = db + .from(users) + .list() + .orderBy(users.name, desc(users.age)) // Raw string - no type safety + .getQueryString(); + + expect(queryString).toContain("$orderby"); + expect(queryString).toContain("name"); + expect(queryString).toContain("age"); + }); + + it("should not allow order by with fields from other tables", () => { + db.from(users) + .list() + // @ts-expect-error - contacts.PrimaryKey is not a valid field + .orderBy(contacts.PrimaryKey); + + // @ts-expect-error - contacts.name is not a valid field + db.from(users).list().orderBy(asc(contacts.name)); + // @ts-expect-error - contacts.name is not a valid field + db.from(users).list().orderBy(desc(contacts.name)); + // @ts-expect-error - contacts.name is not a valid field + db.from(users).list().orderBy(users.name, desc(contacts.name)); + }); + }); + + describe("$top", () => { + it("should generate $top query parameter", () => { + const queryString = db.from(users).list().top(10).getQueryString(); + + expect(queryString).toContain("$top"); + expect(queryString).toContain("10"); + }); + + it("should generate $top with different values", () => { + const queryString = db.from(users).list().top(25).getQueryString(); + + expect(queryString).toContain("$top"); + expect(queryString).toContain("25"); + }); + }); + + describe("$skip", () => { + it("should generate $skip query parameter", () => { + const queryString = db.from(users).list().skip(20).getQueryString(); + + expect(queryString).toContain("$skip"); + expect(queryString).toContain("20"); + }); + + it("should generate $skip with zero value", () => { + const queryString = db.from(users).list().skip(0).getQueryString(); + + expect(queryString).toContain("$skip"); + expect(queryString).toContain("0"); + }); + }); + + describe("$expand", () => { + it("should generate $expand query parameter", () => { + const queryString = db + .from(users) + .list() + .expand(contacts) + .getQueryString(); + + expect(queryString).toContain("$expand"); + expect(queryString).toContain("contacts"); + }); + }); + + describe("$count", () => { + it("should generate query with $count parameter", () => { + const queryString = db.from(users).list().count().getQueryString(); + + expect(queryString).toContain("$count"); + }); + + it("should generate $count with other query parameters", () => { + const queryString = db + .from(users) + .list() + .where("status eq 'active'") + .count() + .getQueryString(); + + expect(queryString).toContain("$count"); + expect(queryString).toContain("$filter"); + }); + }); + + describe("Combined query parameters", () => { + it("should combine $select and $filter", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name, email: users.email }) + .where("age gt 18") + .getQueryString(); + + expect(queryString).toContain("$select"); + expect(queryString).toContain("$filter"); + expect(queryString).toContain("name"); + expect(queryString).toContain("age"); + }); + + it("should combine $select, $filter, and $orderby", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name, email: users.email }) + .where("status eq 'active'") + .orderBy("name") + .getQueryString(); + + expect(queryString).toContain("$select"); + expect(queryString).toContain("$filter"); + expect(queryString).toContain("$orderby"); + }); + + it("should combine $top and $skip for pagination", () => { + const queryString = db + .from(users) + .list() + .top(10) + .skip(20) + .getQueryString(); + + expect(queryString).toContain("$top"); + expect(queryString).toContain("$skip"); + expect(queryString).toContain("10"); + expect(queryString).toContain("20"); + }); + + it("should combine multiple query parameters", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name, email: users.email }) + .where("age gt 18") + .orderBy("name") + .top(10) + .skip(0) + .getQueryString(); + + expect(queryString).toContain("$select"); + expect(queryString).toContain("$filter"); + expect(queryString).toContain("$orderby"); + expect(queryString).toContain("$top"); + expect(queryString).toContain("$skip"); + }); + + it("should combine $select, $filter, $orderby, $top, and $expand", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name, email: users.email }) + .where("status eq 'active'") + .orderBy("name") + .top(25) + .expand(contacts) + .getQueryString(); + + expect(queryString).toContain("$select"); + expect(queryString).toContain("$filter"); + expect(queryString).toContain("$orderby"); + expect(queryString).toContain("$top"); + expect(queryString).toContain("$expand"); + }); + }); + + describe("single() mode", () => { + it("should generate query string for single record", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name }) + .single() + .getQueryString(); + + expect(queryString).toContain("$select"); + // single() mode affects execution, not query string format + expect(queryString).toBeDefined(); + }); + + it("should generate query string with single() and filter", () => { + const queryString = db + .from(users) + .list() + .where("id eq '123'") + .single() + .getQueryString(); + + expect(queryString).toContain("$filter"); + expect(queryString).toContain("id"); + }); + }); + + describe("Query string format validation", () => { + it("should use & to separate multiple parameters", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name }) + .where("age gt 18") + .top(10) + .getQueryString(); + + // Should have & between parameters + const matches = queryString.match(/&/g); + expect(matches?.length).toBeGreaterThan(0); + }); + + it("should URL encode special characters in values", () => { + const queryString = db + .from(users) + .list() + .where("name eq 'John & Jane'") + .getQueryString(); + + expect(queryString).toContain("$filter"); + // Special characters should be properly encoded + expect(queryString).toBeDefined(); + }); + }); + + describe("list() method", () => { + it("should generate query string from list() builder", () => { + const queryString = db.from(users).list().getQueryString(); + + expect(queryString).toBeDefined(); + expect(typeof queryString).toBe("string"); + }); + + it("should combine list() with query parameters", () => { + const queryString = db + .from(users) + .list() + .select({ name: users.name }) + .top(10) + .getQueryString(); + + expect(queryString).toContain("$select"); + expect(queryString).toContain("$top"); + }); + }); +}); diff --git a/packages/fmodata/tests/record-builder-select-expand.test.ts b/packages/fmodata/tests/record-builder-select-expand.test.ts new file mode 100644 index 00000000..f507f2ee --- /dev/null +++ b/packages/fmodata/tests/record-builder-select-expand.test.ts @@ -0,0 +1,834 @@ +/** + * RecordBuilder Select/Expand Tests + * + * Tests for type-safe select() and expand() methods on the RecordBuilder (.get()) + * These tests validate: + * - Type-safe field selection with proper return type narrowing + * - Type-safe relation expansion with callback support + * - Query string generation + * - Response validation with expanded data + */ + +import { describe, it, expect, expectTypeOf } from "vitest"; +import { z } from "zod/v4"; +import { createMockFetch } from "./utils/mock-fetch"; +import { + createMockClient, + contacts, + users, + arbitraryTable, + invoices, +} from "./utils/test-setup"; +import { + fmTableOccurrence, + textField, + timestampField, + numberField, + containerField, + eq, +} from "@proofkit/fmodata"; + +describe("RecordBuilder Select/Expand", () => { + const client = createMockClient(); + const db = client.database("test_db"); + + // Create occurrences with different defaultSelect values for testing + const contactsWithSchemaSelect = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField(), + id_user: textField(), + }, + { + defaultSelect: "schema", // Should select all schema fields + navigationPaths: ["users"], + }, + ); + + const contactsWithArraySelect = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField(), + id_user: textField(), + }, + { + defaultSelect: (table) => ({ + name: table.name, + hobby: table.hobby, + id_user: table.id_user, + }), // Specific fields + navigationPaths: ["users"], + }, + ); + + // Create occurrences with navigation where target has different defaultSelect values + const contactsForExpandTest = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField(), + id_user: textField(), + }, + { + defaultSelect: "all", // Parent table uses all + navigationPaths: ["users"], + }, + ); + + const usersWithSchemaSelect = fmTableOccurrence( + "users", + { + id: textField().primaryKey().readValidator(z.uuid()), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + active: numberField().readValidator(z.coerce.boolean()), + fake_field: textField(), + id_customer: textField(), + }, + { + defaultSelect: "schema", // Target table uses schema + navigationPaths: ["contacts"], + }, + ); + + const usersWithArraySelect = fmTableOccurrence( + "users", + { + id: textField().primaryKey().readValidator(z.uuid()), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + active: numberField().readValidator(z.coerce.boolean()), + fake_field: textField(), + id_customer: textField(), + }, + { + defaultSelect: (table) => ({ + name: table.name, + active: table.active, + }), // Target table uses specific fields + navigationPaths: ["contacts"], + }, + ); + + // const dbWithExpandArraySelect = client.database("test_db_expand_array"); + describe("defaultSelect on get()", () => { + it("should apply defaultSelect: 'schema' fields to query string when no select is called", () => { + const queryString = db + .from(contactsWithSchemaSelect) + .get("test-uuid") + .getQueryString(); + + // When defaultSelect is "schema", the query should include $select with all schema fields + expect(queryString).toContain("$select="); + + // Should contain all fields from the contacts schema + expect(queryString).toContain("PrimaryKey"); + expect(queryString).toContain("CreationTimestamp"); + expect(queryString).toContain("CreatedBy"); + expect(queryString).toContain("ModificationTimestamp"); + expect(queryString).toContain("ModifiedBy"); + expect(queryString).toContain("name"); + expect(queryString).toContain("hobby"); + expect(queryString).toContain("id_user"); + }); + + it("should apply defaultSelect: array of fields to query string when no select is called", () => { + const queryString = db + .from(contactsWithArraySelect) + .get("test-uuid") + .getQueryString(); + + // When defaultSelect is an array, the query should include $select with those specific fields + expect(queryString).toContain("$select="); + expect(queryString).toContain("name"); + expect(queryString).toContain("hobby"); + expect(queryString).toContain("id_user"); + + // Should NOT contain fields not in the array + expect(queryString).not.toContain("PrimaryKey"); + expect(queryString).not.toContain("CreationTimestamp"); + }); + + it("should NOT apply defaultSelect when defaultSelect is 'all'", () => { + const queryString = db.from(contacts).get("test-uuid").getQueryString(); + + // When defaultSelect is "all", no $select should be added + // (current behavior - FileMaker returns all fields) + expect(queryString).toBe("/contacts('test-uuid')"); + expect(queryString).not.toContain("$select="); + }); + + it("should override defaultSelect when explicit select() is called", () => { + const queryString = db + .from(contactsWithSchemaSelect) + .get("test-uuid") + .select({ name: contactsWithSchemaSelect.name }) // Explicit select should override defaultSelect + .getQueryString(); + + expect(queryString).toContain("$select=name"); + // Should not contain other schema fields when explicit select is used + expect(queryString).not.toContain("PrimaryKey"); + expect(queryString).not.toContain("hobby"); + }); + }); + + describe("defaultSelect within expand()", () => { + it("should apply target table defaultSelect: 'schema' in expand when no callback select is called", () => { + // When expanding to 'users' which has defaultSelect: "schema", + // the expand should automatically include $select with all user schema fields + const queryString = db + .from(contactsWithSchemaSelect) + .get("test-uuid") + .expand(usersWithSchemaSelect) + .getQueryString(); + + // The expand should include $select for the target table's schema fields + expect(queryString).toContain("$expand=users"); + expect(queryString).toContain("$select="); + + // Should contain user schema fields within the expand + expect(queryString).toContain("id"); + expect(queryString).toContain("name"); + expect(queryString).toContain("active"); + }); + + it("should apply target table defaultSelect: array in expand when no callback select is called", () => { + // When expanding to 'users' which has defaultSelect: ["name", "active"], + // the expand should automatically include $select with those specific fields + const queryString = db + .from(contactsWithArraySelect) + .get("test-uuid") + .expand(usersWithArraySelect) + .getQueryString(); + + // The expand should include $select for the target table's default fields + expect(queryString).toContain("$expand=users($select="); + expect(queryString).toContain("name"); + expect(queryString).toContain("active"); + + // Should NOT contain fields not in the defaultSelect array + expect(queryString).not.toMatch(/\$expand=users\([^)]*id[^)]*\)/); + }); + + it("should override target defaultSelect when callback provides explicit select", () => { + // Even though users has defaultSelect: ["name", "active"], + // an explicit callback select should override it + const queryString = db + .from(contactsWithArraySelect) + .get("test-uuid") + .expand(users, (b: any) => b.select({ id: users.id })) + .getQueryString(); + + // Should only have the explicitly selected field (quotes may vary based on odata-query library) + expect(queryString).toContain("$expand=users($select="); + expect(queryString).toMatch(/\$select=["']?id["']?\)/); + // Should NOT contain the defaultSelect fields + expect(queryString).not.toContain("active"); + }); + + it("should apply defaultSelect in expand on list() queries too", () => { + const queryString = db + .from(contactsWithArraySelect) + .list() + .expand(usersWithSchemaSelect) + .getQueryString(); + + // The expand should include $select for the target table's default fields + expect(queryString).toContain("$expand=users($select="); + expect(queryString).toContain("name"); + expect(queryString).toContain("active"); + }); + }); + + describe("select() method", () => { + it("should generate query string with $select for single field", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name }) + .getQueryString(); + + expect(queryString).toBe("/contacts('test-uuid')?$select=name"); + }); + + it("should generate query string with $select for multiple fields", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .select({ + name: contacts.name, + hobby: contacts.hobby, + id_user: contacts.id_user, + }) + .getQueryString(); + + expect(queryString).toContain("$select="); + expect(queryString).toContain("name"); + expect(queryString).toContain("hobby"); + expect(queryString).toContain("id_user"); + }); + + it("should deduplicate selected fields", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name, hobby: contacts.hobby }) + .getQueryString(); + + // Count occurrences of "name" - should only appear once + const nameCount = (queryString.match(/name/g) || []).length; + expect(nameCount).toBe(1); + }); + + it("should narrow return type to selected fields only", () => { + const recordBuilder = db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name, hobby: contacts.hobby }); + + // Type test - the execute result should only have name and hobby + // This is a compile-time check + expectTypeOf(recordBuilder.execute).returns.resolves.toMatchTypeOf<{ + data: + | { + name: string | null; + hobby: string | null; + } + | undefined; + error: any; + }>(); + }); + + it("should provide type errors for non-existent fields", () => { + () => { + db.from(contacts) + .get("test-uuid") + // @ts-expect-error - nonexistent is not a valid column + .select({ name: contacts.nonexistent }); + }; + }); + + it("should include selected fields in getRequestConfig URL", () => { + const config = db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name, hobby: contacts.hobby }) + .getRequestConfig(); + + expect(config.url).toContain("$select="); + expect(config.url).toContain("name"); + expect(config.url).toContain("hobby"); + }); + }); + + describe("expand() method", () => { + it("should generate query string with simple $expand", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .expand(users) + .getQueryString(); + + expect(queryString).toBe("/contacts('test-uuid')?$expand=users"); + }); + + it("should generate query string with $expand and nested $select", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .expand(users, (b: any) => + b.select({ name: users.name, active: users.active }), + ) + .getQueryString(); + + expect(queryString).toBe( + "/contacts('test-uuid')?$expand=users($select=name,active)", + ); + }); + + it("should provide autocomplete for known relations", () => { + const recordBuilder = db.from(contacts).get("test-uuid"); + + // The expand parameter should suggest "users" | (string & {}) + expectTypeOf(recordBuilder.expand) + .parameter(0) + .not.toEqualTypeOf(); + }); + + it("should type callback builder to target table schema", () => { + db.from(contacts) + .get("test-uuid") + .expand(users, (builder: any) => { + // builder.select should only accept fields from users table + expectTypeOf(builder.select).parameter(0).not.toEqualTypeOf(); + + return builder.select({ name: users.name, active: users.active }); + }); + }); + + it("should not allow arbitrary string relations", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + // @ts-expect-error - arbitraryTable is not a valid expand target + .expand(arbitraryTable) + .getQueryString(); + + expect(queryString).toContain( + "/contacts('test-uuid')?$expand=arbitrary_table", + ); + }); + + it("should support $filter in expand callback", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .expand(users, (b: any) => b.where(eq(users.active, true))) + .getQueryString(); + + expect(queryString).toContain("$expand=users($filter=active"); + }); + + it("should support $orderby in expand callback", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .expand(users, (b: any) => b.orderBy("name")) + .getQueryString(); + + expect(queryString).toContain("$expand=users($orderby=name"); + }); + + it("should support $top in expand callback", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .expand(users, (b: any) => b.top(5)) + .getQueryString(); + + expect(queryString).toContain("$expand=users($top=5"); + }); + + it("should support $skip in expand callback", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .expand(users, (b: any) => b.skip(10)) + .getQueryString(); + + expect(queryString).toContain("$expand=users($skip=10"); + }); + + it("should support nested expands", () => { + // users -> contacts (circular navigation from setup) + const queryString = db + .from(contacts) + .get("test-uuid") + .expand(users, (b: any) => + b + .select({ name: users.name }) + .expand(contacts, (nested: any) => + nested.select({ name: contacts.name }), + ), + ) + .getQueryString(); + + expect(queryString).toBe( + "/contacts('test-uuid')?$expand=users($select=name;$expand=contacts($select=name))", + ); + }); + + it("should support multiple expands via chaining", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .expand(users, (b: any) => b.select({ name: users.name })) + .expand(invoices) + .getQueryString(); + + expect(queryString).toBe( + "/contacts('test-uuid')?$expand=users($select=name),invoices", + ); + }); + }); + + describe("select() + expand() combined", () => { + it("should generate query string with both $select and $expand", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name, hobby: contacts.hobby }) + .expand(users, (b: any) => b.select({ name: users.name })) + .getQueryString(); + + expect(queryString).toContain("$select=name,hobby"); + expect(queryString).toContain("$expand=users($select=name)"); + }); + + it("should return properly typed result with both select and expand", () => { + const recordBuilder = db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name, hobby: contacts.hobby }) + .expand(users, (b: any) => + b.select({ name: users.name, active: users.active }), + ); + + async () => { + const { data, error } = await recordBuilder.execute(); + data?.users.map((user) => user.CreatedBy); + }; + }); + }); + + describe("execute() with mocked responses", () => { + it("should execute query with select and return narrowed fields", async () => { + const mockResponse = { + url: "https://example.com/test", + method: "GET", + status: 200, + headers: { "content-type": "application/json;charset=utf-8" }, + response: { + "@context": "$metadata#contacts/$entity", + "@id": "contacts('test-uuid')", + "@editLink": "contacts('test-uuid')", + name: "John Doe", + hobby: "Reading", + }, + }; + + const result = await db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name, hobby: contacts.hobby }) + .execute({ + fetchHandler: createMockFetch(mockResponse), + }); + + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + expect(result.data?.name).toBe("John Doe"); + expect(result.data?.hobby).toBe("Reading"); + }); + + it("should execute query with expand and include related records", async () => { + const mockResponse = { + url: "https://example.com/test", + method: "GET", + status: 200, + headers: { "content-type": "application/json;charset=utf-8" }, + response: { + "@context": "$metadata#contacts/$entity", + "@id": "contacts('test-uuid')", + "@editLink": "contacts('test-uuid')", + PrimaryKey: "test-uuid", + CreationTimestamp: "2025-01-01T00:00:00Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-01-01T00:00:00Z", + ModifiedBy: "admin", + name: "John Doe", + hobby: "Reading", + id_user: "user-1", + users: [ + { + "@id": "users('user-1')", + "@editLink": "users('user-1')", + name: "johndoe", + active: true, + }, + ], + }, + }; + + const result = await db + .from(contacts) + .get("test-uuid") + .expand(users, (b: any) => + b.select({ name: users.name, active: users.active }), + ) + .execute({ + fetchHandler: createMockFetch(mockResponse), + }); + + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + expect(result.data?.name).toBe("John Doe"); + expect(result.data?.users).toBeDefined(); + expect(result.data?.users).toHaveLength(1); + expect(result.data?.users[0]?.name).toBe("johndoe"); + }); + + it("should strip OData annotations by default", async () => { + const mockResponse = { + url: "https://example.com/test", + method: "GET", + status: 200, + headers: { "content-type": "application/json;charset=utf-8" }, + response: { + "@context": "$metadata#contacts/$entity", + "@id": "contacts('test-uuid')", + "@editLink": "contacts('test-uuid')", + name: "John Doe", + hobby: "Reading", + }, + }; + + const result = await db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name, hobby: contacts.hobby }) + .execute({ + fetchHandler: createMockFetch(mockResponse), + }); + + expect(result.data).toBeDefined(); + // OData annotations should be stripped + expect((result.data as any)["@id"]).toBeUndefined(); + expect((result.data as any)["@editLink"]).toBeUndefined(); + }); + + it("should include OData annotations when requested", async () => { + const mockResponse = { + url: "https://example.com/test", + method: "GET", + status: 200, + headers: { "content-type": "application/json;charset=utf-8" }, + response: { + "@context": "$metadata#contacts/$entity", + "@id": "contacts('test-uuid')", + "@editLink": "contacts('test-uuid')", + name: "John Doe", + hobby: "Reading", + }, + }; + + const result = await db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name, hobby: contacts.hobby }) + .execute({ + fetchHandler: createMockFetch(mockResponse), + includeODataAnnotations: true, + }); + + expect(result.data).toBeDefined(); + // OData annotations should be present + expect((result.data as any)["@id"]).toBe("contacts('test-uuid')"); + expect((result.data as any)["@editLink"]).toBe("contacts('test-uuid')"); + }); + }); + + describe("getSingleField() mutual exclusion", () => { + it("should work independently of select/expand", () => { + // getSingleField should work as before, returning just the field value + const queryString = db + .from(contacts) + .get("test-uuid") + .getSingleField(contacts.name) + .getQueryString(); + + // getSingleField adds /fieldName to the URL, not $select + expect(queryString).toBe("/contacts('test-uuid')/name"); + expect(queryString).not.toContain("$select"); + }); + }); + + describe("getRequestConfig()", () => { + it("should include query params in URL", () => { + const config = db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name }) + .expand(users) + .getRequestConfig(); + + expect(config.method).toBe("GET"); + expect(config.url).toContain("$select=name"); + expect(config.url).toContain("$expand=users"); + }); + }); + + describe("Complex combinations", () => { + it("should support select + filter + orderBy + top + nested expand", () => { + // Using contacts -> users -> contacts (circular navigation from setup) + const queryString = db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name, hobby: contacts.hobby }) + .expand(users, (b: any) => + b + .select({ name: users.name, active: users.active }) + .where(eq(users.active, true)) + .orderBy(users.name) + .top(10) + .expand(contacts, (nested: any) => + nested.select({ name: contacts.name }), + ), + ) + .getQueryString(); + + // Should contain all query options + expect(queryString).toContain("$select=name,hobby"); + expect(queryString).toContain("$select=name,active"); + expect(queryString).toContain("$filter=active"); + expect(queryString).toContain("$orderby=name"); + expect(queryString).toContain("$top=10"); + expect(queryString).toContain("$expand=contacts($select=name)"); + }); + + it("should support multiple expands with different options", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .expand(users, (b: any) => + b.select({ name: users.name }).where(eq(users.active, true)), + ) + .expand(invoices, (b: any) => + b.select({ invoiceNumber: invoices.invoiceNumber }).top(5), + ) + .getQueryString(); + + expect(queryString).toContain("users($select=name;$filter=active eq 1)"); + expect(queryString).toContain("invoices($select=invoiceNumber;$top=5)"); + }); + }); + + describe("Container Field Exclusion", () => { + it("should exclude container fields from defaultSelect: schema", async () => { + const mockResponse = { + url: "https://example.com/test", + method: "GET", + status: 200, + headers: { "content-type": "application/json;charset=utf-8" }, + response: { + "@odata.context": + "https://example.com/fmi/odata/v4/test_db/$metadata#contacts/$entity", + PrimaryKey: "test-uuid", + CreationTimestamp: "2025-01-01T00:00:00Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-01-01T00:00:00Z", + ModifiedBy: "admin", + name: "John Doe", + hobby: "Reading", + id_user: "user-123", + // Note: image field should NOT be included even though it's in the schema + }, + }; + + const result = await db + .from(contactsWithSchemaSelect) + .get("test-uuid") + .execute({ + fetchHandler: createMockFetch(mockResponse), + }); + + expect(result.data).toBeDefined(); + expect(result.error).toBeUndefined(); + + // Container field should not appear in the result type or query + const queryString = db + .from(contactsWithSchemaSelect) + .get("test-uuid") + .getQueryString(); + + // Should contain non-container fields + expect(queryString).toContain("$select="); + // Should NOT contain the image field + expect(queryString).not.toContain("image"); + }); + + it("should reject container field selection at compile time", () => { + // Type test - this should produce a compile error + expectTypeOf(() => { + // @ts-expect-error - container fields cannot be selected + db.from(contacts).get("test-uuid").select({ image: contacts.image }); + }).toBeFunction(); + }); + + it("should allow getSingleField() to access container fields", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .getSingleField(contacts.image) + .getQueryString(); + + expect(queryString).toBe("/contacts('test-uuid')/image"); + }); + + it("should exclude container fields from list queries with defaultSelect: schema", () => { + const queryString = db + .from(contactsWithSchemaSelect) + .list() + .getQueryString(); + + // Should have a select parameter + expect(queryString).toContain("$select="); + // Should NOT contain the image field + expect(queryString).not.toContain("image"); + // Should contain other fields + expect(queryString).toContain("name"); + }); + + it("should reject container field selection in list queries at compile time", () => { + // Type test - this should produce a compile error + expectTypeOf(() => { + // @ts-expect-error - container fields cannot be selected + db.from(contacts).list().select({ image: contacts.image }); + }).toBeFunction(); + }); + + it("should allow selecting non-container fields normally", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name, hobby: contacts.hobby }) + .getQueryString(); + + expect(queryString).toContain("$select=name,hobby"); + expect(queryString).not.toContain("image"); + }); + + it("should allow non-container fields in expanded relations", () => { + // Non-container fields should work fine in expanded relations + const queryString = db + .from(contacts) + .get("test-uuid") + .expand(users, (b: any) => b.select({ name: users.name })) + .getQueryString(); + + expect(queryString).toContain("users($select=name)"); + + // Verify main select also works with non-container fields + const queryString2 = db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name, hobby: contacts.hobby }) + .getQueryString(); + + expect(queryString2).toContain("$select=name,hobby"); + }); + }); +}); diff --git a/packages/fmodata/tests/sanitize-json.test.ts b/packages/fmodata/tests/sanitize-json.test.ts new file mode 100644 index 00000000..fe55b285 --- /dev/null +++ b/packages/fmodata/tests/sanitize-json.test.ts @@ -0,0 +1,228 @@ +/** + * JSON Sanitization Tests + * + * Tests for the sanitizeFileMakerJson function that handles FileMaker's + * invalid JSON responses containing unquoted `?` characters as field values. + */ + +import { describe, it, expect } from "vitest"; +import { + sanitizeFileMakerJson, + safeJsonParse, +} from "@proofkit/fmodata/client/sanitize-json"; +import { ResponseParseError } from "@proofkit/fmodata/errors"; + +describe("sanitizeFileMakerJson", () => { + describe("basic sanitization", () => { + it("should replace single unquoted ? value with null", () => { + const input = '{"field": ?}'; + const expected = '{"field": null}'; + expect(sanitizeFileMakerJson(input)).toBe(expected); + }); + + it("should replace multiple unquoted ? values with null", () => { + const input = '{"field1": ?, "field2": ?}'; + const expected = '{"field1": null, "field2": null}'; + expect(sanitizeFileMakerJson(input)).toBe(expected); + }); + + it("should handle mixed valid and invalid values", () => { + const input = '{"field1": "valid", "field2": ?, "field3": null}'; + const expected = '{"field1": "valid", "field2": null, "field3": null}'; + expect(sanitizeFileMakerJson(input)).toBe(expected); + }); + + it("should handle ? at the end of an object", () => { + const input = '{"field1": "value", "field2": ?}'; + const expected = '{"field1": "value", "field2": null}'; + expect(sanitizeFileMakerJson(input)).toBe(expected); + }); + + it("should handle ? followed by comma", () => { + const input = '{"field1": ?, "field2": "value"}'; + const expected = '{"field1": null, "field2": "value"}'; + expect(sanitizeFileMakerJson(input)).toBe(expected); + }); + }); + + describe("should not modify valid JSON", () => { + it("should not modify ? inside string values", () => { + const input = '{"field": "Is this a question?"}'; + expect(sanitizeFileMakerJson(input)).toBe(input); + }); + + it("should not modify ? in the middle of string values", () => { + const input = '{"field": "What? Really?"}'; + expect(sanitizeFileMakerJson(input)).toBe(input); + }); + + it("should not modify escaped ? in strings", () => { + const input = '{"field": "test\\?test"}'; + expect(sanitizeFileMakerJson(input)).toBe(input); + }); + + it("should not modify normal null values", () => { + const input = '{"field": null}'; + expect(sanitizeFileMakerJson(input)).toBe(input); + }); + + it("should not modify numeric values", () => { + const input = '{"field": 123}'; + expect(sanitizeFileMakerJson(input)).toBe(input); + }); + + it("should not modify boolean values", () => { + const input = '{"field1": true, "field2": false}'; + expect(sanitizeFileMakerJson(input)).toBe(input); + }); + + it("should not modify empty strings", () => { + const input = '{"field": ""}'; + expect(sanitizeFileMakerJson(input)).toBe(input); + }); + }); + + describe("nested objects and arrays", () => { + it("should handle ? in nested objects", () => { + const input = '{"outer": {"inner": ?}}'; + const expected = '{"outer": {"inner": null}}'; + expect(sanitizeFileMakerJson(input)).toBe(expected); + }); + + it("should handle ? in arrays", () => { + const input = '{"value": [1, ?, 3]}'; + + const result = sanitizeFileMakerJson(input); + + // The sanitized JSON should be parseable + expect(() => JSON.parse(result)).not.toThrow(); + + // And should have the correct values + const parsed = JSON.parse(result); + expect(parsed.value).toEqual([1, null, 3]); + }); + + it("should handle complex nested structures", () => { + const input = + '{"users": [{"name": "John", "age": ?}, {"name": ?, "age": 30}]}'; + const expected = + '{"users": [{"name": "John", "age": null}, {"name": null, "age": 30}]}'; + expect(sanitizeFileMakerJson(input)).toBe(expected); + }); + }); + + describe("whitespace handling", () => { + it("should handle no whitespace around ?", () => { + const input = '{"field":?}'; + const expected = '{"field": null}'; + expect(sanitizeFileMakerJson(input)).toBe(expected); + }); + + it("should handle extra whitespace around ?", () => { + const input = '{"field": ? }'; + const expected = '{"field": null }'; + expect(sanitizeFileMakerJson(input)).toBe(expected); + }); + + it("should handle newlines around ?", () => { + const input = '{"field":\n?\n}'; + const expected = '{"field": null\n}'; + expect(sanitizeFileMakerJson(input)).toBe(expected); + }); + }); + + describe("realistic FileMaker OData responses", () => { + it("should sanitize a typical FileMaker list response", () => { + // Manual construction to ensure the ? is unquoted + const inputWithUnquoted = + '{"@odata.context":"$metadata#Users","value":[{"ROWID":1,"name":"John","email":"john@example.com","phone":?},{"ROWID":2,"name":"Jane","email":?,"phone":"555-1234"}]}'; + + const result = sanitizeFileMakerJson(inputWithUnquoted); + + // The sanitized JSON should be parseable + expect(() => JSON.parse(result)).not.toThrow(); + + // And should have the correct values + const parsed = JSON.parse(result); + expect(parsed.value[0].phone).toBeNull(); + expect(parsed.value[1].email).toBeNull(); + expect(parsed.value[0].email).toBe("john@example.com"); + expect(parsed.value[1].phone).toBe("555-1234"); + }); + + it("should sanitize response with all fields as ?", () => { + const input = + '{"@odata.context":"$metadata#Test","value":[{"field1":?,"field2":?,"field3":?}]}'; + + const result = sanitizeFileMakerJson(input); + + // The sanitized JSON should be parseable + expect(() => JSON.parse(result)).not.toThrow(); + + // And should have the correct values + const parsed = JSON.parse(result); + expect(parsed.value[0].field1).toBeNull(); + expect(parsed.value[0].field2).toBeNull(); + expect(parsed.value[0].field3).toBeNull(); + }); + }); +}); + +describe("safeJsonParse", () => { + it("should parse valid JSON from Response", async () => { + const data = { field: "value" }; + const response = new Response(JSON.stringify(data)); + const result = await safeJsonParse(response); + expect(result).toEqual(data); + }); + + it("should parse and sanitize invalid FileMaker JSON from Response", async () => { + const invalidJson = '{"field1": "valid", "field2": ?, "field3": null}'; + const response = new Response(invalidJson); + const result = await safeJsonParse(response); + expect(result).toEqual({ field1: "valid", field2: null, field3: null }); + }); + + it("should handle complex nested invalid JSON", async () => { + const invalidJson = + '{"users":[{"name":"John","age":?},{"name":?,"age":30}]}'; + const response = new Response(invalidJson); + const result = await safeJsonParse(response); + expect(result).toEqual({ + users: [ + { name: "John", age: null }, + { name: null, age: 30 }, + ], + }); + }); + + it("should throw ResponseParseError for completely invalid JSON", async () => { + const invalidJson = "not json at all"; + const response = new Response(invalidJson); + await expect(safeJsonParse(response)).rejects.toThrow(ResponseParseError); + + // Verify the error includes the sanitized text for debugging + try { + await safeJsonParse(new Response(invalidJson)); + } catch (err) { + expect(err).toBeInstanceOf(ResponseParseError); + const parseError = err as ResponseParseError; + expect(parseError.rawText).toBe(invalidJson); + expect(parseError.cause).toBeInstanceOf(SyntaxError); + } + }); + + it("should throw ResponseParseError for empty response body", async () => { + const response = new Response(""); + await expect(safeJsonParse(response)).rejects.toThrow(ResponseParseError); + + // Verify the error includes empty string as rawText + try { + await safeJsonParse(new Response("")); + } catch (err) { + expect(err).toBeInstanceOf(ResponseParseError); + const parseError = err as ResponseParseError; + expect(parseError.rawText).toBe(""); + } + }); +}); diff --git a/packages/fmodata/tests/schema-manager.test.ts b/packages/fmodata/tests/schema-manager.test.ts new file mode 100644 index 00000000..1adfa80e --- /dev/null +++ b/packages/fmodata/tests/schema-manager.test.ts @@ -0,0 +1,381 @@ +/** + * Schema Manager E2E Tests + * + * These tests execute real schema management operations against a live FileMaker OData server. + * They require valid credentials and a running server to pass. + * + * Setup: + * - Create a `.env.local` file in this package directory with the following variables: + * - FMODATA_SERVER_URL - The FileMaker OData server URL (e.g., https://api.example.com) + * - FMODATA_API_KEY - API key for bearer token authentication + * - FMODATA_DATABASE - The database name to use for testing + * + * Note: These tests may be skipped if environment variables are not set. + * Run with: pnpm test schema-manager + */ + +import path from "path"; +import { describe, it, expect, afterEach } from "vitest"; +import { config } from "dotenv"; +import { FMServerConnection } from "@proofkit/fmodata"; +import type { + Field, + StringField, + NumericField, + DateField, + TimeField, + TimestampField, + ContainerField, +} from "@proofkit/fmodata"; + +config({ path: path.resolve(__dirname, "../.env.local") }); + +// Load environment variables +const serverUrl = process.env.FMODATA_SERVER_URL; +const apiKey = process.env.FMODATA_API_KEY; +const database = process.env.FMODATA_DATABASE; + +describe("SchemaManager E2E Tests", () => { + // Skip tests if credentials are not available + if (!serverUrl || !apiKey || !database) { + console.warn( + "Skipping SchemaManager E2E tests: FMODATA_SERVER_URL, FMODATA_API_KEY, and FMODATA_DATABASE environment variables are required", + ); + return; + } + + const connection = new FMServerConnection({ + serverUrl, + auth: { apiKey }, + }); + + const db = connection.database(database); + + // Generate unique table name for this test run + const testTableName = `test_schema_${Date.now()}`; + + // Track all tables created during tests for cleanup + const createdTables: string[] = []; + + // Cleanup: Delete all test tables after each test + afterEach(async () => { + for (const tableName of createdTables) { + try { + await db.schema.deleteTable(tableName); + } catch (error) { + // Ignore errors - table may have already been deleted or may not exist + console.warn(`Failed to delete test table ${tableName}:`, error); + } + } + createdTables.length = 0; + }); + + it("should create a table with various field types", async () => { + // Create table with most field types (container added separately) + const fields: Field[] = [ + { + name: "id", + type: "string", + primary: true, + maxLength: 36, + }, + { + name: "username", + type: "string", + nullable: false, + unique: true, + maxLength: 50, + }, + { + name: "email", + type: "string", + nullable: false, + maxLength: 255, + }, + { + name: "age", + type: "numeric", + nullable: true, + }, + { + name: "birth_date", + type: "date", + nullable: true, + }, + { + name: "start_time", + type: "time", + nullable: true, + }, + { + name: "created_at", + type: "timestamp", + nullable: true, + }, + ]; + + const tableDefinition = await db.schema.createTable(testTableName, fields); + createdTables.push(testTableName); + + expect(tableDefinition).toBeDefined(); + expect(tableDefinition.tableName).toBe(testTableName); + expect(tableDefinition.fields).toBeDefined(); + expect(Array.isArray(tableDefinition.fields)).toBe(true); + }); + + it("should create a table with string fields that have maxLength and repetitions", async () => { + const tableName = `${testTableName}_repeating`; + const fields: Field[] = [ + { + name: "id", + type: "string", + primary: true, + maxLength: 36, + }, + { + name: "tags", + type: "string", + repetitions: 5, + maxLength: 50, + }, + ]; + + const tableDefinition = await db.schema.createTable(tableName, fields); + + createdTables.push(tableName); + + expect(tableDefinition).toBeDefined(); + expect(tableDefinition.tableName).toBe(tableName); + }); + + it("should create a table with string fields that have default values", async () => { + const tableName = `${testTableName}_defaults`; + const fields: StringField[] = [ + { + name: "id", + type: "string", + primary: true, + maxLength: 36, + }, + { + name: "created_by", + type: "string", + default: "USER", + }, + { + name: "username", + type: "string", + default: "USERNAME", + }, + ]; + + const tableDefinition = await db.schema.createTable(tableName, fields); + + createdTables.push(tableName); + + expect(tableDefinition).toBeDefined(); + expect(tableDefinition.tableName).toBe(tableName); + }); + + it("should add fields to an existing table", async () => { + // First create a table + const initialFields: Field[] = [ + { + name: "id", + type: "string", + primary: true, + maxLength: 36, + }, + { + name: "username", + type: "string", + nullable: false, + maxLength: 50, + }, + ]; + + await db.schema.createTable(testTableName, initialFields); + createdTables.push(testTableName); + + // Then add more fields + const newFields: Field[] = [ + { + name: "email", + type: "string", + nullable: false, + unique: true, + maxLength: 255, + }, + { + name: "phone", + type: "string", + nullable: true, + maxLength: 20, + }, + { + name: "age", + type: "numeric", + nullable: true, + }, + ]; + + const updatedTable = await db.schema.addFields(testTableName, newFields); + + expect(updatedTable).toBeDefined(); + expect(updatedTable.tableName).toBe(testTableName); + expect(updatedTable.fields).toBeDefined(); + expect(Array.isArray(updatedTable.fields)).toBe(true); + }); + + it("should create and delete an index", async () => { + // First create a table + const fields: Field[] = [ + { + name: "id", + type: "string", + primary: true, + maxLength: 36, + }, + { + name: "email", + type: "string", + nullable: false, + maxLength: 255, + }, + ]; + + await db.schema.createTable(testTableName, fields); + createdTables.push(testTableName); + + // Create an index + const index = await db.schema.createIndex(testTableName, "email"); + + expect(index).toBeDefined(); + expect(index.indexName).toBe("email"); + + // Delete the index + await db.schema.deleteIndex(testTableName, "email"); + + // If no error is thrown, the operation succeeded + expect(true).toBe(true); + }); + + it("should delete a field from a table", async () => { + // First create a table with multiple fields + const fields: Field[] = [ + { + name: "id", + type: "string", + primary: true, + maxLength: 36, + }, + { + name: "username", + type: "string", + nullable: false, + maxLength: 50, + }, + { + name: "temp_field", + type: "string", + nullable: true, + maxLength: 100, + }, + ]; + + await db.schema.createTable(testTableName, fields); + createdTables.push(testTableName); + + // Delete a field + await db.schema.deleteField(testTableName, "temp_field"); + + // If no error is thrown, the operation succeeded + expect(true).toBe(true); + }); + + it("should delete a table", async () => { + const tableName = `${testTableName}_delete`; + // First create a table + const fields: Field[] = [ + { + name: "id", + type: "string", + primary: true, + maxLength: 36, + }, + { + name: "name", + type: "string", + nullable: false, + maxLength: 100, + }, + ]; + + await db.schema.createTable(tableName, fields); + createdTables.push(tableName); + + // Verify table exists by trying to add a field (should not throw) + await db.schema.addFields(tableName, [ + { + name: "description", + type: "string", + nullable: true, + }, + ]); + + // Delete the table (remove from tracking since we're deleting it explicitly) + await db.schema.deleteTable(tableName); + const index = createdTables.indexOf(tableName); + if (index > -1) { + createdTables.splice(index, 1); + } + + // If no error is thrown, the operation succeeded + expect(true).toBe(true); + }); + + it("should handle field type definitions correctly", () => { + // Type checking test - ensure TypeScript accepts valid field types + const stringField: StringField = { + name: "name", + type: "string", + maxLength: 100, + }; + + const numericField: NumericField = { + name: "age", + type: "numeric", + }; + + const dateField: DateField = { + name: "birth_date", + type: "date", + default: "CURRENT_DATE", + }; + + const timeField: TimeField = { + name: "start_time", + type: "time", + default: "CURRENT_TIME", + }; + + const timestampField: TimestampField = { + name: "created_at", + type: "timestamp", + default: "CURRENT_TIMESTAMP", + }; + + const containerField: ContainerField = { + name: "avatar", + type: "container", + externalSecurePath: "/secure/path", + }; + + expect(stringField.type).toBe("string"); + expect(numericField.type).toBe("numeric"); + expect(dateField.type).toBe("date"); + expect(timeField.type).toBe("time"); + expect(timestampField.type).toBe("timestamp"); + expect(containerField.type).toBe("container"); + }); +}); diff --git a/packages/fmodata/tests/scripts.test.ts b/packages/fmodata/tests/scripts.test.ts new file mode 100644 index 00000000..f669a6b8 --- /dev/null +++ b/packages/fmodata/tests/scripts.test.ts @@ -0,0 +1,95 @@ +/** + * Script Tests + * + * Tests for running FileMaker scripts via the OData API. + */ + +import { describe, it, expectTypeOf } from "vitest"; +import { z } from "zod/v4"; +import { jsonCodec } from "./utils/helpers"; +import { createMockClient } from "./utils/test-setup"; + +describe("scripts", () => { + const client = createMockClient(); + + it("should handle expands", () => { + expectTypeOf(client.listDatabaseNames).returns.resolves.toBeArray(); + const db = client.database("test_db"); + + expectTypeOf(db.listTableNames).returns.resolves.toBeArray(); + + const resp = db.runScript("script name"); + // Catch the promise to prevent unhandled rejection (this is a type-only test) + resp.catch(() => {}); + expectTypeOf(resp).resolves.toEqualTypeOf<{ + resultCode: number; + result?: string; + }>(); + }); + + it("should allow script param", () => { + const db = client.database("test_db"); + + () => { + // don't actual run these calls, we're just checking the types + + // optional second param. + db.runScript("script name"); + + // script param can be string, number, or object. + db.runScript("script name", { + scriptParam: "param", + }); + + db.runScript("script name", { + scriptParam: 123, + }); + + db.runScript("script name", { + scriptParam: { hello: "world" }, // will be stringified in odata request + }); + }; + }); + + it("should throw a type error if script name is invalid string", () => { + // OData doesn't support script names with special characters (for example, @, &, /) or script names beginning with a number. + + const db = client.database("test_db"); + + () => { + // don't actual run these calls, we're just checking the types + + // these should only fail at runtime, don't enforce these at the type level + + db.runScript("123BadScriptName"); + db.runScript("@BadScriptName"); + db.runScript("/BadScriptName"); + db.runScript("BadScriptName@123"); + db.runScript("BadScriptName/123"); + }; + }); + + it("should validate/transform script result if schema provided", () => { + const db = client.database("test_db"); + + () => { + // don't actual run these calls, we're just checking the types + + const schema = jsonCodec( + z.object({ + hello: z.string(), + world: z.number(), + }), + ); + + const result = db.runScript("script name", { + resultSchema: schema, + }); + + expectTypeOf(result).resolves.toEqualTypeOf<{ + resultCode: number; + result: z.infer; + }>(); + }; + }); +}); diff --git a/packages/fmodata/tests/tsconfig.build.json b/packages/fmodata/tests/tsconfig.build.json new file mode 100644 index 00000000..b90b2dc4 --- /dev/null +++ b/packages/fmodata/tests/tsconfig.build.json @@ -0,0 +1,37 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + /* Strict any checking for tests */ + "noImplicitAny": true, + "strictNullChecks": true, + "strictFunctionTypes": true, + "strictBindCallApply": true, + "strictPropertyInitialization": true, + "noImplicitThis": true, + "alwaysStrict": true, + + /* Disallow explicit any (using @ts-expect-error for intentional any testing) */ + /* Note: TypeScript doesn't have a built-in noExplicitAny, but we can use eslint */ + + /* Path mappings for package name imports - point to dist for build testing */ + "baseUrl": "..", + "paths": { + "@proofkit/fmodata": ["./dist/esm"], + "@proofkit/fmodata/*": ["./dist/esm/*"] + }, + + /* Ensure node_modules types are accessible */ + "skipLibCheck": false, + "moduleResolution": "Bundler", + + /* Include test files */ + "rootDir": "..", + "outDir": "../dist" + }, + "include": [ + "../dist/esm/**/*.d.ts", + "../node_modules/@fetchkit/**/*.d.ts", + "./**/*.ts" + ], + "exclude": ["../src/**/*", "../dist/**/*.js", "../dist/**/*.js.map"] +} diff --git a/packages/fmodata/tests/tsconfig.json b/packages/fmodata/tests/tsconfig.json new file mode 100644 index 00000000..b738ae13 --- /dev/null +++ b/packages/fmodata/tests/tsconfig.json @@ -0,0 +1,29 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + /* Strict any checking for tests */ + "noImplicitAny": true, + "strictNullChecks": true, + "strictFunctionTypes": true, + "strictBindCallApply": true, + "strictPropertyInitialization": true, + "noImplicitThis": true, + "alwaysStrict": true, + + /* Disallow explicit any (using @ts-expect-error for intentional any testing) */ + /* Note: TypeScript doesn't have a built-in noExplicitAny, but we can use eslint */ + + /* Path mappings for package name imports */ + "baseUrl": "..", + "paths": { + "@proofkit/fmodata": ["./src"], + "@proofkit/fmodata/*": ["./src/*"] + }, + + /* Include test files */ + "rootDir": "..", + "outDir": "../dist" + }, + "include": ["../src/**/*.ts", "./**/*.ts"], + "exclude": ["../dist", "../node_modules"] +} diff --git a/packages/fmodata/tests/typescript.test.ts b/packages/fmodata/tests/typescript.test.ts new file mode 100644 index 00000000..5b286569 --- /dev/null +++ b/packages/fmodata/tests/typescript.test.ts @@ -0,0 +1,557 @@ +/** + * TypeScript-only API ergonomics tests + * + * This test file focuses on exploring and validating the end-user API without + * executing actual queries. These tests are designed to: + * + * - Verify TypeScript type correctness and API structure + * - Explore API ergonomics and ensure methods can be chained correctly + * - Test query builder creation without making network requests + * - Catch breaking changes in the public API during refactoring + * + * These tests do NOT: + * - Execute actual HTTP requests (.execute() is never called) + * - Require a mock fetch implementation + * - Test runtime behavior or network responses + * + * They serve as compile-time verification and API documentation examples, + * helping ensure the API remains ergonomic and type-safe as the library evolves. + */ + +import { describe, expect, it, expectTypeOf, beforeEach } from "vitest"; +import { z } from "zod/v4"; +import { + fmTableOccurrence, + textField, + numberField, + FMServerConnection, + FMTable, + getTableColumns, + eq, +} from "@proofkit/fmodata"; +import { createMockFetch } from "./utils/mock-fetch"; +import { createMockClient, contacts, users } from "./utils/test-setup"; + +describe("fmodata", () => { + describe("API ergonomics", () => { + const client = createMockClient(); + const db = client.database("TestDB"); + + it("should support list() with query chaining", () => { + const table = db.from(contacts); + const listBuilder = table.list(); + + expect(listBuilder).toBeDefined(); + expect(listBuilder.getQueryString).toBeDefined(); + }); + + it("should support get() for single record retrieval", () => { + const table = db.from(contacts); + const getBuilder = table.get("my-uuid"); + + expect(getBuilder).toBeDefined(); + expect(getBuilder.getRequestConfig).toBeDefined(); + }); + + it("should support getSingleField() API", () => { + const table = db.from(contacts); + const singleFieldBuilder = table + .get("my-uuid") + .getSingleField(contacts.name); + + expect(singleFieldBuilder).toBeDefined(); + expect(singleFieldBuilder.getRequestConfig).toBeDefined(); + }); + + it("should support select() for returning arrays of records", () => { + const table = db.from(contacts); + const selectBuilder = table + .list() + .select({ name: contacts.name, hobby: contacts.hobby }); + + expect(selectBuilder).toBeDefined(); + expect(selectBuilder.getQueryString).toBeDefined(); + }); + + it("should support single() modifier on select()", () => { + const table = db.from(contacts); + const singleSelectBuilder = table + .list() + .select({ name: contacts.name, hobby: contacts.hobby }) + .single(); + + expect(singleSelectBuilder).toBeDefined(); + expect(singleSelectBuilder.getQueryString).toBeDefined(); + }); + + it("should generate query strings correctly", () => { + const table = db.from(contacts); + const queryString = table + .list() + .select({ name: contacts.name, hobby: contacts.hobby }) + .getQueryString(); + + expect(queryString).toBeDefined(); + expect(typeof queryString).toBe("string"); + }); + + it("should infer field names for select() based on schema", () => { + const users = fmTableOccurrence("Users", { + id: textField().primaryKey(), + name: textField(), + email: textField(), + age: numberField(), + }); + + const db = client.database("TestDB"); + const entitySet = db.from(users); + + // These should have autocomplete for "id", "name", "email", "age" + const query1 = entitySet + .list() + .select({ id: users.id, name: users.name }); + const query2 = entitySet + .list() + .select({ email: users.email, age: users.age }); + const query3 = entitySet.list().select({ + id: users.id, + name: users.name, + email: users.email, + age: users.age, + }); + + expect(query1).toBeDefined(); + expect(query2).toBeDefined(); + expect(query3).toBeDefined(); + + // These should be TypeScript errors - fields not in schema + const _typeChecks = () => { + // @ts-expect-error - should pass an object + entitySet.list().select("invalidField"); + // @ts-expect-error - should pass an object + entitySet.list().select(""); + // @ts-expect-error - should pass an object with column references + entitySet.list().select({ invalidField: true }); + entitySet.list().select({ + age: users.age, + // @ts-expect-error - column must be from the correct table + name: contacts.name, + }); + }; + void _typeChecks; + }); + + it("should infer field names for select() with entity IDs", () => { + const products = fmTableOccurrence( + "Products", + { + productId: textField() + .primaryKey() + .readOnly() + .entityId("FMFID:1000001"), + productName: textField().entityId("FMFID:1000002"), + price: numberField().entityId("FMFID:1000003"), + category: textField().entityId("FMFID:1000004"), + inStock: numberField() + .readValidator(z.coerce.boolean()) + .entityId("FMFID:1000005"), + }, + { + entityId: "FMTID:2000001", + }, + ); + + const entitySet = db.from(products); + + // Type inspection to debug the issue + type OccurrenceType = typeof products; + // ^? Should show FMTable with fields + type EntitySetType = typeof entitySet; + // ^? Should show EntitySet with schema + + // These should have autocomplete for "productId", "productName", "price", "category", "inStock" + const query1 = entitySet.list().select({ + productId: products.productId, + productName: products.productName, + }); + const listQuery = entitySet.list(); + type ListQueryType = typeof listQuery; + // ^? First param should be schema type, not never + type Autocomplete1 = Parameters[0]; + // ^? + const query2 = entitySet.list().select({ + price: products.price, + category: products.category, + inStock: products.inStock, + }); + const query3 = entitySet.list().select({ + productId: products.productId, + productName: products.productName, + price: products.price, + category: products.category, + inStock: products.inStock, + }); + + expect(query1).toBeDefined(); + expect(query2).toBeDefined(); + expect(query3).toBeDefined(); + + // These should be TypeScript errors - fields not in schema + const _typeChecks = () => { + // @ts-expect-error - should pass an object + entitySet.list().select("invalidField"); + // @ts-expect-error - should pass an object + entitySet.list().select(""); + // @ts-expect-error - should pass an object with column references + entitySet.list().select({ invalidField: true }); + entitySet.list().select({ + anyName: products.productName, + // @ts-expect-error - column must be from the correct table + name: contacts.name, + }); + }; + void _typeChecks; + }); + + it("should not allow getQueryString() on EntitySet directly", () => { + const entitySet = db.from(users); + + // TypeScript should error if trying to call getQueryString() directly on EntitySet + // You must first call a method like list(), select(), filter(), etc. to get a QueryBuilder + const _typeCheck = () => { + // @ts-expect-error - EntitySet does not have getQueryString method + entitySet.getQueryString(); + }; + void _typeCheck; + + // Correct usage: call list() first to get a QueryBuilder + const queryBuilder = entitySet.list(); + expect(queryBuilder.getQueryString).toBeDefined(); + expect(typeof queryBuilder.getQueryString()).toBe("string"); + }); + }); + + describe("BaseTable and TableOccurrence", () => { + const client = createMockClient(); + + it("should create BaseTable and TableOccurrence", () => { + const tableOcc = fmTableOccurrence("Users", { + id: numberField().primaryKey(), + name: textField(), + email: textField(), + }); + + // Check that the table has the expected name via Symbol + expect((tableOcc as any)[FMTable.Symbol.Name]).toBe("Users"); + expect((tableOcc as any)[FMTable.Symbol.Schema]).toBeDefined(); + expect((tableOcc as any)[FMTable.Symbol.BaseTableConfig].idField).toBe( + "id", + ); + }); + + it("should use TableOccurrence with database.from()", () => { + const users = fmTableOccurrence("Users", { + id: numberField().primaryKey(), + name: textField(), + email: textField(), + }); + + const db = client.database("TestDB"); + const entitySet = db.from(users); + + const queryBuilder = entitySet + .list() + .select({ id: users.id, name: users.name }); + expect(queryBuilder).toBeDefined(); + expect(queryBuilder.getQueryString()).toContain("$select"); + + const recordBuilder = entitySet.get("123"); + expect(recordBuilder).toBeDefined(); + expect(recordBuilder.getRequestConfig().url).toContain("Users"); + }); + + it("should allow table occurrences to be reused across different contexts", () => { + const products = fmTableOccurrence("Products", { + id: numberField().primaryKey(), + name: textField(), + }); + + const client1 = createMockClient(); + const client2 = createMockClient(); + + const db1 = client1.database("DB1"); + const db2 = client2.database("DB2"); + + const entitySet1 = db1.from(products); + const entitySet2 = db2.from(products); + + expect(entitySet1.get("1").getRequestConfig().url).toContain("Products"); + expect(entitySet2.get("1").getRequestConfig().url).toContain("Products"); + }); + + it("should support navigation properties with navigationPaths", () => { + const users = fmTableOccurrence( + "Users", + { + id: textField().primaryKey(), + name: textField(), + email: textField(), + }, + { + navigationPaths: ["Orders"], + }, + ); + + const orders = fmTableOccurrence( + "Orders", + { + orderId: textField().primaryKey(), + userId: textField(), + total: numberField(), + }, + { + navigationPaths: ["Users"], + }, + ); + + expect((users as any)[FMTable.Symbol.NavigationPaths]).toContain( + "Orders", + ); + expect((orders as any)[FMTable.Symbol.NavigationPaths]).toContain( + "Users", + ); + }); + + it("should support base table without idField", () => { + const categories = fmTableOccurrence("Categories", { + categoryId: textField(), + name: textField(), + description: textField(), + // No primaryKey() - idField is undefined + }); + + expect((categories as any)[FMTable.Symbol.Name]).toBe("Categories"); + expect( + (categories as any)[FMTable.Symbol.BaseTableConfig].idField, + ).toBeUndefined(); + expect((categories as any)[FMTable.Symbol.Schema]).toBeDefined(); + }); + }); + + describe("Type safety and result parsing", () => { + it("should properly type the result of a query", async () => { + const client = new FMServerConnection({ + serverUrl: "https://api.example.com", + auth: { apiKey: "test-api-key" }, + fetchClientOptions: { + fetchHandler: createMockFetch([ + { + "@id": "1", + "@editLink": "https://api.example.com/Users/1", + id: 1, + name: "John Doe", + active: 0, // should coerce to boolean false + activeHuman: "active", + }, + ]), + }, + }); + + const usersTO = fmTableOccurrence("Users", { + id: numberField().primaryKey(), + name: textField().notNull(), + active: numberField().readValidator(z.coerce.boolean()).notNull(), + activeHuman: textField().readValidator(z.enum(["active", "inactive"])), + }); + + const db = client.database("TestDB"); + const usersQuery = db.from(usersTO); + const result = await usersQuery.list().execute(); + + if (!result.data || !result.data[0]) { + console.error(result); + throw new Error("Expected at least one result"); + } + + const firstResult = result.data[0]; + + expectTypeOf(firstResult.name).toEqualTypeOf(); + expectTypeOf(firstResult.active).toEqualTypeOf(); + expect(firstResult.active).toBe(false); + expectTypeOf(firstResult.activeHuman).toEqualTypeOf< + "active" | "inactive" + >(); + + const result2 = await usersQuery + .list() + .select(getTableColumns(usersTO)) + .execute(); + + if (!result2.data || !result2.data[0]) { + console.error(result); + throw new Error("Expected at least one result"); + } + + const firstResult2 = result2.data[0]; + + expectTypeOf(firstResult2.name).toEqualTypeOf(); + expectTypeOf(firstResult2.active).toEqualTypeOf(); + expect(firstResult2.active).toBe(false); + expectTypeOf(firstResult2.activeHuman).toEqualTypeOf< + "active" | "inactive" + >(); + + expect(result).toBeDefined(); + expect(result.data).toBeDefined(); + expect(result.data?.length).toBe(1); + }); + }); + + describe("Type-safe orderBy API", () => { + /** + * These tests document the DESIRED orderBy API for typed databases. + * + * DESIRED API: + * - .orderBy("name") → single field, default asc + * - .orderBy(["name", "desc"]) → single field with direction (tuple) + * - .orderBy([["name", "asc"], ["id", "desc"]]) → multiple fields (array of tuples) + * + * The tuple syntax should ONLY accept "asc" or "desc" as the second value, + * NOT field names. This provides: + * - Clear autocomplete: second position shows only "asc" | "desc" + * - Unambiguous syntax: no confusion between [field, field] vs [field, direction] + * + * Uses existing occurrences from test-setup.ts. + */ + + it("should support single field orderBy with default ascending", () => { + const client = createMockClient(); + const db = client.database("fmdapi_test.fmp12"); + + // ✅ Single field name - defaults to ascending + const query = db.from(users).list().orderBy("name"); + + expect(query).toBeDefined(); + expect(query.getQueryString()).toContain("$orderby"); + expect(query.getQueryString()).toContain("name"); + + // ✅ Invalid field names are now caught at compile time + // @ts-expect-error - "anyInvalidField" is not a valid field + db.from("users").list().orderBy("anyInvalidField"); + }); + + it("should support tuple syntax for single field with explicit direction", () => { + const client = createMockClient(); + const db = client.database("fmdapi_test.fmp12"); + + // ✅ Tuple syntax: [fieldName, direction] + // Second value autocompletes to "asc" | "desc" ONLY + const ascQuery = db.from(users).list().orderBy(["name", "asc"]); + const descQuery = db.from(users).list().orderBy(["id", "desc"]); + + expect(ascQuery.getQueryString()).toContain("$orderby"); + expect(ascQuery.getQueryString()).toBe( + "/users?$orderby=name asc&$top=1000", + ); + expect(descQuery.getQueryString()).toContain("$orderby"); + expect(descQuery.getQueryString()).toBe( + "/users?$orderby=id desc&$top=1000", + ); + + // ✅ Second value must be "asc" or "desc" - field names are rejected + // @ts-expect-error - "name" is not a valid direction + db.from("users").list().orderBy(["name", "name"]); + }); + + it("should support tuple syntax with entity IDs and transform field names to FMFIDs", () => { + const client = createMockClient(); + const db = client.database("test.fmp12"); + + // ✅ Tuple syntax: [fieldName, direction] + // Field names are transformed to FMFIDs in the query string + // Table name is also transformed to FMTID when using entity IDs + const ascQuery = db.from(users).list().orderBy(["name", "asc"]); + const descQuery = db.from(users).list().orderBy(["id", "desc"]); + + expect(ascQuery.getQueryString()).toContain("$orderby"); + expect(ascQuery.getQueryString()).toBe( + "/users?$orderby=name asc&$top=1000", + ); + expect(descQuery.getQueryString()).toContain("$orderby"); + expect(descQuery.getQueryString()).toBe( + "/users?$orderby=id desc&$top=1000", + ); + + // ✅ Second value must be "asc" or "desc" - field names are rejected + // @ts-expect-error - "name" is not a valid direction + db.from(users).list().orderBy(["name", "name"]); + }); + + it("should support array of tuples for multiple fields", () => { + const client = createMockClient(); + const db = client.database("fmdapi_test.fmp12"); + + // ✅ Array of tuples for multiple fields with explicit directions + const query = db + .from(users) + .list() + .orderBy([ + ["name", "asc"], + ["id", "desc"], + ]); + + expect(query).toBeDefined(); + expect(query.getQueryString()).toContain("$orderby"); + }); + + it("should chain orderBy with other query methods", () => { + const client = createMockClient(); + const db = client.database("fmdapi_test.fmp12"); + + const query = db + .from(users) + .list() + .select({ name: users.name, id: users.id, active: users.active }) + .where(eq(users.active, true)) + .orderBy(["name", "asc"]) + .top(10) + .skip(0); + + const queryString = query.getQueryString(); + + expect(queryString).toContain("$select"); + expect(queryString).toContain("$filter"); + expect(queryString).toContain("$orderby"); + expect(queryString).toContain("$top"); + expect(queryString).toContain("$skip"); + }); + + /** + * Type error tests - validates compile-time type checking for orderBy. + * + * Custom TypeSafeOrderBy type enforces: + * - Single field: keyof T + * - Tuple: [keyof T, 'asc' | 'desc'] - second position MUST be direction + * - Multiple fields: Array<[keyof T, 'asc' | 'desc']> - array of tuples + */ + it("should reject invalid usage at compile time", () => { + const client = createMockClient(); + const db = client.database("fmdapi_test.fmp12"); + + const _typeChecks = () => { + // ✅ Invalid field name is caught + // @ts-expect-error - "nonexistent" is not a valid field name + db.from("users").list().orderBy(["nonexistent", "asc"]); + + // ✅ Second position must be "asc" or "desc", not a field name + // @ts-expect-error - "name" is not a valid direction + db.from("users").list().orderBy(["name", "name"]); + + // ✅ Ambiguous [field, field] syntax is now rejected + // @ts-expect-error - "id" is not a valid direction + db.from("users").list().orderBy(["name", "id"]); + }; + void _typeChecks; + }); + }); +}); diff --git a/packages/fmodata/tests/update.test.ts b/packages/fmodata/tests/update.test.ts new file mode 100644 index 00000000..d42410a6 --- /dev/null +++ b/packages/fmodata/tests/update.test.ts @@ -0,0 +1,481 @@ +/** + * Insert and Update Tests + * + * Tests for the insert() and update() methods on EntitySet instances. + * This validates type safety and required field constraints. + */ + +import { describe, it, expect, expectTypeOf, vi } from "vitest"; +import { z } from "zod/v4"; +import { + fmTableOccurrence, + textField, + numberField, + type InferTableSchema, + eq, + and, + lt, + Result, +} from "@proofkit/fmodata"; +import { InsertBuilder } from "@proofkit/fmodata/client/insert-builder"; +import { UpdateBuilder } from "@proofkit/fmodata/client/update-builder"; +import { ExecutableUpdateBuilder } from "@proofkit/fmodata/client/update-builder"; +import { simpleMock } from "./utils/mock-fetch"; +import { createMockClient } from "./utils/test-setup"; + +describe("insert and update methods", () => { + const client = createMockClient(); + + const contactsTO = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey(), + name: textField().notNull(), + hobby: textField(), + }, + { + navigationPaths: ["users"], + }, + ); + + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField(), + count: numberField(), + active: numberField() + .notNull() + .readValidator(z.coerce.boolean().default(true)) + .writeValidator(z.boolean().transform((v) => (v ? 1 : 0))), + }, + { + navigationPaths: ["contacts", "test"], + }, + ); + + // Users with required fields for insert + const usersWithRequired = fmTableOccurrence("usersWithRequired", { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField().notNull(), + createdAt: textField(), + }); + + const testTO = fmTableOccurrence("test", { + id: textField().primaryKey(), + name: textField().notNull(), + }); + + type UserFieldNames = keyof InferTableSchema; + + describe("insert method", () => { + it("should return InsertBuilder when called", () => { + const db = client.database("test_db"); + + const result = db.from(users).insert({ username: "test", active: true }); + expect(result).toBeInstanceOf(InsertBuilder); + }); + + it("should accept all fields as optional when no required specified", () => { + const db = client.database("test_db"); + + // @ts-expect-error - some fields are required, no empty object is allowed + db.from(users).insert({}); + + // @ts-expect-error - a required fields is missing + db.from(users).insert({ username: "test" }); + + // Should accept all fields + db.from(users).insert({ + username: "test", + email: "test@example.com", + active: true, + }); + }); + + it("should require specified fields when required is set", () => { + const db = client.database("test_db"); + + // These should work - required fields are username and email + db.from(usersWithRequired).insert({ + username: "test", + email: "test@example.com", + }); + + db.from(usersWithRequired).insert({ + username: "test", + email: "test@example.com", + }); + + // Type check: username and email should be required + expectTypeOf(db.from(usersWithRequired).insert) + .parameter(0) + .toHaveProperty("username"); + expectTypeOf(db.from(usersWithRequired).insert) + .parameter(0) + .toHaveProperty("email"); + }); + + it("should have execute() that returns Result without ODataRecordMetadata by default", () => { + const db = client.database("test_db"); + + const builder = db.from(users).insert({ username: "test", active: true }); + + expectTypeOf(builder.execute).returns.resolves.toMatchTypeOf<{ + data: InferTableSchema | undefined; + error: Error | undefined; + }>(); + }); + }); + + describe("update method with builder pattern", () => { + it("should return UpdateBuilder when update() is called", () => { + const db = client.database("test_db"); + + const result = db.from(users).update({ username: "newname" }); + expect(result).toBeInstanceOf(UpdateBuilder); + }); + + it("should not have execute() on initial UpdateBuilder", () => { + const db = client.database("test_db"); + + const updateBuilder = db.from(users).update({ username: "newname" }); + + // Type check: execute should not exist on UpdateBuilder + expectTypeOf(updateBuilder).not.toHaveProperty("execute"); + }); + + it("should return ExecutableUpdateBuilder after byId()", () => { + const db = client.database("test_db"); + + const result = db + .from(users) + .update({ username: "newname" }) + .byId("user-123"); + expect(result).toBeInstanceOf(ExecutableUpdateBuilder); + }); + + it("should return ExecutableUpdateBuilder after where()", () => { + const db = client.database("test_db"); + + const result = db + .from(users) + .update({ active: false }) + .where((q) => q.where(eq(users.active, true))); + expect(result).toBeInstanceOf(ExecutableUpdateBuilder); + }); + }); + + describe("update by ID", () => { + it("should generate correct URL for update by ID", () => { + const db = client.database("test_db"); + + const updateBuilder = db + .from(users) + .update({ username: "newname" }) + .byId("user-123"); + const config = updateBuilder.getRequestConfig(); + + expect(config.method).toBe("PATCH"); + expect(config.url).toBe("/test_db/users('user-123')"); + expect(config.body).toBe(JSON.stringify({ username: "newname" })); + }); + + it("should return updatedCount type for update by ID", async () => { + const db = client.database("test_db"); + + const updateBuilder = db + .from(users) + .update({ username: "newname" }) + .byId("user-123"); + + // Type check: execute should return Result<{ updatedCount: number }> + expectTypeOf(updateBuilder.execute).returns.resolves.toEqualTypeOf< + Result<{ updatedCount: number }> + >(); + }); + + it("should execute update by ID and return count", async () => { + const mockFetch = simpleMock({ + status: 200, + headers: { "fmodata.affected_rows": "1" }, + body: null, + }); + + const db = client.database("test_db"); + + const result = await db + .from(users) + .update({ username: "newname" }) + .byId("user-123") + .execute({ fetchHandler: mockFetch }); + + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + expect(result.data?.updatedCount).toBe(1); + }); + }); + + describe("update by filter", () => { + it("should generate correct URL for update by filter", () => { + const db = client.database("test_db"); + + const updateBuilder = db + .from(users) + .update({ active: false }) + .where((q) => q.where(eq(users.active, true))); + + const config = updateBuilder.getRequestConfig(); + + expect(config.method).toBe("PATCH"); + expect(config.url).toContain("/test_db/users"); + expect(config.url).toContain("$filter"); + expect(config.body).toBe(JSON.stringify({ active: false })); + }); + + it("should support complex filters with QueryBuilder", () => { + const db = client.database("test_db"); + + const updateBuilder = db + .from(users) + .update({ active: false }) + .where((q) => q.where(and(eq(users.active, true), lt(users.count, 5)))); + + const config = updateBuilder.getRequestConfig(); + + expect(config.method).toBe("PATCH"); + expect(config.url).toContain("$filter"); + }); + + it("should support QueryBuilder chaining in where callback", () => { + const db = client.database("test_db"); + + const updateBuilder = db + .from(users) + .update({ active: false }) + .where((q) => q.where(eq(users.active, true)).top(10)); + + const config = updateBuilder.getRequestConfig(); + + expect(config.method).toBe("PATCH"); + expect(config.url).toContain("$filter"); + expect(config.url).toContain("$top"); + }); + + it("should return updatedCount result type for filter-based update", async () => { + const db = client.database("test_db"); + + const updateBuilder = db + .from(users) + .update({ active: false }) + .where((q) => q.where(eq(users.active, true))); + + // Type check: execute should return Result<{ updatedCount: number }> + expectTypeOf(updateBuilder.execute).returns.resolves.toMatchTypeOf<{ + data: { updatedCount: number } | undefined; + error: Error | undefined; + }>(); + }); + + it("should execute update by filter and return count", async () => { + const mockFetch = simpleMock({ + status: 204, + headers: { "fmodata.affected_rows": "3" }, + body: null, + }); + + const db = client.database("test_db"); + + const result = await db + .from(users) + .update({ active: false }) + .where((q) => q.where(eq(users.active, true))) + .execute({ fetchHandler: mockFetch }); + + expect(result.error).toBeUndefined(); + expect(result.data).toEqual({ updatedCount: 3 }); + }); + }); + + describe("update with optional fields", () => { + it("should allow all fields to be optional for updates", () => { + const db = client.database("test_db"); + + // All fields should be optional for updates (updateRequired removed) + db.from(usersWithRequired).update({ + username: "test", + }); + + db.from(usersWithRequired).update({ + email: "test@example.com", + }); + + // Can update with empty object + db.from(usersWithRequired).update({}); + }); + + it("should keep all fields optional regardless of insert requirements", () => { + const usersForUpdate = fmTableOccurrence("usersForUpdate", { + id: textField().primaryKey(), + username: textField().notNull(), // Required for insert, but not for update + email: textField().notNull(), // Required for insert, but not for update + status: textField(), + }); + + const db = client.database("test_db"); + + // All fields are optional for update, even those required for insert + db.from(usersForUpdate).update({ + status: "active", + }); + + db.from(usersForUpdate).update({ + username: "newname", + }); + + db.from(usersForUpdate).update({}); + }); + }); + + describe("readOnly fields", () => { + it("should exclude id field from insert automatically", () => { + const usersWithReadOnly = fmTableOccurrence("usersWithReadOnly", { + id: textField().primaryKey(), + createdAt: textField().readOnly(), + modifiedAt: textField().readOnly(), + username: textField(), + email: textField(), + }); + + const db = client.database("test_db"); + + // id, createdAt, and modifiedAt should not be available for insert + db.from(usersWithReadOnly).insert({ + username: "john", + // email: "john@example.com", + + // @ts-expect-error - primary key should be readOnly by default + id: "123", + }); + + db.from(usersWithReadOnly).insert({ + username: "john", + + // @ts-expect-error - createdAt should be readOnly + createdAt: "2025-01-01", + }); + + db.from(usersWithReadOnly).insert({ + username: "john", + + // @ts-expect-error - createdAt should be readOnly + modifiedAt: "2025-01-01", + }); + + // Type check: id, createdAt, modifiedAt should not be in insert data type + expectTypeOf(db.from(usersWithReadOnly).insert) + .parameter(0) + .not.toHaveProperty("id"); + + expectTypeOf(db.from(usersWithReadOnly).insert) + .parameter(0) + .not.toHaveProperty("createdAt"); + + expectTypeOf(db.from(usersWithReadOnly).insert) + .parameter(0) + .not.toHaveProperty("modifiedAt"); + }); + + it("should exclude id field and readOnly fields from update", () => { + const usersWithReadOnlyTO = fmTableOccurrence("usersWithReadOnly", { + id: textField().primaryKey(), + createdAt: textField().readOnly(), + modifiedAt: textField().readOnly(), + username: textField(), + email: textField(), + }); + + const db = client.database("test_db"); + + // id, createdAt, and modifiedAt should not be available for update + db.from(usersWithReadOnlyTO).update({ + username: "newname", + }); + + db.from(usersWithReadOnlyTO).update({ + email: "newemail@example.com", + }); + + // Type check: id, createdAt, modifiedAt should not be in update data type + expectTypeOf(db.from(usersWithReadOnlyTO).update) + .parameter(0) + .not.toHaveProperty("id"); + + expectTypeOf(db.from(usersWithReadOnlyTO).update) + .parameter(0) + .not.toHaveProperty("createdAt"); + + expectTypeOf(db.from(usersWithReadOnlyTO).update) + .parameter(0) + .not.toHaveProperty("modifiedAt"); + }); + + it("should allow inserts without specifying readOnly fields", () => { + const usersWithReadOnlyTO = fmTableOccurrence("usersWithReadOnly", { + id: textField().primaryKey(), + createdAt: textField().readOnly(), + username: textField(), + email: textField(), // nullable by default + }); + + const db = client.database("test_db"); + + // Should work - id and createdAt are excluded automatically + db.from(usersWithReadOnlyTO).insert({ + username: "john", + email: "john@example.com", + }); + + // Should work - email is optional (nullable) + db.from(usersWithReadOnlyTO).insert({ + username: "jane", + }); + }); + }); + + describe("error handling", () => { + it("should return error on failed update by ID", async () => { + const mockFetch = vi.fn().mockRejectedValue(new Error("Network error")); + + const db = client.database("test_db"); + + const result = await db + .from(users) + .update({ username: "newname" }) + .byId("user-123") + .execute({ fetchHandler: mockFetch as any }); + + expect(result.data).toBeUndefined(); + expect(result.error).toBeInstanceOf(Error); + expect(result.error?.message).toBe("Network error"); + }); + + it("should return error on failed update by filter", async () => { + const mockFetch = vi.fn().mockRejectedValue(new Error("Network error")); + + const db = client.database("test_db"); + + const result = await db + .from(users) + .update({ active: false }) + .where((q) => q.where(eq(users.active, true))) + .execute({ fetchHandler: mockFetch as any }); + + expect(result.data).toBeUndefined(); + expect(result.error).toBeInstanceOf(Error); + expect(result.error?.message).toBe("Network error"); + }); + }); +}); diff --git a/packages/fmodata/tests/use-entity-ids-override.test.ts b/packages/fmodata/tests/use-entity-ids-override.test.ts new file mode 100644 index 00000000..ceed3e46 --- /dev/null +++ b/packages/fmodata/tests/use-entity-ids-override.test.ts @@ -0,0 +1,326 @@ +/** + * Tests for per-request useEntityIds override + * + * These tests verify that the useEntityIds option can be overridden at the request level + * using ExecuteOptions, allowing users to disable entity IDs for specific requests even + * when the database is configured to use them by default. + */ + +import { describe, it, expect } from "vitest"; +import { z } from "zod/v4"; +import { + FMServerConnection, + fmTableOccurrence, + textField, +} from "@proofkit/fmodata"; +import { simpleMock } from "./utils/mock-fetch"; + +// Create database with entity IDs +const contactsTO = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + }, + { + entityId: "FMTID:100", + }, +); + +describe("Per-request useEntityIds override", () => { + it("should allow disabling entity IDs for a specific request", async () => { + // Create connection with entity IDs enabled by default + const connection = new FMServerConnection({ + serverUrl: "https://test.com", + auth: { username: "test", password: "test" }, + }); + + const db = connection.database("TestDB"); + + // First request: use default (should have entity ID header) + await db + .from(contactsTO) + .list() + .execute({ + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const headers = (init as RequestInit)?.headers as Record< + string, + string + >; + expect(headers?.Prefer).toBe("fmodata.entity-ids"); + return simpleMock({ body: { value: [] }, status: 200 })(input, init); + }, + }); + + // Second request: explicitly disable entity IDs for this request only + await db + .from(contactsTO) + .list() + .execute({ + useEntityIds: false, + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const headers = (init as RequestInit)?.headers as Record< + string, + string + >; + expect(headers?.Prefer).toBeUndefined(); + return simpleMock({ body: { value: [] }, status: 200 })(input, init); + }, + }); + + // Third request: explicitly enable entity IDs for this request + await db + .from(contactsTO) + .list() + .execute({ + useEntityIds: true, + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const headers = (init as RequestInit)?.headers as Record< + string, + string + >; + expect(headers?.Prefer).toBe("fmodata.entity-ids"); + return simpleMock({ body: { value: [] }, status: 200 })(input, init); + }, + }); + }); + + it("should allow enabling entity IDs for a specific request when disabled by default", async () => { + // Create connection without entity IDs by default + const connection = new FMServerConnection({ + serverUrl: "https://test.com", + auth: { username: "test", password: "test" }, + }); + + const db = connection.database("TestDB", { + useEntityIds: false, + }); + + // First request: use default (should NOT have entity ID header) + await db + .from(contactsTO) + .list() + .execute({ + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const headers = (init as RequestInit)?.headers as Record< + string, + string + >; + expect(headers?.Prefer).toBeUndefined(); + return simpleMock({ body: { value: [] }, status: 200 })(input, init); + }, + }); + + // Second request: explicitly enable entity IDs for this request only + await db + .from(contactsTO) + .list() + .execute({ + useEntityIds: true, + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const headers = (init as RequestInit)?.headers as Record< + string, + string + >; + expect(headers?.Prefer).toBe("fmodata.entity-ids"); + return simpleMock({ body: { value: [] }, status: 200 })(input, init); + }, + }); + + // Third request: confirm default is still disabled + await db + .from(contactsTO) + .list() + .execute({ + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const headers = (init as RequestInit)?.headers as Record< + string, + string + >; + expect(headers?.Prefer).toBeUndefined(); + return simpleMock({ body: { value: [] }, status: 200 })(input, init); + }, + }); + }); + + it("should work with insert operations", async () => { + const connection = new FMServerConnection({ + serverUrl: "https://test.com", + auth: { username: "test", password: "test" }, + }); + + const contactsTO = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + }, + { + entityId: "FMTID:100", + }, + ); + + const db = connection.database("TestDB"); + + // Insert with default settings (entity IDs enabled) + await db + .from(contactsTO) + .insert({ name: "Test" }) + .execute({ + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const headers = (init as RequestInit)?.headers as Record< + string, + string + >; + expect(headers?.Prefer).toContain("fmodata.entity-ids"); + return simpleMock({ body: { id: "1", name: "Test" }, status: 200 })( + input, + init, + ); + }, + }); + + // Insert with entity IDs disabled for this request + await db + .from(contactsTO) + .insert({ name: "Test" }) + .execute({ + useEntityIds: false, + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const headers = (init as RequestInit)?.headers as Record< + string, + string + >; + expect(headers?.Prefer).not.toContain("fmodata.entity-ids"); + return simpleMock({ body: { id: "1", name: "Test" }, status: 200 })( + input, + init, + ); + }, + }); + }); + + it("should work with update operations", async () => { + const connection = new FMServerConnection({ + serverUrl: "https://test.com", + auth: { username: "test", password: "test" }, + }); + + const contactsTO = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + }, + { + entityId: "FMTID:100", + }, + ); + + const db = connection.database("TestDB"); + + // Update with entity IDs disabled + await db + .from(contactsTO) + .update({ name: "Updated" }) + .byId("123") + .execute({ + useEntityIds: false, + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const headers = (init as RequestInit)?.headers as Record< + string, + string + >; + expect(headers?.Prefer).toBeUndefined(); + return simpleMock({ + body: "1", + status: 200, + headers: { "fmodata.affected_rows": "1" }, + })(input, init); + }, + }); + + // Update with entity IDs enabled + await db + .from(contactsTO) + .update({ name: "Updated" }) + .byId("123") + .execute({ + useEntityIds: true, + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const headers = (init as RequestInit)?.headers as Record< + string, + string + >; + expect(headers?.Prefer).toBe("fmodata.entity-ids"); + return simpleMock({ + body: "1", + status: 200, + headers: { "fmodata.affected_rows": "1" }, + })(input, init); + }, + }); + }); + + it("should work with delete operations", async () => { + const connection = new FMServerConnection({ + serverUrl: "https://test.com", + auth: { username: "test", password: "test" }, + }); + + const contactsTO = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + }, + { + entityId: "FMTID:100", + }, + ); + + const db = connection.database("TestDB"); + + // Delete with entity IDs enabled + await db + .from(contactsTO) + .delete() + .byId("123") + .execute({ + useEntityIds: true, + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const headers = (init as RequestInit)?.headers as Record< + string, + string + >; + expect(headers?.Prefer).toBe("fmodata.entity-ids"); + return simpleMock({ + body: "1", + status: 204, + headers: { "fmodata.affected_rows": "1" }, + })(input, init); + }, + }); + + // Delete with entity IDs disabled + await db + .from(contactsTO) + .delete() + .byId("123") + .execute({ + useEntityIds: false, + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { + const headers = (init as RequestInit)?.headers as Record< + string, + string + >; + expect(headers?.Prefer).toBeUndefined(); + return simpleMock({ + body: "1", + status: 204, + headers: { "fmodata.affected_rows": "1" }, + })(input, init); + }, + }); + }); +}); diff --git a/packages/fmodata/tests/utils/helpers.ts b/packages/fmodata/tests/utils/helpers.ts new file mode 100644 index 00000000..9261db48 --- /dev/null +++ b/packages/fmodata/tests/utils/helpers.ts @@ -0,0 +1,19 @@ +import { z } from "zod/v4"; + +export const jsonCodec = (schema: T) => + z.codec(z.string(), schema, { + decode: (jsonString, ctx) => { + try { + return JSON.parse(jsonString); + } catch (err: any) { + ctx.issues.push({ + code: "invalid_format", + format: "json", + input: jsonString, + message: err.message, + }); + return z.NEVER; + } + }, + encode: (value) => JSON.stringify(value), + }); diff --git a/packages/fmodata/tests/utils/mock-fetch.ts b/packages/fmodata/tests/utils/mock-fetch.ts new file mode 100644 index 00000000..817a2fca --- /dev/null +++ b/packages/fmodata/tests/utils/mock-fetch.ts @@ -0,0 +1,174 @@ +/** + * Mock Fetch Utility + * + * This utility creates a mock fetch function that returns a single pre-recorded API response. + * It's designed to be compatible with @fetchkit/ffetch and can be passed via fetchClientOptions + * or as a per-execution override. + * + * Usage: + * ```ts + * import { createMockFetch } from './tests/utils/mock-fetch'; + * import { mockResponses } from './tests/fixtures/responses'; + * + * // Use a specific response for a single query execution + * const result = await db.from('contacts').list().execute({ + * fetchHandler: createMockFetch(mockResponses['list-basic']) + * }); + * + * // Or use a simple array (wraps in OData format) + * const result = await db.from('contacts').list().execute({ + * fetchHandler: createMockFetch([{ id: 1, name: 'John' }]) + * }); + * ``` + * + * Benefits: + * - Each test explicitly declares which response it expects + * - No URL matching logic needed - the response is used directly + * - Tests are more robust and easier to understand + * - Supports both full MockResponse objects and simple data arrays + */ + +import type { MockResponse } from "../fixtures/responses"; +import { MOCK_SERVER_URL } from "./mock-server-url"; + +/** + * Creates a mock fetch function that returns the provided response + * + * @param response - Either a full MockResponse object or a simple array/data to wrap in OData format + * @returns A fetch-compatible function that returns the mocked response + */ +/** + * Recursively removes @id and @editLink fields from an object or array + */ +function stripODataAnnotations(data: any): any { + if (Array.isArray(data)) { + return data.map(stripODataAnnotations); + } + if (data && typeof data === "object") { + const { "@id": _id, "@editLink": _editLink, ...rest } = data; + const result: any = {}; + for (const [key, value] of Object.entries(rest)) { + result[key] = stripODataAnnotations(value); + } + return result; + } + return data; +} + +export function createMockFetch(response: MockResponse | any[]): typeof fetch { + return async ( + input: RequestInfo | URL, + init?: RequestInit, + ): Promise => { + // Extract Accept header from request - handle different formats + let acceptHeader = ""; + + if (input instanceof Request) { + acceptHeader = input.headers.get("Accept") || ""; + } else if (init?.headers) { + // Handle different HeadersInit formats + if (init.headers instanceof Headers) { + acceptHeader = init.headers.get("Accept") || ""; + } else if (Array.isArray(init.headers)) { + const acceptEntry = init.headers.find( + ([key]) => key.toLowerCase() === "accept", + ); + acceptHeader = acceptEntry ? acceptEntry[1] : ""; + } else { + // Record + acceptHeader = init.headers["Accept"] || init.headers["accept"] || ""; + } + } + + // Determine if we should strip annotations based on Accept header + // If Accept header contains "odata.metadata=none", strip annotations + // Otherwise (Accept: "application/json"), include annotations + const shouldStripAnnotations = acceptHeader.includes("odata.metadata=none"); + + // Handle simple array input (legacy mockFetch behavior) + if (Array.isArray(response)) { + const data = shouldStripAnnotations + ? stripODataAnnotations({ value: response }) + : { value: response }; + return new Response(JSON.stringify(data), { + status: 200, + statusText: "OK", + headers: { + "content-type": "application/json", + }, + }); + } + + // Handle full MockResponse object + const mockResponse = response as MockResponse; + const contentType = + mockResponse.headers?.["content-type"] || + "application/json;charset=utf-8"; + const isJson = contentType.includes("application/json"); + + // Build headers including any custom headers from mockResponse + const headers = new Headers({ + "content-type": contentType, + }); + + // Add any additional headers from the mock response + if (mockResponse.headers) { + Object.entries(mockResponse.headers).forEach(([key, value]) => { + if (key !== "content-type" && value) { + headers.set(key, value); + } + }); + } + + // Status 204 (No Content) cannot have a body + if (mockResponse.status === 204) { + return new Response(null, { + status: mockResponse.status, + statusText: "No Content", + headers, + }); + } + + // Strip annotations if Accept header requests it + const responseData = shouldStripAnnotations + ? stripODataAnnotations(mockResponse.response) + : mockResponse.response; + + // Format response body based on content type + const responseBody = isJson + ? JSON.stringify(responseData) + : String(responseData); + + return new Response(responseBody, { + status: mockResponse.status, + statusText: + mockResponse.status >= 200 && mockResponse.status < 300 + ? "OK" + : "Error", + headers, + }); + }; +} + +/** + * Helper to create a mock response with standard structure + * Useful for operations that return counts via headers (delete, bulk update) + */ +export interface SimpleMockConfig { + status: number; + body?: any; + headers?: Record; +} + +export function simpleMock(config: SimpleMockConfig): typeof fetch { + return createMockFetch({ + url: MOCK_SERVER_URL, + method: "GET", + status: config.status, + response: config.body ?? null, + headers: { + "content-type": "application/json", + ...config.headers, + }, + }); +} diff --git a/packages/fmodata/tests/utils/mock-server-url.ts b/packages/fmodata/tests/utils/mock-server-url.ts new file mode 100644 index 00000000..7d565718 --- /dev/null +++ b/packages/fmodata/tests/utils/mock-server-url.ts @@ -0,0 +1,9 @@ +/** + * Mock Server URL Constant + * + * This constant defines the mock server URL used in test fixtures. + * All captured responses have their server URLs replaced with this value + * to avoid storing actual test server names in the codebase. + */ +export const MOCK_SERVER_URL = "api.example.com"; + diff --git a/packages/fmodata/tests/utils/test-setup.ts b/packages/fmodata/tests/utils/test-setup.ts new file mode 100644 index 00000000..dd39c953 --- /dev/null +++ b/packages/fmodata/tests/utils/test-setup.ts @@ -0,0 +1,216 @@ +/** + * Shared Test Setup Components + * + * Provides reusable table occurrences and mock client + * for use across test files. Based on e2e.test.ts schemas. + */ + +import { + FMServerConnection, + fmTableOccurrence, + textField, + numberField, + timestampField, + dateField, + type InferTableSchema, + type FieldBuilder, +} from "@proofkit/fmodata"; +import { z } from "zod/v4"; + +// Helper function for boolean fields (FileMaker stores as 0/1) +const booleanField = (): FieldBuilder => + numberField() + // Parses the number to a boolean when reading from the database + .readValidator(z.coerce.boolean()) + // Allows the user to pass a boolean when inserting or updating, converting it back to number + .writeValidator(z.boolean().transform((val) => (val ? 1 : 0))); + +export const hobbyEnum = z.enum([ + "Board games", + "Reading", + "Traveling", + "Unknown", +]); + +// Table occurrences using new ORM patterns + +export const contacts = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: timestampField().readOnly(), + CreatedBy: textField().readOnly(), + ModificationTimestamp: timestampField().readOnly(), + ModifiedBy: textField(), + name: textField(), + hobby: textField().readValidator(hobbyEnum.nullable().catch("Unknown")), + id_user: textField(), + image: containerField(), // should not be included in the default select when set to "all" or "schema" + }, + { + defaultSelect: "all", + navigationPaths: ["users", "invoices"], + }, +); + +export const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey().readValidator(z.uuid()), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + active: booleanField(), + fake_field: textField().readValidator( + z.string().catch("I only exist in the schema, not the database"), + ), + id_customer: textField(), + }, + { + defaultSelect: "all", + navigationPaths: ["contacts"], + }, +); + +export const invoices = fmTableOccurrence( + "invoices", + { + id: textField().primaryKey(), + invoiceNumber: textField().notNull(), + id_contact: textField(), + invoiceDate: dateField(), + dueDate: dateField(), + total: numberField(), + status: textField().readValidator( + z.enum(["draft", "sent", "paid", "overdue"]).nullable(), + ), + }, + { + defaultSelect: "all", + navigationPaths: ["lineItems", "contacts"], + }, +); + +export const lineItems = fmTableOccurrence( + "lineItems", + { + id: textField().primaryKey(), + id_invoice: textField(), + description: textField(), + quantity: numberField(), + unitPrice: numberField(), + lineTotal: numberField(), + }, + { + defaultSelect: "all", + navigationPaths: ["invoices"], + }, +); + +// Table occurrences with entity IDs +export const contactsTOWithIds = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey().entityId("FMFID:10"), + CreationTimestamp: timestampField().entityId("FMFID:11"), + CreatedBy: textField().entityId("FMFID:12"), + ModificationTimestamp: timestampField().entityId("FMFID:13"), + ModifiedBy: textField().entityId("FMFID:14"), + name: textField().entityId("FMFID:15"), + hobby: textField() + .entityId("FMFID:16") + .readValidator(hobbyEnum.nullable().catch("Unknown")), + id_user: textField().entityId("FMFID:17"), + }, + { + entityId: "FMTID:200", + useEntityIds: true, + defaultSelect: "all", + navigationPaths: ["users"], + }, +); + +export const usersTOWithIds = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:1").readValidator(z.uuid()), + CreationTimestamp: timestampField().entityId("FMFID:2"), + CreatedBy: textField().entityId("FMFID:3"), + ModificationTimestamp: timestampField().entityId("FMFID:4"), + ModifiedBy: textField().entityId("FMFID:5"), + name: textField().entityId("FMFID:6"), + active: booleanField().entityId("FMFID:7"), + fake_field: textField() + .entityId("FMFID:8") + .readValidator( + z.string().catch("I only exist in the schema, not the database"), + ), + id_customer: textField().entityId("FMFID:9"), + }, + { + entityId: "FMTID:1065093", + useEntityIds: true, + defaultSelect: "all", + navigationPaths: ["contacts"], + }, +); + +export const arbitraryTable = fmTableOccurrence("arbitrary_table", { + id: textField().primaryKey(), + name: textField().notNull(), +}); + +// Simple users table occurrence (same name as usersTO to test validation) +export const usersSimpleTO = fmTableOccurrence("users", { + id: textField().primaryKey().notNull(), + name: textField().notNull(), + // intentionally missing fields to test validation +}); + +// Types - extract from table occurrences for backward compatibility +export type ContactSchema = InferTableSchema; +export type UserSchema = InferTableSchema; +export type InvoiceSchema = InferTableSchema; +export type LineItemSchema = InferTableSchema; + +// Backward-compatible base table exports for tests that need .schema property +// These extract the schema from the new FMTable instances +import { containerField, FMTable } from "@proofkit/fmodata"; + +function getSchemaFromTable>(table: T) { + return (table as any)[FMTable.Symbol.Schema]; +} + +// export const contactsBase = { +// schema: getSchemaFromTable(contactsTO), +// } as const; + +// export const usersBase = { +// schema: getSchemaFromTable(usersTO), +// } as const; + +// export const invoicesBase = { +// schema: getSchemaFromTable(invoicesTO), +// } as const; + +// export const lineItemsBase = { +// schema: getSchemaFromTable(lineItemsTO), +// } as const; + +// export const contactsBaseWithIds = { +// schema: getSchemaFromTable(contactsTOWithIds), +// } as const; + +// export const usersBaseWithIds = { +// schema: getSchemaFromTable(usersTOWithIds), +// } as const; + +// Mock client factory - ensures unit tests never hit real databases +export function createMockClient(): FMServerConnection { + return new FMServerConnection({ + serverUrl: "https://api.example.com", + auth: { apiKey: "test-api-key" }, + }); +} diff --git a/packages/fmodata/tests/validation.test.ts b/packages/fmodata/tests/validation.test.ts new file mode 100644 index 00000000..81ba172b --- /dev/null +++ b/packages/fmodata/tests/validation.test.ts @@ -0,0 +1,209 @@ +/** + * Mock Fetch Tests + * + * These tests use captured responses from real FileMaker OData API calls + * to test the client without requiring a live server connection. + * + * The mock responses are stored in tests/fixtures/responses.ts and are + * captured using the capture script: pnpm capture + * + * To add new tests: + * 1. First, ensure you have a corresponding mock response captured + * 2. Create a test that uses the same query pattern + * 3. The mock fetch will automatically match the request URL to the stored response + */ + +import { describe, it, expect, expectTypeOf, assert } from "vitest"; +import { simpleMock } from "./utils/mock-fetch"; +import { + createMockClient, + hobbyEnum, + usersSimpleTO, + contacts, + users, +} from "./utils/test-setup"; +import { z } from "zod/v4"; +import { fmTableOccurrence, textField } from "@proofkit/fmodata"; + +describe("Validation Tests", () => { + const client = createMockClient(); + const db = client.database("fmdapi_test.fmp12"); + const simpleDb = client.database("fmdapi_test.fmp12"); + + describe("validateRecord", () => { + it("should validate a single record", async () => { + const result = await db + .from(contacts) + .list() + .select({ hobby: contacts.hobby }) + .execute({ + fetchHandler: simpleMock({ + status: 200, + body: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts", + value: [ + { + hobby: "Invalid Hobby", + }, + ], + }, + }), + }); + + assert(result.data, "Result data should be defined"); + const firstRecord = result.data?.[0]; + assert(firstRecord, "First record should be defined"); + + // should use catch block to validate the hobby + expect(firstRecord?.hobby).toBe("Unknown"); + }); + + it("should validate records within an expand expression", async () => { + const result = await db + .from(contacts) + .list() + .expand(users, (b: any) => + b.select({ name: users.name, fake_field: users.fake_field }), + ) + .execute({ + fetchHandler: simpleMock({ + status: 200, + body: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts", + value: [ + { + PrimaryKey: "B5BFBC89-03E0-47FC-ABB6-D51401730227", + CreationTimestamp: "2025-10-31T10:03:27Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T15:55:53Z", + ModifiedBy: "admin", + name: "Eric", + hobby: "Board games", + id_user: "1A269FA3-82E6-465A-94FA-39EE3F2F9B5D", + users: [ + { + name: "Test User", + }, + ], + }, + ], + }, + }), + }); + + assert(result.data, "Result data should be defined"); + expect(result.error).toBeUndefined(); + if (!result.data) throw new Error("Expected result.data to be defined"); + const firstRecord = result.data[0]!; + assert(firstRecord, "First record should be defined"); + + // Verify the contact record is validated + expect(firstRecord.name).toBe("Eric"); + expect(firstRecord.hobby).toBe("Board games"); + + // Verify the expanded users are validated and present + expect(firstRecord.users).toBeDefined(); + expect(Array.isArray(firstRecord.users)).toBe(true); + expect(firstRecord.users.length).toBe(1); + + const expandedUser = firstRecord.users[0]!; + + assert(expandedUser, "Expanded user should be defined"); + + // Verify the expanded user fields are validated according to schema + expect(expandedUser.name).toBe("Test User"); + expect(expandedUser.fake_field).toBe( + "I only exist in the schema, not the database", + ); + }); + }); + it("should automatically select only fields in the schema", async () => { + const simpleUsers = fmTableOccurrence("users", { + id: textField().primaryKey().notNull(), + name: textField().notNull(), + }); + const query = simpleDb.from(simpleUsers).list(); + + const queryString = query.getQueryString(); + + expect(queryString).toContain(`$select=`); + expect(queryString).toContain(`name`); + expect(queryString).toContain(`"id"`); // must quote the id field + expect(queryString).not.toContain(`$expand`); + }); + + it("should skip validation if requested", async () => { + const result = await db + .from(contacts) + .list() + .execute({ + skipValidation: true, + fetchHandler: simpleMock({ + status: 200, + body: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts", + value: [ + { + PrimaryKey: "B5BFBC89-03E0-47FC-ABB6-D51401730227", + hobby: "not a valid hobby", + }, + ], + }, + }), + }); + + expect(result).toBeDefined(); + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + if (!result.data) throw new Error("Expected result.data to be defined"); + expect(Array.isArray(result.data)).toBe(true); + + const firstRecord = result.data[0]!; + // types should not change, even if skipValidation is true + expectTypeOf(firstRecord.hobby).toEqualTypeOf | null>(); + + expect(firstRecord?.hobby).toBe("not a valid hobby"); + }); + + it("should return odata annotations if requested, even if skipValidation is true", async () => { + const result = await db + .from(contacts) + .list() + .execute({ + skipValidation: true, + includeODataAnnotations: true, + fetchHandler: simpleMock({ + status: 200, + body: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts", + value: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts(B5BFBC89-03E0-47FC-ABB6-D51401730227)", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts(B5BFBC89-03E0-47FC-ABB6-D51401730227)", + PrimaryKey: "B5BFBC89-03E0-47FC-ABB6-D51401730227", + hobby: "not a valid hobby", + }, + ], + }, + }), + }); + + expect(result).toBeDefined(); + expect(result.error).toBeUndefined(); + expect(result.data).toBeDefined(); + if (!result.data) throw new Error("Expected result.data to be defined"); + expect(Array.isArray(result.data)).toBe(true); + + const firstRecord = result.data[0]!; + expect(firstRecord).toHaveProperty("@id"); + expect(firstRecord).toHaveProperty("@editLink"); + }); +}); diff --git a/packages/fmodata/tsconfig.json b/packages/fmodata/tsconfig.json new file mode 100644 index 00000000..f3678481 --- /dev/null +++ b/packages/fmodata/tsconfig.json @@ -0,0 +1,32 @@ +{ + "compilerOptions": { + /* Base Options: */ + "esModuleInterop": true, + "skipLibCheck": true, + "target": "es2022", + "allowJs": true, + "resolveJsonModule": true, + "moduleDetection": "force", + "isolatedModules": true, + + /* Strictness */ + "strict": true, + "noUncheckedIndexedAccess": true, + "noImplicitOverride": true, + + /* If transpiling with TypeScript: */ + "module": "ESNext", + "moduleResolution": "Bundler", + "outDir": "dist", + "rootDir": "src", + "sourceMap": true, + + /* AND if you're building for a library: */ + "declaration": true, + + /* AND if you're building for a library in a monorepo: */ + "declarationMap": true + }, + "exclude": ["*.config.ts", "tests", "dist"], + "include": ["./src/index.ts", "./src/**/*.ts"] +} diff --git a/packages/fmodata/vite.config.ts b/packages/fmodata/vite.config.ts new file mode 100644 index 00000000..3207ea2b --- /dev/null +++ b/packages/fmodata/vite.config.ts @@ -0,0 +1,16 @@ +import { defineConfig, mergeConfig } from "vite"; +import { tanstackViteConfig } from "@tanstack/vite-config"; + +const config = defineConfig({ + plugins: [], +}); + +export default mergeConfig( + config, + tanstackViteConfig({ + entry: "./src/index.ts", + srcDir: "./src", + cjs: false, + outDir: "./dist", + }), +); diff --git a/packages/fmodata/vitest.config.ts b/packages/fmodata/vitest.config.ts new file mode 100644 index 00000000..f01ce6aa --- /dev/null +++ b/packages/fmodata/vitest.config.ts @@ -0,0 +1,27 @@ +import { defineConfig } from "vitest/config"; +import { resolve } from "path"; + +export default defineConfig({ + resolve: { + alias: { + "@proofkit/fmodata": process.env.TEST_BUILD + ? resolve(__dirname, "./dist/esm") + : resolve(__dirname, "./src"), + }, + }, + test: { + testTimeout: 15000, + // Exclude E2E tests from default test runs + // When you pass a file path directly (e.g., vitest run tests/e2e.test.ts), + // vitest will run it regardless of the exclude pattern + // Run E2E tests with: pnpm test:e2e + exclude: ["**/node_modules/**", "**/dist/**", "tests/e2e/**"], + typecheck: { + enabled: true, + include: ["src/**/*.ts", "tests/**/*.test.ts", "tests/**/*.test-d.ts"], + tsconfig: process.env.TEST_BUILD + ? "./tests/tsconfig.build.json" + : "./tests/tsconfig.json", + }, + }, +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0abda548..dbf4bbc7 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -429,6 +429,9 @@ importers: '@proofkit/registry': specifier: workspace:* version: link:../registry + '@rollup/plugin-replace': + specifier: ^6.0.3 + version: 6.0.3(rollup@4.40.2) '@t3-oss/env-nextjs': specifier: ^0.10.1 version: 0.10.1(typescript@5.9.2)(zod@3.25.64) @@ -599,7 +602,60 @@ importers: specifier: ^3.2.4 version: 3.2.4(@types/debug@4.1.12)(@types/node@22.17.1)(@vitest/ui@3.2.4)(happy-dom@15.11.7)(jiti@2.4.2)(lightningcss@1.30.1)(msw@2.10.2(@types/node@22.17.1)(typescript@5.9.2))(tsx@4.20.3)(yaml@2.8.0) - packages/fmodata: {} + packages/fmodata: + dependencies: + '@fetchkit/ffetch': + specifier: ^4.2.0 + version: 4.2.0 + dotenv: + specifier: ^16.5.0 + version: 16.5.0 + es-toolkit: + specifier: ^1.38.0 + version: 1.38.0 + neverthrow: + specifier: ^8.2.0 + version: 8.2.0 + odata-query: + specifier: ^8.0.4 + version: 8.0.4 + devDependencies: + '@standard-schema/spec': + specifier: ^1.0.0 + version: 1.0.0 + '@tanstack/vite-config': + specifier: ^0.2.0 + version: 0.2.0(@types/node@22.17.1)(rollup@4.40.2)(typescript@5.9.3)(vite@6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0)) + '@types/node': + specifier: ^22.17.1 + version: 22.17.1 + fast-xml-parser: + specifier: ^5.3.2 + version: 5.3.2 + prettier: + specifier: ^3.5.3 + version: 3.5.3 + publint: + specifier: ^0.3.12 + version: 0.3.12 + tsx: + specifier: ^4.19.2 + version: 4.20.3 + typescript: + specifier: ^5.9.3 + version: 5.9.3 + vite: + specifier: ^6.3.4 + version: 6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0) + vite-plugin-dts: + specifier: ^4.5.4 + version: 4.5.4(@types/node@22.17.1)(rollup@4.40.2)(typescript@5.9.3)(vite@6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0)) + vitest: + specifier: ^4.0.7 + version: 4.0.15(@types/node@22.17.1)(@vitest/ui@3.2.4)(happy-dom@15.11.7)(jiti@2.4.2)(lightningcss@1.30.1)(msw@2.10.2(@types/node@22.17.1)(typescript@5.9.3))(tsx@4.20.3)(yaml@2.8.0) + zod: + specifier: 4.1.12 + version: 4.1.12 packages/registry: dependencies: @@ -1888,6 +1944,9 @@ packages: resolution: {integrity: sha512-0J+zgWxHN+xXONWIyPWKFMgVuJoZuGiIFu8yxk7RJjxkzpGmyja5wRFqZIVtjDVOQpV+Rw0iOAjYPE2eQyjr0w==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@fetchkit/ffetch@4.2.0': + resolution: {integrity: sha512-OUpnod9/vhM0S6jQFLfOp+xnTy0wGQSKADg1+ZZ9oiJqCqYxEbOQ+OOJ/y9x/CBUc/j0BBw/930vndmpthwWjA==} + '@floating-ui/core@1.7.1': resolution: {integrity: sha512-azI0DrjMMfIug/ExbBaeDVJXcY0a7EPvPjb2xAJPa4HeimBX+Z18HK8QQR3jb6356SnDDdxx+hinMLcJEDdOjw==} @@ -2141,6 +2200,9 @@ packages: '@jridgewell/sourcemap-codec@1.5.0': resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==} + '@jridgewell/sourcemap-codec@1.5.5': + resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} + '@jridgewell/trace-mapping@0.3.25': resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==} @@ -2216,16 +2278,29 @@ packages: '@microsoft/api-extractor-model@7.29.6': resolution: {integrity: sha512-gC0KGtrZvxzf/Rt9oMYD2dHvtN/1KPEYsrQPyMKhLHnlVuO/f4AFN3E4toqZzD2pt4LhkKoYmL2H9tX3yCOyRw==} + '@microsoft/api-extractor-model@7.32.1': + resolution: {integrity: sha512-u4yJytMYiUAnhcNQcZDTh/tVtlrzKlyKrQnLOV+4Qr/5gV+cpufWzCYAB1Q23URFqD6z2RoL2UYncM9xJVGNKA==} + '@microsoft/api-extractor@7.47.7': resolution: {integrity: sha512-fNiD3G55ZJGhPOBPMKD/enozj8yxJSYyVJWxRWdcUtw842rvthDHJgUWq9gXQTensFlMHv2wGuCjjivPv53j0A==} hasBin: true + '@microsoft/api-extractor@7.55.1': + resolution: {integrity: sha512-l8Z+8qrLkZFM3HM95Dbpqs6G39fpCa7O5p8A7AkA6hSevxkgwsOlLrEuPv0ADOyj5dI1Af5WVDiwpKG/ya5G3w==} + hasBin: true + '@microsoft/tsdoc-config@0.17.1': resolution: {integrity: sha512-UtjIFe0C6oYgTnad4q1QP4qXwLhe6tIpNTRStJ2RZEPIkqQPREAwE5spzVxsdn9UaEMUqhh0AqSx3X4nWAKXWw==} + '@microsoft/tsdoc-config@0.18.0': + resolution: {integrity: sha512-8N/vClYyfOH+l4fLkkr9+myAoR6M7akc8ntBJ4DJdWH2b09uVfr71+LTMpNyG19fNqWDg8KEDZhx5wxuqHyGjw==} + '@microsoft/tsdoc@0.15.1': resolution: {integrity: sha512-4aErSrCR/On/e5G2hDP0wjooqDdauzEbIq8hIkIe5pXV0rtWJZvdCEKL0ykZxex+IxIwBp0eGeV48hQN07dXtw==} + '@microsoft/tsdoc@0.16.0': + resolution: {integrity: sha512-xgAyonlVVS+q7Vc7qLW0UrJU7rSFcETRWsqdXZtjzRU8dF+6CkozTK4V4y1LwOX7j8r/vHphjDeMeGI4tNGeGA==} + '@modelcontextprotocol/sdk@1.17.2': resolution: {integrity: sha512-EFLRNXR/ixpXQWu6/3Cu30ndDFIFNaqUXcTqsGebujeMan9FzhAaFFswLRiFj61rgygDRr8WO1N+UijjgRxX9g==} engines: {node: '>=18'} @@ -3131,6 +3206,15 @@ packages: '@rolldown/pluginutils@1.0.0-beta.54': resolution: {integrity: sha512-AHgcZ+w7RIRZ65ihSQL8YuoKcpD9Scew4sEeP1BBUT9QdTo6KjwHrZZXjID6nL10fhKessCH6OPany2QKwAwTQ==} + '@rollup/plugin-replace@6.0.3': + resolution: {integrity: sha512-J4RZarRvQAm5IF0/LwUUg+obsm+xZhYnbMXmXROyoSE1ATJe3oXSb9L5MMppdxP2ylNSjv6zFBwKYjcKMucVfA==} + engines: {node: '>=14.0.0'} + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + '@rollup/pluginutils@5.1.4': resolution: {integrity: sha512-USm05zrsFxYLPdWWq+K3STlWiT/3ELn3RcV5hJMghpeAIhxfsUIg6mt12CBJBInWMV4VneoV7SfGv8xIwo2qNQ==} engines: {node: '>=14.0.0'} @@ -3246,6 +3330,14 @@ packages: '@rushstack/eslint-patch@1.11.0': resolution: {integrity: sha512-zxnHvoMQVqewTJr/W4pKjF0bMGiKJv1WX7bSrkl46Hg0QjESbzBROWK0Wg4RphzSOS5Jiy7eFimmM3UgMrMZbQ==} + '@rushstack/node-core-library@5.19.0': + resolution: {integrity: sha512-BxAopbeWBvNJ6VGiUL+5lbJXywTdsnMeOS8j57Cn/xY10r6sV/gbsTlfYKjzVCUBZATX2eRzJHSMCchsMTGN6A==} + peerDependencies: + '@types/node': '*' + peerDependenciesMeta: + '@types/node': + optional: true + '@rushstack/node-core-library@5.7.0': resolution: {integrity: sha512-Ff9Cz/YlWu9ce4dmqNBZpA45AEya04XaBFIjV7xTVeEf+y/kTjEasmozqFELXlNG4ROdevss75JrrZ5WgufDkQ==} peerDependencies: @@ -3254,9 +3346,20 @@ packages: '@types/node': optional: true + '@rushstack/problem-matcher@0.1.1': + resolution: {integrity: sha512-Fm5XtS7+G8HLcJHCWpES5VmeMyjAKaWeyZU5qPzZC+22mPlJzAsOxymHiWIfuirtPckX3aptWws+K2d0BzniJA==} + peerDependencies: + '@types/node': '*' + peerDependenciesMeta: + '@types/node': + optional: true + '@rushstack/rig-package@0.5.3': resolution: {integrity: sha512-olzSSjYrvCNxUFZowevC3uz8gvKr3WTpHQ7BkpjtRpA3wK+T0ybep/SRUMfr195gBzJm5gaXw0ZMgjIyHqJUow==} + '@rushstack/rig-package@0.6.0': + resolution: {integrity: sha512-ZQmfzsLE2+Y91GF15c65L/slMRVhF6Hycq04D4TwtdGaUAbIXXg9c5pKA5KFU7M4QMaihoobp9JJYpYcaY3zOw==} + '@rushstack/terminal@0.14.0': resolution: {integrity: sha512-juTKMAMpTIJKudeFkG5slD8Z/LHwNwGZLtU441l/u82XdTBfsP+LbGKJLCNwP5se+DMCT55GB8x9p6+C4UL7jw==} peerDependencies: @@ -3265,9 +3368,20 @@ packages: '@types/node': optional: true + '@rushstack/terminal@0.19.4': + resolution: {integrity: sha512-f4XQk02CrKfrMgyOfhYd3qWI944dLC21S4I/LUhrlAP23GTMDNG6EK5effQtFkISwUKCgD9vMBrJZaPSUquxWQ==} + peerDependencies: + '@types/node': '*' + peerDependenciesMeta: + '@types/node': + optional: true + '@rushstack/ts-command-line@4.22.6': resolution: {integrity: sha512-QSRqHT/IfoC5nk9zn6+fgyqOPXHME0BfchII9EUPR19pocsNp/xSbeBCbD3PIR2Lg+Q5qk7OFqk1VhWPMdKHJg==} + '@rushstack/ts-command-line@5.1.4': + resolution: {integrity: sha512-H0I6VdJ6sOUbktDFpP2VW5N29w8v4hRoNZOQz02vtEi6ZTYL1Ju8u+TcFiFawUDrUsx/5MQTUhd79uwZZVwVlA==} + '@sec-ant/readable-stream@0.4.1': resolution: {integrity: sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==} @@ -3855,6 +3969,9 @@ packages: '@vitest/expect@3.2.4': resolution: {integrity: sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==} + '@vitest/expect@4.0.15': + resolution: {integrity: sha512-Gfyva9/GxPAWXIWjyGDli9O+waHDC0Q0jaLdFP1qPAUUfo1FEXPXUfUkp3eZA0sSq340vPycSyOlYUeM15Ft1w==} + '@vitest/mocker@2.1.9': resolution: {integrity: sha512-tVL6uJgoUdi6icpxmdrn5YNo3g3Dxv+IHJBr0GXHaEdTcw3F+cPKnsXFhli6nO+f/6SDKPHEK1UN+k+TQv0Ehg==} peerDependencies: @@ -3877,30 +3994,53 @@ packages: vite: optional: true + '@vitest/mocker@4.0.15': + resolution: {integrity: sha512-CZ28GLfOEIFkvCFngN8Sfx5h+Se0zN+h4B7yOsPVCcgtiO7t5jt9xQh2E1UkFep+eb9fjyMfuC5gBypwb07fvQ==} + peerDependencies: + msw: ^2.4.9 + vite: ^6.0.0 || ^7.0.0-0 + peerDependenciesMeta: + msw: + optional: true + vite: + optional: true + '@vitest/pretty-format@2.1.9': resolution: {integrity: sha512-KhRIdGV2U9HOUzxfiHmY8IFHTdqtOhIzCpd8WRdJiE7D/HUcZVD0EgQCVjm+Q9gkUXWgBvMmTtZgIG48wq7sOQ==} '@vitest/pretty-format@3.2.4': resolution: {integrity: sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==} + '@vitest/pretty-format@4.0.15': + resolution: {integrity: sha512-SWdqR8vEv83WtZcrfLNqlqeQXlQLh2iilO1Wk1gv4eiHKjEzvgHb2OVc3mIPyhZE6F+CtfYjNlDJwP5MN6Km7A==} + '@vitest/runner@2.1.9': resolution: {integrity: sha512-ZXSSqTFIrzduD63btIfEyOmNcBmQvgOVsPNPe0jYtESiXkhd8u2erDLnMxmGrDCwHCCHE7hxwRDCT3pt0esT4g==} '@vitest/runner@3.2.4': resolution: {integrity: sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==} + '@vitest/runner@4.0.15': + resolution: {integrity: sha512-+A+yMY8dGixUhHmNdPUxOh0la6uVzun86vAbuMT3hIDxMrAOmn5ILBHm8ajrqHE0t8R9T1dGnde1A5DTnmi3qw==} + '@vitest/snapshot@2.1.9': resolution: {integrity: sha512-oBO82rEjsxLNJincVhLhaxxZdEtV0EFHMK5Kmx5sJ6H9L183dHECjiefOAdnqpIgT5eZwT04PoggUnW88vOBNQ==} '@vitest/snapshot@3.2.4': resolution: {integrity: sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==} + '@vitest/snapshot@4.0.15': + resolution: {integrity: sha512-A7Ob8EdFZJIBjLjeO0DZF4lqR6U7Ydi5/5LIZ0xcI+23lYlsYJAfGn8PrIWTYdZQRNnSRlzhg0zyGu37mVdy5g==} + '@vitest/spy@2.1.9': resolution: {integrity: sha512-E1B35FwzXXTs9FHNK6bDszs7mtydNi5MIfUWpceJ8Xbfb1gBMscAnwLbEu+B44ed6W3XjL9/ehLPHR1fkf1KLQ==} '@vitest/spy@3.2.4': resolution: {integrity: sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==} + '@vitest/spy@4.0.15': + resolution: {integrity: sha512-+EIjOJmnY6mIfdXtE/bnozKEvTC4Uczg19yeZ2vtCz5Yyb0QQ31QWVQ8hswJ3Ysx/K2EqaNsVanjr//2+P3FHw==} + '@vitest/ui@3.2.4': resolution: {integrity: sha512-hGISOaP18plkzbWEcP/QvtRW1xDXF2+96HbEX6byqQhAUbiS5oH6/9JwW+QsQCIYON2bI6QZBF+2PvOmrRZ9wA==} peerDependencies: @@ -3912,6 +4052,9 @@ packages: '@vitest/utils@3.2.4': resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==} + '@vitest/utils@4.0.15': + resolution: {integrity: sha512-HXjPW2w5dxhTD0dLwtYHDnelK3j8sR8cWIaLxr22evTyY6q8pRCjZSmhRWVjBaOVXChQd6AwMzi9pucorXCPZA==} + '@volar/language-core@2.4.14': resolution: {integrity: sha512-X6beusV0DvuVseaOEy7GoagS4rYHgDHnTrdOj5jeUb49fW5ceQyP9Ej5rBhqgz2wJggl+2fDbbojq1XKaxDi6w==} @@ -3938,6 +4081,14 @@ packages: typescript: optional: true + '@vue/language-core@2.2.0': + resolution: {integrity: sha512-O1ZZFaaBGkKbsRfnVH1ifOK1/1BUkyK+3SQsfnh6PmMmD4qJcTU8godCeA96jjDRTL6zgnK7YzCHfaUlH2r0Mw==} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + '@vue/shared@3.5.14': resolution: {integrity: sha512-oXTwNxVfc9EtP1zzXAlSlgARLXNC84frFYkS0HHz0h3E4WZSP9sywqjqzGCP9Y34M8ipNmd380pVgmMuwELDyQ==} @@ -3984,6 +4135,9 @@ packages: ajv@8.13.0: resolution: {integrity: sha512-PRA911Blj99jR5RMeTunVbNXMF6Lp4vZXnk5GQjcnUWUTsrXtekg/pnmFFI2u/I36Y/2bITGS30GZCXei6uNkA==} + alien-signals@0.4.14: + resolution: {integrity: sha512-itUAVzhczTmP2U5yX67xVpsbbOiquusbWVyA9N+sy6+r6YVbFkahXvNCeEPWEOMhwDYwbVbGHFkVL03N9I5g+Q==} + ansi-colors@4.1.3: resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==} engines: {node: '>=6'} @@ -4268,6 +4422,10 @@ packages: resolution: {integrity: sha512-mCuXncKXk5iCLhfhwTc0izo0gtEmpz5CtG2y8GiOINBlMVS6v8TMRc5TaLWKS6692m9+dVVfzgeVxR5UxWHTYw==} engines: {node: '>=12'} + chai@6.2.1: + resolution: {integrity: sha512-p4Z49OGG5W/WBCPSS/dH3jQ73kD6tiMmUM+bckNK6Jr5JHMG3k9bg/BvKR8lKmtVBKmOiuVaV2ws8s9oSbwysg==} + engines: {node: '>=18'} + chalk@4.1.2: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} engines: {node: '>=10'} @@ -5254,6 +5412,10 @@ packages: resolution: {integrity: sha512-/kP8CAwxzLVEeFrMm4kMmy4CCDlpipyA7MYLVrdJIkV0fYF0UaigQHRsxHiuY/GEea+bh4KSv3TIlgr+2UL6bw==} engines: {node: '>=12.0.0'} + expect-type@1.2.2: + resolution: {integrity: sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==} + engines: {node: '>=12.0.0'} + expect@29.7.0: resolution: {integrity: sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -5308,6 +5470,10 @@ packages: fast-levenshtein@2.0.6: resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + fast-xml-parser@5.3.2: + resolution: {integrity: sha512-n8v8b6p4Z1sMgqRmqLJm3awW4NX7NkaKPfb3uJIBTSH7Pdvufi3PQ3/lJLQrvxcMYl7JI2jnDO90siPEpD8JBA==} + hasBin: true + fastq@1.19.1: resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==} @@ -5330,6 +5496,15 @@ packages: picomatch: optional: true + fdir@6.5.0: + resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} + engines: {node: '>=12.0.0'} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + fetch-blob@3.2.0: resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==} engines: {node: ^12.20 || >= 14.13} @@ -6313,6 +6488,10 @@ packages: resolution: {integrity: sha512-9rrA30MRRP3gBD3HTGnC6cDFpaE1kVDWxWgqWJUN0RvDNAo+Nz/9GxB+nHOH0ifbVFy0hSA1V6vFDvnx54lTEQ==} engines: {node: '>=14'} + local-pkg@1.1.2: + resolution: {integrity: sha512-arhlxbFRmoQHl33a0Zkle/YWlmNwoyt6QNZEIJcqNbdrsix5Lvc4HyyI3EnwxTYlZYc32EbYrQ8SzEZ7dqgg9A==} + engines: {node: '>=14'} + locate-path@3.0.0: resolution: {integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==} engines: {node: '>=6'} @@ -6393,6 +6572,9 @@ packages: magic-string@0.30.17: resolution: {integrity: sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==} + magic-string@0.30.21: + resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} + magicast@0.3.5: resolution: {integrity: sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==} @@ -6904,6 +7086,9 @@ packages: resolution: {integrity: sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==} engines: {node: '>= 0.4'} + obug@2.1.1: + resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==} + odata-query@8.0.4: resolution: {integrity: sha512-v66MVxAZxlmOlFVaC9gvcDX5OcHO6yqc08AXhNhQ9LMbSzJKJ88uY1a7uDmLw2u4oMPGOMjnb8jdimA4kOD4Rw==} @@ -7116,6 +7301,10 @@ packages: resolution: {integrity: sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==} engines: {node: '>=12'} + picomatch@4.0.3: + resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} + engines: {node: '>=12'} + pify@4.0.1: resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==} engines: {node: '>=6'} @@ -7127,8 +7316,8 @@ packages: pkg-types@1.3.1: resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} - pkg-types@2.2.0: - resolution: {integrity: sha512-2SM/GZGAEkPp3KWORxQZns4M+WSeXbC2HEvmOIJe3Cmiv6ieAJvdVhDldtHqM5J1Y7MrR1XhkBT/rMlhh9FdqQ==} + pkg-types@2.3.0: + resolution: {integrity: sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig==} possible-typed-array-names@1.1.0: resolution: {integrity: sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==} @@ -7774,6 +7963,9 @@ packages: resolution: {integrity: sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==} engines: {node: '>= 0.8'} + std-env@3.10.0: + resolution: {integrity: sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==} + std-env@3.9.0: resolution: {integrity: sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==} @@ -7883,6 +8075,9 @@ packages: strip-literal@3.0.0: resolution: {integrity: sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==} + strnum@2.1.1: + resolution: {integrity: sha512-7ZvoFTiCnGxBtDqJ//Cu6fWtZtc7Y3x+QOirG15wztbdngGSkht27o2pyGWrVy0b4WAy3jbKmnoK6g5VlVNUUw==} + strtok3@9.1.1: resolution: {integrity: sha512-FhwotcEqjr241ZbjFzjlIYg6c5/L/s4yBGWSMvJ9UoExiSqL+FnFA/CaeZx17WGaZMS/4SOZp8wH18jSS4R4lw==} engines: {node: '>=16'} @@ -7993,6 +8188,10 @@ packages: tinyexec@1.0.1: resolution: {integrity: sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==} + tinyexec@1.0.2: + resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==} + engines: {node: '>=18'} + tinyglobby@0.2.13: resolution: {integrity: sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==} engines: {node: '>=12.0.0'} @@ -8001,6 +8200,10 @@ packages: resolution: {integrity: sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==} engines: {node: '>=12.0.0'} + tinyglobby@0.2.15: + resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==} + engines: {node: '>=12.0.0'} + tinygradient@1.1.5: resolution: {integrity: sha512-8nIfc2vgQ4TeLnk2lFj4tRLvvJwEfQuabdsmvDdQPT0xlk9TaNtpGd6nNRxXoK6vQhN6RSzj+Cnp5tTQmpxmbw==} @@ -8016,6 +8219,10 @@ packages: resolution: {integrity: sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==} engines: {node: '>=14.0.0'} + tinyrainbow@3.0.3: + resolution: {integrity: sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==} + engines: {node: '>=14.0.0'} + tinyspy@3.0.2: resolution: {integrity: sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==} engines: {node: '>=14.0.0'} @@ -8222,11 +8429,21 @@ packages: engines: {node: '>=14.17'} hasBin: true + typescript@5.8.2: + resolution: {integrity: sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==} + engines: {node: '>=14.17'} + hasBin: true + typescript@5.9.2: resolution: {integrity: sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==} engines: {node: '>=14.17'} hasBin: true + typescript@5.9.3: + resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} + engines: {node: '>=14.17'} + hasBin: true + ufo@1.6.1: resolution: {integrity: sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==} @@ -8406,6 +8623,15 @@ packages: vite: optional: true + vite-plugin-dts@4.5.4: + resolution: {integrity: sha512-d4sOM8M/8z7vRXHHq/ebbblfaxENjogAAekcfcDCCwAyvGqnPrc7f4NZbvItS+g4WTgerW0xDwSz5qz11JT3vg==} + peerDependencies: + typescript: '*' + vite: '*' + peerDependenciesMeta: + vite: + optional: true + vite-plugin-externalize-deps@0.9.0: resolution: {integrity: sha512-wg3qb5gCy2d1KpPKyD9wkXMcYJ84yjgziHrStq9/8R7chhUC73mhQz+tVtvhFiICQHsBn1pnkY4IBbPqF9JHNw==} peerDependencies: @@ -8543,6 +8769,40 @@ packages: jsdom: optional: true + vitest@4.0.15: + resolution: {integrity: sha512-n1RxDp8UJm6N0IbJLQo+yzLZ2sQCDyl1o0LeugbPWf8+8Fttp29GghsQBjYJVmWq3gBFfe9Hs1spR44vovn2wA==} + engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@opentelemetry/api': ^1.9.0 + '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 + '@vitest/browser-playwright': 4.0.15 + '@vitest/browser-preview': 4.0.15 + '@vitest/browser-webdriverio': 4.0.15 + '@vitest/ui': 4.0.15 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@opentelemetry/api': + optional: true + '@types/node': + optional: true + '@vitest/browser-playwright': + optional: true + '@vitest/browser-preview': + optional: true + '@vitest/browser-webdriverio': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + vscode-uri@3.1.0: resolution: {integrity: sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==} @@ -8711,6 +8971,9 @@ packages: zod@3.25.76: resolution: {integrity: sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==} + zod@4.1.12: + resolution: {integrity: sha512-JInaHOamG8pt5+Ey8kGmdcAcg3OL9reK8ltczgHTAwNhMys/6ThXHityHxVV2p3fkw/c+MAvBHFVYHFZDmjMCQ==} + zwitch@2.0.4: resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} @@ -9108,7 +9371,7 @@ snapshots: '@better-auth/utils@0.2.5': dependencies: - typescript: 5.9.2 + typescript: 5.9.3 uncrypto: 0.1.3 '@better-fetch/fetch@1.1.17': {} @@ -9769,6 +10032,8 @@ snapshots: '@eslint/core': 0.14.0 levn: 0.4.1 + '@fetchkit/ffetch@4.2.0': {} + '@floating-ui/core@1.7.1': dependencies: '@floating-ui/utils': 0.2.9 @@ -9968,7 +10233,7 @@ snapshots: '@jridgewell/gen-mapping@0.3.13': dependencies: - '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/sourcemap-codec': 1.5.5 '@jridgewell/trace-mapping': 0.3.30 '@jridgewell/gen-mapping@0.3.8': @@ -9983,6 +10248,8 @@ snapshots: '@jridgewell/sourcemap-codec@1.5.0': {} + '@jridgewell/sourcemap-codec@1.5.5': {} + '@jridgewell/trace-mapping@0.3.25': dependencies: '@jridgewell/resolve-uri': 3.1.2 @@ -9991,7 +10258,7 @@ snapshots: '@jridgewell/trace-mapping@0.3.30': dependencies: '@jridgewell/resolve-uri': 3.1.2 - '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/sourcemap-codec': 1.5.5 '@levischuck/tiny-cbor@0.2.11': {} @@ -10108,6 +10375,14 @@ snapshots: transitivePeerDependencies: - '@types/node' + '@microsoft/api-extractor-model@7.32.1(@types/node@22.17.1)': + dependencies: + '@microsoft/tsdoc': 0.16.0 + '@microsoft/tsdoc-config': 0.18.0 + '@rushstack/node-core-library': 5.19.0(@types/node@22.17.1) + transitivePeerDependencies: + - '@types/node' + '@microsoft/api-extractor@7.47.7(@types/node@22.17.1)': dependencies: '@microsoft/api-extractor-model': 7.29.6(@types/node@22.17.1) @@ -10126,6 +10401,25 @@ snapshots: transitivePeerDependencies: - '@types/node' + '@microsoft/api-extractor@7.55.1(@types/node@22.17.1)': + dependencies: + '@microsoft/api-extractor-model': 7.32.1(@types/node@22.17.1) + '@microsoft/tsdoc': 0.16.0 + '@microsoft/tsdoc-config': 0.18.0 + '@rushstack/node-core-library': 5.19.0(@types/node@22.17.1) + '@rushstack/rig-package': 0.6.0 + '@rushstack/terminal': 0.19.4(@types/node@22.17.1) + '@rushstack/ts-command-line': 5.1.4(@types/node@22.17.1) + diff: 8.0.2 + lodash: 4.17.21 + minimatch: 10.0.3 + resolve: 1.22.10 + semver: 7.5.4 + source-map: 0.6.1 + typescript: 5.8.2 + transitivePeerDependencies: + - '@types/node' + '@microsoft/tsdoc-config@0.17.1': dependencies: '@microsoft/tsdoc': 0.15.1 @@ -10133,8 +10427,17 @@ snapshots: jju: 1.4.0 resolve: 1.22.10 + '@microsoft/tsdoc-config@0.18.0': + dependencies: + '@microsoft/tsdoc': 0.16.0 + ajv: 8.12.0 + jju: 1.4.0 + resolve: 1.22.10 + '@microsoft/tsdoc@0.15.1': {} + '@microsoft/tsdoc@0.16.0': {} + '@modelcontextprotocol/sdk@1.17.2': dependencies: ajv: 6.12.6 @@ -10893,11 +11196,18 @@ snapshots: '@rolldown/pluginutils@1.0.0-beta.54': {} + '@rollup/plugin-replace@6.0.3(rollup@4.40.2)': + dependencies: + '@rollup/pluginutils': 5.1.4(rollup@4.40.2) + magic-string: 0.30.21 + optionalDependencies: + rollup: 4.40.2 + '@rollup/pluginutils@5.1.4(rollup@4.40.2)': dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 estree-walker: 2.0.2 - picomatch: 4.0.2 + picomatch: 4.0.3 optionalDependencies: rollup: 4.40.2 @@ -10965,6 +11275,19 @@ snapshots: '@rushstack/eslint-patch@1.11.0': {} + '@rushstack/node-core-library@5.19.0(@types/node@22.17.1)': + dependencies: + ajv: 8.13.0 + ajv-draft-04: 1.0.0(ajv@8.13.0) + ajv-formats: 3.0.1(ajv@8.13.0) + fs-extra: 11.3.0 + import-lazy: 4.0.0 + jju: 1.4.0 + resolve: 1.22.10 + semver: 7.5.4 + optionalDependencies: + '@types/node': 22.17.1 + '@rushstack/node-core-library@5.7.0(@types/node@22.17.1)': dependencies: ajv: 8.13.0 @@ -10978,11 +11301,20 @@ snapshots: optionalDependencies: '@types/node': 22.17.1 + '@rushstack/problem-matcher@0.1.1(@types/node@22.17.1)': + optionalDependencies: + '@types/node': 22.17.1 + '@rushstack/rig-package@0.5.3': dependencies: resolve: 1.22.10 strip-json-comments: 3.1.1 + '@rushstack/rig-package@0.6.0': + dependencies: + resolve: 1.22.10 + strip-json-comments: 3.1.1 + '@rushstack/terminal@0.14.0(@types/node@22.17.1)': dependencies: '@rushstack/node-core-library': 5.7.0(@types/node@22.17.1) @@ -10990,6 +11322,14 @@ snapshots: optionalDependencies: '@types/node': 22.17.1 + '@rushstack/terminal@0.19.4(@types/node@22.17.1)': + dependencies: + '@rushstack/node-core-library': 5.19.0(@types/node@22.17.1) + '@rushstack/problem-matcher': 0.1.1(@types/node@22.17.1) + supports-color: 8.1.1 + optionalDependencies: + '@types/node': 22.17.1 + '@rushstack/ts-command-line@4.22.6(@types/node@22.17.1)': dependencies: '@rushstack/terminal': 0.14.0(@types/node@22.17.1) @@ -10999,6 +11339,15 @@ snapshots: transitivePeerDependencies: - '@types/node' + '@rushstack/ts-command-line@5.1.4(@types/node@22.17.1)': + dependencies: + '@rushstack/terminal': 0.19.4(@types/node@22.17.1) + '@types/argparse': 1.0.38 + argparse: 1.0.10 + string-argv: 0.3.2 + transitivePeerDependencies: + - '@types/node' + '@sec-ant/readable-stream@0.4.1': {} '@shikijs/core@3.13.0': @@ -11107,7 +11456,7 @@ snapshots: enhanced-resolve: 5.18.2 jiti: 2.4.2 lightningcss: 1.30.1 - magic-string: 0.30.17 + magic-string: 0.30.21 source-map-js: 1.2.1 tailwindcss: 4.1.11 @@ -11193,6 +11542,19 @@ snapshots: - typescript - vite + '@tanstack/vite-config@0.2.0(@types/node@22.17.1)(rollup@4.40.2)(typescript@5.9.3)(vite@6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0))': + dependencies: + rollup-plugin-preserve-directives: 0.4.0(rollup@4.40.2) + vite-plugin-dts: 4.2.3(@types/node@22.17.1)(rollup@4.40.2)(typescript@5.9.3)(vite@6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0)) + vite-plugin-externalize-deps: 0.9.0(vite@6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0)) + vite-tsconfig-paths: 5.1.4(typescript@5.9.3)(vite@6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0)) + transitivePeerDependencies: + - '@types/node' + - rollup + - supports-color + - typescript + - vite + '@tokenizer/token@0.3.0': {} '@trpc/client@11.0.0-rc.441(@trpc/server@11.0.0-rc.441)': @@ -11269,7 +11631,7 @@ snapshots: '@types/estree-jsx@1.0.5': dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 '@types/estree@1.0.7': {} @@ -11594,7 +11956,7 @@ snapshots: '@unrs/resolver-binding-wasm32-wasi@1.7.9': dependencies: - '@napi-rs/wasm-runtime': 0.2.11 + '@napi-rs/wasm-runtime': 0.2.12 optional: true '@unrs/resolver-binding-win32-arm64-msvc@1.7.9': @@ -11662,11 +12024,20 @@ snapshots: chai: 5.2.0 tinyrainbow: 2.0.0 + '@vitest/expect@4.0.15': + dependencies: + '@standard-schema/spec': 1.0.0 + '@types/chai': 5.2.2 + '@vitest/spy': 4.0.15 + '@vitest/utils': 4.0.15 + chai: 6.2.1 + tinyrainbow: 3.0.3 + '@vitest/mocker@2.1.9(msw@2.10.2(@types/node@22.17.1)(typescript@5.9.2))(vite@5.4.19(@types/node@22.17.1)(lightningcss@1.30.1))': dependencies: '@vitest/spy': 2.1.9 estree-walker: 3.0.3 - magic-string: 0.30.17 + magic-string: 0.30.21 optionalDependencies: msw: 2.10.2(@types/node@22.17.1)(typescript@5.9.2) vite: 5.4.19(@types/node@22.17.1)(lightningcss@1.30.1) @@ -11675,7 +12046,7 @@ snapshots: dependencies: '@vitest/spy': 3.2.4 estree-walker: 3.0.3 - magic-string: 0.30.17 + magic-string: 0.30.21 optionalDependencies: msw: 2.10.2(@types/node@22.17.1)(typescript@5.9.2) vite: 6.3.5(@types/node@22.17.1)(jiti@1.21.7)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0) @@ -11684,11 +12055,20 @@ snapshots: dependencies: '@vitest/spy': 3.2.4 estree-walker: 3.0.3 - magic-string: 0.30.17 + magic-string: 0.30.21 optionalDependencies: msw: 2.10.2(@types/node@22.17.1)(typescript@5.9.2) vite: 6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0) + '@vitest/mocker@4.0.15(msw@2.10.2(@types/node@22.17.1)(typescript@5.9.3))(vite@6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0))': + dependencies: + '@vitest/spy': 4.0.15 + estree-walker: 3.0.3 + magic-string: 0.30.21 + optionalDependencies: + msw: 2.10.2(@types/node@22.17.1)(typescript@5.9.3) + vite: 6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0) + '@vitest/pretty-format@2.1.9': dependencies: tinyrainbow: 1.2.0 @@ -11697,6 +12077,10 @@ snapshots: dependencies: tinyrainbow: 2.0.0 + '@vitest/pretty-format@4.0.15': + dependencies: + tinyrainbow: 3.0.3 + '@vitest/runner@2.1.9': dependencies: '@vitest/utils': 2.1.9 @@ -11708,16 +12092,27 @@ snapshots: pathe: 2.0.3 strip-literal: 3.0.0 + '@vitest/runner@4.0.15': + dependencies: + '@vitest/utils': 4.0.15 + pathe: 2.0.3 + '@vitest/snapshot@2.1.9': dependencies: '@vitest/pretty-format': 2.1.9 - magic-string: 0.30.17 + magic-string: 0.30.21 pathe: 1.1.2 '@vitest/snapshot@3.2.4': dependencies: '@vitest/pretty-format': 3.2.4 - magic-string: 0.30.17 + magic-string: 0.30.21 + pathe: 2.0.3 + + '@vitest/snapshot@4.0.15': + dependencies: + '@vitest/pretty-format': 4.0.15 + magic-string: 0.30.21 pathe: 2.0.3 '@vitest/spy@2.1.9': @@ -11728,6 +12123,8 @@ snapshots: dependencies: tinyspy: 4.0.3 + '@vitest/spy@4.0.15': {} + '@vitest/ui@3.2.4(vitest@3.2.4)': dependencies: '@vitest/utils': 3.2.4 @@ -11751,6 +12148,11 @@ snapshots: loupe: 3.2.0 tinyrainbow: 2.0.0 + '@vitest/utils@4.0.15': + dependencies: + '@vitest/pretty-format': 4.0.15 + tinyrainbow: 3.0.3 + '@volar/language-core@2.4.14': dependencies: '@volar/source-map': 2.4.14 @@ -11794,6 +12196,32 @@ snapshots: optionalDependencies: typescript: 5.9.2 + '@vue/language-core@2.1.6(typescript@5.9.3)': + dependencies: + '@volar/language-core': 2.4.14 + '@vue/compiler-dom': 3.5.14 + '@vue/compiler-vue2': 2.7.16 + '@vue/shared': 3.5.14 + computeds: 0.0.1 + minimatch: 9.0.5 + muggle-string: 0.4.1 + path-browserify: 1.0.1 + optionalDependencies: + typescript: 5.9.3 + + '@vue/language-core@2.2.0(typescript@5.9.3)': + dependencies: + '@volar/language-core': 2.4.14 + '@vue/compiler-dom': 3.5.14 + '@vue/compiler-vue2': 2.7.16 + '@vue/shared': 3.5.14 + alien-signals: 0.4.14 + minimatch: 9.0.5 + muggle-string: 0.4.1 + path-browserify: 1.0.1 + optionalDependencies: + typescript: 5.9.3 + '@vue/shared@3.5.14': {} accepts@2.0.0: @@ -11838,6 +12266,8 @@ snapshots: require-from-string: 2.0.2 uri-js: 4.4.1 + alien-signals@0.4.14: {} + ansi-colors@4.1.3: {} ansi-escapes@4.3.2: @@ -12151,7 +12581,7 @@ snapshots: ohash: 2.0.11 pathe: 2.0.3 perfect-debounce: 1.0.0 - pkg-types: 2.2.0 + pkg-types: 2.3.0 rc9: 2.1.2 optionalDependencies: magicast: 0.3.5 @@ -12191,6 +12621,8 @@ snapshots: loupe: 3.1.4 pathval: 2.0.0 + chai@6.2.1: {} + chalk@4.1.2: dependencies: ansi-styles: 4.3.0 @@ -13287,7 +13719,7 @@ snapshots: estree-util-scope@1.0.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 devlop: 1.1.0 estree-util-to-js@2.0.0: @@ -13309,7 +13741,7 @@ snapshots: estree-walker@3.0.3: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 esutils@2.0.3: {} @@ -13357,6 +13789,8 @@ snapshots: expect-type@1.2.1: {} + expect-type@1.2.2: {} + expect@29.7.0: dependencies: '@jest/expect-utils': 29.7.0 @@ -13445,6 +13879,10 @@ snapshots: fast-levenshtein@2.0.6: {} + fast-xml-parser@5.3.2: + dependencies: + strnum: 2.1.1 + fastq@1.19.1: dependencies: reusify: 1.1.0 @@ -13461,6 +13899,10 @@ snapshots: optionalDependencies: picomatch: 4.0.2 + fdir@6.5.0(picomatch@4.0.3): + optionalDependencies: + picomatch: 4.0.3 + fetch-blob@3.2.0: dependencies: node-domexception: 1.0.0 @@ -14526,6 +14968,12 @@ snapshots: mlly: 1.7.4 pkg-types: 1.3.1 + local-pkg@1.1.2: + dependencies: + mlly: 1.7.4 + pkg-types: 2.3.0 + quansync: 0.2.11 + locate-path@3.0.0: dependencies: p-locate: 3.0.0 @@ -14594,6 +15042,10 @@ snapshots: dependencies: '@jridgewell/sourcemap-codec': 1.5.0 + magic-string@0.30.21: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + magicast@0.3.5: dependencies: '@babel/parser': 7.27.7 @@ -14884,7 +15336,7 @@ snapshots: micromark-extension-mdx-expression@3.0.1: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 devlop: 1.1.0 micromark-factory-mdx-expression: 2.0.3 micromark-factory-space: 2.0.1 @@ -14895,7 +15347,7 @@ snapshots: micromark-extension-mdx-jsx@3.0.2: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 devlop: 1.1.0 estree-util-is-identifier-name: 3.0.0 micromark-factory-mdx-expression: 2.0.3 @@ -14912,7 +15364,7 @@ snapshots: micromark-extension-mdxjs-esm@3.0.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 devlop: 1.1.0 micromark-core-commonmark: 2.0.3 micromark-util-character: 2.1.1 @@ -14948,7 +15400,7 @@ snapshots: micromark-factory-mdx-expression@2.0.3: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 devlop: 1.1.0 micromark-factory-space: 2.0.1 micromark-util-character: 2.1.1 @@ -15012,7 +15464,7 @@ snapshots: micromark-util-events-to-acorn@2.0.3: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 '@types/unist': 3.0.3 devlop: 1.1.0 estree-util-visit: 2.0.0 @@ -15185,6 +15637,32 @@ snapshots: transitivePeerDependencies: - '@types/node' + msw@2.10.2(@types/node@22.17.1)(typescript@5.9.3): + dependencies: + '@bundled-es-modules/cookie': 2.0.1 + '@bundled-es-modules/statuses': 1.0.1 + '@bundled-es-modules/tough-cookie': 0.1.6 + '@inquirer/confirm': 5.1.12(@types/node@22.17.1) + '@mswjs/interceptors': 0.39.2 + '@open-draft/deferred-promise': 2.2.0 + '@open-draft/until': 2.1.0 + '@types/cookie': 0.6.0 + '@types/statuses': 2.0.6 + graphql: 16.11.0 + headers-polyfill: 4.0.3 + is-node-process: 1.2.0 + outvariant: 1.4.3 + path-to-regexp: 6.3.0 + picocolors: 1.1.1 + strict-event-emitter: 0.5.1 + type-fest: 4.41.0 + yargs: 17.7.2 + optionalDependencies: + typescript: 5.9.3 + transitivePeerDependencies: + - '@types/node' + optional: true + muggle-string@0.4.1: {} mute-stream@2.0.0: {} @@ -15324,7 +15802,7 @@ snapshots: citty: 0.1.6 consola: 3.4.2 pathe: 2.0.3 - pkg-types: 2.2.0 + pkg-types: 2.3.0 tinyexec: 0.3.2 oauth4webapi@2.17.0: {} @@ -15377,6 +15855,8 @@ snapshots: define-properties: 1.2.1 es-object-atoms: 1.1.1 + obug@2.1.1: {} + odata-query@8.0.4: dependencies: tslib: 2.8.1 @@ -15618,6 +16098,8 @@ snapshots: picomatch@4.0.2: {} + picomatch@4.0.3: {} + pify@4.0.1: {} pkce-challenge@5.0.0: {} @@ -15628,7 +16110,7 @@ snapshots: mlly: 1.7.4 pathe: 2.0.3 - pkg-types@2.2.0: + pkg-types@2.3.0: dependencies: confbox: 0.2.2 exsolve: 1.0.7 @@ -15881,7 +16363,7 @@ snapshots: recma-build-jsx@1.0.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 estree-util-build-jsx: 3.0.1 vfile: 6.0.3 @@ -15897,14 +16379,14 @@ snapshots: recma-parse@1.0.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 esast-util-from-js: 2.0.1 unified: 11.0.5 vfile: 6.0.3 recma-stringify@1.0.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 estree-util-to-js: 2.0.0 unified: 11.0.5 vfile: 6.0.3 @@ -15943,7 +16425,7 @@ snapshots: rehype-recma@1.0.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 '@types/hast': 3.0.4 hast-util-to-estree: 3.1.3 transitivePeerDependencies: @@ -16091,7 +16573,7 @@ snapshots: rollup-plugin-preserve-directives@0.4.0(rollup@4.40.2): dependencies: '@rollup/pluginutils': 5.1.4(rollup@4.40.2) - magic-string: 0.30.17 + magic-string: 0.30.21 rollup: 4.40.2 rollup@4.40.2: @@ -16436,6 +16918,8 @@ snapshots: statuses@2.0.2: {} + std-env@3.10.0: {} + std-env@3.9.0: {} stdin-discarder@0.1.0: @@ -16568,6 +17052,8 @@ snapshots: dependencies: js-tokens: 9.0.1 + strnum@2.1.1: {} + strtok3@9.1.1: dependencies: '@tokenizer/token': 0.3.0 @@ -16687,6 +17173,8 @@ snapshots: tinyexec@1.0.1: {} + tinyexec@1.0.2: {} + tinyglobby@0.2.13: dependencies: fdir: 6.4.4(picomatch@4.0.2) @@ -16697,6 +17185,11 @@ snapshots: fdir: 6.4.6(picomatch@4.0.2) picomatch: 4.0.2 + tinyglobby@0.2.15: + dependencies: + fdir: 6.5.0(picomatch@4.0.3) + picomatch: 4.0.3 + tinygradient@1.1.5: dependencies: '@types/tinycolor2': 1.4.6 @@ -16708,6 +17201,8 @@ snapshots: tinyrainbow@2.0.0: {} + tinyrainbow@3.0.3: {} + tinyspy@3.0.2: {} tinyspy@4.0.3: {} @@ -16762,6 +17257,10 @@ snapshots: optionalDependencies: typescript: 5.9.2 + tsconfck@3.1.5(typescript@5.9.3): + optionalDependencies: + typescript: 5.9.3 + tsconfig-paths@3.15.0: dependencies: '@types/json5': 0.0.29 @@ -16927,8 +17426,12 @@ snapshots: typescript@5.6.1-rc: {} + typescript@5.8.2: {} + typescript@5.9.2: {} + typescript@5.9.3: {} + ufo@1.6.1: {} uglify-js@3.19.3: @@ -17015,7 +17518,7 @@ snapshots: dependencies: '@rollup/pluginutils': 5.1.4(rollup@4.40.2) debug: 4.4.1 - magic-string: 0.30.17 + magic-string: 0.30.21 oxc-parser: 0.36.0 unplugin: 1.16.1 optionalDependencies: @@ -17187,7 +17690,7 @@ snapshots: debug: 4.4.1 kolorist: 1.8.0 local-pkg: 0.5.1 - magic-string: 0.30.17 + magic-string: 0.30.21 typescript: 5.9.2 optionalDependencies: vite: 6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0) @@ -17196,6 +17699,44 @@ snapshots: - rollup - supports-color + vite-plugin-dts@4.2.3(@types/node@22.17.1)(rollup@4.40.2)(typescript@5.9.3)(vite@6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0)): + dependencies: + '@microsoft/api-extractor': 7.47.7(@types/node@22.17.1) + '@rollup/pluginutils': 5.1.4(rollup@4.40.2) + '@volar/typescript': 2.4.14 + '@vue/language-core': 2.1.6(typescript@5.9.3) + compare-versions: 6.1.1 + debug: 4.4.1 + kolorist: 1.8.0 + local-pkg: 0.5.1 + magic-string: 0.30.21 + typescript: 5.9.3 + optionalDependencies: + vite: 6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0) + transitivePeerDependencies: + - '@types/node' + - rollup + - supports-color + + vite-plugin-dts@4.5.4(@types/node@22.17.1)(rollup@4.40.2)(typescript@5.9.3)(vite@6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0)): + dependencies: + '@microsoft/api-extractor': 7.55.1(@types/node@22.17.1) + '@rollup/pluginutils': 5.1.4(rollup@4.40.2) + '@volar/typescript': 2.4.14 + '@vue/language-core': 2.2.0(typescript@5.9.3) + compare-versions: 6.1.1 + debug: 4.4.1 + kolorist: 1.8.0 + local-pkg: 1.1.2 + magic-string: 0.30.17 + typescript: 5.9.3 + optionalDependencies: + vite: 6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0) + transitivePeerDependencies: + - '@types/node' + - rollup + - supports-color + vite-plugin-externalize-deps@0.9.0(vite@6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0)): dependencies: vite: 6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0) @@ -17211,6 +17752,17 @@ snapshots: - supports-color - typescript + vite-tsconfig-paths@5.1.4(typescript@5.9.3)(vite@6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0)): + dependencies: + debug: 4.4.1 + globrex: 0.1.2 + tsconfck: 3.1.5(typescript@5.9.3) + optionalDependencies: + vite: 6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0) + transitivePeerDependencies: + - supports-color + - typescript + vite@5.4.19(@types/node@22.17.1)(lightningcss@1.30.1): dependencies: esbuild: 0.21.5 @@ -17378,6 +17930,45 @@ snapshots: - tsx - yaml + vitest@4.0.15(@types/node@22.17.1)(@vitest/ui@3.2.4)(happy-dom@15.11.7)(jiti@2.4.2)(lightningcss@1.30.1)(msw@2.10.2(@types/node@22.17.1)(typescript@5.9.3))(tsx@4.20.3)(yaml@2.8.0): + dependencies: + '@vitest/expect': 4.0.15 + '@vitest/mocker': 4.0.15(msw@2.10.2(@types/node@22.17.1)(typescript@5.9.3))(vite@6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0)) + '@vitest/pretty-format': 4.0.15 + '@vitest/runner': 4.0.15 + '@vitest/snapshot': 4.0.15 + '@vitest/spy': 4.0.15 + '@vitest/utils': 4.0.15 + es-module-lexer: 1.7.0 + expect-type: 1.2.2 + magic-string: 0.30.21 + obug: 2.1.1 + pathe: 2.0.3 + picomatch: 4.0.3 + std-env: 3.10.0 + tinybench: 2.9.0 + tinyexec: 1.0.2 + tinyglobby: 0.2.15 + tinyrainbow: 3.0.3 + vite: 6.3.5(@types/node@22.17.1)(jiti@2.4.2)(lightningcss@1.30.1)(tsx@4.20.3)(yaml@2.8.0) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/node': 22.17.1 + '@vitest/ui': 3.2.4(vitest@3.2.4) + happy-dom: 15.11.7 + transitivePeerDependencies: + - jiti + - less + - lightningcss + - msw + - sass + - sass-embedded + - stylus + - sugarss + - terser + - tsx + - yaml + vscode-uri@3.1.0: {} walk-up-path@3.0.1: {} @@ -17555,4 +18146,6 @@ snapshots: zod@3.25.76: {} + zod@4.1.12: {} + zwitch@2.0.4: {} diff --git a/thunder-tests/environments/tc_env_local.json b/thunder-tests/environments/tc_env_local.json new file mode 100644 index 00000000..c1663887 --- /dev/null +++ b/thunder-tests/environments/tc_env_local.json @@ -0,0 +1,11 @@ +{ + "_id": "5079be56-1fbc-4485-abda-744a899ba3d9", + "name": "(Local Env)", + "default": false, + "global": true, + "local": true, + "sortNum": -1, + "created": "2025-11-19T14:15:14.214Z", + "modified": "2025-11-19T14:15:14.214Z", + "data": [] +} \ No newline at end of file diff --git a/thunder-tests/thunderActivity.json b/thunder-tests/thunderActivity.json new file mode 100644 index 00000000..9c93301d --- /dev/null +++ b/thunder-tests/thunderActivity.json @@ -0,0 +1,168 @@ +[ + { + "_id": "746cd695-3e96-46ca-afe9-312da66dbd45", + "colId": "history", + "containerId": "", + "name": "Base Request", + "url": "{{base_url}}/fmi/odata/v4/fmdapi_test/$crossjoin(isolated_contacts,isolated_users)?$top=5&$expand=isolated_contacts($select=id_user),isolated_users($select=\"id\")", + "method": "GET", + "sortNum": 0, + "created": "2025-11-19T14:17:07.725Z", + "modified": "2025-11-19T17:23:45.731Z", + "headers": [], + "params": [ + { + "name": "$top", + "value": "5", + "isPath": false + }, + { + "name": "$expand", + "value": "isolated_contacts($select=id_user),isolated_users($select=\"id\")", + "isPath": false + } + ], + "auth": { + "type": "basic", + "basic": { + "username": "{{username}}", + "password": "{{password}}" + } + } + }, + { + "_id": "4b092a18-a4c1-4278-9263-858f648b6a7c", + "colId": "history", + "containerId": "", + "name": "Insert Record", + "url": "{{base_url}}/fmi/odata/v4/fmdapi_test/contacts", + "method": "POST", + "sortNum": 0, + "created": "2025-11-19T14:17:07.725Z", + "modified": "2025-11-19T14:41:34.827Z", + "headers": [ + { + "name": "Prefer", + "value": "return=representation" + }, + { + "name": "Prefer", + "value": "return=minimal", + "isDisabled": true + } + ], + "body": { + "type": "json", + "raw": "{\n \"name\": \"ThunderClient\"\n}", + "form": [] + }, + "auth": { + "type": "basic", + "basic": { + "username": "{{username}}", + "password": "{{password}}" + } + } + }, + { + "_id": "b060217a-bf79-408f-836e-b172f8d6ad42", + "colId": "history", + "containerId": "", + "name": "Update Record", + "url": "{{base_url}}/fmi/odata/v4/fmdapi_test/contacts('B7EC5945-1830-4829-8464-69F970EE0D65')", + "method": "DELETE", + "sortNum": 0, + "created": "2025-11-19T14:17:07.725Z", + "modified": "2025-11-19T16:45:13.699Z", + "headers": [ + { + "name": "Prefer", + "value": "return=representation", + "isDisabled": true + }, + { + "name": "Prefer", + "value": "return=minimal" + } + ], + "body": { + "type": "json", + "raw": "{\n \"name\": \"ThunderClient\"\n}", + "form": [] + }, + "auth": { + "type": "basic", + "basic": { + "username": "{{username}}", + "password": "{{password}}" + } + } + }, + { + "_id": "86701f03-c2ed-48a6-accd-faaa9d31be5b", + "colId": "history", + "containerId": "", + "name": "Crossjoin", + "url": "{{base_url}}/fmi/odata/v4/fmdapi_test", + "method": "GET", + "sortNum": 0, + "created": "2025-11-19T14:17:07.725Z", + "modified": "2025-11-19T14:17:07.725Z", + "headers": [], + "auth": { + "type": "basic", + "basic": { + "username": "{{username}}", + "password": "{{password}}" + } + } + }, + { + "_id": "3187a7dc-9e8a-44fb-927f-e0a5bf792b4e", + "colId": "history", + "containerId": "", + "name": "Webhook", + "url": "{{base_url}}/fmi/odata/v4/fmdapi_test/Webhook.GetAll", + "method": "GET", + "sortNum": 0, + "created": "2025-11-19T14:17:07.725Z", + "modified": "2025-11-19T18:04:10.450Z", + "headers": [], + "body": { + "type": "json", + "raw": "{\n \"webhook\": \"https://webhook.site/0137b6ae-b547-4b9f-8360-78c57a757266\",\n \"headers\": {\n \"Content-Type\": \"application/json\"\n },\n \"tableName\": \"contacts\",\n \"select\": \"PrimaryKey,ROWID\",\n \"notifySchemaChanges\": true\n}", + "form": [] + }, + "auth": { + "type": "basic", + "basic": { + "username": "{{username}}", + "password": "{{password}}" + } + } + }, + { + "_id": "848da16a-12a2-460f-be2b-8139fc9343a0", + "colId": "history", + "containerId": "", + "name": "metadata", + "url": "{{base_url}}/fmi/odata/v4/fmdapi_test/$metadata", + "method": "GET", + "sortNum": 0, + "created": "2025-11-19T14:17:07.725Z", + "modified": "2025-11-19T18:23:08.111Z", + "headers": [], + "body": { + "type": "json", + "raw": "{\n \"webhook\": \"https://webhook.site/0137b6ae-b547-4b9f-8360-78c57a757266\",\n \"headers\": {\n \"Content-Type\": \"application/json\"\n },\n \"tableName\": \"contacts\",\n \"select\": \"PrimaryKey,ROWID\",\n \"notifySchemaChanges\": true\n}", + "form": [] + }, + "auth": { + "type": "basic", + "basic": { + "username": "{{username}}", + "password": "{{password}}" + } + } + } +] \ No newline at end of file