diff --git a/.changeset/real-ideas-sort.md b/.changeset/real-ideas-sort.md new file mode 100644 index 00000000..58f02bac --- /dev/null +++ b/.changeset/real-ideas-sort.md @@ -0,0 +1,6 @@ +--- +"@proofkit/typegen": minor +--- + +New command: `npx @proofkit/typegen@latest ui` will launch a web UI for configuring and running your typegen config. +(beta) support for @proofkit/fmodata typegen config. diff --git a/packages/fmodata/package.json b/packages/fmodata/package.json index 18decf0d..7b51a9f0 100644 --- a/packages/fmodata/package.json +++ b/packages/fmodata/package.json @@ -1,6 +1,6 @@ { "name": "@proofkit/fmodata", - "version": "0.1.0-alpha.19", + "version": "0.1.0-alpha.20", "description": "FileMaker OData API client", "repository": "git@github.com:proofgeist/proofkit.git", "author": "Eric <37158449+eluce2@users.noreply.github.com>", diff --git a/packages/fmodata/scripts/experiment-batch.ts b/packages/fmodata/scripts/experiment-batch.ts deleted file mode 100644 index 44174f20..00000000 --- a/packages/fmodata/scripts/experiment-batch.ts +++ /dev/null @@ -1,614 +0,0 @@ -/** - * Batch Operations Experiment Script - * - * This script experiments with batch operations containing inserts, updates, - * and deletes to understand how FileMaker handles them, especially when - * some operations fail. - * - * Usage: - * cd packages/fmodata && pnpm tsx scripts/experiment-batch.ts - */ - -import { config } from "dotenv"; -import path from "path"; -import { fileURLToPath } from "url"; -import { z } from "zod/v4"; -import { - FMServerConnection, - fmTableOccurrence, - textField, - timestampField, - eq, -} from "../src/index"; - -// Get __dirname equivalent in ES modules -const __filename = fileURLToPath(import.meta.url); -const __dirname = path.dirname(__filename); - -// Load environment variables -config({ path: path.resolve(__dirname, "../.env.local") }); - -const serverUrl = process.env.FMODATA_SERVER_URL; -const username = process.env.FMODATA_USERNAME; -const password = process.env.FMODATA_PASSWORD; -const database = process.env.FMODATA_DATABASE; - -if (!serverUrl || !username || !password || !database) { - throw new Error( - "Environment variables required: FMODATA_SERVER_URL, FMODATA_USERNAME, FMODATA_PASSWORD, FMODATA_DATABASE", - ); -} - -// Define schemas -const contactsTO = fmTableOccurrence("contacts", { - PrimaryKey: textField().primaryKey(), - CreationTimestamp: timestampField(), - CreatedBy: textField(), - ModificationTimestamp: timestampField(), - ModifiedBy: textField(), - name: textField(), - hobby: textField(), - id_user: textField(), -}); - -// Create connection -const connection = new FMServerConnection({ - serverUrl, - auth: { username, password }, -}); - -const db = connection.database(database, { - occurrences: [contactsTO], -}); - -// Track created records for cleanup -const createdRecordIds: string[] = []; - -async function cleanup() { - console.log("\n๐Ÿงน Cleaning up created records..."); - for (const id of createdRecordIds) { - try { - await db.from("contacts").delete().byId(id).execute(); - console.log(` Deleted: ${id}`); - } catch (error) { - console.log(` Failed to delete ${id}:`, error); - } - } -} - -async function experiment1_MultipleInserts() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 1: Multiple Inserts in a Batch"); - console.log("=".repeat(60)); - - const timestamp = Date.now(); - const insert1 = db.from("contacts").insert({ - name: `Batch Insert 1 - ${timestamp}`, - hobby: "Insert Test", - }); - - const insert2 = db.from("contacts").insert({ 
- name: `Batch Insert 2 - ${timestamp}`, - hobby: "Insert Test", - }); - - const insert3 = db.from("contacts").insert({ - name: `Batch Insert 3 - ${timestamp}`, - hobby: "Insert Test", - }); - - console.log("\nExecuting batch with 3 insert operations..."); - - const result = await db.batch([insert1, insert2, insert3]).execute(); - - console.log("\nResult:"); - console.log(JSON.stringify(result, null, 2)); - - if (result.data) { - // Track for cleanup - for (const item of result.data) { - if (item && typeof item === "object" && "PrimaryKey" in item) { - createdRecordIds.push(item.PrimaryKey as string); - } - } - } - - return result; -} - -async function experiment2_MixedOperations() { - console.log("\n" + "=".repeat(60)); - console.log( - "EXPERIMENT 2: Mixed Operations (GET + INSERT + UPDATE + DELETE)", - ); - console.log("=".repeat(60)); - - // First, create a record we can update/delete - const timestamp = Date.now(); - const setupResult = await db - .from("contacts") - .insert({ - name: `Setup Record - ${timestamp}`, - hobby: "Will be updated", - }) - .execute(); - - if (setupResult.error || !setupResult.data) { - console.log("Failed to create setup record:", setupResult.error); - return; - } - - const setupRecordId = setupResult.data.PrimaryKey; - console.log(`\nCreated setup record: ${setupRecordId}`); - - // Now create a batch with mixed operations - const listQuery = db.from("contacts").list().top(2); - - const insertOp = db.from("contacts").insert({ - name: `Mixed Batch Insert - ${timestamp}`, - hobby: "Mixed Test", - }); - - const updateOp = db - .from("contacts") - .update({ hobby: "Updated via batch" }) - .byId(setupRecordId); - - const deleteOp = db.from("contacts").delete().byId(setupRecordId); - - console.log("\nExecuting batch with: GET, INSERT, UPDATE, DELETE..."); - - const result = await db - .batch([listQuery, insertOp, updateOp, deleteOp]) - .execute(); - - console.log("\nResult:"); - console.log(JSON.stringify(result, null, 2)); - - if (result.data) { - // Track insert result for cleanup - const insertResult = result.data[1]; - if ( - insertResult && - typeof insertResult === "object" && - "PrimaryKey" in insertResult - ) { - createdRecordIds.push(insertResult.PrimaryKey as string); - } - } - - return result; -} - -async function experiment3_FailingOperation() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 3: Batch with a Failing Operation in the Middle"); - console.log("=".repeat(60)); - - const timestamp = Date.now(); - - // Create a valid insert - const insert1 = db.from("contacts").insert({ - name: `Before Failure - ${timestamp}`, - hobby: "Should succeed", - }); - - // Try to update a non-existent record (should fail) - const failingUpdate = db - .from("contacts") - .update({ hobby: "This should fail" }) - .byId("00000000-0000-0000-0000-000000000000"); - - // Another valid insert (should this succeed or fail?) 
- const insert2 = db.from("contacts").insert({ - name: `After Failure - ${timestamp}`, - hobby: "Should this succeed?", - }); - - console.log( - "\nExecuting batch with: INSERT (valid), UPDATE (invalid ID), INSERT (valid)...", - ); - console.log( - "Question: What happens to the third operation when the second fails?", - ); - - const result = await db.batch([insert1, failingUpdate, insert2]).execute(); - - console.log("\nResult:"); - console.log(JSON.stringify(result, null, 2)); - - if (result.data) { - for (const item of result.data) { - if (item && typeof item === "object" && "PrimaryKey" in item) { - createdRecordIds.push(item.PrimaryKey as string); - } - } - } - - return result; -} - -async function experiment4_FailingDelete() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 4: Batch with a Failing Delete"); - console.log("=".repeat(60)); - - const timestamp = Date.now(); - - // Create a valid insert - const insert1 = db.from("contacts").insert({ - name: `Before Delete Fail - ${timestamp}`, - hobby: "Should succeed", - }); - - // Try to delete a non-existent record - const failingDelete = db - .from("contacts") - .delete() - .byId("00000000-0000-0000-0000-000000000000"); - - // Another valid insert - const insert2 = db.from("contacts").insert({ - name: `After Delete Fail - ${timestamp}`, - hobby: "Should this succeed?", - }); - - console.log("\nExecuting batch with: INSERT, DELETE (invalid ID), INSERT..."); - - const result = await db.batch([insert1, failingDelete, insert2]).execute(); - - console.log("\nResult:"); - console.log(JSON.stringify(result, null, 2)); - - if (result.data) { - for (const item of result.data) { - if (item && typeof item === "object" && "PrimaryKey" in item) { - createdRecordIds.push(item.PrimaryKey as string); - } - } - } - - return result; -} - -async function experiment5_AllGetWithOneFailure() { - console.log("\n" + "=".repeat(60)); - console.log( - "EXPERIMENT 5: Multiple GETs with One Filter that Returns Nothing", - ); - console.log("=".repeat(60)); - - // Query that should return results - const query1 = db.from("contacts").list().top(2); - - // Query with a filter that returns empty (not an error, just no results) - const query2 = db - .from(contactsTO) - .list() - .where(eq(contactsTO.name, "THIS_NAME_DEFINITELY_DOES_NOT_EXIST_12345")); - - // Another query that should return results - const query3 = db.from("contacts").list().top(1); - - console.log( - "\nExecuting batch with: GET (valid), GET (empty filter), GET (valid)...", - ); - - const result = await db.batch([query1, query2, query3]).execute(); - - console.log("\nResult:"); - console.log(JSON.stringify(result, null, 2)); - - return result; -} - -async function experiment6_RawResponseInspection() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 6: Raw Response Inspection - Direct Fetch"); - console.log("=".repeat(60)); - - // Make a direct batch request to see raw response - const timestamp = Date.now(); - const boundary = "batch_direct_test_123"; - - const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; - const batchUrl = `${baseUrl}/$batch`; - - // Build a simple batch body with one GET - const batchBody = [ - `--${boundary}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `GET ${baseUrl}/contacts?$top=1 HTTP/1.1`, - "", - "", - `--${boundary}--`, - ].join("\r\n"); - - console.log("\n--- Sending Request ---"); - console.log("URL:", batchUrl); - console.log("Body:", batchBody); - - const authHeader = `Basic 
${Buffer.from(`${username}:${password}`).toString("base64")}`; - - const response = await fetch(batchUrl, { - method: "POST", - headers: { - Authorization: authHeader, - "Content-Type": `multipart/mixed; boundary=${boundary}`, - "OData-Version": "4.0", - }, - body: batchBody, - }); - - console.log("\n--- Response Info ---"); - console.log("Status:", response.status, response.statusText); - console.log("Content-Type:", response.headers.get("content-type")); - - const responseText = await response.text(); - console.log("\n--- Raw Response Body ---"); - console.log(responseText); - console.log("--- End Raw Response ---"); -} - -async function experiment7_RawResponseWithInsert() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 7: Raw Response - Insert with Prefer header"); - console.log("=".repeat(60)); - - const timestamp = Date.now(); - const boundary = "batch_insert_test_456"; - const changesetBoundary = "changeset_insert_789"; - - const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; - const batchUrl = `${baseUrl}/$batch`; - - const insertBody = JSON.stringify({ - name: `Direct Insert Test - ${timestamp}`, - hobby: "Testing", - }); - - // Build a batch with INSERT using return=representation - const batchBody = [ - `--${boundary}`, - `Content-Type: multipart/mixed; boundary=${changesetBoundary}`, - "", - `--${changesetBoundary}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `POST ${baseUrl}/contacts HTTP/1.1`, - "Content-Type: application/json", - "Prefer: return=representation", - `Content-Length: ${insertBody.length}`, - "", - insertBody, - `--${changesetBoundary}--`, - `--${boundary}--`, - ].join("\r\n"); - - console.log("\n--- Sending Insert Request ---"); - console.log("Body:\n", batchBody); - - const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`; - - const response = await fetch(batchUrl, { - method: "POST", - headers: { - Authorization: authHeader, - "Content-Type": `multipart/mixed; boundary=${boundary}`, - "OData-Version": "4.0", - }, - body: batchBody, - }); - - console.log("\n--- Response Info ---"); - console.log("Status:", response.status, response.statusText); - console.log("Content-Type:", response.headers.get("content-type")); - - const responseText = await response.text(); - console.log("\n--- Raw Response Body ---"); - console.log(responseText); - console.log("--- End Raw Response ---"); - - // Try to extract created record ID for cleanup - const pkMatch = responseText.match(/"PrimaryKey":\s*"([^"]+)"/); - if (pkMatch && pkMatch[1]) { - createdRecordIds.push(pkMatch[1]); - console.log("\nCreated record ID:", pkMatch[1]); - } -} - -async function experiment8_TrueError() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 8: Raw Response - Query Non-Existent Table"); - console.log("=".repeat(60)); - - const boundary = "batch_error_test"; - const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; - const batchUrl = `${baseUrl}/$batch`; - - // Build: GET (valid), GET (non-existent table), GET (valid) - const batchBody = [ - `--${boundary}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `GET ${baseUrl}/contacts?$top=1 HTTP/1.1`, - "", - "", - `--${boundary}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `GET ${baseUrl}/THIS_TABLE_DOES_NOT_EXIST?$top=1 HTTP/1.1`, - "", - "", - `--${boundary}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `GET 
${baseUrl}/contacts?$top=2 HTTP/1.1`, - "", - "", - `--${boundary}--`, - ].join("\r\n"); - - console.log("\n--- Sending Request with Non-Existent Table ---"); - - const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`; - - const response = await fetch(batchUrl, { - method: "POST", - headers: { - Authorization: authHeader, - "Content-Type": `multipart/mixed; boundary=${boundary}`, - "OData-Version": "4.0", - }, - body: batchBody, - }); - - console.log("\n--- Response Info ---"); - console.log("Status:", response.status, response.statusText); - - const responseText = await response.text(); - console.log("\n--- Raw Response Body ---"); - console.log(responseText); - console.log("--- End Raw Response ---"); -} - -async function experiment9_RawResponseWithFailure() { - console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 9: Raw Response - Mixed with Failure"); - console.log("=".repeat(60)); - - const timestamp = Date.now(); - const boundary = "batch_fail_test"; - const cs1 = "changeset_1"; - const cs2 = "changeset_2"; - - const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; - const batchUrl = `${baseUrl}/$batch`; - - const insertBody1 = JSON.stringify({ - name: `Before Fail - ${timestamp}`, - hobby: "Test", - }); - const updateBody = JSON.stringify({ hobby: "Should fail" }); - const insertBody2 = JSON.stringify({ - name: `After Fail - ${timestamp}`, - hobby: "Test", - }); - - // Build: INSERT (valid), UPDATE (invalid ID), INSERT (valid) - const batchBody = [ - // First changeset: valid insert - `--${boundary}`, - `Content-Type: multipart/mixed; boundary=${cs1}`, - "", - `--${cs1}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `POST ${baseUrl}/contacts HTTP/1.1`, - "Content-Type: application/json", - "Prefer: return=representation", - `Content-Length: ${insertBody1.length}`, - "", - insertBody1, - `--${cs1}--`, - // Second changeset: invalid update - `--${boundary}`, - `Content-Type: multipart/mixed; boundary=${cs2}`, - "", - `--${cs2}`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `PATCH ${baseUrl}/contacts('00000000-0000-0000-0000-000000000000') HTTP/1.1`, - "Content-Type: application/json", - `Content-Length: ${updateBody.length}`, - "", - updateBody, - `--${cs2}--`, - // Third changeset: valid insert - `--${boundary}`, - `Content-Type: multipart/mixed; boundary=changeset_3`, - "", - `--changeset_3`, - "Content-Type: application/http", - "Content-Transfer-Encoding: binary", - "", - `POST ${baseUrl}/contacts HTTP/1.1`, - "Content-Type: application/json", - "Prefer: return=representation", - `Content-Length: ${insertBody2.length}`, - "", - insertBody2, - `--changeset_3--`, - `--${boundary}--`, - ].join("\r\n"); - - console.log("\n--- Sending Mixed Request with Invalid Update ---"); - - const authHeader = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`; - - const response = await fetch(batchUrl, { - method: "POST", - headers: { - Authorization: authHeader, - "Content-Type": `multipart/mixed; boundary=${boundary}`, - "OData-Version": "4.0", - }, - body: batchBody, - }); - - console.log("\n--- Response Info ---"); - console.log("Status:", response.status, response.statusText); - - const responseText = await response.text(); - console.log("\n--- Raw Response Body ---"); - console.log(responseText); - console.log("--- End Raw Response ---"); - - // Extract created record IDs for cleanup - const pkMatches = responseText.matchAll(/"PrimaryKey":\s*"([^"]+)"/g); 
- for (const match of pkMatches) { - if (match[1]) { - createdRecordIds.push(match[1]); - console.log("Created record ID:", match[1]); - } - } -} - -async function main() { - console.log("๐Ÿ”ฌ Batch Operations Experiment"); - console.log("================================"); - console.log(`Server: ${serverUrl}`); - console.log(`Database: ${database}`); - console.log(""); - - try { - // Run experiments - await experiment1_MultipleInserts(); - await experiment2_MixedOperations(); - await experiment3_FailingOperation(); - await experiment4_FailingDelete(); - await experiment5_AllGetWithOneFailure(); - await experiment6_RawResponseInspection(); - await experiment7_RawResponseWithInsert(); - await experiment8_TrueError(); - await experiment9_RawResponseWithFailure(); - - console.log("\n" + "=".repeat(60)); - console.log("ALL EXPERIMENTS COMPLETE"); - console.log("=".repeat(60)); - } catch (error) { - console.error("\nโŒ Experiment failed with error:", error); - } finally { - await cleanup(); - } -} - -main().catch(console.error); diff --git a/packages/fmodata/src/client/builders/expand-builder.ts b/packages/fmodata/src/client/builders/expand-builder.ts index 89d5ae20..97373eba 100644 --- a/packages/fmodata/src/client/builders/expand-builder.ts +++ b/packages/fmodata/src/client/builders/expand-builder.ts @@ -40,7 +40,7 @@ export class ExpandBuilder { return configs.map((config) => { const targetTable = config.targetTable; - let targetSchema: Record | undefined; + let targetSchema: Partial> | undefined; if (targetTable) { const baseTableConfig = getBaseTableConfig(targetTable); const containerFields = baseTableConfig.containerFields || []; diff --git a/packages/fmodata/src/client/database.ts b/packages/fmodata/src/client/database.ts index 5b350fb2..2017e8b3 100644 --- a/packages/fmodata/src/client/database.ts +++ b/packages/fmodata/src/client/database.ts @@ -6,6 +6,19 @@ import { SchemaManager } from "./schema-manager"; import { FMTable } from "../orm/table"; import { WebhookManager } from "./webhook-builder"; +type MetadataArgs = { + format?: "xml" | "json"; + /** + * If provided, only the metadata for the specified table will be returned. + * Requires FileMaker Server 22.0.4 or later. + */ + tableName?: string; + /** + * If true, a reduced payload size will be returned by omitting certain annotations. + */ + reduceAnnotations?: boolean; +}; + export class Database { private _useEntityIds: boolean = false; private _includeSpecialColumns: IncludeSpecialColumns; @@ -62,19 +75,35 @@ export class Database { * Retrieves the OData metadata for this database. * @param args Optional configuration object * @param args.format The format to retrieve metadata in. Defaults to "json". + * @param args.tableName If provided, only the metadata for the specified table will be returned. Requires FileMaker Server 22.0.4 or later. + * @param args.reduceAnnotations If true, a reduced payload size will be returned by omitting certain annotations. 
* @returns The metadata in the specified format */ - async getMetadata(args: { format: "xml" }): Promise; - async getMetadata(args?: { format?: "json" }): Promise; - async getMetadata(args?: { - format?: "xml" | "json"; - }): Promise { + async getMetadata(args: { format: "xml" } & MetadataArgs): Promise; + async getMetadata( + args?: { format?: "json" } & MetadataArgs, + ): Promise; + async getMetadata(args?: MetadataArgs): Promise { + // Build the URL - if tableName is provided, append %23{tableName} to the path + let url = `/${this.databaseName}/$metadata`; + if (args?.tableName) { + url = `/${this.databaseName}/$metadata%23${args.tableName}`; + } + + // Build headers + const headers: Record = { + Accept: args?.format === "xml" ? "application/xml" : "application/json", + }; + + // Add Prefer header if reduceAnnotations is true + if (args?.reduceAnnotations) { + headers["Prefer"] = 'include-annotations="-*"'; + } + const result = await this.context._makeRequest< Record | string - >(`/${this.databaseName}/$metadata`, { - headers: { - Accept: args?.format === "xml" ? "application/xml" : "application/json", - }, + >(url, { + headers, }); if (result.error) { throw result.error; diff --git a/packages/fmodata/src/client/entity-set.ts b/packages/fmodata/src/client/entity-set.ts index a2612ba1..a43afd71 100644 --- a/packages/fmodata/src/client/entity-set.ts +++ b/packages/fmodata/src/client/entity-set.ts @@ -19,6 +19,7 @@ import { getDefaultSelect, getTableName, getTableColumns, + getTableSchema, } from "../orm/table"; import type { FieldBuilder } from "../orm/field-builders"; import { createLogger, InternalLogger } from "../logger"; @@ -121,20 +122,8 @@ export class EntitySet< if (this.occurrence) { // FMTable - access via helper functions const defaultSelectValue = getDefaultSelect(this.occurrence); - const tableSchema = (this.occurrence as any)[FMTableClass.Symbol.Schema]; - let schema: Record | undefined; - - if (tableSchema) { - // Extract schema from StandardSchemaV1 - const zodSchema = tableSchema["~standard"]?.schema; - if ( - zodSchema && - typeof zodSchema === "object" && - "shape" in zodSchema - ) { - schema = zodSchema.shape as Record; - } - } + // Schema is stored directly as Partial> + const schema = getTableSchema(this.occurrence); if (defaultSelectValue === "schema") { // Use getTableColumns to get all columns and select them @@ -226,20 +215,8 @@ export class EntitySet< if (this.occurrence) { // FMTable - access via helper functions const defaultSelectValue = getDefaultSelect(this.occurrence); - const tableSchema = (this.occurrence as any)[FMTableClass.Symbol.Schema]; - let schema: Record | undefined; - - if (tableSchema) { - // Extract schema from StandardSchemaV1 - const zodSchema = tableSchema["~standard"]?.schema; - if ( - zodSchema && - typeof zodSchema === "object" && - "shape" in zodSchema - ) { - schema = zodSchema.shape as Record; - } - } + // Schema is stored directly as Partial> + const schema = getTableSchema(this.occurrence); if (defaultSelectValue === "schema") { // Use getTableColumns to get all columns and select them diff --git a/packages/fmodata/src/client/error-parser.ts b/packages/fmodata/src/client/error-parser.ts index 7533a2e3..01d31fc2 100644 --- a/packages/fmodata/src/client/error-parser.ts +++ b/packages/fmodata/src/client/error-parser.ts @@ -55,3 +55,6 @@ export async function parseErrorResponse( return new HTTPError(url, response.status, response.statusText, errorBody); } + + + diff --git a/packages/fmodata/src/client/query/response-processor.ts 
b/packages/fmodata/src/client/query/response-processor.ts index ba99121b..98ab4d61 100644 --- a/packages/fmodata/src/client/query/response-processor.ts +++ b/packages/fmodata/src/client/query/response-processor.ts @@ -7,7 +7,7 @@ import { transformResponseFields } from "../../transform"; import { validateListResponse, validateSingleResponse } from "../../validation"; import type { ExpandValidationConfig } from "../../validation"; import type { ExpandConfig } from "./expand-builder"; -import { FMTable as FMTableClass } from "../../orm/table"; +import { FMTable as FMTableClass, getTableSchema } from "../../orm/table"; import { InternalLogger } from "../../logger"; /** @@ -38,20 +38,10 @@ function buildExpandValidationConfigs( const targetTable = config.targetTable; // Extract schema from target table/occurrence - let targetSchema: Record | undefined; - if (targetTable) { - const tableSchema = (targetTable as any)[FMTableClass.Symbol.Schema]; - if (tableSchema) { - const zodSchema = tableSchema["~standard"]?.schema; - if ( - zodSchema && - typeof zodSchema === "object" && - "shape" in zodSchema - ) { - targetSchema = zodSchema.shape as Record; - } - } - } + // Schema is stored directly as Partial> + const targetSchema = targetTable + ? (getTableSchema(targetTable) as Record | undefined) + : undefined; // Extract selected fields from options const selectedFields = config.options?.select @@ -194,16 +184,10 @@ export async function processQueryResponse( // Validation path // Get schema from occurrence if available - let schema: Record | undefined; - if (occurrence) { - const tableSchema = (occurrence as any)[FMTableClass.Symbol.Schema]; - if (tableSchema) { - const zodSchema = tableSchema["~standard"]?.schema; - if (zodSchema && typeof zodSchema === "object" && "shape" in zodSchema) { - schema = zodSchema.shape as Record; - } - } - } + // Schema is stored directly as Partial> + const schema = occurrence + ? getTableSchema(occurrence) + : undefined; const selectedFields = config.queryOptions.select ? ((Array.isArray(config.queryOptions.select) diff --git a/packages/fmodata/src/orm/field-builders.ts b/packages/fmodata/src/orm/field-builders.ts index f099dc02..b856d1d5 100644 --- a/packages/fmodata/src/orm/field-builders.ts +++ b/packages/fmodata/src/orm/field-builders.ts @@ -29,6 +29,7 @@ export class FieldBuilder< private _outputValidator?: StandardSchemaV1; private _inputValidator?: StandardSchemaV1; private _fieldType: string; + private _comment?: string; constructor(fieldType: string) { this._fieldType = fieldType; @@ -120,6 +121,19 @@ export class FieldBuilder< return builder; } + /** + * Add a comment to this field for metadata purposes. + * This helps future developers understand the purpose of the field. + * + * @example + * textField().comment("Account name of the user who last modified each record") + */ + comment(comment: string): FieldBuilder { + const builder = this._clone(); + builder._comment = comment; + return builder; + } + /** * Get the metadata configuration for this field. 
* @internal Used by fmTableOccurrence to extract field configuration @@ -133,6 +147,7 @@ export class FieldBuilder< entityId: this._entityId, outputValidator: this._outputValidator, inputValidator: this._inputValidator, + comment: this._comment, }; } @@ -150,6 +165,7 @@ export class FieldBuilder< builder._entityId = this._entityId; builder._outputValidator = this._outputValidator; builder._inputValidator = this._inputValidator; + builder._comment = this._comment; return builder; } } diff --git a/packages/fmodata/src/orm/table.ts b/packages/fmodata/src/orm/table.ts index b61d9d73..3bd2808e 100644 --- a/packages/fmodata/src/orm/table.ts +++ b/packages/fmodata/src/orm/table.ts @@ -2,7 +2,7 @@ import type { StandardSchemaV1 } from "@standard-schema/spec"; import { FieldBuilder, type ContainerDbType } from "./field-builders"; import type { FieldBuilder as FieldBuilderType } from "./field-builders"; import { Column, createColumn } from "./column"; -import { z } from "zod/v4"; +// import { z } from "zod/v4"; /** * Extract the output type from a FieldBuilder. @@ -89,6 +89,7 @@ const FMTableNavigationPaths = Symbol.for("fmodata:FMTableNavigationPaths"); const FMTableDefaultSelect = Symbol.for("fmodata:FMTableDefaultSelect"); const FMTableBaseTableConfig = Symbol.for("fmodata:FMTableBaseTableConfig"); const FMTableUseEntityIds = Symbol.for("fmodata:FMTableUseEntityIds"); +const FMTableComment = Symbol.for("fmodata:FMTableComment"); /** * Base table class with Symbol-based internal properties. @@ -113,6 +114,7 @@ export class FMTable< NavigationPaths: FMTableNavigationPaths, DefaultSelect: FMTableDefaultSelect, BaseTableConfig: FMTableBaseTableConfig, + Comment: FMTableComment, }; /** @internal */ @@ -125,7 +127,10 @@ export class FMTable< [FMTableUseEntityIds]?: boolean; /** @internal */ - [FMTableSchema]: StandardSchemaV1>; + [FMTableComment]?: string; + + /** @internal */ + [FMTableSchema]: Partial>; /** @internal */ [FMTableFields]: TFields; @@ -141,8 +146,8 @@ export class FMTable< /** @internal */ [FMTableBaseTableConfig]: { - schema: Record; - inputSchema?: Record; + schema: Partial>; + inputSchema?: Partial>; idField?: keyof TFields; required: readonly (keyof TFields)[]; readOnly: readonly (keyof TFields)[]; @@ -154,13 +159,14 @@ export class FMTable< name: TName; entityId?: `FMTID:${string}`; useEntityIds?: boolean; - schema: StandardSchemaV1>; + comment?: string; + schema: Partial>; fields: TFields; navigationPaths: TNavigationPaths; defaultSelect: "all" | "schema" | Record>; baseTableConfig: { - schema: Record; - inputSchema?: Record; + schema: Partial>; + inputSchema?: Partial>; idField?: keyof TFields; required: readonly (keyof TFields)[]; readOnly: readonly (keyof TFields)[]; @@ -171,6 +177,7 @@ export class FMTable< this[FMTableName] = config.name; this[FMTableEntityId] = config.entityId; this[FMTableUseEntityIds] = config.useEntityIds; + this[FMTableComment] = config.comment; this[FMTableSchema] = config.schema; this[FMTableFields] = config.fields; this[FMTableNavigationPaths] = config.navigationPaths; @@ -267,6 +274,9 @@ export interface FMTableOccurrenceOptions< /** The entity ID (FMTID) for this table occurrence */ entityId?: `FMTID:${string}`; + /** The comment for this table */ + comment?: string; + /** * Default select behavior: * - "all": Select all fields (including related tables) @@ -358,39 +368,14 @@ export function fmTableOccurrence< } // Build Zod schema from field builders (output/read validators) - const zodSchema: Record = {}; + const outputSchema: Partial> = {}; // 
Build input schema from field builders (input/write validators) const inputSchema: Record = {}; for (const { fieldName, config } of fieldConfigs) { - // Use outputValidator if provided, otherwise create a basic validator + // Use outputValidator if provided if (config.outputValidator) { - zodSchema[fieldName] = config.outputValidator; - } else { - // Create a default validator based on field type and nullability - let validator: any; - switch (config.fieldType) { - case "text": - case "date": - case "time": - case "timestamp": - case "container": - case "calculated": - validator = z.string(); - break; - case "number": - validator = z.number(); - break; - default: - validator = z.unknown(); - } - - // Add nullability if not marked as notNull - if (!config.notNull) { - validator = validator.nullable(); - } - - zodSchema[fieldName] = validator; + outputSchema[fieldName as keyof TFields] = config.outputValidator; } // Store inputValidator if provided (for write operations) @@ -399,18 +384,13 @@ export function fmTableOccurrence< } } - // Create a schema validator for the entire table - const tableSchema = z.object(zodSchema) as unknown as StandardSchemaV1< - any, - InferSchemaFromFields - >; - // Build BaseTable-compatible config const baseTableConfig = { - schema: zodSchema as Record, - inputSchema: (Object.keys(inputSchema).length > 0 - ? inputSchema - : undefined) as Record | undefined, + schema: outputSchema as Partial>, + inputSchema: + Object.keys(inputSchema).length > 0 + ? (inputSchema as Partial>) + : undefined, idField: idField as keyof TFields | undefined, required: required as readonly (keyof TFields)[], readOnly: readOnly as readonly (keyof TFields)[], @@ -449,7 +429,8 @@ export function fmTableOccurrence< name, entityId: options?.entityId, useEntityIds: options?.useEntityIds, - schema: tableSchema, + comment: options?.comment, + schema: outputSchema, fields, navigationPaths, defaultSelect: resolvedDefaultSelect, @@ -621,11 +602,11 @@ export function getTableEntityId>( /** * Get the schema validator from an FMTable instance. * @param table - FMTable instance - * @returns The StandardSchemaV1 validator + * @returns The StandardSchemaV1 validator record (partial - only fields with validators) */ export function getTableSchema>( table: T, -): StandardSchemaV1 { +): Partial> { return table[FMTableSchema]; } @@ -732,6 +713,17 @@ export function getTableId>(table: T): string { return table[FMTableEntityId] ?? table[FMTableName]; } +/** + * Get the comment from an FMTable instance. + * @param table - FMTable instance + * @returns The comment string or undefined if not set + */ +export function getTableComment>( + table: T, +): string | undefined { + return table[FMTableComment]; +} + /** * Get all columns from a table as an object. * Useful for selecting all fields except some using destructuring. 
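For context on the comment support added in the table.ts and field-builders.ts hunks above, here is a minimal usage sketch (not part of the diff). It assumes `fmTableOccurrence`, `textField`, and `getTableComment` are all re-exported from the package entry point like the other table helpers, and the table and field names are invented for illustration:

import {
  fmTableOccurrence,
  textField,
  getTableComment,
} from "@proofkit/fmodata";

// Hypothetical occurrence using the new field-level and table-level comments
const contacts = fmTableOccurrence(
  "contacts",
  {
    PrimaryKey: textField().primaryKey(),
    ModifiedBy: textField()
      .readOnly()
      .comment("Account name of the user who last modified each record"),
  },
  { comment: "Primary contacts table" },
);

// The table-level comment can be read back with the new helper
const tableComment = getTableComment(contacts); // => "Primary contacts table"

Field comments flow through FieldBuilder.getConfig() alongside the other field metadata, and the typegen changes later in this diff emit matching .comment(...) calls and a table-level comment option from the @FMComment annotations found in the OData metadata.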
diff --git a/packages/fmodata/src/validation.ts b/packages/fmodata/src/validation.ts index f13b7530..6382bf3d 100644 --- a/packages/fmodata/src/validation.ts +++ b/packages/fmodata/src/validation.ts @@ -19,7 +19,7 @@ import { */ export async function validateAndTransformInput>( data: Partial, - inputSchema?: Record, + inputSchema?: Partial>, ): Promise> { // If no input schema, return data as-is if (!inputSchema) { @@ -30,6 +30,9 @@ export async function validateAndTransformInput>( // Process each field that has an input validator for (const [fieldName, fieldSchema] of Object.entries(inputSchema)) { + // Skip if no schema for this field + if (!fieldSchema) continue; + // Only process fields that are present in the input data if (fieldName in data) { const inputValue = data[fieldName]; @@ -83,7 +86,7 @@ export async function validateAndTransformInput>( // Type for expand validation configuration export type ExpandValidationConfig = { relation: string; - targetSchema?: Record; + targetSchema?: Partial>; targetTable?: FMTable; table?: FMTable; // For transformation selectedFields?: string[]; @@ -96,7 +99,7 @@ export type ExpandValidationConfig = { */ export async function validateRecord>( record: any, - schema: Record | undefined, + schema: Partial> | undefined, selectedFields?: (keyof T)[], expandConfigs?: ExpandValidationConfig[], includeSpecialColumns?: boolean, @@ -316,6 +319,9 @@ export async function validateRecord>( const validatedRecord: Record = { ...restWithoutSystemFields }; for (const [fieldName, fieldSchema] of Object.entries(schema)) { + // Skip if no schema for this field + if (!fieldSchema) continue; + const input = rest[fieldName]; try { let result = fieldSchema["~standard"].validate(input); @@ -468,7 +474,7 @@ export async function validateRecord>( */ export async function validateListResponse>( response: any, - schema: Record | undefined, + schema: Partial> | undefined, selectedFields?: (keyof T)[], expandConfigs?: ExpandValidationConfig[], includeSpecialColumns?: boolean, @@ -531,7 +537,7 @@ export async function validateListResponse>( */ export async function validateSingleResponse>( response: any, - schema: Record | undefined, + schema: Partial> | undefined, selectedFields?: (keyof T)[], expandConfigs?: ExpandValidationConfig[], mode: "exact" | "maybe" = "maybe", diff --git a/packages/fmodata/tests/tsconfig.build.json b/packages/fmodata/tests/tsconfig.build.json index 1cfdbed6..55d2550f 100644 --- a/packages/fmodata/tests/tsconfig.build.json +++ b/packages/fmodata/tests/tsconfig.build.json @@ -43,3 +43,6 @@ + + + diff --git a/packages/typegen/package.json b/packages/typegen/package.json index 420d8191..5abc8387 100644 --- a/packages/typegen/package.json +++ b/packages/typegen/package.json @@ -28,13 +28,7 @@ "default": "./dist/esm/types.js" } }, - "./api": { - "import": { - "types": "./dist/esm/server/contract.d.ts", - "default": "./dist/esm/server/contract.js" - } - }, - "./api-app": { + "./webui-server": { "import": { "types": "./dist/esm/server/app.d.ts", "default": "./dist/esm/server/app.js" diff --git a/packages/typegen/proofkit-typegen.config.jsonc b/packages/typegen/proofkit-typegen.config.jsonc index 3fb62456..2e5db011 100644 --- a/packages/typegen/proofkit-typegen.config.jsonc +++ b/packages/typegen/proofkit-typegen.config.jsonc @@ -1,45 +1,57 @@ { "config": [ - { - "type": "fmdapi", - "layouts": [], - "path": "schema", - "clearOldFiles": false, - "validator": "zod/v4", - "clientSuffix": "Layout", - "generateClient": true - }, { "type": "fmodata", - "configName": 
"OData Test", "path": "schema", - "metadataPath": "schema/metadata.xml", - "downloadMetadata": true, "clearOldFiles": false, + "alwaysOverrideFieldNames": true, "tables": [ { - "tableName": "customer", + "tableName": "isolated_contacts", + "reduceMetadata": false, + }, + { + "tableName": "fmdapi_test", "fields": [ + { + "fieldName": "CreationTimestamp", + "exclude": true, + }, + { + "fieldName": "CreatedBy", + "exclude": true, + }, { "fieldName": "ModificationTimestamp", - "exclude": true + "exclude": true, }, { "fieldName": "ModifiedBy", - "exclude": true - } - ] - }, - { - "tableName": "contacts", - "fields": [ + "exclude": true, + }, + { + "fieldName": "anything", + "exclude": true, + }, + { + "fieldName": "fieldWithValues", + "exclude": true, + }, + { + "fieldName": "myContainer", + "exclude": true, + }, + { + "fieldName": "repeatingContainer", + "exclude": true, + }, { - "fieldName": "hobby", - "exclude": true - } - ] - } - ] - } - ] + "fieldName": "booleanField", + "exclude": true, + }, + ], + }, + ], + }, + ], } diff --git a/packages/typegen/src/fmodata/downloadMetadata.ts b/packages/typegen/src/fmodata/downloadMetadata.ts index 8b8cf974..fd761c62 100644 --- a/packages/typegen/src/fmodata/downloadMetadata.ts +++ b/packages/typegen/src/fmodata/downloadMetadata.ts @@ -1,6 +1,4 @@ import { FMServerConnection } from "@proofkit/fmodata"; -import { writeFile, mkdir } from "node:fs/promises"; -import { resolve, dirname } from "node:path"; import type { z } from "zod/v4"; import type { typegenConfigSingle } from "../types"; import { getEnvValues, validateEnvValues } from "../getEnvValues"; @@ -11,16 +9,22 @@ type FmodataConfig = Extract< >; /** - * Downloads OData metadata from a FileMaker server and saves it to a file. + * Downloads OData metadata for a single table from a FileMaker server. 
* - * @param config - The fmodata config object containing connection details - * @param metadataPath - The path where the metadata file should be saved - * @returns Promise that resolves when the metadata has been downloaded and saved + * @param params - Object containing function parameters + * @param params.config - The fmodata config object containing connection details + * @param params.tableName - The name of the table to download metadata for + * @returns Promise that resolves with the XML metadata string */ -export async function downloadMetadata( - config: FmodataConfig, - metadataPath: string, -): Promise { +export async function downloadTableMetadata({ + config, + tableName, + reduceAnnotations = false, +}: { + config: FmodataConfig; + tableName: string; + reduceAnnotations?: boolean; +}): Promise { const envValues = getEnvValues(config.envNames); const validationResult = validateEnvValues(envValues, config.envNames); @@ -42,14 +46,12 @@ export async function downloadMetadata( const database = connection.database(db); - // Download metadata in XML format - const fullMetadata = await database.getMetadata({ format: "xml" }); - - // Resolve output path (ensure directory exists) - const resolvedPath = resolve(metadataPath); - const outputDir = dirname(resolvedPath); - await mkdir(outputDir, { recursive: true }); - await writeFile(resolvedPath, fullMetadata, "utf-8"); + // Download metadata for the specific table in XML format + const tableMetadata = await database.getMetadata({ + tableName, + format: "xml", + reduceAnnotations, + }); - return; + return tableMetadata; } diff --git a/packages/typegen/src/fmodata/generateODataTypes.ts b/packages/typegen/src/fmodata/generateODataTypes.ts index 8ecc2765..7b86534b 100644 --- a/packages/typegen/src/fmodata/generateODataTypes.ts +++ b/packages/typegen/src/fmodata/generateODataTypes.ts @@ -1,8 +1,16 @@ import { writeFile, mkdir } from "node:fs/promises"; import { join, resolve } from "node:path"; import fs from "fs-extra"; +import { + Project, + SourceFile, + CallExpression, + ObjectLiteralExpression, + PropertyAssignment, +} from "ts-morph"; import type { ParsedMetadata, EntityType } from "./parseMetadata"; import { FmodataConfig } from "../types"; +import { formatAndSaveSourceFiles } from "../formatting"; interface GeneratedTO { varName: string; @@ -10,6 +18,9 @@ interface GeneratedTO { navigation: string[]; usedFieldBuilders: Set; needsZod: boolean; + entitySetName: string; + entityType: EntityType; + tableOverride?: NonNullable[number]; } /** @@ -108,6 +119,8 @@ function generateTableOccurrence( entityType: EntityType, entityTypeToSetMap: Map, tableOverride?: NonNullable[number], + existingFields?: ParsedTableOccurrence, + alwaysOverrideFieldNames?: boolean, ): GeneratedTO { const fmtId = entityType["@TableID"]; const keyFields = entityType.$Key || []; @@ -208,6 +221,34 @@ function generateTableOccurrence( const [fieldName, metadata] = entry; const fieldOverride = fieldOverridesMap.get(fieldName); + // Try to match existing field: first by entity ID, then by name + let matchedExistingField: ParsedField | null = null; + let finalFieldName = fieldName; + + if (existingFields) { + // Try matching by entity ID first + if (metadata["@FieldID"]) { + matchedExistingField = matchFieldByEntityId( + existingFields.fieldsByEntityId, + metadata["@FieldID"], + ); + if (matchedExistingField) { + // Use existing field name unless alwaysOverrideFieldNames is true + if (!alwaysOverrideFieldNames) { + finalFieldName = matchedExistingField.fieldName; + } + 
} + } + + // If no match by entity ID, try matching by name + if (!matchedExistingField) { + matchedExistingField = matchFieldByName( + existingFields.fields, + fieldName, + ); + } + } + // Apply typeOverride if provided, otherwise use inferred type const fieldBuilder = mapODataTypeToFieldBuilder( metadata.$Type, @@ -247,9 +288,9 @@ function generateTableOccurrence( const isReadOnly = readOnlyFields.includes(fieldName); const isLastField = i === validFieldEntries.length - 1; - let line = ` ${JSON.stringify(fieldName)}: ${fieldBuilder}`; + let line = ` ${JSON.stringify(finalFieldName)}: ${fieldBuilder}`; - // Chain methods: primaryKey, readOnly, notNull, entityId + // Chain methods: primaryKey, readOnly, notNull, entityId, comment if (isKeyField) { line += ".primaryKey()"; } @@ -264,6 +305,14 @@ function generateTableOccurrence( if (metadata["@FieldID"]) { line += `.entityId(${JSON.stringify(metadata["@FieldID"])})`; } + if (metadata["@FMComment"]) { + line += `.comment(${JSON.stringify(metadata["@FMComment"])})`; + } + + // Preserve user customizations from existing field + if (matchedExistingField) { + line = preserveUserCustomizations(matchedExistingField, line); + } // Add comma if not the last field if (!isLastField) { @@ -287,6 +336,9 @@ function generateTableOccurrence( if (fmtId) { optionsParts.push(`entityId: ${JSON.stringify(fmtId)}`); } + if (entityType["@FMComment"]) { + optionsParts.push(`comment: ${JSON.stringify(entityType["@FMComment"])}`); + } // Always include navigationPaths, even if empty const navPaths = navigationTargets.map((n) => JSON.stringify(n)).join(", "); optionsParts.push(`navigationPaths: [${navPaths}]`); @@ -306,6 +358,9 @@ ${fieldLines.join("\n")} navigation: navigationTargets, usedFieldBuilders, needsZod, + entitySetName, + entityType, + tableOverride, }; } @@ -358,6 +413,452 @@ function sanitizeFileName(name: string): string { return /^\d/.test(sanitized) ? `_${sanitized}` : sanitized; } +/** + * Represents a parsed field from an existing file + */ +interface ParsedField { + fieldName: string; + entityId?: string; + fullChainText: string; + userCustomizations: string; // Everything after the base chain (e.g., .inputValidator(...).outputValidator(...)) +} + +/** + * Represents a parsed table occurrence from an existing file + */ +interface ParsedTableOccurrence { + varName: string; + entitySetName: string; + tableEntityId?: string; + fields: Map; // keyed by field name + fieldsByEntityId: Map; // keyed by entity ID + existingImports: string[]; // All existing import statements as strings +} + +/** + * Extracts user customizations (like .inputValidator() and .outputValidator()) from a method chain + */ +function extractUserCustomizations( + chainText: string, + baseChainEnd: number, +): string { + // We want to preserve user-added chained calls even if they were placed: + // - before a standard method (e.g. textField().inputValidator(...).entityId(...)) + // - on fields that have no standard methods at all (possible when reduceMetadata is true) + // + // `baseChainEnd` should point to the end of the generator-owned "base builder chain" + // (e.g. `textField()` or `numberField().outputValidator(z.coerce.boolean())`). + // Everything after that may contain standard methods *and* user customizations. + // We extract only the non-standard chained calls and return them as a string + // that can be appended to the regenerated chain. 
+ + const standardMethodNames = new Set([ + "primaryKey", + "readOnly", + "notNull", + "entityId", + "comment", + ]); + + const start = Math.max(0, Math.min(baseChainEnd, chainText.length)); + const tail = chainText.slice(start); + if (!tail.includes(".")) { + return ""; + } + + function isIdentChar(c: string): boolean { + return /[A-Za-z0-9_$]/.test(c); + } + + function skipWhitespace(s: string, idx: number): number { + while (idx < s.length && /\s/.test(s[idx] ?? "")) idx++; + return idx; + } + + // Best-effort scanning helpers: handle nested parentheses and quoted strings. + function scanString(s: string, idx: number, quote: string): number { + // idx points at opening quote + idx++; + while (idx < s.length) { + const ch = s[idx]; + if (ch === "\\") { + idx += 2; + continue; + } + if (ch === quote) { + return idx + 1; + } + idx++; + } + return idx; + } + + function scanTemplateLiteral(s: string, idx: number): number { + // idx points at opening backtick + idx++; + while (idx < s.length) { + const ch = s[idx]; + if (ch === "\\") { + idx += 2; + continue; + } + if (ch === "`") { + return idx + 1; + } + if (ch === "$" && s[idx + 1] === "{") { + idx += 2; // skip ${ + let braceDepth = 1; + while (idx < s.length && braceDepth > 0) { + const c = s[idx]; + if (c === "'" || c === '"') { + idx = scanString(s, idx, c); + continue; + } + if (c === "`") { + idx = scanTemplateLiteral(s, idx); + continue; + } + if (c === "{") braceDepth++; + else if (c === "}") braceDepth--; + idx++; + } + continue; + } + idx++; + } + return idx; + } + + function scanAngleBrackets(s: string, idx: number): number { + // idx points at '<' + let depth = 0; + while (idx < s.length) { + const ch = s[idx]; + if (ch === "'" || ch === '"') { + idx = scanString(s, idx, ch); + continue; + } + if (ch === "`") { + idx = scanTemplateLiteral(s, idx); + continue; + } + if (ch === "<") depth++; + if (ch === ">") { + depth--; + idx++; + if (depth === 0) return idx; + continue; + } + idx++; + } + return idx; + } + + function scanParens(s: string, idx: number): number { + // idx points at '(' + let depth = 0; + while (idx < s.length) { + const ch = s[idx]; + if (ch === "'" || ch === '"') { + idx = scanString(s, idx, ch); + continue; + } + if (ch === "`") { + idx = scanTemplateLiteral(s, idx); + continue; + } + if (ch === "(") depth++; + if (ch === ")") { + depth--; + idx++; + if (depth === 0) return idx; + continue; + } + idx++; + } + return idx; + } + + const keptSegments: string[] = []; + let i = 0; + while (i < tail.length) { + const dot = tail.indexOf(".", i); + if (dot === -1) break; + + let j = dot + 1; + if (j >= tail.length) break; + if (!isIdentChar(tail[j] ?? "")) { + i = j; + continue; + } + + const nameStart = j; + while (j < tail.length && isIdentChar(tail[j] ?? "")) j++; + const methodName = tail.slice(nameStart, j); + + j = skipWhitespace(tail, j); + + // Optional generic type args: .foo<...>(...) + if (tail[j] === "<") { + j = scanAngleBrackets(tail, j); + j = skipWhitespace(tail, j); + } + + // Method call args: (...) + if (tail[j] === "(") { + const end = scanParens(tail, j); + const segment = tail.slice(dot, end); + if (!standardMethodNames.has(methodName)) { + keptSegments.push(segment); + } + i = end; + continue; + } + + // Property access or malformed chain segment: keep it if it's not standard. + // Capture up to the next '.' or end. + const nextDot = tail.indexOf(".", j); + const end = nextDot === -1 ? 
tail.length : nextDot; + const segment = tail.slice(dot, end); + if (!standardMethodNames.has(methodName)) { + keptSegments.push(segment); + } + i = end; + } + + return keptSegments.join(""); +} + +/** + * Parses an existing table occurrence file and extracts field definitions + */ +function parseExistingTableFile( + sourceFile: SourceFile, +): ParsedTableOccurrence | null { + // Find the fmTableOccurrence call by searching all call expressions + let callExpr: CallExpression | null = null; + + sourceFile.forEachDescendant((node) => { + if (node.getKindName() === "CallExpression") { + const expr = node as CallExpression; + const expression = expr.getExpression(); + if ( + expression.getKindName() === "Identifier" && + expression.getText() === "fmTableOccurrence" + ) { + callExpr = expr; + } + } + }); + + if (!callExpr) { + return null; + } + + // TypeScript needs explicit type here + const call: CallExpression = callExpr; + + // Extract variable name from the containing variable declaration + let varName = ""; + let parent = call.getParent(); + while (parent) { + if (parent.getKindName() === "VariableDeclaration") { + // TypeScript needs explicit cast here + const varDecl = parent as any; + varName = varDecl.getName(); + break; + } + parent = parent.getParent() ?? undefined; + } + + if (!varName) { + // Try to find from export declaration + const exportDecl = sourceFile.getExportDeclarations().find((decl) => { + const namedExports = decl.getNamedExports(); + return namedExports.length > 0; + }); + if (exportDecl) { + const namedExports = exportDecl.getNamedExports(); + if (namedExports.length > 0) { + const firstExport = namedExports[0]; + if (firstExport) { + varName = firstExport.getName(); + } + } + } + } + + // Get arguments to fmTableOccurrence + const args = call.getArguments(); + if (args.length < 2) { + return null; + } + + const entitySetNameArg = args[0]; + if (!entitySetNameArg) { + return null; + } + const entitySetName = entitySetNameArg.getText().replace(/['"]/g, ""); + + // Get the fields object (second argument) + const fieldsArg = args[1]; + if (!fieldsArg || fieldsArg.getKindName() !== "ObjectLiteralExpression") { + return null; + } + const fieldsObject = fieldsArg as ObjectLiteralExpression; + + // Get options object (third argument, if present) + let tableEntityId: string | undefined; + if (args.length >= 3) { + const optionsArg = args[2]; + if (optionsArg && optionsArg.getKindName() === "ObjectLiteralExpression") { + const optionsObject = optionsArg as ObjectLiteralExpression; + const entityIdProp = optionsObject.getProperty("entityId"); + if (entityIdProp && entityIdProp.getKindName() === "PropertyAssignment") { + const value = (entityIdProp as PropertyAssignment) + .getInitializer() + ?.getText(); + if (value) { + tableEntityId = value.replace(/['"]/g, ""); + } + } + } + } + + // Extract existing imports + const existingImports: string[] = []; + const importDeclarations = sourceFile.getImportDeclarations(); + for (const importDecl of importDeclarations) { + const importText = importDecl.getFullText(); + if (importText.trim()) { + existingImports.push(importText.trim()); + } + } + + // Parse each field + const fields = new Map(); + const fieldsByEntityId = new Map(); + + const properties = fieldsObject.getProperties(); + for (const prop of properties) { + if (prop.getKindName() !== "PropertyAssignment") { + continue; + } + const fieldProp = prop as PropertyAssignment; + const fieldNameNode = fieldProp.getNameNode(); + const fieldName = 
fieldNameNode.getText().replace(/['"]/g, ""); + + const initializer = fieldProp.getInitializer(); + if (!initializer) { + continue; + } + + const chainText = initializer.getText(); + + // Extract entity ID from .entityId() call + let entityId: string | undefined; + const entityIdMatch = chainText.match(/\.entityId\(['"]([^'"]+)['"]\)/); + if (entityIdMatch) { + entityId = entityIdMatch[1]; + } + + // Extract user customizations (everything after standard methods) + const userCustomizations = extractUserCustomizations(chainText, 0); + + const parsedField: ParsedField = { + fieldName, + entityId, + fullChainText: chainText, + userCustomizations, + }; + + fields.set(fieldName, parsedField); + if (entityId) { + fieldsByEntityId.set(entityId, parsedField); + } + } + + return { + varName, + entitySetName, + tableEntityId, + fields, + fieldsByEntityId, + existingImports, + }; +} + +/** + * Matches a field from metadata to an existing field by entity ID + */ +function matchFieldByEntityId( + existingFields: Map, + metadataEntityId: string | undefined, +): ParsedField | null { + if (!metadataEntityId) { + return null; + } + return existingFields.get(metadataEntityId) || null; +} + +/** + * Matches a field from metadata to an existing field by name + */ +function matchFieldByName( + existingFields: Map, + fieldName: string, +): ParsedField | null { + return existingFields.get(fieldName) || null; +} + +/** + * Preserves user customizations from an existing field chain + */ +function preserveUserCustomizations( + existingField: ParsedField | undefined, + newChain: string, +): string { + if (!existingField) { + return newChain; + } + + const standardMethods = [ + ".primaryKey()", + ".readOnly()", + ".notNull()", + ".entityId(", + ".comment(", + ]; + + // Determine where the generator-owned base builder chain ends in the new chain + // (before any standard methods added by the generator). + let baseChainEnd = newChain.length; + for (const method of standardMethods) { + const idx = newChain.indexOf(method); + if (idx !== -1 && idx < baseChainEnd) { + baseChainEnd = idx; + } + } + + const baseBuilderPrefix = newChain.slice(0, baseChainEnd); + const existingChainText = existingField.fullChainText; + const existingBaseEnd = existingChainText.startsWith(baseBuilderPrefix) + ? baseBuilderPrefix.length + : 0; + + const userCustomizations = extractUserCustomizations( + existingChainText, + existingBaseEnd, + ); + + if (!userCustomizations) { + return newChain; + } + + // Append extracted user customizations to the regenerated chain + return newChain + userCustomizations; +} + /** * Generates TypeScript table occurrence files from parsed OData metadata. * @@ -367,10 +868,17 @@ function sanitizeFileName(name: string): string { */ export async function generateODataTypes( metadata: ParsedMetadata, - config: FmodataConfig, + config: FmodataConfig & { + alwaysOverrideFieldNames?: boolean; + }, ): Promise { const { entityTypes, entitySets } = metadata; - const { path, clearOldFiles = true, tables } = config; + const { + path, + clearOldFiles = true, + tables, + alwaysOverrideFieldNames = true, + } = config; const outputPath = path ?? 
"schema"; // Build a map from entity type name to entity set name @@ -379,6 +887,16 @@ export async function generateODataTypes( entityTypeToSetMap.set(entitySet.EntityType, entitySetName); } + // Build a set of allowed table names from config + const allowedTableNames = new Set(); + if (tables) { + for (const tableOverride of tables) { + if (tableOverride?.tableName) { + allowedTableNames.add(tableOverride.tableName); + } + } + } + // Build a table overrides map from the array for easier lookup const tableOverridesMap = new Map< string, @@ -396,23 +914,36 @@ export async function generateODataTypes( const generatedTOs: GeneratedTO[] = []; for (const [entitySetName, entitySet] of entitySets.entries()) { - // Get table override config if it exists - const tableOverride = tableOverridesMap.get(entitySetName); - - // Skip excluded tables - if (tableOverride?.exclude === true) { + // Only generate types for tables specified in config + if (allowedTableNames.size > 0 && !allowedTableNames.has(entitySetName)) { continue; } + // Get table override config if it exists + const tableOverride = tableOverridesMap.get(entitySetName); + const entityType = entityTypes.get(entitySet.EntityType); if (entityType) { + // Determine alwaysOverrideFieldNames: table-level override takes precedence + const tableAlwaysOverrideFieldNames = + tableOverride?.alwaysOverrideFieldNames ?? alwaysOverrideFieldNames; + + // First generate without existing fields to get the structure + // We'll regenerate with existing fields later if the file exists const generated = generateTableOccurrence( entitySetName, entityType, entityTypeToSetMap, tableOverride, + undefined, + tableAlwaysOverrideFieldNames, ); - generatedTOs.push(generated); + generatedTOs.push({ + ...generated, + entitySetName, + entityType, + tableOverride, + }); } } @@ -425,6 +956,9 @@ export async function generateODataTypes( fs.emptyDirSync(resolvedOutputPath); } + // Create ts-morph project for file manipulation + const project = new Project({}); + // Generate one file per table occurrence const exportStatements: string[] = []; @@ -432,25 +966,345 @@ export async function generateODataTypes( const fileName = `${sanitizeFileName(generated.varName)}.ts`; const filePath = join(resolvedOutputPath, fileName); - // Generate imports based on what's actually used in this file - const imports = generateImports( - generated.usedFieldBuilders, - generated.needsZod, + // Check if file exists and parse it + let existingFields: ParsedTableOccurrence | undefined; + if (fs.existsSync(filePath) && !clearOldFiles) { + try { + const existingSourceFile = project.addSourceFileAtPath(filePath); + const parsed = parseExistingTableFile(existingSourceFile); + if (parsed) { + existingFields = parsed; + } + } catch (error) { + // If parsing fails, continue without existing fields + console.warn(`Failed to parse existing file ${filePath}:`, error); + } + } + + // Determine alwaysOverrideFieldNames: table-level override takes precedence + const tableAlwaysOverrideFieldNames = + generated.tableOverride?.alwaysOverrideFieldNames ?? + alwaysOverrideFieldNames; + + // Regenerate with existing fields merged in if file exists + const regenerated = existingFields + ? 
generateTableOccurrence( + generated.entitySetName, + generated.entityType, + entityTypeToSetMap, + generated.tableOverride, + existingFields, + tableAlwaysOverrideFieldNames, + ) + : generated; + + // Track removed fields (fields in existing but not in metadata) + const removedFields: ParsedField[] = []; + if (existingFields) { + for (const existingField of existingFields.fields.values()) { + // Check if this field is still in metadata + const stillExists = Array.from( + generated.entityType.Properties.keys(), + ).some((metaFieldName) => { + const metaField = generated.entityType.Properties.get(metaFieldName); + if (!metaField) return false; + + // Match by entity ID or name + if ( + existingField.entityId && + metaField["@FieldID"] === existingField.entityId + ) { + return true; + } + if (metaFieldName === existingField.fieldName) { + return true; + } + return false; + }); + + if (!stillExists) { + removedFields.push(existingField); + } + } + } + + // Generate required imports based on what's actually used in this file + const requiredImports = generateImports( + regenerated.usedFieldBuilders, + regenerated.needsZod, ); - const fileContent = `${imports} + // Parse import statements to extract module and named imports + function parseImport(importText: string): { + module: string; + namedImports: string[]; // Base names only (for comparison) + fullNamedImports: string[]; // Full specifiers including aliases (e.g., "x as y") + fullText: string; + } | null { + const trimmed = importText.trim(); + if (!trimmed.startsWith("import")) { + return null; + } -${generated.code} -`; + // Extract module specifier using regex + const moduleMatch = trimmed.match(/from\s+['"]([^'"]+)['"]/); + if (!moduleMatch || !moduleMatch[1]) { + return null; + } + const module = moduleMatch[1]; + + // Extract named imports + const namedImports: string[] = []; // Base names for comparison + const fullNamedImports: string[] = []; // Full specifiers with aliases preserved + const namedMatch = trimmed.match(/\{([^}]+)\}/); + if (namedMatch && namedMatch[1]) { + const importsList = namedMatch[1]; + // Split by comma and clean up + importsList.split(",").forEach((imp) => { + const cleaned = imp.trim(); + if (cleaned) { + // Preserve the full import specifier (including alias) + fullNamedImports.push(cleaned); + + // Extract base name for comparison (e.g., "x as y" -> "x") + const aliasMatch = cleaned.match(/^(\w+)(?:\s+as\s+\w+)?$/); + if (aliasMatch && aliasMatch[1]) { + namedImports.push(aliasMatch[1]); + } else { + namedImports.push(cleaned); + } + } + }); + } + + return { module, namedImports, fullNamedImports, fullText: trimmed }; + } - await writeFile(filePath, fileContent, "utf-8"); + // If file exists, preserve existing imports and merge with required ones + let finalImports = requiredImports; + if (existingFields && existingFields.existingImports.length > 0) { + // Parse all existing imports by module + const existingImportsByModule = new Map< + string, + { + namedImports: Set; // Base names for comparison + fullNamedImports: Map; // Map base name -> full specifier (preserves aliases) + fullText: string; + } + >(); + + for (const existingImport of existingFields.existingImports) { + const parsed = parseImport(existingImport); + if (parsed) { + const existing = existingImportsByModule.get(parsed.module); + if (existing) { + // Merge named imports from duplicate imports + parsed.namedImports.forEach((imp) => + existing.namedImports.add(imp), + ); + // Preserve full import specifiers (with aliases) + 
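// Editor's note (expected shape of parseImport defined just above; a sketch,
// not a test): given an aliased import line, the parser returns both the base
// names (for comparison) and the full specifiers (so aliases survive), e.g.
//
//   parseImport(`import { textField as tf, fmTableOccurrence } from "@proofkit/fmdapi";`)
//   // => {
//   //   module: "@proofkit/fmdapi",
//   //   namedImports: ["textField", "fmTableOccurrence"],
//   //   fullNamedImports: ["textField as tf", "fmTableOccurrence"],
//   //   fullText: 'import { textField as tf, fmTableOccurrence } from "@proofkit/fmdapi";',
//   // }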
parsed.fullNamedImports.forEach((fullSpec) => { + const baseName = + fullSpec.match(/^(\w+)(?:\s+as\s+\w+)?$/)?.[1] || fullSpec; + existing.fullNamedImports.set(baseName, fullSpec); + }); + } else { + const fullNamedImportsMap = new Map(); + parsed.fullNamedImports.forEach((fullSpec) => { + const baseName = + fullSpec.match(/^(\w+)(?:\s+as\s+\w+)?$/)?.[1] || fullSpec; + fullNamedImportsMap.set(baseName, fullSpec); + }); + existingImportsByModule.set(parsed.module, { + namedImports: new Set(parsed.namedImports), + fullNamedImports: fullNamedImportsMap, + fullText: parsed.fullText, + }); + } + } + } + + // Parse required imports + const requiredImportLines = requiredImports + .split("\n") + .filter((line) => line.trim()); + const requiredImportsByModule = new Map>(); + + for (const requiredLine of requiredImportLines) { + const parsed = parseImport(requiredLine); + if (parsed) { + const existing = requiredImportsByModule.get(parsed.module); + if (existing) { + parsed.namedImports.forEach((imp) => existing.add(imp)); + } else { + requiredImportsByModule.set( + parsed.module, + new Set(parsed.namedImports), + ); + } + } + } + + // Build final imports: preserve existing, update if needed, add missing + const finalImportLines: string[] = []; + const handledModules = new Set(); + const processedModules = new Set(); + + // Process existing imports - deduplicate by module + for (const existingImport of existingFields.existingImports) { + const parsed = parseImport(existingImport); + if (parsed && parsed.module) { + // Skip if we've already processed this module (deduplicate) + if (processedModules.has(parsed.module)) { + continue; + } + processedModules.add(parsed.module); + + // Use the merged named imports from existingImportsByModule + const existing = existingImportsByModule.get(parsed.module); + const allExistingImports = existing + ? 
Array.from(existing.namedImports) + : parsed.namedImports; + + const required = requiredImportsByModule.get(parsed.module); + if (required) { + // Check if we need to add any missing named imports + const missingImports = Array.from(required).filter( + (imp) => !allExistingImports.includes(imp), + ); + if (missingImports.length > 0) { + // Build import list: use preserved full specifiers (with aliases) for existing, + // and base names for new required imports + const importSpecs: string[] = []; + + // Add existing imports using their preserved full specifiers (with aliases) + if (existing) { + allExistingImports.forEach((baseName) => { + const fullSpec = existing.fullNamedImports.get(baseName); + if (fullSpec) { + importSpecs.push(fullSpec); + } else { + importSpecs.push(baseName); + } + }); + } else { + // Fallback to parsed full named imports + parsed.fullNamedImports.forEach((fullSpec) => { + importSpecs.push(fullSpec); + }); + } + + // Add missing required imports (without aliases) + importSpecs.push(...missingImports); + + // Sort imports (but preserve aliases) + importSpecs.sort(); + + finalImportLines.push( + `import { ${importSpecs.join(", ")} } from "${parsed.module}";`, + ); + } else { + // Keep existing import format with preserved aliases + const importSpecs: string[] = []; + if (existing) { + allExistingImports.forEach((baseName) => { + const fullSpec = existing.fullNamedImports.get(baseName); + if (fullSpec) { + importSpecs.push(fullSpec); + } else { + importSpecs.push(baseName); + } + }); + } else { + parsed.fullNamedImports.forEach((fullSpec) => { + importSpecs.push(fullSpec); + }); + } + importSpecs.sort(); + finalImportLines.push( + `import { ${importSpecs.join(", ")} } from "${parsed.module}";`, + ); + } + handledModules.add(parsed.module); + requiredImportsByModule.delete(parsed.module); + } else { + // Keep existing import (not in required imports - user added it) + // Preserve aliases from existing imports + const importSpecs: string[] = []; + if (existing) { + allExistingImports.forEach((baseName) => { + const fullSpec = existing.fullNamedImports.get(baseName); + if (fullSpec) { + importSpecs.push(fullSpec); + } else { + importSpecs.push(baseName); + } + }); + } else { + parsed.fullNamedImports.forEach((fullSpec) => { + importSpecs.push(fullSpec); + }); + } + importSpecs.sort(); + finalImportLines.push( + `import { ${importSpecs.join(", ")} } from "${parsed.module}";`, + ); + } + } else { + // Keep non-import lines as-is (comments, etc.) + finalImportLines.push(existingImport); + } + } + + // Add any required imports that don't exist yet + for (const [module, namedImports] of requiredImportsByModule.entries()) { + if (module && !handledModules.has(module)) { + const importsList = Array.from(namedImports).sort().join(", "); + if (importsList) { + finalImportLines.push( + `import { ${importsList} } from "${module}";`, + ); + } + } + } + + finalImports = finalImportLines.join("\n") + "\n"; + } + + // Build file content with removed fields commented out + let fileContent = finalImports + "\n"; + + if (removedFields.length > 0) { + fileContent += + "// ============================================================================\n"; + fileContent += "// Removed fields (not found in metadata)\n"; + fileContent += + "// ============================================================================\n"; + for (const removedField of removedFields) { + const matchInfo = removedField.entityId + ? 
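// Editor's note (illustrative outcome of the import-merging logic above; the
// module and import names mirror the test fixtures): when an existing aliased
// import overlaps with a newly required one, the merge keeps the alias and
// only appends what is missing, e.g.
//
//   existing:  import { textField as tf } from "@proofkit/fmdapi";
//   required:  import { textField, fmTableOccurrence } from "@proofkit/fmdapi";
//   merged:    import { fmTableOccurrence, textField as tf } from "@proofkit/fmdapi";
//
// (specifiers are sorted alphabetically before being re-emitted).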
` (was matched by entityId ${removedField.entityId})` + : ""; + fileContent += `// @removed: Field not found in metadata${matchInfo}\n`; + fileContent += `// ${JSON.stringify(removedField.fieldName)}: ${removedField.fullChainText},\n\n`; + } + } + + fileContent += regenerated.code; + + // Create or update source file + project.createSourceFile(filePath, fileContent, { + overwrite: true, + }); // Collect export statement for index file exportStatements.push( - `export { ${generated.varName} } from "./${sanitizeFileName(generated.varName)}";`, + `export { ${regenerated.varName} } from "./${sanitizeFileName(regenerated.varName)}";`, ); } + // Format and save all files + await formatAndSaveSourceFiles(project); + // Generate index.ts file that exports all table occurrences const indexContent = `// ============================================================================ // Auto-generated index file - exports all table occurrences diff --git a/packages/typegen/src/fmodata/index.ts b/packages/typegen/src/fmodata/index.ts index a8ee11ea..9376351b 100644 --- a/packages/typegen/src/fmodata/index.ts +++ b/packages/typegen/src/fmodata/index.ts @@ -1,4 +1,4 @@ -export { downloadMetadata } from "./downloadMetadata"; +export { downloadTableMetadata } from "./downloadMetadata"; export { parseMetadata, parseMetadataFromFile, diff --git a/packages/typegen/src/fmodata/parseMetadata.ts b/packages/typegen/src/fmodata/parseMetadata.ts index a715b75d..f6d1ef65 100644 --- a/packages/typegen/src/fmodata/parseMetadata.ts +++ b/packages/typegen/src/fmodata/parseMetadata.ts @@ -12,6 +12,7 @@ export interface FieldMetadata { "@AutoGenerated"?: boolean; "@Index"?: boolean; "@VersionID"?: boolean; + "@FMComment"?: string; } export interface NavigationProperty { @@ -22,6 +23,7 @@ export interface NavigationProperty { export interface EntityType { Name: string; "@TableID": string; + "@FMComment"?: string; $Key?: string[]; Properties: Map; NavigationProperties: NavigationProperty[]; @@ -91,14 +93,16 @@ export async function parseMetadata( const entityTypeName = entityTypeEl["@_Name"] || entityTypeEl.Name; if (!entityTypeName) continue; - // Get TableID from Annotation + // Get TableID and FMComment from Annotations let tableId = ""; + let tableComment: string | undefined; const annotations = ensureArray(entityTypeEl.Annotation); for (const ann of annotations) { const term = ann["@_Term"] || ann.Term; if (term === "com.filemaker.odata.TableID") { tableId = ann["@_String"] || ann.String || ""; - break; + } else if (term === "com.filemaker.odata.FMComment") { + tableComment = ann["@_String"] || ann.String || undefined; } } @@ -136,6 +140,7 @@ export async function parseMetadata( let hasIndex = false; let isVersionId = false; let permissions: string | undefined; + let fieldComment: string | undefined; const propAnnotations = ensureArray(propEl.Annotation); for (const ann of propAnnotations) { @@ -152,6 +157,8 @@ export async function parseMetadata( hasIndex = ann["@_Bool"] === "true" || ann.Bool === "true"; } else if (term === "com.filemaker.odata.VersionID") { isVersionId = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.FMComment") { + fieldComment = ann["@_String"] || ann.String || undefined; } else if (term === "Org.OData.Core.V1.Permissions") { const enumMember = ann.EnumMember; if (enumMember) { @@ -174,6 +181,7 @@ export async function parseMetadata( "@AutoGenerated": isAutoGenerated, "@Index": hasIndex, "@VersionID": isVersionId, + "@FMComment": fieldComment, }); } @@ 
-196,6 +204,7 @@ export async function parseMetadata( entityTypes.set(entityTypeName, { Name: entityTypeName, "@TableID": tableId, + "@FMComment": tableComment, $Key: keyFields, Properties: properties, NavigationProperties: navigationProperties, diff --git a/packages/typegen/src/fmodata/typegen.ts b/packages/typegen/src/fmodata/typegen.ts index f9807271..29d469e1 100644 --- a/packages/typegen/src/fmodata/typegen.ts +++ b/packages/typegen/src/fmodata/typegen.ts @@ -1,23 +1,68 @@ import { FmodataConfig } from "../types"; -import { downloadMetadata as downloadMetadataFn } from "./downloadMetadata"; -import fs from "fs-extra"; -import { parseMetadata } from "./parseMetadata"; +import { downloadTableMetadata } from "./downloadMetadata"; +import { parseMetadata, type ParsedMetadata } from "./parseMetadata"; import { generateODataTypes } from "./generateODataTypes"; export async function generateODataTablesSingle(config: FmodataConfig) { - const { downloadMetadata, metadataPath } = config; + const { tables, reduceMetadata = false } = config; - if (downloadMetadata) { - await downloadMetadataFn(config, metadataPath); + if (!tables || tables.length === 0) { + throw new Error("No tables specified in config"); } - const metadataExists = await fs.pathExists(metadataPath); - if (!metadataExists) { - throw new Error(`Metadata file not found at ${metadataPath}`); + // Download and parse metadata for each table + const allEntityTypes = new Map< + string, + ParsedMetadata["entityTypes"] extends Map ? V : never + >(); + const allEntitySets = new Map< + string, + ParsedMetadata["entitySets"] extends Map ? V : never + >(); + let namespace = ""; + + for (const tableConfig of tables) { + const tableName = tableConfig.tableName; + + // Download metadata for this table + const tableMetadataXml = await downloadTableMetadata({ + config, + tableName, + reduceAnnotations: tableConfig.reduceMetadata ?? 
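// Editor's note (illustrative config for the per-table download loop in this
// function; table names and option values are invented): under the new opt-in
// `tables` model, a typegen config might look roughly like
//
//   const exampleConfig: FmodataConfig = {
//     type: "fmodata",
//     path: "schema",
//     clearOldFiles: false,
//     reduceMetadata: false,
//     alwaysOverrideFieldNames: true,
//     tables: [
//       { tableName: "contacts" },
//       { tableName: "invoices", variableName: "invoicesTO", reduceMetadata: true },
//     ],
//   };
//   await generateODataTablesSingle(exampleConfig);
//
// Each entry triggers its own $metadata download, and the parsed results are
// merged before code generation.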
reduceMetadata, + }); + + // Parse the metadata + const parsedMetadata = await parseMetadata(tableMetadataXml); + + // Merge entity types + for (const [ + entityTypeName, + entityType, + ] of parsedMetadata.entityTypes.entries()) { + allEntityTypes.set(entityTypeName, entityType); + } + + // Merge entity sets + for (const [ + entitySetName, + entitySet, + ] of parsedMetadata.entitySets.entries()) { + allEntitySets.set(entitySetName, entitySet); + } + + // Use namespace from first table (should be the same for all) + if (!namespace) { + namespace = parsedMetadata.namespace; + } } - const metadata = await fs.readFile(metadataPath, "utf-8"); - const parsedMetadata = await parseMetadata(metadata); + // Combine all parsed metadata + const mergedMetadata: ParsedMetadata = { + entityTypes: allEntityTypes, + entitySets: allEntitySets, + namespace, + }; - await generateODataTypes(parsedMetadata, config); + // Generate types from merged metadata + await generateODataTypes(mergedMetadata, config); } diff --git a/packages/typegen/src/server/app.ts b/packages/typegen/src/server/app.ts index a27336eb..15b03c8f 100644 --- a/packages/typegen/src/server/app.ts +++ b/packages/typegen/src/server/app.ts @@ -14,7 +14,7 @@ import { import { ContentfulStatusCode } from "hono/utils/http-status"; import { generateTypedClients } from "../typegen"; import { FMServerConnection } from "@proofkit/fmodata"; -import { downloadMetadata, parseMetadata } from "../fmodata"; +import { downloadTableMetadata, parseMetadata } from "../fmodata"; export interface ApiContext { cwd: string; @@ -58,8 +58,8 @@ export function createApiApp(context: ApiContext) { // GET /api/config .get("/config", async (c) => { - const { configPath } = context; - const fullPath = path.resolve(context.cwd, configPath); + const { configPath, cwd } = context; + const fullPath = path.resolve(cwd, configPath); const exists = fs.existsSync(fullPath); @@ -67,6 +67,7 @@ export function createApiApp(context: ApiContext) { return c.json({ exists: false, path: configPath, + fullPath: fullPath, config: null, }); } @@ -79,6 +80,7 @@ export function createApiApp(context: ApiContext) { return c.json({ exists: true, path: configPath, + fullPath: fullPath, config: parsed.config, }); } catch (err) { @@ -313,21 +315,64 @@ export function createApiApp(context: ApiContext) { }, ) .post( - "/download-metadata", - zValidator("json", z.object({ config: typegenConfigSingle })), + "/table-metadata", + zValidator( + "json", + z.object({ + config: typegenConfigSingle, + tableName: z.string(), + }), + ), async (c) => { const input = c.req.valid("json"); const config = input.config; + const { tableName } = input; if (config.type !== "fmodata") { return c.json({ error: "Invalid config type" }, 400); } - const { metadataPath } = config; - await downloadMetadata(config, metadataPath); - return c.json({ success: true }); + const tableConfig = config.tables.find( + (t) => t.tableName === tableName, + ); + try { + // Download metadata for the specified table + const tableMetadataXml = await downloadTableMetadata({ + config: config, + tableName, + reduceAnnotations: tableConfig?.reduceMetadata ?? 
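// Editor's note (rough shape of the JSON returned by this handler once the
// parsed Maps are flattened below; table and field names are invented):
//
//   {
//     "parsedMetadata": {
//       "entityTypes": {
//         "NS.contacts": {
//           "Name": "contacts",
//           "@TableID": "T1",
//           "Properties": { "name": { "$Type": "Edm.String", "$Nullable": true } },
//           "NavigationProperties": []
//         }
//       },
//       "entitySets": { "contacts": { "Name": "contacts", "EntityType": "NS.contacts" } },
//       "namespace": "NS"
//     }
//   }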
false, + }); + // Parse the metadata + const parsedMetadata = await parseMetadata(tableMetadataXml); + // Convert Maps to objects for JSON serialization + // Also convert nested Maps (like Properties) to objects + const serializedMetadata = { + entityTypes: Object.fromEntries( + Array.from(parsedMetadata.entityTypes.entries()).map( + ([key, value]) => [ + key, + { + ...value, + Properties: Object.fromEntries(value.Properties), + }, + ], + ), + ), + entitySets: Object.fromEntries(parsedMetadata.entitySets), + namespace: parsedMetadata.namespace, + }; + return c.json({ parsedMetadata: serializedMetadata }); + } catch (err) { + return c.json( + { + error: + err instanceof Error ? err.message : "Failed to fetch metadata", + }, + 500, + ); + } }, ) .get( - "/parse-metadata", + "/list-tables", zValidator("query", z.object({ config: z.string() })), async (c) => { const input = c.req.valid("query"); @@ -341,27 +386,30 @@ export function createApiApp(context: ApiContext) { if (config.type !== "fmodata") { return c.json({ error: "Invalid config type" }, 400); } - const { metadataPath } = config; - const metadata = await fs.readFile(metadataPath, "utf-8"); - const parsedMetadata = await parseMetadata(metadata); - // Convert Maps to objects for JSON serialization - // Also convert nested Maps (like Properties) to objects - const serializedMetadata = { - entityTypes: Object.fromEntries( - Array.from(parsedMetadata.entityTypes.entries()).map( - ([key, value]) => [ - key, - { - ...value, - Properties: Object.fromEntries(value.Properties), - }, - ], - ), - ), - entitySets: Object.fromEntries(parsedMetadata.entitySets), - namespace: parsedMetadata.namespace, - }; - return c.json({ parsedMetadata: serializedMetadata }); + try { + const result = createOdataClientFromConfig(config); + if ("error" in result) { + return c.json( + { + error: result.error, + kind: result.kind, + suspectedField: result.suspectedField, + }, + result.statusCode as ContentfulStatusCode, + ); + } + const { db } = result; + const tableNames = await db.listTableNames(); + return c.json({ tables: tableNames }); + } catch (err) { + return c.json( + { + error: + err instanceof Error ? 
err.message : "Failed to list tables", + }, + 500, + ); + } }, ) // POST /api/test-connection diff --git a/packages/typegen/src/server/contract.ts b/packages/typegen/src/server/contract.ts deleted file mode 100644 index 3a2eef04..00000000 --- a/packages/typegen/src/server/contract.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { z } from "zod/v4"; -import { typegenConfigSingle, typegenConfig } from "../types"; -import type { ApiApp } from "./app"; - -// Re-export config types for convenience -export type SingleConfig = z.infer; -export type ConfigsArray = z.infer[]; - -// GET /api/config response -export const getConfigResponseSchema = z.object({ - exists: z.boolean(), - path: z.string(), - config: z - .union([z.array(typegenConfigSingle), typegenConfigSingle]) - .nullable(), -}); -export type GetConfigResponse = z.infer; - -// POST /api/config request -export const postConfigRequestSchema = z.union([ - z.array(typegenConfigSingle), - typegenConfigSingle, -]); -export type PostConfigRequest = z.infer; - -// POST /api/config response -export const postConfigResponseSchema = z.object({ - success: z.boolean(), - error: z.string().optional(), - issues: z - .array( - z.object({ - path: z.array(z.union([z.string(), z.number()])), - message: z.string(), - }), - ) - .optional(), -}); -export type PostConfigResponse = z.infer; - -// POST /api/run request (stub) -export const runTypegenRequestSchema = z.object({ - config: z - .union([z.array(typegenConfigSingle), typegenConfigSingle]) - .optional(), -}); -export type RunTypegenRequest = z.infer; - -// POST /api/run response (stub) -export const runTypegenResponseSchema = z.object({ - success: z.boolean(), - error: z.string().optional(), - message: z.string().optional(), -}); -export type RunTypegenResponse = z.infer; - -// GET /api/layouts response (stub) -export const getLayoutsResponseSchema = z.object({ - layouts: z.array( - z.object({ - layoutName: z.string(), - schemaName: z.string().optional(), - }), - ), -}); -export type GetLayoutsResponse = z.infer; - -// GET /api/env-names response -export const getEnvNamesResponseSchema = z.object({ - value: z.string().optional(), -}); -export type GetEnvNamesResponse = z.infer; - -// Re-export ApiApp type for client usage -export type { ApiApp }; diff --git a/packages/typegen/src/types.ts b/packages/typegen/src/types.ts index 2d21c91c..ee2fe244 100644 --- a/packages/typegen/src/types.ts +++ b/packages/typegen/src/types.ts @@ -88,60 +88,59 @@ const path = z .meta({ description: "The folder path to output the generated files" }); // Field-level override configuration -const fieldOverride = z - .object({ - // Field name to apply override to - fieldName: z.string().meta({ - description: "The field name this override applies to", - }), - // Exclude this field from generation - exclude: z.boolean().optional().meta({ - description: "If true, this field will be excluded from generation", - }), - // Override the inferred type from metadata - typeOverride: z - .enum([ - "text", // textField() - "number", // numberField() - "boolean", // numberField().outputValidator(z.coerce.boolean()) - "fmBooleanNumber", // Same as boolean, explicit FileMaker 0/1 pattern - "date", // dateField() - "timestamp", // timestampField() - "container", // containerField() - ]) - .optional() - .meta({ - description: - "Override the inferred field type from metadata. 
Options: text, number, boolean, fmBooleanNumber, date, timestamp, container", - }), - }) - .optional(); - -// Table-level override configuration -const tableOverride = z - .object({ - // Table name to apply override to - tableName: z.string().meta({ - description: - "The entity set name (table occurrence name) this override applies to", - }), - // Exclude entire table from generation - exclude: z.boolean().optional().meta({ - description: - "If true, this entire table will be excluded from generation", - }), - // Override the generated TypeScript variable name - // (original entity set name is still used for the path) - variableName: z.string().optional().meta({ +const fieldOverride = z.object({ + // Field name to apply override to + fieldName: z.string().meta({ + description: "The field name this override applies to", + }), + // Exclude this field from generation + exclude: z.boolean().optional().meta({ + description: "If true, this field will be excluded from generation", + }), + // Override the inferred type from metadata + typeOverride: z + .enum([ + "text", // textField() + "number", // numberField() + "boolean", // numberField().outputValidator(z.coerce.boolean()) + "fmBooleanNumber", // Same as boolean, explicit FileMaker 0/1 pattern + "date", // dateField() + "timestamp", // timestampField() + "container", // containerField() + ]) + .optional() + .meta({ description: - "Override the generated TypeScript variable name. The original entity set name is still used for the OData path.", - }), - // Field-specific overrides as an array - fields: z.array(fieldOverride).optional().meta({ - description: "Field-specific overrides as an array", + "Override the inferred field type from metadata. Options: text, number, boolean, fmBooleanNumber, date, timestamp, container", }), - }) - .optional(); +}); + +// Table-level configuration (opt-in model) +const tableConfig = z.object({ + // Table name to generate + tableName: z.string().meta({ + description: + "The entity set name (table occurrence name) to generate. This table will be included in metadata download and type generation.", + }), + // Override the generated TypeScript variable name + // (original entity set name is still used for the path) + variableName: z.string().optional().meta({ + description: + "Override the generated TypeScript variable name. The original entity set name is still used for the OData path.", + }), + // Field-specific overrides as an array + fields: z.array(fieldOverride).optional().meta({ + description: "Field-specific overrides as an array", + }), + reduceMetadata: z.boolean().optional().meta({ + description: + "If undefined, the top-level setting will be used. If true, reduced OData annotations will be requested from the server to reduce payload size. This will prevent comments, entity ids, and other properties from being generated.", + }), + alwaysOverrideFieldNames: z.boolean().optional().meta({ + description: + "If undefined, the top-level setting will be used. If true, field names will always be updated to match metadata, even when matching by entity ID. 
If false, existing field names are preserved when matching by entity ID.", + }), +}); const typegenConfigSingleBase = z.discriminatedUnion("type", [ z.object({ @@ -177,25 +176,23 @@ const typegenConfigSingleBase = z.discriminatedUnion("type", [ z.object({ type: z.literal("fmodata"), configName: z.string().optional(), - envNames, + envNames: z.optional(envNames), path, - metadataPath: z - .string() - .refine((val) => val.toLowerCase().endsWith(".xml"), { - message: "Metadata path must point to a file ending with .xml", - }) - .meta({ description: "Path to save the downloaded metadata XML file" }), - downloadMetadata: z.boolean().default(false).meta({ + reduceMetadata: z.boolean().optional().meta({ description: - "Allows the tool to automatically download the metadata from the server and save it to the metadataPath. Will be re-downloaded on each run. Otherwise, you must manually provide/update the XML file.", + "If true, reduced OData annotations will be requested from the server to reduce payload size. This will prevent comments, entity ids, and other properties from being generated.", }), clearOldFiles: z.boolean().default(false).optional().meta({ description: "If false, the path will not be cleared before the new files are written. Only the `client` and `generated` directories are cleared to allow for potential overrides to be kept.", }), - tables: z.array(tableOverride).optional().meta({ + alwaysOverrideFieldNames: z.boolean().default(true).optional().meta({ + description: + "If true (default), field names will always be updated to match metadata, even when matching by entity ID. If false, existing field names are preserved when matching by entity ID.", + }), + tables: z.array(tableConfig).default([]).meta({ description: - "Table-specific overrides as an array. Allows excluding tables/fields, renaming variables, and overriding field types.", + "Required array of tables to generate. Only the tables specified here will be downloaded and generated. Each table can have field-level overrides for excluding fields, renaming variables, and overriding field types.", }), }), ]); diff --git a/packages/typegen/tests/fmodata-preserve-customizations.test.ts b/packages/typegen/tests/fmodata-preserve-customizations.test.ts new file mode 100644 index 00000000..d1c12b8e --- /dev/null +++ b/packages/typegen/tests/fmodata-preserve-customizations.test.ts @@ -0,0 +1,187 @@ +import { describe, expect, it } from "vitest"; +import fs from "node:fs/promises"; +import path from "node:path"; +import os from "node:os"; +import { generateODataTypes } from "../src/fmodata/generateODataTypes"; +import type { ParsedMetadata } from "../src/fmodata/parseMetadata"; + +function makeMetadata({ + entitySetName, + entityTypeName, + fields, +}: { + entitySetName: string; + entityTypeName: string; + fields: Array<{ name: string; type: string; fieldId: string }>; +}): ParsedMetadata { + const entityTypes = new Map(); + const entitySets = new Map(); + + const properties = new Map(); + for (const f of fields) { + properties.set(f.name, { + $Type: f.type, + $Nullable: true, + "@FieldID": f.fieldId, + }); + } + + entityTypes.set(entityTypeName, { + Name: entityTypeName.split(".").at(-1) ?? 
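// Editor's note (sketch of the regenerated file the first test below expects;
// names come from the test fixture itself): after running generateODataTypes
// over an existing MyTable.ts, the field chain should come back roughly as
//
//   export const MyTable = fmTableOccurrence("MyTable", {
//     FieldA: textField().entityId("F1").inputValidator(z.string()),
//   });
//
// with the user-added .inputValidator(...) preserved after the generated calls.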
entityTypeName, + "@TableID": "T1", + Properties: properties, + NavigationProperties: [], + }); + + entitySets.set(entitySetName, { + Name: entitySetName, + EntityType: entityTypeName, + }); + + return { + entityTypes, + entitySets, + namespace: "NS", + }; +} + +describe("fmodata generateODataTypes preserves user customizations", () => { + it("preserves custom chained calls even when placed before standard methods", async () => { + const tmpDir = await fs.mkdtemp( + path.join(os.tmpdir(), "proofkit-fmodata-preserve-"), + ); + + try { + const entitySetName = "MyTable"; + const entityTypeName = "NS.MyTable"; + const metadata = makeMetadata({ + entitySetName, + entityTypeName, + fields: [{ name: "FieldA", type: "Edm.String", fieldId: "F1" }], + }); + + const existingFilePath = path.join(tmpDir, "MyTable.ts"); + await fs.writeFile( + existingFilePath, + [ + `import { fmTableOccurrence, textField } from "@proofkit/fmdapi";`, + `import { z } from "zod/v4";`, + ``, + `export const MyTable = fmTableOccurrence("MyTable", {`, + ` "FieldA": textField().inputValidator(z.string()).entityId("F1"),`, + `});`, + ``, + ].join("\n"), + "utf8", + ); + + await generateODataTypes(metadata, { + type: "fmodata", + path: tmpDir, + clearOldFiles: false, + tables: [{ tableName: "MyTable" }], + }); + + const regenerated = await fs.readFile(existingFilePath, "utf8"); + expect(regenerated).toContain( + `FieldA: textField().entityId("F1").inputValidator(z.string())`, + ); + } finally { + await fs.rm(tmpDir, { recursive: true, force: true }); + } + }); + + it("preserves custom chained calls when no standard methods exist", async () => { + const tmpDir = await fs.mkdtemp( + path.join(os.tmpdir(), "proofkit-fmodata-preserve-"), + ); + + try { + const entitySetName = "MyTable"; + const entityTypeName = "NS.MyTable"; + const metadata = makeMetadata({ + entitySetName, + entityTypeName, + // Simulate reduceMetadata: no FieldID => generator won't emit .entityId() + fields: [{ name: "FieldB", type: "Edm.String", fieldId: "" }], + }); + + const existingFilePath = path.join(tmpDir, "MyTable.ts"); + await fs.writeFile( + existingFilePath, + [ + `import { fmTableOccurrence, textField } from "@proofkit/fmdapi";`, + `import { z } from "zod/v4";`, + ``, + `export const MyTable = fmTableOccurrence("MyTable", {`, + ` "FieldB": textField().inputValidator(z.string()),`, + `});`, + ``, + ].join("\n"), + "utf8", + ); + + await generateODataTypes(metadata, { + type: "fmodata", + path: tmpDir, + clearOldFiles: false, + tables: [{ tableName: "MyTable" }], + }); + + const regenerated = await fs.readFile(existingFilePath, "utf8"); + expect(regenerated).toContain( + `FieldB: textField().inputValidator(z.string())`, + ); + } finally { + await fs.rm(tmpDir, { recursive: true, force: true }); + } + }); + + it("preserves aliased imports when regenerating files", async () => { + const tmpDir = await fs.mkdtemp( + path.join(os.tmpdir(), "proofkit-fmodata-preserve-"), + ); + + try { + const entitySetName = "MyTable"; + const entityTypeName = "NS.MyTable"; + const metadata = makeMetadata({ + entitySetName, + entityTypeName, + fields: [{ name: "FieldA", type: "Edm.String", fieldId: "F1" }], + }); + + const existingFilePath = path.join(tmpDir, "MyTable.ts"); + await fs.writeFile( + existingFilePath, + [ + `import { fmTableOccurrence, textField as tf } from "@proofkit/fmdapi";`, + `import { z as zod } from "zod/v4";`, + ``, + `export const MyTable = fmTableOccurrence("MyTable", {`, + ` "FieldA": tf().entityId("F1").inputValidator(zod.string()),`, + `});`, + 
``, + ].join("\n"), + "utf8", + ); + + await generateODataTypes(metadata, { + type: "fmodata", + path: tmpDir, + clearOldFiles: false, + tables: [{ tableName: "MyTable" }], + }); + + const regenerated = await fs.readFile(existingFilePath, "utf8"); + // Verify aliased imports are preserved + expect(regenerated).toContain(`textField as tf`); + expect(regenerated).toContain(`z as zod`); + // Verify the code still uses the aliases + expect(regenerated).toContain(`tf().entityId("F1")`); + expect(regenerated).toContain(`zod.string()`); + } finally { + await fs.rm(tmpDir, { recursive: true, force: true }); + } + }); +}); diff --git a/packages/typegen/tests/typegen.test.ts b/packages/typegen/tests/typegen.test.ts index 1416364a..67185e24 100644 --- a/packages/typegen/tests/typegen.test.ts +++ b/packages/typegen/tests/typegen.test.ts @@ -1,13 +1,4 @@ -import { - describe, - expect, - it, - afterAll, - beforeAll, - vi, - beforeEach, - afterEach, -} from "vitest"; +import { describe, expect, it, beforeEach } from "vitest"; import { generateTypedClients } from "../src/typegen"; import { typegenConfigSingle } from "../src/types"; import { OttoAPIKey } from "../../fmdapi/src"; @@ -16,7 +7,6 @@ import fs from "fs/promises"; import path from "path"; import { execSync } from "child_process"; -import dotenv from "dotenv"; // // Load the correct .env.local relative to this test file's directory // dotenv.config({ path: path.resolve(__dirname, ".env.local") }); @@ -140,7 +130,11 @@ describe("typegen", () => { }); it("basic typegen with zod", async () => { - const config: z.infer = { + const config: Extract< + z.infer, + { type: "fmdapi" } + > = { + type: "fmdapi", layouts: [ { layoutName: "layout", @@ -163,7 +157,11 @@ describe("typegen", () => { it("basic typegen without zod", async () => { // Define baseGenPath within the scope or ensure it's accessible // Assuming baseGenPath is accessible from the describe block's scope - const config: z.infer = { + const config: Extract< + z.infer, + { type: "fmdapi" } + > = { + type: "fmdapi", layouts: [ // add your layouts and name schemas here { @@ -189,7 +187,11 @@ describe("typegen", () => { }, 30000); it("basic typegen with strict numbers", async () => { - const config: z.infer = { + const config: Extract< + z.infer, + { type: "fmdapi" } + > = { + type: "fmdapi", layouts: [ { layoutName: "layout", @@ -223,7 +225,11 @@ describe("typegen", () => { }, 30000); it("zod validator", async () => { - const config: z.infer = { + const config: Extract< + z.infer, + { type: "fmdapi" } + > = { + type: "fmdapi", layouts: [ { layoutName: "layout", diff --git a/packages/typegen/vite.config.ts b/packages/typegen/vite.config.ts index 1730806f..a297b02e 100644 --- a/packages/typegen/vite.config.ts +++ b/packages/typegen/vite.config.ts @@ -12,7 +12,7 @@ export default mergeConfig( "./src/index.ts", "./src/cli.ts", "./src/types.ts", - "./src/server/contract.ts", + "./src/server/app.ts", ], srcDir: "./src", cjs: false, diff --git a/packages/typegen/web/package.json b/packages/typegen/web/package.json index 772f3d43..2748e154 100644 --- a/packages/typegen/web/package.json +++ b/packages/typegen/web/package.json @@ -24,8 +24,9 @@ "@radix-ui/react-tooltip": "^1.2.8", "@remixicon/react": "^4.7.0", "@tailwindcss/vite": "^4.1.18", - "@tanstack/react-query": "^5.76.1", + "@tanstack/react-query": "^5.90.12", "@tanstack/react-table": "^8.21.3", + "@uidotdev/usehooks": "^2.4.1", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "cmdk": "^1.1.1", @@ -41,6 +42,7 @@ "zod": "^4.1.13" }, 
"devDependencies": { + "@tanstack/react-query-devtools": "^5.91.1", "@types/node": "^22.17.1", "@types/react": "^19.1.10", "@types/react-dom": "^19.1.7", diff --git a/packages/typegen/web/src/App.tsx b/packages/typegen/web/src/App.tsx index a629fc4e..bd4cd394 100644 --- a/packages/typegen/web/src/App.tsx +++ b/packages/typegen/web/src/App.tsx @@ -20,8 +20,11 @@ import { ConfigSummary } from "./components/ConfigSummary"; import { type SingleConfig } from "./lib/config-utils"; import { Form } from "./components/ui/form"; import { useConfig } from "./hooks/useConfig"; +import { useHealthCheck } from "./hooks/useHealthCheck"; import { Badge } from "./components/ui/badge"; import { ConfigEditor } from "./components/ConfigEditor"; +import { EmptyState } from "./components/EmptyState"; +import { ConnectionWarning } from "./components/ConnectionWarning"; // Normalize config to always be an array function normalizeConfig( @@ -36,7 +39,38 @@ function normalizeConfig( return []; } +// Create config objects for each type +function createFmdapiConfig(): SingleConfig { + return { + type: "fmdapi", + envNames: { + server: undefined, + db: undefined, + auth: undefined, + }, + layouts: [], + }; +} + +function createFmodataConfig(): SingleConfig { + return { + type: "fmodata", + envNames: { + server: undefined, + db: undefined, + auth: undefined, + }, + tables: [], + alwaysOverrideFieldNames: true, + }; +} + function App() { + // Health check to detect if server is down + const { isHealthy } = useHealthCheck({ + enabled: true, + }); + // Load and save config using custom hook const { configDataResponse, @@ -72,6 +106,42 @@ function App() { // Get configs from form values for data access const configs = form.watch("config"); + // Extract exists and path from configDataResponse + const configExists = configDataResponse?.exists ?? false; + const configPath = configDataResponse?.path; + const fullPath = configDataResponse?.fullPath; + + // Determine empty state conditions + const isFileMissing = !configExists; + const isEmptyConfig = configExists && configs.length === 0; + const showEmptyState = isFileMissing || isEmptyConfig; + + // Unified handler for creating configs (works for both file creation and adding) + const handleAddConfig = async (type: "fmdapi" | "fmodata") => { + const newConfig = + type === "fmdapi" ? createFmdapiConfig() : createFmodataConfig(); + + // If file doesn't exist, create it with the new config + if (isFileMissing) { + try { + await saveMutation.mutateAsync([newConfig]); + await refetch(); + setTimeout(() => { + setActiveAccordionItem(0); + }, 100); + } catch (err) { + const apiType = type === "fmdapi" ? "Data API" : "OData"; + console.error(`Failed to create config file with ${apiType}:`, err); + } + } else { + // File exists, just append to form + append(newConfig); + setTimeout(() => { + setActiveAccordionItem(fields.length); + }, 1); + } + }; + // Run typegen mutation const runTypegenMutation = useMutation({ mutationFn: async () => { @@ -112,6 +182,12 @@ function App() { return (
+ {/* Connection Warning Overlay - Shows when server is unreachable */} + {/* Only show if we've lost connection (not during initial load or retries) */} + {!isHealthy && !isLoading && !isRetrying && ( + refetch()} /> + )} + {/* Loading Overlay - Preserves form state underneath */} {isLoading && (
@@ -159,147 +235,147 @@ function App() {
-
- - -
+ {!isFileMissing && ( +
+ + +
+ )}
- setActiveAccordionItem(Number(value))} - type="single" - variant="outline" - collapsible - className="w-full lg:w-[75%] mx-auto" - > - {fields.map((field, index) => { - const config = configs[index]; - return ( - - - - - - remove(index)} - /> - - - ); - })} - -
- - - - - - { - append({ - type: "fmdapi", - envNames: { - server: undefined, - db: undefined, - auth: undefined, - }, - layouts: [], - }); - setTimeout(() => { - setActiveAccordionItem(fields.length); - }, 1); - }} - > -
-

Data API

- - Legacy - -
-
- Reads/writes data using layout-specific context -
-
- { - append({ - type: "fmodata", - envNames: { - server: undefined, - db: undefined, - auth: undefined, - }, - downloadMetadata: false, - metadataPath: "schema/metadata.xml", - }); - setTimeout(() => { - setActiveAccordionItem(fields.length); - }, 1); - }} - > -
-

OData

- - New - -
-
- Reads/writes data directly to the database tables, using - the relationship graph as context -
-
-
-
+ {!isLoading && showEmptyState ? ( +
+ handleAddConfig("fmdapi") + : undefined + } + onAddFmodata={ + isFileMissing || isEmptyConfig + ? () => handleAddConfig("fmodata") + : undefined + } + />
- + ) : ( + setActiveAccordionItem(Number(value))} + type="single" + variant="outline" + collapsible + className="w-full lg:w-[75%] mx-auto" + > + {fields.map((field, index) => { + const config = configs[index]; + return ( + + + + + + remove(index)} + /> + + + ); + })} + +
+ + + + + + handleAddConfig("fmdapi")} + > +
+

Data API

+ + Legacy + +
+
+ Reads/writes data using layout-specific context +
+
+ handleAddConfig("fmodata")} + > +
+

OData

+ + New + +
+
+ Reads/writes data directly to the database tables, + using the relationship graph as context +
+
+
+
+
+
+ )}
diff --git a/packages/typegen/web/src/components/ConfigEditor.tsx b/packages/typegen/web/src/components/ConfigEditor.tsx index b648f54a..c62e1769 100644 --- a/packages/typegen/web/src/components/ConfigEditor.tsx +++ b/packages/typegen/web/src/components/ConfigEditor.tsx @@ -1,6 +1,5 @@ import { useFormContext, useWatch } from "react-hook-form"; -import { useState, useEffect, useId, useRef } from "react"; -import { useQueryClient } from "@tanstack/react-query"; +import { useState, useEffect, useId } from "react"; import { Input, InputWrapper, InputGroup, InputAddon } from "./ui/input"; import { Select, @@ -32,17 +31,8 @@ import { DialogHeader, DialogTitle, } from "./ui/dialog"; -import { - PlayIcon, - Trash2, - Loader2, - DownloadIcon, - AlertTriangle, - CheckCircleIcon, -} from "lucide-react"; +import { PlayIcon, Trash2, Loader2 } from "lucide-react"; import { useRunTypegen } from "../hooks/useRunTypegen"; -import { useFileExists } from "../hooks/useFileExists"; -import { client } from "../lib/api"; interface ConfigEditorProps { index: number; @@ -75,50 +65,7 @@ export function ConfigEditor({ index, onRemove }: ConfigEditorProps) { }); const [usingWebviewer, setUsingWebviewer] = useState(!!webviewerScriptName); const [showRemoveDialog, setShowRemoveDialog] = useState(false); - const [showDownloadDialog, setShowDownloadDialog] = useState(false); - const [isDownloading, setIsDownloading] = useState(false); const { runTypegen, isRunning } = useRunTypegen(); - const queryClient = useQueryClient(); - - // Watch the downloadMetadata field to check if auto-download is enabled - const downloadMetadata = useWatch({ - control, - name: `config.${index}.downloadMetadata` as const, - }); - - // Debounced metadata path for file existence check - const metadataPath = useWatch({ - control, - name: `config.${index}.metadataPath` as const, - }); - const [debouncedMetadataPath, setDebouncedMetadataPath] = useState< - string | undefined - >(undefined); - const metadataPathTimerRef = useRef(null); - - // Debounce the metadata path to prevent excessive API calls - useEffect(() => { - if (metadataPathTimerRef.current) { - clearTimeout(metadataPathTimerRef.current); - } - - metadataPathTimerRef.current = setTimeout(() => { - setDebouncedMetadataPath( - metadataPath && metadataPath.trim() !== "" ? metadataPath : undefined, - ); - }, 300); // 300ms debounce - - return () => { - if (metadataPathTimerRef.current) { - clearTimeout(metadataPathTimerRef.current); - } - }; - }, [metadataPath]); - - // Check if the file exists (only for fmodata config type) - const { data: fileExistsData } = useFileExists( - configType === "fmodata" ? 
debouncedMetadataPath : undefined, - ); // Get the current config value const currentConfig = watch(`config.${index}` as const); @@ -144,62 +91,6 @@ export function ConfigEditor({ index, onRemove }: ConfigEditorProps) { } }; - const handleDownloadMetadata = async () => { - if (!currentConfig || currentConfig.type !== "fmodata") { - return; - } - - setIsDownloading(true); - try { - const res = await client.api["download-metadata"].$post({ - json: { config: currentConfig }, - }); - - if (!res.ok) { - const errorData = (await res.json().catch(() => ({}))) as { - error?: string; - }; - throw new Error(errorData.error || "Failed to download metadata"); - } - - // Refetch the file exists query to check if the download was successful - await queryClient.refetchQueries({ - queryKey: ["fileExists", debouncedMetadataPath], - }); - - // Invalidate the parse-metadata query so it automatically reparses - // Construct the configKey the same way useParseMetadata does - const configKey = currentConfig - ? JSON.stringify({ - type: currentConfig.type, - metadataPath: currentConfig.metadataPath, - }) - : ""; - queryClient.invalidateQueries({ - queryKey: ["parseMetadata", index, configKey], - }); - } catch (err) { - console.error("Failed to download metadata:", err); - // You might want to show an error toast here - } finally { - setIsDownloading(false); - setShowDownloadDialog(false); - } - }; - - const handleDownloadClick = (e: React.MouseEvent) => { - e.preventDefault(); - e.stopPropagation(); - - // If auto-download is enabled, skip the warning - if (downloadMetadata) { - handleDownloadMetadata(); - } else { - // Show confirmation dialog - setShowDownloadDialog(true); - } - }; - return (
{configErrors?.root && ( @@ -405,90 +296,30 @@ export function ConfigEditor({ index, onRemove }: ConfigEditorProps) { {configType === "fmodata" && (
-
- ( - - )} - /> -
- -
- { - if (!value || value.trim() === "") { - return true; // Allow empty, will be caught by required validation if needed - } - if (!value.toLowerCase().endsWith(".xml")) { - return "Metadata path must point to a file ending with .xml"; - } - return true; - }, - }} - render={({ field }) => ( - - - Metadata Path{" "} - - - - - - { - field.onBlur(); - trigger(`config.${index}.metadataPath`); - }} - /> - {fileExistsData?.exists === false && - debouncedMetadataPath && - debouncedMetadataPath.trim() !== "" && ( - - )} - {fileExistsData?.exists === true && - debouncedMetadataPath && - debouncedMetadataPath.trim() !== "" && ( - - )} - - - - - - - - - )} - /> -
+ ( + + )} + /> + ( + + )} + />
)} @@ -565,53 +396,6 @@ export function ConfigEditor({ index, onRemove }: ConfigEditorProps) { - - {/* Download Metadata Confirmation Dialog */} - - - - - - Warning: Download Metadata - - - Downloading metadata could crash the OData service on your server - if the relationship graph is too large. Files with large - relationship graphs containing many table occurrences using the - same base tables are particularly at risk. - - - We recommend creating a dedicated file with a simple graph and - external file references to avoid this issue. - - - - - - - -
); } diff --git a/packages/typegen/web/src/components/ConfigList.css b/packages/typegen/web/src/components/ConfigList.css index 1744229b..29ca5fd2 100644 --- a/packages/typegen/web/src/components/ConfigList.css +++ b/packages/typegen/web/src/components/ConfigList.css @@ -84,3 +84,6 @@ + + + diff --git a/packages/typegen/web/src/components/ConfigList.tsx b/packages/typegen/web/src/components/ConfigList.tsx index 6e0deeff..fc123d74 100644 --- a/packages/typegen/web/src/components/ConfigList.tsx +++ b/packages/typegen/web/src/components/ConfigList.tsx @@ -69,3 +69,6 @@ export function ConfigList({ + + + diff --git a/packages/typegen/web/src/components/ConnectionWarning.tsx b/packages/typegen/web/src/components/ConnectionWarning.tsx new file mode 100644 index 00000000..cc188a9b --- /dev/null +++ b/packages/typegen/web/src/components/ConnectionWarning.tsx @@ -0,0 +1,69 @@ +import { AlertTriangle, RefreshCw, X } from "lucide-react"; +import { Button } from "./ui/button"; + +interface ConnectionWarningProps { + onRefresh?: () => void; +} + +export function ConnectionWarning({ onRefresh }: ConnectionWarningProps) { + const handleRefresh = () => { + if (onRefresh) { + onRefresh(); + } else { + window.location.reload(); + } + }; + + return ( +
+
+
+ +
+

+ UI Server Unavailable +

+

+ Did you stop the @proofkit/typegen ui command?

+
+

+ To resolve this, you can: +

+
    +
  • Close this browser tab or window
  • +
  • Refresh the page if you plan to reconnect
  • +
  • Rerun the @proofkit/typegen ui command
  • +
+
+
+ + +
+
+
+
+
+ ); +} diff --git a/packages/typegen/web/src/components/EmptyState.tsx b/packages/typegen/web/src/components/EmptyState.tsx new file mode 100644 index 00000000..89a545be --- /dev/null +++ b/packages/typegen/web/src/components/EmptyState.tsx @@ -0,0 +1,163 @@ +import { FileText, Database } from "lucide-react"; +import { Button } from "./ui/button"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from "./ui/dropdown-menu"; +import { Plus } from "lucide-react"; +import { Badge } from "./ui/badge"; +import { Tooltip, TooltipTrigger, TooltipContent } from "./ui/tooltip"; + +interface EmptyStateProps { + variant: "file-missing" | "empty-config"; + configPath?: string; + onAddFmdapi?: () => void; + onAddFmodata?: () => void; +} + +export function EmptyState({ + variant, + configPath, + onAddFmdapi, + onAddFmodata, +}: EmptyStateProps) { + if (variant === "file-missing") { + return ( +
+
+ +
+

No Config File Found

+

+ A config file will be created at the{" "} + + + + current working directory + + + +

+ {configPath || "proofkit-typegen.config.json"} +

+ + + . +

+

+ Want to create it somewhere else?
+ Restart the server with the{" "} + + --config + {" "} + option and specify a file path. +

+ {(onAddFmdapi || onAddFmodata) && ( + + + + + + {onAddFmdapi && ( + +
+

Data API

+ + Legacy + +
+
+ Reads/writes data using layout-specific context +
+
+ )} + {onAddFmodata && ( + +
+

OData

+ + New + +
+
+ Reads/writes data directly to the database tables, using the + relationship graph as context +
+
+ )} +
+
+ )} +
+ ); + } + + // Empty config state + return ( +
+
+ +
+

No Connections Yet

+

+ Add your first FileMaker connection to get started +

+ {(onAddFmdapi || onAddFmodata) && ( + + + + + + {onAddFmdapi && ( + +
+

Data API

+ + Legacy + +
+
+ Reads/writes data using layout-specific context +
+
+ )} + {onAddFmodata && ( + +
+

OData

+ + New + +
+
+ Reads/writes data directly to the database tables, using the + relationship graph as context +
+
+ )} +
+
+ )} +
+ ); +} diff --git a/packages/typegen/web/src/components/EnvVarDialog.tsx b/packages/typegen/web/src/components/EnvVarDialog.tsx index 7d86b63f..816825f9 100644 --- a/packages/typegen/web/src/components/EnvVarDialog.tsx +++ b/packages/typegen/web/src/components/EnvVarDialog.tsx @@ -1,25 +1,22 @@ import { useEffect, useState } from "react"; -import { useWatch, useFormContext } from "react-hook-form"; +import { useFormContext, useWatch } from "react-hook-form"; import { Button } from "./ui/button"; import { Dialog, DialogContent, + DialogDescription, DialogHeader, DialogTitle, DialogTrigger, } from "./ui/dialog"; -import { - Select, - SelectContent, - SelectItem, - SelectTrigger, - SelectValue, -} from "./ui/select"; import { defaultEnvNames } from "../../../src/constants"; import { EnvVarField } from "./EnvVarField"; import { useEnvVarIndicator } from "./useEnvVarIndicator"; import { useEnvValue } from "../lib/envValues"; import { useTestConnection, setDialogOpen } from "../hooks/useTestConnection"; +import { Alert, AlertContent, AlertDescription, AlertIcon } from "./ui/alert"; +import { Card, CardContent, CardTitle } from "./ui/card"; +import { Separator } from "./ui/separator"; import { Loader2, CheckCircle2, @@ -27,19 +24,31 @@ import { AlertCircle, AlertTriangle, Server, + Info, } from "lucide-react"; interface EnvVarDialogProps { index: number; } +// Helper to safely extract error message from various error formats +function getErrorMessage(error: unknown): string { + if (typeof error === "string") { + return error; + } + if (error instanceof Error) { + return error.message; + } + if (error && typeof error === "object" && "message" in error) { + return String((error as { message: unknown }).message); + } + return ""; +} + export function EnvVarDialog({ index }: EnvVarDialogProps) { const { control, setValue, getValues } = useFormContext<{ config: any[]; }>(); - const [authTypeSelector, setAuthTypeSelector] = useState< - "none" | "apiKey" | "username" - >("apiKey"); const [dialogOpen, setDialogOpenState] = useState(false); // Track dialog open state to pause background tests @@ -50,30 +59,68 @@ export function EnvVarDialog({ index }: EnvVarDialogProps) { }; }, [index, dialogOpen]); - // Watch the envNames.auth value for this config + // Get indicator data + const { hasCustomValues, serverValue, serverLoading, dbValue, dbLoading } = + useEnvVarIndicator(index); + + // Watch the auth env names from the form const envNamesAuth = useWatch({ control, name: `config.${index}.envNames.auth` as const, }); - // Get indicator data - const { - hasCustomValues, - serverValue, - serverLoading, - dbValue, - dbLoading, - authEnvName: baseAuthEnvName, - } = useEnvVarIndicator(index); + // Determine the actual env names to use (from form or defaults) + const apiKeyEnvName = + envNamesAuth && + typeof envNamesAuth === "object" && + "apiKey" in envNamesAuth && + envNamesAuth.apiKey && + envNamesAuth.apiKey.trim() !== "" + ? envNamesAuth.apiKey + : defaultEnvNames.apiKey; + const usernameEnvName = + envNamesAuth && + typeof envNamesAuth === "object" && + "username" in envNamesAuth && + envNamesAuth.username && + envNamesAuth.username.trim() !== "" + ? envNamesAuth.username + : defaultEnvNames.username; + const passwordEnvName = + envNamesAuth && + typeof envNamesAuth === "object" && + "password" in envNamesAuth && + envNamesAuth.password && + envNamesAuth.password.trim() !== "" + ? 
envNamesAuth.password + : defaultEnvNames.password; - // Determine auth env name based on auth type selector - const authEnvName = - baseAuthEnvName || - (authTypeSelector === "apiKey" - ? defaultEnvNames.apiKey - : defaultEnvNames.username); + // Resolve all three auth env values + const { data: apiKeyValue, isLoading: apiKeyLoading } = + useEnvValue(apiKeyEnvName); + const { data: usernameValue, isLoading: usernameLoading } = + useEnvValue(usernameEnvName); + const { data: passwordValue, isLoading: passwordLoading } = + useEnvValue(passwordEnvName); - const { data: authValue, isLoading: authLoading } = useEnvValue(authEnvName); + // Determine which authentication method will be used + // Default to API key if it resolves to a value, otherwise use username/password if both resolve + const activeAuthMethod = + !apiKeyLoading && + apiKeyValue !== undefined && + apiKeyValue !== null && + apiKeyValue !== "" + ? "apiKey" + : !usernameLoading && + !passwordLoading && + usernameValue !== undefined && + usernameValue !== null && + usernameValue !== "" && + passwordValue !== undefined && + passwordValue !== null && + passwordValue !== "" + ? "username" + : null; // Test connection hook - enable when dialog is closed, disable when open // When dialog is open, it will only run when the retry button is clicked @@ -86,6 +133,23 @@ export function EnvVarDialog({ index }: EnvVarDialogProps) { } = useTestConnection(index, { enabled: !dialogOpen }); // Check if any values resolve to undefined/null/empty (only check after loading completes) + // For auth, check that at least one complete auth method is configured (either API key OR username+password) + const hasApiKeyAuth = + !apiKeyLoading && + apiKeyValue !== undefined && + apiKeyValue !== null && + apiKeyValue !== ""; + const hasUsernamePasswordAuth = + !usernameLoading && + !passwordLoading && + usernameValue !== undefined && + usernameValue !== null && + usernameValue !== "" && + passwordValue !== undefined && + passwordValue !== null && + passwordValue !== ""; + const hasAuth = hasApiKeyAuth || hasUsernamePasswordAuth; + const hasUndefinedValues = (!serverLoading && (serverValue === undefined || @@ -93,40 +157,26 @@ export function EnvVarDialog({ index }: EnvVarDialogProps) { serverValue === "")) || (!dbLoading && (dbValue === undefined || dbValue === null || dbValue === "")) || - (!authLoading && - (authValue === undefined || authValue === null || authValue === "")); + (!apiKeyLoading && !usernameLoading && !passwordLoading && !hasAuth); - // Initialize auth type selector based on current form value + // Initialize auth fields if not already set useEffect(() => { - let authSelector: "none" | "apiKey" | "username" = "apiKey"; - - if (envNamesAuth) { - if (typeof envNamesAuth === "object") { - // Check for username first (since it has two fields, it's more specific) - if ("username" in envNamesAuth || "password" in envNamesAuth) { - authSelector = "username"; - } else if ("apiKey" in envNamesAuth) { - authSelector = "apiKey"; - } - // If it's an empty object {}, don't change the selector or reset values - // This preserves the current state when the server returns {} - } - } else { - // Only initialize if auth is truly undefined/null - // Check current form value to avoid overwriting - const currentAuth = getValues(`config.${index}.envNames.auth` as any); - if (!currentAuth) { - setValue(`config.${index}.envNames.auth` as const, { - apiKey: "", - }); - } - } - - // Only update selector if it's different to avoid unnecessary re-renders - if 
(authSelector !== authTypeSelector) { - setAuthTypeSelector(authSelector); + const currentAuth = getValues(`config.${index}.envNames.auth` as any); + if (!currentAuth) { + setValue(`config.${index}.envNames.auth` as const, { + apiKey: "", + username: "", + password: "", + }); + } else if (typeof currentAuth === "object") { + // Ensure all fields exist + setValue(`config.${index}.envNames.auth` as const, { + apiKey: currentAuth.apiKey || "", + username: currentAuth.username || "", + password: currentAuth.password || "", + }); } - }, [envNamesAuth, setValue, getValues, index, authTypeSelector]); + }, [setValue, getValues, index]); return ( @@ -155,7 +205,12 @@ export function EnvVarDialog({ index }: EnvVarDialogProps) { Custom Environment Variable Names + + Enter the names{" "} + of the environment variables below, not the values + +
-
-
- - -
+
+ + + Authentication + {/* API Key on its own line */} + + + {/* OR Divider */} +
+ + + OR + + +
-
- {authTypeSelector === "apiKey" && ( -
+ {/* Username and Password on the same line */} +
-
- )} - - {authTypeSelector === "username" && ( - <> -
- -
-
- -
- - )} -
+ +
+
+
+ + + + + + + You will need to rerun the{" "} + + @proofkit/typegen ui + {" "} + command if you change any environment variables. + + + + {/* Test Connection Section */}
@@ -346,7 +366,7 @@ export function EnvVarDialog({ index }: EnvVarDialogProps) {
{errorDetails.message || - errorDetails.error || + getErrorMessage(errorDetails.error as unknown) || "Unknown error"}
{errorDetails.details?.missing && ( @@ -414,7 +434,17 @@ export function EnvVarDialog({ index }: EnvVarDialogProps) {
)} {testError && !errorDetails && ( -
{testError.message}
+
+ {testError instanceof Error + ? testError.message + : typeof testError === "object" && + testError !== null && + "message" in testError + ? String( + (testError as { message: unknown }).message, + ) + : "Unknown error"} +
)}
)} diff --git a/packages/typegen/web/src/components/EnvVarField.tsx b/packages/typegen/web/src/components/EnvVarField.tsx index 881ca0b8..68d2b08d 100644 --- a/packages/typegen/web/src/components/EnvVarField.tsx +++ b/packages/typegen/web/src/components/EnvVarField.tsx @@ -1,10 +1,9 @@ -import { useState, useEffect, useRef, memo } from "react"; +import { useMemo } from "react"; import { useFormContext, useWatch, Path, PathValue } from "react-hook-form"; import { z } from "zod"; -import { Eye, EyeOff } from "lucide-react"; +import { CircleCheck, CircleSlash, Loader } from "lucide-react"; import { configSchema } from "../lib/schema"; -import { Input } from "./ui/input"; -import { Button } from "./ui/button"; +import { Input, InputWrapper } from "./ui/input"; import { FormControl, FormField, @@ -13,94 +12,13 @@ import { FormMessage, } from "./ui/form"; import { useEnvValue } from "../lib/envValues"; +import { useDebounce } from "@uidotdev/usehooks"; +import { Tooltip, TooltipContent, TooltipTrigger } from "./ui/tooltip"; +import { cn } from "@/lib/utils"; type FormData = z.infer; type FormConfig = { config: FormData[] }; -// Separate component for value display to prevent Input re-renders -const EnvValueDisplay = memo(function EnvValueDisplay({ - fieldName, - defaultValue, -}: { - fieldName: Path; - defaultValue: string; -}) { - const { control } = useFormContext(); - const [isVisible, setIsVisible] = useState(false); - const [debouncedEnvName, setDebouncedEnvName] = useState( - undefined, - ); - const timerRef = useRef(null); - - // Watch the env name value - but debounce updates to prevent re-renders - const envNameRaw = useWatch({ - control, - name: fieldName, - defaultValue: "", - }) as string | undefined; - - // Treat empty string as undefined to use default - const envName = - envNameRaw && envNameRaw.trim() !== "" ? envNameRaw : undefined; - - // Debounce the env name to prevent excessive re-renders and API calls - useEffect(() => { - if (timerRef.current) { - clearTimeout(timerRef.current); - } - - timerRef.current = setTimeout(() => { - setDebouncedEnvName(envName); - }, 300); // 300ms debounce - - return () => { - if (timerRef.current) { - clearTimeout(timerRef.current); - } - }; - }, [envName]); - - // Get the resolved value from the server (using debounced value) - const { data: envValue, isLoading } = useEnvValue( - debouncedEnvName ?? defaultValue, - ); - - if (!envName && !defaultValue) return null; - - return ( -
- {isLoading ? ( - Loading... - ) : envValue ? ( - <> - - - Value:{" "} - - {isVisible ? envValue : "****"} - - - - ) : ( - Not set - )} -
- ); -}); - interface EnvVarFieldProps> { fieldName: TFieldName extends Path ? PathValue extends string | undefined @@ -110,6 +28,7 @@ interface EnvVarFieldProps> { label: string; placeholder: string; defaultValue: string; + dimField?: boolean; } export function EnvVarField>({ @@ -117,8 +36,36 @@ export function EnvVarField>({ label, placeholder, defaultValue, + dimField = false, }: EnvVarFieldProps) { const { control } = useFormContext(); + const envName = useWatch({ + control, + name: fieldName, + defaultValue: undefined, + }); + + const debouncedEnvName = useDebounce(envName, 300); + + // Get the resolved value from the server (using debounced value) + // Ensure debouncedEnvName is a string or undefined before passing to useEnvValue + // Handle nested paths where watch might return objects or other types + const envNameForQuery: string | undefined = (() => { + if (typeof debouncedEnvName === "string") { + return debouncedEnvName.trim() !== "" ? debouncedEnvName : undefined; + } + return undefined; + })(); + const { data: envValue, isLoading } = useEnvValue( + envNameForQuery ?? defaultValue, + ); + + const valueState: "loading" | "not-set" | "set" = useMemo(() => { + if (isLoading) return "loading"; + if (envValue === undefined || envValue === null || envValue === "") + return "not-set"; + return "set"; + }, [isLoading, envValue]); return ( >({ name={fieldName} render={({ field }) => ( - {label} + + {label}{" "} + {dimField ? ( + (not used) + ) : ( + "" + )} + - + + + {valueState === "set" ? ( + + + + + {envValue} + + ) : valueState === "loading" ? ( + + ) : ( + + + + + Not set + + )} + - )} diff --git a/packages/typegen/web/src/components/ErrorBoundary.tsx b/packages/typegen/web/src/components/ErrorBoundary.tsx new file mode 100644 index 00000000..edfe4d11 --- /dev/null +++ b/packages/typegen/web/src/components/ErrorBoundary.tsx @@ -0,0 +1,115 @@ +import React, { Component, ErrorInfo, ReactNode } from "react"; +import { Button } from "./ui/button"; +import { AlertTriangle } from "lucide-react"; + +interface Props { + children: ReactNode; + fallback?: ReactNode; +} + +interface State { + hasError: boolean; + error: Error | null; + errorInfo: ErrorInfo | null; +} + +export class ErrorBoundary extends Component { + constructor(props: Props) { + super(props); + this.state = { + hasError: false, + error: null, + errorInfo: null, + }; + } + + static getDerivedStateFromError(error: Error): Partial { + return { + hasError: true, + error, + }; + } + + componentDidCatch(error: Error, errorInfo: ErrorInfo) { + console.error("ErrorBoundary caught an error:", error, errorInfo); + this.setState({ + error, + errorInfo, + }); + } + + handleReset = () => { + this.setState({ + hasError: false, + error: null, + errorInfo: null, + }); + }; + + render() { + if (this.state.hasError) { + if (this.props.fallback) { + return this.props.fallback; + } + + return ( +
+
+
+ +

+ Something went wrong +

+
+

+ An unexpected error occurred. Please try refreshing the page or + contact support if the problem persists. +

+ {this.state.error && ( +
+ + Error details + +
+
+ {this.state.error.name}: {this.state.error.message} +
+ {this.state.error.stack && ( +
+                      {this.state.error.stack}
+                    
+ )} + {this.state.errorInfo && ( +
+
+ Component Stack: +
+
+                        {this.state.errorInfo.componentStack}
+                      
+
+ )} +
+
+ )} +
+ + +
+
+
+ ); + } + + return this.props.children; + } +} + + diff --git a/packages/typegen/web/src/components/MetadataFieldsDialog.tsx b/packages/typegen/web/src/components/MetadataFieldsDialog.tsx index d965c75c..8803fc5c 100644 --- a/packages/typegen/web/src/components/MetadataFieldsDialog.tsx +++ b/packages/typegen/web/src/components/MetadataFieldsDialog.tsx @@ -1,4 +1,4 @@ -import { useMemo, useState, useCallback, useRef } from "react"; +import { useMemo, useState, useCallback, useRef, useEffect } from "react"; import { useFormContext, useWatch } from "react-hook-form"; import { Search, Check, Key } from "lucide-react"; import { @@ -6,13 +6,16 @@ import { getCoreRowModel, getSortedRowModel, getFilteredRowModel, + getPaginationRowModel, type ColumnDef, } from "@tanstack/react-table"; import { DataGrid, DataGridContainer } from "./ui/data-grid"; import { DataGridTable } from "./ui/data-grid-table"; import { DataGridColumnHeader } from "./ui/data-grid-column-header"; +import { DataGridPagination } from "./ui/data-grid-pagination"; import { Input, InputWrapper } from "./ui/input"; import { Switch } from "./ui/switch"; +import { Skeleton } from "./ui/skeleton"; import { Select, SelectContent, @@ -28,13 +31,22 @@ import { DialogTitle, DialogBody, } from "./ui/dialog"; -import type { ParsedMetadataResponse } from "../hooks/useParseMetadata"; +import { useTableMetadata } from "../hooks/useTableMetadata"; import type { SingleConfig } from "../lib/config-utils"; +import { InfoTooltip } from "./InfoTooltip"; +import { + FormControl, + FormField, + FormItem, + FormLabel, + FormMessage, +} from "./ui/form"; // Memoize model functions outside component to ensure stable references const coreRowModel = getCoreRowModel(); const sortedRowModel = getSortedRowModel(); const filteredRowModel = getFilteredRowModel(); +const paginationRowModel = getPaginationRowModel(); // Stable empty array to prevent infinite re-renders const EMPTY_FIELDS_CONFIG: any[] = []; @@ -97,43 +109,26 @@ interface MetadataFieldsDialogProps { open: boolean; onOpenChange: (open: boolean) => void; tableName: string | null; - parsedMetadata: ParsedMetadataResponse["parsedMetadata"] | undefined; configIndex: number; } -// Wrapper component to conditionally mount the content export function MetadataFieldsDialog({ open, onOpenChange, tableName, - parsedMetadata, configIndex, }: MetadataFieldsDialogProps) { - // Only render the full content when dialog is open - // This prevents expensive hook computations when closed - if (!open) { - return null; - } - - return ( - + // Fetch metadata - query is paused when dialog is not open + const { + data: parsedMetadata, + isLoading, + isError, + error, + } = useTableMetadata( + configIndex, + tableName, + open, // enabled flag ); -} - -// Inner component that handles all the expensive hooks and rendering -function MetadataFieldsDialogContent({ - open, - onOpenChange, - tableName, - parsedMetadata, - configIndex, -}: MetadataFieldsDialogProps) { const { control, setValue } = useFormContext<{ config: SingleConfig[] }>(); const [globalFilter, setGlobalFilter] = useState(""); @@ -153,14 +148,63 @@ function MetadataFieldsDialogContent({ // Use a ref to store the latest fieldsConfig to avoid unstable dependencies const fieldsConfigRef = useRef(EMPTY_FIELDS_CONFIG); + // Extract the specific table's config - use stable reference to prevent infinite re-renders + const tableConfig = useMemo(() => { + if (!tableName || !allTablesConfig || !Array.isArray(allTablesConfig)) { + return undefined; + } + return 
allTablesConfig.find((t) => t?.tableName === tableName); + }, [tableName, allTablesConfig]); + + // Compute the table index for use in form paths + const tableIndex = useMemo(() => { + if (!tableName || !allTablesConfig || !Array.isArray(allTablesConfig)) { + return -1; + } + return allTablesConfig.findIndex((t) => t?.tableName === tableName); + }, [tableName, allTablesConfig]); + + // Ensure table exists in config when dialog opens (if table is included) + // This ensures we have a stable index for useController + useEffect(() => { + if (!open || !tableName || configType !== "fmodata") return; + if (tableIndex < 0) { + // Table doesn't exist yet, but we need it to exist for the form fields + // Only create it if we're actually configuring it (it should be included) + const currentTables = Array.isArray(allTablesConfig) + ? allTablesConfig + : []; + setValue( + `config.${configIndex}.tables` as any, + [...currentTables, { tableName }], + { shouldDirty: false }, // Don't mark as dirty since this is just initialization + ); + } + }, [ + open, + tableName, + tableIndex, + configType, + configIndex, + allTablesConfig, + setValue, + ]); + + // Get the current table index - this will update after useEffect ensures table exists + const currentTableIndex = useMemo(() => { + if (!tableName || !allTablesConfig || !Array.isArray(allTablesConfig)) { + return -1; + } + return allTablesConfig.findIndex((t) => t?.tableName === tableName); + }, [tableName, allTablesConfig]); + // Extract only the specific table's fields config - use stable reference to prevent infinite re-renders const fieldsConfig = useMemo(() => { - if (!tableName || !allTablesConfig || !Array.isArray(allTablesConfig)) { + if (!tableConfig) { return EMPTY_FIELDS_CONFIG; } - const tableConfig = allTablesConfig.find((t) => t?.tableName === tableName); - return (tableConfig?.fields ?? EMPTY_FIELDS_CONFIG) as any[]; - }, [tableName, allTablesConfig]); + return (tableConfig.fields ?? EMPTY_FIELDS_CONFIG) as any[]; + }, [tableConfig]); // Keep ref in sync fieldsConfigRef.current = fieldsConfig; @@ -276,6 +320,18 @@ function MetadataFieldsDialogContent({ [configType, configIndex, tableName, allTablesConfig, setValue], ); + // Get the field name for variableName - table should exist due to ensuredTableIndex above + const variableNameFieldName = + `config.${configIndex}.tables.${currentTableIndex >= 0 ? currentTableIndex : 0}.variableName` as any; + + // Get the field name for reduceMetadata - table should exist due to ensuredTableIndex above + const reduceMetadataFieldName = + `config.${configIndex}.tables.${currentTableIndex >= 0 ? currentTableIndex : 0}.reduceMetadata` as any; + + // Get the field name for alwaysOverrideFieldNames - table should exist due to ensuredTableIndex above + const alwaysOverrideFieldNamesFieldName = + `config.${configIndex}.tables.${currentTableIndex >= 0 ? currentTableIndex : 0}.alwaysOverrideFieldNames` as any; + // Helper to set field type override - use ref to avoid dependency on fieldsConfig const setFieldTypeOverride = useCallback( (fieldName: string, typeOverride: string | undefined) => { @@ -674,6 +730,9 @@ function MetadataFieldsDialogContent({
); }, + meta: { + skeleton: , + }, }, { accessorKey: "fieldName", @@ -698,6 +757,9 @@ function MetadataFieldsDialogContent({
); }, + meta: { + skeleton: , + }, }, { accessorKey: "fieldType", @@ -710,6 +772,9 @@ function MetadataFieldsDialogContent({ {info.getValue() as string} ), + meta: { + skeleton: , + }, }, { id: "typeOverride", @@ -745,6 +810,9 @@ function MetadataFieldsDialogContent({ ); }, + meta: { + skeleton: , + }, }, { accessorKey: "nullable", @@ -755,6 +823,9 @@ function MetadataFieldsDialogContent({ cell: (info) => ( ), + meta: { + skeleton: , + }, }, { accessorKey: "global", @@ -765,6 +836,9 @@ function MetadataFieldsDialogContent({ cell: (info) => ( ), + meta: { + skeleton: , + }, }, { accessorKey: "readOnly", @@ -773,6 +847,9 @@ function MetadataFieldsDialogContent({ ), enableSorting: true, cell: (info) => , + meta: { + skeleton: , + }, }, ], [ @@ -792,13 +869,24 @@ function MetadataFieldsDialogContent({ getCoreRowModel: coreRowModel, getSortedRowModel: sortedRowModel, getFilteredRowModel: filteredRowModel, + getPaginationRowModel: paginationRowModel, globalFilterFn: "includesString", state: { globalFilter, }, onGlobalFilterChange: setGlobalFilter, + initialState: { + pagination: { + pageSize: 10, + }, + }, }); + // Calculate the number of included (non-excluded) fields + const selectedFieldsCount = useMemo(() => { + return fieldsData.filter((row) => !row.isExcluded).length; + }, [fieldsData]); + return ( - Fields for {tableName || "Table"} + + Including {selectedFieldsCount} of {fieldsData.length} fields for{" "} + {tableName || "Table"} +
@@ -820,17 +911,186 @@ function MetadataFieldsDialogContent({
- - - - - + {isError ? ( +
+
+
+ Failed to load fields +
+ {error instanceof Error && ( +
+ {error.message} +
+ )} +
+
+ ) : ( + + + +
+ +
+
+
+ )} +
+
+
+ ( + + + Variable Name Override + + + + { + const value = e.target.value.trim(); + field.onChange(value || undefined); + }} + /> + + + + )} + /> + { + const isDefault = field.value === undefined; + return ( + + + Always Update Field Names{" "} + + + + + + + + ); + }} + /> + { + const isDefault = field.value === undefined; + return ( + + + Reduce Metadata Annotations{" "} + + + + + + + + ); + }} + /> +
diff --git a/packages/typegen/web/src/components/MetadataTablesEditor.tsx b/packages/typegen/web/src/components/MetadataTablesEditor.tsx index 90827568..114b2259 100644 --- a/packages/typegen/web/src/components/MetadataTablesEditor.tsx +++ b/packages/typegen/web/src/components/MetadataTablesEditor.tsx @@ -1,79 +1,186 @@ -import { useMemo, useState, useCallback, useRef } from "react"; import { useFormContext, useWatch } from "react-hook-form"; -import { useParseMetadata } from "../hooks/useParseMetadata"; -import { Loader2, AlertTriangle, Search } from "lucide-react"; +import { Button } from "./ui/button"; +import { SingleConfig } from "../lib/config-utils"; +import { AlertTriangle, Loader2, Search, RefreshCw } from "lucide-react"; +import { useListTables } from "../hooks/useListTables"; +import { useTestConnection } from "../hooks/useTestConnection"; +import { Switch } from "./ui/switch"; +import { Input, InputWrapper } from "./ui/input"; +import { useMemo, useState, useCallback, useRef, useEffect } from "react"; +import { MetadataFieldsDialog } from "./MetadataFieldsDialog"; +import { useTableMetadata } from "../hooks/useTableMetadata"; import { useReactTable, getCoreRowModel, getSortedRowModel, getFilteredRowModel, + getPaginationRowModel, type ColumnDef, } from "@tanstack/react-table"; import { DataGrid, DataGridContainer } from "./ui/data-grid"; import { DataGridTable } from "./ui/data-grid-table"; import { DataGridColumnHeader } from "./ui/data-grid-column-header"; -import { Input, InputWrapper } from "./ui/input"; -import { Switch } from "./ui/switch"; -import { DropdownMenuItem } from "./ui/dropdown-menu"; -import { MetadataFieldsDialog } from "./MetadataFieldsDialog"; -import type { SingleConfig } from "../lib/config-utils"; +import { DataGridPagination } from "./ui/data-grid-pagination"; +import { Skeleton } from "./ui/skeleton"; + +interface MetadataTablesEditorProps { + configIndex: number; +} + +interface TableRow { + tableName: string; + isIncluded: boolean; + fieldCount?: number; + includedFieldCount?: number; +} // Memoize model functions outside component to ensure stable references const coreRowModel = getCoreRowModel(); const sortedRowModel = getSortedRowModel(); const filteredRowModel = getFilteredRowModel(); +const paginationRowModel = getPaginationRowModel(); -// Stable empty array to prevent infinite re-renders -const EMPTY_TABLES_CONFIG: any[] = []; - -interface MetadataTablesEditorProps { +// Helper component to fetch and display field count for a table +function FieldCountCell({ + tableName, + isIncluded, + configIndex, +}: { + tableName: string; + isIncluded: boolean; configIndex: number; -} +}) { + const { control } = useFormContext<{ config: SingleConfig[] }>(); + const { data: parsedMetadata, isLoading } = useTableMetadata( + configIndex, + tableName, + isIncluded, // Only fetch when table is included + ); -interface TableRow { - tableName: string; - totalFieldCount: number; - includedFieldCount: number; - entityType: string; - isExcluded: boolean; + // Watch the tables config directly to ensure reactivity + const allTablesConfig = useWatch({ + control, + name: `config.${configIndex}.tables` as const, + }); + + const tableConfig = Array.isArray(allTablesConfig) + ? allTablesConfig.find((t) => t?.tableName === tableName) + : undefined; + const fieldsConfig = tableConfig?.fields ?? 
[]; + + const fieldCount = useMemo(() => { + if (!parsedMetadata?.entitySets || !parsedMetadata?.entityTypes) { + return undefined; + } + + const entitySet = Object.values(parsedMetadata.entitySets).find( + (es) => es.Name === tableName, + ); + if (!entitySet) return undefined; + + const entityType = parsedMetadata.entityTypes[entitySet.EntityType]; + if (!entityType?.Properties) return undefined; + + const properties = entityType.Properties; + // Handle both Map and object formats + if (properties instanceof Map) { + return properties.size; + } else if (typeof properties === "object") { + return Object.keys(properties).length; + } + return undefined; + }, [parsedMetadata, tableName]); + + const includedFieldCount = useMemo(() => { + if (fieldCount === undefined) return undefined; + + // Count excluded fields + const excludedFields = fieldsConfig.filter( + (f) => f?.exclude === true, + ).length; + + // Total fields minus excluded fields + return fieldCount - excludedFields; + }, [fieldCount, fieldsConfig]); + + if (isLoading) { + return ; + } + + if (fieldCount === undefined) { + return -; + } + + // Show "included / total" if some fields are excluded, otherwise just total + if (includedFieldCount !== undefined && includedFieldCount < fieldCount) { + return ( + + {includedFieldCount} / {fieldCount} + + ); + } + + return {fieldCount}; } export function MetadataTablesEditor({ configIndex, }: MetadataTablesEditorProps) { - const { - data: parsedMetadata, - isLoading, - isError, - error, - fileExists, - } = useParseMetadata(configIndex); - const { control, setValue } = useFormContext<{ config: SingleConfig[] }>(); const config = useWatch({ control, name: `config.${configIndex}` as const, }); + // Get tables config - memoize to prevent unnecessary recalculations + const tablesConfig = useMemo(() => { + if (config?.type === "fmodata" && "tables" in config) { + return config.tables ?? []; + } + return []; + }, [config]); + + // Local state to control whether to enable the query + // Initialize based on whether there are tables in the config + const [shouldLoadTables, setShouldLoadTables] = useState(() => { + if (config?.type === "fmodata" && "tables" in config) { + return (config.tables ?? []).length > 0; + } + return false; + }); + + // Update shouldLoadTables when tablesConfig changes (e.g., user adds tables manually) + useEffect(() => { + if (tablesConfig.length > 0 && !shouldLoadTables) { + setShouldLoadTables(true); + } + }, [tablesConfig.length, shouldLoadTables]); + + // Check connection test status + const { status: testStatus, errorDetails } = useTestConnection(configIndex); + const hasConnectionError = testStatus === "error"; + + const { + tables, + isLoading: isLoadingTables, + isError: isErrorTables, + error: errorTables, + refetch: refetchTables, + } = useListTables(configIndex, shouldLoadTables); + const [selectedTableName, setSelectedTableName] = useState( null, ); const [isDialogOpen, setIsDialogOpen] = useState(false); - const [globalFilter, setGlobalFilter] = useState(""); - - // Get tables config, ensuring it exists - use stable reference to prevent infinite re-renders - const tablesConfig = - config?.type === "fmodata" - ? (config.tables ?? 
EMPTY_TABLES_CONFIG) - : EMPTY_TABLES_CONFIG; + const [searchFilter, setSearchFilter] = useState(""); // Use a ref to store the latest config to avoid unstable callback dependencies const configRef = useRef(config); configRef.current = config; - // Helper to toggle table exclusion - use ref to avoid dependency on config - const toggleTableExclude = useCallback( - (tableName: string, exclude: boolean) => { + // Helper to toggle table inclusion + const toggleTableInclude = useCallback( + (tableName: string, include: boolean) => { const currentConfig = configRef.current; if (currentConfig?.type !== "fmodata") return; @@ -82,31 +189,24 @@ export function MetadataTablesEditor({ (t) => t?.tableName === tableName, ); - if (exclude) { - // Set exclude to true - if (tableIndex >= 0) { - // Update existing entry - const newTables = [...currentTables]; - newTables[tableIndex] = { ...newTables[tableIndex]!, exclude: true }; - setValue(`config.${configIndex}.tables` as any, newTables, { - shouldDirty: true, - }); - } else { - // Add new entry + if (include) { + // Add table if not already present + if (tableIndex < 0) { setValue( `config.${configIndex}.tables` as any, - [...currentTables, { tableName, exclude: true }], + [...currentTables, { tableName }], { shouldDirty: true }, ); } } else { - // Remove exclude (or remove entire entry if no other config) + // Remove table if present if (tableIndex >= 0) { const tableConfig = currentTables[tableIndex]!; - const { exclude: _, ...rest } = tableConfig; - - if (Object.keys(rest).length === 1 && rest.tableName) { - // Only tableName left, remove entire entry + // If table has other config (like fields), we might want to keep it + // But for now, if it's just tableName, remove it + const { tableName: _, ...rest } = tableConfig; + if (Object.keys(rest).length === 0) { + // No other config, remove entirely const newTables = currentTables.filter((_, i) => i !== tableIndex); setValue( `config.${configIndex}.tables` as any, @@ -114,12 +214,13 @@ export function MetadataTablesEditor({ { shouldDirty: true }, ); } else { - // Keep other properties - const newTables = [...currentTables]; - newTables[tableIndex] = rest as any; - setValue(`config.${configIndex}.tables` as any, newTables, { - shouldDirty: true, - }); + // Has other config, but we're removing it anyway per user request + const newTables = currentTables.filter((_, i) => i !== tableIndex); + setValue( + `config.${configIndex}.tables` as any, + newTables.length > 0 ? newTables : undefined, + { shouldDirty: true }, + ); } } } @@ -127,179 +228,41 @@ export function MetadataTablesEditor({ [configIndex, setValue], ); - // Helper to include all tables - const includeAllTables = useCallback(() => { - const currentConfig = configRef.current; - if (currentConfig?.type !== "fmodata" || !parsedMetadata?.entitySets) - return; - - const currentTables = currentConfig.tables ?? []; - const allTableNames = Object.values(parsedMetadata.entitySets).map( - (es) => es.Name, - ); - - // Remove exclude flags from all tables - const newTables = currentTables - .map((tableConfig) => { - const tableName = tableConfig?.tableName; - if (tableName && allTableNames.includes(tableName)) { - const { exclude: _, ...rest } = tableConfig; - // If only tableName is left, don't include it - if (Object.keys(rest).length === 1 && rest.tableName) { - return null; - } - return Object.keys(rest).length > 1 ? 
rest : null; - } - return tableConfig; - }) - .filter((t) => t !== null) as any[]; - - setValue( - `config.${configIndex}.tables` as any, - newTables.length > 0 ? newTables : undefined, - { shouldDirty: true }, - ); - }, [configIndex, setValue, parsedMetadata]); - - // Helper to exclude all tables - const excludeAllTables = useCallback(() => { - const currentConfig = configRef.current; - if (currentConfig?.type !== "fmodata" || !parsedMetadata?.entitySets) - return; - - const currentTables = currentConfig.tables ?? []; - const allTableNames = Object.values(parsedMetadata.entitySets).map( - (es) => es.Name, - ); - - // Create a map of existing table configs - const tableConfigMap = new Map(currentTables.map((t) => [t?.tableName, t])); - - // Update or add exclude flag for all tables - const newTables = allTableNames.map((tableName) => { - const existing = tableConfigMap.get(tableName); - if (existing) { - return { ...existing, exclude: true }; - } - return { tableName, exclude: true }; - }); - - setValue(`config.${configIndex}.tables` as any, newTables, { - shouldDirty: true, - }); - }, [configIndex, setValue, parsedMetadata]); - - // Prepare table data with field counts and include status - const tableData = useMemo(() => { - if (!parsedMetadata?.entitySets || !parsedMetadata?.entityTypes) { - return []; - } - - return Object.values(parsedMetadata.entitySets).map((entitySet) => { - // Find the corresponding entity type to get field count - const entityType = parsedMetadata.entityTypes[entitySet.EntityType]; - - // Handle both Map and object formats for Properties - let totalFieldCount = 0; - let fieldNames: string[] = []; - if (entityType?.Properties) { - if (entityType.Properties instanceof Map) { - totalFieldCount = entityType.Properties.size; - fieldNames = Array.from(entityType.Properties.keys()); - } else if (typeof entityType.Properties === "object") { - fieldNames = Object.keys(entityType.Properties); - totalFieldCount = fieldNames.length; - } - } - - const tableConfig = Array.isArray(tablesConfig) - ? 
tablesConfig.find((t) => t?.tableName === entitySet.Name) - : undefined; - const isExcluded = tableConfig?.exclude === true; - - // Count excluded fields - const excludedFieldsSet = new Set(); - if (tableConfig?.fields && Array.isArray(tableConfig.fields)) { - for (const fieldConfig of tableConfig.fields) { - if (fieldConfig?.exclude === true && fieldConfig.fieldName) { - excludedFieldsSet.add(fieldConfig.fieldName); - } - } - } - - const includedFieldCount = totalFieldCount - excludedFieldsSet.size; - - return { - tableName: entitySet.Name, - totalFieldCount, - includedFieldCount, - entityType: entitySet.EntityType, - isExcluded, - }; - }); - }, [parsedMetadata, tablesConfig]); - - // Check if all tables are included or excluded - const allTablesIncluded = useMemo(() => { - return tableData.length > 0 && tableData.every((row) => !row.isExcluded); - }, [tableData]); - - const allTablesExcluded = useMemo(() => { - return tableData.length > 0 && tableData.every((row) => row.isExcluded); - }, [tableData]); - - // Define columns - const columns = useMemo[]>( + // Convert tables to table rows (filtering will be handled by DataGrid) + const tableRows = useMemo(() => { + if (!tables) return []; + return tables.map((tableName) => ({ + tableName, + isIncluded: tablesConfig.some((t) => t?.tableName === tableName), + })); + }, [tables, tablesConfig]); + + // Define columns for tables table + const tablesColumns = useMemo[]>( () => [ { - accessorKey: "isExcluded", + accessorKey: "isIncluded", header: ({ column }) => ( - - { - e.stopPropagation(); - includeAllTables(); - }} - disabled={allTablesIncluded} - > - Include All - - { - e.stopPropagation(); - excludeAllTables(); - }} - disabled={allTablesExcluded} - > - Exclude All - - - } - /> + ), enableSorting: true, - size: 60, - minSize: 60, - maxSize: 60, + size: 100, cell: (info) => { const row = info.row.original; - const isExcluded = row.isExcluded; return ( -
+
{ - toggleTableExclude(row.tableName, !checked); + toggleTableInclude(row.tableName, checked); }} - onClick={(e) => e.stopPropagation()} />
); }, + meta: { + skeleton: , + }, }, { accessorKey: "tableName", @@ -312,88 +275,109 @@ export function MetadataTablesEditor({ return ( {info.getValue() as string} ); }, + meta: { + skeleton: , + }, }, { - accessorFn: (row) => row.includedFieldCount, id: "fieldCount", header: ({ column }) => ( - + ), - enableSorting: true, + enableSorting: false, + size: 100, cell: (info) => { const row = info.row.original; - const hasExclusions = row.includedFieldCount !== row.totalFieldCount; + if (!row.isIncluded) { + return null; + } return ( - - {hasExclusions - ? `${row.includedFieldCount} / ${row.totalFieldCount}` - : row.totalFieldCount} - + ); }, + meta: { + skeleton: , + }, + }, + { + id: "actions", + header: () => null, + enableSorting: false, + size: 150, + cell: (info) => { + const row = info.row.original; + return ( +
+ +
+ ); + }, + meta: { + skeleton: , + }, }, ], - [ - toggleTableExclude, - includeAllTables, - excludeAllTables, - allTablesIncluded, - allTablesExcluded, - ], + [toggleTableInclude], ); - // Create table instance - use memoized model functions for stable references - const table = useReactTable({ - data: tableData, - columns, + // Create tables table instance + const tablesTable = useReactTable({ + data: tableRows, + columns: tablesColumns, getCoreRowModel: coreRowModel, getSortedRowModel: sortedRowModel, getFilteredRowModel: filteredRowModel, + getPaginationRowModel: paginationRowModel, globalFilterFn: "includesString", state: { - globalFilter, + globalFilter: searchFilter, + }, + onGlobalFilterChange: setSearchFilter, + initialState: { + pagination: { + pageSize: 10, + }, }, - onGlobalFilterChange: setGlobalFilter, }); - // Handle row click to open dialog with fields - const handleRowClick = (row: TableRow) => { - setSelectedTableName(row.tableName); - setIsDialogOpen(true); - }; - - if (fileExists === false) { - return ( -
-

OData Tables

-

- Metadata file does not exist. Download the metadata file first to see - available tables. -

-
- ); - } - - if (isLoading) { + // Show loading state only when actively loading + if (isLoadingTables && shouldLoadTables) { return (

OData Tables

- Parsing metadata... + Loading tables...
); } - if (isError) { + // Show error state only if we attempted to load + if (isErrorTables && shouldLoadTables) { return (

OData Tables

@@ -401,9 +385,11 @@ export function MetadataTablesEditor({
-
Failed to parse metadata
- {error instanceof Error && ( -
{error.message}
+
Failed to load tables
+ {errorTables instanceof Error && ( +
+ {errorTables.message} +
)}
@@ -412,14 +398,75 @@ export function MetadataTablesEditor({ ); } - if (!parsedMetadata || tableData.length === 0) { + // Show button to load tables if not yet loaded + if (!shouldLoadTables) { + // Show connection warning if there are connection errors + if (hasConnectionError) { + return ( +
+

OData Tables

+
+
+ +
+
+
Connection test failed
+ {errorDetails?.message && ( +
+ {errorDetails.message} +
+ )} +
+ Fix the connection issue in the "Server Connection Settings" + dialog before loading tables. +
+
+
+
+
+
+ ); + } + + // Show button to load tables if connection is good + return ( +
+
+

OData Tables

+
+
+

+ Your connection looks good! Click the button below to pick the + tables you want to generate types for. +

+ +
+
+ ); + } + + // Show empty state only after loading + if (!tables || tables.length === 0) { return (

OData Tables

- {!parsedMetadata - ? "No metadata available." - : "No tables found in metadata."} + No tables found in database.

); @@ -428,25 +475,46 @@ export function MetadataTablesEditor({ return ( <>
-

OData Tables

+
+

OData Tables

+ +
+
setGlobalFilter(e.target.value)} + value={searchFilter} + onChange={(e) => setSearchFilter(e.target.value)} /> + - - + +
+ +
+
+ +
@@ -456,7 +524,6 @@ export function MetadataTablesEditor({ open={isDialogOpen} onOpenChange={setIsDialogOpen} tableName={selectedTableName} - parsedMetadata={parsedMetadata} configIndex={configIndex} /> diff --git a/packages/typegen/web/src/components/TableItemEditor.tsx b/packages/typegen/web/src/components/TableItemEditor.tsx new file mode 100644 index 00000000..95f263d9 --- /dev/null +++ b/packages/typegen/web/src/components/TableItemEditor.tsx @@ -0,0 +1,84 @@ +import { useFormContext, useWatch } from "react-hook-form"; +import { Button } from "./ui/button"; +import { + FormControl, + FormField, + FormItem, + FormMessage, +} from "./ui/form"; +import { SingleConfig } from "../lib/config-utils"; +import { TableSelectorCompact } from "./TableSelectorCompact"; +import { CircleMinus } from "lucide-react"; +import { MetadataFieldsDialog } from "./MetadataFieldsDialog"; +import { useState } from "react"; +import { useTableMetadata } from "../hooks/useTableMetadata"; + +interface TableItemEditorProps { + configIndex: number; + tableIndex: number; + onRemove: () => void; +} + +export function TableItemEditor({ + configIndex, + tableIndex, + onRemove, +}: TableItemEditorProps) { + const { watch } = useFormContext<{ config: SingleConfig[] }>(); + const tableName = watch( + `config.${configIndex}.tables.${tableIndex}.tableName`, + ); + + const [isDialogOpen, setIsDialogOpen] = useState(false); + + // Fetch metadata when dialog is opened + const { data: dialogTableMetadata } = useTableMetadata( + configIndex, + isDialogOpen ? tableName : null, + ); + + return ( + <> +
+
+ +
+
+ {tableName && ( + + )} + +
+
+ + + + ); +} + diff --git a/packages/typegen/web/src/components/TableSelector.tsx b/packages/typegen/web/src/components/TableSelector.tsx new file mode 100644 index 00000000..d180ffb1 --- /dev/null +++ b/packages/typegen/web/src/components/TableSelector.tsx @@ -0,0 +1,150 @@ +import * as React from "react"; +import { Path, useFormContext } from "react-hook-form"; +import { cn } from "@/lib/utils"; +import { Button, ButtonArrow } from "@/components/ui/button"; +import { + Command, + CommandCheck, + CommandEmpty, + CommandGroup, + CommandInput, + CommandItem, + CommandList, +} from "@/components/ui/command"; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from "@/components/ui/popover"; +import { + FormControl, + FormField, + FormItem, + FormLabel, + FormMessage, +} from "./ui/form"; +import { SingleConfig } from "@/lib/config-utils"; +import { InfoTooltip } from "./InfoTooltip"; +import { useListTables } from "../hooks/useListTables"; +import { Loader2, AlertTriangle } from "lucide-react"; + +type FormData = { config: SingleConfig[] }; + +export function TableSelector({ + configIndex, + path, +}: { + configIndex: number; + path: Path; +}) { + const { control } = useFormContext(); + const [open, setOpen] = React.useState(false); + + const { + tables, + isLoading, + isError, + error, + } = useListTables(configIndex); + + // Transform tables array into combobox format + const tableOptions = React.useMemo(() => { + if (!tables) return []; + return tables.map((table) => ({ + value: table, + label: table, + })); + }, [tables]); + + return ( + ( + + + Table Occurrence Name{" "} + + + + + + + + + + + + {isLoading ? ( +
+ + Loading tables... +
+ ) : isError ? ( +
+
+ +
+
+ {error instanceof Error + ? error.message + : "Failed to load tables"} +
+
+
+
+ ) : ( + <> + No table found. + + {tableOptions.map((table) => ( + { + const newValue = + currentValue === field.value + ? "" + : currentValue; + field.onChange(newValue); + setOpen(false); + }} + > + {table.label} + {field.value === table.value && } + + ))} + + + )} +
+
+
+
+
+ +
+ )} + /> + ); +} + +export default TableSelector; + diff --git a/packages/typegen/web/src/components/TableSelectorCompact.tsx b/packages/typegen/web/src/components/TableSelectorCompact.tsx new file mode 100644 index 00000000..5e81da7e --- /dev/null +++ b/packages/typegen/web/src/components/TableSelectorCompact.tsx @@ -0,0 +1,143 @@ +import * as React from "react"; +import { Path, useFormContext } from "react-hook-form"; +import { cn } from "@/lib/utils"; +import { Button, ButtonArrow } from "@/components/ui/button"; +import { + Command, + CommandCheck, + CommandEmpty, + CommandGroup, + CommandInput, + CommandItem, + CommandList, +} from "@/components/ui/command"; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from "@/components/ui/popover"; +import { + FormControl, + FormField, + FormItem, + FormMessage, +} from "./ui/form"; +import { SingleConfig } from "@/lib/config-utils"; +import { useListTables } from "../hooks/useListTables"; +import { Loader2, AlertTriangle } from "lucide-react"; + +type FormData = { config: SingleConfig[] }; + +export function TableSelectorCompact({ + configIndex, + path, +}: { + configIndex: number; + path: Path; +}) { + const { control } = useFormContext(); + const [open, setOpen] = React.useState(false); + + const { + tables, + isLoading, + isError, + error, + } = useListTables(configIndex); + + // Transform tables array into combobox format + const tableOptions = React.useMemo(() => { + if (!tables) return []; + return tables.map((table) => ({ + value: table, + label: table, + })); + }, [tables]); + + return ( + ( + + + + + + + + + + + {isLoading ? ( +
+ + Loading tables... +
+ ) : isError ? ( +
+
+ +
+
+ {error instanceof Error + ? error.message + : "Failed to load tables"} +
+
+
+
+ ) : ( + <> + No table found. + + {tableOptions.map((table) => ( + { + const newValue = + currentValue === field.value + ? "" + : currentValue; + field.onChange(newValue); + setOpen(false); + }} + > + {table.label} + {field.value === table.value && } + + ))} + + + )} +
+
+
+
+
+ +
+      )}
+    />
+  );
+}
+
+
diff --git a/packages/typegen/web/src/components/data-grid/skeleton.tsx b/packages/typegen/web/src/components/data-grid/skeleton.tsx
new file mode 100644
index 00000000..c48f4c53
--- /dev/null
+++ b/packages/typegen/web/src/components/data-grid/skeleton.tsx
@@ -0,0 +1,434 @@
+import { useMemo, useState } from "react";
+import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar";
+import { Badge } from "@/components/ui/badge";
+import { Button } from "@/components/ui/button";
+import {
+  Card,
+  CardFooter,
+  CardHeader,
+  CardTable,
+  CardTitle,
+  CardToolbar,
+} from "@/components/ui/card";
+import { DataGrid } from "@/components/ui/data-grid";
+import { DataGridColumnHeader } from "@/components/ui/data-grid-column-header";
+import { DataGridPagination } from "@/components/ui/data-grid-pagination";
+import { DataGridTable } from "@/components/ui/data-grid-table";
+import { ScrollArea, ScrollBar } from "@/components/ui/scroll-area";
+import { Skeleton } from "@/components/ui/skeleton";
+import {
+  ColumnDef,
+  getCoreRowModel,
+  getFilteredRowModel,
+  getPaginationRowModel,
+  getSortedRowModel,
+  PaginationState,
+  SortingState,
+  useReactTable,
+} from "@tanstack/react-table";
+
+interface IData {
+  id: string;
+  name: string;
+  availability: "online" | "away" | "busy" | "offline";
+  avatar: string;
+  status: "active" | "inactive";
+  flag: string; // Emoji flags
+  email: string;
+  company: string;
+  role: string;
+  joined: string;
+  location: string;
+  balance: number;
+}
+
+const demoData: IData[] = [
+  {
+    id: "1",
+    name: "Kathryn Campbell",
+    availability: "online",
+    avatar: "1.png",
+    status: "active",
+    flag: "🇺🇸",
+    email: "kathryn@apple.com",
+    company: "Apple",
+    role: "CEO",
+    joined: "2021-04-15",
+    location: "San Francisco, USA",
+    balance: 5143.03,
+  },
+  {
+    id: "2",
+    name: "Robert Smith",
+    availability: "away",
+    avatar: "2.png",
+    status: "inactive",
+    flag: "🇬🇧",
+    email: "robert@openai.com",
+    company: "OpenAI",
+    role: "CTO",
+    joined: "2020-07-20",
+    location: "London, UK",
+    balance: 4321.87,
+  },
+  {
+    id: "3",
+    name: "Sophia Johnson",
+    availability: "busy",
+    avatar: "3.png",
+    status: "active",
+    flag: "🇨🇦",
+    email: "sophia@meta.com",
+    company: "Meta",
+    role: "Designer",
+    joined: "2019-03-12",
+    location: "Toronto, Canada",
+    balance: 7654.98,
+  },
+  {
+    id: "4",
+    name: "Lucas Walker",
+    availability: "offline",
+    avatar: "4.png",
+    status: "inactive",
+    flag: "🇦🇺",
+    email: "lucas@tesla.com",
+    company: "Tesla",
+    role: "Developer",
+    joined: "2022-01-18",
+    location: "Sydney, Australia",
+    balance: 3456.45,
+  },
+  {
+    id: "5",
+    name: "Emily Davis",
+    availability: "online",
+    avatar: "5.png",
+    status: "active",
+    flag: "🇩🇪",
+    email: "emily@sap.com",
+    company: "SAP",
+    role: "Lawyer",
+    joined: "2023-05-23",
+    location: "Berlin, Germany",
+    balance: 9876.54,
+  },
+  {
+    id: "6",
+    name: "James Lee",
+    availability: "away",
+    avatar: "6.png",
+    status: "active",
+    flag: "🇲🇾",
+    email: "james@keenthemes.com",
+    company: "Keenthemes",
+    role: "Director",
+    joined: "2018-11-30",
+    location: "Kuala Lumpur, MY",
+    balance: 6214.22,
+  },
+  {
+    id: "7",
+    name: "Isabella Martinez",
+    availability: "busy",
+    avatar: "7.png",
+    status: "inactive",
+    flag: "🇪🇸",
+    email: "isabella@bbva.es",
+    company: "BBVA",
+    role: "Product Manager",
+    joined: "2021-06-14",
+    location: "Barcelona, Spain",
+    balance: 5321.77,
+  },
+  {
+    id: "8",
+    name: "Benjamin Harris",
+    availability: "offline",
+    avatar: "8.png",
+    status: "active",
+    flag: "🇯🇵",
+    email: "benjamin@sony.jp",
+    company: "Sony",
+    role: "Marketing Lead",
+    joined: "2020-10-22",
+    location: "Tokyo, Japan",
+    balance: 8452.39,
+  },
+  {
+    id: "9",
+    name: "Olivia Brown",
+    availability: "online",
+    avatar: "9.png",
+    status: "active",
+    flag: "🇫🇷",
+    email: "olivia@lvmh.fr",
+    company: "LVMH",
+    role: "Data Scientist",
+    joined: "2019-09-17",
+    location: "Paris, France",
+    balance: 7345.1,
+  },
+  {
+    id: "10",
+    name: "Michael Clark",
+    availability: "away",
+    avatar: "10.png",
+    status: "inactive",
+    flag: "🇮🇹",
+    email: "michael@eni.it",
+    company: "ENI",
+    role: "Engineer",
+    joined: "2023-02-11",
+    location: "Milan, Italy",
+    balance: 5214.88,
+  },
+  {
+    id: "11",
+    name: "Ava Wilson",
+    availability: "busy",
+    avatar: "11.png",
+    status: "active",
+    flag: "🇧🇷",
+    email: "ava@vale.br",
+    company: "Vale",
+    role: "Software Engineer",
+    joined: "2022-12-01",
+    location: "Rio de Janeiro, Brazil",
+    balance: 9421.5,
+  },
+  {
+    id: "12",
+    name: "David Young",
+    availability: "offline",
+    avatar: "12.png",
+    status: "active",
+    flag: "🇮🇳",
+    email: "david@tata.in",
+    company: "Tata",
+    role: "Sales Manager",
+    joined: "2020-03-27",
+    location: "Mumbai, India",
+    balance: 4521.67,
+  },
+];
+
+export default function DataGridDemo() {
+  const [pagination, setPagination] = useState({
+    pageIndex: 0,
+    pageSize: 5,
+  });
+  const [sorting, setSorting] = useState([
+    { id: "name", desc: true },
+  ]);
+  const [isLoading, setIsLoading] = useState(true);
+
+  const handleToggleLoading = () => {
+    setIsLoading((prev) => !prev);
+  };
+
+  const columns = useMemo[]>(
+    () => [
+      {
+        accessorKey: "name",
+        id: "name",
+        header: ({ column }) => (
+
+        ),
+        cell: ({ row }) => {
+          return (
+
+ + + N + +
+
+ {row.original.name} +
+
+ {row.original.email} +
+
+
+ ); + }, + meta: { + skeleton: ( +
+ +
+ + +
+
+ ), + }, + size: 200, + enableSorting: true, + enableHiding: false, + enableResizing: true, + }, + { + accessorKey: "email", + id: "email", + header: ({ column }) => ( + + ), + cell: (info) => ( + + {info.getValue() as string} + + ), + size: 150, + meta: { + headerClassName: "", + cellClassName: "text-left", + skeleton: , + }, + enableSorting: true, + enableHiding: true, + enableResizing: true, + }, + { + accessorKey: "location", + id: "location", + header: ({ column }) => ( + + ), + cell: ({ row }) => { + return ( +
+ {row.original.flag} +
+ {row.original.location} +
+
+ ); + }, + size: 160, + meta: { + headerClassName: "", + cellClassName: "text-start", + skeleton: , + }, + enableSorting: true, + enableHiding: true, + enableResizing: true, + }, + { + accessorKey: "status", + id: "status", + header: ({ column }) => ( + + ), + cell: ({ row }) => { + const status = row.original.status; + + if (status == "active") { + return ( + + Approved + + ); + } else { + return ( + + Pending + + ); + } + }, + meta: { + skeleton: , + }, + size: 100, + enableSorting: true, + enableHiding: true, + enableResizing: false, + }, + ], + [], + ); + + const [columnOrder, setColumnOrder] = useState( + columns.map((column) => column.id as string), + ); + + const table = useReactTable({ + columns, + data: demoData, + pageCount: Math.ceil((demoData?.length || 0) / pagination.pageSize), + getRowId: (row: IData) => row.id, + state: { + pagination, + sorting, + columnOrder, + }, + onPaginationChange: setPagination, + onSortingChange: setSorting, + onColumnOrderChange: setColumnOrder, + getCoreRowModel: getCoreRowModel(), + getFilteredRowModel: getFilteredRowModel(), + getPaginationRowModel: getPaginationRowModel(), + getSortedRowModel: getSortedRowModel(), + }); + + return ( + + + + Employees + + + + + + + + + + + + + + + + ); +} diff --git a/packages/typegen/web/src/components/theme-provider.tsx b/packages/typegen/web/src/components/theme-provider.tsx index 8f098f03..60378b2b 100644 --- a/packages/typegen/web/src/components/theme-provider.tsx +++ b/packages/typegen/web/src/components/theme-provider.tsx @@ -76,3 +76,6 @@ export const useTheme = () => { + + + diff --git a/packages/typegen/web/src/components/ui/avatar.tsx b/packages/typegen/web/src/components/ui/avatar.tsx new file mode 100644 index 00000000..288f5fd1 --- /dev/null +++ b/packages/typegen/web/src/components/ui/avatar.tsx @@ -0,0 +1,67 @@ +'use client'; + +import * as React from 'react'; +import { cn } from '@/lib/utils'; +import { cva, VariantProps } from 'class-variance-authority'; +import { Avatar as AvatarPrimitive } from 'radix-ui'; + +const avatarStatusVariants = cva('flex items-center rounded-full size-2 border-2 border-background', { + variants: { + variant: { + online: 'bg-green-600', + offline: 'bg-zinc-400 dark:bg-zinc-500', + busy: 'bg-yellow-600', + away: 'bg-blue-600', + }, + }, + defaultVariants: { + variant: 'online', + }, +}); + +function Avatar({ className, ...props }: React.ComponentProps) { + return ( + + ); +} + +function AvatarImage({ className, ...props }: React.ComponentProps) { + return ( +
+ +
+ ); +} + +function AvatarFallback({ className, ...props }: React.ComponentProps) { + return ( + + ); +} + +function AvatarIndicator({ className, ...props }: React.HTMLAttributes) { + return ( +
+ ); +} + +function AvatarStatus({ + className, + variant, + ...props +}: React.HTMLAttributes & VariantProps) { + return
; +} + +export { Avatar, AvatarFallback, AvatarImage, AvatarIndicator, AvatarStatus, avatarStatusVariants }; diff --git a/packages/typegen/web/src/components/ui/card.tsx b/packages/typegen/web/src/components/ui/card.tsx new file mode 100644 index 00000000..fe068010 --- /dev/null +++ b/packages/typegen/web/src/components/ui/card.tsx @@ -0,0 +1,147 @@ +'use client'; + +import * as React from 'react'; +import { cn } from '@/lib/utils'; +import { cva, type VariantProps } from 'class-variance-authority'; + +// Define CardContext +type CardContextType = { + variant: 'default' | 'accent'; +}; + +const CardContext = React.createContext({ + variant: 'default', // Default value +}); + +// Hook to use CardContext +const useCardContext = () => { + const context = React.useContext(CardContext); + if (!context) { + throw new Error('useCardContext must be used within a Card component'); + } + return context; +}; + +// Variants +const cardVariants = cva('flex flex-col items-stretch text-card-foreground rounded-xl', { + variants: { + variant: { + default: 'bg-card border border-border shadow-xs black/5', + accent: 'bg-muted shadow-xs p-1', + }, + }, + defaultVariants: { + variant: 'default', + }, +}); + +const cardHeaderVariants = cva('flex items-center justify-between flex-wrap px-5 min-h-14 gap-2.5', { + variants: { + variant: { + default: 'border-b border-border', + accent: '', + }, + }, + defaultVariants: { + variant: 'default', + }, +}); + +const cardContentVariants = cva('grow p-5', { + variants: { + variant: { + default: '', + accent: 'bg-card rounded-t-xl [&:last-child]:rounded-b-xl', + }, + }, + defaultVariants: { + variant: 'default', + }, +}); + +const cardTableVariants = cva('grid grow', { + variants: { + variant: { + default: '', + accent: 'bg-card rounded-xl', + }, + }, + defaultVariants: { + variant: 'default', + }, +}); + +const cardFooterVariants = cva('flex items-center px-5 min-h-14', { + variants: { + variant: { + default: 'border-t border-border', + accent: 'bg-card rounded-b-xl mt-[2px]', + }, + }, + defaultVariants: { + variant: 'default', + }, +}); + +// Card Component +function Card({ + className, + variant = 'default', + ...props +}: React.HTMLAttributes & VariantProps) { + return ( + +
+ + ); +} + +// CardHeader Component +function CardHeader({ className, ...props }: React.HTMLAttributes) { + const { variant } = useCardContext(); + return
; +} + +// CardContent Component +function CardContent({ className, ...props }: React.HTMLAttributes) { + const { variant } = useCardContext(); + return
; +} + +// CardTable Component +function CardTable({ className, ...props }: React.HTMLAttributes) { + const { variant } = useCardContext(); + return
; +} + +// CardFooter Component +function CardFooter({ className, ...props }: React.HTMLAttributes) { + const { variant } = useCardContext(); + return
; +} + +// Other Components +function CardHeading({ className, ...props }: React.HTMLAttributes) { + return
; +} + +function CardToolbar({ className, ...props }: React.HTMLAttributes) { + return
; +} + +function CardTitle({ className, ...props }: React.HTMLAttributes) { + return ( +

+ ); +} + +function CardDescription({ className, ...props }: React.HTMLAttributes) { + return
; +} + +// Exports +export { Card, CardContent, CardDescription, CardFooter, CardHeader, CardHeading, CardTable, CardTitle, CardToolbar }; diff --git a/packages/typegen/web/src/components/ui/scroll-area.tsx b/packages/typegen/web/src/components/ui/scroll-area.tsx new file mode 100644 index 00000000..db620816 --- /dev/null +++ b/packages/typegen/web/src/components/ui/scroll-area.tsx @@ -0,0 +1,53 @@ +'use client'; + +import * as React from 'react'; +import { cn } from '@/lib/utils'; +import { ScrollArea as ScrollAreaPrimitive } from 'radix-ui'; + +function ScrollArea({ + className, + viewportClassName, + children, + viewportRef, + ...props +}: React.ComponentProps & { + viewportRef?: React.Ref; + viewportClassName?: string; +}) { + return ( + + + {children} + + + + + ); +} + +function ScrollBar({ + className, + orientation = 'vertical', + ...props +}: React.ComponentProps) { + return ( + + + + ); +} + +export { ScrollArea, ScrollBar }; diff --git a/packages/typegen/web/src/components/ui/tooltip.tsx b/packages/typegen/web/src/components/ui/tooltip.tsx index 3bc4f26f..8c0f3afe 100644 --- a/packages/typegen/web/src/components/ui/tooltip.tsx +++ b/packages/typegen/web/src/components/ui/tooltip.tsx @@ -52,3 +52,6 @@ export { Tooltip, TooltipTrigger, TooltipContent, TooltipProvider }; + + + diff --git a/packages/typegen/web/src/hooks/useHealthCheck.ts b/packages/typegen/web/src/hooks/useHealthCheck.ts new file mode 100644 index 00000000..ae3b35b1 --- /dev/null +++ b/packages/typegen/web/src/hooks/useHealthCheck.ts @@ -0,0 +1,61 @@ +import { useRef } from "react"; +import { useQuery } from "@tanstack/react-query"; +import { client } from "../lib/api"; + +/** + * Simple health check function that pings the config endpoint. + * Returns true if server is reachable, false otherwise. + */ +async function checkHealth(): Promise { + try { + const res = await client.api.config.$get(); + return res.ok; + } catch (error) { + // Connection errors indicate server is down + return false; + } +} + +/** + * Hook to periodically check if the server is reachable. + * Returns true if the server is healthy, false if unreachable. + * Only reports unhealthy after having had at least one successful connection. + */ +export function useHealthCheck(options?: { + interval?: number; // Polling interval in milliseconds (default: 5000) + enabled?: boolean; // Whether to enable health checking (default: true) +}) { + const { interval = 5000, enabled = true } = options || {}; + const hasConnectedRef = useRef(false); // Track if we've ever successfully connected + + const { data, isFetching: isChecking } = useQuery({ + queryKey: ["health-check"], + queryFn: checkHealth, + enabled, + refetchInterval: interval, + retry: false, // Don't retry - we want to detect failures immediately + staleTime: 0, // Always consider stale to ensure polling + gcTime: 0, // Don't cache health check results + }); + + // Determine health status + // If we got a successful response (data === true), mark as healthy and track connection + // If we got data === false, only report unhealthy if we've connected before + let isHealthy: boolean; + if (data === true) { + hasConnectedRef.current = true; + isHealthy = true; + } else if (data === false) { + // Only report unhealthy if we've connected before + // This prevents false positives on initial page load + isHealthy = hasConnectedRef.current ? 
false : true; + } else { + // Still loading or no data yet - default to healthy + isHealthy = true; + } + + return { + isHealthy, + isChecking, + }; +} diff --git a/packages/typegen/web/src/hooks/useListTables.ts b/packages/typegen/web/src/hooks/useListTables.ts new file mode 100644 index 00000000..56ce25e4 --- /dev/null +++ b/packages/typegen/web/src/hooks/useListTables.ts @@ -0,0 +1,59 @@ +import { useQuery } from "@tanstack/react-query"; +import { useWatch, useFormContext } from "react-hook-form"; +import { client } from "../lib/api"; +import type { SingleConfig } from "../lib/config-utils"; + +export function useListTables(configIndex: number, enabled?: boolean) { + const { control } = useFormContext<{ config: SingleConfig[] }>(); + + // Watch the config at the given index + const config = useWatch({ + control, + name: `config.${configIndex}` as const, + }); + + // Only query if config is fmodata type and enabled is true (or undefined for backward compatibility) + const shouldQuery = config?.type === "fmodata" && (enabled ?? true); + + const { data, error, isLoading, isError, refetch } = useQuery<{ + tables: string[]; + }>({ + queryKey: ["listTables", configIndex], + queryFn: async () => { + if (!config || config.type !== "fmodata") { + throw new Error("Config not found or invalid type"); + } + + const res = await client.api["list-tables"].$get({ + query: { + config: JSON.stringify(config), + }, + }); + + if (!res.ok) { + const errorData = (await res.json().catch(() => ({}))) as { + error?: string; + }; + throw new Error(errorData.error || "Failed to list tables"); + } + + const result = await res.json(); + return result as { tables: string[] }; + }, + enabled: !!shouldQuery, + staleTime: Infinity, // Never consider data stale while page is open + refetchOnWindowFocus: false, + refetchOnMount: false, + refetchOnReconnect: false, + retry: false, + }); + + return { + tables: data?.tables ?? 
[], + error, + isLoading, + isError, + refetch, + }; +} + diff --git a/packages/typegen/web/src/hooks/useRunTypegen.ts b/packages/typegen/web/src/hooks/useRunTypegen.ts index 898cc4f3..025cf6da 100644 --- a/packages/typegen/web/src/hooks/useRunTypegen.ts +++ b/packages/typegen/web/src/hooks/useRunTypegen.ts @@ -18,3 +18,6 @@ export function useRunTypegen() { }; } + + + diff --git a/packages/typegen/web/src/hooks/useParseMetadata.ts b/packages/typegen/web/src/hooks/useTableMetadata.ts similarity index 50% rename from packages/typegen/web/src/hooks/useParseMetadata.ts rename to packages/typegen/web/src/hooks/useTableMetadata.ts index aa17de38..3fe73eae 100644 --- a/packages/typegen/web/src/hooks/useParseMetadata.ts +++ b/packages/typegen/web/src/hooks/useTableMetadata.ts @@ -2,7 +2,6 @@ import { useQuery } from "@tanstack/react-query"; import { useWatch, useFormContext } from "react-hook-form"; import { client } from "../lib/api"; import type { SingleConfig } from "../lib/config-utils"; -import { useFileExists } from "./useFileExists"; // Type for the parsed metadata response export interface ParsedMetadataResponse { @@ -22,7 +21,11 @@ export interface ParsedMetadataResponse { }; } -export function useParseMetadata(configIndex: number) { +export function useTableMetadata( + configIndex: number, + tableName: string | null, + enabled: boolean = true, +) { const { control } = useFormContext<{ config: SingleConfig[] }>(); // Watch the config at the given index @@ -31,57 +34,35 @@ export function useParseMetadata(configIndex: number) { name: `config.${configIndex}` as const, }); - // Get the metadata path - const metadataPath = - config?.type === "fmodata" ? config.metadataPath : undefined; - - // Check if the file exists using the existing hook - const { data: fileExistsData } = useFileExists(metadataPath); - - // Only query if config is fmodata type and file exists + // Only query if enabled, config is fmodata type, and tableName is provided const shouldQuery = + enabled && config?.type === "fmodata" && - fileExistsData?.exists === true && - metadataPath && - metadataPath.trim() !== ""; - - // Create a stable key for the config to use in queryKey - // Use metadataPath as the key since that's what determines if we need to re-parse - const configKey = - config && config.type === "fmodata" - ? 
JSON.stringify({ - type: config.type, - metadataPath: config.metadataPath, - }) - : ""; + tableName !== null && + tableName.trim() !== ""; const { data, error, isLoading, isError } = useQuery({ - queryKey: ["parseMetadata", configIndex, configKey], + queryKey: ["tableMetadata", configIndex, tableName], queryFn: async () => { - if (!config || config.type !== "fmodata") { - throw new Error("Config not found or invalid type"); + if (!config || config.type !== "fmodata" || !tableName) { + throw new Error("Config not found, invalid type, or table name missing"); } - // For complex objects in query params, we need to JSON stringify - // The server will need to parse this, or we could change to POST - // For now, let's try passing it as a JSON string in the query - const res = await client.api["parse-metadata"].$get({ - query: { - config: JSON.stringify(config), - }, + const res = await client.api["table-metadata"].$post({ + json: { config, tableName }, }); if (!res.ok) { const errorData = (await res.json().catch(() => ({}))) as { error?: string; }; - throw new Error(errorData.error || "Failed to parse metadata"); + throw new Error(errorData.error || "Failed to fetch table metadata"); } const result = await res.json(); return result as ParsedMetadataResponse; }, - enabled: !!shouldQuery, + enabled: shouldQuery, staleTime: 5 * 60 * 1000, // 5 minutes - don't refetch often refetchOnWindowFocus: false, refetchOnMount: false, @@ -93,6 +74,6 @@ export function useParseMetadata(configIndex: number) { error, isLoading, isError, - fileExists: fileExistsData?.exists, }; } + diff --git a/packages/typegen/web/src/lib/api.ts b/packages/typegen/web/src/lib/api.ts index 20550e1c..1f34a9f4 100644 --- a/packages/typegen/web/src/lib/api.ts +++ b/packages/typegen/web/src/lib/api.ts @@ -1,5 +1,5 @@ import { hc } from "hono/client"; -import type { ApiApp } from "@proofkit/typegen/api"; +import type { ApiApp } from "@proofkit/typegen/webui-server"; import type { SingleConfig } from "./config-utils"; // Create typed client using the server app type diff --git a/packages/typegen/web/src/main.tsx b/packages/typegen/web/src/main.tsx index 56c7bb39..70afc95c 100644 --- a/packages/typegen/web/src/main.tsx +++ b/packages/typegen/web/src/main.tsx @@ -1,8 +1,10 @@ import React from "react"; import ReactDOM from "react-dom/client"; import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; +import { ReactQueryDevtools } from "@tanstack/react-query-devtools"; import App from "./App"; import { ThemeProvider } from "./components/theme-provider"; +import { ErrorBoundary } from "./components/ErrorBoundary"; import "./index.css"; const queryClient = new QueryClient({ @@ -24,10 +26,13 @@ if (!rootElement) { ReactDOM.createRoot(rootElement).render( - - - - - + + + + + + + + , ); diff --git a/packages/typegen/web/tsconfig.app.json b/packages/typegen/web/tsconfig.app.json index 6bec83e9..e2fe556e 100644 --- a/packages/typegen/web/tsconfig.app.json +++ b/packages/typegen/web/tsconfig.app.json @@ -15,3 +15,6 @@ + + + diff --git a/packages/typegen/web/tsconfig.json b/packages/typegen/web/tsconfig.json index 1e75c774..3348f7fd 100644 --- a/packages/typegen/web/tsconfig.json +++ b/packages/typegen/web/tsconfig.json @@ -20,7 +20,7 @@ "@/*": ["./src/*"], "@proofkit/typegen": ["../src/index.ts"], "@proofkit/typegen/*": ["../src/*"], - "@proofkit/typegen/api": ["../src/server/contract.ts"] + "@proofkit/typegen/webui-server": ["../src/server/app.ts"] } }, "include": ["src", "../src"], diff --git 
a/packages/typegen/web/vite.config.ts b/packages/typegen/web/vite.config.ts index 6b3ba389..eee7f976 100644 --- a/packages/typegen/web/vite.config.ts +++ b/packages/typegen/web/vite.config.ts @@ -2,23 +2,24 @@ import { defineConfig } from "vite"; import react from "@vitejs/plugin-react"; import tailwindcss from "@tailwindcss/vite"; import path from "path"; +import { fileURLToPath } from "url"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); // Plugin to resolve @proofkit/typegen subpath exports const resolveTypegenSubpaths = () => { - const contractPath = path.resolve(__dirname, "../src/server/contract.ts"); + const appPath = path.resolve(__dirname, "../src/server/app.ts"); return { name: "resolve-typegen-subpaths", - enforce: "pre", + enforce: "pre" as const, resolveId(id) { - if ( - id === "@proofkit/typegen/api" || - id === "@proofkit/typegen/api-app" - ) { - return contractPath; + if (id === "@proofkit/typegen/webui-server") { + return appPath; } // Also handle if Vite is trying to resolve it as a file path - if (id.endsWith("/api") && id.includes("@proofkit/typegen")) { - return contractPath; + if (id.endsWith("/webui-server") && id.includes("@proofkit/typegen")) { + return appPath; } return null; }, @@ -27,6 +28,11 @@ const resolveTypegenSubpaths = () => { export default defineConfig({ plugins: [react(), tailwindcss(), resolveTypegenSubpaths()], + define: { + "process.env.NODE_ENV": JSON.stringify( + process.env.NODE_ENV || "development", + ), + }, resolve: { alias: { "@": path.resolve(__dirname, "src"), diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 19c2c6b3..e2690d91 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -149,22 +149,22 @@ importers: version: 2.1.1 fumadocs-core: specifier: 15.7.13 - version: 15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1) fumadocs-docgen: specifier: ^2.1.0 version: 2.1.0 fumadocs-mdx: specifier: 11.6.4 - version: 11.6.4(acorn@8.14.1)(fumadocs-core@15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)) + version: 11.6.4(acorn@8.14.1)(fumadocs-core@15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)) fumadocs-twoslash: specifier: ^3.1.7 - version: 3.1.7(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(fumadocs-ui@15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(typescript@5.9.3) + version: 
3.1.7(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(fumadocs-ui@15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(typescript@5.9.3) fumadocs-typescript: specifier: ^4.0.8 - version: 4.0.8(@types/react@19.1.10)(fumadocs-core@15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(fumadocs-ui@15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11))(typescript@5.9.3) + version: 4.0.8(@types/react@19.1.10)(fumadocs-core@15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(fumadocs-ui@15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11))(typescript@5.9.3) fumadocs-ui: specifier: 15.7.13 - version: 15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11) + version: 15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11) hono: specifier: ^4.9.0 version: 4.9.0 @@ -176,7 +176,7 @@ importers: version: 0.511.0(react@19.1.1) next: specifier: ^15.5.8 - version: 15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) next-themes: specifier: ^0.4.6 version: 0.4.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1) @@ -694,8 +694,6 @@ importers: specifier: ^4.0.7 version: 4.0.15(@types/node@22.17.1)(happy-dom@15.11.7)(jiti@1.21.7)(lightningcss@1.30.2)(msw@2.10.2(@types/node@22.17.1)(typescript@5.9.3))(tsx@4.21.0)(yaml@2.8.0) - packages/tmp: {} - packages/typegen: dependencies: '@clack/prompts': @@ -835,11 +833,14 @@ importers: specifier: ^4.1.18 version: 4.1.18(vite@6.3.5(@types/node@22.17.1)(jiti@2.6.1)(lightningcss@1.30.2)(tsx@4.21.0)(yaml@2.8.0)) '@tanstack/react-query': - specifier: ^5.76.1 - version: 5.76.1(react@19.2.3) + specifier: ^5.90.12 + version: 5.90.12(react@19.2.3) '@tanstack/react-table': specifier: ^8.21.3 version: 8.21.3(react-dom@19.2.3(react@19.2.3))(react@19.2.3) + '@uidotdev/usehooks': + specifier: ^2.4.1 + version: 2.4.1(react-dom@19.2.3(react@19.2.3))(react@19.2.3) class-variance-authority: specifier: ^0.7.1 version: 0.7.1 @@ -880,6 +881,9 @@ importers: specifier: ^4.1.13 version: 4.1.13 devDependencies: + '@tanstack/react-query-devtools': + specifier: ^5.91.1 + version: 5.91.1(@tanstack/react-query@5.90.12(react@19.2.3))(react@19.2.3) '@types/node': specifier: ^22.17.1 version: 22.17.1 @@ -2681,8 +2685,8 @@ packages: resolution: {integrity: sha512-Iq4RxYC7y0pA/hLgcUGpYYs5Vze4qNmJk0Qi1uIrg2bHGpm6A06nbjWcH9h4HQsddkDFFlanLj/zYBH3Sxdb4w==} engines: {node: '>= 20.0.0'} - '@oxc-project/types@0.102.0': - 
resolution: {integrity: sha512-8Skrw405g+/UJPKWJ1twIk3BIH2nXdiVlVNtYT23AXVwpsd79es4K+KYt06Fbnkc5BaTvk/COT2JuCLYdwnCdA==} + '@oxc-project/types@0.103.0': + resolution: {integrity: sha512-bkiYX5kaXWwUessFRSoXFkGIQTmc6dLGdxuRTrC+h8PSnIdZyuXHHlLAeTmOue5Br/a0/a7dHH0Gca6eXn9MKg==} '@oxc-resolver/binding-darwin-arm64@9.0.2': resolution: {integrity: sha512-MVyRgP2gzJJtAowjG/cHN3VQXwNLWnY+FpOEsyvDepJki1SdAX/8XDijM1yN6ESD1kr9uhBKjGelC6h3qtT+rA==} @@ -3608,79 +3612,79 @@ packages: peerDependencies: react: '>=18.2.0' - '@rolldown/binding-android-arm64@1.0.0-beta.54': - resolution: {integrity: sha512-zZRx/ur3Fai3fxiEmVp48+6GCBR48PRWJR1X3TTMn9yiq2bBHlYPgBaQtDOYWXv5H3J5dXujeTyGnuoY+kdGCg==} + '@rolldown/binding-android-arm64@1.0.0-beta.55': + resolution: {integrity: sha512-5cPpHdO+zp+klznZnIHRO1bMHDq5hS9cqXodEKAaa/dQTPDjnE91OwAsy3o1gT2x4QaY8NzdBXAvutYdaw0WeA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [android] - '@rolldown/binding-darwin-arm64@1.0.0-beta.54': - resolution: {integrity: sha512-zMyFEJmbIs91x22HAA/eUvmZHgjX8tGsD3TJ+WC9aY4bCdl3w84H9vMZmChSHAF1dYvGNH4KQDI2IubeZaCYtg==} + '@rolldown/binding-darwin-arm64@1.0.0-beta.55': + resolution: {integrity: sha512-l0887CGU2SXZr0UJmeEcXSvtDCOhDTTYXuoWbhrEJ58YQhQk24EVhDhHMTyjJb1PBRniUgNc1G0T51eF8z+TWw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [darwin] - '@rolldown/binding-darwin-x64@1.0.0-beta.54': - resolution: {integrity: sha512-Ex7QttdaVnEpmE/zroUT5Qm10e2+Vjd9q0LX9eXm59SitxDODMpC8GI1Rct5RrLf4GLU4DzdXBj6DGzuR+6g6w==} + '@rolldown/binding-darwin-x64@1.0.0-beta.55': + resolution: {integrity: sha512-d7qP2AVYzN0tYIP4vJ7nmr26xvmlwdkLD/jWIc9Z9dqh5y0UGPigO3m5eHoHq9BNazmwdD9WzDHbQZyXFZjgtA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [darwin] - '@rolldown/binding-freebsd-x64@1.0.0-beta.54': - resolution: {integrity: sha512-E1XO10ryM/Vxw3Q1wvs9s2mSpVBfbHtzkbJcdu26qh17ZmVwNWLiIoqEcbkXm028YwkReG4Gd2gCZ3NxgTQ28Q==} + '@rolldown/binding-freebsd-x64@1.0.0-beta.55': + resolution: {integrity: sha512-j311E4NOB0VMmXHoDDZhrWidUf7L/Sa6bu/+i2cskvHKU40zcUNPSYeD2YiO2MX+hhDFa5bJwhliYfs+bTrSZw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [freebsd] - '@rolldown/binding-linux-arm-gnueabihf@1.0.0-beta.54': - resolution: {integrity: sha512-oS73Uks8jczQR9pg0Bj718vap/x71exyJ5yuxu4X5V4MhwRQnky7ANSPm6ARUfraxOqt49IBfcMeGnw2rTSqdA==} + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-beta.55': + resolution: {integrity: sha512-lAsaYWhfNTW2A/9O7zCpb5eIJBrFeNEatOS/DDOZ5V/95NHy50g4b/5ViCqchfyFqRb7MKUR18/+xWkIcDkeIw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm] os: [linux] - '@rolldown/binding-linux-arm64-gnu@1.0.0-beta.54': - resolution: {integrity: sha512-pY8N2X5C+/ZQcy0eRdfOzOP//OFngP1TaIqDjFwfBPws2UNavKS8SpxhPEgUaYIaT0keVBd/TB+eVy9z+CIOtw==} + '@rolldown/binding-linux-arm64-gnu@1.0.0-beta.55': + resolution: {integrity: sha512-2x6ffiVLZrQv7Xii9+JdtyT1U3bQhKj59K3eRnYlrXsKyjkjfmiDUVx2n+zSyijisUqD62fcegmx2oLLfeTkCA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] - '@rolldown/binding-linux-arm64-musl@1.0.0-beta.54': - resolution: {integrity: sha512-cgTooAFm2MUmFriB7IYaWBNyqrGlRPKG+yaK2rGFl2rcdOcO24urY4p3eyB0ogqsRLvJbIxwjjYiWiIP7Eo1Cw==} + '@rolldown/binding-linux-arm64-musl@1.0.0-beta.55': + resolution: {integrity: sha512-QbNncvqAXziya5wleI+OJvmceEE15vE4yn4qfbI/hwT/+8ZcqxyfRZOOh62KjisXxp4D0h3JZspycXYejxAU3w==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] - '@rolldown/binding-linux-x64-gnu@1.0.0-beta.54': - resolution: {integrity: 
sha512-nGyLT1Qau0W+kEL44V2jhHmvfS3wyJW08E4WEu2E6NuIy+uChKN1X0aoxzFIDi2owDsYaZYez/98/f268EupIQ==} + '@rolldown/binding-linux-x64-gnu@1.0.0-beta.55': + resolution: {integrity: sha512-YZCTZZM+rujxwVc6A+QZaNMJXVtmabmFYLG2VGQTKaBfYGvBKUgtbMEttnp/oZ88BMi2DzadBVhOmfQV8SuHhw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] - '@rolldown/binding-linux-x64-musl@1.0.0-beta.54': - resolution: {integrity: sha512-KH374P0TUjDXssROT/orvzaWrzGOptD13PTrltgKwbDprJTMknoLiYsOD6Ttz92O2VuAcCtFuJ1xbyFM2Uo/Xg==} + '@rolldown/binding-linux-x64-musl@1.0.0-beta.55': + resolution: {integrity: sha512-28q9OQ/DDpFh2keS4BVAlc3N65/wiqKbk5K1pgLdu/uWbKa8hgUJofhXxqO+a+Ya2HVTUuYHneWsI2u+eu3N5Q==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] - '@rolldown/binding-openharmony-arm64@1.0.0-beta.54': - resolution: {integrity: sha512-oMAVO4wbfAbhpBxPsSp8R7ntL2DchpNfO+tGhN8/sI9jsbYwOv78uIW1fTwOBslhjTVFltGJ+l23mubNQcYNaQ==} + '@rolldown/binding-openharmony-arm64@1.0.0-beta.55': + resolution: {integrity: sha512-LiCA4BjCnm49B+j1lFzUtlC+4ZphBv0d0g5VqrEJua/uyv9Ey1v9tiaMql1C8c0TVSNDUmrkfHQ71vuQC7YfpQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [openharmony] - '@rolldown/binding-wasm32-wasi@1.0.0-beta.54': - resolution: {integrity: sha512-MYY/FmY+HehHiQkNx04W5oLy/Fqd1hXYqZmmorSDXvAHnxMbSgmdFicKsSYOg/sVGHBMEP1tTn6kV5sWrS45rA==} + '@rolldown/binding-wasm32-wasi@1.0.0-beta.55': + resolution: {integrity: sha512-nZ76tY7T0Oe8vamz5Cv5CBJvrqeQxwj1WaJ2GxX8Msqs0zsQMMcvoyxOf0glnJlxxgKjtoBxAOxaAU8ERbW6Tg==} engines: {node: '>=14.0.0'} cpu: [wasm32] - '@rolldown/binding-win32-arm64-msvc@1.0.0-beta.54': - resolution: {integrity: sha512-66o3uKxUmcYskT9exskxs3OVduXf5x0ndlMkYOjSpBgqzhLtkub136yDvZkNT1OkNDET0odSwcU7aWdpnwzAyg==} + '@rolldown/binding-win32-arm64-msvc@1.0.0-beta.55': + resolution: {integrity: sha512-TFVVfLfhL1G+pWspYAgPK/FSqjiBtRKYX9hixfs508QVEZPQlubYAepHPA7kEa6lZXYj5ntzF87KC6RNhxo+ew==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [win32] - '@rolldown/binding-win32-x64-msvc@1.0.0-beta.54': - resolution: {integrity: sha512-FbbbrboChLBXfeEsOfaypBGqzbdJ/CcSA2BPLCggojnIHy58Jo+AXV7HATY8opZk7194rRbokIT8AfPJtZAWtg==} + '@rolldown/binding-win32-x64-msvc@1.0.0-beta.55': + resolution: {integrity: sha512-j1WBlk0p+ISgLzMIgl0xHp1aBGXenoK2+qWYc/wil2Vse7kVOdFq9aeQ8ahK6/oxX2teQ5+eDvgjdywqTL+daA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [win32] @@ -3688,8 +3692,8 @@ packages: '@rolldown/pluginutils@1.0.0-beta.27': resolution: {integrity: sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==} - '@rolldown/pluginutils@1.0.0-beta.54': - resolution: {integrity: sha512-AHgcZ+w7RIRZ65ihSQL8YuoKcpD9Scew4sEeP1BBUT9QdTo6KjwHrZZXjID6nL10fhKessCH6OPany2QKwAwTQ==} + '@rolldown/pluginutils@1.0.0-beta.55': + resolution: {integrity: sha512-vajw/B3qoi7aYnnD4BQ4VoCcXQWnF0roSwE2iynbNxgW4l9mFwtLmLmUhpDdcTBfKyZm1p/T0D13qG94XBLohA==} '@rollup/plugin-replace@6.0.3': resolution: {integrity: sha512-J4RZarRvQAm5IF0/LwUUg+obsm+xZhYnbMXmXROyoSE1ATJe3oXSb9L5MMppdxP2ylNSjv6zFBwKYjcKMucVfA==} @@ -4136,11 +4140,28 @@ packages: '@tanstack/query-core@5.76.0': resolution: {integrity: sha512-FN375hb8ctzfNAlex5gHI6+WDXTNpe0nbxp/d2YJtnP+IBM6OUm7zcaoCW6T63BawGOYZBbKC0iPvr41TteNVg==} + '@tanstack/query-core@5.90.12': + resolution: {integrity: sha512-T1/8t5DhV/SisWjDnaiU2drl6ySvsHj1bHBCWNXd+/T+Hh1cf6JodyEYMd5sgwm+b/mETT4EV3H+zCVczCU5hg==} + + '@tanstack/query-devtools@5.91.1': + resolution: {integrity: 
sha512-l8bxjk6BMsCaVQH6NzQEE/bEgFy1hAs5qbgXl0xhzezlaQbPk6Mgz9BqEg2vTLPOHD8N4k+w/gdgCbEzecGyNg==} + + '@tanstack/react-query-devtools@5.91.1': + resolution: {integrity: sha512-tRnJYwEbH0kAOuToy8Ew7bJw1lX3AjkkgSlf/vzb+NpnqmHPdWM+lA2DSdGQSLi1SU0PDRrrCI1vnZnci96CsQ==} + peerDependencies: + '@tanstack/react-query': ^5.90.10 + react: ^18 || ^19 + '@tanstack/react-query@5.76.1': resolution: {integrity: sha512-YxdLZVGN4QkT5YT1HKZQWiIlcgauIXEIsMOTSjvyD5wLYK8YVvKZUPAysMqossFJJfDpJW3pFn7WNZuPOqq+fw==} peerDependencies: react: ^18 || ^19 + '@tanstack/react-query@5.90.12': + resolution: {integrity: sha512-graRZspg7EoEaw0a8faiUASCyJrqjKPdqJ9EwuDRUF9mEYJ1YPczI9H+/agJ0mOJkPCJDk0lsz5QTrLZ/jQ2rg==} + peerDependencies: + react: ^18 || ^19 + '@tanstack/react-table@8.21.3': resolution: {integrity: sha512-5nNMTSETP4ykGegmVkhjcS8tTLW6Vl4axfEGQN3v0zdHYbK4UfoqfPChclTrJ4EoK9QynqAu9oUf8VEmrpZ5Ww==} engines: {node: '>=12'} @@ -4459,6 +4480,13 @@ packages: peerDependencies: typescript: '*' + '@uidotdev/usehooks@2.4.1': + resolution: {integrity: sha512-1I+RwWyS+kdv3Mv0Vmc+p0dPYH0DTRAo04HLyXReYBL9AeseDWUJyi4THuksBJcu9F0Pih69Ak150VDnqbVnXg==} + engines: {node: '>=16'} + peerDependencies: + react: '>=18.0.0' + react-dom: '>=18.0.0' + '@ungap/structured-clone@1.3.0': resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} @@ -8387,8 +8415,8 @@ packages: vue-tsc: optional: true - rolldown@1.0.0-beta.54: - resolution: {integrity: sha512-3lIvjCWgjPL3gmiATUdV1NeVBGJZy6FdtwgLPol25tAkn46Q/MsVGfCSNswXwFOxGrxglPaN20IeALSIFuFyEg==} + rolldown@1.0.0-beta.55: + resolution: {integrity: sha512-r8Ws43aYCnfO07ao0SvQRz4TBAtZJjGWNvScRBOHuiNHvjfECOJBIqJv0nUkL1GYcltjvvHswRilDF1ocsC0+g==} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true @@ -11389,7 +11417,7 @@ snapshots: '@orama/orama@3.1.14': {} - '@oxc-project/types@0.102.0': {} + '@oxc-project/types@0.103.0': {} '@oxc-resolver/binding-darwin-arm64@9.0.2': optional: true @@ -11423,7 +11451,7 @@ snapshots: '@oxc-resolver/binding-wasm32-wasi@9.0.2': dependencies: - '@napi-rs/wasm-runtime': 0.2.11 + '@napi-rs/wasm-runtime': 0.2.12 optional: true '@oxc-resolver/binding-win32-arm64-msvc@9.0.2': @@ -12670,50 +12698,50 @@ snapshots: dependencies: react: 19.2.3 - '@rolldown/binding-android-arm64@1.0.0-beta.54': + '@rolldown/binding-android-arm64@1.0.0-beta.55': optional: true - '@rolldown/binding-darwin-arm64@1.0.0-beta.54': + '@rolldown/binding-darwin-arm64@1.0.0-beta.55': optional: true - '@rolldown/binding-darwin-x64@1.0.0-beta.54': + '@rolldown/binding-darwin-x64@1.0.0-beta.55': optional: true - '@rolldown/binding-freebsd-x64@1.0.0-beta.54': + '@rolldown/binding-freebsd-x64@1.0.0-beta.55': optional: true - '@rolldown/binding-linux-arm-gnueabihf@1.0.0-beta.54': + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-beta.55': optional: true - '@rolldown/binding-linux-arm64-gnu@1.0.0-beta.54': + '@rolldown/binding-linux-arm64-gnu@1.0.0-beta.55': optional: true - '@rolldown/binding-linux-arm64-musl@1.0.0-beta.54': + '@rolldown/binding-linux-arm64-musl@1.0.0-beta.55': optional: true - '@rolldown/binding-linux-x64-gnu@1.0.0-beta.54': + '@rolldown/binding-linux-x64-gnu@1.0.0-beta.55': optional: true - '@rolldown/binding-linux-x64-musl@1.0.0-beta.54': + '@rolldown/binding-linux-x64-musl@1.0.0-beta.55': optional: true - '@rolldown/binding-openharmony-arm64@1.0.0-beta.54': + '@rolldown/binding-openharmony-arm64@1.0.0-beta.55': optional: true - '@rolldown/binding-wasm32-wasi@1.0.0-beta.54': + '@rolldown/binding-wasm32-wasi@1.0.0-beta.55': 
dependencies: '@napi-rs/wasm-runtime': 1.1.0 optional: true - '@rolldown/binding-win32-arm64-msvc@1.0.0-beta.54': + '@rolldown/binding-win32-arm64-msvc@1.0.0-beta.55': optional: true - '@rolldown/binding-win32-x64-msvc@1.0.0-beta.54': + '@rolldown/binding-win32-x64-msvc@1.0.0-beta.55': optional: true '@rolldown/pluginutils@1.0.0-beta.27': {} - '@rolldown/pluginutils@1.0.0-beta.54': {} + '@rolldown/pluginutils@1.0.0-beta.55': {} '@rollup/plugin-replace@6.0.3(rollup@4.40.2)': dependencies: @@ -13113,14 +13141,24 @@ snapshots: '@tanstack/query-core@5.76.0': {} + '@tanstack/query-core@5.90.12': {} + + '@tanstack/query-devtools@5.91.1': {} + + '@tanstack/react-query-devtools@5.91.1(@tanstack/react-query@5.90.12(react@19.2.3))(react@19.2.3)': + dependencies: + '@tanstack/query-devtools': 5.91.1 + '@tanstack/react-query': 5.90.12(react@19.2.3) + react: 19.2.3 + '@tanstack/react-query@5.76.1(react@19.1.1)': dependencies: '@tanstack/query-core': 5.76.0 react: 19.1.1 - '@tanstack/react-query@5.76.1(react@19.2.3)': + '@tanstack/react-query@5.90.12(react@19.2.3)': dependencies: - '@tanstack/query-core': 5.76.0 + '@tanstack/query-core': 5.90.12 react: 19.2.3 '@tanstack/react-table@8.21.3(react-dom@19.2.3(react@19.2.3))(react@19.2.3)': @@ -13536,6 +13574,11 @@ snapshots: transitivePeerDependencies: - supports-color + '@uidotdev/usehooks@2.4.1(react-dom@19.2.3(react@19.2.3))(react@19.2.3)': + dependencies: + react: 19.2.3 + react-dom: 19.2.3(react@19.2.3) + '@ungap/structured-clone@1.3.0': {} '@unrs/resolver-binding-darwin-arm64@1.7.9': @@ -13634,6 +13677,15 @@ snapshots: chai: 6.2.1 tinyrainbow: 3.0.3 + '@vitest/mocker@4.0.15(msw@2.10.2(@types/node@22.17.1)(typescript@5.9.3))(vite@6.3.5(@types/node@22.17.1)(jiti@1.21.7)(lightningcss@1.30.2)(tsx@4.21.0)(yaml@2.8.0))': + dependencies: + '@vitest/spy': 4.0.15 + estree-walker: 3.0.3 + magic-string: 0.30.21 + optionalDependencies: + msw: 2.10.2(@types/node@22.17.1)(typescript@5.9.3) + vite: 6.3.5(@types/node@22.17.1)(jiti@1.21.7)(lightningcss@1.30.2)(tsx@4.21.0)(yaml@2.8.0) + '@vitest/mocker@4.0.15(msw@2.10.2(@types/node@22.17.1)(typescript@5.9.3))(vite@6.3.5(@types/node@22.17.1)(jiti@2.6.1)(lightningcss@1.30.2)(tsx@4.20.3)(yaml@2.8.0))': dependencies: '@vitest/spy': 4.0.15 @@ -15000,7 +15052,7 @@ snapshots: '@typescript-eslint/parser': 8.33.1(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3) eslint: 9.27.0(jiti@2.6.1) eslint-import-resolver-node: 0.3.9 - eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.31.0(@typescript-eslint/parser@8.33.1(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.27.0(jiti@2.6.1)))(eslint@9.27.0(jiti@2.6.1)) + eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.31.0)(eslint@9.27.0(jiti@2.6.1)) eslint-plugin-import: 2.31.0(@typescript-eslint/parser@8.33.1(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.27.0(jiti@2.6.1)) eslint-plugin-jsx-a11y: 6.10.2(eslint@9.27.0(jiti@2.6.1)) eslint-plugin-react: 7.37.5(eslint@9.27.0(jiti@2.6.1)) @@ -15020,7 +15072,7 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.31.0(@typescript-eslint/parser@8.33.1(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.27.0(jiti@2.6.1)))(eslint@9.27.0(jiti@2.6.1)): + eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.31.0)(eslint@9.27.0(jiti@2.6.1)): dependencies: '@nolyfill/is-core-module': 1.0.39 debug: 4.4.1(supports-color@5.5.0) @@ -15035,14 +15087,14 @@ snapshots: transitivePeerDependencies: 
- supports-color - eslint-module-utils@2.12.0(@typescript-eslint/parser@8.33.1(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.31.0(@typescript-eslint/parser@8.33.1(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.27.0(jiti@2.6.1)))(eslint@9.27.0(jiti@2.6.1)))(eslint@9.27.0(jiti@2.6.1)): + eslint-module-utils@2.12.0(@typescript-eslint/parser@8.33.1(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.27.0(jiti@2.6.1)): dependencies: debug: 3.2.7 optionalDependencies: '@typescript-eslint/parser': 8.33.1(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3) eslint: 9.27.0(jiti@2.6.1) eslint-import-resolver-node: 0.3.9 - eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.31.0(@typescript-eslint/parser@8.33.1(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.27.0(jiti@2.6.1)))(eslint@9.27.0(jiti@2.6.1)) + eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.31.0)(eslint@9.27.0(jiti@2.6.1)) transitivePeerDependencies: - supports-color @@ -15057,7 +15109,7 @@ snapshots: doctrine: 2.1.0 eslint: 9.27.0(jiti@2.6.1) eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.12.0(@typescript-eslint/parser@8.33.1(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.31.0(@typescript-eslint/parser@8.33.1(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.27.0(jiti@2.6.1)))(eslint@9.27.0(jiti@2.6.1)))(eslint@9.27.0(jiti@2.6.1)) + eslint-module-utils: 2.12.0(@typescript-eslint/parser@8.33.1(eslint@9.27.0(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.27.0(jiti@2.6.1)) hasown: 2.0.2 is-core-module: 2.16.1 is-glob: 4.0.3 @@ -15565,7 +15617,7 @@ snapshots: fsevents@2.3.3: optional: true - fumadocs-core@15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1): + fumadocs-core@15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1): dependencies: '@formatjs/intl-localematcher': 0.6.1 '@orama/orama': 3.1.14 @@ -15586,7 +15638,7 @@ snapshots: unist-util-visit: 5.0.0 optionalDependencies: '@types/react': 19.1.10 - next: 15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + next: 15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react: 19.1.1 react-dom: 19.1.1(react@19.1.1) transitivePeerDependencies: @@ -15601,7 +15653,7 @@ snapshots: unist-util-visit: 5.0.0 zod: 3.25.76 - fumadocs-mdx@11.6.4(acorn@8.14.1)(fumadocs-core@15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)): + fumadocs-mdx@11.6.4(acorn@8.14.1)(fumadocs-core@15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)): dependencies: '@mdx-js/mdx': 3.1.0(acorn@8.14.1) '@standard-schema/spec': 1.0.0 @@ -15610,11 +15662,11 @@ snapshots: esbuild: 0.25.4 estree-util-value-to-estree: 3.4.0 fast-glob: 
3.3.3 - fumadocs-core: 15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + fumadocs-core: 15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1) gray-matter: 4.0.3 js-yaml: 4.1.0 lru-cache: 11.1.0 - next: 15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + next: 15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) picocolors: 1.1.1 unist-util-visit: 5.0.0 zod: 3.25.76 @@ -15622,11 +15674,11 @@ snapshots: - acorn - supports-color - fumadocs-twoslash@3.1.7(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(fumadocs-ui@15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(typescript@5.9.3): + fumadocs-twoslash@3.1.7(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(fumadocs-ui@15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(typescript@5.9.3): dependencies: '@radix-ui/react-popover': 1.1.15(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@shikijs/twoslash': 3.13.0(typescript@5.9.3) - fumadocs-ui: 15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11) + fumadocs-ui: 15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11) mdast-util-from-markdown: 2.0.2 mdast-util-gfm: 3.1.0 mdast-util-to-hast: 13.2.0 @@ -15642,10 +15694,10 @@ snapshots: - supports-color - typescript - fumadocs-typescript@4.0.8(@types/react@19.1.10)(fumadocs-core@15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(fumadocs-ui@15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11))(typescript@5.9.3): + fumadocs-typescript@4.0.8(@types/react@19.1.10)(fumadocs-core@15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(fumadocs-ui@15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11))(typescript@5.9.3): dependencies: estree-util-value-to-estree: 3.4.0 - fumadocs-core: 15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + fumadocs-core: 
15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1) hast-util-to-estree: 3.1.3 hast-util-to-jsx-runtime: 2.3.6 remark: 15.0.1 @@ -15656,11 +15708,11 @@ snapshots: unist-util-visit: 5.0.0 optionalDependencies: '@types/react': 19.1.10 - fumadocs-ui: 15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11) + fumadocs-ui: 15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11) transitivePeerDependencies: - supports-color - fumadocs-ui@15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11): + fumadocs-ui@15.7.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(tailwindcss@4.1.11): dependencies: '@radix-ui/react-accordion': 1.2.12(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@radix-ui/react-collapsible': 1.1.12(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) @@ -15673,7 +15725,7 @@ snapshots: '@radix-ui/react-slot': 1.2.3(@types/react@19.1.10)(react@19.1.1) '@radix-ui/react-tabs': 1.1.13(@types/react-dom@19.1.7(@types/react@19.1.10))(@types/react@19.1.10)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) class-variance-authority: 0.7.1 - fumadocs-core: 15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + fumadocs-core: 15.7.13(@types/react@19.1.10)(next@15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1) lodash.merge: 4.6.2 next-themes: 0.4.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1) postcss-selector-parser: 7.1.0 @@ -15684,7 +15736,7 @@ snapshots: tailwind-merge: 3.3.1 optionalDependencies: '@types/react': 19.1.10 - next: 15.5.8(@babel/core@7.28.5)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + next: 15.5.8(@babel/core@7.27.7)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) tailwindcss: 4.1.11 transitivePeerDependencies: - '@mixedbread/sdk' @@ -18197,7 +18249,7 @@ snapshots: reusify@1.1.0: {} - rolldown-plugin-dts@0.15.10(rolldown@1.0.0-beta.54)(typescript@5.9.3): + rolldown-plugin-dts@0.15.10(rolldown@1.0.0-beta.55)(typescript@5.9.3): dependencies: '@babel/generator': 7.28.5 '@babel/parser': 7.28.5 @@ -18207,14 +18259,14 @@ snapshots: debug: 4.4.1(supports-color@5.5.0) dts-resolver: 2.1.3 get-tsconfig: 4.13.0 - rolldown: 1.0.0-beta.54 + rolldown: 1.0.0-beta.55 optionalDependencies: typescript: 5.9.3 transitivePeerDependencies: - oxc-resolver - supports-color - rolldown-plugin-dts@0.15.6(rolldown@1.0.0-beta.54)(typescript@5.9.3): + rolldown-plugin-dts@0.15.6(rolldown@1.0.0-beta.55)(typescript@5.9.3): dependencies: '@babel/generator': 7.28.3 '@babel/parser': 7.28.3 @@ -18224,31 +18276,31 @@ snapshots: debug: 4.4.1(supports-color@5.5.0) dts-resolver: 2.1.1 
get-tsconfig: 4.13.0 - rolldown: 1.0.0-beta.54 + rolldown: 1.0.0-beta.55 optionalDependencies: typescript: 5.9.3 transitivePeerDependencies: - oxc-resolver - supports-color - rolldown@1.0.0-beta.54: + rolldown@1.0.0-beta.55: dependencies: - '@oxc-project/types': 0.102.0 - '@rolldown/pluginutils': 1.0.0-beta.54 + '@oxc-project/types': 0.103.0 + '@rolldown/pluginutils': 1.0.0-beta.55 optionalDependencies: - '@rolldown/binding-android-arm64': 1.0.0-beta.54 - '@rolldown/binding-darwin-arm64': 1.0.0-beta.54 - '@rolldown/binding-darwin-x64': 1.0.0-beta.54 - '@rolldown/binding-freebsd-x64': 1.0.0-beta.54 - '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-beta.54 - '@rolldown/binding-linux-arm64-gnu': 1.0.0-beta.54 - '@rolldown/binding-linux-arm64-musl': 1.0.0-beta.54 - '@rolldown/binding-linux-x64-gnu': 1.0.0-beta.54 - '@rolldown/binding-linux-x64-musl': 1.0.0-beta.54 - '@rolldown/binding-openharmony-arm64': 1.0.0-beta.54 - '@rolldown/binding-wasm32-wasi': 1.0.0-beta.54 - '@rolldown/binding-win32-arm64-msvc': 1.0.0-beta.54 - '@rolldown/binding-win32-x64-msvc': 1.0.0-beta.54 + '@rolldown/binding-android-arm64': 1.0.0-beta.55 + '@rolldown/binding-darwin-arm64': 1.0.0-beta.55 + '@rolldown/binding-darwin-x64': 1.0.0-beta.55 + '@rolldown/binding-freebsd-x64': 1.0.0-beta.55 + '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-beta.55 + '@rolldown/binding-linux-arm64-gnu': 1.0.0-beta.55 + '@rolldown/binding-linux-arm64-musl': 1.0.0-beta.55 + '@rolldown/binding-linux-x64-gnu': 1.0.0-beta.55 + '@rolldown/binding-linux-x64-musl': 1.0.0-beta.55 + '@rolldown/binding-openharmony-arm64': 1.0.0-beta.55 + '@rolldown/binding-wasm32-wasi': 1.0.0-beta.55 + '@rolldown/binding-win32-arm64-msvc': 1.0.0-beta.55 + '@rolldown/binding-win32-x64-msvc': 1.0.0-beta.55 rollup-plugin-preserve-directives@0.4.0(rollup@4.40.2): dependencies: @@ -18965,8 +19017,8 @@ snapshots: diff: 8.0.2 empathic: 2.0.0 hookable: 5.5.3 - rolldown: 1.0.0-beta.54 - rolldown-plugin-dts: 0.15.6(rolldown@1.0.0-beta.54)(typescript@5.9.3) + rolldown: 1.0.0-beta.55 + rolldown-plugin-dts: 0.15.6(rolldown@1.0.0-beta.55)(typescript@5.9.3) semver: 7.7.2 tinyexec: 1.0.1 tinyglobby: 0.2.14 @@ -18990,8 +19042,8 @@ snapshots: diff: 8.0.2 empathic: 2.0.0 hookable: 5.5.3 - rolldown: 1.0.0-beta.54 - rolldown-plugin-dts: 0.15.10(rolldown@1.0.0-beta.54)(typescript@5.9.3) + rolldown: 1.0.0-beta.55 + rolldown-plugin-dts: 0.15.10(rolldown@1.0.0-beta.55)(typescript@5.9.3) semver: 7.7.2 tinyexec: 1.0.1 tinyglobby: 0.2.14 @@ -19486,7 +19538,7 @@ snapshots: vitest@4.0.15(@types/node@22.17.1)(happy-dom@15.11.7)(jiti@1.21.7)(lightningcss@1.30.2)(msw@2.10.2(@types/node@22.17.1)(typescript@5.9.3))(tsx@4.21.0)(yaml@2.8.0): dependencies: '@vitest/expect': 4.0.15 - '@vitest/mocker': 4.0.15(msw@2.10.2(@types/node@22.17.1)(typescript@5.9.3))(vite@6.3.5(@types/node@22.17.1)(jiti@2.6.1)(lightningcss@1.30.2)(tsx@4.21.0)(yaml@2.8.0)) + '@vitest/mocker': 4.0.15(msw@2.10.2(@types/node@22.17.1)(typescript@5.9.3))(vite@6.3.5(@types/node@22.17.1)(jiti@1.21.7)(lightningcss@1.30.2)(tsx@4.21.0)(yaml@2.8.0)) '@vitest/pretty-format': 4.0.15 '@vitest/runner': 4.0.15 '@vitest/snapshot': 4.0.15