diff --git a/.gitignore b/.gitignore index da87bd13bb..b816c5fbfb 100644 --- a/.gitignore +++ b/.gitignore @@ -98,4 +98,5 @@ alice-auth-manager-data alice-auth-manager-data !/packages/contracts/dist/ !/packages/contracts/dist/dev -.secret \ No newline at end of file +.secret +debug diff --git a/README.md b/README.md index 6f78e54240..3115b2829f 100644 --- a/README.md +++ b/README.md @@ -69,6 +69,28 @@ pnpm run test:target packages/e2e/src/tickets/delegation.spec.ts Append additional Jest flags after the path if you need finer filtering. +### Debugging node requests (curl dumps) + +When running in Node.js (e2e tests, scripts, etc), you can persist generated curl commands to `./debug/` (this directory is gitignored). This lets you replay the exact request without re-running the full flow. + +```bash +# enable writing curl commands to disk +export LIT_DEBUG_CURL=1 + +# optional: change output directory (defaults to ./debug) +export LIT_DEBUG_CURL_DIR=./debug +``` + +After your run, copy the correlation id (for example `X-Request-Id` from node calls, or `x-correlation-id` / the `Request()` value from Wrapped Keys errors) and print the stored curl command: + +```bash +pnpm debug:curl -- <request-id> +``` + +If you only have a prefix/substring, `pnpm debug:curl` will try to match it (and will list matches if more than one file fits). + +More details: `docs/guides/debugging-node-requests.mdx`. + ## QA Starter Kit workflow When you need to validate SDK integrations against backend or node features, lean on the [QA Starter Kit](https://github.com/LIT-Protocol/QA-kit). That repo installs published packages, so it mirrors how downstream teams will consume the SDK. 
diff --git a/docs/docs.json b/docs/docs.json index fa5be34198..a109807891 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -154,7 +154,8 @@ "group": "Guides", "pages": [ "guides/lit-action-sign-as-action", - "guides/server-sessions" + "guides/server-sessions", + "guides/debugging-node-requests" ] }, { @@ -246,4 +247,4 @@ "discord": "https://litgateway.com/discord" } } -} \ No newline at end of file +} diff --git a/docs/guides/debugging-node-requests.mdx b/docs/guides/debugging-node-requests.mdx new file mode 100644 index 0000000000..7a30bb8c63 --- /dev/null +++ b/docs/guides/debugging-node-requests.mdx @@ -0,0 +1,39 @@ +--- +title: "Debugging Node Requests" +description: "Dump curl commands by request id so you can replay calls without re-running full flows." +--- + +# Overview + +When debugging a failing Lit node call, it helps to replay the exact HTTP request without re-running an entire end-to-end flow. The SDK can generate a `curl` command for each request and (optionally) write it to disk keyed by the request correlation id (for example `X-Request-Id` or `x-correlation-id`). + +# Enable curl dumps (Node.js only) + +Set `LIT_DEBUG_CURL=1` in your environment when running in Node.js (tests, scripts, backends, etc). This is disabled by default and does not write anything in browser builds. + +```bash +export LIT_DEBUG_CURL=1 +``` + +Optional: override the output directory (defaults to `./debug`, relative to `process.cwd()`): + +```bash +export LIT_DEBUG_CURL_DIR=./debug +``` + +After you run whatever code triggers requests, you should see files created under `./debug/` named by request id. 
+ +# Retrieve a curl command by request id + +Once you have the correlation id (for example `X-Request-Id` from node calls, or `x-correlation-id` from Wrapped Keys service calls), you can print the stored curl command and paste it directly into a terminal: + +```bash +pnpm debug:curl -- <request-id> +``` + +If you only have a prefix/substring, `pnpm debug:curl` will try to match it (and will list matches if more than one file fits). + +# Security notes + +- The generated curl command includes request headers and body, which may contain signatures or other sensitive data. +- The default `./debug` directory is gitignored in this repo; keep these files local and do not share them. diff --git a/package.json b/package.json index 17d706fd41..637560b0c8 100644 --- a/package.json +++ b/package.json @@ -5,6 +5,7 @@ "scripts": { "sync:contracts": "nx run contracts:sync", "sync:docs-changelog": "node tools/sync-docs-changelog.js", + "debug:curl": "node tools/debug-curl.js", "sync:wk": "node packages/wrapped-keys-lit-actions/sync-actions-to-ipfs.js", "sync:check": "sh -c 'if [ -n \"$PKG\" ]; then pnpm syncpack list-mismatches --types prod,dev,peer --filter \"$PKG\"; else pnpm syncpack list-mismatches --types prod,dev,peer; fi'", "sync:fix": "sh -c 'if [ -n \"$PKG\" ]; then pnpm syncpack fix-mismatches --types prod,dev,peer --filter \"$PKG\"; else pnpm syncpack fix-mismatches --types prod,dev,peer; fi'", 
a/packages/artillery/README.md +++ b/packages/artillery/README.md @@ -6,7 +6,7 @@ Usage via root scripts remains the same, now pointing to `packages/artillery`. # 🚀 Run Artillery tests -- LOG_LEVEL= `debug` | `info` | `silent` | `debug2` (raw console.log) +- LOG_LEVEL= `debug` | `info` | `silent` | `debug_text` (console-style text output, not JSON; `debug2` is a deprecated alias) - NETWORK= `naga-dev` | `naga-staging` ## Setup Commands diff --git a/packages/auth-helpers/package.json b/packages/auth-helpers/package.json index e37dc10f62..dbed2aa183 100644 --- a/packages/auth-helpers/package.json +++ b/packages/auth-helpers/package.json @@ -29,6 +29,7 @@ "main": "./src/index.js", "typings": "./src/index.d.ts", "dependencies": { + "@lit-protocol/logger": "workspace:*", "@wagmi/core": "2.22.1", "ethers": "5.7.2", "siwe": "2.3.2", diff --git a/packages/auth-helpers/src/lib/auth-config-builder.ts b/packages/auth-helpers/src/lib/auth-config-builder.ts index 85f9f609f8..cc4e756d54 100644 --- a/packages/auth-helpers/src/lib/auth-config-builder.ts +++ b/packages/auth-helpers/src/lib/auth-config-builder.ts @@ -33,6 +33,9 @@ import { } from './resources'; // Corrected path: from ../lib/auth-config-builder.ts to ../lib/resources/ import { z } from 'zod'; +import { getChildLogger } from '@lit-protocol/logger'; + +const logger = getChildLogger({ module: 'auth-config-builder' }); // Infer the AuthConfig type from the Zod schema type AuthConfig = z.infer; @@ -163,7 +166,7 @@ export const createAuthConfigBuilder = (): IAuthConfigBuilder => { return parsedConfig; } catch (e) { if (e instanceof z.ZodError) { - console.error('AuthConfig validation failed:', e.errors); + logger.error({ errors: e.errors }, 'AuthConfig validation failed'); } throw new Error(`Failed to build AuthConfig: ${(e as Error).message}`); } diff --git a/packages/auth/package.json b/packages/auth/package.json index 09d4cd268b..2397b5a29d 100644 --- a/packages/auth/package.json +++ b/packages/auth/package.json @@ 
-22,6 +22,7 @@ "universal" ], "dependencies": { + "@lit-protocol/logger": "workspace:*", "@noble/hashes": "1.8.0", "@noble/curves": "1.8.1", "@simplewebauthn/browser": "7.2.0", diff --git a/packages/auth/src/lib/authenticators/helper/pollResponse.ts b/packages/auth/src/lib/authenticators/helper/pollResponse.ts index 5c419bda41..c5f7f9269a 100644 --- a/packages/auth/src/lib/authenticators/helper/pollResponse.ts +++ b/packages/auth/src/lib/authenticators/helper/pollResponse.ts @@ -35,6 +35,10 @@ * ``` */ +import { getChildLogger } from '@lit-protocol/logger'; + +const logger = getChildLogger({ module: 'auth.pollResponse' }); + /** * Defines the parameters for the pollResponse function. * @template TResponse The expected type of the JSON response from the URL. @@ -82,7 +86,7 @@ export async function pollResponse({ }: PollResponseParams): Promise { for (let i = 0; i < maxRetries; i++) { try { - console.log( + logger.debug( `${errorMessageContext}: Polling attempt ${ i + 1 }/${maxRetries} for ${url}` @@ -96,27 +100,28 @@ export async function pollResponse({ ); } // Log other non-ok statuses but continue retrying unless it's a client error type that won't resolve on its own. - console.error( + logger.warn( + { status: response.status, url }, `${errorMessageContext}: Polling attempt ${ i + 1 - } failed with HTTP status: ${ - response.status - } for URL ${url}. Retrying...` + } failed with HTTP status. Retrying...` ); // Optionally, specific handling for other critical HTTP errors could be added here to throw immediately. 
} else { const data = (await response.json()) as TResponse; - console.log( - `${errorMessageContext}: Polling attempt ${ - i + 1 - }/${maxRetries} - current status/data:`, - data + logger.debug( + { + attempt: i + 1, + maxRetries, + data, + }, + `${errorMessageContext}: Polling attempt - current status/data` ); if (isErrorCondition?.(data)) { - console.error( - `${errorMessageContext}: Error condition met during polling.`, - data + logger.error( + { data }, + `${errorMessageContext}: Error condition met during polling` ); // Attempt to get more specific error details if available const errorDetails = @@ -133,9 +138,9 @@ export async function pollResponse({ } if (isCompleteCondition(data)) { - console.log( - `${errorMessageContext}: Completion condition met successfully.`, - data + logger.info( + { data }, + `${errorMessageContext}: Completion condition met successfully` ); return data; } @@ -143,11 +148,9 @@ export async function pollResponse({ } } catch (error: any) { const message = error instanceof Error ? error.message : String(error); - console.error( - `${errorMessageContext}: Error during polling attempt ${ - i + 1 - }/${maxRetries} for ${url}:`, - message + logger.warn( + { error, attempt: i + 1, maxRetries, url }, + `${errorMessageContext}: Error during polling attempt` ); // If it's the last attempt, or a critical error (like 404 or an explicit error condition from isErrorCondition), rethrow. 
if ( diff --git a/packages/auth/src/lib/authenticators/stytch/factors/2fa/StytchTotp2FAAuthenticator.ts b/packages/auth/src/lib/authenticators/stytch/factors/2fa/StytchTotp2FAAuthenticator.ts index 62a336a054..ae583be871 100644 --- a/packages/auth/src/lib/authenticators/stytch/factors/2fa/StytchTotp2FAAuthenticator.ts +++ b/packages/auth/src/lib/authenticators/stytch/factors/2fa/StytchTotp2FAAuthenticator.ts @@ -3,6 +3,9 @@ import { AuthData } from '@lit-protocol/schemas'; import { AuthMethod, StytchToken } from '@lit-protocol/types'; import { AuthMethodTypeStringMap } from '../../../../types'; import { totpAuthFactorParser } from '../../parsers'; +import { getChildLogger } from '@lit-protocol/logger'; + +const _logger = getChildLogger({ module: 'StytchTotp2FAAuthenticator' }); /** * Configuration for the Stytch TOTP authenticate method. @@ -114,7 +117,7 @@ export class StytchTotp2FAAuthenticator { } accessToken = verifyData.accessToken; } catch (e: any) { - console.error('Error verifying TOTP via auth service:', e); + _logger.error({ e }, 'Error verifying TOTP via auth service'); throw e; } @@ -146,7 +149,7 @@ export class StytchTotp2FAAuthenticator { authMethodId: generatedAuthMethodId, }); } catch (e) { - console.error('Error processing Stytch TOTP token:', e); + _logger.error({ e }, 'Error processing Stytch TOTP token'); reject(e); } }); @@ -243,7 +246,7 @@ export class StytchTotp2FAAuthenticator { const createData = await createResponse.json(); - console.log('createData', createData); + _logger.debug({ createData }, 'TOTP registration create response'); if ( !createData.totpRegistrationId || @@ -262,7 +265,10 @@ export class StytchTotp2FAAuthenticator { recoveryCodes: createData.recoveryCodes || [], }; } catch (e: any) { - console.error('Error initiating TOTP registration via auth service:', e); + _logger.error( + { e }, + 'Error initiating TOTP registration via auth service' + ); throw e; } } @@ -304,14 +310,17 @@ export class StytchTotp2FAAuthenticator { ); 
} - console.log('verifyData', verifyData); + _logger.debug({ verifyData }, 'TOTP registration verify response'); return { accessToken: verifyData.accessToken, totpId: verifyData.totpId || '', }; } catch (e: any) { - console.error('Error verifying TOTP registration via auth service:', e); + _logger.error( + { e }, + 'Error verifying TOTP registration via auth service' + ); throw e; } } diff --git a/packages/auth/src/lib/authenticators/stytch/factors/StytchEmailOtpAuthenticator.ts b/packages/auth/src/lib/authenticators/stytch/factors/StytchEmailOtpAuthenticator.ts index c9fd85def6..49bc68fcfd 100644 --- a/packages/auth/src/lib/authenticators/stytch/factors/StytchEmailOtpAuthenticator.ts +++ b/packages/auth/src/lib/authenticators/stytch/factors/StytchEmailOtpAuthenticator.ts @@ -3,6 +3,9 @@ import { AuthData } from '@lit-protocol/schemas'; import { AuthMethod, StytchToken } from '@lit-protocol/types'; import { AuthMethodTypeStringMap } from '../../../types'; import { emailOtpAuthFactorParser } from '../parsers'; +import { getChildLogger } from '@lit-protocol/logger'; + +const _logger = getChildLogger({ module: 'StytchEmailOtpAuthenticator' }); /** * Configuration for initiating the Stytch Email OTP sending process. 
@@ -74,7 +77,7 @@ export class StytchEmailOtpAuthenticator { methodId: responseData.methodId, }; } catch (e: any) { - console.error('Error in sendOtp:', e); + _logger.error({ e }, 'Error in sendOtp'); throw e; // Re-throw the error to be handled by the caller } } @@ -116,7 +119,7 @@ export class StytchEmailOtpAuthenticator { accessToken = verifyData.accessToken; userId = verifyData.userId; } catch (e: any) { - console.error('Error verifying OTP via auth service:', e); + _logger.error({ e }, 'Error verifying OTP via auth service'); throw e; // Re-throw the error } @@ -158,7 +161,7 @@ export class StytchEmailOtpAuthenticator { }, }); } catch (e) { - console.error('Error processing Stytch token:', e); + _logger.error({ e }, 'Error processing Stytch token'); reject(e); } }); diff --git a/packages/auth/src/lib/authenticators/stytch/factors/StytchSmsOtpAuthenticator.ts b/packages/auth/src/lib/authenticators/stytch/factors/StytchSmsOtpAuthenticator.ts index 0a9bbc48da..53be9b84ae 100644 --- a/packages/auth/src/lib/authenticators/stytch/factors/StytchSmsOtpAuthenticator.ts +++ b/packages/auth/src/lib/authenticators/stytch/factors/StytchSmsOtpAuthenticator.ts @@ -3,6 +3,9 @@ import { AuthData } from '@lit-protocol/schemas'; import { AuthMethod, StytchToken } from '@lit-protocol/types'; import { AuthMethodTypeStringMap } from '../../../types'; import { smsOtpAuthFactorParser } from '../parsers'; +import { getChildLogger } from '@lit-protocol/logger'; + +const _logger = getChildLogger({ module: 'StytchSmsOtpAuthenticator' }); /** * Configuration for initiating the Stytch SMS OTP sending process. 
@@ -70,7 +73,7 @@ export class StytchSmsOtpAuthenticator { } return { methodId: responseData.methodId }; } catch (e: any) { - console.error('Error in StytchSmsOtpAuthenticator sendOtp:', e); + _logger.error({ e }, 'Error in StytchSmsOtpAuthenticator sendOtp'); throw e; } } @@ -114,7 +117,7 @@ export class StytchSmsOtpAuthenticator { accessToken = verifyData.accessToken; userId = verifyData.userId; } catch (e: any) { - console.error('Error verifying SMS OTP via auth service:', e); + _logger.error({ e }, 'Error verifying SMS OTP via auth service'); throw e; } @@ -151,7 +154,7 @@ export class StytchSmsOtpAuthenticator { }, }); } catch (e) { - console.error('Error processing Stytch SMS token:', e); + _logger.error({ e }, 'Error processing Stytch SMS token'); reject(e); } }); diff --git a/packages/auth/src/lib/authenticators/stytch/factors/StytchWhatsAppOtpAuthenticator.ts b/packages/auth/src/lib/authenticators/stytch/factors/StytchWhatsAppOtpAuthenticator.ts index ae00ab14b3..058308c1b6 100644 --- a/packages/auth/src/lib/authenticators/stytch/factors/StytchWhatsAppOtpAuthenticator.ts +++ b/packages/auth/src/lib/authenticators/stytch/factors/StytchWhatsAppOtpAuthenticator.ts @@ -3,6 +3,9 @@ import { AuthData } from '@lit-protocol/schemas'; import { AuthMethod, StytchToken } from '@lit-protocol/types'; import { AuthMethodTypeStringMap } from '../../../types'; import { whatsAppOtpAuthFactorParser } from '../parsers'; +import { getChildLogger } from '@lit-protocol/logger'; + +const _logger = getChildLogger({ module: 'StytchWhatsAppOtpAuthenticator' }); /** * Configuration for initiating the Stytch WhatsApp OTP sending process. 
@@ -70,7 +73,7 @@ export class StytchWhatsAppOtpAuthenticator { } return { methodId: responseData.methodId }; } catch (e: any) { - console.error('Error in StytchWhatsAppOtpAuthenticator sendOtp:', e); + _logger.error({ e }, 'Error in StytchWhatsAppOtpAuthenticator sendOtp'); throw e; } } @@ -113,7 +116,7 @@ export class StytchWhatsAppOtpAuthenticator { accessToken = verifyData.accessToken; userId = verifyData.userId; } catch (e: any) { - console.error('Error verifying WhatsApp OTP via auth service:', e); + _logger.error({ e }, 'Error verifying WhatsApp OTP via auth service'); throw e; } @@ -152,7 +155,7 @@ export class StytchWhatsAppOtpAuthenticator { }, }); } catch (e) { - console.error('Error processing Stytch WhatsApp token:', e); + _logger.error({ e }, 'Error processing Stytch WhatsApp token'); reject(e); } }); diff --git a/packages/auth/src/lib/storage/localStorage.ts b/packages/auth/src/lib/storage/localStorage.ts index bfbb55e1df..22cd486e8f 100644 --- a/packages/auth/src/lib/storage/localStorage.ts +++ b/packages/auth/src/lib/storage/localStorage.ts @@ -2,6 +2,9 @@ import type { LitAuthStorageProvider } from './types'; import type { LitAuthData } from '../types'; import { getGlobal } from '@lit-protocol/constants'; import { PKPData } from '@lit-protocol/schemas'; +import { getChildLogger } from '@lit-protocol/logger'; + +const logger = getChildLogger({ module: 'localStorage' }); const LOCALSTORAGE_LIT_AUTH_PREFIX = 'lit-auth'; const LOCALSTORAGE_LIT_PKP_PREFIX = 'lit-pkp-tokens'; @@ -221,7 +224,7 @@ export function localStorage({ return parsed.tokenIds || null; } catch (error) { - console.warn('Failed to parse cached PKP tokens:', error); + logger.warn({ error }, 'Failed to parse cached PKP tokens'); return null; } }, @@ -332,7 +335,7 @@ export function localStorage({ return parsed.pkps || null; } catch (error) { - console.warn('Failed to parse cached PKP data:', error); + logger.warn({ error }, 'Failed to parse cached PKP data'); return null; } }, @@ 
-389,7 +392,7 @@ export function localStorage({ } return null; } catch (error) { - console.warn('Failed to parse cached PKP details:', error); + logger.warn({ error }, 'Failed to parse cached PKP details'); return null; } }, @@ -437,7 +440,7 @@ export function localStorage({ return parsed.tokenIds || null; } catch (error) { - console.warn('Failed to parse cached PKP tokens by address:', error); + logger.warn({ error }, 'Failed to parse cached PKP tokens by address'); return null; } }, diff --git a/packages/auth/src/lib/storage/localStorageNode.ts b/packages/auth/src/lib/storage/localStorageNode.ts index 7560c120cd..238611906e 100644 --- a/packages/auth/src/lib/storage/localStorageNode.ts +++ b/packages/auth/src/lib/storage/localStorageNode.ts @@ -17,6 +17,9 @@ import { PKPData } from '@lit-protocol/schemas'; import type { LitAuthData } from '../types'; import type { LitAuthStorageProvider } from './types'; +import { getChildLogger } from '@lit-protocol/logger'; + +const logger = getChildLogger({ module: 'localStorageNode' }); const LOCALSTORAGE_LIT_AUTH_PREFIX = 'lit-auth'; const LOCALSTORAGE_LIT_PKP_PREFIX = 'lit-pkp-tokens'; @@ -146,10 +149,9 @@ const getNodeStorageInstance = async (storagePath: string): Promise => { const module = await import('node-localstorage'); NodeLocalStorageConstructor = module.LocalStorage; } catch (e) { - console.error( - "localStorageNode: Failed to dynamically import 'node-localstorage'. " + - "Ensure it's installed if running in a Node.js environment. Error: ", - e + logger.error( + { error: e }, + "localStorageNode: Failed to dynamically import 'node-localstorage'. Ensure it's installed if running in a Node.js environment." ); throw new Error( "localStorageNode: 'node-localstorage' module unavailable." 
@@ -172,7 +174,7 @@ export function localStorageNode({ }: LocalStorageNodeConfig): LitAuthStorageProvider { if (!isNodeEnvironment) { // Return a stub provider for non-Node.js environments - console.warn( + logger.warn( 'localStorageNode: Detected non-Node.js environment. ' + 'Returning a non-functional stub. This provider is for Node.js use only.' ); @@ -187,7 +189,7 @@ export function localStorageNode({ authMethodId, tokenIds, }): Promise { - console.warn( + logger.warn( 'localStorageNode (stub): writePKPTokens called in browser.' ); }, @@ -195,59 +197,59 @@ export function localStorageNode({ authMethodType, authMethodId, }): Promise { - console.warn( + logger.warn( 'localStorageNode (stub): readPKPTokens called in browser.' ); return null; }, async write({ address, authData }): Promise { - console.warn('localStorageNode (stub): write called in browser.'); + logger.warn('localStorageNode (stub): write called in browser.'); }, async read({ address }): Promise { - console.warn('localStorageNode (stub): read called in browser.'); + logger.warn('localStorageNode (stub): read called in browser.'); return null; }, async writeInnerDelegationAuthSig({ publicKey, authSig }) { - console.warn( + logger.warn( 'localStorageNode (stub): writeInnerDelegationAuthSig called in browser.' ); }, async readInnerDelegationAuthSig({ publicKey }) { - console.warn( + logger.warn( 'localStorageNode (stub): readInnerDelegationAuthSig called in browser.' 
); return null; }, async writePKPs({ authMethodType, authMethodId, pkps }): Promise { - console.warn('localStorageNode (stub): writePKPs called in browser.'); + logger.warn('localStorageNode (stub): writePKPs called in browser.'); }, async readPKPs({ authMethodType, authMethodId, }): Promise { - console.warn('localStorageNode (stub): readPKPs called in browser.'); + logger.warn('localStorageNode (stub): readPKPs called in browser.'); return null; }, async writePKPDetails({ tokenId, publicKey, ethAddress }): Promise { - console.warn( + logger.warn( 'localStorageNode (stub): writePKPDetails called in browser.' ); }, async readPKPDetails({ tokenId, }): Promise<{ publicKey: string; ethAddress: string } | null> { - console.warn( + logger.warn( 'localStorageNode (stub): readPKPDetails called in browser.' ); return null; }, async writePKPTokensByAddress({ ownerAddress, tokenIds }): Promise { - console.warn( + logger.warn( 'localStorageNode (stub): writePKPTokensByAddress called in browser.' ); }, async readPKPTokensByAddress({ ownerAddress }): Promise { - console.warn( + logger.warn( 'localStorageNode (stub): readPKPTokensByAddress called in browser.' 
); return null; @@ -313,9 +315,9 @@ export function localStorageNode({ // Ensure robust parsing return JSON.parse(value) as LitAuthData; } catch (error) { - console.error( - 'localStorageNode: Failed to parse stored auth data:', - error + logger.error( + { error }, + 'localStorageNode: Failed to parse stored auth data' ); // Optionally clear the corrupted item by re-getting store instance // const storeToClear = await getMemoisedStorageInstance(); @@ -408,9 +410,9 @@ export function localStorageNode({ return parsed.tokenIds || null; } catch (error) { - console.error( - 'localStorageNode: Failed to parse cached PKP tokens:', - error + logger.error( + { error }, + 'localStorageNode: Failed to parse cached PKP tokens' ); return null; } @@ -466,9 +468,9 @@ export function localStorageNode({ return parsed.pkps || null; } catch (error) { - console.error( - 'localStorageNode: Failed to parse cached PKP data:', - error + logger.error( + { error }, + 'localStorageNode: Failed to parse cached PKP data' ); return null; } @@ -528,9 +530,9 @@ export function localStorageNode({ } return null; } catch (error) { - console.error( - 'localStorageNode: Failed to parse cached PKP details:', - error + logger.error( + { error }, + 'localStorageNode: Failed to parse cached PKP details' ); return null; } @@ -575,9 +577,9 @@ export function localStorageNode({ return parsed.tokenIds || null; } catch (error) { - console.error( - 'localStorageNode: Failed to parse cached PKP tokens:', - error + logger.error( + { error }, + 'localStorageNode: Failed to parse cached PKP tokens' ); return null; } diff --git a/packages/crypto/package.json b/packages/crypto/package.json index 2692bd3d46..5816ec1232 100644 --- a/packages/crypto/package.json +++ b/packages/crypto/package.json @@ -25,6 +25,7 @@ "main": "./src/index.js", "typings": "./src/index.d.ts", "dependencies": { + "@lit-protocol/logger": "workspace:*", "@lit-protocol/nacl": "7.1.1", "@lit-protocol/uint8arrays": "7.1.1", "@noble/curves": 
"1.8.1", diff --git a/packages/crypto/src/lib/misc.ts b/packages/crypto/src/lib/misc.ts index 7eae447d76..e9c07035ed 100644 --- a/packages/crypto/src/lib/misc.ts +++ b/packages/crypto/src/lib/misc.ts @@ -19,11 +19,7 @@ import { NodeErrorV3, RelayClaimProcessor, } from '@lit-protocol/types'; -import { getGlobal } from '@lit-protocol/constants'; - -const globalScope = getGlobal(); - -const logBuffer: any[][] = []; +import { getChildLogger, logger as sdkLogger } from '@lit-protocol/logger'; const ajv = new Ajv(); // Module scoped variable to store the LitNodeClientConfig passed to LitCore @@ -41,9 +37,7 @@ export const setMiscLitConfig = (config: LitNodeClientConfig | undefined) => { * @returns { void } */ export const printError = (e: Error): void => { - console.log('Error Stack', e.stack); - console.log('Error Name', e.name); - console.log('Error Message', e.message); + sdkLogger.error({ err: e, stack: e.stack, name: e.name }, e.message); }; /** @@ -113,13 +107,8 @@ export const findMostCommonResponse = >( return result as T; }; -declare global { - var logger: any; - var logManager: any; -} - export const getLoggerbyId = (id: string) => { - return globalScope.logManager.get(id); + return getChildLogger({ requestId: id }); }; /** @@ -131,101 +120,21 @@ export const getLoggerbyId = (id: string) => { * @returns { void } */ export const log = (...args: any): void => { - if (!globalThis) { - // there is no globalThis, just print the log - console.log(...args); - return; - } - - // check if config is loaded yet - if (!litConfig) { - // config isn't loaded yet, push into buffer - logBuffer.push(args); - return; - } - - // if there are logs in buffer, print them first and empty the buffer. - while (logBuffer.length > 0) { - const log = logBuffer.shift() ?? 
''; - globalThis?.logger && globalThis?.logger.debug(...log); - } - - globalThis?.logger && globalThis?.logger.debug(...args); + sdkLogger.debug(...args); }; export const logWithRequestId = (id: string, ...args: any) => { - if (!globalThis) { - // there is no globalThis, just print the log - console.log(...args); - return; - } - - // check if config is loaded yet - if (!litConfig) { - // config isn't loaded yet, push into buffer - logBuffer.push(args); - return; - } - - // if there are there are logs in buffer, print them first and empty the buffer. - while (logBuffer.length > 0) { - const log = logBuffer.shift() ?? ''; - globalThis?.logger && - globalScope.logManager.get(globalScope.logger.category, id).debug(...log); - } - - globalThis?.logger && - globalScope.logManager.get(globalScope.logger.category, id).debug(...args); + const child = getChildLogger({ requestId: id }); + child.debug(...args); }; export const logErrorWithRequestId = (id: string, ...args: any) => { - if (!globalThis) { - // there is no globalThis, just print the log - console.log(...args); - return; - } - - // check if config is loaded yet - if (!litConfig) { - // config isn't loaded yet, push into buffer - logBuffer.push(args); - return; - } - - // if there are there are logs in buffer, print them first and empty the buffer. - while (logBuffer.length > 0) { - const log = logBuffer.shift() ?? 
''; - globalThis?.logger && - globalScope.logManager.get(globalScope.logger.category, id).error(...log); - } - - globalThis?.logger && - globalScope.logManager.get(globalScope.logger.category, id).error(...args); + const child = getChildLogger({ requestId: id }); + child.error(...args); }; export const logError = (...args: any) => { - if (!globalThis) { - // there is no globalThis, just print the log - console.log(...args); - return; - } - - // check if config is loaded yet - if (!litConfig) { - // config isn't loaded yet, push into buffer - logBuffer.push(args); - return; - } - - // if there are there are logs in buffer, print them first and empty the buffer. - while (logBuffer.length > 0) { - const log = logBuffer.shift() ?? ''; - globalThis?.logger && - globalScope.logManager.get(globalScope.logger.category).error(...log); - } - - globalThis?.logger && - globalScope.logManager.get(globalScope.logger.category).error(...args); + sdkLogger.error(...args); }; /** diff --git a/packages/e2e/package.json b/packages/e2e/package.json index a38def6520..cd8eee4356 100644 --- a/packages/e2e/package.json +++ b/packages/e2e/package.json @@ -73,8 +73,6 @@ "ethers": "5.7.2", "jose": "4.14.4", "pako": "2.1.0", - "pino": "9.6.0", - "pino-caller": "4.0.0", "siwe": "2.3.2", "siwe-recap": "0.0.2-alpha.0", "stytch": "12.4.0", diff --git a/packages/lit-client/package.json b/packages/lit-client/package.json index 582e0e8d16..965ebf2818 100644 --- a/packages/lit-client/package.json +++ b/packages/lit-client/package.json @@ -45,6 +45,7 @@ } }, "dependencies": { + "@lit-protocol/logger": "workspace:*", "@lit-protocol/uint8arrays": "7.1.1", "bs58": "6.0.0", "typestub-ipfs-only-hash": "^4.0.0", diff --git a/packages/lit-client/src/lib/LitClient/intergrations/createPkpViemAccount.ts b/packages/lit-client/src/lib/LitClient/intergrations/createPkpViemAccount.ts index 37451c451c..5116c2a72d 100644 --- a/packages/lit-client/src/lib/LitClient/intergrations/createPkpViemAccount.ts +++ 
b/packages/lit-client/src/lib/LitClient/intergrations/createPkpViemAccount.ts @@ -202,7 +202,7 @@ export async function createPKPViemAccount({ return tx as TransactionSerializable; } catch (err) { - console.error('viem => failed to populate tx fields:', err); + _logger.error({ err }, 'viem => failed to populate tx fields'); throw err; } } @@ -270,18 +270,21 @@ export async function createPKPViemAccount({ populatedTx.maxFeePerGas = baseFeeEstimate * 2n + priorityFee; // Conservative estimate populatedTx.type = 'eip1559'; - console.log('viem => defaulting to EIP-1559 fees'); - console.log( - 'viem => maxPriorityFeePerGas:', - populatedTx.maxPriorityFeePerGas + _logger.debug('viem => defaulting to EIP-1559 fees'); + _logger.debug( + { maxPriorityFeePerGas: populatedTx.maxPriorityFeePerGas }, + 'viem => maxPriorityFeePerGas' + ); + _logger.debug( + { maxFeePerGas: populatedTx.maxFeePerGas }, + 'viem => maxFeePerGas' ); - console.log('viem => maxFeePerGas:', populatedTx.maxFeePerGas); } // Set default gas if not provided if (!populatedTx.gas) { populatedTx.gas = 21000n; // Default gas for simple transfers - console.log('viem => defaulting gas to 21000'); + _logger.debug({ gas: populatedTx.gas }, 'viem => defaulting gas'); } // Ensure type is set for clarity diff --git a/packages/lit-client/src/lib/LitNodeClient/LitNodeApi/src/handshake.ts b/packages/lit-client/src/lib/LitNodeClient/LitNodeApi/src/handshake.ts index a2e56e6485..fd6e1046a3 100644 --- a/packages/lit-client/src/lib/LitNodeClient/LitNodeApi/src/handshake.ts +++ b/packages/lit-client/src/lib/LitNodeClient/LitNodeApi/src/handshake.ts @@ -163,7 +163,7 @@ export const handshake = async (params: { return errorData as RawHandshakeResponse; } } catch (parseError) { - console.error('🔍 Failed to parse errorObject:', parseError); + _logger.error({ parseError }, '🔍 Failed to parse errorObject'); } } @@ -185,8 +185,9 @@ export const resolveHandshakeResponse = ({ ); if (!latestBlockhash) { - console.error( - `Error 
getting latest blockhash from the nodes. Request ID: ${requestId}` + _logger.error( + { requestId }, + 'Error getting latest blockhash from the nodes' ); throw new InvalidEthBlockhash( diff --git a/packages/lit-client/src/lib/LitNodeClient/LitNodeApi/src/helper/sendNodeRequest.ts b/packages/lit-client/src/lib/LitNodeClient/LitNodeApi/src/helper/sendNodeRequest.ts index 6309347460..c4b717f83e 100644 --- a/packages/lit-client/src/lib/LitNodeClient/LitNodeApi/src/helper/sendNodeRequest.ts +++ b/packages/lit-client/src/lib/LitNodeClient/LitNodeApi/src/helper/sendNodeRequest.ts @@ -1,5 +1,9 @@ import { NetworkError } from '@lit-protocol/constants'; -import { getChildLogger } from '@lit-protocol/logger'; +import { + generateCurlCommand, + getChildLogger, + writeCurlCommandDebugFile, +} from '@lit-protocol/logger'; const _logger = getChildLogger({ module: 'sendNodeRequest', @@ -7,19 +11,6 @@ const _logger = getChildLogger({ const ABORT_TIMEOUT = 20_000; // Abort after 20s -/** - * Generates a CURL command string from request parameters for debugging purposes - */ -function generateCurlCommand(url: string, req: any): string { - const headers = Object.entries(req.headers) - .map(([key, value]) => `-H "${key}: ${value}"`) - .join(' '); - - const body = req.body ? 
`--data '${req.body}'` : ''; - - return `curl -X ${req.method} ${headers} ${body} "${url}"`.trim(); -} - export async function sendNodeRequest( // Interface for common request parameters params: { @@ -49,7 +40,7 @@ export async function sendNodeRequest( const requestData = { ...params.data, epoch: params.epoch }; try { - const req = { + const req: RequestInit = { method: 'POST', headers: _headers, body: JSON.stringify(requestData), @@ -62,6 +53,11 @@ export async function sendNodeRequest( // Generate and log CURL command const curlCommand = generateCurlCommand(_fullUrl, req); _logger.info({ curlCommand }, '🔄 CURL command:'); + await writeCurlCommandDebugFile({ + requestId: params.requestId, + curlCommand, + idHeaderName: 'X-Request-Id', + }); // if (_fullUrl.includes('sign_session_key')) { // console.log("Curl command: ", curlCommand); @@ -85,7 +81,10 @@ export async function sendNodeRequest( if (isDebugMode) { const timestamp = new Date().toISOString(); - console.log(`🔄 response at ${timestamp}`, response); + _logger.debug( + { timestamp, status: response.status, url: _fullUrl }, + '🔄 response received' + ); } const isJson = response.headers diff --git a/packages/logger/README.md b/packages/logger/README.md index e20602eadc..13eabd0f87 100644 --- a/packages/logger/README.md +++ b/packages/logger/README.md @@ -1,6 +1,163 @@ -# logger +# @lit-protocol/logger -This package provides a centralized logging utility for the Lit Protocol SDK, offering structured logging capabilities across all packages. It is based in pino logger for minimal overhead and enables consistent log formatting, level-based filtering, and standardized error reporting throughout the Lit Protocol ecosystem. +Centralized logging for the Lit Protocol SDK. The default backend is structured `pino` logging, but you can attach custom transports (DataDog, Sentry, your own system) and it works in both Node.js and browsers. 
+ +## Basic usage + +```ts +import { logger, getChildLogger } from '@lit-protocol/logger'; + +logger.info('SDK started'); + +const log = getChildLogger({ module: 'my-feature' }); +log.debug({ foo: 'bar' }, 'doing work'); +``` + +## Log levels + +Logging verbosity is controlled by: + +- Node.js: `process.env.LOG_LEVEL` +- Browser: `globalThis.LOG_LEVEL` + +Supported levels: `silent`, `fatal`, `error`, `warn`, `info`, `debug`, `trace`, `debug_text`. + +`debug_text` switches the default output to console-style text (not JSON). `debug2` is a deprecated alias for `debug_text`. + +## Configuration + +Use `setLoggerOptions` at app startup to change level/name or add metadata: + +```ts +import { setLoggerOptions } from '@lit-protocol/logger'; + +setLoggerOptions({ + level: 'info', + name: 'MyApp', + bindings: { app: 'my-app' }, +}); +``` + +### Custom transports + +To forward logs to any system, provide `transports`. Each transport receives a normalized `LogEntry`: + +```ts +type LogEntry = { + level: LogLevel; + time: number; + msg?: string; + data?: unknown; + bindings: Record; + args: unknown[]; +}; +``` + +Example: + +```ts +import { setLoggerOptions } from '@lit-protocol/logger'; + +setLoggerOptions({ + level: 'info', + transports: [ + (entry) => { + mySink.send(entry); + }, + ], + useDefaultTransports: true, // keep default pino/console output +}); +``` + +To _replace_ the default backend entirely, set `useDefaultTransports: false`. + +### DataDog examples + +**Node.js:** DataDog agents can ingest JSON logs from stdout. The default pino output is compatible; just set `LOG_LEVEL` and run your app. + +**Browser:** using `@datadog/browser-logs`: + +```ts +import { datadogLogs } from '@datadog/browser-logs'; +import { setLoggerOptions } from '@lit-protocol/logger'; + +setLoggerOptions({ + level: 'info', + useDefaultTransports: false, + transports: [ + ({ level, msg, bindings, data }) => { + if (level === 'silent') return; + + const status = + level === 'fatal' + ? 
'error' + : level === 'trace' || level === 'debug_text' || level === 'debug2' + ? 'debug' + : level; + + const context = { ...bindings, ...(data as any) }; + + if (status === 'error' && (context as any).err instanceof Error) { + datadogLogs.logger.error(msg || 'error', context, (context as any).err); + return; + } + + (datadogLogs.logger as any)[status](msg || 'log', context); + }, + ], +}); +``` + +### OpenTelemetry example (Node.js) + +You can forward logs via `transports` to the OpenTelemetry Logs API: + +```ts +import { logs, SeverityNumber } from '@opentelemetry/api-logs'; +import { + LoggerProvider, + BatchLogRecordProcessor, +} from '@opentelemetry/sdk-logs'; +import { OTLPLogExporter } from '@opentelemetry/exporter-logs-otlp-http'; +import { setLoggerOptions } from '@lit-protocol/logger'; + +const provider = new LoggerProvider(); +provider.addLogRecordProcessor( + new BatchLogRecordProcessor( + new OTLPLogExporter({ url: 'http://localhost:4318/v1/logs' }) + ) +); +logs.setGlobalLoggerProvider(provider); + +const otelLogger = logs.getLogger('lit-sdk'); + +const levelToSeverity: Record = { + fatal: SeverityNumber.FATAL, + error: SeverityNumber.ERROR, + warn: SeverityNumber.WARN, + info: SeverityNumber.INFO, + debug: SeverityNumber.DEBUG, + debug_text: SeverityNumber.DEBUG, + trace: SeverityNumber.TRACE, +}; + +setLoggerOptions({ + level: 'info', + useDefaultTransports: false, + transports: [ + ({ level, msg, bindings, data, time }) => { + if (level === 'silent') return; + otelLogger.emit({ + body: msg ?? 'log', + severityNumber: levelToSeverity[level] ?? SeverityNumber.UNSPECIFIED, + severityText: level, + attributes: { ...bindings, ...(data as any) }, + timestamp: time, + }); + }, + ], +}); +``` ## Building @@ -8,4 +165,4 @@ Run `nx build logger` to build the library. ## Running unit tests -Run `nx test logger` to execute the unit tests via [Jest](https://jestjs.io). +Run `nx test logger` to execute unit tests via Jest. 
diff --git a/packages/logger/src/index.ts b/packages/logger/src/index.ts index 8c9c7650cd..498276c2c0 100644 --- a/packages/logger/src/index.ts +++ b/packages/logger/src/index.ts @@ -1,2 +1,3 @@ export * from './lib/logger'; export { getChildLogger } from './lib/logger'; +export * from './lib/curlDebug'; diff --git a/packages/logger/src/lib/curlDebug.ts b/packages/logger/src/lib/curlDebug.ts new file mode 100644 index 0000000000..5bf14dc5bf --- /dev/null +++ b/packages/logger/src/lib/curlDebug.ts @@ -0,0 +1,148 @@ +type FsPromisesLike = { + mkdir: (path: string, options: { recursive: boolean }) => Promise; + writeFile: ( + path: string, + data: string, + options: { encoding: 'utf8' } + ) => Promise; +}; + +type PathLike = { + dirname: (path: string) => string; + isAbsolute: (path: string) => boolean; + join: (...paths: string[]) => string; +}; + +function getEnvVar(key: string): string | undefined { + try { + if (typeof process === 'undefined' || typeof process.env !== 'object') { + return undefined; + } + const value = process.env[key]; + return typeof value === 'string' ? value : undefined; + } catch { + return undefined; + } +} + +function isNodeJsRuntime(): boolean { + try { + return ( + typeof process !== 'undefined' && + typeof process.versions === 'object' && + !!process.versions?.node + ); + } catch { + return false; + } +} + +function isWriteCurlDebugEnabled(): boolean { + const envValue = getEnvVar('LIT_DEBUG_CURL'); + return envValue === 'true' || envValue === '1'; +} + +function sanitizeRequestIdForFilename(requestId: string): string | null { + const trimmed = requestId.trim(); + if (trimmed.length === 0) return null; + + const sanitized = trimmed.replace(/[^a-zA-Z0-9._-]/g, '_'); + if (sanitized === '.' 
|| sanitized === '..') return null; + + return sanitized; +} + +function getNodeRequire(): ((specifier: string) => any) | undefined { + try { + // eslint-disable-next-line no-eval + const req = eval('require') as unknown; + return typeof req === 'function' ? (req as any) : undefined; + } catch { + return undefined; + } +} + +export function generateCurlCommand(url: string, init: RequestInit): string { + const method = init.method ?? 'GET'; + + const headerEntries: Array<[string, string]> = []; + if (Array.isArray(init.headers)) { + for (const entry of init.headers) { + if (!Array.isArray(entry) || entry.length !== 2) continue; + headerEntries.push([String(entry[0]), String(entry[1])]); + } + } else if ( + init.headers && + typeof (init.headers as Headers).forEach === 'function' + ) { + (init.headers as Headers).forEach((value, key) => { + headerEntries.push([key, value]); + }); + } else if (init.headers && typeof init.headers === 'object') { + for (const [key, value] of Object.entries( + init.headers as Record + )) { + headerEntries.push([key, String(value)]); + } + } + + const headers = headerEntries + .map(([key, value]) => `-H "${key}: ${value}"`) + .join(' '); + + const body = typeof init.body === 'string' ? `--data '${init.body}'` : ''; + + return `curl -X ${method} ${headers} ${body} "${url}"`.trim(); +} + +export async function writeCurlCommandDebugFile(params: { + requestId: string; + curlCommand: string; + idHeaderName?: string; +}): Promise { + if (!isNodeJsRuntime() || !isWriteCurlDebugEnabled()) { + return; + } + + try { + const safeRequestId = sanitizeRequestIdForFilename(params.requestId); + if (!safeRequestId) { + return; + } + + const requireFn = getNodeRequire(); + if (!requireFn) { + return; + } + + const fsModule = requireFn('node:fs/promises'); + const pathModule = requireFn('node:path'); + + const fs: FsPromisesLike = fsModule?.default ?? fsModule; + const path: PathLike = pathModule?.default ?? 
pathModule; + + const debugDirEnv = getEnvVar('LIT_DEBUG_CURL_DIR'); + const debugDir = + typeof debugDirEnv === 'string' && debugDirEnv.trim().length > 0 + ? debugDirEnv.trim() + : 'debug'; + + const dirPath = path.isAbsolute(debugDir) + ? debugDir + : path.join(process.cwd(), debugDir); + + const filePath = path.join(dirPath, safeRequestId); + const idHeaderName = params.idHeaderName ?? 'request-id'; + + await fs.mkdir(path.dirname(filePath), { recursive: true }); + await fs.writeFile( + filePath, + `# ${idHeaderName}: ${params.requestId}\n${params.curlCommand}\n`, + { + encoding: 'utf8', + } + ); + } catch { + // Best-effort debug helper; ignore filesystem errors. + } +} diff --git a/packages/logger/src/lib/logger.spec.ts b/packages/logger/src/lib/logger.spec.ts index 0cd4983395..4017320b50 100644 --- a/packages/logger/src/lib/logger.spec.ts +++ b/packages/logger/src/lib/logger.spec.ts @@ -1,6 +1,11 @@ import { Writable } from 'stream'; -import { logger, setLoggerOptions, getChildLogger } from './logger'; +import { + logger, + setLoggerOptions, + getChildLogger, + getDefaultLevel, +} from './logger'; class TestStream extends Writable { public data = ''; @@ -16,8 +21,8 @@ class TestStream extends Writable { } describe('logger', () => { - it('should have default level "info"', () => { - expect(logger.level).toBe('info'); + it('should have default level from environment', () => { + expect(logger.level).toBe(getDefaultLevel()); }); it('setLoggerOptions should update logger options', () => { diff --git a/packages/logger/src/lib/logger.ts b/packages/logger/src/lib/logger.ts index c375930874..91ebe70e90 100644 --- a/packages/logger/src/lib/logger.ts +++ b/packages/logger/src/lib/logger.ts @@ -4,114 +4,349 @@ import pinoInstance, { Logger as Pino, } from 'pino'; -const isNode = () => { - let isNode = false; - // @ts-ignore - if (typeof process === 'object') { - // @ts-ignore - if (typeof process.versions === 'object') { - // @ts-ignore - if (typeof process.versions.node 
!== 'undefined') { - isNode = true; - } - } - } - return isNode; +export type LogLevel = + | 'silent' + | 'fatal' + | 'error' + | 'warn' + | 'info' + | 'debug' + | 'debug_text' + | 'debug2' // deprecated alias for debug_text + | 'trace'; + +export interface LogEntry { + level: LogLevel; + time: number; + msg?: string; + data?: unknown; + bindings: Record; + args: unknown[]; +} + +export type LogTransport = (entry: LogEntry) => void | Promise; + +const LEVEL_RANK: Record = { + silent: 100, + fatal: 60, + error: 50, + warn: 40, + info: 30, + debug: 20, + debug_text: 15, + debug2: 15, + trace: 10, }; -export const getDefaultLevel = () => { - let logLevel = 'silent'; +const canonicalizeLevel = (level: LogLevel): LogLevel => + level === 'debug2' ? 'debug_text' : level; + +const isNodeEnvironment = () => + typeof process !== 'undefined' && + process.versions != null && + process.versions.node != null; + +export const getDefaultLevel = (): LogLevel => { + let logLevel: string | undefined; - if (isNode()) { - logLevel = process.env['LOG_LEVEL'] || 'silent'; + if (isNodeEnvironment()) { + logLevel = process.env['LOG_LEVEL']; } else { - // @ts-ignore - logLevel = globalThis['LOG_LEVEL'] || 'silent'; + // @ts-ignore - globalThis is available in browsers + logLevel = globalThis['LOG_LEVEL']; } - // console.log('✅ logLevel', logLevel); - return logLevel; + const level = (logLevel as LogLevel) || 'silent'; + return canonicalizeLevel(level); }; -const DEFAULT_LOGGER_OPTIONS = { +const DEFAULT_LOGGER_OPTIONS: LoggerOptions = { name: 'LitProtocolSDK', - level: getDefaultLevel() === 'debug2' ? 
'debug' : getDefaultLevel(), + level: getDefaultLevel(), }; -// Custom logger wrapper for debug2 level -const createConsoleLogger = (name: string): any => { - const baseLogger = { - level: 'debug', // Use standard level to avoid pino errors - - // Standard log levels that delegate to console - fatal: (...args: any[]) => console.error(`[${name}] FATAL:`, ...args), - error: (...args: any[]) => console.error(`[${name}] ERROR:`, ...args), - warn: (...args: any[]) => console.warn(`[${name}] WARN:`, ...args), - info: (...args: any[]) => console.info(`[${name}] INFO:`, ...args), - debug: (...args: any[]) => console.log(`[${name}] DEBUG:`, ...args), - trace: (...args: any[]) => console.log(`[${name}] TRACE:`, ...args), - - // Custom debug2 level using console.log - debug2: (...args: any[]) => console.log(`[${name}] DEBUG2:`, ...args), - - // Child logger creation - child: (bindings: any) => { - const childName = bindings.module ? `${name}:${bindings.module}` : name; - return createConsoleLogger(childName); - }, +type LoggerImpl = { + level?: string; + fatal: (...args: any[]) => void; + error: (...args: any[]) => void; + warn: (...args: any[]) => void; + info: (...args: any[]) => void; + debug: (...args: any[]) => void; + debug_text?: (...args: any[]) => void; + debug2?: (...args: any[]) => void; + debugText?: (...args: any[]) => void; + trace: (...args: any[]) => void; + child?: (bindings: Record) => LoggerImpl; + isLevelEnabled?: (level: string) => boolean; +}; + +type Logger = Pino & LoggerImpl; - // Silent method (no-op) - silent: () => {}, +interface InternalConfig { + level: LogLevel; + name: string; + bindings: Record; + transports: LogTransport[]; + useDefaultTransports: boolean; + impl: LoggerImpl; +} - // Add stub methods for pino compatibility - on: () => baseLogger, - addLevel: () => {}, +const normalizeLevelForPino = (level: LogLevel): string => + canonicalizeLevel(level) === 'debug_text' + ? 
'debug' + : canonicalizeLevel(level); + +const createConsoleLogger = (name: string): LoggerImpl => { + const baseLogger: LoggerImpl = { + level: 'debug', + fatal: (...args) => console.error(`[${name}] FATAL:`, ...args), + error: (...args) => console.error(`[${name}] ERROR:`, ...args), + warn: (...args) => console.warn(`[${name}] WARN:`, ...args), + info: (...args) => console.info(`[${name}] INFO:`, ...args), + debug: (...args) => console.log(`[${name}] DEBUG:`, ...args), + trace: (...args) => console.log(`[${name}] TRACE:`, ...args), + child: (bindings) => { + const moduleName = bindings['module']; + const childName = + typeof moduleName === 'string' ? `${name}:${moduleName}` : name; + return createConsoleLogger(childName); + }, isLevelEnabled: () => true, - levelVal: 30, - version: '1.0.0', }; + const debugText = (...args: any[]) => + console.log(`[${name}] DEBUG_TEXT:`, ...args); + + baseLogger.debug_text = debugText; + baseLogger.debugText = debugText; + baseLogger.debug2 = debugText; + return baseLogger; }; -type Logger = Pino; -let logger: Logger = ( - getDefaultLevel() === 'debug2' - ? 
createConsoleLogger(DEFAULT_LOGGER_OPTIONS.name) - : pinoInstance(DEFAULT_LOGGER_OPTIONS) -) as Logger; +const createDefaultImpl = ( + options: LoggerOptions, + destination?: DestinationStream +): LoggerImpl => { + const requestedLevel = canonicalizeLevel( + ((options.level as LogLevel) || getDefaultLevel()) as LogLevel + ); + + if (requestedLevel === 'debug_text') { + return createConsoleLogger(options.name || DEFAULT_LOGGER_OPTIONS.name!); + } + + const effectiveLevel = normalizeLevelForPino(requestedLevel); + + const pinoOptions: LoggerOptions = { + ...DEFAULT_LOGGER_OPTIONS, + ...options, + level: effectiveLevel, + }; + + if (!isNodeEnvironment()) { + pinoOptions.browser = { + asObject: true, + ...(pinoOptions.browser || {}), + }; + } + + return pinoInstance(pinoOptions, destination) as unknown as LoggerImpl; +}; + +const config: InternalConfig = { + level: getDefaultLevel(), + name: DEFAULT_LOGGER_OPTIONS.name!, + bindings: {}, + transports: [], + useDefaultTransports: true, + impl: createDefaultImpl(DEFAULT_LOGGER_OPTIONS), +}; + +const shouldLog = (level: LogLevel): boolean => { + if (config.level === 'silent') return false; + return LEVEL_RANK[level] >= LEVEL_RANK[config.level]; +}; + +const extractMsgAndData = (args: unknown[]): Pick => { + if (args.length === 0) return {}; + const [first, second] = args; + + if (typeof first === 'string') { + if (second && typeof second === 'object') { + return { msg: first, data: second }; + } + return { msg: first }; + } + + if (first instanceof Error) { + if (typeof second === 'string') { + return { msg: second, data: { err: first } }; + } + return { msg: first.message, data: { err: first } }; + } + + if (first && typeof first === 'object') { + const msg = + typeof second === 'string' + ? second + : typeof (first as any).msg === 'string' + ? 
(first as any).msg + : undefined; + return { msg, data: first }; + } + + return { msg: String(first) }; +}; + +const emitToTransports = (entry: LogEntry) => { + for (const transport of config.transports) { + try { + void transport(entry); + } catch { + // ignore transport errors + } + } +}; + +const logWithLevel = ( + level: LogLevel, + getImpl: () => LoggerImpl, + bindings: Record, + args: unknown[] +) => { + const canonicalLevel = canonicalizeLevel(level); + if (!shouldLog(canonicalLevel)) return; + + const impl = getImpl(); + const implLevel = normalizeLevelForPino(canonicalLevel); + const mergedBindings = { + name: config.name, + ...config.bindings, + ...bindings, + }; + const implMethod = + // @ts-ignore - dynamic level access + (impl as any)[canonicalLevel] || + (canonicalLevel === 'debug_text' ? impl.debug : undefined); + + if ( + config.useDefaultTransports && + typeof implMethod === 'function' && + (!impl.isLevelEnabled || impl.isLevelEnabled(implLevel)) + ) { + implMethod.apply(impl, args as any); + } + + if (config.transports.length > 0) { + const { msg, data } = extractMsgAndData(args); + emitToTransports({ + level: canonicalLevel, + time: Date.now(), + msg, + data, + bindings: mergedBindings, + args, + }); + } +}; + +const createLoggerWrapper = ( + getImpl: () => LoggerImpl, + bindings: Record +): LoggerImpl => { + const wrapper: LoggerImpl = { + get level() { + return config.level; + }, + fatal: (...args) => logWithLevel('fatal', getImpl, bindings, args), + error: (...args) => logWithLevel('error', getImpl, bindings, args), + warn: (...args) => logWithLevel('warn', getImpl, bindings, args), + info: (...args) => logWithLevel('info', getImpl, bindings, args), + debug: (...args) => logWithLevel('debug', getImpl, bindings, args), + trace: (...args) => logWithLevel('trace', getImpl, bindings, args), + debug_text: (...args) => + logWithLevel('debug_text', getImpl, bindings, args), + debugText: (...args) => logWithLevel('debug_text', getImpl, bindings, 
args), + debug2: (...args) => logWithLevel('debug_text', getImpl, bindings, args), + child: (childBindings) => { + const mergedBindings = { ...bindings, ...childBindings }; + return createLoggerWrapper(() => { + const impl = getImpl(); + return impl.child ? impl.child(childBindings) : impl; + }, mergedBindings); + }, + }; + + return wrapper; +}; + +const rootLogger = createLoggerWrapper(() => config.impl, { + name: config.name, +}); + +type ExtraLoggerOptions = { + transports?: LogTransport[]; + useDefaultTransports?: boolean; + impl?: LoggerImpl; + bindings?: Record; +}; function setLoggerOptions( - loggerOptions: LoggerOptions, + loggerOptions: LoggerOptions & ExtraLoggerOptions, destination?: DestinationStream ): Logger { - const finalOptions = { - ...DEFAULT_LOGGER_OPTIONS, - ...loggerOptions, - }; + const { transports, useDefaultTransports, impl, bindings, ...pinoOptions } = + loggerOptions || {}; + + if (bindings) { + config.bindings = { ...config.bindings, ...bindings }; + } - // Use console logger for debug2 level - if (finalOptions.level === 'debug2') { - logger = createConsoleLogger( - finalOptions.name || 'LitProtocolSDK' - ) as Logger; + if (typeof useDefaultTransports === 'boolean') { + config.useDefaultTransports = useDefaultTransports; + } + + if (transports) { + config.transports = transports; + } + + const level = canonicalizeLevel( + ((loggerOptions.level as LogLevel) || getDefaultLevel()) as LogLevel + ); + config.level = level; + + const name = (loggerOptions.name as string) || DEFAULT_LOGGER_OPTIONS.name!; + config.name = name; + + if (impl) { + config.impl = impl; } else { - // Ensure we don't pass debug2 to pino - convert to debug instead - const pinoOptions = { - ...finalOptions, - level: finalOptions.level === 'debug2' ? 
'debug' : finalOptions.level, - }; - logger = pinoInstance(pinoOptions, destination); + if (level === 'debug_text') { + config.impl = createConsoleLogger(name); + } else { + const effectivePinoOptions: LoggerOptions = { + ...DEFAULT_LOGGER_OPTIONS, + ...pinoOptions, + level: normalizeLevelForPino(level), + name, + base: { ...(pinoOptions.base || {}), ...config.bindings }, + }; + + config.impl = createDefaultImpl(effectivePinoOptions, destination); + } } - return logger; + return rootLogger as unknown as Logger; } function getChildLogger( - ...childParams: Parameters + ...childParams: Parameters> ): Logger { - return logger.child(...childParams); + // Root logger always has child() + // @ts-ignore + return (rootLogger.child as any)(...childParams) as Logger; } -export { getChildLogger, logger, setLoggerOptions }; +export { getChildLogger, rootLogger as logger, setLoggerOptions }; export type { Logger }; diff --git a/packages/networks/package.json b/packages/networks/package.json index bdffb69307..e32ffd0fb0 100644 --- a/packages/networks/package.json +++ b/packages/networks/package.json @@ -19,6 +19,7 @@ }, "dependencies": { "@lit-protocol/contracts": "workspace:*", + "@lit-protocol/logger": "workspace:*", "@lit-protocol/nacl": "7.1.1", "@noble/curves": "1.8.1", "@wagmi/core": "2.22.1", @@ -26,8 +27,6 @@ "elysia": "1.2.25", "ethers": "5.7.2", "node-localstorage": "3.0.5", - "pino": "9.6.0", - "pino-caller": "4.0.0", "zod": "3.24.3", "@noble/hashes": "^1.8.0", "tslib": "^2.8.1" diff --git a/packages/networks/src/networks/shared/helpers/handleAuthServerRequest.ts b/packages/networks/src/networks/shared/helpers/handleAuthServerRequest.ts index 91a6f7b8af..a871a8f94a 100644 --- a/packages/networks/src/networks/shared/helpers/handleAuthServerRequest.ts +++ b/packages/networks/src/networks/shared/helpers/handleAuthServerRequest.ts @@ -57,7 +57,7 @@ export const handleAuthServerRequest = async (params: { data: returnValue.data, }; } catch (error: any) { - 
console.error(`Error during ${params.jobName} polling:`, error); + logger.error({ error }, `Error during ${params.jobName} polling`); const errMsg = error instanceof Error ? error.message : String(error); throw new Error(`Failed to ${params.jobName} after polling: ${errMsg}`); } diff --git a/packages/networks/src/networks/shared/logger.ts b/packages/networks/src/networks/shared/logger.ts index 1c500dc8b6..9791a255f4 100644 --- a/packages/networks/src/networks/shared/logger.ts +++ b/packages/networks/src/networks/shared/logger.ts @@ -1,78 +1,9 @@ -import type { Logger as PinoLogger } from 'pino'; -import pino from 'pino'; - -const getLogLevel = (): string => { - // Check for process.env.LOG_LEVEL in a Node.js-like environment - if ( - typeof process !== 'undefined' && - process.env && - typeof process.env['LOG_LEVEL'] === 'string' - ) { - const level = process.env['LOG_LEVEL']; - // Convert debug2 to debug for pino compatibility - return level === 'debug2' ? 'debug' : level; - } - // Default log level for browser or when LOG_LEVEL is not set - return 'info'; -}; - -// Initial logger setup - this variable will be exported -let logger: PinoLogger; - -const isNodeEnvironment = - typeof process !== 'undefined' && - process.versions != null && - process.versions.node != null; - -if (isNodeEnvironment) { - // Node.js initial setup (basic pino, will be attempted to be enhanced) - logger = pino({ level: getLogLevel() }); -} else { - // Browser setup - logger = pino({ - level: getLogLevel(), - browser: { - asObject: true, // Makes log objects easier to inspect in browser consoles - }, - }); -} - -// Asynchronous function to attempt to enhance the logger with pino-caller in Node.js -async function tryEnhanceLoggerForNode() { - // This check is technically redundant if this function is only called in Node context, - // but it's a good safeguard. - if (isNodeEnvironment) { - try { - // Dynamically import pino-caller. This prevents it from being in browser bundles. 
- const pinoCallerModule: any = await import('pino-caller'); - // Handle potential differences in how CJS modules are exposed via dynamic import and avoid type mismatches - const pinoCallerWrapper: (logger: any) => any = - (pinoCallerModule && (pinoCallerModule.default || pinoCallerModule)) || - ((l: any) => l); - - // Create a new pino instance specifically for pino-caller to wrap. - // This ensures pino-caller operates on a logger with the correct Node.js settings. - const nodeBaseLogger = pino({ level: getLogLevel() }); - logger = pinoCallerWrapper(nodeBaseLogger) as unknown as PinoLogger; // Reassign the exported logger - } catch (e) { - // If pino-caller fails to load, the basic pino logger for Node.js (already set) will be used. - // You could add a log message here if desired, e.g., using console.error - // console.error('pino-caller could not be loaded for Node.js. Falling back to basic pino logger.', e); - } - } -} - -// In Node.js environments, attempt to enhance the logger. -// This is a "fire-and-forget" operation. The logger is usable synchronously -// from the start, and gets upgraded with caller info if pino-caller loads successfully. -if (isNodeEnvironment) { - tryEnhanceLoggerForNode().catch((error) => { - // The basic logger is already in place, so we just log the enhancement error. - // console.error('Error during asynchronous logger enhancement for Node.js:', error); - }); -} - -// Export the logger instance. It will be the basic one initially, -// and in Node.js, it's potentially replaced by the pino-caller-enhanced one -// after the asynchronous import and enhancement completes. -export { logger }; +// Legacy entrypoint kept for internal imports. +// All logging is now centralized in @lit-protocol/logger. 
+export { logger } from '@lit-protocol/logger'; +export type { + Logger, + LogLevel, + LogTransport, + LogEntry, +} from '@lit-protocol/logger'; diff --git a/packages/networks/src/networks/vNaga/shared/factories/BaseModuleFactory.ts b/packages/networks/src/networks/vNaga/shared/factories/BaseModuleFactory.ts index a7fc555004..d798d173cf 100644 --- a/packages/networks/src/networks/vNaga/shared/factories/BaseModuleFactory.ts +++ b/packages/networks/src/networks/vNaga/shared/factories/BaseModuleFactory.ts @@ -433,7 +433,7 @@ export function createBaseModule(config: BaseModuleConfig) { scopes?: ('sign-anything' | 'personal-sign' | 'no-permissions')[]; apiKey?: string; }) => { - console.log('[BaseModuleFactory.authService.pkpMint] params:', params); + _logger.debug({ params }, '[authService.pkpMint] params'); return await handleAuthServerRequest({ jobName: 'PKP Minting', serverUrl: params.authServiceBaseUrl, diff --git a/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedAddressByIdentifier.ts b/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedAddressByIdentifier.ts index be8c17eb9d..2f37775603 100644 --- a/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedAddressByIdentifier.ts +++ b/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/highLevelApis/PKPPermissionsManager/handlers/removePermittedAddressByIdentifier.ts @@ -1,4 +1,5 @@ import { z } from 'zod'; +import { logger } from '@lit-protocol/logger'; import { DefaultNetworkConfig } from '../../../../../../../shared/interfaces/NetworkContext'; import { PkpIdentifierRaw, @@ -41,8 +42,10 @@ export async function removePermittedAddressByIdentifier( networkCtx ); - console.log('❌ TARGET ADDRESS:', targetAddress); - console.log('❌ PKP TOKEN ID:', 
pkpTokenId); + logger.debug( + { targetAddress, pkpTokenId }, + 'Removing permitted address by identifier' + ); return removePermittedAddress( { diff --git a/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/highLevelApis/mintPKP/mintWithMultiAuths.ts b/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/highLevelApis/mintPKP/mintWithMultiAuths.ts index c61bd06ed4..ce798bf0e9 100644 --- a/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/highLevelApis/mintPKP/mintWithMultiAuths.ts +++ b/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/highLevelApis/mintPKP/mintWithMultiAuths.ts @@ -110,8 +110,6 @@ export const mintWithMultiAuths = async ( logger.debug({ validatedRequest }); - console.log('🔥 mintWithMultiAuths:', validatedRequest); - const tx = await mintNextAndAddAuthMethods( { keyType: 2, diff --git a/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAuthMethod.ts b/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAuthMethod.ts index a574a794f6..8cd8c16947 100644 --- a/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAuthMethod.ts +++ b/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/addPermittedAuthMethod.ts @@ -45,8 +45,6 @@ export async function addPermittedAuthMethod( const validatedRequest = addPermittedAuthMethodSchema.parse(request); logger.debug({ validatedRequest }, 'Adding permitted auth method'); - console.log('🔐 ADD PERMITTED AUTH METHOD:', validatedRequest); - const { pkpPermissionsContract, publicClient } = createContractsManager( networkCtx, accountOrWalletClient diff --git 
a/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAddress.ts b/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAddress.ts index 8cc31ce844..2d939119e2 100644 --- a/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAddress.ts +++ b/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAddress.ts @@ -37,8 +37,6 @@ export async function removePermittedAddress( const validatedRequest = removePermittedAddressSchema.parse(request); logger.debug({ validatedRequest }); - console.log('🔥 REMOVE PERMITTED ADDRESS:', validatedRequest); - const { pkpPermissionsContract, pkpNftContract, publicClient, walletClient } = createContractsManager(networkCtx, accountOrWalletClient); diff --git a/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAuthMethod.ts b/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAuthMethod.ts index 3b09ecbc43..2cfbb428d5 100644 --- a/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAuthMethod.ts +++ b/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAuthMethod.ts @@ -42,8 +42,6 @@ export async function removePermittedAuthMethod( const validatedRequest = removePermittedAuthMethodSchema.parse(request); logger.debug({ validatedRequest }, 'Removing permitted auth method'); - console.log('🔥 REMOVE PERMITTED AUTH METHOD:', validatedRequest); - const { pkpPermissionsContract, publicClient } = createContractsManager( networkCtx, accountOrWalletClient diff --git 
a/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAuthMethodScope.ts b/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAuthMethodScope.ts index 40d1406e68..bf45584705 100644 --- a/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAuthMethodScope.ts +++ b/packages/networks/src/networks/vNaga/shared/managers/LitChainClient/apis/rawContractApis/permissions/write/removePermittedAuthMethodScope.ts @@ -43,8 +43,6 @@ export async function removePermittedAuthMethodScope( const validatedRequest = removePermittedAuthMethodScopeSchema.parse(request); logger.debug({ validatedRequest }, 'Removing permitted auth method scope'); - console.log('🔥 REMOVE PERMITTED AUTH METHOD SCOPE:', validatedRequest); - const { pkpPermissionsContract, publicClient } = createContractsManager( networkCtx, accountOrWalletClient diff --git a/packages/networks/src/networks/vNaga/shared/managers/api-manager/helper/get-signatures.ts b/packages/networks/src/networks/vNaga/shared/managers/api-manager/helper/get-signatures.ts index 12484dcc1e..8cc1b272c9 100644 --- a/packages/networks/src/networks/vNaga/shared/managers/api-manager/helper/get-signatures.ts +++ b/packages/networks/src/networks/vNaga/shared/managers/api-manager/helper/get-signatures.ts @@ -6,6 +6,7 @@ import { combinePKPSignNodeShares, hexifyStringValues, logErrorWithRequestId, + logWithRequestId, mostCommonString, } from '@lit-protocol/crypto'; import { @@ -199,7 +200,8 @@ export const combineExecuteJSSignatures = async (params: { continue; } - console.log( + logWithRequestId( + requestId, `[executeJs] dropping signature share ${index + 1}/${ shares.length } for ${signatureKey}; drops left ${dropBudget - 1}` diff --git a/packages/wrapped-keys/package.json b/packages/wrapped-keys/package.json index 1d9631342d..ad14059f5e 
100644 --- a/packages/wrapped-keys/package.json +++ b/packages/wrapped-keys/package.json @@ -30,6 +30,7 @@ "@lit-protocol/auth": "workspace:*", "@lit-protocol/constants": "workspace:*", "@lit-protocol/lit-client": "workspace:*", + "@lit-protocol/logger": "workspace:*", "@lit-protocol/networks": "workspace:*", "@lit-protocol/schemas": "workspace:*", "@lit-protocol/types": "workspace:*" diff --git a/packages/wrapped-keys/src/lib/service-client/utils.ts b/packages/wrapped-keys/src/lib/service-client/utils.ts index f4eb1fc4c6..7c294b691c 100644 --- a/packages/wrapped-keys/src/lib/service-client/utils.ts +++ b/packages/wrapped-keys/src/lib/service-client/utils.ts @@ -1,4 +1,8 @@ import { LIT_NETWORK_VALUES } from '@lit-protocol/constants'; +import { + generateCurlCommand, + writeCurlCommandDebugFile, +} from '@lit-protocol/logger'; import { AuthSig } from '@lit-protocol/types'; import { @@ -110,6 +114,13 @@ export async function makeRequest({ requestId: string; }) { try { + const curlCommand = generateCurlCommand(url, init); + await writeCurlCommandDebugFile({ + requestId, + curlCommand, + idHeaderName: 'x-correlation-id', + }); + const response = await fetch(url, { ...init }); if (!response.ok) { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8c43ebb33c..16be05dfc4 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -381,6 +381,9 @@ importers: '@ethersproject/providers': specifier: 5.7.0 version: 5.7.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) + '@lit-protocol/logger': + specifier: workspace:* + version: link:../../dist/packages/logger ethers: specifier: 5.7.2 version: 5.7.2(bufferutil@4.0.9)(utf-8-validate@5.0.10) @@ -407,6 +410,9 @@ importers: packages/auth: dependencies: + '@lit-protocol/logger': + specifier: workspace:* + version: link:../../dist/packages/logger '@noble/curves': specifier: 1.8.1 version: 1.8.1 @@ -459,6 +465,9 @@ importers: '@ethersproject/transactions': specifier: 5.7.0 version: 5.7.0 + '@lit-protocol/logger': + specifier: workspace:* + 
version: link:../../dist/packages/logger '@wagmi/core': specifier: 2.22.1 version: 2.22.1(@tanstack/query-core@5.90.5)(@types/react@18.3.27)(react@18.3.1)(typescript@5.8.3)(use-sync-external-store@1.4.0(react@18.3.1))(viem@2.38.3(bufferutil@4.0.9)(typescript@5.8.3)(utf-8-validate@5.0.10)(zod@3.24.3)) @@ -615,6 +624,9 @@ importers: packages/crypto: dependencies: + '@lit-protocol/logger': + specifier: workspace:* + version: link:../../dist/packages/logger '@lit-protocol/nacl': specifier: 7.1.1 version: 7.1.1 @@ -754,12 +766,6 @@ importers: pako: specifier: 2.1.0 version: 2.1.0 - pino: - specifier: 9.6.0 - version: 9.6.0 - pino-caller: - specifier: 4.0.0 - version: 4.0.0 siwe: specifier: 2.3.2 version: 2.3.2(ethers@5.7.2(bufferutil@4.0.9)(utf-8-validate@5.0.10)) @@ -792,6 +798,9 @@ importers: packages/lit-client: dependencies: + '@lit-protocol/logger': + specifier: workspace:* + version: link:../../dist/packages/logger '@lit-protocol/uint8arrays': specifier: 7.1.1 version: 7.1.1(bufferutil@4.0.9)(typescript@5.8.3)(utf-8-validate@5.0.10) @@ -821,6 +830,9 @@ importers: '@lit-protocol/contracts': specifier: workspace:* version: link:../../dist/packages/contracts + '@lit-protocol/logger': + specifier: workspace:* + version: link:../../dist/packages/logger '@lit-protocol/nacl': specifier: 7.1.1 version: 7.1.1 @@ -845,12 +857,6 @@ importers: node-localstorage: specifier: 3.0.5 version: 3.0.5 - pino: - specifier: 9.6.0 - version: 9.6.0 - pino-caller: - specifier: 4.0.0 - version: 4.0.0 tslib: specifier: ^2.8.1 version: 2.8.1 @@ -909,6 +915,9 @@ importers: '@lit-protocol/lit-client': specifier: workspace:* version: link:../../dist/packages/lit-client + '@lit-protocol/logger': + specifier: workspace:* + version: link:../../dist/packages/logger '@lit-protocol/networks': specifier: workspace:* version: link:../../dist/packages/networks diff --git a/tools/debug-curl.js b/tools/debug-curl.js new file mode 100644 index 0000000000..d87abc0e66 --- /dev/null +++ b/tools/debug-curl.js 
#!/usr/bin/env node
/* eslint-disable no-console */
/* eslint-disable @typescript-eslint/no-var-requires */

/**
 * debug-curl: print the curl command stored for a request id.
 *
 * Companion to LIT_DEBUG_CURL=1 (see docs/guides/debugging-node-requests.mdx):
 * the SDK writes one file per request under the debug dir, named by the
 * request correlation id. This tool prints the stored command to stdout so it
 * can be piped straight into a shell. Falls back to substring matching when
 * only a prefix of the id is known, and lists candidates when ambiguous.
 */

const fs = require('node:fs');
const path = require('node:path');

/**
 * Resolve the directory curl dumps were written to.
 * Honours LIT_DEBUG_CURL_DIR (trimmed; blank treated as unset) and defaults
 * to "debug"; relative values are resolved against process.cwd().
 * @returns {string} absolute path to the debug directory
 */
function getDebugDir() {
  const envDir = process.env.LIT_DEBUG_CURL_DIR;
  const debugDir =
    typeof envDir === 'string' && envDir.trim().length > 0
      ? envDir.trim()
      : 'debug';
  return path.isAbsolute(debugDir)
    ? debugDir
    : path.join(process.cwd(), debugDir);
}

/** Print CLI usage to stderr. */
function usage() {
  // "<requestId>" restores the placeholder missing from the original message.
  console.error('Usage: pnpm debug:curl -- <requestId>');
  console.error('  Env: LIT_DEBUG_CURL_DIR=./debug (optional)');
}

/**
 * Read a file as UTF-8, returning null when it does not exist.
 * Reads directly and inspects the error instead of existsSync-then-read,
 * which avoids the check-then-use (TOCTOU) race; any error other than
 * ENOENT is a real failure and is rethrown.
 * @param {string} filePath
 * @returns {string|null} file contents, or null if the file is absent
 */
function readFileIfExists(filePath) {
  try {
    return fs.readFileSync(filePath, 'utf8');
  } catch (err) {
    if (err && err.code === 'ENOENT') return null;
    throw err;
  }
}

/**
 * CLI entry point. Exit codes:
 *   1 missing argument, 2 debug dir missing, 3 no file matches,
 *   4 matched file vanished before it could be read, 5 ambiguous match.
 */
function main() {
  const requestId = process.argv[2];
  if (!requestId) {
    usage();
    process.exit(1);
  }

  const debugDir = getDebugDir();
  if (!fs.existsSync(debugDir)) {
    console.error(`Debug dir not found: ${debugDir}`);
    process.exit(2);
  }

  // Fast path: the argument is the exact stored file name.
  const exactPath = path.join(debugDir, requestId);
  const exact = readFileIfExists(exactPath);
  if (exact !== null) {
    process.stdout.write(exact);
    return;
  }

  // Fallback: substring match over every stored file name, sorted for
  // deterministic listing when more than one candidate fits.
  const matches = fs
    .readdirSync(debugDir)
    .filter((name) => name.includes(requestId))
    .sort();

  if (matches.length === 0) {
    console.error(`Not found: ${exactPath}`);
    process.exit(3);
  }

  if (matches.length === 1) {
    const matchPath = path.join(debugDir, matches[0]);
    const content = readFileIfExists(matchPath);
    if (content === null) {
      // File disappeared between readdir and read.
      console.error(`Not found: ${matchPath}`);
      process.exit(4);
    }
    process.stdout.write(content);
    return;
  }

  console.error(`Multiple matches in ${debugDir}:`);
  for (const name of matches) {
    console.error(`- ${name}`);
  }
  process.exit(5);
}

// Run only when executed directly (pnpm debug:curl); requiring this module
// for inspection or testing stays side-effect free.
if (require.main === module) {
  main();
}