diff --git a/1 b/1 deleted file mode 100644 index e69de29b..00000000 diff --git a/apps/backend/database/migrations/00006_add_atomic_functions.sql b/apps/backend/database/migrations/00006_add_atomic_functions.sql new file mode 100644 index 00000000..840c4989 --- /dev/null +++ b/apps/backend/database/migrations/00006_add_atomic_functions.sql @@ -0,0 +1,95 @@ +-- Migration: Add atomic functions for sync operations +-- This migration adds PostgreSQL functions for atomic event processing +-- to prevent inconsistent states when event logging or state updates fail. + +-- Ensure sync_events table has necessary constraints +CREATE UNIQUE INDEX IF NOT EXISTS sync_events_event_id_unique +ON public.sync_events (event_id); + +-- Function for processing sync events atomically +-- This function handles event insertion, booking status updates, and event marking in a single transaction +CREATE OR REPLACE FUNCTION process_sync_event_atomic( + p_event_id TEXT, + p_event_type TEXT, + p_booking_id TEXT, + p_property_id TEXT, + p_user_id TEXT, + p_event_data JSONB, + p_new_status TEXT DEFAULT NULL +) +RETURNS JSONB +LANGUAGE plpgsql +SECURITY DEFINER +AS $$ +DECLARE + v_sync_event_id UUID; +BEGIN + -- Step 1: Insert sync event (with duplicate check) + INSERT INTO public.sync_events (event_id, event_type, booking_id, property_id, user_id, event_data, processed, created_at) + VALUES (p_event_id, p_event_type, p_booking_id, p_property_id, p_user_id, p_event_data, false, now()) + ON CONFLICT (event_id) DO NOTHING + RETURNING id INTO v_sync_event_id; + + -- If event already exists (duplicate), return early + IF v_sync_event_id IS NULL THEN + RETURN jsonb_build_object('success', false, 'error', 'DUPLICATE_EVENT', 'event_id', p_event_id); + END IF; + + -- Step 2: Update booking status if applicable + IF p_new_status IS NOT NULL AND p_booking_id IS NOT NULL THEN + UPDATE public.bookings + SET status = p_new_status, updated_at = now() + WHERE escrow_address = p_booking_id OR 
blockchain_booking_id = p_booking_id; + END IF; + + -- Step 3: Mark event as processed + UPDATE public.sync_events + SET processed = true, processed_at = now() + WHERE id = v_sync_event_id; + + -- Log success + INSERT INTO public.sync_logs (operation, status, message, data, created_at) + VALUES ( + 'process_sync_event_atomic', + 'success', + 'Event processed atomically', + jsonb_build_object( + 'event_id', p_event_id, + 'event_type', p_event_type, + 'booking_id', p_booking_id, + 'new_status', p_new_status + ), + now() + ); + + RETURN jsonb_build_object( + 'success', true, + 'sync_event_id', v_sync_event_id, + 'event_id', p_event_id + ); + +EXCEPTION WHEN OTHERS THEN + -- Log error + INSERT INTO public.sync_logs (operation, status, error_details, created_at) + VALUES ( + 'process_sync_event_atomic', + 'error', + jsonb_build_object( + 'error', SQLERRM, + 'error_state', SQLSTATE, + 'event_id', p_event_id, + 'event_type', p_event_type + ), + now() + ); + -- Re-raise the exception to rollback the transaction + RAISE; +END; +$$; + +-- Grant execute permission to authenticated users +GRANT EXECUTE ON FUNCTION process_sync_event_atomic(TEXT, TEXT, TEXT, TEXT, TEXT, JSONB, TEXT) TO authenticated; +GRANT EXECUTE ON FUNCTION process_sync_event_atomic(TEXT, TEXT, TEXT, TEXT, TEXT, JSONB, TEXT) TO service_role; + +-- Add comment for documentation +COMMENT ON FUNCTION process_sync_event_atomic IS 'Atomically processes blockchain sync events: inserts event, updates booking status, and marks as processed in a single transaction.'; diff --git a/apps/backend/database/migrations/00007_add_payment_constraints.sql b/apps/backend/database/migrations/00007_add_payment_constraints.sql new file mode 100644 index 00000000..3610577f --- /dev/null +++ b/apps/backend/database/migrations/00007_add_payment_constraints.sql @@ -0,0 +1,132 @@ +-- Migration: Add payment validation constraints and atomic confirmation function +-- This migration prevents duplicate transaction hash usage and ensures 
atomic payment confirmation. + +-- Unique constraint to prevent the same transaction hash being used for multiple bookings +CREATE UNIQUE INDEX IF NOT EXISTS bookings_payment_tx_hash_unique_idx +ON public.bookings (payment_transaction_hash) +WHERE payment_transaction_hash IS NOT NULL; + +-- RPC function for atomic booking payment confirmation with validations +CREATE OR REPLACE FUNCTION confirm_booking_payment_atomic( + p_booking_id UUID, + p_transaction_hash TEXT +) +RETURNS JSONB +LANGUAGE plpgsql +AS $$ +DECLARE + v_booking RECORD; +BEGIN + -- Validate transaction hash is provided + IF p_transaction_hash IS NULL OR p_transaction_hash = '' THEN + RETURN jsonb_build_object( + 'success', false, + 'error', 'INVALID_TRANSACTION_HASH', + 'message', 'Transaction hash is required' + ); + END IF; + + -- Lock the booking row for update to prevent concurrent modifications + SELECT id, status, payment_transaction_hash + INTO v_booking + FROM public.bookings + WHERE id = p_booking_id + FOR UPDATE; + + -- Check if booking exists + IF v_booking IS NULL THEN + RETURN jsonb_build_object( + 'success', false, + 'error', 'NOT_FOUND', + 'message', 'Booking not found' + ); + END IF; + + -- Check if booking is already paid + IF v_booking.payment_transaction_hash IS NOT NULL THEN + RETURN jsonb_build_object( + 'success', false, + 'error', 'ALREADY_PAID', + 'message', 'Booking has already been paid', + 'existing_hash', v_booking.payment_transaction_hash + ); + END IF; + + -- Check if transaction hash is already used by another booking + IF EXISTS ( + SELECT 1 FROM public.bookings + WHERE payment_transaction_hash = p_transaction_hash + AND id != p_booking_id + ) THEN + RETURN jsonb_build_object( + 'success', false, + 'error', 'DUPLICATE_TRANSACTION', + 'message', 'Transaction hash is already used by another booking' + ); + END IF; + + -- Update the booking with payment confirmation + UPDATE public.bookings + SET + status = 'confirmed', + payment_transaction_hash = p_transaction_hash, + 
paid_at = now(), + updated_at = now() + WHERE id = p_booking_id; + + -- Log the successful payment confirmation + INSERT INTO public.sync_logs (operation, status, message, data, created_at) + VALUES ( + 'confirm_booking_payment_atomic', + 'success', + 'Payment confirmed atomically', + jsonb_build_object( + 'booking_id', p_booking_id, + 'transaction_hash', p_transaction_hash + ), + now() + ); + + RETURN jsonb_build_object( + 'success', true, + 'booking_id', p_booking_id, + 'transaction_hash', p_transaction_hash, + 'status', 'confirmed' + ); + +EXCEPTION WHEN unique_violation THEN + -- Handle the case where another process inserted the same tx hash concurrently + RETURN jsonb_build_object( + 'success', false, + 'error', 'DUPLICATE_TRANSACTION', + 'message', 'Transaction hash was just used by another booking (concurrent update)' + ); +WHEN OTHERS THEN + -- Log error and return failure + INSERT INTO public.sync_logs (operation, status, error_details, created_at) + VALUES ( + 'confirm_booking_payment_atomic', + 'error', + jsonb_build_object( + 'error', SQLERRM, + 'error_state', SQLSTATE, + 'booking_id', p_booking_id::text, + 'transaction_hash', p_transaction_hash + ), + now() + ); + + RETURN jsonb_build_object( + 'success', false, + 'error', 'DB_ERROR', + 'message', SQLERRM + ); +END; +$$; + +-- Grant execute permission +GRANT EXECUTE ON FUNCTION confirm_booking_payment_atomic(UUID, TEXT) TO authenticated; +GRANT EXECUTE ON FUNCTION confirm_booking_payment_atomic(UUID, TEXT) TO service_role; + +-- Add comment for documentation +COMMENT ON FUNCTION confirm_booking_payment_atomic IS 'Atomically confirms a booking payment with validation: checks booking exists, is not already paid, and transaction hash is not reused.'; diff --git a/apps/backend/src/blockchain/eventListener.ts b/apps/backend/src/blockchain/eventListener.ts index abb1f514..37b9bb4a 100644 --- a/apps/backend/src/blockchain/eventListener.ts +++ b/apps/backend/src/blockchain/eventListener.ts @@ -1,7 +1,8 @@ 
-import { Contract, Networks, rpc, xdr } from '@stellar/stellar-sdk'; +import { Contract } from '@stellar/stellar-sdk'; import { Server as SorobanRpcServer } from '@stellar/stellar-sdk/rpc'; import { supabase } from '../config/supabase'; import { loggingService } from '../services/logging.service'; +import { syncService } from '../services/sync.service'; export interface BlockchainEvent { id: string; @@ -118,7 +119,15 @@ export class BlockchainEventListener { console.log(`Initialized event listener at ledger ${this.lastProcessedLedger}`); } catch (error) { - console.warn('Could not initialize last processed ledger, starting from current:', error); + const errorLog = await loggingService.logBlockchainOperation( + 'initializeLastProcessedLedger', + {} + ); + await loggingService.logBlockchainError(errorLog, { + error, + context: 'Could not initialize last processed ledger from database, starting from current', + }); + // Fallback to current ledger this.lastProcessedLedger = await this.getCurrentLedger(); } } @@ -131,7 +140,14 @@ export class BlockchainEventListener { const ledgerInfo = await this.server.getLatestLedger(); return ledgerInfo.sequence || 0; } catch (error) { - console.error('Failed to get current ledger:', error); + const errorLog = await loggingService.logBlockchainOperation('getCurrentLedger', { + lastProcessedLedger: this.lastProcessedLedger, + }); + await loggingService.logBlockchainError(errorLog, { + error, + context: 'Failed to get current ledger from Stellar network', + }); + // Return last processed ledger as fallback to allow processing to continue return this.lastProcessedLedger; } } @@ -176,7 +192,15 @@ export class BlockchainEventListener { await this.processEvent(event); } } catch (error) { - console.error(`Error processing ledger ${ledger}:`, error); + const errorLog = await loggingService.logBlockchainOperation('processLedgers', { + ledger, + fromLedger, + toLedger, + }); + await loggingService.logBlockchainError(errorLog, { + error, + 
context: `Error processing ledger ${ledger}, continuing with next ledger`, + }); // Continue with next ledger instead of failing completely } } @@ -217,7 +241,15 @@ export class BlockchainEventListener { return events; } catch (error) { - console.error(`Failed to get events from ledger ${ledger}:`, error); + const errorLog = await loggingService.logBlockchainOperation('getEventsFromLedger', { + ledger, + contractId: this.config.contractId, + }); + await loggingService.logBlockchainError(errorLog, { + error, + context: `Failed to get events from ledger ${ledger}`, + }); + // Return empty array to allow processing to continue with next ledger return []; } } @@ -249,7 +281,24 @@ export class BlockchainEventListener { data: eventData, }; } catch (error) { - console.error('Error parsing Soroban event:', error); + // Log parsing errors but don't block processing + // Using fire-and-forget logging to avoid blocking the event loop + loggingService + .logBlockchainOperation('parseSorobanEvent', { + ledger, + txHash: event.txHash, + }) + .then((errorLog) => { + loggingService.logBlockchainError(errorLog, { + error, + context: 'Error parsing Soroban event, skipping event', + }); + }) + .catch((loggingError) => { + // Fallback to console if logging service fails + console.error('Failed to log parseSorobanEvent error:', loggingError); + console.error('Original parsing error:', error); + }); return null; } } @@ -272,7 +321,21 @@ export class BlockchainEventListener { // Return empty object if no data found return {}; } catch (error) { - console.error('Error parsing Soroban event data:', error); + // Log parsing errors but don't block processing + // Using fire-and-forget logging to avoid blocking the event loop + loggingService + .logBlockchainOperation('parseSorobanEventData', {}) + .then((errorLog) => { + loggingService.logBlockchainError(errorLog, { + error, + context: 'Error parsing Soroban event data, returning empty object', + }); + }) + .catch((loggingError) => { + // 
Fallback to console if logging service fails + console.error('Failed to log parseSorobanEventData error:', loggingError); + console.error('Original parsing error:', error); + }); return {}; } } @@ -296,55 +359,180 @@ export class BlockchainEventListener { return null; } } catch (error) { - console.error('Error determining event type:', error); + // Log parsing errors but don't block processing + // Using fire-and-forget logging to avoid blocking the event loop + loggingService + .logBlockchainOperation('determineEventType', { + eventName: event.name ?? event.type, + }) + .then((errorLog) => { + loggingService.logBlockchainError(errorLog, { + error, + context: 'Error determining event type, skipping event', + }); + }) + .catch((loggingError) => { + // Fallback to console if logging service fails + console.error('Failed to log determineEventType error:', loggingError); + console.error('Original parsing error:', error); + }); return null; } } /** - * Process a single blockchain event + * Check if event has already been processed (duplicate check) + */ + private async isEventAlreadyProcessed(eventId: string): Promise { + try { + const { data, error } = await supabase + .from('sync_events') + .select('id, processed') + .eq('event_id', eventId) + .single(); + + if (error && error.code !== 'PGRST116') { + // PGRST116 is "not found" which is expected for new events + // Other errors should be logged but not block processing + const errorLog = await loggingService.logBlockchainOperation('isEventAlreadyProcessed', { + eventId, + }); + await loggingService.logBlockchainError(errorLog, { + error, + context: 'Error checking for duplicate event', + }); + } + + return data !== null && data !== undefined; + } catch (error) { + // Log error but don't block processing - let the atomic function handle duplicates + const errorLog = await loggingService.logBlockchainOperation('isEventAlreadyProcessed', { + eventId, + }); + await loggingService.logBlockchainError(errorLog, { + error, + 
context: 'Exception checking for duplicate event', + }); + return false; + } + } + + /** + * Determine the new status for an event type + */ + private getStatusForEventType(eventType: string): string | undefined { + switch (eventType) { + case 'booking_cancelled': + return 'cancelled'; + case 'payment_confirmed': + return 'confirmed'; + default: + return undefined; + } + } + + /** + * Process a single blockchain event atomically */ private async processEvent(event: BlockchainEvent): Promise { try { - const logId = await loggingService.logBlockchainOperation('processEvent', { event }); + const logId = await loggingService.logBlockchainOperation('processEvent', { + eventId: event.id, + eventType: event.type, + txHash: event.transactionHash, + }); - // Store event in database - await this.storeEvent(event); + // Validate transaction ID before processing (explicit duplicate check) + const isDuplicate = await this.isEventAlreadyProcessed(event.id); + if (isDuplicate) { + await loggingService.logBlockchainSuccess(logId, { + eventId: event.id, + skipped: true, + reason: 'duplicate_event_detected_before_processing', + }); + return; + } + + // Determine new status if applicable + const newStatus = this.getStatusForEventType(event.type); + + // Process event atomically (insert, update status if applicable, mark as processed) + const result = await syncService.processEventAtomic( + event.id, + event.type, + event.bookingId || null, + event.propertyId || null, + event.userId || 'unknown', + { + ...event.data, + blockHeight: event.blockHeight, + transactionHash: event.transactionHash, + timestamp: event.timestamp.toISOString(), + }, + newStatus + ); + + // Handle duplicate event (detected during atomic processing) + if (result.error === 'DUPLICATE_EVENT') { + await loggingService.logBlockchainSuccess(logId, { + eventId: event.id, + skipped: true, + reason: 'duplicate_event_detected_during_atomic_processing', + }); + return; + } - // Call registered callbacks + // Call 
registered callbacks after atomic processing + // These should be idempotent and handle their own errors const callback = this.eventCallbacks.get(event.type); if (callback) { - await callback(event); + try { + await callback(event); + } catch (callbackError) { + // Log callback errors but don't fail the event processing + // since the atomic processing already succeeded + const callbackErrorLog = await loggingService.logBlockchainOperation( + 'processEvent_callback', + { + eventId: event.id, + eventType: event.type, + } + ); + await loggingService.logBlockchainError(callbackErrorLog, { + error: callbackError, + context: 'Callback execution failed after atomic event processing', + }); + } } // Log success - await loggingService.logBlockchainSuccess(logId, { eventId: event.id }); + await loggingService.logBlockchainSuccess(logId, { + eventId: event.id, + syncEventId: result.syncEventId, + }); } catch (error) { - console.error(`Error processing event ${event.id}:`, error); + const errorLog = await loggingService.logBlockchainOperation('processEvent', { + eventId: event.id, + eventType: event.type, + }); + await loggingService.logBlockchainError(errorLog, { + error, + context: 'Failed to process blockchain event', + }); await this.markEventFailed(event.id, error as Error); - loggingService.logBlockchainError('processEvent', error as Error); + throw error; } } /** * Store event in database + * @deprecated This method is no longer used. Events are stored atomically via processEventAtomic. + * Kept for reference but should not be called. 
*/ - private async storeEvent(event: BlockchainEvent): Promise { - await supabase.from('sync_events').insert({ - event_id: event.id, - event_type: event.type, - booking_id: event.bookingId, - property_id: event.propertyId, - user_id: event.userId, - event_data: { - ...event.data, - blockHeight: event.blockHeight, - transactionHash: event.transactionHash, - timestamp: event.timestamp.toISOString(), - }, - processed: false, - created_at: new Date().toISOString(), - }); + private async storeEvent(_event: BlockchainEvent): Promise { + // This method is deprecated - events are now stored atomically via processEventAtomic + // Keeping for backward compatibility but it should not be called + console.warn('storeEvent is deprecated - use processEventAtomic instead'); } /** diff --git a/apps/backend/src/blockchain/trustlessWork.ts b/apps/backend/src/blockchain/trustlessWork.ts index e7a898e0..03710e2f 100644 --- a/apps/backend/src/blockchain/trustlessWork.ts +++ b/apps/backend/src/blockchain/trustlessWork.ts @@ -1,5 +1,6 @@ import { Keypair } from '@stellar/stellar-sdk'; import axios, { type AxiosResponse } from 'axios'; +import { EscrowError } from '../types/errors'; const TRUSTLESS_WORK_API_URL = process.env.TRUSTLESS_WORK_API_URL || 'https://api.trustlesswork.com'; @@ -304,8 +305,7 @@ export async function createEscrow(params: BookingEscrowParams): Promise return response.escrowAddress; } catch (error) { - console.error('Error creating escrow:', error); - throw new Error('Failed to create escrow'); + throw new EscrowError('Failed to create escrow', 'CREATE_ESCROW_FAIL', error); } } @@ -349,50 +349,46 @@ export class EscrowManager { } async isEscrowReady(escrowId: string): Promise { - try { - const status = await this.client.getEscrowStatus(escrowId); - return status.status === 'created' || status.status === 'pending'; - } catch (error) { - console.error('Error checking escrow status:', error); - return false; - } + const status = await 
this.client.getEscrowStatus(escrowId); + return status.status === 'created' || status.status === 'pending'; } - async getEscrowDetails(escrowId: string): Promise { - try { - return await this.client.getEscrowStatus(escrowId); - } catch (error) { - console.error('Error fetching escrow details:', error); - return null; - } + async getEscrowDetails(escrowId: string): Promise { + return await this.client.getEscrowStatus(escrowId); } async releasePayment(escrowId: string, amount?: number): Promise { - try { - const result = await this.client.releaseEscrow({ + const result = await this.client.releaseEscrow({ + escrowId, + amount: amount?.toFixed(2), + reason: 'Successful check-in completed', + }); + + if (!result.success) { + throw new EscrowError('Failed to release payment', 'RELEASE_PAYMENT_FAIL', { escrowId, - amount: amount?.toFixed(2), - reason: 'Successful check-in completed', + amount, }); - return result.success; - } catch (error) { - console.error('Error releasing payment:', error); - return false; } + + return result.success; } async releaseDeposit(escrowId: string, amount?: number): Promise { - try { - const result = await this.client.releaseEscrow({ + const result = await this.client.releaseEscrow({ + escrowId, + amount: amount?.toFixed(2), + reason: 'Successful checkout, no damages', + }); + + if (!result.success) { + throw new EscrowError('Failed to release deposit', 'RELEASE_DEPOSIT_FAIL', { escrowId, - amount: amount?.toFixed(2), - reason: 'Successful checkout, no damages', + amount, }); - return result.success; - } catch (error) { - console.error('Error releasing deposit:', error); - return false; } + + return result.success; } } diff --git a/apps/backend/src/controllers/wallet-auth.controller.ts b/apps/backend/src/controllers/wallet-auth.controller.ts deleted file mode 100644 index 2316a9f2..00000000 --- a/apps/backend/src/controllers/wallet-auth.controller.ts +++ /dev/null @@ -1,86 +0,0 @@ -import type { Request, Response } from 'express'; -import { 
authenticateWallet } from '../services/wallet-auth.service'; -import { generateChallenge as generateWalletChallenge } from '../services/wallet-challenge.service'; -import type { ChallengeRequest, WalletLoginRequest } from '../types/wallet-auth.types'; -import { - ConnectionRejectedError, - InvalidChallengeError, - InvalidPublicKeyError, - SignatureVerificationError, - WalletNotFoundError, -} from '../types/wallet-error-types'; - -//=================== -// Generate challenge for wallet authentication -//=================== -export const generateChallenge = async (req: Request, res: Response) => { - try { - const { publicKey } = req.body as ChallengeRequest; - - const challengeResponse = await generateWalletChallenge(publicKey); - - res.status(200).json(challengeResponse); - } catch (error) { - const message = error instanceof Error ? error.message : 'Unknown error'; - res.status(500).json({ error: message }); - } -}; - -//=================== -// Authenticate wallet with signed transaction -//=================== -export const authenticateWalletController = async (req: Request, res: Response) => { - try { - const walletLoginData = req.body as WalletLoginRequest; - - if (!walletLoginData.publicKey) { - return res.status(400).json({ error: 'Missing publicKey' }); - } - if (!walletLoginData.signedTransaction) { - return res.status(400).json({ error: 'Missing signedTransaction' }); - } - if (!walletLoginData.challenge) { - return res.status(400).json({ error: 'Missing challenge' }); - } - - const authResponse = await authenticateWallet(walletLoginData); - - res.cookie('auth-token', authResponse.token, { - httpOnly: true, - secure: process.env.NODE_ENV === 'production', - sameSite: 'strict', - maxAge: 7 * 24 * 60 * 60 * 1000, - }); - - console.log('πŸŽ‰ Wallet authentication successful for user:', authResponse.user.id); - res.status(200).json({ - user: authResponse.user, - token: authResponse.token, - }); - } catch (error) { - console.error('Wallet authentication error:', 
error); - - if (error instanceof InvalidPublicKeyError) { - return res.status(400).json({ error: error.message, code: 'INVALID_PUBLIC_KEY' }); - } - - if (error instanceof InvalidChallengeError) { - return res.status(401).json({ error: error.message, code: 'INVALID_CHALLENGE' }); - } - - if (error instanceof SignatureVerificationError) { - return res.status(401).json({ error: error.message, code: 'SIGNATURE_VERIFICATION_FAILED' }); - } - - if (error instanceof WalletNotFoundError) { - return res.status(400).json({ error: error.message, code: 'WALLET_NOT_FOUND' }); - } - - if (error instanceof ConnectionRejectedError) { - return res.status(400).json({ error: error.message, code: 'CONNECTION_REJECTED' }); - } - - const message = error instanceof Error ? error.message : 'Unknown error'; - res.status(500).json({ error: message, code: 'INTERNAL_ERROR' }); - } -}; diff --git a/apps/backend/src/index.ts b/apps/backend/src/index.ts index f7d80f30..27632f15 100644 --- a/apps/backend/src/index.ts +++ b/apps/backend/src/index.ts @@ -8,7 +8,7 @@ import { rateLimiter } from './middleware/rateLimiter'; import { locationRoutes, profileRoutes, propertyRoutes } from './routes'; import authRoutes from './routes/auth'; import bookingRoutes from './routes/booking.routes'; -import walletAuthRoutes from './routes/wallet-auth.routes'; +// wallet-auth routes removed - replaced by client-side Stellar Social SDK import { connectRedis } from './config/redis'; import syncRoutes from './routes/sync.routes'; @@ -89,7 +89,7 @@ app.use(rateLimiter); // Routes app.use('/auth', authRoutes); -app.use('/api/auth', walletAuthRoutes); +// wallet-auth endpoint removed - authentication now handled by client-side Stellar Social SDK app.use('/api/bookings', bookingRoutes); app.use('/api/locations', locationRoutes); app.use('/api/profile', profileRoutes); diff --git a/apps/backend/src/middleware/error.middleware.ts b/apps/backend/src/middleware/error.middleware.ts index 9fa0e7d9..b8aeea7e 100644 --- 
a/apps/backend/src/middleware/error.middleware.ts +++ b/apps/backend/src/middleware/error.middleware.ts @@ -1,12 +1,38 @@ import type { ErrorRequestHandler } from 'express'; import { JsonWebTokenError, TokenExpiredError } from 'jsonwebtoken'; import { ZodError } from 'zod'; +import { BookingError } from '../types/common.types'; +import { EscrowError, SyncError } from '../types/errors'; interface ErrorResponse { error: string; - details?: Array<{ path?: string; message: string }>; + code?: string; + details?: Array<{ path?: string; message: string }> | unknown; } +// Map error codes to HTTP status codes +const errorCodeToStatus: Record = { + NOT_FOUND: 404, + UNAUTHORIZED: 401, + ALREADY_PAID: 409, + DUPLICATE_TRANSACTION: 409, + DUPLICATE_EVENT: 409, + UNAVAILABLE: 409, + INVALID_STATUS: 400, + INVALID_BUYER_WALLET: 400, + INVALID_SELLER_WALLET: 400, + DB_ERROR: 500, + BLOCKCHAIN_FAIL: 502, + CREATE_ESCROW_FAIL: 502, + ESCROW_CANCEL_FAIL: 502, + RELEASE_PAYMENT_FAIL: 502, + RELEASE_DEPOSIT_FAIL: 502, + SYNC_FAIL: 502, + ATOMIC_PROCESS_FAIL: 500, + POLL_EVENTS_FAIL: 502, + GET_EVENTS_FAIL: 502, +}; + export const errorMiddleware: ErrorRequestHandler = (err, _req, res, _next) => { // Only log full stack traces in development if (process.env.NODE_ENV !== 'production') { @@ -31,6 +57,27 @@ export const errorMiddleware: ErrorRequestHandler = (err, _req, res, _next) => { } else if (err instanceof TokenExpiredError) { statusCode = 401; response.error = 'Token expired'; + } else if (err instanceof BookingError) { + statusCode = errorCodeToStatus[err.code] || 400; + response.error = err.message; + response.code = err.code; + if (err.details && process.env.NODE_ENV !== 'production') { + response.details = err.details; + } + } else if (err instanceof EscrowError) { + statusCode = errorCodeToStatus[err.code] || 502; + response.error = err.message; + response.code = err.code; + if (err.details && process.env.NODE_ENV !== 'production') { + response.details = err.details; + } + } 
else if (err instanceof SyncError) { + statusCode = errorCodeToStatus[err.code] || 500; + response.error = err.message; + response.code = err.code; + if (err.details && process.env.NODE_ENV !== 'production') { + response.details = err.details; + } } else if (err.name === 'PostgrestError') { statusCode = 400; response.error = 'Database error'; diff --git a/apps/backend/src/routes/wallet-auth.routes.ts b/apps/backend/src/routes/wallet-auth.routes.ts deleted file mode 100644 index c0d93a22..00000000 --- a/apps/backend/src/routes/wallet-auth.routes.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Router } from 'express'; -import { - authenticateWalletController, - generateChallenge, -} from '../controllers/wallet-auth.controller'; -import { challengeRateLimit, walletAuthRateLimit } from '../middleware/rateLimiter'; -import { validateChallengeRequest, validateWalletLogin } from '../validators/wallet-auth.validator'; - -const router = Router(); - -//=================== -// Generate challenge route for wallet authentication -//=================== -router.post('/challenge', challengeRateLimit, validateChallengeRequest, generateChallenge); - -//=================== -// Authenticate with wallet signature route -//=================== -router.post('/wallet', walletAuthRateLimit, validateWalletLogin, authenticateWalletController); - -export default router; diff --git a/apps/backend/src/services/booking.service.ts b/apps/backend/src/services/booking.service.ts index 1d1e029b..4adf48d9 100644 --- a/apps/backend/src/services/booking.service.ts +++ b/apps/backend/src/services/booking.service.ts @@ -1,7 +1,6 @@ import { StrKey } from '@stellar/stellar-sdk'; import { cancelBookingOnChain, - checkBookingAvailability, createBookingOnChain, updateBookingStatusOnChain, } from '../blockchain/bookingContract'; @@ -514,37 +513,64 @@ export const bookingService = new BookingService(blockchainServices); // Existing utility functions for payment confirmation and booking retrieval export async 
function confirmBookingPayment(bookingId: string, transactionHash: string) { + const log = await loggingService.logBlockchainOperation('confirmBookingPayment', { + bookingId, + transactionHash, + }); + try { - const { data: existingBooking, error: fetchError } = await supabase - .from('bookings') - .select('*') - .eq('id', bookingId) - .single(); + // Use atomic RPC function to validate and confirm payment + const { data, error } = await supabase.rpc('confirm_booking_payment_atomic', { + p_booking_id: bookingId, + p_transaction_hash: transactionHash, + }); - if (fetchError || !existingBooking) { - throw new BookingError('Booking not found or failed to retrieve', 'NOT_FOUND', fetchError); + if (error) { + await loggingService.logBlockchainError(log, { error, context: 'RPC call failed' }); + throw new BookingError('Database error during payment confirmation', 'DB_ERROR', error); } - const { data, error } = await supabase + // Handle validation errors from the RPC function + if (!data.success) { + const errorMap: Record = { + NOT_FOUND: ['Booking not found', 'NOT_FOUND'], + ALREADY_PAID: ['Booking has already been paid', 'ALREADY_PAID'], + DUPLICATE_TRANSACTION: [ + 'Transaction hash is already used by another booking', + 'DUPLICATE_TRANSACTION', + ], + INVALID_TRANSACTION_HASH: ['Transaction hash is required', 'INVALID_TRANSACTION_HASH'], + DB_ERROR: ['Database error occurred', 'DB_ERROR'], + }; + const [msg, code] = errorMap[data.error] || ['Payment confirmation failed', 'CONFIRM_FAIL']; + throw new BookingError(msg, code, data); + } + + // Fetch the updated booking to return full data + const { data: booking, error: fetchError } = await supabase .from('bookings') - .update({ - status: 'confirmed', - payment_transaction_hash: transactionHash, - paid_at: new Date().toISOString(), - }) + .select('*') .eq('id', bookingId) - .select() .single(); - if (error || !data) { - throw new BookingError('Failed to confirm booking status update', 'CONFIRM_FAIL', error); + if 
(fetchError || !booking) { + // Payment was confirmed but fetch failed - log but don't throw + await loggingService.logBlockchainError(log, { + error: fetchError, + context: 'Payment confirmed but failed to fetch updated booking', + }); + // Return minimal data since confirmation was successful + return { id: bookingId, status: 'confirmed', payment_transaction_hash: transactionHash }; } - return data; + + await loggingService.logBlockchainSuccess(log, { booking }); + return booking; } catch (error) { if (error instanceof BookingError) { throw error; } - throw new BookingError('Confirmation error', 'CONFIRM_FAIL', error); + await loggingService.logBlockchainError(log, { error, context: 'confirmBookingPayment' }); + throw new BookingError('Payment confirmation failed', 'CONFIRM_FAIL', error); } } diff --git a/apps/backend/src/services/cache.service.ts b/apps/backend/src/services/cache.service.ts index e194f2ab..1dcb2534 100644 --- a/apps/backend/src/services/cache.service.ts +++ b/apps/backend/src/services/cache.service.ts @@ -1,5 +1,6 @@ import crypto from 'node:crypto'; import { type RedisClientType, createClient } from 'redis'; +import { loggingService } from './logging.service'; export class CacheService { private client: RedisClientType; @@ -74,7 +75,9 @@ export class CacheService { const data = await this.client.get(key); return data ? 
JSON.parse(data) : null; } catch (error) { - console.error('Cache get error:', error); + // Cache errors should be logged but not stop the application + const log = await loggingService.logBlockchainOperation('cache_get', { key }); + await loggingService.logBlockchainError(log, { error, context: 'Cache get failed' }); return null; } } @@ -93,7 +96,9 @@ export class CacheService { try { await this.client.setEx(key, ttlSeconds, JSON.stringify(data)); } catch (error) { - console.error('Cache set error:', error); + // Cache errors should be logged but not stop the application + const log = await loggingService.logBlockchainOperation('cache_set', { key, ttlSeconds }); + await loggingService.logBlockchainError(log, { error, context: 'Cache set failed' }); } } @@ -109,7 +114,9 @@ export class CacheService { try { await this.client.del(key); } catch (error) { - console.error('Cache delete error:', error); + // Cache errors should be logged but not stop the application + const log = await loggingService.logBlockchainOperation('cache_delete', { key }); + await loggingService.logBlockchainError(log, { error, context: 'Cache delete failed' }); } } @@ -128,7 +135,12 @@ export class CacheService { await this.client.del(keys); } } catch (error) { - console.error('Cache delete pattern error:', error); + // Cache errors should be logged but not stop the application + const log = await loggingService.logBlockchainOperation('cache_delete_pattern', { pattern }); + await loggingService.logBlockchainError(log, { + error, + context: 'Cache delete pattern failed', + }); } } @@ -257,7 +269,9 @@ export class CacheService { keyCount, }; } catch (error) { - console.error('Cache stats error:', error); + // Cache errors should be logged but not stop the application + const log = await loggingService.logBlockchainOperation('cache_stats', {}); + await loggingService.logBlockchainError(log, { error, context: 'Cache stats failed' }); return { isConnected: true }; } } diff --git 
a/apps/backend/src/services/sync.service.ts b/apps/backend/src/services/sync.service.ts index 8be6746b..c8b39d87 100644 --- a/apps/backend/src/services/sync.service.ts +++ b/apps/backend/src/services/sync.service.ts @@ -1,29 +1,14 @@ -/** - * Sync Service - * - * Provides blockchain synchronization capabilities for StellarRent. - * Polls the Stellar network for new events and processes them accordingly. - * - * Environment Variables: - * - SOROBAN_RPC_URL: Soroban RPC endpoint URL - * - SOROBAN_CONTRACT_ID: Contract ID to monitor - * - SOROBAN_NETWORK_PASSPHRASE: Network passphrase for validation - * - SYNC_POLL_INTERVAL: Polling interval in milliseconds (default: 5000ms) - * - * Features: - * - Configurable polling intervals - * - Network passphrase validation - * - Comprehensive error handling and logging - * - Manual sync triggers - * - Status monitoring and statistics - */ - -import { Contract, Networks, nativeToScVal, scValToNative } from '@stellar/stellar-sdk'; +import { exec } from 'node:child_process'; +import { promisify } from 'node:util'; +import { Contract, Networks } from '@stellar/stellar-sdk'; import { Server as SorobanRpcServer } from '@stellar/stellar-sdk/rpc'; import { supabase } from '../config/supabase'; +import { SyncError } from '../types/errors'; import { bookingService } from './booking.service'; import { loggingService } from './logging.service'; +const execAsync = promisify(exec); + export interface SyncEvent { id: string; type: @@ -292,11 +277,17 @@ export class SyncService { console.log(`Initialized sync state: last block ${this.lastProcessedBlock}`); } catch (error) { - console.warn('Could not initialize sync state, starting fresh:', error); + // Log error properly and reset state + const errorLog = await loggingService.logBlockchainOperation('initializeSyncState', {}); + await loggingService.logBlockchainError(errorLog, { + error, + context: 'Could not initialize sync state from database, starting fresh', + }); this.lastProcessedBlock 
= 0; this.totalEventsProcessed = 0; this.failedEvents = 0; this.lastSyncTime = null; + // Don't rethrow - starting fresh is a valid fallback } } @@ -327,18 +318,19 @@ export class SyncService { eventsProcessed: this.totalEventsProcessed, }); } catch (error) { - console.error('Error polling for events:', error); this.failedEvents++; - // Log the error in sync_logs table instead of using loggingService - await supabase.from('sync_logs').insert({ - operation: 'pollForEvents', - status: 'error', - message: 'Failed to poll for blockchain events', - error_details: { - error: error instanceof Error ? error.message : 'Unknown error', - }, + // Log the error using loggingService for proper error serialization + const errorLog = await loggingService.logBlockchainOperation('pollForEvents', { + lastProcessedBlock: this.lastProcessedBlock, + }); + await loggingService.logBlockchainError(errorLog, { + error, + context: 'Failed to poll for blockchain events', }); + + // Re-throw as SyncError to propagate to callers + throw new SyncError('Failed to poll for blockchain events', 'POLL_EVENTS_FAIL', error); } } @@ -466,9 +458,18 @@ export class SyncService { } } - console.warn( - `All ${maxRetries} attempts failed for both SDK and CLI methods. Using fallback block height: ${this.lastProcessedBlock}` - ); + // Log the critical failure properly + const errorLog = await loggingService.logBlockchainOperation('getCurrentBlockHeight', { + maxRetries, + lastProcessedBlock: this.lastProcessedBlock, + }); + await loggingService.logBlockchainError(errorLog, { + error: new Error( + `All ${maxRetries} attempts failed for both SDK and CLI methods. 
Using fallback block height.` + ), + context: 'Critical: Cannot get current block height from Stellar network', + }); + // Return fallback but log the critical issue return this.lastProcessedBlock; } @@ -529,43 +530,48 @@ export class SyncService { const events = eventsResponse?.events || []; // Transform Stellar events into our format - return events.map((event: unknown) => { - const stellarEvent = event as { - ledger: number; - ledgerClosedAt: string; - id: string; - topic?: string[]; - value?: Record; - txHash: string; - contractId: string; - }; - - return { - id: `${stellarEvent.ledger}-${stellarEvent.ledgerClosedAt}-${stellarEvent.id}`, - type: this.mapStellarEventType(stellarEvent.topic?.[0] || 'unknown'), - blockNumber: stellarEvent.ledger, - timestamp: new Date(stellarEvent.ledgerClosedAt), - data: this.parseStellarEventData(stellarEvent), - txHash: stellarEvent.txHash, - contractId: stellarEvent.contractId, - }; - }); + const transformedEvents = await Promise.all( + events.map(async (event: unknown) => { + const stellarEvent = event as { + ledger: number; + ledgerClosedAt: string; + id: string; + topic?: string[]; + value?: Record; + txHash: string; + contractId: string; + }; + + return { + id: `${stellarEvent.ledger}-${stellarEvent.ledgerClosedAt}-${stellarEvent.id}`, + type: this.mapStellarEventType(stellarEvent.topic?.[0] || 'unknown'), + blockNumber: stellarEvent.ledger, + timestamp: new Date(stellarEvent.ledgerClosedAt), + data: await this.parseStellarEventData(stellarEvent), + txHash: stellarEvent.txHash, + contractId: stellarEvent.contractId, + }; + }) + ); + + return transformedEvents; } catch (error) { - console.error('Failed to get contract events:', error); - - // Log the error but don't throw to prevent sync service from stopping - await supabase.from('sync_logs').insert({ - operation: 'get_contract_events', - status: 'error', - message: 'Failed to query blockchain events', - error_details: { - error: error instanceof Error ? 
error.message : 'Unknown error', - from_block: fromBlock, - to_block: toBlock, - }, + // Log the error using loggingService + const errorLog = await loggingService.logBlockchainOperation('getContractEvents', { + fromBlock, + toBlock, + }); + await loggingService.logBlockchainError(errorLog, { + error, + context: 'Failed to query blockchain events', }); - return []; + // Re-throw as SyncError for proper error handling + throw new SyncError('Failed to get contract events', 'GET_EVENTS_FAIL', { + error, + fromBlock, + toBlock, + }); } } @@ -590,9 +596,9 @@ export class SyncService { /** * Parse Stellar event data into our format */ - private parseStellarEventData(event: { + private async parseStellarEventData(event: { value?: Record; - }): BlockchainEventData { + }): Promise { try { // Parse event data based on contract event structure const eventData = event.value || {}; @@ -609,60 +615,192 @@ export class SyncService { guests: eventData.guests ? Number(eventData.guests) : undefined, }; } catch (error) { - console.error('Failed to parse event data:', error); + // Log parsing error properly + // This is intentionally not re-thrown as parsing failures shouldn't stop sync + // Using async logging but handling failures gracefully + try { + const errorLog = await loggingService.logBlockchainOperation('parseStellarEventData', { + event: event.value, + }); + await loggingService.logBlockchainError(errorLog, { + error, + context: 'Failed to parse event data - returning empty object', + }); + } catch (loggingError) { + // Log to console as fallback if logging service fails + console.error('Failed to log parseStellarEventData error:', loggingError); + console.error('Original parsing error:', error); + } return {}; } } /** - * Process a single blockchain event + * Check if event has already been processed (duplicate check) + */ + private async isEventAlreadyProcessed(eventId: string): Promise { + try { + const { data, error } = await supabase + .from('sync_events') + 
.select('id, processed') + .eq('event_id', eventId) + .single(); + + if (error && error.code !== 'PGRST116') { + // PGRST116 is "not found" which is expected for new events + // Other errors should be logged but not block processing + const errorLog = await loggingService.logBlockchainOperation('isEventAlreadyProcessed', { + eventId, + }); + await loggingService.logBlockchainError(errorLog, { + error, + context: 'Error checking for duplicate event', + }); + } + + return data !== null && data !== undefined; + } catch (error) { + // Log error but don't block processing - let the atomic function handle duplicates + const errorLog = await loggingService.logBlockchainOperation('isEventAlreadyProcessed', { + eventId, + }); + await loggingService.logBlockchainError(errorLog, { + error, + context: 'Exception checking for duplicate event', + }); + return false; + } + } + + /** + * Determine the new status for an event type + */ + private getStatusForEventType( + eventType: string, + eventData: BlockchainEventData + ): string | undefined { + switch (eventType) { + case 'booking_cancelled': + return 'cancelled'; + case 'payment_confirmed': + return 'confirmed'; + case 'escrow_created': + return 'pending'; + case 'escrow_released': + return 'completed'; + case 'booking_updated': + case 'booking_created': + return eventData.status ? 
this.mapBlockchainStatus(eventData.status) : undefined; + default: + return undefined; + } + } + + /** + * Process a single blockchain event atomically */ private async processEvent(event: Record): Promise { + const eventId = event.id as string; + const eventType = event.type as string; + const eventData = event.data as BlockchainEventData; + const txHash = event.txHash as string | undefined; + try { - const logId = await loggingService.logBlockchainOperation('processEvent', { event }); + const logId = await loggingService.logBlockchainOperation('processEvent', { + eventId, + eventType, + txHash, + }); - // Store event in database for tracking - await this.storeSyncEvent(event); + // Validate transaction ID before processing (explicit duplicate check) + const isDuplicate = await this.isEventAlreadyProcessed(eventId); + if (isDuplicate) { + await loggingService.logBlockchainSuccess(logId, { + eventId, + skipped: true, + reason: 'duplicate_event_detected_before_processing', + }); + return; + } - // Process based on event type - switch (event.type as string) { + // Extract event metadata + const bookingId = eventData.escrow_id || (event.bookingId as string) || null; + const propertyId = eventData.property_id || (event.propertyId as string) || null; + const userId = eventData.user_id || (event.userId as string) || 'unknown'; + const newStatus = this.getStatusForEventType(eventType, eventData); + + // Process event atomically (insert, update status if applicable, mark as processed) + const result = await this.processEventAtomic( + eventId, + eventType, + bookingId, + propertyId, + userId, + eventData as unknown as Record, + newStatus + ); + + // Handle duplicate event (detected during atomic processing) + if (result.error === 'DUPLICATE_EVENT') { + await loggingService.logBlockchainSuccess(logId, { + eventId, + skipped: true, + reason: 'duplicate_event_detected_during_atomic_processing', + }); + return; + } + + // For events that require additional processing beyond 
status updates + // (e.g., creating bookings, logging), handle them after atomic processing + // These operations are idempotent or handle their own errors gracefully + switch (eventType) { case 'booking_created': + // Booking creation is handled by handleBookingCreated which checks for existing bookings await this.handleBookingCreated(event); break; case 'booking_updated': + // Enhanced sync with booking service await this.handleBookingUpdated(event); break; - case 'booking_cancelled': - await this.handleBookingCancelled(event); - break; - case 'payment_confirmed': - await this.handlePaymentConfirmed(event); - break; case 'property_created': - await this.handlePropertyCreated(event); - break; case 'property_updated': - await this.handlePropertyUpdated(event); + // These only do logging, which is safe to do after atomic processing + if (eventType === 'property_created') { + await this.handlePropertyCreated(event); + } else { + await this.handlePropertyUpdated(event); + } break; case 'escrow_created': - await this.handleEscrowCreated(event); - break; case 'escrow_released': - await this.handleEscrowReleased(event); + // These do additional logging after status update + if (eventType === 'escrow_created') { + await this.handleEscrowCreated(event); + } else { + await this.handleEscrowReleased(event); + } break; + // booking_cancelled, payment_confirmed are fully handled by processEventAtomic default: - console.warn(`Unknown event type: ${event.type as string}`); + if (eventType !== 'unknown') { + console.warn(`Unknown event type: ${eventType}`); + } } - // Mark event as processed - await this.markEventProcessed(event.id as string); - await loggingService.logBlockchainSuccess(logId, { - eventId: event.id as string, + eventId, + syncEventId: result.syncEventId, }); } catch (error) { - console.error(`Error processing event ${event.id as string}:`, error); - await this.markEventFailed(event.id as string, error as Error); + const errorLog = await 
loggingService.logBlockchainOperation('processEvent', { + eventId, + eventType, + }); + await loggingService.logBlockchainError(errorLog, { + error, + context: 'Failed to process blockchain event', + }); + await this.markEventFailed(eventId, error as Error); throw error; } } @@ -717,48 +855,62 @@ export class SyncService { await bookingService.syncBookingFromBlockchain( eventData.escrow_id, this.mapBlockchainStatus(eventData.status), - eventData + eventData as unknown as Record ); } catch (error) { - console.error('Failed to sync booking from blockchain event:', error); + // Log and use fallback to direct database update + const errorLog = await loggingService.logBlockchainOperation('handleBookingUpdated', { + escrowId: eventData.escrow_id, + status: eventData.status, + }); + await loggingService.logBlockchainError(errorLog, { + error, + context: 'Failed to sync booking from blockchain event - using fallback', + }); // Fallback to direct database update - await supabase - .from('bookings') - .update({ - status: this.mapBlockchainStatus(eventData.status), - updated_at: new Date().toISOString(), - }) - .eq('escrow_address', eventData.escrow_id); + try { + await supabase + .from('bookings') + .update({ + status: this.mapBlockchainStatus(eventData.status), + updated_at: new Date().toISOString(), + }) + .eq('escrow_address', eventData.escrow_id); + } catch (fallbackError) { + // Log fallback failure but don't rethrow - event was already processed atomically + const fallbackErrorLog = await loggingService.logBlockchainOperation( + 'handleBookingUpdated_fallback', + { + escrowId: eventData.escrow_id, + } + ); + await loggingService.logBlockchainError(fallbackErrorLog, { + error: fallbackError, + context: 'Fallback database update also failed', + }); + } } } } /** * Handle booking cancellation event + * Note: Status update is handled atomically by processEventAtomic + * This handler is kept for potential future additional processing */ - private async 
handleBookingCancelled(event: Record): Promise { - const eventData = event.data as BlockchainEventData; - await supabase - .from('bookings') - .update({ - status: 'cancelled', - updated_at: new Date().toISOString(), - }) - .eq('escrow_address', eventData.escrow_id || ''); + private async handleBookingCancelled(_event: Record): Promise { + // Status update is handled atomically by processEventAtomic + // No additional processing needed at this time } /** * Handle payment confirmation event + * Note: Status update is handled atomically by processEventAtomic + * This handler is kept for potential future additional processing */ - private async handlePaymentConfirmed(event: Record): Promise { - const eventData = event.data as BlockchainEventData; - await supabase - .from('bookings') - .update({ - status: 'confirmed', - updated_at: new Date().toISOString(), - }) - .eq('escrow_address', eventData.escrow_id || ''); + private async handlePaymentConfirmed(_event: Record): Promise { + // Status update is handled atomically by processEventAtomic + // No additional processing needed at this time } /** @@ -807,20 +959,13 @@ export class SyncService { /** * Handle escrow creation event + * Note: Status update is handled atomically by processEventAtomic + * This handler only performs additional logging */ private async handleEscrowCreated(event: Record): Promise { const eventData = event.data as BlockchainEventData; - // Update booking with escrow creation confirmation - await supabase - .from('bookings') - .update({ - status: 'pending', - updated_at: new Date().toISOString(), - }) - .eq('escrow_address', eventData.escrow_id || ''); - - // Log the event + // Log the event (status update already handled atomically) await supabase.from('sync_logs').insert({ operation: 'handle_escrow_created', status: 'success', @@ -836,20 +981,13 @@ export class SyncService { /** * Handle escrow release event + * Note: Status update is handled atomically by processEventAtomic + * This handler only 
performs additional logging */ private async handleEscrowReleased(event: Record): Promise { const eventData = event.data as BlockchainEventData; - // Update booking to completed status when escrow is released - await supabase - .from('bookings') - .update({ - status: 'completed', - updated_at: new Date().toISOString(), - }) - .eq('escrow_address', eventData.escrow_id || ''); - - // Log the event + // Log the event (status update already handled atomically) await supabase.from('sync_logs').insert({ operation: 'handle_escrow_released', status: 'success', @@ -875,6 +1013,63 @@ export class SyncService { return statusMap[blockchainStatus] || 'pending'; } + /** + * Process sync event atomically using PostgreSQL RPC function + * This ensures event logging, status updates, and marking as processed + * all happen in a single transaction to prevent inconsistent states. + */ + async processEventAtomic( + eventId: string, + eventType: string, + bookingId: string | null, + propertyId: string | null, + userId: string, + eventData: Record, + newStatus?: string + ): Promise<{ success: boolean; syncEventId?: string; error?: string }> { + const log = await loggingService.logBlockchainOperation('processEventAtomic', { + eventId, + eventType, + bookingId, + newStatus, + }); + + try { + const { data, error } = await supabase.rpc('process_sync_event_atomic', { + p_event_id: eventId, + p_event_type: eventType, + p_booking_id: bookingId, + p_property_id: propertyId, + p_user_id: userId, + p_event_data: eventData, + p_new_status: newStatus || null, + }); + + if (error) { + await loggingService.logBlockchainError(log, { error, context: 'RPC call failed' }); + throw new SyncError('Failed to process event atomically', 'ATOMIC_PROCESS_FAIL', error); + } + + if (!data.success) { + if (data.error === 'DUPLICATE_EVENT') { + // Duplicate events are not errors, just skip them + await loggingService.logBlockchainSuccess(log, { skipped: true, reason: 'duplicate' }); + return { success: true, error: 
'DUPLICATE_EVENT' }; + } + throw new SyncError(`Atomic processing failed: ${data.error}`, data.error, data); + } + + await loggingService.logBlockchainSuccess(log, { syncEventId: data.sync_event_id }); + return { success: true, syncEventId: data.sync_event_id }; + } catch (error) { + if (error instanceof SyncError) { + throw error; + } + await loggingService.logBlockchainError(log, { error, context: 'processEventAtomic' }); + throw new SyncError('Failed to process event atomically', 'ATOMIC_PROCESS_FAIL', error); + } + } + /** * Store sync event in database */ @@ -923,17 +1118,34 @@ export class SyncService { * Update sync state in database */ private async updateSyncState(): Promise { - const { error } = await supabase.from('sync_state').upsert({ - id: 1, // Single row for sync state - last_processed_block: this.lastProcessedBlock, - total_events_processed: this.totalEventsProcessed, - failed_events: this.failedEvents, - last_sync_time: this.lastSyncTime?.toISOString(), - updated_at: new Date().toISOString(), - }); + try { + const { error } = await supabase.from('sync_state').upsert({ + id: 1, // Single row for sync state + last_processed_block: this.lastProcessedBlock, + total_events_processed: this.totalEventsProcessed, + failed_events: this.failedEvents, + last_sync_time: this.lastSyncTime?.toISOString(), + updated_at: new Date().toISOString(), + }); - if (error) { - console.error('Failed to update sync state:', error); + if (error) { + const errorLog = await loggingService.logBlockchainOperation('updateSyncState', { + lastProcessedBlock: this.lastProcessedBlock, + totalEventsProcessed: this.totalEventsProcessed, + }); + await loggingService.logBlockchainError(errorLog, { + error, + context: 'Failed to update sync state in database', + }); + // Don't rethrow - sync can continue even if state update fails + } + } catch (error) { + const errorLog = await loggingService.logBlockchainOperation('updateSyncState', {}); + await 
loggingService.logBlockchainError(errorLog, { + error, + context: 'Exception updating sync state', + }); + // Don't rethrow - sync can continue even if state update fails } } @@ -972,3 +1184,4 @@ export class SyncService { // Export singleton instance export const syncService = new SyncService(); + diff --git a/apps/backend/src/services/wallet-auth.service.ts b/apps/backend/src/services/wallet-auth.service.ts deleted file mode 100644 index 93991b8b..00000000 --- a/apps/backend/src/services/wallet-auth.service.ts +++ /dev/null @@ -1,211 +0,0 @@ -import { Networks, StrKey, Transaction } from '@stellar/stellar-sdk'; -import jwt from 'jsonwebtoken'; -import { supabase } from '../config/supabase'; -import type { WalletAuthResponse, WalletLoginRequest } from '../types/wallet-auth.types'; -import { - InvalidChallengeError, - InvalidPublicKeyError, - SignatureVerificationError, -} from '../types/wallet-error-types'; -import { getValidChallenge, removeChallenge } from './wallet-challenge.service'; - -//=================== -// Verify the signed transaction contains the correct challenge and is signed by the correct key -//=================== -async function verifySignedTransaction( - publicKey: string, - challenge: string, - signedTransactionXDR: string -): Promise { - try { - const transaction = new Transaction( - signedTransactionXDR, - process.env.NODE_ENV === 'production' ? 
Networks.PUBLIC : Networks.TESTNET - ); - if (transaction.signatures.length === 0) { - console.error('No signatures found in transaction'); - return false; - } - const sourceAccount = transaction.source; - if (sourceAccount !== publicKey) { - console.error('Transaction source account mismatch'); - return false; - } - // Verify the transaction memo contains our challenge - const memo = transaction.memo; - if (!memo || memo.type !== 'text') { - console.error('Invalid memo type:', memo?.type); - return false; - } - - let memoValue: string; - if (Buffer.isBuffer(memo.value)) { - memoValue = memo.value.toString('utf8'); - } else { - memoValue = memo.value as string; - } - - if (memoValue !== challenge) { - console.error('Memo mismatch!'); - console.error(' Expected:', challenge); - return false; - } - const transactionHash = transaction.hash(); - const signature = transaction.signatures[0]; - - const { Keypair } = await import('@stellar/stellar-sdk'); - const keypair = Keypair.fromPublicKey(publicKey); - - // Verify the signature - const isValid = keypair.verify(transactionHash, signature.signature()); - - if (!isValid) { - console.error('Signature verification failed'); - } else { - console.log('Signature verification successful'); - } - - return isValid; - } catch (error) { - console.error('Transaction verification error:', error); - return false; - } -} - -//=================== -// Get or create wallet user -//=================== -async function getOrCreateWalletUser(publicKey: string) { - console.log('Getting or creating wallet user for:', `${publicKey.substring(0, 10)}...`); - - const { data: existingUser, error: fetchError } = await supabase - .from('wallet_users') - .select('*') - .eq('public_key', publicKey) - .single(); - - if (existingUser && !fetchError) { - await supabase - .from('wallet_users') - .update({ updated_at: new Date().toISOString() }) - .eq('id', existingUser.id); - - return existingUser; - } - - const { data: newUser, error: createError } = 
await supabase - .from('wallet_users') - .insert({ - public_key: publicKey, - }) - .select() - .single(); - - if (createError || !newUser) { - console.error('Error creating wallet user:', createError); - throw new Error('Failed to create wallet user'); - } - - console.log('New user created:', newUser.id); - - const { error: profileError } = await supabase.from('profiles').insert({ - user_id: newUser.id, - name: 'Wallet User', - verification_status: 'unverified', - last_active: new Date().toISOString(), - }); - - if (profileError) { - console.error('Warning: Failed to create profile:', profileError.message || profileError); - } else { - console.log('Profile created for user'); - } - - return newUser; -} - -//=================== -// Get user profile -//=================== -async function getUserProfile(userId: string) { - const { data: profile } = await supabase - .from('profiles') - .select('*') - .eq('user_id', userId) - .single(); - - return profile || undefined; -} - -//=================== -// Generate JWT token -//=================== -function generateJWT(userId: string, publicKey: string): string { - if (!process.env.JWT_SECRET) { - throw new Error('JWT_SECRET environment variable is required'); - } - - return jwt.sign( - { - userId, - publicKey, - type: 'wallet', - }, - process.env.JWT_SECRET, - { - expiresIn: '7d', - } - ); -} - -//=================== -// Verify wallet signature and authenticate user -//=================== -export async function authenticateWallet(input: WalletLoginRequest): Promise { - const { publicKey, signedTransaction, challenge } = input; - const userKey = `${publicKey.substring(0, 10)}...`; - - console.log(`Starting wallet authentication for ${userKey}`); - console.log(`Challenge: ${challenge}`); - - if (!StrKey.isValidEd25519PublicKey(publicKey)) { - console.error(`Invalid public key format for ${userKey}`); - throw new InvalidPublicKeyError('Invalid public key format'); - } - console.log(`Public key format is valid for ${userKey}`); 
- - console.log(`Validating challenge for ${userKey}...`); - const storedChallenge = await getValidChallenge(publicKey, challenge); - if (!storedChallenge) { - console.error(`Invalid or expired challenge for ${userKey}`); - throw new InvalidChallengeError('Invalid or expired challenge'); - } - console.log(`Challenge is valid for ${userKey}`); - console.log(`Verifying signed transaction for ${userKey}...`); - const isValidTransaction = await verifySignedTransaction(publicKey, challenge, signedTransaction); - if (!isValidTransaction) { - console.error(`Transaction verification failed for ${userKey}`); - throw new SignatureVerificationError('Invalid signed transaction'); - } - console.log(`Transaction verification successful for ${userKey}`); - - console.log(`Removing used challenge for ${userKey}...`); - await removeChallenge(storedChallenge.id); - - console.log(`Getting or creating wallet user for ${userKey}...`); - const walletUser = await getOrCreateWalletUser(publicKey); - - console.log(`Generating JWT token for ${userKey}...`); - const token = generateJWT(walletUser.id, publicKey); - - const profile = await getUserProfile(walletUser.id); - console.log(`Wallet authentication completed successfully for ${userKey}!`); - return { - token, - user: { - id: walletUser.id, - publicKey, - profile, - }, - }; -} diff --git a/apps/backend/src/services/wallet-challenge.service.ts b/apps/backend/src/services/wallet-challenge.service.ts deleted file mode 100644 index 4ee5363c..00000000 --- a/apps/backend/src/services/wallet-challenge.service.ts +++ /dev/null @@ -1,139 +0,0 @@ -import { randomBytes } from 'node:crypto'; -import { supabase } from '../config/supabase'; -import type { Challenge, ChallengeResponse } from '../types/wallet-auth.types'; - -//=================== -// Generate a unique challenge for a given public key -//=================== -export async function generateChallenge(publicKey: string): Promise { - const CHALLENGE_EXPIRY_MINUTES = 5; - - // Check if there's 
already a valid challenge for this public key - const { data: existingChallenge, error: fetchError } = await supabase - .from('wallet_challenges') - .select('*') - .eq('public_key', publicKey) - .gt('expires_at', new Date().toISOString()) - .single(); - - if (existingChallenge && !fetchError) { - console.log(`Reusing existing challenge for ${publicKey.substring(0, 10)}...`); - return { - challenge: existingChallenge.challenge, - expiresAt: existingChallenge.expires_at, - }; - } - - const challenge = randomBytes(12).toString('hex'); - const expiresAt = new Date(Date.now() + CHALLENGE_EXPIRY_MINUTES * 60 * 1000); - - await cleanupExpiredChallenges(publicKey); - - const { error } = await supabase.from('wallet_challenges').insert({ - public_key: publicKey, - challenge, - expires_at: expiresAt.toISOString(), - }); - - if (error) { - console.error('Error storing challenge:', error); - throw new Error('Failed to generate challenge'); - } - - console.log(`Generated new challenge for ${publicKey.substring(0, 10)}...`); - return { - challenge, - expiresAt: expiresAt.toISOString(), - }; -} - -//=================== -// Retrieve and validate a challenge for a given public key -//=================== -export async function getValidChallenge( - publicKey: string, - challengeValue: string -): Promise { - const { data, error } = await supabase - .from('wallet_challenges') - .select('*') - .eq('public_key', publicKey) - .eq('challenge', challengeValue) - .gt('expires_at', new Date().toISOString()) - .single(); - - if (error || !data) { - return null; - } - - return data; -} - -//=================== -// Remove a used challenge -//=================== -export async function removeChallenge(challengeId: string): Promise { - const { error } = await supabase.from('wallet_challenges').delete().eq('id', challengeId); - - if (error) { - console.error('Error removing challenge:', error); - } -} - -//=================== -// Clean up expired challenges for a public key -//=================== 
-async function cleanupExpiredChallenges(publicKey?: string): Promise { - let query = supabase - .from('wallet_challenges') - .delete() - .lt('expires_at', new Date().toISOString()); - - if (publicKey) { - query = query.eq('public_key', publicKey); - } - - const { error } = await query; - - if (error) { - console.error('Error cleaning up expired challenges:', error); - } -} - -//=================== -// Periodic cleanup of all expired challenges -//=================== -// export async function cleanupAllExpiredChallenges(): Promise { -// await cleanupExpiredChallenges(); -// } -export async function cleanupAllExpiredChallenges(): Promise { - try { - const { data: expiredChallenges, error: fetchError } = await supabase - .from('wallet_challenges') - .select('id') - .lt('expires_at', new Date().toISOString()); - - if (fetchError) { - console.error('Error fetching expired challenges:', fetchError); - return; - } - - if (!expiredChallenges || expiredChallenges.length === 0) { - console.log('No expired challenges to cleanup'); - return; - } - - const { error: deleteError } = await supabase - .from('wallet_challenges') - .delete() - .lt('expires_at', new Date().toISOString()); - - if (deleteError) { - console.error('Error cleaning up expired challenges:', deleteError); - } else { - console.log(`πŸ—‘οΈ Cleaned up ${expiredChallenges.length} expired challenges`); - } - } catch (error) { - console.error('Error during cleanup:', error); - } -} diff --git a/apps/backend/src/types/errors.ts b/apps/backend/src/types/errors.ts new file mode 100644 index 00000000..56d2b355 --- /dev/null +++ b/apps/backend/src/types/errors.ts @@ -0,0 +1,49 @@ +/** + * Custom Error Classes for StellarRent Backend + * + * These error classes provide structured error handling for different + * domains within the application, enabling better error tracking, + * logging, and API responses. 
+ */ + +/** + * Error class for escrow-related operations + */ +export class EscrowError extends Error { + constructor( + message: string, + public code: string, + public details?: unknown + ) { + super(message); + this.name = 'EscrowError'; + } +} + +/** + * Error class for blockchain synchronization operations + */ +export class SyncError extends Error { + constructor( + message: string, + public code: string, + public details?: unknown + ) { + super(message); + this.name = 'SyncError'; + } +} + +/** + * Error class for cache operations + */ +export class CacheError extends Error { + constructor( + message: string, + public code: string, + public details?: unknown + ) { + super(message); + this.name = 'CacheError'; + } +} diff --git a/apps/backend/src/types/wallet-auth.types.ts b/apps/backend/src/types/wallet-auth.types.ts deleted file mode 100644 index 19f76cd6..00000000 --- a/apps/backend/src/types/wallet-auth.types.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { z } from 'zod'; - -// Challenge request schema -export const challengeRequestSchema = z.object({ - publicKey: z.string().min(1, 'Public key is required'), -}); - -// Wallet login schema -export const walletLoginSchema = z.object({ - publicKey: z.string().min(1, 'Public key is required'), - signedTransaction: z.string().min(1, 'Signed transaction is required'), - challenge: z.string().min(1, 'Challenge is required'), -}); - -// Types -export type ChallengeRequest = z.infer; -export type WalletLoginRequest = z.infer; - -export interface Challenge { - id: string; - public_key: string; - challenge: string; - expires_at: string; - created_at: string; -} - -export interface WalletAuthResponse { - token: string; - user: { - id: string; - publicKey: string; - profile?: { - name?: string; - avatar_url?: string; - phone?: string; - address?: string; - preferences?: Record; - social_links?: Record; - verification_status: string; - last_active: string; - }; - }; -} - -export interface ChallengeResponse { - challenge: 
string; - expiresAt: string; -} diff --git a/apps/backend/src/validators/wallet-auth.validator.ts b/apps/backend/src/validators/wallet-auth.validator.ts deleted file mode 100644 index 568fcd19..00000000 --- a/apps/backend/src/validators/wallet-auth.validator.ts +++ /dev/null @@ -1,39 +0,0 @@ -import type { NextFunction, Request, Response } from 'express'; -import { z } from 'zod'; -import { challengeRequestSchema, walletLoginSchema } from '../types/wallet-auth.types'; - -//=================== -// Validate challenge request -//=================== -export const validateChallengeRequest = (req: Request, res: Response, next: NextFunction) => { - try { - challengeRequestSchema.parse(req.body); - next(); - } catch (error) { - if (error instanceof z.ZodError) { - return res.status(400).json({ - error: 'Invalid challenge request data', - details: error.errors, - }); - } - next(error); - } -}; - -//=================== -// Validate wallet login request -//=================== -export const validateWalletLogin = (req: Request, res: Response, next: NextFunction) => { - try { - walletLoginSchema.parse(req.body); - next(); - } catch (error) { - if (error instanceof z.ZodError) { - return res.status(400).json({ - error: 'Invalid wallet login data', - details: error.errors, - }); - } - next(error); - } -}; diff --git a/apps/backend/tests/utils/booking-test.utils.ts b/apps/backend/tests/utils/booking-test.utils.ts index 7fc25158..a901c269 100644 --- a/apps/backend/tests/utils/booking-test.utils.ts +++ b/apps/backend/tests/utils/booking-test.utils.ts @@ -19,7 +19,6 @@ mock.module('../../src/config/supabase', () => ({ select: mock(() => ({ eq: mock(() => ({ single: mock(() => Promise.resolve({ data: null, error: null })), -<<<<<<< HEAD then: mock((callback: any) => callback({ data: [], error: null })), })), in: mock(() => Promise.resolve({ data: [], error: null })), @@ -42,27 +41,13 @@ mock.module('../../src/config/supabase', () => ({ eq: mock(() => Promise.resolve({ data: [], error: 
null })), in: mock(() => Promise.resolve({ data: [], error: null })), then: mock((callback: any) => callback({ data: [], error: null })), -======= - })), - })), - insert: mock(() => ({ - select: mock(() => Promise.resolve({ data: [], error: null })), - })), - update: mock(() => ({ - eq: mock(() => Promise.resolve({ data: [], error: null })), - })), - delete: mock(() => ({ - eq: mock(() => Promise.resolve({ data: [], error: null })), ->>>>>>> f4a72f1 (feat: connect smart contracts to backend APIs) })), upsert: mock(() => ({ eq: mock(() => Promise.resolve({ data: [], error: null })), })), -<<<<<<< HEAD then: mock((callback: any) => callback({ data: [], error: null })), -======= ->>>>>>> f4a72f1 (feat: connect smart contracts to backend APIs) })), + rpc: mock(() => Promise.resolve({ data: { success: true }, error: null })), auth: { getUser: mock(() => Promise.resolve({ diff --git a/apps/web/.env.example b/apps/web/.env.example index 4ae2ec1b..260f0128 100644 --- a/apps/web/.env.example +++ b/apps/web/.env.example @@ -1,4 +1,14 @@ -NEXT_PUBLIC_STELLAR_NETWORK= +# Stellar Network Configuration +NEXT_PUBLIC_STELLAR_NETWORK=testnet NEXT_PUBLIC_USDC_ISSUER_TESTNET= NEXT_PUBLIC_USDC_ISSUER_MAINNET= -NEXT_PUBLIC_API_URL= \ No newline at end of file + +# API Configuration +NEXT_PUBLIC_API_URL=http://localhost:3001/api + +# Stellar Social Login SDK Configuration +# Get your Google Client ID from: https://console.cloud.google.com/apis/credentials +NEXT_PUBLIC_GOOGLE_CLIENT_ID=your_google_client_id_here + +# Soroban Contract ID for Social Wallet +NEXT_PUBLIC_CONTRACT_ID=CALZGCSB3P3WEBLW3QTF5Y4WEALEVTYUYBC7KBGQ266GDINT7U4E74KW diff --git a/apps/web/package.json b/apps/web/package.json index e2af0336..8ad2fd90 100644 --- a/apps/web/package.json +++ b/apps/web/package.json @@ -12,6 +12,7 @@ "test:e2e:headed": "playwright test --headed" }, "dependencies": { + "stellar-social-sdk": "file:./stellar-social-sdk", "@hookform/resolvers": "^3.3.4", "@radix-ui/react-dialog": "^1.1.14", 
"@radix-ui/react-dropdown-menu": "^2.1.12", diff --git a/apps/web/src/app/dashboard/tenant-dashboard/page.tsx b/apps/web/src/app/dashboard/tenant-dashboard/page.tsx index 0c8b32f9..9a0e2cd4 100644 --- a/apps/web/src/app/dashboard/tenant-dashboard/page.tsx +++ b/apps/web/src/app/dashboard/tenant-dashboard/page.tsx @@ -13,6 +13,13 @@ import { DollarSign, Download, Edit3, + Eye, + Filter, + Home, + Info, + LogOut, + MapPin, + MessageSquare, PieChart, Settings, Star, @@ -20,7 +27,10 @@ import { Wallet, } from 'lucide-react'; import Image from 'next/image'; -import { useState } from 'react'; +import { useRouter } from 'next/navigation'; +import type React from 'react'; +import { useEffect, useState } from 'react'; +import { useAuth } from '~/hooks/auth/use-auth'; interface Booking { id: string; @@ -165,6 +175,8 @@ const mockTransactions = [ ]; const TenantDashboard = () => { + const router = useRouter(); + const { logout } = useAuth(); const [activeTab, setActiveTab] = useState('bookings'); const [bookings, setBookings] = useState(mockBookings); const [user, setUser] = useState(mockUser); @@ -339,6 +351,18 @@ const TenantDashboard = () => { {user.name} + diff --git a/apps/web/src/app/layout.tsx b/apps/web/src/app/layout.tsx index 2b6949d2..81018e81 100644 --- a/apps/web/src/app/layout.tsx +++ b/apps/web/src/app/layout.tsx @@ -20,6 +20,11 @@ export default function RootLayout({ + +