From d983525aa5c9b924f7a7db5e9f2d79af26afa7f4 Mon Sep 17 00:00:00 2001
From: hailin
Date: Mon, 8 Dec 2025 07:57:17 -0800
Subject: [PATCH] fix(wallet): resolve account creation and wallet status query issues
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This commit fixes three critical bugs that prevented the wallet creation
flow from completing successfully:

1. mpc-service: extraPayload not included in Kafka messages
   - KeygenCompletedEvent's extraPayload (containing userId, accountSequence,
     username, derivedAddresses) was set dynamically on the event but never
     serialized into the Kafka message
   - identity-service therefore received events without a userId and skipped
     processing
   - Fix: Merge extraPayload into the published payload in event-publisher

2. mpc-service: KAFKA_BROKERS hostname mismatch
   - mpc-service used KAFKA_BROKERS=rwa-kafka:29092
   - Kafka advertises itself as kafka:29092 in cluster metadata
   - During consumer group rebalance, mpc-service couldn't connect to the
     coordinator address returned by Kafka
   - Fix: Use kafka:29092 to match Kafka's advertised listener

3. blockchain-service: recovery_mnemonics table missing
   - RecoveryMnemonic model exists in schema.prisma but has no corresponding
     migration
   - prisma migrate deploy found no pending migrations
   - Address derivation failed with a "table does not exist" error
   - Fix: Use prisma db push instead of migrate deploy to sync the schema

Tested: E2E flow now completes successfully
- POST /user/auto-create creates an account
- MPC keygen completes and publishes the event with extraPayload
- blockchain-service derives addresses and saves the recovery mnemonic
- GET /user/wallet returns status=ready with 3 addresses and the mnemonic

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 backend/services/blockchain-service/Dockerfile        | 6 +++---
 backend/services/docker-compose.yml                   | 2 +-
 .../messaging/kafka/event-publisher.service.ts        | 6 +++++-
 3 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/backend/services/blockchain-service/Dockerfile b/backend/services/blockchain-service/Dockerfile
index e8d5ac47..4d8bec84 100644
--- a/backend/services/blockchain-service/Dockerfile
+++ b/backend/services/blockchain-service/Dockerfile
@@ -52,11 +52,11 @@ RUN DATABASE_URL="postgresql://user:pass@localhost:5432/db" npx prisma generate
 # Copy built files
 COPY --from=builder /app/dist ./dist
 
-# Create startup script that runs migrations before starting the app
+# Create startup script that syncs schema before starting the app
 RUN echo '#!/bin/sh\n\
 set -e\n\
-echo "Running database migrations..."\n\
-npx prisma migrate deploy || npx prisma db push --accept-data-loss\n\
+echo "Syncing database schema..."\n\
+npx prisma db push --skip-generate\n\
 echo "Starting application..."\n\
 exec node dist/main.js\n' > /app/start.sh && chmod +x /app/start.sh
 
diff --git a/backend/services/docker-compose.yml b/backend/services/docker-compose.yml
index 85615136..d446c53c 100644
--- a/backend/services/docker-compose.yml
+++ b/backend/services/docker-compose.yml
@@ -342,7 +342,7 @@ services:
       - REDIS_PORT=6379
       - REDIS_PASSWORD=${REDIS_PASSWORD:-}
       - REDIS_DB=5
-      - KAFKA_BROKERS=rwa-kafka:29092
+      - KAFKA_BROKERS=kafka:29092
       - KAFKA_CLIENT_ID=mpc-service
       - KAFKA_GROUP_ID=mpc-service-group
       # MPC System - use the Docker network internal address
diff --git a/backend/services/mpc-service/src/infrastructure/messaging/kafka/event-publisher.service.ts b/backend/services/mpc-service/src/infrastructure/messaging/kafka/event-publisher.service.ts
index 8360b217..88bdffd2 100644
--- a/backend/services/mpc-service/src/infrastructure/messaging/kafka/event-publisher.service.ts
+++ b/backend/services/mpc-service/src/infrastructure/messaging/kafka/event-publisher.service.ts
@@ -73,13 +73,17 @@ export class EventPublisherService implements OnModuleInit, OnModuleDestroy {
     this.logger.log(`[PUBLISH] EventId: ${event.eventId}`);
     this.logger.log(`[PUBLISH] AggregateId: ${event.aggregateId}`);
 
+    // Merge extraPayload into payload if present (used by KeygenCompletedEvent etc.)
+    const extraPayload = (event as any).extraPayload || {};
+    const fullPayload = { ...event.payload, extraPayload };
+
     const messageValue = {
       eventId: event.eventId,
       eventType: event.eventType,
       occurredAt: event.occurredAt.toISOString(),
       aggregateId: event.aggregateId,
       aggregateType: event.aggregateType,
-      payload: event.payload,
+      payload: fullPayload,
     };
 
     this.logger.log(`[PUBLISH] Payload keys: ${Object.keys(event.payload).join(', ')}`);
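For context, a minimal consumer-side sketch of where the merged fields end up after this change. The kafkajs usage, topic name ('mpc.keygen.completed'), client id, and group id are illustrative assumptions, not taken from this patch; only the broker address and the payload.extraPayload shape come from the changes above.

```typescript
// Hypothetical identity-service consumer sketch. Assumes kafkajs and a topic
// named 'mpc.keygen.completed' (illustrative only). It shows that after this
// fix the dynamic fields arrive under payload.extraPayload.
import { Kafka } from 'kafkajs';

const kafka = new Kafka({ clientId: 'identity-service', brokers: ['kafka:29092'] });
const consumer = kafka.consumer({ groupId: 'identity-service-group' });

async function run(): Promise<void> {
  await consumer.connect();
  await consumer.subscribe({ topic: 'mpc.keygen.completed', fromBeginning: false });

  await consumer.run({
    eachMessage: async ({ message }) => {
      if (!message.value) return;
      const event = JSON.parse(message.value.toString());

      // With the publisher fix, extraPayload is serialized inside payload.
      const { userId, accountSequence, username, derivedAddresses } =
        event.payload?.extraPayload ?? {};

      if (!userId) {
        // Pre-fix behaviour: the event carried no userId and was skipped.
        return;
      }

      // ...bind derivedAddresses to the user's wallet record here.
      console.log(
        `keygen completed for user ${userId} (${username}), seq ${accountSequence}`,
        derivedAddresses,
      );
    },
  });
}

run().catch((err) => {
  console.error(err);
  process.exit(1);
});
```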