Compare commits


No commits in common. "main" and "v2.0.0-cdc-sync-fix" have entirely different histories.

639 changed files with 4317 additions and 86607 deletions

View File

@ -767,55 +767,7 @@
"Bash(git -C \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\" commit -m \"$\\(cat <<''EOF''\nfix\\(mining-app\\): update splash page theme and fix token refresh\n\n- Update splash_page.dart to orange theme \\(#FF6B00\\) matching other pages\n- Change app name from \"榴莲挖矿\" to \"榴莲生态\"\n- Fix refreshTokenIfNeeded to properly throw on failure instead of\n silently calling logout \\(which caused Riverpod ref errors\\)\n- Clear local storage directly on refresh failure without remote API call\n\nCo-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>\nEOF\n\\)\")", "Bash(git -C \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\" commit -m \"$\\(cat <<''EOF''\nfix\\(mining-app\\): update splash page theme and fix token refresh\n\n- Update splash_page.dart to orange theme \\(#FF6B00\\) matching other pages\n- Change app name from \"榴莲挖矿\" to \"榴莲生态\"\n- Fix refreshTokenIfNeeded to properly throw on failure instead of\n silently calling logout \\(which caused Riverpod ref errors\\)\n- Clear local storage directly on refresh failure without remote API call\n\nCo-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>\nEOF\n\\)\")",
"Bash(python3 -c \" import sys content = sys.stdin.read\\(\\) old = '''''' done # 清空 processed_cdc_events 表(因为 migration 时可能已经消费了一些消息) # 这是事务性幂等消费的关键:重置 Kafka offset 后必须同时清空幂等记录 log_info \"\"Truncating processed_cdc_events tables to allow re-consumption...\"\" for db in \"\"rwa_contribution\"\" \"\"rwa_auth\"\"; do if run_psql \"\"$db\"\" \"\"TRUNCATE TABLE processed_cdc_events;\"\" 2>/dev/null; then log_success \"\"Truncated processed_cdc_events in $db\"\" else log_warn \"\"Could not truncate processed_cdc_events in $db \\(table may not exist yet\\)\"\" fi done log_step \"\"Step 9/18: Starting 2.0 services...\"\"'''''' new = '''''' done # 清空 processed_cdc_events 表(因为 migration 时可能已经消费了一些消息) # 这是事务性幂等消费的关键:重置 Kafka offset 后必须同时清空幂等记录 log_info \"\"Truncating processed_cdc_events tables to allow re-consumption...\"\" for db in \"\"rwa_contribution\"\" \"\"rwa_auth\"\"; do if run_psql \"\"$db\"\" \"\"TRUNCATE TABLE processed_cdc_events;\"\" 2>/dev/null; then log_success \"\"Truncated processed_cdc_events in $db\"\" else log_warn \"\"Could not truncate processed_cdc_events in $db \\(table may not exist yet\\)\"\" fi done log_step \"\"Step 9/18: Starting 2.0 services...\"\"'''''' print\\(content.replace\\(old, new\\)\\) \")", "Bash(python3 -c \" import sys content = sys.stdin.read\\(\\) old = '''''' done # 清空 processed_cdc_events 表(因为 migration 时可能已经消费了一些消息) # 这是事务性幂等消费的关键:重置 Kafka offset 后必须同时清空幂等记录 log_info \"\"Truncating processed_cdc_events tables to allow re-consumption...\"\" for db in \"\"rwa_contribution\"\" \"\"rwa_auth\"\"; do if run_psql \"\"$db\"\" \"\"TRUNCATE TABLE processed_cdc_events;\"\" 2>/dev/null; then log_success \"\"Truncated processed_cdc_events in $db\"\" else log_warn \"\"Could not truncate processed_cdc_events in $db \\(table may not exist yet\\)\"\" fi done log_step \"\"Step 9/18: Starting 2.0 services...\"\"'''''' new = '''''' done # 清空 processed_cdc_events 表(因为 migration 时可能已经消费了一些消息) # 这是事务性幂等消费的关键:重置 Kafka offset 后必须同时清空幂等记录 log_info \"\"Truncating processed_cdc_events tables to allow re-consumption...\"\" for db in \"\"rwa_contribution\"\" \"\"rwa_auth\"\"; do if run_psql \"\"$db\"\" \"\"TRUNCATE TABLE processed_cdc_events;\"\" 2>/dev/null; then log_success \"\"Truncated processed_cdc_events in $db\"\" else log_warn \"\"Could not truncate processed_cdc_events in $db \\(table may not exist yet\\)\"\" fi done log_step \"\"Step 9/18: Starting 2.0 services...\"\"'''''' print\\(content.replace\\(old, new\\)\\) \")",
"Bash(git rm:*)", "Bash(git rm:*)",
"Bash(echo \"请在服务器运行以下命令检查 outbox 事件:\n\ndocker exec -it rwa-postgres psql -U rwa_user -d rwa_contribution -c \"\"\nSELECT id, event_type, aggregate_id, \n payload->>''sourceType'' as source_type,\n payload->>''accountSequence'' as account_seq,\n payload->>''sourceAccountSequence'' as source_account_seq,\n payload->>''bonusTier'' as bonus_tier\nFROM outbox_events \nWHERE payload->>''accountSequence'' = ''D25122900007''\nORDER BY id;\n\"\"\")", "Bash(echo \"请在服务器运行以下命令检查 outbox 事件:\n\ndocker exec -it rwa-postgres psql -U rwa_user -d rwa_contribution -c \"\"\nSELECT id, event_type, aggregate_id, \n payload->>''sourceType'' as source_type,\n payload->>''accountSequence'' as account_seq,\n payload->>''sourceAccountSequence'' as source_account_seq,\n payload->>''bonusTier'' as bonus_tier\nFROM outbox_events \nWHERE payload->>''accountSequence'' = ''D25122900007''\nORDER BY id;\n\"\"\")"
"Bash(ssh -o ConnectTimeout=10 ceshi@14.215.128.96 'find /home/ceshi/rwadurian/frontend/mining-admin-web -name \"\"*.tsx\"\" -o -name \"\"*.ts\"\" | xargs grep -l \"\"用户管理\\\\|users\"\" 2>/dev/null | head -10')",
"Bash(dir /s /b \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\")",
"Bash(dir /b \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\backend\\\\services\")",
"Bash(ssh -J ceshi@103.39.231.231 ceshi@192.168.1.111 \"curl -s http://localhost:3021/api/v2/admin/status\")",
"Bash(del \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\frontend\\\\mining-app\\\\lib\\\\domain\\\\usecases\\\\trading\\\\buy_shares.dart\")",
"Bash(del \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\frontend\\\\mining-app\\\\lib\\\\domain\\\\usecases\\\\trading\\\\sell_shares.dart\")",
"Bash(ls -la \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\frontend\\\\mining-app\\\\lib\\\\presentation\\\\pages\"\" 2>/dev/null || dir /b \"c:UsersdongDesktoprwadurianfrontendmining-applibpresentationpages \")",
"Bash(cd:*)",
"Bash(ssh -o StrictHostKeyChecking=no -J ceshi@103.39.231.231 ceshi@192.168.1.111 \"curl -s http://localhost:3020/api/v1/ | head -100\")",
"Bash(ssh -o StrictHostKeyChecking=no -J ceshi@103.39.231.231 ceshi@192.168.1.111:*)",
"Bash(bc:*)",
"Bash(DATABASE_URL=\"postgresql://postgres:password@localhost:5432/mining_db?schema=public\" npx prisma migrate diff:*)",
"Bash(git status:*)",
"Bash(xargs cat:*)",
"Bash(ssh -o ProxyJump=ceshi@103.39.231.231 ceshi@192.168.1.111 \"docker ps | grep mining\")",
"Bash(dir /b \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\backend\\\\services\\\\trading-service\\\\src\\\\application\\\\services\")",
"Bash(DATABASE_URL=\"postgresql://postgres:password@localhost:5432/trading_db?schema=public\" npx prisma migrate dev:*)",
"Bash(dir /b \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\backend\\\\services\\\\mining-admin-service\\\\src\")",
"Bash(ssh -o ProxyJump=ceshi@103.39.231.231 ceshi@192.168.1.111 \"cd /home/ceshi/rwadurian/backend/service && ls -la\")",
"Bash(ssh -o ProxyJump=ceshi@103.39.231.231 ceshi@192.168.1.111 \"ls -la /home/ceshi/rwadurian/backend/\")",
"Bash(ssh -o ProxyJump=ceshi@103.39.231.231 ceshi@192.168.1.111 \"ls -la /home/ceshi/rwadurian/backend/services/\")",
"Bash(where:*)",
"Bash(npx md-to-pdf:*)",
"Bash(ssh -J ceshi@103.39.231.231 ceshi@192.168.1.111 \"curl -s ''http://localhost:3000/api/price/klines?period=1h&limit=5'' | head -500\")",
"Bash(dir /b /ad \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\backend\")",
"Bash(timeout 30 cat:*)",
"Bash(npm run lint)",
"Bash(ssh -o ProxyCommand=\"ssh -W %h:%p ceshi@103.39.231.231\" -o StrictHostKeyChecking=no ceshi@192.168.1.111 \"cat /home/ceshi/rwadurian/backend/services/mining-service/src/application/services/batch-mining.service.ts | head -250\")",
"Bash(ssh -o ProxyCommand=\"ssh -W %h:%p ceshi@103.39.231.231\" -o StrictHostKeyChecking=no ceshi@192.168.1.111 \"docker logs rwa-mining-admin-service --tail 50 2>&1 | grep ''第一条数据\\\\|最后一条数据''\")",
"Bash(npx xlsx-cli 挖矿.xlsx)",
"Bash(DATABASE_URL=\"postgresql://postgres:password@localhost:5432/mining_db?schema=public\" npx prisma migrate dev:*)",
"Bash(md-to-pdf:*)",
"Bash(dir \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\docs\\\\deployment\\\\*.pdf\")",
"Bash(./gradlew compileDebugKotlin:*)",
"Bash(cmd.exe /c \"cd /d c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\backend\\\\mpc-system\\\\services\\\\service-party-android && gradlew.bat :app:compileDebugKotlin --no-daemon\")",
"Bash(powershell -Command \"Set-Location 'c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\backend\\\\mpc-system\\\\services\\\\service-party-android'; .\\\\gradlew.bat :app:compileDebugKotlin --no-daemon 2>&1\":*)",
"Bash(powershell -Command \"Set-Location ''c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\backend\\\\mpc-system\\\\services\\\\service-party-android''; .\\\\gradlew.bat :app:compileDebugKotlin --no-daemon 2>&1 | Select-Object -Last 20\")",
"Bash(cmd.exe /c \"gradlew.bat installDebug && adb logcat -c && adb logcat | findstr /C:\"\"EXPORT\"\" /C:\"\"IMPORT\"\" /C:\"\"STATE\"\"\")",
"Bash(./gradlew:*)",
"Bash(adb shell \"run-as com.durian.tssparty sqlite3 /data/data/com.durian.tssparty/databases/tss_party.db ''SELECT id, tx_hash, from_address, to_address, amount, token_type, status, direction, created_at FROM transaction_records ORDER BY id DESC LIMIT 5;''\")",
"WebFetch(domain:docs.kava.io)",
"WebFetch(domain:kavascan.com)",
"Bash(.gradlew.bat compileDebugKotlin:*)",
"WebFetch(domain:github.com)",
"WebFetch(domain:oneuptime.com)",
"Bash(gradlew.bat assembleDebug:*)",
"Bash(cmd /c \"gradlew.bat assembleDebug --no-daemon\")",
"Bash(./build-install-debug.bat)"
],
"deny": [],
"ask": []

View File

@ -309,42 +309,24 @@ services:
# ---------------------------------------------------------------------------
# Trading Service 2.0 - trading service
- # Frontend path: /api/v2/trading/... -> backend path: /api/v2/...
# ---------------------------------------------------------------------------
  - name: trading-service-v2
-     url: http://192.168.1.111:3022/api/v2
+     url: http://192.168.1.111:3022
    routes:
      - name: trading-v2-api
        paths:
          - /api/v2/trading
-         strip_path: true
+         strip_path: false
      - name: trading-v2-health
        paths:
          - /api/v2/trading/health
-         strip_path: true
+         strip_path: false
- # ---------------------------------------------------------------------------
- # Trading Service WebSocket - real-time price push
- # WebSocket connection: wss://api.xxx.com/ws/price -> ws://192.168.1.111:3022/price
- # Kong handles the HTTP -> WebSocket upgrade automatically, so protocols only needs http/https
- # ---------------------------------------------------------------------------
-   - name: trading-ws-service
-     url: http://192.168.1.111:3022
-     routes:
-       - name: trading-ws-price
-         paths:
-           - /ws/price
-         strip_path: true
-         protocols:
-           - http
-           - https
# ---------------------------------------------------------------------------
# Mining Admin Service 2.0 - mining admin backend service
- # Frontend path: /api/v2/mining-admin/... -> backend path: /api/v2/...
# ---------------------------------------------------------------------------
  - name: mining-admin-service
-     url: http://192.168.1.111:3023/api/v2
+     url: http://192.168.1.111:3023/api/v1
    routes:
      - name: mining-admin-api
        paths:
@ -355,19 +337,6 @@ services:
          - /api/v2/mining-admin/health
        strip_path: true
- # ---------------------------------------------------------------------------
- # Mining Admin Service 2.0 - version management (used by the mobile-upgrade frontend)
- # Frontend path: /mining-admin/api/v2/... -> backend path: /api/v2/...
- # Note: the service path does not include /api/v2, because the frontend URL already contains /api/v2
- # ---------------------------------------------------------------------------
-   - name: mining-admin-upgrade-service
-     url: http://192.168.1.111:3023
-     routes:
-       - name: mining-admin-upgrade
-         paths:
-           - /mining-admin
-         strip_path: true
# ---------------------------------------------------------------------------
# Auth Service 2.0 - user authentication service
# Frontend path: /api/v2/auth/...
@ -387,19 +356,18 @@ services:
# ---------------------------------------------------------------------------
# Mining Wallet Service 2.0 - mining wallet service
- # Frontend path: /api/v2/mining-wallet/... -> backend path: /api/v2/...
# ---------------------------------------------------------------------------
  - name: mining-wallet-service
-     url: http://192.168.1.111:3025/api/v2
+     url: http://192.168.1.111:3025
    routes:
      - name: mining-wallet-api
        paths:
          - /api/v2/mining-wallet
-         strip_path: true
+         strip_path: false
      - name: mining-wallet-health
        paths:
          - /api/v2/mining-wallet/health
-         strip_path: true
+         strip_path: false
# =============================================================================
# Plugins - global plugin configuration
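For the trading and mining-wallet services above, the two sides differ only in where the /api/v2 prefix is handled: the old entries stripped the matched route prefix (strip_path: true) and re-added /api/v2 through the service url, while the new entries keep the original request path (strip_path: false) and point at a service url without a path. Below is a rough Go sketch of Kong's documented strip_path behaviour, with a hypothetical /orders request path, to make the before/after mapping concrete; it is an illustration, not Kong code.

```go
package kongpaths

import "strings"

// upstreamPath mimics Kong's documented strip_path behaviour for a matched
// route: when strip is true the matched route prefix is removed before the
// service path is prepended; when false the full request path is forwarded.
func upstreamPath(servicePath, routePath, requestPath string, strip bool) string {
	p := requestPath
	if strip {
		p = strings.TrimPrefix(requestPath, routePath)
		if !strings.HasPrefix(p, "/") {
			p = "/" + p
		}
	}
	return servicePath + p
}

// Old entry: upstreamPath("/api/v2", "/api/v2/trading", "/api/v2/trading/orders", true)  -> "/api/v2/orders"
// New entry: upstreamPath("",        "/api/v2/trading", "/api/v2/trading/orders", false) -> "/api/v2/trading/orders"
```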

View File

@ -680,9 +680,6 @@ type SessionEvent struct {
ExpiresAt int64 `protobuf:"varint,10,opt,name=expires_at,json=expiresAt,proto3" json:"expires_at,omitempty"` // Unix timestamp milliseconds
// For sign sessions with delegate party: user's share for delegate to use
DelegateUserShare *DelegateUserShare `protobuf:"bytes,11,opt,name=delegate_user_share,json=delegateUserShare,proto3" json:"delegate_user_share,omitempty"`
- // For session_started event: complete list of participants with their indices
- // CRITICAL: Use this for TSS protocol instead of JoinSession response
- Participants []*PartyInfo `protobuf:"bytes,12,rep,name=participants,proto3" json:"participants,omitempty"`
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
@ -794,13 +791,6 @@ func (x *SessionEvent) GetDelegateUserShare() *DelegateUserShare {
return nil
}
- func (x *SessionEvent) GetParticipants() []*PartyInfo {
- if x != nil {
- return x.Participants
- }
- return nil
- }
// DelegateUserShare contains user's share for delegate party to use in signing
type DelegateUserShare struct {
state protoimpl.MessageState `protogen:"open.v1"`
@ -2489,7 +2479,7 @@ const file_api_proto_message_router_proto_rawDesc = "" +
"\x1dSubscribeSessionEventsRequest\x12\x19\n" + "\x1dSubscribeSessionEventsRequest\x12\x19\n" +
"\bparty_id\x18\x01 \x01(\tR\apartyId\x12\x1f\n" + "\bparty_id\x18\x01 \x01(\tR\apartyId\x12\x1f\n" +
"\vevent_types\x18\x02 \x03(\tR\n" + "\vevent_types\x18\x02 \x03(\tR\n" +
"eventTypes\"\xd2\x04\n" + "eventTypes\"\x94\x04\n" +
"\fSessionEvent\x12\x19\n" + "\fSessionEvent\x12\x19\n" +
"\bevent_id\x18\x01 \x01(\tR\aeventId\x12\x1d\n" + "\bevent_id\x18\x01 \x01(\tR\aeventId\x12\x1d\n" +
"\n" + "\n" +
@ -2509,8 +2499,7 @@ const file_api_proto_message_router_proto_rawDesc = "" +
"\n" + "\n" +
"expires_at\x18\n" + "expires_at\x18\n" +
" \x01(\x03R\texpiresAt\x12P\n" + " \x01(\x03R\texpiresAt\x12P\n" +
"\x13delegate_user_share\x18\v \x01(\v2 .mpc.router.v1.DelegateUserShareR\x11delegateUserShare\x12<\n" + "\x13delegate_user_share\x18\v \x01(\v2 .mpc.router.v1.DelegateUserShareR\x11delegateUserShare\x1a=\n" +
"\fparticipants\x18\f \x03(\v2\x18.mpc.router.v1.PartyInfoR\fparticipants\x1a=\n" +
"\x0fJoinTokensEntry\x12\x10\n" + "\x0fJoinTokensEntry\x12\x10\n" +
"\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" +
"\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"\x89\x01\n" + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"\x89\x01\n" +
@ -2734,51 +2723,50 @@ var file_api_proto_message_router_proto_depIdxs = []int32{
6, // 1: mpc.router.v1.RegisterPartyRequest.notification:type_name -> mpc.router.v1.NotificationChannel 6, // 1: mpc.router.v1.RegisterPartyRequest.notification:type_name -> mpc.router.v1.NotificationChannel
37, // 2: mpc.router.v1.SessionEvent.join_tokens:type_name -> mpc.router.v1.SessionEvent.JoinTokensEntry 37, // 2: mpc.router.v1.SessionEvent.join_tokens:type_name -> mpc.router.v1.SessionEvent.JoinTokensEntry
11, // 3: mpc.router.v1.SessionEvent.delegate_user_share:type_name -> mpc.router.v1.DelegateUserShare 11, // 3: mpc.router.v1.SessionEvent.delegate_user_share:type_name -> mpc.router.v1.DelegateUserShare
25, // 4: mpc.router.v1.SessionEvent.participants:type_name -> mpc.router.v1.PartyInfo 10, // 4: mpc.router.v1.PublishSessionEventRequest.event:type_name -> mpc.router.v1.SessionEvent
10, // 5: mpc.router.v1.PublishSessionEventRequest.event:type_name -> mpc.router.v1.SessionEvent 6, // 5: mpc.router.v1.RegisteredParty.notification:type_name -> mpc.router.v1.NotificationChannel
6, // 6: mpc.router.v1.RegisteredParty.notification:type_name -> mpc.router.v1.NotificationChannel 15, // 6: mpc.router.v1.GetRegisteredPartiesResponse.parties:type_name -> mpc.router.v1.RegisteredParty
15, // 7: mpc.router.v1.GetRegisteredPartiesResponse.parties:type_name -> mpc.router.v1.RegisteredParty 20, // 7: mpc.router.v1.GetMessageStatusResponse.deliveries:type_name -> mpc.router.v1.MessageDeliveryStatus
20, // 8: mpc.router.v1.GetMessageStatusResponse.deliveries:type_name -> mpc.router.v1.MessageDeliveryStatus 24, // 8: mpc.router.v1.PartyInfo.device_info:type_name -> mpc.router.v1.DeviceInfo
24, // 9: mpc.router.v1.PartyInfo.device_info:type_name -> mpc.router.v1.DeviceInfo 24, // 9: mpc.router.v1.JoinSessionRequest.device_info:type_name -> mpc.router.v1.DeviceInfo
24, // 10: mpc.router.v1.JoinSessionRequest.device_info:type_name -> mpc.router.v1.DeviceInfo 26, // 10: mpc.router.v1.JoinSessionResponse.session_info:type_name -> mpc.router.v1.SessionInfo
26, // 11: mpc.router.v1.JoinSessionResponse.session_info:type_name -> mpc.router.v1.SessionInfo 25, // 11: mpc.router.v1.JoinSessionResponse.other_parties:type_name -> mpc.router.v1.PartyInfo
25, // 12: mpc.router.v1.JoinSessionResponse.other_parties:type_name -> mpc.router.v1.PartyInfo 25, // 12: mpc.router.v1.GetSessionStatusResponse.participants:type_name -> mpc.router.v1.PartyInfo
25, // 13: mpc.router.v1.GetSessionStatusResponse.participants:type_name -> mpc.router.v1.PartyInfo 0, // 13: mpc.router.v1.MessageRouter.RouteMessage:input_type -> mpc.router.v1.RouteMessageRequest
0, // 14: mpc.router.v1.MessageRouter.RouteMessage:input_type -> mpc.router.v1.RouteMessageRequest 2, // 14: mpc.router.v1.MessageRouter.SubscribeMessages:input_type -> mpc.router.v1.SubscribeMessagesRequest
2, // 15: mpc.router.v1.MessageRouter.SubscribeMessages:input_type -> mpc.router.v1.SubscribeMessagesRequest 4, // 15: mpc.router.v1.MessageRouter.GetPendingMessages:input_type -> mpc.router.v1.GetPendingMessagesRequest
4, // 16: mpc.router.v1.MessageRouter.GetPendingMessages:input_type -> mpc.router.v1.GetPendingMessagesRequest 17, // 16: mpc.router.v1.MessageRouter.AcknowledgeMessage:input_type -> mpc.router.v1.AcknowledgeMessageRequest
17, // 17: mpc.router.v1.MessageRouter.AcknowledgeMessage:input_type -> mpc.router.v1.AcknowledgeMessageRequest 19, // 17: mpc.router.v1.MessageRouter.GetMessageStatus:input_type -> mpc.router.v1.GetMessageStatusRequest
19, // 18: mpc.router.v1.MessageRouter.GetMessageStatus:input_type -> mpc.router.v1.GetMessageStatusRequest 7, // 18: mpc.router.v1.MessageRouter.RegisterParty:input_type -> mpc.router.v1.RegisterPartyRequest
7, // 19: mpc.router.v1.MessageRouter.RegisterParty:input_type -> mpc.router.v1.RegisterPartyRequest 22, // 19: mpc.router.v1.MessageRouter.Heartbeat:input_type -> mpc.router.v1.HeartbeatRequest
22, // 20: mpc.router.v1.MessageRouter.Heartbeat:input_type -> mpc.router.v1.HeartbeatRequest 9, // 20: mpc.router.v1.MessageRouter.SubscribeSessionEvents:input_type -> mpc.router.v1.SubscribeSessionEventsRequest
9, // 21: mpc.router.v1.MessageRouter.SubscribeSessionEvents:input_type -> mpc.router.v1.SubscribeSessionEventsRequest 12, // 21: mpc.router.v1.MessageRouter.PublishSessionEvent:input_type -> mpc.router.v1.PublishSessionEventRequest
12, // 22: mpc.router.v1.MessageRouter.PublishSessionEvent:input_type -> mpc.router.v1.PublishSessionEventRequest 14, // 22: mpc.router.v1.MessageRouter.GetRegisteredParties:input_type -> mpc.router.v1.GetRegisteredPartiesRequest
14, // 23: mpc.router.v1.MessageRouter.GetRegisteredParties:input_type -> mpc.router.v1.GetRegisteredPartiesRequest 27, // 23: mpc.router.v1.MessageRouter.JoinSession:input_type -> mpc.router.v1.JoinSessionRequest
27, // 24: mpc.router.v1.MessageRouter.JoinSession:input_type -> mpc.router.v1.JoinSessionRequest 29, // 24: mpc.router.v1.MessageRouter.MarkPartyReady:input_type -> mpc.router.v1.MarkPartyReadyRequest
29, // 25: mpc.router.v1.MessageRouter.MarkPartyReady:input_type -> mpc.router.v1.MarkPartyReadyRequest 31, // 25: mpc.router.v1.MessageRouter.ReportCompletion:input_type -> mpc.router.v1.ReportCompletionRequest
31, // 26: mpc.router.v1.MessageRouter.ReportCompletion:input_type -> mpc.router.v1.ReportCompletionRequest 33, // 26: mpc.router.v1.MessageRouter.GetSessionStatus:input_type -> mpc.router.v1.GetSessionStatusRequest
33, // 27: mpc.router.v1.MessageRouter.GetSessionStatus:input_type -> mpc.router.v1.GetSessionStatusRequest 35, // 27: mpc.router.v1.MessageRouter.SubmitDelegateShare:input_type -> mpc.router.v1.SubmitDelegateShareRequest
35, // 28: mpc.router.v1.MessageRouter.SubmitDelegateShare:input_type -> mpc.router.v1.SubmitDelegateShareRequest 1, // 28: mpc.router.v1.MessageRouter.RouteMessage:output_type -> mpc.router.v1.RouteMessageResponse
1, // 29: mpc.router.v1.MessageRouter.RouteMessage:output_type -> mpc.router.v1.RouteMessageResponse 3, // 29: mpc.router.v1.MessageRouter.SubscribeMessages:output_type -> mpc.router.v1.MPCMessage
3, // 30: mpc.router.v1.MessageRouter.SubscribeMessages:output_type -> mpc.router.v1.MPCMessage 5, // 30: mpc.router.v1.MessageRouter.GetPendingMessages:output_type -> mpc.router.v1.GetPendingMessagesResponse
5, // 31: mpc.router.v1.MessageRouter.GetPendingMessages:output_type -> mpc.router.v1.GetPendingMessagesResponse 18, // 31: mpc.router.v1.MessageRouter.AcknowledgeMessage:output_type -> mpc.router.v1.AcknowledgeMessageResponse
18, // 32: mpc.router.v1.MessageRouter.AcknowledgeMessage:output_type -> mpc.router.v1.AcknowledgeMessageResponse 21, // 32: mpc.router.v1.MessageRouter.GetMessageStatus:output_type -> mpc.router.v1.GetMessageStatusResponse
21, // 33: mpc.router.v1.MessageRouter.GetMessageStatus:output_type -> mpc.router.v1.GetMessageStatusResponse 8, // 33: mpc.router.v1.MessageRouter.RegisterParty:output_type -> mpc.router.v1.RegisterPartyResponse
8, // 34: mpc.router.v1.MessageRouter.RegisterParty:output_type -> mpc.router.v1.RegisterPartyResponse 23, // 34: mpc.router.v1.MessageRouter.Heartbeat:output_type -> mpc.router.v1.HeartbeatResponse
23, // 35: mpc.router.v1.MessageRouter.Heartbeat:output_type -> mpc.router.v1.HeartbeatResponse 10, // 35: mpc.router.v1.MessageRouter.SubscribeSessionEvents:output_type -> mpc.router.v1.SessionEvent
10, // 36: mpc.router.v1.MessageRouter.SubscribeSessionEvents:output_type -> mpc.router.v1.SessionEvent 13, // 36: mpc.router.v1.MessageRouter.PublishSessionEvent:output_type -> mpc.router.v1.PublishSessionEventResponse
13, // 37: mpc.router.v1.MessageRouter.PublishSessionEvent:output_type -> mpc.router.v1.PublishSessionEventResponse 16, // 37: mpc.router.v1.MessageRouter.GetRegisteredParties:output_type -> mpc.router.v1.GetRegisteredPartiesResponse
16, // 38: mpc.router.v1.MessageRouter.GetRegisteredParties:output_type -> mpc.router.v1.GetRegisteredPartiesResponse 28, // 38: mpc.router.v1.MessageRouter.JoinSession:output_type -> mpc.router.v1.JoinSessionResponse
28, // 39: mpc.router.v1.MessageRouter.JoinSession:output_type -> mpc.router.v1.JoinSessionResponse 30, // 39: mpc.router.v1.MessageRouter.MarkPartyReady:output_type -> mpc.router.v1.MarkPartyReadyResponse
30, // 40: mpc.router.v1.MessageRouter.MarkPartyReady:output_type -> mpc.router.v1.MarkPartyReadyResponse 32, // 40: mpc.router.v1.MessageRouter.ReportCompletion:output_type -> mpc.router.v1.ReportCompletionResponse
32, // 41: mpc.router.v1.MessageRouter.ReportCompletion:output_type -> mpc.router.v1.ReportCompletionResponse 34, // 41: mpc.router.v1.MessageRouter.GetSessionStatus:output_type -> mpc.router.v1.GetSessionStatusResponse
34, // 42: mpc.router.v1.MessageRouter.GetSessionStatus:output_type -> mpc.router.v1.GetSessionStatusResponse 36, // 42: mpc.router.v1.MessageRouter.SubmitDelegateShare:output_type -> mpc.router.v1.SubmitDelegateShareResponse
36, // 43: mpc.router.v1.MessageRouter.SubmitDelegateShare:output_type -> mpc.router.v1.SubmitDelegateShareResponse 28, // [28:43] is the sub-list for method output_type
29, // [29:44] is the sub-list for method output_type 13, // [13:28] is the sub-list for method input_type
14, // [14:29] is the sub-list for method input_type 13, // [13:13] is the sub-list for extension type_name
14, // [14:14] is the sub-list for extension type_name 13, // [13:13] is the sub-list for extension extendee
14, // [14:14] is the sub-list for extension extendee 0, // [0:13] is the sub-list for field type_name
0, // [0:14] is the sub-list for field type_name
}
func init() { file_api_proto_message_router_proto_init() }

View File

@ -166,9 +166,6 @@ message SessionEvent {
int64 expires_at = 10; // Unix timestamp milliseconds
// For sign sessions with delegate party: user's share for delegate to use
DelegateUserShare delegate_user_share = 11;
- // For session_started event: complete list of participants with their indices
- // CRITICAL: Use this for TSS protocol instead of JoinSession response
- repeated PartyInfo participants = 12;
}
// DelegateUserShare contains user's share for delegate party to use in signing
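The removed field and its CRITICAL comment describe how subscribers were expected to consume it: take the complete participant list (with authoritative party indices) from the session_started event rather than from the earlier JoinSession response. A small Go sketch of that consumption pattern is shown below, with illustrative type names standing in for the generated ones; it mirrors the fallback logic that is also removed from the co-managed party further down in this compare.

```go
package sessionevents

// Illustrative stand-ins for the generated protobuf types referenced above.
type PartyInfo struct {
	PartyId    string
	PartyIndex int32
}

type SessionEvent struct {
	SessionId    string
	Participants []*PartyInfo // field 12, removed on the new side of this compare
}

// ParticipantInfo mirrors the use_cases type used by the co-managed party.
type ParticipantInfo struct {
	PartyID    string
	PartyIndex int
}

// resolveParticipants prefers the complete list delivered with session_started
// and only falls back to the snapshot cached from the earlier JoinSession call,
// which may be missing parties that joined later.
func resolveParticipants(event *SessionEvent, cached []ParticipantInfo) []ParticipantInfo {
	if len(event.Participants) == 0 {
		return cached
	}
	out := make([]ParticipantInfo, len(event.Participants))
	for i, p := range event.Participants {
		out[i] = ParticipantInfo{PartyID: p.PartyId, PartyIndex: int(p.PartyIndex)}
	}
	return out
}
```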

View File

@ -32,11 +32,9 @@ type PendingSession struct {
SessionID uuid.UUID
JoinToken string
MessageHash []byte
- KeygenSessionID uuid.UUID // For sign sessions: the keygen session that created the keys
ThresholdN int
ThresholdT int
SelectedParties []string
- Participants []use_cases.ParticipantInfo // CRITICAL: Correct PartyIndex from database (via JoinSession)
CreatedAt time.Time
}
@ -151,14 +149,6 @@ func main() {
cryptoService,
)
- // Initialize signing use case (for co-managed sign sessions)
- participateSigningUC := use_cases.NewParticipateSigningUseCase(
- keyShareRepo,
- messageRouter,
- messageRouter,
- cryptoService,
- )
// Create shutdown context
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
@ -196,15 +186,14 @@ func main() {
defer heartbeatCancel()
logger.Info("Heartbeat started", zap.String("party_id", partyID), zap.Duration("interval", 30*time.Second))
- // Subscribe to session events with two-phase handling for co_managed_keygen and co_managed_sign
- logger.Info("Subscribing to session events (co_managed_keygen and co_managed_sign)", zap.String("party_id", partyID))
+ // Subscribe to session events with two-phase handling for co_managed_keygen
+ logger.Info("Subscribing to session events (co_managed_keygen only)", zap.String("party_id", partyID))
eventHandler := createCoManagedSessionEventHandler(
ctx,
partyID,
messageRouter,
participateKeygenUC,
- participateSigningUC,
)
if err := messageRouter.SubscribeSessionEvents(ctx, partyID, eventHandler); err != nil {
@ -317,17 +306,15 @@ func startHTTPServer(cfg *config.Config) error {
return r.Run(fmt.Sprintf(":%d", cfg.Server.HTTPPort))
}
- // createCoManagedSessionEventHandler creates a handler for co_managed_keygen and co_managed_sign sessions
+ // createCoManagedSessionEventHandler creates a handler specifically for co_managed_keygen sessions
// Two-phase event handling:
// Phase 1 (session_created): JoinSession immediately + store session info
// Phase 2 (session_started): Execute TSS protocol (same timing as user clients receiving all_joined)
- // Supports both keygen (no message_hash) and sign (with message_hash) sessions
func createCoManagedSessionEventHandler(
ctx context.Context,
partyID string,
messageRouter *grpcclient.MessageRouterClient,
participateKeygenUC *use_cases.ParticipateKeygenUseCase,
- participateSigningUC *use_cases.ParticipateSigningUseCase,
) func(*router.SessionEvent) {
return func(event *router.SessionEvent) {
// Check if this party is selected for the session
@ -361,27 +348,12 @@ func createCoManagedSessionEventHandler(
// Handle different event types
switch event.EventType {
case "session_created":
- // Handle both keygen (no message_hash) and sign (with message_hash) sessions
- // For sign sessions: only support 2-of-3 configuration
+ // Only handle keygen sessions (no message_hash)
if len(event.MessageHash) > 0 {
- // This is a sign session
- // Security check: only support 2-of-3 configuration
- if event.ThresholdT != 2 || event.ThresholdN != 3 {
- logger.Warn("Ignoring sign session: only 2-of-3 configuration is supported",
- zap.String("session_id", event.SessionId),
- zap.Int32("threshold_t", event.ThresholdT),
- zap.Int32("threshold_n", event.ThresholdN))
+ logger.Debug("Ignoring sign session (co-managed only handles keygen)",
+ zap.String("session_id", event.SessionId))
return
}
- logger.Info("Sign session detected (2-of-3), proceeding with participation",
- zap.String("session_id", event.SessionId),
- zap.String("party_id", partyID))
- } else {
- // This is a keygen session
- logger.Info("Keygen session detected, proceeding with participation",
- zap.String("session_id", event.SessionId),
- zap.String("party_id", partyID))
- }
// Phase 1: Get join token // Phase 1: Get join token
joinToken, exists := event.JoinTokens[partyID] joinToken, exists := event.JoinTokens[partyID]
@ -394,7 +366,7 @@ func createCoManagedSessionEventHandler(
// Immediately call JoinSession (this is required to trigger session_started)
joinCtx, joinCancel := context.WithTimeout(ctx, 30*time.Second)
- sessionInfo, err := messageRouter.JoinSession(joinCtx, sessionID, partyID, joinToken)
+ _, err := messageRouter.JoinSession(joinCtx, sessionID, partyID, joinToken)
joinCancel()
if err != nil {
logger.Error("Failed to join session",
@ -406,19 +378,16 @@ func createCoManagedSessionEventHandler(
logger.Info("Successfully joined session, waiting for session_started", logger.Info("Successfully joined session, waiting for session_started",
zap.String("session_id", event.SessionId), zap.String("session_id", event.SessionId),
zap.String("party_id", partyID), zap.String("party_id", partyID))
zap.String("keygen_session_id", sessionInfo.KeygenSessionID.String()))
// Store pending session for later use when session_started arrives // Store pending session for later use when session_started arrives
pendingSessionCache.Store(event.SessionId, &PendingSession{ pendingSessionCache.Store(event.SessionId, &PendingSession{
SessionID: sessionID, SessionID: sessionID,
JoinToken: joinToken, JoinToken: joinToken,
MessageHash: event.MessageHash, MessageHash: event.MessageHash,
KeygenSessionID: sessionInfo.KeygenSessionID, // CRITICAL: Save the correct keygen session ID from JoinSession
ThresholdN: int(event.ThresholdN), ThresholdN: int(event.ThresholdN),
ThresholdT: int(event.ThresholdT), ThresholdT: int(event.ThresholdT),
SelectedParties: event.SelectedParties, SelectedParties: event.SelectedParties,
Participants: sessionInfo.Participants, // CRITICAL: Save participants with correct PartyIndex from database
CreatedAt: time.Now(), CreatedAt: time.Now(),
}) })
@ -432,90 +401,34 @@ func createCoManagedSessionEventHandler(
return
}
- // CRITICAL FIX: Use participants from session_started event, NOT from JoinSession cache
- // The JoinSession response only contains parties that had joined at that moment,
- // but session_started event contains the COMPLETE list of all participants
- var participants []use_cases.ParticipantInfo
- if len(event.Participants) > 0 {
- // Use participants from event (preferred - complete list)
- participants = make([]use_cases.ParticipantInfo, len(event.Participants))
- for i, p := range event.Participants {
- participants[i] = use_cases.ParticipantInfo{
- PartyID: p.PartyId,
- PartyIndex: int(p.PartyIndex),
- }
- }
- logger.Info("Using participants from session_started event",
- zap.String("session_id", event.SessionId),
- zap.Int("participant_count", len(participants)))
- } else {
- // Fallback to cached participants (for backward compatibility)
- participants = pendingSession.Participants
- logger.Warn("No participants in session_started event, using cached participants",
- zap.String("session_id", event.SessionId),
- zap.Int("participant_count", len(participants)))
- }
- // Determine session type based on message_hash
- isSignSession := len(pendingSession.MessageHash) > 0
- if isSignSession {
- logger.Info("Session started event received, beginning TSS signing protocol",
- zap.String("session_id", event.SessionId),
- zap.String("party_id", partyID),
- zap.Int("participant_count", len(participants)))
- } else {
logger.Info("Session started event received, beginning TSS keygen protocol",
zap.String("session_id", event.SessionId),
- zap.String("party_id", partyID),
- zap.Int("participant_count", len(participants)))
- }
- // Execute TSS protocol in goroutine
+ zap.String("party_id", partyID))
+ // Execute TSS keygen protocol in goroutine
// Timeout starts NOW (when session_started is received), not at session_created
go func() {
// 10 minute timeout for TSS protocol execution
participateCtx, cancel := context.WithTimeout(ctx, 10*time.Minute)
defer cancel()
- if isSignSession {
- // Execute signing protocol
- logger.Info("Auto-participating in co_managed_sign session",
- zap.String("session_id", event.SessionId),
- zap.String("party_id", partyID),
- zap.String("keygen_session_id", pendingSession.KeygenSessionID.String()))
- sessionInfo := &use_cases.SessionInfo{
- SessionID: pendingSession.SessionID,
- SessionType: "co_managed_sign",
- ThresholdN: int(event.ThresholdN),
- ThresholdT: int(event.ThresholdT),
- MessageHash: pendingSession.MessageHash,
- KeygenSessionID: pendingSession.KeygenSessionID, // CRITICAL: Use the correct keygen session ID from JoinSession
- Participants: participants,
- }
- result, err := participateSigningUC.ExecuteWithSessionInfo(
- participateCtx,
- pendingSession.SessionID,
- partyID,
- sessionInfo,
- )
- if err != nil {
- logger.Error("Co-managed signing participation failed",
- zap.Error(err),
- zap.String("session_id", event.SessionId))
- } else {
- logger.Info("Co-managed signing participation completed",
- zap.String("session_id", event.SessionId),
- zap.String("signature", hex.EncodeToString(result.Signature)))
- }
- } else {
- // Execute keygen protocol
logger.Info("Auto-participating in co_managed_keygen session",
zap.String("session_id", event.SessionId),
zap.String("party_id", partyID))
+ // Build SessionInfo from session_started event (NOT from pendingSession cache)
+ // session_started event contains ALL participants who have joined,
+ // including external parties that joined dynamically after session_created
+ // Note: We already called JoinSession in session_created phase,
+ // so we use ExecuteWithSessionInfo to skip the duplicate JoinSession call
+ participants := make([]use_cases.ParticipantInfo, len(event.SelectedParties))
+ for i, p := range event.SelectedParties {
+ participants[i] = use_cases.ParticipantInfo{
+ PartyID: p,
+ PartyIndex: i,
+ }
+ }
sessionInfo := &use_cases.SessionInfo{
SessionID: pendingSession.SessionID,
SessionType: "co_managed_keygen",
@ -540,7 +453,6 @@ func createCoManagedSessionEventHandler(
zap.String("session_id", event.SessionId), zap.String("session_id", event.SessionId),
zap.String("public_key", hex.EncodeToString(result.PublicKey))) zap.String("public_key", hex.EncodeToString(result.PublicKey)))
} }
}
}() }()
default: default:
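For reference, the two-phase handling described in the comments above (join and cache on session_created, run the protocol when session_started arrives) can be condensed into the outline below; joinSession and runKeygen are placeholders for the real message-router client and keygen use case, so this is a sketch of the control flow, not the actual implementation.

```go
package twophase

import (
	"context"
	"sync"
	"time"
)

// pending is the minimal per-session state kept between the two phases.
type pending struct {
	joinToken string
	createdAt time.Time
}

// Event carries only the fields needed for this outline.
type Event struct {
	EventType string
	SessionId string
	JoinToken string
}

// newHandler returns a session-event handler that joins on session_created and
// runs the keygen protocol on session_started, mirroring the flow above.
func newHandler(ctx context.Context, cache *sync.Map,
	joinSession func(context.Context, string, string) error,
	runKeygen func(context.Context, string) error,
) func(Event) {
	return func(ev Event) {
		switch ev.EventType {
		case "session_created":
			// Phase 1: join immediately so the coordinator can emit session_started.
			joinCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
			err := joinSession(joinCtx, ev.SessionId, ev.JoinToken)
			cancel()
			if err != nil {
				return
			}
			cache.Store(ev.SessionId, &pending{joinToken: ev.JoinToken, createdAt: time.Now()})
		case "session_started":
			if _, ok := cache.Load(ev.SessionId); !ok {
				return // we never joined this session
			}
			// Phase 2: run the TSS protocol; the timeout starts now, not at creation.
			go func() {
				runCtx, cancel := context.WithTimeout(ctx, 10*time.Minute)
				defer cancel()
				_ = runKeygen(runCtx, ev.SessionId)
			}()
		}
	}
}
```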

View File

@ -63,30 +63,6 @@ func NewParticipateSigningUseCase(
}
}
// ExecuteWithSessionInfo participates in a signing session with pre-obtained SessionInfo
// This is used by server-party-co-managed which has already called JoinSession in session_created phase
// and receives session_started event when all participants have joined
func (uc *ParticipateSigningUseCase) ExecuteWithSessionInfo(
ctx context.Context,
sessionID uuid.UUID,
partyID string,
sessionInfo *SessionInfo,
) (*ParticipateSigningOutput, error) {
// Validate session type
if sessionInfo.SessionType != "sign" && sessionInfo.SessionType != "co_managed_sign" {
return nil, ErrInvalidSignSession
}
logger.Info("ExecuteWithSessionInfo: starting signing with pre-obtained session info",
zap.String("session_id", sessionID.String()),
zap.String("party_id", partyID),
zap.String("session_type", sessionInfo.SessionType),
zap.Int("participants", len(sessionInfo.Participants)))
// Delegate to the common execution logic (skipping JoinSession)
return uc.executeWithSessionInfo(ctx, sessionID, partyID, sessionInfo)
}
// Execute participates in a signing session using real TSS protocol
func (uc *ParticipateSigningUseCase) Execute(
ctx context.Context,
@ -235,123 +211,6 @@ func (uc *ParticipateSigningUseCase) Execute(
}, nil
}
// executeWithSessionInfo is the internal logic for ExecuteWithSessionInfo (persistent party only)
func (uc *ParticipateSigningUseCase) executeWithSessionInfo(
ctx context.Context,
sessionID uuid.UUID,
partyID string,
sessionInfo *SessionInfo,
) (*ParticipateSigningOutput, error) {
// Get share data from database (persistent party only - used by server-party-co-managed)
var shareData []byte
var keyShareForUpdate *entities.PartyKeyShare
var originalThresholdN int
var err error
// Load from database using KeygenSessionID
if sessionInfo.KeygenSessionID != uuid.Nil {
keyShareForUpdate, err = uc.keyShareRepo.FindBySessionAndParty(ctx, sessionInfo.KeygenSessionID, partyID)
if err != nil {
logger.Error("Failed to find keyshare for keygen session",
zap.String("party_id", partyID),
zap.String("keygen_session_id", sessionInfo.KeygenSessionID.String()),
zap.Error(err))
return nil, ErrKeyShareNotFound
}
logger.Info("Using specific keyshare by keygen_session_id",
zap.String("party_id", partyID),
zap.String("keygen_session_id", sessionInfo.KeygenSessionID.String()))
} else {
// Fallback: use the most recent key share
keyShares, err := uc.keyShareRepo.ListByParty(ctx, partyID)
if err != nil || len(keyShares) == 0 {
return nil, ErrKeyShareNotFound
}
keyShareForUpdate = keyShares[len(keyShares)-1]
logger.Warn("Using most recent keyshare (keygen_session_id not provided)",
zap.String("party_id", partyID),
zap.String("fallback_session_id", keyShareForUpdate.SessionID.String()))
}
originalThresholdN = keyShareForUpdate.ThresholdN
shareData, err = uc.cryptoService.DecryptShare(keyShareForUpdate.ShareData, partyID)
if err != nil {
return nil, err
}
logger.Info("Using database share (persistent party)",
zap.String("party_id", partyID),
zap.String("session_id", sessionID.String()),
zap.String("keygen_session_id", keyShareForUpdate.SessionID.String()),
zap.Int("original_threshold_n", originalThresholdN),
zap.Int("threshold_t", keyShareForUpdate.ThresholdT))
// Find self in participants and build party index map
var selfIndex int
partyIndexMap := make(map[string]int)
for _, p := range sessionInfo.Participants {
partyIndexMap[p.PartyID] = p.PartyIndex
if p.PartyID == partyID {
selfIndex = p.PartyIndex
}
}
// Subscribe to messages
msgChan, err := uc.messageRouter.SubscribeMessages(ctx, sessionID, partyID)
if err != nil {
return nil, err
}
// Wait for all parties to subscribe
expectedParties := len(sessionInfo.Participants)
logger.Info("Waiting for all parties to subscribe",
zap.String("session_id", sessionID.String()),
zap.String("party_id", partyID),
zap.Int("expected_parties", expectedParties))
time.Sleep(500 * time.Millisecond)
messageHash := sessionInfo.MessageHash
// Run TSS Signing protocol
signature, r, s, err := uc.runSigningProtocol(
ctx,
sessionID,
partyID,
selfIndex,
sessionInfo.Participants,
sessionInfo.ThresholdT,
originalThresholdN,
shareData,
messageHash,
msgChan,
partyIndexMap,
)
if err != nil {
return nil, err
}
// Update key share last used
if keyShareForUpdate != nil {
keyShareForUpdate.MarkUsed()
if err := uc.keyShareRepo.Update(ctx, keyShareForUpdate); err != nil {
logger.Warn("failed to update key share last used", zap.Error(err))
}
}
// Report completion to coordinator
if err := uc.sessionClient.ReportCompletion(ctx, sessionID, partyID, signature); err != nil {
logger.Error("failed to report signing completion", zap.Error(err))
}
return &ParticipateSigningOutput{
Success: true,
Signature: signature,
R: r,
S: s,
}, nil
}
// runSigningProtocol runs the TSS signing protocol using tss-lib
func (uc *ParticipateSigningUseCase) runSigningProtocol(
ctx context.Context,
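The removed executeWithSessionInfo builds a party-index map and locates the local party's index before running the signing rounds. Here is a minimal Go sketch of just that bookkeeping step; ParticipantInfo mirrors the use-case type shown above, and the ok flag is an added convenience not present in the original.

```go
package signing

// ParticipantInfo mirrors the use-case type shown in the removed code above.
type ParticipantInfo struct {
	PartyID    string
	PartyIndex int
}

// indexParticipants builds the party-ID -> index map and finds the local
// party's index, the bookkeeping the removed executeWithSessionInfo performed
// before starting the signing rounds. The ok result is an added convenience.
func indexParticipants(participants []ParticipantInfo, selfID string) (selfIndex int, byID map[string]int, ok bool) {
	byID = make(map[string]int, len(participants))
	selfIndex = -1
	for _, p := range participants {
		byID[p.PartyID] = p.PartyIndex
		if p.PartyID == selfID {
			selfIndex = p.PartyIndex
		}
	}
	return selfIndex, byID, selfIndex >= 0
}
```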

View File

@ -1,249 +0,0 @@
# 2-of-3 Server Participation Option - Purely Additive Implementation Plan

## Goal

Allow 2-of-3 MPC users to tick "include server backup" when signing, so that assets can still be transferred out after one device is lost.

## Core design

### Security restriction

- The option is shown **only** for the 2-of-3 configuration
- Other configurations (3-of-5, 4-of-7, etc.) never show it

### Scope

- ✅ Android client changes only
- ❌ **No** backend changes required (account-service, message-router)
- ✅ Purely additive code; existing logic stays unchanged

## Files to change

### 1. TssRepository.kt (2 additions)

#### 1.1 New helper method (private)

```kotlin
// Location: before line 3712 (end of the class body)
/**
 * Build the signing participant list (new helper method)
 * @param participants all participants
 * @param includeServerParties whether to include server parties (default false keeps the existing behavior)
 */
private fun buildSigningParticipantList(
    participants: List<ParticipantStatusInfo>,
    includeServerParties: Boolean = false
): List<Pair<String, Int>> {
    val filtered = if (includeServerParties) {
        // Include all participants (server parties too)
        participants
    } else {
        // Filter out server parties (existing behavior)
        participants.filter { !it.partyId.startsWith("co-managed-party-") }
    }
    return filtered.map { Pair(it.partyId, it.partyIndex) }
}
```

#### 1.2 New sign-session creation method

```kotlin
// Location: after buildSigningParticipantList
/**
 * Create a sign session (optionally including the server party)
 * @param includeServerBackup whether to include the server backup party (only used for 2-of-3)
 * New method; does not affect the existing createSignSession
 */
suspend fun createSignSessionWithOptions(
    shareId: Long,
    messageHash: String,
    password: String,
    initiatorName: String,
    includeServerBackup: Boolean = false // new parameter
): Result<SignSessionResult> {
    return withContext(Dispatchers.IO) {
        try {
            val shareEntity = shareRecordDao.getShareById(shareId)
                ?: return@withContext Result.failure(Exception("Share not found"))
            val signingPartyIdForEvents = shareEntity.partyId
            android.util.Log.d("TssRepository", "[CO-SIGN-OPTIONS] Creating sign session with includeServerBackup=$includeServerBackup")
            ensureSessionEventSubscriptionActive(signingPartyIdForEvents)
            val keygenStatusResult = getSessionStatus(shareEntity.sessionId)
            if (keygenStatusResult.isFailure) {
                return@withContext Result.failure(Exception("无法获取 keygen 会话的参与者信息: ${keygenStatusResult.exceptionOrNull()?.message}"))
            }
            val keygenStatus = keygenStatusResult.getOrThrow()
            // Build the participant list with the new helper method
            val signingParties = buildSigningParticipantList(
                keygenStatus.participants,
                includeServerBackup
            )
            android.util.Log.d("TssRepository", "[CO-SIGN-OPTIONS] Signing parties: ${signingParties.size} of ${keygenStatus.participants.size} (includeServer=$includeServerBackup)")
            signingParties.forEach { (id, index) ->
                android.util.Log.d("TssRepository", "[CO-SIGN-OPTIONS] party_id=${id.take(16)}, party_index=$index")
            }
            if (signingParties.size < shareEntity.thresholdT) {
                return@withContext Result.failure(Exception(
                    "签名参与方不足: 需要 ${shareEntity.thresholdT} 个,但只有 ${signingParties.size} 个参与方"
                ))
            }
            // The rest of the logic is the same as createSignSession
            // ... build the request, create the session, join via gRPC, etc.
            // (reuse the existing createSignSession code)
            // Call the internal logic of the existing method (needs to be extracted)
            createSignSessionInternal(
                shareEntity,
                signingParties,
                messageHash,
                password,
                initiatorName
            )
        } catch (e: Exception) {
            Result.failure(e)
        }
    }
}
```

### 2. MainViewModel.kt (1 addition)

```kotlin
// Location: after the initiateSignSession method
/**
 * Create a sign session (optionally with server participation)
 * New method; does not affect the existing initiateSignSession
 */
fun initiateSignSessionWithOptions(
    shareId: Long,
    password: String,
    initiatorName: String = "发起者",
    includeServerBackup: Boolean = false // new parameter
) {
    viewModelScope.launch {
        _uiState.update { it.copy(isLoading = true, error = null) }
        val tx = _preparedTx.value
        if (tx == null) {
            _uiState.update { it.copy(isLoading = false, error = "交易未准备") }
            return@launch
        }
        android.util.Log.d("MainViewModel", "[SIGN-OPTIONS] Initiating sign session with includeServerBackup=$includeServerBackup")
        val result = repository.createSignSessionWithOptions(
            shareId = shareId,
            messageHash = tx.signHash,
            password = password,
            initiatorName = initiatorName,
            includeServerBackup = includeServerBackup // pass the parameter through
        )
        result.fold(
            onSuccess = { sessionResult ->
                _signSessionId.value = sessionResult.sessionId
                _signInviteCode.value = sessionResult.inviteCode
                _signParticipants.value = listOf(initiatorName)
                _uiState.update { it.copy(isLoading = false) }
                pendingSignInitiatorInfo = PendingSignInitiatorInfo(
                    sessionId = sessionResult.sessionId,
                    shareId = shareId,
                    password = password
                )
                if (sessionResult.sessionAlreadyInProgress) {
                    startSigningProcess(sessionResult.sessionId, shareId, password)
                }
            },
            onFailure = { e ->
                _uiState.update { it.copy(isLoading = false, error = e.message) }
            }
        )
    }
}
```

### 3. TransferScreen.kt (UI addition)

```kotlin
// Add a checkbox on the transaction confirmation screen (Step 2)
// Location: after the password input field
// Only shown for 2-of-3
if (wallet.thresholdT == 2 && wallet.thresholdN == 3) {
    Spacer(modifier = Modifier.height(16.dp))
    var includeServerBackup by remember { mutableStateOf(false) }
    Row(
        modifier = Modifier
            .fillMaxWidth()
            .padding(horizontal = 16.dp),
        verticalAlignment = Alignment.CenterVertically
    ) {
        Checkbox(
            checked = includeServerBackup,
            onCheckedChange = { includeServerBackup = it }
        )
        Spacer(modifier = Modifier.width(8.dp))
        Column {
            Text(
                text = "包含服务器备份参与签名",
                style = MaterialTheme.typography.bodyMedium
            )
            Text(
                text = "如果您丢失了一个设备,勾选此项以使用服务器备份完成签名",
                style = MaterialTheme.typography.bodySmall,
                color = MaterialTheme.colorScheme.onSurfaceVariant
            )
        }
    }
}
```

### 4. MainActivity.kt (pass the parameter)

```kotlin
// Modify TransferScreen's onConfirmTransaction callback
onConfirmTransaction = { includeServer ->
    viewModel.initiateSignSessionWithOptions(
        shareId = shareId,
        password = "",
        includeServerBackup = includeServer
    )
}
```

## Test scenarios

### Scenario 1: normal 2-of-3 use (box unchecked)

- Device A + Device B sign ✅
- The server party is filtered out (existing behavior)

### Scenario 2: 2-of-3 with a lost device (box checked)

- Device A + server sign ✅
- The user explicitly ticks "include server backup"

### Scenario 3: 3-of-5 configuration

- The checkbox is not shown ✅
- Existing behavior is preserved

## Advantages

1. ✅ **Zero backend changes**: the backend just receives a parties array
2. ✅ **Fully backward compatible**: default behavior is unchanged
3. ✅ **Security restriction**: only available for 2-of-3
4. ✅ **Purely additive**: no existing methods are modified
5. ✅ **Explicit user choice**: the box must be ticked deliberately

## Implementation order

1. TssRepository: add the helper method
2. TssRepository: add createSignSessionWithOptions
3. MainViewModel: add initiateSignSessionWithOptions
4. TransferScreen: add the UI checkbox
5. MainActivity: pass the parameter through
6. Test compilation and functionality
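The deleted plan above is Android/Kotlin-side. For comparison with the Go services in this compare, here is a hedged restatement of its participant-selection rule in Go: server parties (IDs prefixed with "co-managed-party-") are filtered out unless the user opted in, and per the plan the opt-in only applies to the 2-of-3 configuration. The 2-of-3 gate lives in the UI in the original; it is folded into the function here purely for illustration.

```go
package signingpolicy

import "strings"

// Participant is a minimal stand-in for the Kotlin ParticipantStatusInfo.
type Participant struct {
	PartyID    string
	PartyIndex int
}

// selectSigningParties restates the plan's rule: drop server-held parties
// (IDs prefixed "co-managed-party-") unless the user opted in, and honour the
// opt-in only for the 2-of-3 configuration. The original keeps the 2-of-3
// check in the UI; it is folded in here purely for illustration.
func selectSigningParties(all []Participant, thresholdT, thresholdN int, includeServerBackup bool) []Participant {
	allowServer := includeServerBackup && thresholdT == 2 && thresholdN == 3
	out := make([]Participant, 0, len(all))
	for _, p := range all {
		if !allowServer && strings.HasPrefix(p.PartyID, "co-managed-party-") {
			continue
		}
		out = append(out, p)
	}
	return out
}
```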

View File

@ -39,9 +39,8 @@ android {
}
// NDK configuration for TSS native library
- // Only include ARM ABIs for real devices (x86_64 is for emulators only)
ndk {
- abiFilters += listOf("arm64-v8a", "armeabi-v7a")
+ abiFilters += listOf("arm64-v8a", "armeabi-v7a", "x86_64")
}
}

View File

@ -13,7 +13,6 @@ import androidx.activity.result.contract.ActivityResultContracts
import androidx.compose.foundation.layout.*
import androidx.compose.material3.*
import androidx.compose.runtime.*
- import androidx.compose.runtime.saveable.rememberSaveable
import androidx.compose.ui.Modifier
import androidx.compose.ui.platform.LocalContext
import androidx.hilt.navigation.compose.hiltViewModel
@ -77,7 +76,6 @@ fun TssPartyApp(
val currentSessionId by viewModel.currentSessionId.collectAsState()
val sessionParticipants by viewModel.sessionParticipants.collectAsState()
val currentRound by viewModel.currentRound.collectAsState()
- val totalRounds by viewModel.totalRounds.collectAsState()
val publicKey by viewModel.publicKey.collectAsState()
val hasEnteredSession by viewModel.hasEnteredSession.collectAsState()
@ -111,111 +109,69 @@ fun TssPartyApp(
val exportResult by viewModel.exportResult.collectAsState()
val importResult by viewModel.importResult.collectAsState()
- // Transaction history state
- val transactionRecords by viewModel.transactionRecords.collectAsState()
- val isSyncingHistory by viewModel.isSyncingHistory.collectAsState()
- val syncResultMessage by viewModel.syncResultMessage.collectAsState()
// Current transfer wallet
var transferWalletId by remember { mutableStateOf<Long?>(null) }
// Export/Import file handling
val context = LocalContext.current
- // Use rememberSaveable to persist across configuration changes (e.g., file picker activity)
- var pendingExportJson by rememberSaveable { mutableStateOf<String?>(null) }
- var pendingExportAddress by rememberSaveable { mutableStateOf<String?>(null) }
+ var pendingExportJson by remember { mutableStateOf<String?>(null) }
+ var pendingExportAddress by remember { mutableStateOf<String?>(null) }
// File picker for saving backup
val createDocumentLauncher = rememberLauncherForActivityResult(
contract = ActivityResultContracts.CreateDocument(ShareBackup.MIME_TYPE)
) { uri: Uri? ->
android.util.Log.d("MainActivity", "[EXPORT-FILE] ========== createDocumentLauncher callback ==========")
android.util.Log.d("MainActivity", "[EXPORT-FILE] uri: $uri")
android.util.Log.d("MainActivity", "[EXPORT-FILE] pendingExportJson isNull: ${pendingExportJson == null}")
android.util.Log.d("MainActivity", "[EXPORT-FILE] pendingExportJson length: ${pendingExportJson?.length ?: 0}")
uri?.let { targetUri -> uri?.let { targetUri ->
pendingExportJson?.let { json -> pendingExportJson?.let { json ->
try { try {
android.util.Log.d("MainActivity", "[EXPORT-FILE] Opening output stream to: $targetUri")
context.contentResolver.openOutputStream(targetUri)?.use { outputStream -> context.contentResolver.openOutputStream(targetUri)?.use { outputStream ->
android.util.Log.d("MainActivity", "[EXPORT-FILE] Writing ${json.length} bytes...")
outputStream.write(json.toByteArray(Charsets.UTF_8)) outputStream.write(json.toByteArray(Charsets.UTF_8))
android.util.Log.d("MainActivity", "[EXPORT-FILE] Write completed")
} }
android.util.Log.d("MainActivity", "[EXPORT-FILE] File saved successfully!")
Toast.makeText(context, "备份文件已保存", Toast.LENGTH_SHORT).show() Toast.makeText(context, "备份文件已保存", Toast.LENGTH_SHORT).show()
} catch (e: Exception) { } catch (e: Exception) {
android.util.Log.e("MainActivity", "[EXPORT-FILE] Failed to save file: ${e.message}", e)
Toast.makeText(context, "保存失败: ${e.message}", Toast.LENGTH_LONG).show() Toast.makeText(context, "保存失败: ${e.message}", Toast.LENGTH_LONG).show()
} }
android.util.Log.d("MainActivity", "[EXPORT-FILE] Clearing pendingExportJson and pendingExportAddress")
pendingExportJson = null pendingExportJson = null
pendingExportAddress = null pendingExportAddress = null
} ?: run {
android.util.Log.w("MainActivity", "[EXPORT-FILE] pendingExportJson is null, nothing to write!")
} }
} ?: run {
android.util.Log.w("MainActivity", "[EXPORT-FILE] User cancelled file picker (uri is null)")
} }
android.util.Log.d("MainActivity", "[EXPORT-FILE] ========== callback finished ==========")
} }
// File picker for importing backup // File picker for importing backup
val openDocumentLauncher = rememberLauncherForActivityResult( val openDocumentLauncher = rememberLauncherForActivityResult(
contract = ActivityResultContracts.OpenDocument() contract = ActivityResultContracts.OpenDocument()
) { uri: Uri? -> ) { uri: Uri? ->
android.util.Log.d("MainActivity", "[IMPORT-FILE] ========== openDocumentLauncher callback ==========")
android.util.Log.d("MainActivity", "[IMPORT-FILE] uri: $uri")
uri?.let { sourceUri -> uri?.let { sourceUri ->
try { try {
android.util.Log.d("MainActivity", "[IMPORT-FILE] Opening input stream from: $sourceUri")
context.contentResolver.openInputStream(sourceUri)?.use { inputStream -> context.contentResolver.openInputStream(sourceUri)?.use { inputStream ->
val json = inputStream.bufferedReader().readText() val json = inputStream.bufferedReader().readText()
android.util.Log.d("MainActivity", "[IMPORT-FILE] Read ${json.length} bytes")
android.util.Log.d("MainActivity", "[IMPORT-FILE] JSON preview: ${json.take(100)}...")
android.util.Log.d("MainActivity", "[IMPORT-FILE] Calling viewModel.importShareBackup...")
viewModel.importShareBackup(json) viewModel.importShareBackup(json)
android.util.Log.d("MainActivity", "[IMPORT-FILE] viewModel.importShareBackup called")
} }
} catch (e: Exception) { } catch (e: Exception) {
android.util.Log.e("MainActivity", "[IMPORT-FILE] Failed to read file: ${e.message}", e)
Toast.makeText(context, "读取文件失败: ${e.message}", Toast.LENGTH_LONG).show() Toast.makeText(context, "读取文件失败: ${e.message}", Toast.LENGTH_LONG).show()
} }
} ?: run {
android.util.Log.w("MainActivity", "[IMPORT-FILE] User cancelled file picker (uri is null)")
} }
android.util.Log.d("MainActivity", "[IMPORT-FILE] ========== callback finished ==========")
} }
// Handle export result - trigger file save dialog // Handle export result - trigger file save dialog
LaunchedEffect(pendingExportJson) { LaunchedEffect(pendingExportJson) {
android.util.Log.d("MainActivity", "[EXPORT-EFFECT] LaunchedEffect(pendingExportJson) triggered")
android.util.Log.d("MainActivity", "[EXPORT-EFFECT] pendingExportJson isNull: ${pendingExportJson == null}")
android.util.Log.d("MainActivity", "[EXPORT-EFFECT] pendingExportJson length: ${pendingExportJson?.length ?: 0}")
pendingExportJson?.let { json -> pendingExportJson?.let { json ->
val timestamp = SimpleDateFormat("yyyyMMdd_HHmmss", Locale.getDefault()).format(Date()) val timestamp = SimpleDateFormat("yyyyMMdd_HHmmss", Locale.getDefault()).format(Date())
val addressSuffix = pendingExportAddress?.take(8) ?: "wallet" val addressSuffix = pendingExportAddress?.take(8) ?: "wallet"
val fileName = "tss_backup_${addressSuffix}_$timestamp.${ShareBackup.FILE_EXTENSION}" val fileName = "tss_backup_${addressSuffix}_$timestamp.${ShareBackup.FILE_EXTENSION}"
android.util.Log.d("MainActivity", "[EXPORT-EFFECT] Launching file picker with filename: $fileName")
createDocumentLauncher.launch(fileName) createDocumentLauncher.launch(fileName)
android.util.Log.d("MainActivity", "[EXPORT-EFFECT] File picker launched")
} }
} }
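The export path above is intentionally two-step: the ViewModel serialises the share into pendingExportJson, the LaunchedEffect reacts to that state by launching the SAF CreateDocument picker with a suggested filename, and the picker callback streams the bytes through the ContentResolver before clearing the pending state. A minimal, self-contained sketch of the same pattern follows; the composable name, button label, MIME type and filename are illustrative, not taken from this codebase.

import androidx.activity.compose.rememberLauncherForActivityResult
import androidx.activity.result.contract.ActivityResultContracts
import androidx.compose.material3.Button
import androidx.compose.material3.Text
import androidx.compose.runtime.*
import androidx.compose.ui.platform.LocalContext

// Sketch: export a JSON string via the Storage Access Framework.
@Composable
fun ExportJsonButton(produceJson: () -> String) {
    val context = LocalContext.current
    var pendingJson by remember { mutableStateOf<String?>(null) }

    // Step 2: the picker returns a target Uri (or null if cancelled); write the pending JSON to it.
    val createDocument = rememberLauncherForActivityResult(
        ActivityResultContracts.CreateDocument("application/json")
    ) { uri ->
        val json = pendingJson
        if (json != null && uri != null) {
            context.contentResolver.openOutputStream(uri)?.use { out ->
                out.write(json.toByteArray(Charsets.UTF_8))
            }
        }
        pendingJson = null // clear state whether the user saved or cancelled
    }

    // Step 1: produce the JSON, remember it, then launch the picker with a suggested name.
    Button(onClick = {
        pendingJson = produceJson()
        createDocument.launch("backup_${System.currentTimeMillis()}.json")
    }) { Text("Export") }
}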
// Handle import result - show toast // Handle import result - show toast
LaunchedEffect(importResult) { LaunchedEffect(importResult) {
android.util.Log.d("MainActivity", "[IMPORT-EFFECT] LaunchedEffect(importResult) triggered")
android.util.Log.d("MainActivity", "[IMPORT-EFFECT] importResult: $importResult")
importResult?.let { result -> importResult?.let { result ->
android.util.Log.d("MainActivity", "[IMPORT-EFFECT] isSuccess: ${result.isSuccess}, error: ${result.error}, message: ${result.message}")
when { when {
result.isSuccess -> { result.isSuccess -> {
android.util.Log.d("MainActivity", "[IMPORT-EFFECT] Showing success toast")
Toast.makeText(context, result.message ?: "导入成功", Toast.LENGTH_SHORT).show() Toast.makeText(context, result.message ?: "导入成功", Toast.LENGTH_SHORT).show()
viewModel.clearExportImportResult() viewModel.clearExportImportResult()
} }
result.error != null -> { result.error != null -> {
android.util.Log.d("MainActivity", "[IMPORT-EFFECT] Showing error toast: ${result.error}")
Toast.makeText(context, result.error, Toast.LENGTH_LONG).show() Toast.makeText(context, result.error, Toast.LENGTH_LONG).show()
viewModel.clearExportImportResult() viewModel.clearExportImportResult()
} }
@ -224,9 +180,7 @@ fun TssPartyApp(
} }
// Track if startup is complete // Track if startup is complete
// Use rememberSaveable to persist across configuration changes (e.g., file picker activity)
var startupComplete by rememberSaveable { mutableStateOf(false) }
var startupComplete by remember { mutableStateOf(false) }
android.util.Log.d("MainActivity", "[STATE] TssPartyApp composing, startupComplete: $startupComplete")
// Handle success messages // Handle success messages
LaunchedEffect(uiState.successMessage) { LaunchedEffect(uiState.successMessage) {
@ -302,34 +256,18 @@ fun TssPartyApp(
transferWalletId = shareId transferWalletId = shareId
navController.navigate("transfer/$shareId") navController.navigate("transfer/$shareId")
}, },
onHistory = { shareId, address ->
navController.navigate("history/$shareId/$address")
},
onExportBackup = { shareId, _ -> onExportBackup = { shareId, _ ->
android.util.Log.d("MainActivity", "[EXPORT-TRIGGER] ========== onExportBackup called ==========")
android.util.Log.d("MainActivity", "[EXPORT-TRIGGER] shareId: $shareId")
// Get address for filename // Get address for filename
val share = shares.find { it.id == shareId } val share = shares.find { it.id == shareId }
android.util.Log.d("MainActivity", "[EXPORT-TRIGGER] share found: ${share != null}, address: ${share?.address}")
pendingExportAddress = share?.address pendingExportAddress = share?.address
android.util.Log.d("MainActivity", "[EXPORT-TRIGGER] pendingExportAddress set to: $pendingExportAddress")
// Export and save to file // Export and save to file
android.util.Log.d("MainActivity", "[EXPORT-TRIGGER] Calling viewModel.exportShareBackup...")
viewModel.exportShareBackup(shareId) { json -> viewModel.exportShareBackup(shareId) { json ->
android.util.Log.d("MainActivity", "[EXPORT-TRIGGER] exportShareBackup callback received")
android.util.Log.d("MainActivity", "[EXPORT-TRIGGER] json length: ${json.length}")
android.util.Log.d("MainActivity", "[EXPORT-TRIGGER] Setting pendingExportJson...")
pendingExportJson = json pendingExportJson = json
android.util.Log.d("MainActivity", "[EXPORT-TRIGGER] pendingExportJson set, length: ${pendingExportJson?.length}")
} }
android.util.Log.d("MainActivity", "[EXPORT-TRIGGER] viewModel.exportShareBackup called (async)")
}, },
onImportBackup = { onImportBackup = {
android.util.Log.d("MainActivity", "[IMPORT-TRIGGER] ========== onImportBackup called ==========")
android.util.Log.d("MainActivity", "[IMPORT-TRIGGER] Launching file picker...")
// Open file picker to select backup file // Open file picker to select backup file
openDocumentLauncher.launch(arrayOf("*/*")) openDocumentLauncher.launch(arrayOf("*/*"))
android.util.Log.d("MainActivity", "[IMPORT-TRIGGER] File picker launched")
}, },
onCreateWallet = { onCreateWallet = {
navController.navigate(BottomNavItem.Create.route) navController.navigate(BottomNavItem.Create.route)
@ -350,7 +288,7 @@ fun TssPartyApp(
sessionStatus = sessionStatus, sessionStatus = sessionStatus,
participants = signParticipants, participants = signParticipants,
currentRound = signCurrentRound, currentRound = signCurrentRound,
totalRounds = if (totalRounds > 0) totalRounds else 9, // Default to sign rounds
totalRounds = 9,
preparedTx = preparedTx, preparedTx = preparedTx,
signSessionId = signSessionId, signSessionId = signSessionId,
inviteCode = signInviteCode, inviteCode = signInviteCode,
@ -363,19 +301,8 @@ fun TssPartyApp(
onPrepareTransaction = { toAddress, amount, tokenType -> onPrepareTransaction = { toAddress, amount, tokenType ->
viewModel.prepareTransfer(shareId, toAddress, amount, tokenType) viewModel.prepareTransfer(shareId, toAddress, amount, tokenType)
}, },
onConfirmTransaction = { includeServerBackup ->
onConfirmTransaction = {
// [New] Call the appropriate signing method based on the user's choice
// includeServerBackup = true: use the new method, which includes the server backup party
// includeServerBackup = false: use the existing method, which excludes the server party (default behavior)
if (includeServerBackup) {
viewModel.initiateSignSessionWithOptions(
shareId = shareId,
password = "",
includeServerBackup = true
)
} else {
viewModel.initiateSignSession(shareId, "") viewModel.initiateSignSession(shareId, "")
}
}, },
onCopyInviteCode = { onCopyInviteCode = {
signInviteCode?.let { onCopyToClipboard(it) } signInviteCode?.let { onCopyToClipboard(it) }
@ -398,33 +325,6 @@ fun TssPartyApp(
} }
} }
// Transaction History Screen
composable("history/{shareId}/{address}") { backStackEntry ->
val shareId = backStackEntry.arguments?.getString("shareId")?.toLongOrNull() ?: 0L
val address = backStackEntry.arguments?.getString("address") ?: ""
// Load records and sync when entering screen
LaunchedEffect(shareId, address) {
viewModel.loadTransactionRecords(shareId)
// Auto-sync from blockchain on first entry
if (address.isNotEmpty()) {
viewModel.syncTransactionHistory(shareId, address)
}
}
TransactionHistoryScreen(
shareId = shareId,
walletAddress = address,
transactions = transactionRecords,
networkType = settings.networkType,
isSyncing = isSyncingHistory,
syncResultMessage = syncResultMessage,
onBack = { navController.popBackStack() },
onRefresh = { viewModel.syncTransactionHistory(shareId, address) },
onClearSyncMessage = { viewModel.clearSyncResultMessage() }
)
}
// Tab 2: Create Wallet (创建钱包) // Tab 2: Create Wallet (创建钱包)
composable(BottomNavItem.Create.route) { composable(BottomNavItem.Create.route) {
CreateWalletScreen( CreateWalletScreen(
@ -436,7 +336,7 @@ fun TssPartyApp(
hasEnteredSession = hasEnteredSession, hasEnteredSession = hasEnteredSession,
participants = sessionParticipants, participants = sessionParticipants,
currentRound = currentRound, currentRound = currentRound,
totalRounds = if (totalRounds > 0) totalRounds else 4, // Default to keygen rounds
totalRounds = 9,
publicKey = publicKey, publicKey = publicKey,
countdownSeconds = uiState.countdownSeconds, countdownSeconds = uiState.countdownSeconds,
onCreateSession = { name, t, n, participantName -> onCreateSession = { name, t, n, participantName ->
@ -487,7 +387,7 @@ fun TssPartyApp(
sessionInfo = screenSessionInfo, sessionInfo = screenSessionInfo,
participants = joinKeygenParticipants, participants = joinKeygenParticipants,
currentRound = joinKeygenRound, currentRound = joinKeygenRound,
totalRounds = if (totalRounds > 0) totalRounds else 4, // Default to keygen rounds
totalRounds = 9,
publicKey = joinKeygenPublicKey, publicKey = joinKeygenPublicKey,
countdownSeconds = uiState.countdownSeconds, countdownSeconds = uiState.countdownSeconds,
onValidateInviteCode = { inviteCode -> onValidateInviteCode = { inviteCode ->
@ -543,7 +443,7 @@ fun TssPartyApp(
signSessionInfo = screenSignSessionInfo, signSessionInfo = screenSignSessionInfo,
participants = coSignParticipants, participants = coSignParticipants,
currentRound = coSignRound, currentRound = coSignRound,
totalRounds = if (totalRounds > 0) totalRounds else 9, // Default to sign rounds
totalRounds = 9,
signature = coSignSignature, signature = coSignSignature,
countdownSeconds = uiState.countdownSeconds, countdownSeconds = uiState.countdownSeconds,
onValidateInviteCode = { inviteCode -> onValidateInviteCode = { inviteCode ->


@ -1,87 +1,7 @@
package com.durian.tssparty package com.durian.tssparty
import android.app.Application import android.app.Application
import android.util.Log
import dagger.hilt.android.HiltAndroidApp import dagger.hilt.android.HiltAndroidApp
import java.io.File
import java.io.PrintWriter
import java.io.StringWriter
import java.text.SimpleDateFormat
import java.util.Date
import java.util.Locale
@HiltAndroidApp @HiltAndroidApp
class TssPartyApplication : Application() { class TssPartyApplication : Application()
companion object {
private const val TAG = "TssPartyApplication"
}
private var defaultExceptionHandler: Thread.UncaughtExceptionHandler? = null
override fun onCreate() {
super.onCreate()
Log.d(TAG, "Application onCreate")
// Set up global exception handler
setupCrashHandler()
}
private fun setupCrashHandler() {
defaultExceptionHandler = Thread.getDefaultUncaughtExceptionHandler()
Thread.setDefaultUncaughtExceptionHandler { thread, throwable ->
Log.e(TAG, "=== UNCAUGHT EXCEPTION ===")
Log.e(TAG, "Thread: ${thread.name}")
Log.e(TAG, "Exception: ${throwable.javaClass.simpleName}")
Log.e(TAG, "Message: ${throwable.message}")
// Get full stack trace
val sw = StringWriter()
throwable.printStackTrace(PrintWriter(sw))
val stackTrace = sw.toString()
Log.e(TAG, "Stack trace:\n$stackTrace")
// Try to save crash log to file
try {
saveCrashLog(thread, throwable, stackTrace)
} catch (e: Exception) {
Log.e(TAG, "Failed to save crash log: ${e.message}")
}
// Call the default handler
defaultExceptionHandler?.uncaughtException(thread, throwable)
}
Log.d(TAG, "Crash handler installed")
}
private fun saveCrashLog(thread: Thread, throwable: Throwable, stackTrace: String) {
val crashDir = File(filesDir, "crash_logs")
if (!crashDir.exists()) {
crashDir.mkdirs()
}
val dateFormat = SimpleDateFormat("yyyy-MM-dd_HH-mm-ss", Locale.getDefault())
val timestamp = dateFormat.format(Date())
val crashFile = File(crashDir, "crash_$timestamp.txt")
crashFile.writeText(buildString {
appendLine("=== TSS Party Crash Report ===")
appendLine("Time: $timestamp")
appendLine("Thread: ${thread.name}")
appendLine("Exception: ${throwable.javaClass.name}")
appendLine("Message: ${throwable.message}")
appendLine()
appendLine("=== Stack Trace ===")
appendLine(stackTrace)
appendLine()
appendLine("=== Device Info ===")
appendLine("Android Version: ${android.os.Build.VERSION.RELEASE}")
appendLine("SDK: ${android.os.Build.VERSION.SDK_INT}")
appendLine("Device: ${android.os.Build.MANUFACTURER} ${android.os.Build.MODEL}")
})
Log.d(TAG, "Crash log saved to: ${crashFile.absolutePath}")
}
}
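saveCrashLog() writes each report under filesDir/crash_logs as crash_<timestamp>.txt, so the reports can be surfaced later (for example from a debug screen). A small companion sketch for reading the newest report back, assuming the same directory layout and naming as above; the helper name is illustrative.

import android.content.Context
import java.io.File

// Illustrative helper: return the contents of the most recent crash report, if any.
fun latestCrashReport(context: Context): String? {
    val crashDir = File(context.filesDir, "crash_logs")
    val newest = crashDir
        .listFiles { f -> f.name.startsWith("crash_") && f.name.endsWith(".txt") }
        ?.maxByOrNull { it.lastModified() }
    return newest?.readText()
}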


@ -29,9 +29,6 @@ data class ShareRecordEntity(
@ColumnInfo(name = "party_index") @ColumnInfo(name = "party_index")
val partyIndex: Int, val partyIndex: Int,
@ColumnInfo(name = "party_id")
val partyId: String, // The original partyId used during keygen - required for signing
@ColumnInfo(name = "address") @ColumnInfo(name = "address")
val address: String, val address: String,
@ -93,159 +90,15 @@ interface AppSettingDao {
suspend fun setValue(setting: AppSettingEntity) suspend fun setValue(setting: AppSettingEntity)
} }
/**
* 转账记录数据库实体
* Entity for storing transaction history records
*/
@Entity(
tableName = "transaction_records",
foreignKeys = [
ForeignKey(
entity = ShareRecordEntity::class,
parentColumns = ["id"],
childColumns = ["share_id"],
onDelete = ForeignKey.CASCADE // 删除钱包时自动删除关联的转账记录
)
],
indices = [
Index(value = ["share_id"]),
Index(value = ["tx_hash"], unique = true),
Index(value = ["from_address"]),
Index(value = ["to_address"]),
Index(value = ["created_at"])
]
)
data class TransactionRecordEntity(
@PrimaryKey(autoGenerate = true)
val id: Long = 0,
@ColumnInfo(name = "share_id")
val shareId: Long, // 关联的钱包ID
@ColumnInfo(name = "from_address")
val fromAddress: String, // 发送方地址
@ColumnInfo(name = "to_address")
val toAddress: String, // 接收方地址
@ColumnInfo(name = "amount")
val amount: String, // 转账金额(人类可读格式)
@ColumnInfo(name = "token_type")
val tokenType: String, // 代币类型KAVA, GREEN_POINTS, ENERGY_POINTS, FUTURE_POINTS
@ColumnInfo(name = "tx_hash")
val txHash: String, // 交易哈希
@ColumnInfo(name = "gas_price")
val gasPrice: String, // Gas 价格Wei
@ColumnInfo(name = "gas_used")
val gasUsed: String = "", // 实际消耗的 Gas
@ColumnInfo(name = "tx_fee")
val txFee: String = "", // 交易手续费
@ColumnInfo(name = "status")
val status: String, // 交易状态PENDING, CONFIRMED, FAILED
@ColumnInfo(name = "direction")
val direction: String, // 交易方向SENT, RECEIVED
@ColumnInfo(name = "note")
val note: String = "", // 备注
@ColumnInfo(name = "created_at")
val createdAt: Long = System.currentTimeMillis(),
@ColumnInfo(name = "confirmed_at")
val confirmedAt: Long? = null, // 确认时间
@ColumnInfo(name = "block_number")
val blockNumber: Long? = null // 区块高度
)
/**
* 转账记录 DAO
* Data Access Object for transaction records
*/
@Dao
interface TransactionRecordDao {
@Insert(onConflict = OnConflictStrategy.REPLACE)
suspend fun insertRecord(record: TransactionRecordEntity): Long
@Query("SELECT * FROM transaction_records WHERE id = :id")
suspend fun getRecordById(id: Long): TransactionRecordEntity?
@Query("SELECT * FROM transaction_records WHERE tx_hash = :txHash")
suspend fun getRecordByTxHash(txHash: String): TransactionRecordEntity?
@Query("SELECT * FROM transaction_records WHERE share_id = :shareId ORDER BY created_at DESC")
fun getRecordsForShare(shareId: Long): Flow<List<TransactionRecordEntity>>
@Query("SELECT * FROM transaction_records WHERE share_id = :shareId ORDER BY created_at DESC LIMIT :limit OFFSET :offset")
suspend fun getRecordsForSharePaged(shareId: Long, limit: Int, offset: Int): List<TransactionRecordEntity>
@Query("SELECT * FROM transaction_records WHERE share_id = :shareId AND token_type = :tokenType ORDER BY created_at DESC")
fun getRecordsForShareByToken(shareId: Long, tokenType: String): Flow<List<TransactionRecordEntity>>
@Query("SELECT * FROM transaction_records WHERE status = 'PENDING' ORDER BY created_at ASC")
suspend fun getPendingRecords(): List<TransactionRecordEntity>
@Query("UPDATE transaction_records SET status = :status, confirmed_at = :confirmedAt, block_number = :blockNumber, gas_used = :gasUsed, tx_fee = :txFee WHERE id = :id")
suspend fun updateStatus(id: Long, status: String, confirmedAt: Long?, blockNumber: Long?, gasUsed: String, txFee: String)
@Query("""
SELECT
COUNT(*) as total_count,
SUM(CASE WHEN direction = 'SENT' THEN 1 ELSE 0 END) as sent_count,
SUM(CASE WHEN direction = 'RECEIVED' THEN 1 ELSE 0 END) as received_count
FROM transaction_records
WHERE share_id = :shareId AND token_type = :tokenType
""")
suspend fun getTransactionStats(shareId: Long, tokenType: String): TransactionStats
@Query("SELECT COALESCE(SUM(CAST(amount AS REAL)), 0) FROM transaction_records WHERE share_id = :shareId AND token_type = :tokenType AND direction = 'SENT' AND status = 'CONFIRMED'")
suspend fun getTotalSentAmount(shareId: Long, tokenType: String): Double
@Query("SELECT COALESCE(SUM(CAST(amount AS REAL)), 0) FROM transaction_records WHERE share_id = :shareId AND token_type = :tokenType AND direction = 'RECEIVED' AND status = 'CONFIRMED'")
suspend fun getTotalReceivedAmount(shareId: Long, tokenType: String): Double
@Query("SELECT COALESCE(SUM(CAST(tx_fee AS REAL)), 0) FROM transaction_records WHERE share_id = :shareId AND direction = 'SENT' AND status = 'CONFIRMED'")
suspend fun getTotalTxFee(shareId: Long): Double
@Query("DELETE FROM transaction_records WHERE id = :id")
suspend fun deleteRecordById(id: Long)
@Query("DELETE FROM transaction_records WHERE share_id = :shareId")
suspend fun deleteRecordsForShare(shareId: Long)
@Query("SELECT COUNT(*) FROM transaction_records WHERE share_id = :shareId")
suspend fun getRecordCount(shareId: Long): Int
}
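The removed DAO pairs a unique index on tx_hash with OnConflictStrategy.REPLACE, so re-syncing the same transaction from the chain upserts the existing row instead of duplicating it, and PENDING rows are later promoted through updateStatus. A hedged sketch of how a repository layer might drive it; the TransactionLedger wrapper and its method names are hypothetical, only the DAO and entity come from the definitions above.

// Hypothetical wrapper around the DAO defined above.
class TransactionLedger(private val dao: TransactionRecordDao) {

    // Insert a PENDING, SENT record at broadcast time; REPLACE makes this safe to repeat.
    suspend fun recordBroadcast(
        shareId: Long, from: String, to: String, amount: String,
        tokenType: String, txHash: String, gasPrice: String
    ): Long = dao.insertRecord(
        TransactionRecordEntity(
            shareId = shareId,
            fromAddress = from,
            toAddress = to,
            amount = amount,
            tokenType = tokenType,
            txHash = txHash,
            gasPrice = gasPrice,
            status = "PENDING",
            direction = "SENT"
        )
    )

    // Promote a record once the receipt arrives, filling in block and fee details.
    suspend fun markConfirmed(txHash: String, blockNumber: Long, gasUsed: String, txFee: String) {
        val record = dao.getRecordByTxHash(txHash) ?: return
        dao.updateStatus(
            id = record.id,
            status = "CONFIRMED",
            confirmedAt = System.currentTimeMillis(),
            blockNumber = blockNumber,
            gasUsed = gasUsed,
            txFee = txFee
        )
    }
}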
/**
* 转账统计数据类
*/
data class TransactionStats(
@ColumnInfo(name = "total_count")
val totalCount: Int,
@ColumnInfo(name = "sent_count")
val sentCount: Int,
@ColumnInfo(name = "received_count")
val receivedCount: Int
)
/** /**
* Room database * Room database
*/ */
@Database( @Database(
entities = [ShareRecordEntity::class, AppSettingEntity::class, TransactionRecordEntity::class],
entities = [ShareRecordEntity::class, AppSettingEntity::class],
version = 4, // Version 4: added transaction_records table for transfer history
version = 2,
exportSchema = false
) )
abstract class TssDatabase : RoomDatabase() { abstract class TssDatabase : RoomDatabase() {
abstract fun shareRecordDao(): ShareRecordDao abstract fun shareRecordDao(): ShareRecordDao
abstract fun appSettingDao(): AppSettingDao abstract fun appSettingDao(): AppSettingDao
abstract fun transactionRecordDao(): TransactionRecordDao
} }


@ -123,26 +123,17 @@ class GrpcClient @Inject constructor() {
* Connect to the Message Router server * Connect to the Message Router server
*/ */
fun connect(host: String, port: Int) { fun connect(host: String, port: Int) {
Log.d(TAG, "=== connect() called ===")
Log.d(TAG, " host: $host, port: $port")
Log.d(TAG, " isReconnecting before reset: ${isReconnecting.get()}")
// Save connection params for reconnection // Save connection params for reconnection
currentHost = host currentHost = host
currentPort = port currentPort = port
shouldReconnect.set(true) shouldReconnect.set(true)
reconnectAttempts.set(0) reconnectAttempts.set(0)
// Important: ensure isReconnecting is false on the initial connect
// so that waitForConnection() can correctly distinguish a first connect from a reconnect
isReconnecting.set(false)
Log.d(TAG, " isReconnecting after reset: ${isReconnecting.get()} (should be false for first connect)")
doConnect(host, port) doConnect(host, port)
} }
private fun doConnect(host: String, port: Int) { private fun doConnect(host: String, port: Int) {
Log.d(TAG, "doConnect: $host:$port, isReconnecting=${isReconnecting.get()}")
Log.d(TAG, "Connecting to $host:$port")
_connectionState.value = GrpcConnectionState.Connecting _connectionState.value = GrpcConnectionState.Connecting
try { try {
@ -192,38 +183,23 @@ class GrpcClient @Inject constructor() {
when (state) { when (state) {
ConnectivityState.READY -> { ConnectivityState.READY -> {
Log.d(TAG, "Connected successfully")
// Key fix: read isReconnecting before resetting it, to tell a first connect apart from a reconnect
// - First connect: isReconnecting = false (triggered by connect())
// - Reconnect: isReconnecting = true (triggered by triggerReconnect(), including background wake-up)
val wasReconnecting = isReconnecting.getAndSet(false)
Log.d(TAG, "=== Channel READY ===")
Log.d(TAG, " wasReconnecting: $wasReconnecting")
Log.d(TAG, " registeredPartyId: $registeredPartyId")
Log.d(TAG, " eventStreamSubscribed: ${eventStreamSubscribed.get()}")
Log.d(TAG, " eventStreamPartyId: $eventStreamPartyId")
_connectionState.value = GrpcConnectionState.Connected _connectionState.value = GrpcConnectionState.Connected
reconnectAttempts.set(0) reconnectAttempts.set(0)
heartbeatFailCount.set(0) heartbeatFailCount.set(0)
isReconnecting.set(false)
// Start channel state monitoring // Start channel state monitoring
startChannelStateMonitor() startChannelStateMonitor()
// Only a reconnect needs to restore registration and subscriptions.
// On first connect, registerParty() and subscribeSessionEvents() are called explicitly by the caller.
if (wasReconnecting) {
Log.d(TAG, ">>> RECONNECT: Restoring registration and streams")
// Re-register if we were registered before // Re-register if we were registered before
reRegisterIfNeeded() reRegisterIfNeeded()
// Restart heartbeat
startHeartbeat()
// Re-subscribe to streams // Re-subscribe to streams
reSubscribeStreams() reSubscribeStreams()
} else {
Log.d(TAG, ">>> FIRST CONNECT: Skipping restore (will be done by caller)")
}
// Restart heartbeat (both first connect and reconnect need this)
startHeartbeat()
return@withTimeout return@withTimeout
} }
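The READY branch above hinges on isReconnecting.getAndSet(false): a single atomic read both captures whether this transition was scheduled by triggerReconnect() and clears the flag, so registration and stream subscriptions are only restored on a genuine reconnect while the heartbeat is restarted in both cases. A condensed sketch of that branching, with the collaborators reduced to plain callbacks (the class and parameter names are illustrative):

import java.util.concurrent.atomic.AtomicBoolean

// Sketch of the first-connect vs. reconnect branching; isReconnecting is set by
// triggerReconnect() and left false by the initial connect().
class ReadyHandler(
    private val isReconnecting: AtomicBoolean,
    private val restoreRegistrationAndStreams: () -> Unit,
    private val startHeartbeat: () -> Unit
) {
    fun onChannelReady() {
        // Atomically read-and-clear: true only when a reconnect scheduled this attempt.
        val wasReconnecting = isReconnecting.getAndSet(false)
        if (wasReconnecting) {
            // Reconnect: re-register the party and re-subscribe message/event streams.
            restoreRegistrationAndStreams()
        }
        // Both paths need a fresh heartbeat loop.
        startHeartbeat()
    }
}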
@ -332,23 +308,18 @@ class GrpcClient @Inject constructor() {
* Trigger reconnection with exponential backoff * Trigger reconnection with exponential backoff
*/ */
private fun triggerReconnect(reason: String) { private fun triggerReconnect(reason: String) {
Log.d(TAG, "[IDLE_CRASH_DEBUG] triggerReconnect called: $reason")
Log.d(TAG, "[IDLE_CRASH_DEBUG] shouldReconnect=${shouldReconnect.get()}, isReconnecting=${isReconnecting.get()}")
if (!shouldReconnect.get() || isReconnecting.getAndSet(true)) { if (!shouldReconnect.get() || isReconnecting.getAndSet(true)) {
Log.d(TAG, "[IDLE_CRASH_DEBUG] triggerReconnect skipped (already reconnecting or disabled)")
return return
} }
val host = currentHost val host = currentHost
val port = currentPort val port = currentPort
if (host == null || port == null) { if (host == null || port == null) {
Log.d(TAG, "[IDLE_CRASH_DEBUG] triggerReconnect skipped (no host/port)")
isReconnecting.set(false) isReconnecting.set(false)
return return
} }
Log.d(TAG, "[IDLE_CRASH_DEBUG] Triggering reconnect to $host:$port")
Log.d(TAG, "Triggering reconnect: $reason")
// Emit disconnected event // Emit disconnected event
_connectionEvents.tryEmit(GrpcConnectionEvent.Disconnected(reason)) _connectionEvents.tryEmit(GrpcConnectionEvent.Disconnected(reason))
@ -376,10 +347,7 @@ class GrpcClient @Inject constructor() {
Log.d(TAG, "Reconnecting in ${delay}ms (attempt $attempt/${reconnectConfig.maxRetries})") Log.d(TAG, "Reconnecting in ${delay}ms (attempt $attempt/${reconnectConfig.maxRetries})")
delay(delay) delay(delay)
// Note: do not reset isReconnecting here.
// It is reset in the READY branch of waitForConnection(),
// which is how waitForConnection() knows this is a reconnect rather than a first connect.
Log.d(TAG, ">>> Starting reconnect, isReconnecting=$isReconnecting (should be true)")
isReconnecting.set(false)
doConnect(host, port) doConnect(host, port)
} }
} }
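triggerReconnect() guards against concurrent attempts with isReconnecting.getAndSet(true), then retries doConnect() after an exponentially growing delay; the flag is only cleared once the channel reports READY, which is what lets waitForConnection() tell a reconnect apart from a first connect. A small sketch of the backoff arithmetic; the base delay and cap are illustrative, the real values live in the client's reconnectConfig.

import kotlin.math.min
import kotlin.math.pow

// Illustrative schedule: baseDelayMs * 2^(attempt - 1), capped at maxDelayMs.
fun backoffDelayMs(attempt: Int, baseDelayMs: Long = 1_000, maxDelayMs: Long = 30_000): Long =
    min(maxDelayMs, (baseDelayMs * 2.0.pow(attempt - 1)).toLong())

fun main() {
    // attempt 1 -> 1000 ms, 2 -> 2000, 3 -> 4000, 4 -> 8000, 5 -> 16000, 6+ -> capped at 30000
    (1..7).forEach { println("attempt $it -> ${backoffDelayMs(it)} ms") }
}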
@ -428,18 +396,15 @@ class GrpcClient @Inject constructor() {
private fun handleHeartbeatFailure(reason: String) { private fun handleHeartbeatFailure(reason: String) {
val fails = heartbeatFailCount.incrementAndGet() val fails = heartbeatFailCount.incrementAndGet()
Log.w(TAG, "[IDLE_CRASH_DEBUG] Heartbeat failed ($fails/$MAX_HEARTBEAT_FAILS): $reason")
Log.w(TAG, "Heartbeat failed ($fails/$MAX_HEARTBEAT_FAILS): $reason")
Log.w(TAG, "[IDLE_CRASH_DEBUG] Connection state: ${_connectionState.value}")
Log.w(TAG, "[IDLE_CRASH_DEBUG] Channel state: ${channel?.getState(false)}")
if (fails >= MAX_HEARTBEAT_FAILS) {
Log.e(TAG, "[IDLE_CRASH_DEBUG] Too many heartbeat failures, triggering reconnect")
Log.e(TAG, "Too many heartbeat failures, triggering reconnect")
triggerReconnect("Heartbeat failed") triggerReconnect("Heartbeat failed")
} }
} }
private fun stopHeartbeat() { private fun stopHeartbeat() {
Log.d(TAG, "[IDLE_CRASH_DEBUG] stopHeartbeat called")
heartbeatJob?.cancel() heartbeatJob?.cancel()
heartbeatJob = null heartbeatJob = null
heartbeatFailCount.set(0) heartbeatFailCount.set(0)
@ -483,28 +448,23 @@ class GrpcClient @Inject constructor() {
* Notifies the repository layer to re-establish message/event subscriptions * Notifies the repository layer to re-establish message/event subscriptions
*/ */
private fun reSubscribeStreams() { private fun reSubscribeStreams() {
Log.d(TAG, "[IDLE_CRASH_DEBUG] reSubscribeStreams called")
val needsResubscribe = eventStreamSubscribed.get() || activeMessageSubscription != null val needsResubscribe = eventStreamSubscribed.get() || activeMessageSubscription != null
if (needsResubscribe) { if (needsResubscribe) {
Log.d(TAG, "[IDLE_CRASH_DEBUG] Triggering stream re-subscription callback")
Log.d(TAG, "Triggering stream re-subscription callback")
Log.d(TAG, "[IDLE_CRASH_DEBUG] - Event stream: ${eventStreamSubscribed.get()}, partyId: $eventStreamPartyId")
Log.d(TAG, " - Event stream: ${eventStreamSubscribed.get()}, partyId: $eventStreamPartyId")
Log.d(TAG, "[IDLE_CRASH_DEBUG] - Message stream: ${activeMessageSubscription?.sessionId}")
Log.d(TAG, " - Message stream: ${activeMessageSubscription?.sessionId}")
// Notify repository to re-establish streams // Notify repository to re-establish streams
scope.launch { scope.launch {
Log.d(TAG, "[IDLE_CRASH_DEBUG] Waiting for channel to be ready...")
// Wait for channel to be fully ready instead of fixed delay // Wait for channel to be fully ready instead of fixed delay
if (waitForChannelReady()) { if (waitForChannelReady()) {
Log.d(TAG, "[IDLE_CRASH_DEBUG] Channel ready, invoking reconnect callback")
try { try {
onReconnectedCallback?.invoke() onReconnectedCallback?.invoke()
Log.d(TAG, "[IDLE_CRASH_DEBUG] Reconnect callback completed")
// Emit reconnected event // Emit reconnected event
_connectionEvents.tryEmit(GrpcConnectionEvent.Reconnected) _connectionEvents.tryEmit(GrpcConnectionEvent.Reconnected)
} catch (e: Exception) { } catch (e: Exception) {
Log.e(TAG, "[IDLE_CRASH_DEBUG] Reconnect callback failed: ${e.message}")
Log.e(TAG, "[IDLE_CRASH_DEBUG] Stack trace: ${e.stackTraceToString()}")
Log.e(TAG, "Reconnect callback failed: ${e.message}")
// Don't let callback failure affect the connection state // Don't let callback failure affect the connection state
} }
} else { } else {
@ -606,15 +566,6 @@ class GrpcClient @Inject constructor() {
partyRole: String = "temporary", partyRole: String = "temporary",
version: String = "1.0.0" version: String = "1.0.0"
): Result<Boolean> = withContext(Dispatchers.IO) { ): Result<Boolean> = withContext(Dispatchers.IO) {
// Must wait for the channel to be READY before registering
Log.d(TAG, "registerParty: Waiting for channel READY...")
val isReady = waitForChannelReady(CONNECTION_TIMEOUT_SECONDS * 1000)
if (!isReady) {
Log.e(TAG, "registerParty: Channel not ready after timeout")
return@withContext Result.failure(Exception("Channel not ready"))
}
Log.d(TAG, "registerParty: Channel is READY, proceeding with registration")
// Save for re-registration // Save for re-registration
registeredPartyId = partyId registeredPartyId = partyId
registeredPartyRole = partyRole registeredPartyRole = partyRole
@ -790,16 +741,15 @@ class GrpcClient @Inject constructor() {
override fun onError(t: Throwable) { override fun onError(t: Throwable) {
Log.e(TAG, "Message stream error: ${t.message}") Log.e(TAG, "Message stream error: ${t.message}")
// Ignore events from stale streams - close without exception to avoid crash
// Ignore events from stale streams
if (messageStreamVersion.get() != streamVersion) { if (messageStreamVersion.get() != streamVersion) {
Log.d(TAG, "Ignoring error from stale message stream") Log.d(TAG, "Ignoring error from stale message stream")
close() close(t)
return return
} }
// Don't trigger reconnect for CANCELLED or channel shutdown errors
val errorMessage = t.message.orEmpty()
if (!errorMessage.contains("CANCELLED") && !errorMessage.contains("shutdownNow")) {
// Don't trigger reconnect for CANCELLED errors
if (!t.message.orEmpty().contains("CANCELLED")) {
triggerReconnect("Message stream error: ${t.message}") triggerReconnect("Message stream error: ${t.message}")
} }
close(t) close(t)
@ -871,16 +821,15 @@ class GrpcClient @Inject constructor() {
override fun onError(t: Throwable) { override fun onError(t: Throwable) {
Log.e(TAG, "Session event stream error: ${t.message}") Log.e(TAG, "Session event stream error: ${t.message}")
// Ignore events from stale streams - close without exception to avoid crash
// Ignore events from stale streams
if (eventStreamVersion.get() != streamVersion) { if (eventStreamVersion.get() != streamVersion) {
Log.d(TAG, "Ignoring error from stale event stream") Log.d(TAG, "Ignoring error from stale event stream")
close() close(t)
return return
} }
// Don't trigger reconnect for CANCELLED or channel shutdown errors
val errorMessage = t.message.orEmpty()
if (!errorMessage.contains("CANCELLED") && !errorMessage.contains("shutdownNow")) {
// Don't trigger reconnect for CANCELLED errors
if (!t.message.orEmpty().contains("CANCELLED")) {
triggerReconnect("Event stream error: ${t.message}") triggerReconnect("Event stream error: ${t.message}")
} }
close(t) close(t)


@ -6,7 +6,6 @@ import androidx.room.migration.Migration
import androidx.sqlite.db.SupportSQLiteDatabase import androidx.sqlite.db.SupportSQLiteDatabase
import com.durian.tssparty.data.local.AppSettingDao import com.durian.tssparty.data.local.AppSettingDao
import com.durian.tssparty.data.local.ShareRecordDao import com.durian.tssparty.data.local.ShareRecordDao
import com.durian.tssparty.data.local.TransactionRecordDao
import com.durian.tssparty.data.local.TssDatabase import com.durian.tssparty.data.local.TssDatabase
import com.durian.tssparty.data.local.TssNativeBridge import com.durian.tssparty.data.local.TssNativeBridge
import com.durian.tssparty.data.remote.GrpcClient import com.durian.tssparty.data.remote.GrpcClient
@ -35,53 +34,6 @@ object AppModule {
} }
} }
// Migration from version 2 to 3: add party_id column to share_records
// This is critical for backup/restore - the partyId must be preserved for signing to work
private val MIGRATION_2_3 = object : Migration(2, 3) {
override fun migrate(database: SupportSQLiteDatabase) {
// Add party_id column with empty default (existing records will need to be re-exported)
database.execSQL(
"ALTER TABLE `share_records` ADD COLUMN `party_id` TEXT NOT NULL DEFAULT ''"
)
}
}
// Migration from version 3 to 4: add transaction_records table for transfer history
// 添加转账记录表,用于存储交易历史和分类账
private val MIGRATION_3_4 = object : Migration(3, 4) {
override fun migrate(database: SupportSQLiteDatabase) {
// Create the transaction_records table
database.execSQL("""
CREATE TABLE IF NOT EXISTS `transaction_records` (
`id` INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
`share_id` INTEGER NOT NULL,
`from_address` TEXT NOT NULL,
`to_address` TEXT NOT NULL,
`amount` TEXT NOT NULL,
`token_type` TEXT NOT NULL,
`tx_hash` TEXT NOT NULL,
`gas_price` TEXT NOT NULL,
`gas_used` TEXT NOT NULL DEFAULT '',
`tx_fee` TEXT NOT NULL DEFAULT '',
`status` TEXT NOT NULL,
`direction` TEXT NOT NULL,
`note` TEXT NOT NULL DEFAULT '',
`created_at` INTEGER NOT NULL,
`confirmed_at` INTEGER,
`block_number` INTEGER,
FOREIGN KEY(`share_id`) REFERENCES `share_records`(`id`) ON DELETE CASCADE
)
""".trimIndent())
// Create indexes to optimize query performance
database.execSQL("CREATE INDEX IF NOT EXISTS `index_transaction_records_share_id` ON `transaction_records` (`share_id`)")
database.execSQL("CREATE UNIQUE INDEX IF NOT EXISTS `index_transaction_records_tx_hash` ON `transaction_records` (`tx_hash`)")
database.execSQL("CREATE INDEX IF NOT EXISTS `index_transaction_records_from_address` ON `transaction_records` (`from_address`)")
database.execSQL("CREATE INDEX IF NOT EXISTS `index_transaction_records_to_address` ON `transaction_records` (`to_address`)")
database.execSQL("CREATE INDEX IF NOT EXISTS `index_transaction_records_created_at` ON `transaction_records` (`created_at`)")
}
}
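Because MIGRATION_2_3 and MIGRATION_3_4 are hand-written SQL, the usual safeguard is an instrumented test with Room's MigrationTestHelper, which creates the database at an old version and validates the migrated schema against the current entities. A hedged sketch follows; it assumes the migrations are reachable from test code, androidx.room:room-testing is on the androidTest classpath, and schema JSON is exported for the old versions (the module currently sets exportSchema = false, so that would need to be enabled first).

import androidx.room.testing.MigrationTestHelper
import androidx.test.ext.junit.runners.AndroidJUnit4
import androidx.test.platform.app.InstrumentationRegistry
import org.junit.Rule
import org.junit.Test
import org.junit.runner.RunWith

@RunWith(AndroidJUnit4::class)
class TssDatabaseMigrationTest {

    @get:Rule
    val helper = MigrationTestHelper(
        InstrumentationRegistry.getInstrumentation(),
        TssDatabase::class.java
    )

    @Test
    fun migrate2To4_schemaIsValid() {
        // Create the database at version 2, then run both migrations and let Room
        // validate the resulting schema against the current entity definitions.
        helper.createDatabase("migration-test.db", 2).close()
        helper.runMigrationsAndValidate("migration-test.db", 4, true, MIGRATION_2_3, MIGRATION_3_4)
    }
}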
@Provides @Provides
@Singleton @Singleton
fun provideGson(): Gson { fun provideGson(): Gson {
@ -96,7 +48,7 @@ object AppModule {
TssDatabase::class.java, TssDatabase::class.java,
"tss_party.db" "tss_party.db"
) )
.addMigrations(MIGRATION_1_2, MIGRATION_2_3, MIGRATION_3_4)
.addMigrations(MIGRATION_1_2)
.build() .build()
} }
@ -112,12 +64,6 @@ object AppModule {
return database.appSettingDao() return database.appSettingDao()
} }
@Provides
@Singleton
fun provideTransactionRecordDao(database: TssDatabase): TransactionRecordDao {
return database.transactionRecordDao()
}
@Provides @Provides
@Singleton @Singleton
fun provideGrpcClient(): GrpcClient { fun provideGrpcClient(): GrpcClient {
@ -136,9 +82,8 @@ object AppModule {
grpcClient: GrpcClient, grpcClient: GrpcClient,
tssNativeBridge: TssNativeBridge, tssNativeBridge: TssNativeBridge,
shareRecordDao: ShareRecordDao, shareRecordDao: ShareRecordDao,
appSettingDao: AppSettingDao,
transactionRecordDao: TransactionRecordDao
appSettingDao: AppSettingDao
): TssRepository {
return TssRepository(grpcClient, tssNativeBridge, shareRecordDao, appSettingDao, transactionRecordDao)
return TssRepository(grpcClient, tssNativeBridge, shareRecordDao, appSettingDao)
} }
} }

View File

@ -86,7 +86,6 @@ data class ShareRecord(
val thresholdT: Int, val thresholdT: Int,
val thresholdN: Int, val thresholdN: Int,
val partyIndex: Int, val partyIndex: Int,
val partyId: String, // The original partyId used during keygen - required for signing
val address: String, val address: String,
val createdAt: Long = System.currentTimeMillis() val createdAt: Long = System.currentTimeMillis()
) )
@ -130,21 +129,7 @@ enum class NetworkType {
*/ */
enum class TokenType { enum class TokenType {
KAVA, // Native KAVA token KAVA, // Native KAVA token
GREEN_POINTS, // 绿积分 (dUSDT) ERC-20 token
GREEN_POINTS // 绿积分 (dUSDT) ERC-20 token
ENERGY_POINTS, // 积分股 (eUSDT) ERC-20 token
FUTURE_POINTS // 积分值 (fUSDT) ERC-20 token
}
/**
* ERC-20 通用函数签名keccak256 哈希的前4字节
* Common ERC-20 function selectors
*/
object ERC20Selectors {
const val BALANCE_OF = "0x70a08231" // balanceOf(address)
const val TRANSFER = "0xa9059cbb" // transfer(address,uint256)
const val APPROVE = "0x095ea7b3" // approve(address,uint256)
const val ALLOWANCE = "0xdd62ed3e" // allowance(address,address)
const val TOTAL_SUPPLY = "0x18160ddd" // totalSupply()
} }
/** /**
@ -157,122 +142,22 @@ object GreenPointsToken {
const val SYMBOL = "dUSDT" const val SYMBOL = "dUSDT"
const val DECIMALS = 6 const val DECIMALS = 6
// ERC-20 function signatures (kept for backward compatibility)
// ERC-20 function signatures (first 4 bytes of keccak256 hash)
const val BALANCE_OF_SELECTOR = ERC20Selectors.BALANCE_OF
const val BALANCE_OF_SELECTOR = "0x70a08231" // balanceOf(address)
const val TRANSFER_SELECTOR = ERC20Selectors.TRANSFER
const val TRANSFER_SELECTOR = "0xa9059cbb" // transfer(address,uint256)
const val APPROVE_SELECTOR = ERC20Selectors.APPROVE
const val APPROVE_SELECTOR = "0x095ea7b3" // approve(address,uint256)
const val ALLOWANCE_SELECTOR = ERC20Selectors.ALLOWANCE
const val ALLOWANCE_SELECTOR = "0xdd62ed3e" // allowance(address,address)
const val TOTAL_SUPPLY_SELECTOR = ERC20Selectors.TOTAL_SUPPLY
const val TOTAL_SUPPLY_SELECTOR = "0x18160ddd" // totalSupply()
} }
/** /**
* Energy Points (积分股) Token Contract Configuration
* eUSDT - ERC-20 token on Kava EVM
* Total supply: 10.002 billion (10,002,000,000)
* Wallet balance containing both native and token balances
*/
object EnergyPointsToken {
const val CONTRACT_ADDRESS = "0x7C3275D808eFbAE90C06C7E3A9AfDdcAa8563931"
const val NAME = "积分股"
const val SYMBOL = "eUSDT"
const val DECIMALS = 6 // 与 dUSDT 相同的精度
}
/**
* Future Points (积分值) Token Contract Configuration
* fUSDT - ERC-20 token on Kava EVM
* 总供应量1万亿 (1,000,000,000,000)
*/
object FuturePointsToken {
const val CONTRACT_ADDRESS = "0x14dc4f7d3E4197438d058C3D156dd9826A161134"
const val NAME = "积分值"
const val SYMBOL = "fUSDT"
const val DECIMALS = 6 // 与 dUSDT 相同的精度
}
/**
* 代币配置工具类
* Token configuration utility
*/
object TokenConfig {
/**
* 获取代币合约地址
*/
fun getContractAddress(tokenType: TokenType): String? {
return when (tokenType) {
TokenType.KAVA -> null // 原生代币无合约地址
TokenType.GREEN_POINTS -> GreenPointsToken.CONTRACT_ADDRESS
TokenType.ENERGY_POINTS -> EnergyPointsToken.CONTRACT_ADDRESS
TokenType.FUTURE_POINTS -> FuturePointsToken.CONTRACT_ADDRESS
}
}
/**
* 获取代币精度
*/
fun getDecimals(tokenType: TokenType): Int {
return when (tokenType) {
TokenType.KAVA -> 18 // KAVA 原生代币精度
TokenType.GREEN_POINTS -> GreenPointsToken.DECIMALS
TokenType.ENERGY_POINTS -> EnergyPointsToken.DECIMALS
TokenType.FUTURE_POINTS -> FuturePointsToken.DECIMALS
}
}
/**
* 获取代币名称
*/
fun getName(tokenType: TokenType): String {
return when (tokenType) {
TokenType.KAVA -> "KAVA"
TokenType.GREEN_POINTS -> GreenPointsToken.NAME
TokenType.ENERGY_POINTS -> EnergyPointsToken.NAME
TokenType.FUTURE_POINTS -> FuturePointsToken.NAME
}
}
/**
* 获取代币符号
*/
fun getSymbol(tokenType: TokenType): String {
return when (tokenType) {
TokenType.KAVA -> "KAVA"
TokenType.GREEN_POINTS -> GreenPointsToken.SYMBOL
TokenType.ENERGY_POINTS -> EnergyPointsToken.SYMBOL
TokenType.FUTURE_POINTS -> FuturePointsToken.SYMBOL
}
}
/**
* 判断是否为 ERC-20 代币
*/
fun isERC20(tokenType: TokenType): Boolean {
return tokenType != TokenType.KAVA
}
}
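TokenConfig centralises each token's contract address and decimals, so amount handling reduces to scaling by 10^decimals: native KAVA uses 18 decimals and no contract, while the three ERC-20 tokens all use 6. A hedged usage sketch of the conversion a transfer flow needs; the helper names are illustrative, only the TokenConfig calls come from the object above.

import java.math.BigDecimal
import java.math.BigInteger

// Convert a human-readable amount ("1.5") into on-chain base units for the selected token.
// Throws ArithmeticException if the input has more decimal places than the token allows.
fun toBaseUnits(amount: String, tokenType: TokenType): BigInteger {
    val decimals = TokenConfig.getDecimals(tokenType) // 18 for KAVA, 6 for the ERC-20 tokens
    return BigDecimal(amount).movePointRight(decimals).toBigIntegerExact()
}

fun describeTransfer(amount: String, tokenType: TokenType): String {
    val contract = TokenConfig.getContractAddress(tokenType) // null for native KAVA
    val target = contract ?: "native transfer"
    return "${TokenConfig.getSymbol(tokenType)}: ${toBaseUnits(amount, tokenType)} base units -> $target"
}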
/**
* Wallet balance containing native and all token balances
* 钱包余额包含原生代币和所有 ERC-20 代币余额
*/ */
data class WalletBalance( data class WalletBalance(
val address: String, val address: String,
val kavaBalance: String = "0", // Native KAVA balance val kavaBalance: String = "0", // Native KAVA balance
val greenPointsBalance: String = "0", // 绿积分 (dUSDT) balance
val energyPointsBalance: String = "0", // 积分股 (eUSDT) balance
val futurePointsBalance: String = "0" // 积分值 (fUSDT) balance
) {
val greenPointsBalance: String = "0" // 绿积分 (dUSDT) balance
)
/**
* Get the balance for the given token type
*/
fun getBalance(tokenType: TokenType): String {
return when (tokenType) {
TokenType.KAVA -> kavaBalance
TokenType.GREEN_POINTS -> greenPointsBalance
TokenType.ENERGY_POINTS -> energyPointsBalance
TokenType.FUTURE_POINTS -> futurePointsBalance
}
}
}
/** /**
* Share backup data for export/import * Share backup data for export/import
@ -280,7 +165,7 @@ data class WalletBalance(
*/ */
data class ShareBackup( data class ShareBackup(
@SerializedName("version") @SerializedName("version")
val version: Int = 2, // Version 2: added partyId field for proper backup/restore
val version: Int = 1, // Backup format version for future compatibility
@SerializedName("sessionId") @SerializedName("sessionId")
val sessionId: String, val sessionId: String,
@ -300,9 +185,6 @@ data class ShareBackup(
@SerializedName("partyIndex") @SerializedName("partyIndex")
val partyIndex: Int, val partyIndex: Int,
@SerializedName("partyId")
val partyId: String, // The original partyId used during keygen - CRITICAL for signing after restore
@SerializedName("address") @SerializedName("address")
val address: String, val address: String,
@ -327,7 +209,6 @@ data class ShareBackup(
thresholdT = share.thresholdT, thresholdT = share.thresholdT,
thresholdN = share.thresholdN, thresholdN = share.thresholdN,
partyIndex = share.partyIndex, partyIndex = share.partyIndex,
partyId = share.partyId,
address = share.address, address = share.address,
createdAt = share.createdAt createdAt = share.createdAt
) )
@ -346,7 +227,6 @@ data class ShareBackup(
thresholdT = thresholdT, thresholdT = thresholdT,
thresholdN = thresholdN, thresholdN = thresholdN,
partyIndex = partyIndex, partyIndex = partyIndex,
partyId = partyId,
address = address, address = address,
createdAt = createdAt createdAt = createdAt
) )


@ -1,398 +0,0 @@
package com.durian.tssparty.presentation.screens
import android.content.Intent
import android.net.Uri
import androidx.compose.foundation.background
import androidx.compose.foundation.clickable
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.foundation.lazy.items
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.*
import androidx.compose.material3.*
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.text.font.FontFamily
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.text.style.TextOverflow
import androidx.compose.ui.unit.dp
import com.durian.tssparty.data.local.TransactionRecordEntity
import com.durian.tssparty.domain.model.EnergyPointsToken
import com.durian.tssparty.domain.model.FuturePointsToken
import com.durian.tssparty.domain.model.GreenPointsToken
import com.durian.tssparty.domain.model.NetworkType
import java.text.SimpleDateFormat
import java.util.*
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun TransactionHistoryScreen(
shareId: Long,
walletAddress: String,
transactions: List<TransactionRecordEntity>,
networkType: NetworkType,
isSyncing: Boolean,
syncResultMessage: String? = null,
onBack: () -> Unit,
onRefresh: () -> Unit,
onClearSyncMessage: () -> Unit = {}
) {
val context = LocalContext.current
val snackbarHostState = remember { SnackbarHostState() }
// Show snackbar when sync result message changes
LaunchedEffect(syncResultMessage) {
syncResultMessage?.let { message ->
snackbarHostState.showSnackbar(message)
onClearSyncMessage()
}
}
Scaffold(
snackbarHost = { SnackbarHost(snackbarHostState) },
topBar = {
TopAppBar(
title = { Text("交易记录") },
navigationIcon = {
IconButton(onClick = onBack) {
Icon(Icons.Default.ArrowBack, contentDescription = "返回")
}
},
actions = {
if (isSyncing) {
CircularProgressIndicator(
modifier = Modifier
.size(24.dp)
.padding(end = 8.dp),
strokeWidth = 2.dp
)
} else {
IconButton(onClick = onRefresh) {
Icon(Icons.Default.Refresh, contentDescription = "刷新")
}
}
}
)
}
) { paddingValues ->
Column(
modifier = Modifier
.fillMaxSize()
.padding(paddingValues)
.padding(horizontal = 16.dp)
) {
// Wallet address header
Card(
modifier = Modifier.fillMaxWidth(),
colors = CardDefaults.cardColors(
containerColor = MaterialTheme.colorScheme.surfaceVariant
)
) {
Row(
modifier = Modifier
.fillMaxWidth()
.padding(12.dp),
verticalAlignment = Alignment.CenterVertically
) {
Icon(
Icons.Default.AccountBalanceWallet,
contentDescription = null,
modifier = Modifier.size(20.dp),
tint = MaterialTheme.colorScheme.primary
)
Spacer(modifier = Modifier.width(8.dp))
Text(
text = walletAddress,
style = MaterialTheme.typography.bodySmall,
fontFamily = FontFamily.Monospace,
maxLines = 1,
overflow = TextOverflow.Ellipsis
)
}
}
Spacer(modifier = Modifier.height(8.dp))
// Transaction count
Text(
text = "${transactions.size} 条记录",
style = MaterialTheme.typography.bodyMedium,
color = MaterialTheme.colorScheme.onSurfaceVariant
)
Spacer(modifier = Modifier.height(12.dp))
if (transactions.isEmpty()) {
// Empty state
Box(
modifier = Modifier.fillMaxSize(),
contentAlignment = Alignment.Center
) {
Column(horizontalAlignment = Alignment.CenterHorizontally) {
Icon(
imageVector = Icons.Default.Receipt,
contentDescription = null,
modifier = Modifier.size(80.dp),
tint = MaterialTheme.colorScheme.outline
)
Spacer(modifier = Modifier.height(16.dp))
Text(
text = "暂无交易记录",
style = MaterialTheme.typography.titleLarge,
color = MaterialTheme.colorScheme.outline
)
Spacer(modifier = Modifier.height(8.dp))
Text(
text = if (isSyncing) "正在同步中..." else "发起转账后将在此显示",
style = MaterialTheme.typography.bodyMedium,
color = MaterialTheme.colorScheme.outline
)
}
}
} else {
// Transaction list
LazyColumn(
verticalArrangement = Arrangement.spacedBy(8.dp),
contentPadding = PaddingValues(bottom = 16.dp)
) {
items(
items = transactions.sortedByDescending { it.createdAt },
key = { it.id }
) { tx ->
TransactionItemCard(
transaction = tx,
walletAddress = walletAddress,
networkType = networkType,
onClick = {
// Open transaction in block explorer
val explorerUrl = getExplorerUrl(networkType, tx.txHash)
val intent = Intent(Intent.ACTION_VIEW, Uri.parse(explorerUrl))
context.startActivity(intent)
}
)
}
}
}
}
}
}
@Composable
private fun TransactionItemCard(
transaction: TransactionRecordEntity,
walletAddress: String,
networkType: NetworkType,
onClick: () -> Unit
) {
val isSent = transaction.direction == "SENT" ||
transaction.fromAddress.equals(walletAddress, ignoreCase = true)
val statusColor = when (transaction.status) {
"CONFIRMED" -> Color(0xFF4CAF50) // Green
"FAILED" -> MaterialTheme.colorScheme.error
else -> Color(0xFFFF9800) // Orange for PENDING
}
val tokenColor = when (transaction.tokenType) {
"GREEN_POINTS" -> Color(0xFF4CAF50)
"ENERGY_POINTS" -> Color(0xFF2196F3)
"FUTURE_POINTS" -> Color(0xFF9C27B0)
else -> MaterialTheme.colorScheme.primary // KAVA
}
val tokenName = when (transaction.tokenType) {
"GREEN_POINTS" -> GreenPointsToken.NAME
"ENERGY_POINTS" -> EnergyPointsToken.NAME
"FUTURE_POINTS" -> FuturePointsToken.NAME
else -> "KAVA"
}
Card(
modifier = Modifier
.fillMaxWidth()
.clickable { onClick() }
) {
Column(
modifier = Modifier.padding(12.dp)
) {
// Row 1: Direction icon + Amount + Status
Row(
modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.SpaceBetween,
verticalAlignment = Alignment.CenterVertically
) {
Row(verticalAlignment = Alignment.CenterVertically) {
// Direction icon
Box(
modifier = Modifier
.size(36.dp)
.clip(RoundedCornerShape(8.dp))
.background(
if (isSent)
MaterialTheme.colorScheme.errorContainer
else
Color(0xFFE8F5E9)
),
contentAlignment = Alignment.Center
) {
Icon(
imageVector = if (isSent) Icons.Default.ArrowUpward else Icons.Default.ArrowDownward,
contentDescription = if (isSent) "发送" else "接收",
tint = if (isSent)
MaterialTheme.colorScheme.error
else
Color(0xFF4CAF50),
modifier = Modifier.size(20.dp)
)
}
Spacer(modifier = Modifier.width(12.dp))
// Amount and token
Column {
Row(verticalAlignment = Alignment.CenterVertically) {
Text(
text = "${if (isSent) "-" else "+"}${transaction.amount}",
style = MaterialTheme.typography.titleMedium,
fontWeight = FontWeight.Bold,
color = if (isSent)
MaterialTheme.colorScheme.error
else
Color(0xFF4CAF50)
)
Spacer(modifier = Modifier.width(4.dp))
Text(
text = tokenName,
style = MaterialTheme.typography.bodySmall,
color = tokenColor
)
}
Text(
text = if (isSent) "发送" else "接收",
style = MaterialTheme.typography.bodySmall,
color = MaterialTheme.colorScheme.onSurfaceVariant
)
}
}
// Status badge
Surface(
color = statusColor.copy(alpha = 0.15f),
shape = RoundedCornerShape(4.dp)
) {
Text(
text = when (transaction.status) {
"CONFIRMED" -> "已确认"
"FAILED" -> "失败"
else -> "待确认"
},
modifier = Modifier.padding(horizontal = 8.dp, vertical = 4.dp),
style = MaterialTheme.typography.labelSmall,
color = statusColor
)
}
}
Spacer(modifier = Modifier.height(8.dp))
Divider(color = MaterialTheme.colorScheme.outlineVariant)
Spacer(modifier = Modifier.height(8.dp))
// Row 2: Address (to/from)
Row(
modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.SpaceBetween
) {
Column {
Text(
text = if (isSent) "发送至" else "来自",
style = MaterialTheme.typography.labelSmall,
color = MaterialTheme.colorScheme.outline
)
Text(
text = if (isSent) shortenAddress(transaction.toAddress) else shortenAddress(transaction.fromAddress),
style = MaterialTheme.typography.bodySmall,
fontFamily = FontFamily.Monospace
)
}
Column(horizontalAlignment = Alignment.End) {
Text(
text = "时间",
style = MaterialTheme.typography.labelSmall,
color = MaterialTheme.colorScheme.outline
)
Text(
text = formatTimestamp(transaction.createdAt),
style = MaterialTheme.typography.bodySmall
)
}
}
// Row 3: Tx Hash (abbreviated)
Spacer(modifier = Modifier.height(4.dp))
Row(
modifier = Modifier.fillMaxWidth(),
verticalAlignment = Alignment.CenterVertically
) {
Text(
text = "交易哈希: ${shortenTxHash(transaction.txHash)}",
style = MaterialTheme.typography.labelSmall,
color = MaterialTheme.colorScheme.outline,
fontFamily = FontFamily.Monospace
)
Spacer(modifier = Modifier.width(4.dp))
Icon(
Icons.Default.OpenInNew,
contentDescription = "查看详情",
modifier = Modifier.size(12.dp),
tint = MaterialTheme.colorScheme.outline
)
}
// Row 4: Fee (if confirmed)
if (transaction.status == "CONFIRMED" && transaction.txFee.isNotEmpty()) {
Spacer(modifier = Modifier.height(4.dp))
Text(
text = "手续费: ${transaction.txFee} KAVA",
style = MaterialTheme.typography.labelSmall,
color = MaterialTheme.colorScheme.outline
)
}
}
}
}
private fun shortenAddress(address: String): String {
return if (address.length > 16) {
"${address.take(10)}...${address.takeLast(6)}"
} else {
address
}
}
private fun shortenTxHash(txHash: String): String {
return if (txHash.length > 20) {
"${txHash.take(10)}...${txHash.takeLast(8)}"
} else {
txHash
}
}
private fun formatTimestamp(timestamp: Long): String {
val sdf = SimpleDateFormat("MM-dd HH:mm", Locale.getDefault())
return sdf.format(Date(timestamp))
}
private fun getExplorerUrl(networkType: NetworkType, txHash: String): String {
return when (networkType) {
NetworkType.MAINNET -> "https://kavascan.com/tx/$txHash"
NetworkType.TESTNET -> "https://testnet.kavascan.com/tx/$txHash"
}
}


@ -27,13 +27,10 @@ import android.graphics.Bitmap
import androidx.compose.foundation.Image import androidx.compose.foundation.Image
import androidx.compose.foundation.background import androidx.compose.foundation.background
import androidx.compose.ui.graphics.asImageBitmap import androidx.compose.ui.graphics.asImageBitmap
import com.durian.tssparty.domain.model.EnergyPointsToken
import com.durian.tssparty.domain.model.FuturePointsToken
import com.durian.tssparty.domain.model.GreenPointsToken import com.durian.tssparty.domain.model.GreenPointsToken
import com.durian.tssparty.domain.model.NetworkType import com.durian.tssparty.domain.model.NetworkType
import com.durian.tssparty.domain.model.SessionStatus import com.durian.tssparty.domain.model.SessionStatus
import com.durian.tssparty.domain.model.ShareRecord import com.durian.tssparty.domain.model.ShareRecord
import com.durian.tssparty.domain.model.TokenConfig
import com.durian.tssparty.domain.model.TokenType import com.durian.tssparty.domain.model.TokenType
import com.durian.tssparty.domain.model.WalletBalance import com.durian.tssparty.domain.model.WalletBalance
import com.durian.tssparty.util.TransactionUtils import com.durian.tssparty.util.TransactionUtils
@ -78,7 +75,7 @@ fun TransferScreen(
networkType: NetworkType = NetworkType.MAINNET, networkType: NetworkType = NetworkType.MAINNET,
rpcUrl: String = "https://evm.kava.io", rpcUrl: String = "https://evm.kava.io",
onPrepareTransaction: (toAddress: String, amount: String, tokenType: TokenType) -> Unit, onPrepareTransaction: (toAddress: String, amount: String, tokenType: TokenType) -> Unit,
onConfirmTransaction: (includeServerBackup: Boolean) -> Unit, // New parameter: whether the server backup party participates in signing
onConfirmTransaction: () -> Unit,
onCopyInviteCode: () -> Unit, onCopyInviteCode: () -> Unit,
onBroadcastTransaction: () -> Unit, onBroadcastTransaction: () -> Unit,
onCancel: () -> Unit, onCancel: () -> Unit,
@ -159,8 +156,10 @@ fun TransferScreen(
rpcUrl = rpcUrl, rpcUrl = rpcUrl,
onSubmit = { onSubmit = {
// Get current balance for the selected token type // Get current balance for the selected token type
val currentBalance = walletBalance?.getBalance(selectedTokenType)
    ?: if (selectedTokenType == TokenType.KAVA) balance else null
val currentBalance = when (selectedTokenType) {
    TokenType.KAVA -> walletBalance?.kavaBalance ?: balance
TokenType.GREEN_POINTS -> walletBalance?.greenPointsBalance
}
when { when {
toAddress.isBlank() -> validationError = "请输入收款地址" toAddress.isBlank() -> validationError = "请输入收款地址"
!toAddress.startsWith("0x") || toAddress.length != 42 -> validationError = "地址格式不正确" !toAddress.startsWith("0x") || toAddress.length != 42 -> validationError = "地址格式不正确"
@ -196,9 +195,9 @@ fun TransferScreen(
toAddress = toAddress, toAddress = toAddress,
amount = amount, amount = amount,
error = error, error = error,
onConfirm = { includeServerBackup ->
onConfirm = {
validationError = null
onConfirmTransaction(includeServerBackup) // pass the server-backup option
onConfirmTransaction()
}, },
onBack = onCancel onBack = onCancel
) )
@ -258,9 +257,14 @@ private fun TransferInputScreen(
var isCalculatingMax by remember { mutableStateOf(false) } var isCalculatingMax by remember { mutableStateOf(false) }
// Get current balance for the selected token type // Get current balance for the selected token type
val currentBalance = walletBalance?.getBalance(selectedTokenType)
    ?: if (selectedTokenType == TokenType.KAVA) balance else null
val tokenSymbol = TokenConfig.getName(selectedTokenType)
val currentBalance = when (selectedTokenType) {
    TokenType.KAVA -> walletBalance?.kavaBalance ?: balance
    TokenType.GREEN_POINTS -> walletBalance?.greenPointsBalance
}
val tokenSymbol = when (selectedTokenType) {
    TokenType.KAVA -> "KAVA"
    TokenType.GREEN_POINTS -> GreenPointsToken.NAME
}
Column( Column(
modifier = Modifier modifier = Modifier
@ -289,8 +293,7 @@ private fun TransferInputScreen(
) )
Spacer(modifier = Modifier.height(8.dp)) Spacer(modifier = Modifier.height(8.dp))
// Show all token balances in a 2x2 grid
// Show both balances
Column {
Row( Row(
modifier = Modifier.fillMaxWidth(), modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.SpaceBetween horizontalArrangement = Arrangement.SpaceBetween
@ -309,7 +312,7 @@ private fun TransferInputScreen(
color = MaterialTheme.colorScheme.primary color = MaterialTheme.colorScheme.primary
) )
} }
// Green Points balance (绿积分)
// Green Points balance
Column(horizontalAlignment = Alignment.End) { Column(horizontalAlignment = Alignment.End) {
Text( Text(
text = GreenPointsToken.NAME, text = GreenPointsToken.NAME,
@ -324,41 +327,6 @@ private fun TransferInputScreen(
) )
} }
} }
Spacer(modifier = Modifier.height(4.dp))
Row(
modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.SpaceBetween
) {
// Energy Points balance (积分股)
Column {
Text(
text = EnergyPointsToken.NAME,
style = MaterialTheme.typography.labelSmall,
color = MaterialTheme.colorScheme.onSurfaceVariant
)
Text(
text = walletBalance?.energyPointsBalance ?: "加载中...",
style = MaterialTheme.typography.bodySmall,
fontWeight = FontWeight.Medium,
color = Color(0xFF2196F3) // Blue
)
}
// Future Points balance (积分值)
Column(horizontalAlignment = Alignment.End) {
Text(
text = FuturePointsToken.NAME,
style = MaterialTheme.typography.labelSmall,
color = MaterialTheme.colorScheme.onSurfaceVariant
)
Text(
text = walletBalance?.futurePointsBalance ?: "加载中...",
style = MaterialTheme.typography.bodySmall,
fontWeight = FontWeight.Medium,
color = Color(0xFF9C27B0) // Purple
)
}
}
}
} }
} }
@ -371,7 +339,6 @@ private fun TransferInputScreen(
color = MaterialTheme.colorScheme.onSurfaceVariant color = MaterialTheme.colorScheme.onSurfaceVariant
) )
Spacer(modifier = Modifier.height(8.dp)) Spacer(modifier = Modifier.height(8.dp))
// First row: KAVA and Green Points
Row( Row(
modifier = Modifier.fillMaxWidth(), modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.spacedBy(8.dp) horizontalArrangement = Arrangement.spacedBy(8.dp)
@ -392,7 +359,7 @@ private fun TransferInputScreen(
}, },
modifier = Modifier.weight(1f) modifier = Modifier.weight(1f)
) )
// Green Points option (绿积分)
// Green Points option
FilterChip( FilterChip(
selected = selectedTokenType == TokenType.GREEN_POINTS, selected = selectedTokenType == TokenType.GREEN_POINTS,
onClick = { onTokenTypeChange(TokenType.GREEN_POINTS) }, onClick = { onTokenTypeChange(TokenType.GREEN_POINTS) },
@ -413,53 +380,6 @@ private fun TransferInputScreen(
modifier = Modifier.weight(1f) modifier = Modifier.weight(1f)
) )
} }
Spacer(modifier = Modifier.height(8.dp))
// Second row: Energy Points and Future Points
Row(
modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.spacedBy(8.dp)
) {
// Energy Points option (积分股)
FilterChip(
selected = selectedTokenType == TokenType.ENERGY_POINTS,
onClick = { onTokenTypeChange(TokenType.ENERGY_POINTS) },
label = { Text(EnergyPointsToken.NAME) },
leadingIcon = {
if (selectedTokenType == TokenType.ENERGY_POINTS) {
Icon(
Icons.Default.Check,
contentDescription = null,
modifier = Modifier.size(18.dp)
)
}
},
colors = FilterChipDefaults.filterChipColors(
selectedContainerColor = Color(0xFF2196F3).copy(alpha = 0.2f),
selectedLabelColor = Color(0xFF2196F3)
),
modifier = Modifier.weight(1f)
)
// Future Points option (积分值)
FilterChip(
selected = selectedTokenType == TokenType.FUTURE_POINTS,
onClick = { onTokenTypeChange(TokenType.FUTURE_POINTS) },
label = { Text(FuturePointsToken.NAME) },
leadingIcon = {
if (selectedTokenType == TokenType.FUTURE_POINTS) {
Icon(
Icons.Default.Check,
contentDescription = null,
modifier = Modifier.size(18.dp)
)
}
},
colors = FilterChipDefaults.filterChipColors(
selectedContainerColor = Color(0xFF9C27B0).copy(alpha = 0.2f),
selectedLabelColor = Color(0xFF9C27B0)
),
modifier = Modifier.weight(1f)
)
}
Spacer(modifier = Modifier.height(16.dp)) Spacer(modifier = Modifier.height(16.dp))
@ -498,14 +418,9 @@ private fun TransferInputScreen(
keyboardOptions = KeyboardOptions(keyboardType = KeyboardType.Decimal), keyboardOptions = KeyboardOptions(keyboardType = KeyboardType.Decimal),
leadingIcon = { leadingIcon = {
Icon( Icon(
if (selectedTokenType == TokenType.KAVA) Icons.Default.AttachMoney else Icons.Default.Stars, if (selectedTokenType == TokenType.GREEN_POINTS) Icons.Default.Stars else Icons.Default.AttachMoney,
contentDescription = null, contentDescription = null,
tint = when (selectedTokenType) { tint = if (selectedTokenType == TokenType.GREEN_POINTS) Color(0xFF4CAF50) else MaterialTheme.colorScheme.onSurfaceVariant
TokenType.KAVA -> MaterialTheme.colorScheme.onSurfaceVariant
TokenType.GREEN_POINTS -> Color(0xFF4CAF50)
TokenType.ENERGY_POINTS -> Color(0xFF2196F3)
TokenType.FUTURE_POINTS -> Color(0xFF9C27B0)
}
) )
}, },
trailingIcon = { trailingIcon = {
@ -524,7 +439,7 @@ private fun TransferInputScreen(
onAmountChange(currentBalance) onAmountChange(currentBalance)
} }
} else { } else {
// For ERC-20 tokens (dUSDT, eUSDT, fUSDT), use the full balance // For tokens, use the full balance
onAmountChange(currentBalance) onAmountChange(currentBalance)
} }
isCalculatingMax = false isCalculatingMax = false
@ -651,15 +566,12 @@ private fun TransferConfirmScreen(
toAddress: String, toAddress: String,
amount: String, amount: String,
error: String?, error: String?,
onConfirm: (includeServerBackup: Boolean) -> Unit, // 新增参数:是否包含服务器备份参与签名 onConfirm: () -> Unit,
onBack: () -> Unit onBack: () -> Unit
) { ) {
val gasFee = TransactionUtils.weiToKava(preparedTx.gasPrice.multiply(preparedTx.gasLimit)) val gasFee = TransactionUtils.weiToKava(preparedTx.gasPrice.multiply(preparedTx.gasLimit))
val gasGwei = TransactionUtils.weiToGwei(preparedTx.gasPrice) val gasGwei = TransactionUtils.weiToGwei(preparedTx.gasPrice)
// [New] Server-backup option state (used only for 2-of-3 configurations)
var includeServerBackup by remember { mutableStateOf(false) }
Column( Column(
modifier = Modifier modifier = Modifier
.fillMaxSize() .fillMaxSize()
@ -736,48 +648,6 @@ private fun TransferConfirmScreen(
} }
} }
// [New feature] 2-of-3 server-backup option
// Shown only for 2-of-3 configurations
// Purpose: if the user has lost one device, allow signing to complete with the server backup plus the remaining device
// Security constraint: available only for 2-of-3; other configurations (3-of-5, 4-of-7, etc.) do not show this option
// Rollback: delete this code block to restore the original behavior
if (wallet.thresholdT == 2 && wallet.thresholdN == 3) {
Spacer(modifier = Modifier.height(16.dp))
Card(
colors = CardDefaults.cardColors(
containerColor = MaterialTheme.colorScheme.tertiaryContainer
)
) {
Row(
modifier = Modifier
.fillMaxWidth()
.padding(12.dp),
verticalAlignment = Alignment.CenterVertically
) {
Checkbox(
checked = includeServerBackup,
onCheckedChange = { includeServerBackup = it }
)
Spacer(modifier = Modifier.width(8.dp))
Column {
Text(
text = "包含服务器备份参与签名",
style = MaterialTheme.typography.bodyMedium,
fontWeight = FontWeight.Medium,
color = MaterialTheme.colorScheme.onTertiaryContainer
)
Spacer(modifier = Modifier.height(4.dp))
Text(
text = "如果您丢失了一个设备,勾选此项以使用服务器备份完成签名",
style = MaterialTheme.typography.bodySmall,
color = MaterialTheme.colorScheme.onTertiaryContainer
)
}
}
}
}
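The removed block above, the includeServerBackup state earlier in this Composable, and the onConfirm(includeServerBackup) call further down formed one opt-in flow. A minimal sketch of that wiring, assuming a stand-in ThresholdInfo type for the wallet model (the checkbox label comes from the diff; the rest is illustrative, not the app's real layout):

import androidx.compose.foundation.layout.Row
import androidx.compose.material3.Button
import androidx.compose.material3.Checkbox
import androidx.compose.material3.Text
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment

// Stand-in for the wallet model; the diff only shows thresholdT / thresholdN being read
data class ThresholdInfo(val thresholdT: Int, val thresholdN: Int)

@Composable
fun ServerBackupConfirm(wallet: ThresholdInfo, onConfirm: (Boolean) -> Unit) {
    // Option state lives in the confirm screen and defaults to off
    var includeServerBackup by remember { mutableStateOf(false) }
    // Only 2-of-3 wallets ever see the checkbox
    if (wallet.thresholdT == 2 && wallet.thresholdN == 3) {
        Row(verticalAlignment = Alignment.CenterVertically) {
            Checkbox(checked = includeServerBackup, onCheckedChange = { includeServerBackup = it })
            Text("包含服务器备份参与签名")
        }
    }
    // The user's choice rides along with the confirm action
    Button(onClick = { onConfirm(includeServerBackup) }) { Text("确认") }
}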
// Error display // Error display
error?.let { error?.let {
Spacer(modifier = Modifier.height(16.dp)) Spacer(modifier = Modifier.height(16.dp))
@ -819,7 +689,7 @@ private fun TransferConfirmScreen(
Text("返回") Text("返回")
} }
Button( Button(
onClick = { onConfirm(includeServerBackup) }, // 传递服务器备份选项 onClick = onConfirm,
modifier = Modifier.weight(1f) modifier = Modifier.weight(1f)
) { ) {
Icon( Icon(

View File

@ -35,8 +35,6 @@ import androidx.compose.ui.unit.sp
import androidx.compose.ui.window.Dialog import androidx.compose.ui.window.Dialog
import android.content.Intent import android.content.Intent
import android.net.Uri import android.net.Uri
import com.durian.tssparty.domain.model.EnergyPointsToken
import com.durian.tssparty.domain.model.FuturePointsToken
import com.durian.tssparty.domain.model.GreenPointsToken import com.durian.tssparty.domain.model.GreenPointsToken
import com.durian.tssparty.domain.model.NetworkType import com.durian.tssparty.domain.model.NetworkType
import com.durian.tssparty.domain.model.ShareRecord import com.durian.tssparty.domain.model.ShareRecord
@ -57,7 +55,6 @@ fun WalletsScreen(
onDeleteShare: (Long) -> Unit, onDeleteShare: (Long) -> Unit,
onRefreshBalance: ((String) -> Unit)? = null, onRefreshBalance: ((String) -> Unit)? = null,
onTransfer: ((shareId: Long) -> Unit)? = null, onTransfer: ((shareId: Long) -> Unit)? = null,
onHistory: ((shareId: Long, address: String) -> Unit)? = null,
onExportBackup: ((shareId: Long, password: String) -> Unit)? = null, onExportBackup: ((shareId: Long, password: String) -> Unit)? = null,
onImportBackup: (() -> Unit)? = null, onImportBackup: (() -> Unit)? = null,
onCreateWallet: (() -> Unit)? = null onCreateWallet: (() -> Unit)? = null
@ -158,9 +155,6 @@ fun WalletsScreen(
onTransfer = { onTransfer = {
onTransfer?.invoke(share.id) onTransfer?.invoke(share.id)
}, },
onHistory = {
onHistory?.invoke(share.id, share.address)
},
onDelete = { onDeleteShare(share.id) } onDelete = { onDeleteShare(share.id) }
) )
} }
@ -229,7 +223,6 @@ private fun WalletItemCard(
walletBalance: WalletBalance? = null, walletBalance: WalletBalance? = null,
onViewDetails: () -> Unit, onViewDetails: () -> Unit,
onTransfer: () -> Unit, onTransfer: () -> Unit,
onHistory: () -> Unit,
onDelete: () -> Unit onDelete: () -> Unit
) { ) {
var showDeleteDialog by remember { mutableStateOf(false) } var showDeleteDialog by remember { mutableStateOf(false) }
@ -288,8 +281,7 @@ private fun WalletItemCard(
Spacer(modifier = Modifier.height(12.dp)) Spacer(modifier = Modifier.height(12.dp))
// Balance display - shows all token balances in a 2x2 grid // Balance display - now shows both KAVA and Green Points
Column {
Row( Row(
modifier = Modifier.fillMaxWidth(), modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.SpaceBetween horizontalArrangement = Arrangement.SpaceBetween
@ -333,7 +325,7 @@ private fun WalletItemCard(
Icons.Default.Stars, Icons.Default.Stars,
contentDescription = null, contentDescription = null,
modifier = Modifier.size(16.dp), modifier = Modifier.size(16.dp),
tint = Color(0xFF4CAF50) tint = Color(0xFF4CAF50) // Green color for Green Points
) )
Spacer(modifier = Modifier.width(4.dp)) Spacer(modifier = Modifier.width(4.dp))
Text( Text(
@ -348,66 +340,6 @@ private fun WalletItemCard(
} }
} }
} }
Spacer(modifier = Modifier.height(8.dp))
Row(
modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.SpaceBetween
) {
// Energy Points (积分股) balance
Column {
Text(
text = EnergyPointsToken.NAME,
style = MaterialTheme.typography.labelSmall,
color = MaterialTheme.colorScheme.outline
)
Row(verticalAlignment = Alignment.CenterVertically) {
Icon(
Icons.Default.Stars,
contentDescription = null,
modifier = Modifier.size(16.dp),
tint = Color(0xFF2196F3) // Blue
)
Spacer(modifier = Modifier.width(4.dp))
Text(
text = walletBalance?.energyPointsBalance ?: "加载中...",
style = MaterialTheme.typography.bodyMedium,
color = if (walletBalance != null)
Color(0xFF2196F3)
else
MaterialTheme.colorScheme.outline,
fontWeight = FontWeight.Medium
)
}
}
// Future Points (积分值) balance
Column(horizontalAlignment = Alignment.End) {
Text(
text = FuturePointsToken.NAME,
style = MaterialTheme.typography.labelSmall,
color = MaterialTheme.colorScheme.outline
)
Row(verticalAlignment = Alignment.CenterVertically) {
Icon(
Icons.Default.Stars,
contentDescription = null,
modifier = Modifier.size(16.dp),
tint = Color(0xFF9C27B0) // Purple
)
Spacer(modifier = Modifier.width(4.dp))
Text(
text = walletBalance?.futurePointsBalance ?: "加载中...",
style = MaterialTheme.typography.bodyMedium,
color = if (walletBalance != null)
Color(0xFF9C27B0)
else
MaterialTheme.colorScheme.outline,
fontWeight = FontWeight.Medium
)
}
}
}
}
Spacer(modifier = Modifier.height(12.dp)) Spacer(modifier = Modifier.height(12.dp))
@ -440,16 +372,6 @@ private fun WalletItemCard(
Text("转账") Text("转账")
} }
TextButton(onClick = onHistory) {
Icon(
Icons.Default.Receipt,
contentDescription = null,
modifier = Modifier.size(18.dp)
)
Spacer(modifier = Modifier.width(4.dp))
Text("记录")
}
TextButton( TextButton(
onClick = { showDeleteDialog = true }, onClick = { showDeleteDialog = true },
colors = ButtonDefaults.textButtonColors( colors = ButtonDefaults.textButtonColors(

View File

@ -2,8 +2,8 @@ package com.durian.tssparty.presentation.viewmodel
import androidx.lifecycle.ViewModel import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope import androidx.lifecycle.viewModelScope
import com.durian.tssparty.data.repository.JoinKeygenViaGrpcResult
import com.durian.tssparty.data.repository.TssRepository import com.durian.tssparty.data.repository.TssRepository
import com.durian.tssparty.data.repository.TssRepository.JoinKeygenViaGrpcResult
import com.durian.tssparty.domain.model.* import com.durian.tssparty.domain.model.*
import com.durian.tssparty.util.AddressUtils import com.durian.tssparty.util.AddressUtils
import com.durian.tssparty.util.TransactionUtils import com.durian.tssparty.util.TransactionUtils
@ -45,11 +45,6 @@ class MainViewModel @Inject constructor(
private val _hasEnteredSession = MutableStateFlow(false) private val _hasEnteredSession = MutableStateFlow(false)
val hasEnteredSession: StateFlow<Boolean> = _hasEnteredSession.asStateFlow() val hasEnteredSession: StateFlow<Boolean> = _hasEnteredSession.asStateFlow()
// Synchronous flag to prevent participant_joined from adding duplicates after session_started
// This is set immediately (synchronously) when session_started is processed, ensuring
// any subsequent participant_joined events in the same callback queue will see the flag
private var sessionStartedForSession: String? = null
init { init {
// Start initialization on app launch // Start initialization on app launch
checkAllServices() checkAllServices()
@ -223,9 +218,6 @@ class MainViewModel @Inject constructor(
private val _currentRound = MutableStateFlow(0) private val _currentRound = MutableStateFlow(0)
val currentRound: StateFlow<Int> = _currentRound.asStateFlow() val currentRound: StateFlow<Int> = _currentRound.asStateFlow()
private val _totalRounds = MutableStateFlow(0)
val totalRounds: StateFlow<Int> = _totalRounds.asStateFlow()
private val _publicKey = MutableStateFlow<String?>(null) private val _publicKey = MutableStateFlow<String?>(null)
val publicKey: StateFlow<String?> = _publicKey.asStateFlow() val publicKey: StateFlow<String?> = _publicKey.asStateFlow()
@ -296,30 +288,19 @@ class MainViewModel @Inject constructor(
// Setup keygen timeout callback (matching Electron's 5-minute timeout in checkAndTriggerKeygen) // Setup keygen timeout callback (matching Electron's 5-minute timeout in checkAndTriggerKeygen)
repository.setKeygenTimeoutCallback { errorMessage -> repository.setKeygenTimeoutCallback { errorMessage ->
android.util.Log.e("MainViewModel", "[IDLE_CRASH_DEBUG] Keygen timeout callback invoked: $errorMessage") android.util.Log.e("MainViewModel", "Keygen timeout: $errorMessage")
try {
_uiState.update { it.copy(isLoading = false, error = errorMessage, countdownSeconds = -1L) } _uiState.update { it.copy(isLoading = false, error = errorMessage, countdownSeconds = -1L) }
android.util.Log.d("MainViewModel", "[IDLE_CRASH_DEBUG] Keygen timeout callback completed")
} catch (e: Exception) {
android.util.Log.e("MainViewModel", "[IDLE_CRASH_DEBUG] Exception in keygen timeout callback: ${e.message}")
android.util.Log.e("MainViewModel", "[IDLE_CRASH_DEBUG] Stack: ${e.stackTraceToString()}")
}
} }
// Setup countdown tick callback for UI countdown display // Setup countdown tick callback for UI countdown display
repository.setCountdownTickCallback { remainingSeconds -> repository.setCountdownTickCallback { remainingSeconds ->
try { android.util.Log.d("MainViewModel", "Countdown tick: $remainingSeconds seconds remaining")
_uiState.update { it.copy(countdownSeconds = remainingSeconds) } _uiState.update { it.copy(countdownSeconds = remainingSeconds) }
} catch (e: Exception) {
android.util.Log.e("MainViewModel", "[IDLE_CRASH_DEBUG] Exception in countdown tick callback: ${e.message}")
}
} }
// Setup progress callback for real-time round updates from native TSS bridge // Setup progress callback for real-time round updates from native TSS bridge
repository.setProgressCallback { round, totalRoundsFromGo -> repository.setProgressCallback { round, totalRounds ->
android.util.Log.d("MainViewModel", "Progress update: $round / $totalRoundsFromGo") android.util.Log.d("MainViewModel", "Progress update: $round / $totalRounds")
// Update totalRounds from Go library (keygen=4, sign=9)
_totalRounds.value = totalRoundsFromGo
// Update the appropriate round state based on which session type is active // Update the appropriate round state based on which session type is active
when { when {
// Initiator keygen (CreateWallet) // Initiator keygen (CreateWallet)
@ -342,32 +323,21 @@ class MainViewModel @Inject constructor(
} }
repository.setSessionEventCallback { event -> repository.setSessionEventCallback { event ->
try { android.util.Log.d("MainViewModel", "=== MainViewModel received session event ===")
android.util.Log.d("MainViewModel", "[IDLE_CRASH_DEBUG] === MainViewModel received session event ===") android.util.Log.d("MainViewModel", " eventType: ${event.eventType}")
android.util.Log.d("MainViewModel", "[IDLE_CRASH_DEBUG] eventType: ${event.eventType}") android.util.Log.d("MainViewModel", " sessionId: ${event.sessionId}")
android.util.Log.d("MainViewModel", "[IDLE_CRASH_DEBUG] sessionId: ${event.sessionId}") android.util.Log.d("MainViewModel", " _currentSessionId: ${_currentSessionId.value}")
android.util.Log.d("MainViewModel", "[IDLE_CRASH_DEBUG] _currentSessionId: ${_currentSessionId.value}") android.util.Log.d("MainViewModel", " pendingJoinKeygenInfo?.sessionId: ${pendingJoinKeygenInfo?.sessionId}")
android.util.Log.d("MainViewModel", "[IDLE_CRASH_DEBUG] pendingJoinKeygenInfo?.sessionId: ${pendingJoinKeygenInfo?.sessionId}") android.util.Log.d("MainViewModel", " pendingJoinSignInfo?.sessionId: ${pendingJoinSignInfo?.sessionId}")
android.util.Log.d("MainViewModel", "[IDLE_CRASH_DEBUG] pendingJoinSignInfo?.sessionId: ${pendingJoinSignInfo?.sessionId}") android.util.Log.d("MainViewModel", " _signSessionId: ${_signSessionId.value}")
android.util.Log.d("MainViewModel", "[IDLE_CRASH_DEBUG] _signSessionId: ${_signSessionId.value}") android.util.Log.d("MainViewModel", " pendingSignInitiatorInfo?.sessionId: ${pendingSignInitiatorInfo?.sessionId}")
android.util.Log.d("MainViewModel", "[IDLE_CRASH_DEBUG] pendingSignInitiatorInfo?.sessionId: ${pendingSignInitiatorInfo?.sessionId}")
when (event.eventType) { when (event.eventType) {
"session_started" -> { "session_started" -> {
// CRITICAL: Set flag immediately (synchronously) to prevent subsequent
// participant_joined events from adding duplicates. This must be the
// first line before any async operations.
sessionStartedForSession = event.sessionId
android.util.Log.d("MainViewModel", "[IDLE_CRASH_DEBUG] Session started flag set for: ${event.sessionId}")
// Check if this is for keygen initiator (CreateWallet) // Check if this is for keygen initiator (CreateWallet)
val currentSessionId = _currentSessionId.value val currentSessionId = _currentSessionId.value
if (currentSessionId != null && event.sessionId == currentSessionId) { if (currentSessionId != null && event.sessionId == currentSessionId) {
android.util.Log.d("MainViewModel", "Session started event for keygen initiator, triggering keygen") android.util.Log.d("MainViewModel", "Session started event for keygen initiator, triggering keygen")
// Ensure participant list has exactly N parties (fill if incomplete, don't add more)
if (_sessionParticipants.value.size < event.thresholdN) {
_sessionParticipants.value = (1..event.thresholdN).map { "参与方 $it" }
}
viewModelScope.launch { viewModelScope.launch {
startKeygenAsInitiator( startKeygenAsInitiator(
sessionId = currentSessionId, sessionId = currentSessionId,
@ -382,10 +352,6 @@ class MainViewModel @Inject constructor(
val joinKeygenInfo = pendingJoinKeygenInfo val joinKeygenInfo = pendingJoinKeygenInfo
if (joinKeygenInfo != null && event.sessionId == joinKeygenInfo.sessionId) { if (joinKeygenInfo != null && event.sessionId == joinKeygenInfo.sessionId) {
android.util.Log.d("MainViewModel", "Session started event for keygen joiner, triggering keygen") android.util.Log.d("MainViewModel", "Session started event for keygen joiner, triggering keygen")
// Ensure participant list has exactly N parties
if (_joinKeygenParticipants.value.size < event.thresholdN) {
_joinKeygenParticipants.value = (1..event.thresholdN).map { "参与方 $it" }
}
startKeygenAsJoiner() startKeygenAsJoiner()
} }
@ -393,10 +359,6 @@ class MainViewModel @Inject constructor(
val joinSignInfo = pendingJoinSignInfo val joinSignInfo = pendingJoinSignInfo
if (joinSignInfo != null && event.sessionId == joinSignInfo.sessionId) { if (joinSignInfo != null && event.sessionId == joinSignInfo.sessionId) {
android.util.Log.d("MainViewModel", "Session started event for sign joiner, triggering sign") android.util.Log.d("MainViewModel", "Session started event for sign joiner, triggering sign")
// Ensure participant list has exactly T parties
if (_coSignParticipants.value.size < event.thresholdT) {
_coSignParticipants.value = (1..event.thresholdT).map { "参与方 $it" }
}
startSignAsJoiner() startSignAsJoiner()
} }
@ -405,10 +367,6 @@ class MainViewModel @Inject constructor(
android.util.Log.d("MainViewModel", "Checking for sign initiator: signSessionId=$signSessionId, eventSessionId=${event.sessionId}") android.util.Log.d("MainViewModel", "Checking for sign initiator: signSessionId=$signSessionId, eventSessionId=${event.sessionId}")
if (signSessionId != null && event.sessionId == signSessionId) { if (signSessionId != null && event.sessionId == signSessionId) {
android.util.Log.d("MainViewModel", "Session started event for sign initiator, triggering sign") android.util.Log.d("MainViewModel", "Session started event for sign initiator, triggering sign")
// Ensure participant list has exactly T parties
if (_signParticipants.value.size < event.thresholdT) {
_signParticipants.value = (1..event.thresholdT).map { "参与方 $it" }
}
startSignAsInitiator(event.selectedParties) startSignAsInitiator(event.selectedParties)
} else { } else {
android.util.Log.d("MainViewModel", "NOT triggering sign initiator: signSessionId=$signSessionId, pendingSignInitiatorInfo=${pendingSignInitiatorInfo?.sessionId}") android.util.Log.d("MainViewModel", "NOT triggering sign initiator: signSessionId=$signSessionId, pendingSignInitiatorInfo=${pendingSignInitiatorInfo?.sessionId}")
@ -417,15 +375,6 @@ class MainViewModel @Inject constructor(
"party_joined", "participant_joined" -> { "party_joined", "participant_joined" -> {
android.util.Log.d("MainViewModel", "Processing participant_joined event...") android.util.Log.d("MainViewModel", "Processing participant_joined event...")
// CRITICAL: Check synchronous flag first - if session_started was already
// processed for this session, don't add more participants
// This is 100% reliable because the flag is set synchronously in session_started
// handler before any async operations, and callbacks are processed sequentially
if (sessionStartedForSession == event.sessionId) {
android.util.Log.d("MainViewModel", " Session already started for ${event.sessionId}, ignoring participant_joined")
return@setSessionEventCallback
}
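The removed guard above, together with the sessionStartedForSession assignments elsewhere in this file, implements a synchronous-flag pattern: the flag is set as the very first statement of the session_started branch, so any participant_joined event processed later in the same callback queue sees it and bails out. A minimal sketch of the pattern in isolation (class and method names are illustrative, not this repository's API):

class SessionEventHandler {
    // Set synchronously inside the session_started branch, before any coroutine is launched,
    // so later events on the same callback queue always observe it
    private var sessionStartedForSession: String? = null

    fun onSessionEvent(type: String, sessionId: String) {
        when (type) {
            "session_started" -> {
                sessionStartedForSession = sessionId   // first statement, no suspension before it
                // ...trigger keygen / sign here...
            }
            "participant_joined" -> {
                if (sessionStartedForSession == sessionId) return  // late join event, ignore
                // ...append to the participant list here...
            }
        }
    }
}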
// Update participant count for initiator's CreateWallet screen // Update participant count for initiator's CreateWallet screen
val currentSessionId = _currentSessionId.value val currentSessionId = _currentSessionId.value
android.util.Log.d("MainViewModel", " Checking for initiator: currentSessionId=$currentSessionId, eventSessionId=${event.sessionId}") android.util.Log.d("MainViewModel", " Checking for initiator: currentSessionId=$currentSessionId, eventSessionId=${event.sessionId}")
@ -506,12 +455,6 @@ class MainViewModel @Inject constructor(
} }
} }
} }
} catch (e: Exception) {
android.util.Log.e("MainViewModel", "[IDLE_CRASH_DEBUG] Exception in session event callback!")
android.util.Log.e("MainViewModel", "[IDLE_CRASH_DEBUG] Event: ${event.eventType}, sessionId: ${event.sessionId}")
android.util.Log.e("MainViewModel", "[IDLE_CRASH_DEBUG] Exception: ${e.javaClass.simpleName}: ${e.message}")
android.util.Log.e("MainViewModel", "[IDLE_CRASH_DEBUG] Stack: ${e.stackTraceToString()}")
}
} }
} }
@ -572,12 +515,9 @@ class MainViewModel @Inject constructor(
_currentSessionId.value = null _currentSessionId.value = null
_sessionParticipants.value = emptyList() _sessionParticipants.value = emptyList()
_currentRound.value = 0 _currentRound.value = 0
_totalRounds.value = 0
_publicKey.value = null _publicKey.value = null
_createdInviteCode.value = null _createdInviteCode.value = null
_hasEnteredSession.value = false _hasEnteredSession.value = false
// Reset synchronous flag for fresh session
sessionStartedForSession = null
// Reset session status to WAITING for fresh start // Reset session status to WAITING for fresh start
repository.resetSessionStatus() repository.resetSessionStatus()
} }
@ -719,11 +659,7 @@ class MainViewModel @Inject constructor(
viewModelScope.launch { viewModelScope.launch {
_uiState.update { it.copy(isLoading = true, error = null) } _uiState.update { it.copy(isLoading = true, error = null) }
// Initialize participant list with all N parties (keygen requires all parties) android.util.Log.d("MainViewModel", "Starting keygen as joiner: sessionId=${joinInfo.sessionId}, partyIndex=${joinInfo.partyIndex}")
// This ensures UI shows correct participant count even if we missed some participant_joined events
_joinKeygenParticipants.value = (1..joinInfo.thresholdN).map { "参与方 $it" }
android.util.Log.d("MainViewModel", "Starting keygen as joiner: sessionId=${joinInfo.sessionId}, partyIndex=${joinInfo.partyIndex}, thresholdN=${joinInfo.thresholdN}")
val result = repository.executeKeygenAsJoiner( val result = repository.executeKeygenAsJoiner(
sessionId = joinInfo.sessionId, sessionId = joinInfo.sessionId,
@ -770,8 +706,6 @@ class MainViewModel @Inject constructor(
pendingJoinToken = "" pendingJoinToken = ""
pendingPassword = "" pendingPassword = ""
pendingJoinKeygenInfo = null pendingJoinKeygenInfo = null
// Reset synchronous flag for fresh session
sessionStartedForSession = null
// Reset session status to WAITING for fresh start // Reset session status to WAITING for fresh start
repository.resetSessionStatus() repository.resetSessionStatus()
} }
@ -957,8 +891,6 @@ class MainViewModel @Inject constructor(
pendingCoSignInviteCode = "" pendingCoSignInviteCode = ""
pendingCoSignJoinToken = "" pendingCoSignJoinToken = ""
pendingJoinSignInfo = null pendingJoinSignInfo = null
// Reset synchronous flag for fresh session
sessionStartedForSession = null
// Reset session status to WAITING for fresh start // Reset session status to WAITING for fresh start
repository.resetSessionStatus() repository.resetSessionStatus()
} }
@ -985,79 +917,6 @@ class MainViewModel @Inject constructor(
} }
} }
// ========== Transaction Records ==========
private val _transactionRecords = MutableStateFlow<List<com.durian.tssparty.data.local.TransactionRecordEntity>>(emptyList())
val transactionRecords: StateFlow<List<com.durian.tssparty.data.local.TransactionRecordEntity>> = _transactionRecords.asStateFlow()
private val _isSyncingHistory = MutableStateFlow(false)
val isSyncingHistory: StateFlow<Boolean> = _isSyncingHistory.asStateFlow()
private val _syncResultMessage = MutableStateFlow<String?>(null)
val syncResultMessage: StateFlow<String?> = _syncResultMessage.asStateFlow()
fun clearSyncResultMessage() {
_syncResultMessage.value = null
}
/**
* Load the transaction records for a wallet
*/
fun loadTransactionRecords(shareId: Long) {
viewModelScope.launch {
repository.getTransactionRecords(shareId).collect { records ->
_transactionRecords.value = records
}
}
}
/**
* Sync all historical transactions for a wallet
* Called when a wallet is imported for the first time
*/
fun syncTransactionHistory(shareId: Long, address: String) {
viewModelScope.launch {
_isSyncingHistory.value = true
android.util.Log.d("MainViewModel", "[SYNC] Starting transaction history sync for $address")
val rpcUrl = _settings.value.kavaRpcUrl
val networkType = _settings.value.networkType
val result = repository.syncAllTransactionHistory(shareId, address, rpcUrl, networkType)
result.fold(
onSuccess = { count ->
android.util.Log.d("MainViewModel", "[SYNC] Synced $count transactions")
_syncResultMessage.value = if (count > 0) {
"同步完成,新增 $count 条记录"
} else {
"同步完成,无新记录"
}
},
onFailure = { e ->
android.util.Log.e("MainViewModel", "[SYNC] Error syncing: ${e.message}")
_syncResultMessage.value = "同步失败: ${e.message}"
}
)
_isSyncingHistory.value = false
}
}
/**
* Confirm all pending transactions
* Called at app startup
*/
fun confirmPendingTransactions() {
viewModelScope.launch {
val rpcUrl = _settings.value.kavaRpcUrl
val pendingRecords = repository.getPendingTransactions()
android.util.Log.d("MainViewModel", "[TX-CONFIRM] Found ${pendingRecords.size} pending transactions")
for (record in pendingRecords) {
repository.confirmTransaction(record.txHash, rpcUrl)
}
}
}
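The three entry points removed in this section are called from different places in the diff: the import flow triggers the full history sync, and app startup re-checks anything still pending. A usage-order sketch against a hypothetical facade (the interface is invented for illustration; only the method names match the removed code):

interface TransactionHistoryActions {
    fun loadTransactionRecords(shareId: Long)
    fun syncTransactionHistory(shareId: Long, address: String)
    fun confirmPendingTransactions()
}

fun onAppStart(actions: TransactionHistoryActions) {
    // Re-check transactions that were still pending when the app last closed
    actions.confirmPendingTransactions()
}

fun onWalletImported(actions: TransactionHistoryActions, shareId: Long, address: String) {
    // Observe the local records, then backfill history from the chain
    actions.loadTransactionRecords(shareId)
    actions.syncTransactionHistory(shareId, address)
}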
// ========== Share Export/Import ========== // ========== Share Export/Import ==========
private val _exportResult = MutableStateFlow<ExportImportResult?>(null) private val _exportResult = MutableStateFlow<ExportImportResult?>(null)
@ -1072,30 +931,19 @@ class MainViewModel @Inject constructor(
* @return The backup JSON string on success * @return The backup JSON string on success
*/ */
fun exportShareBackup(shareId: Long, onSuccess: (String) -> Unit) { fun exportShareBackup(shareId: Long, onSuccess: (String) -> Unit) {
android.util.Log.d("MainViewModel", "[EXPORT] ========== exportShareBackup called ==========")
android.util.Log.d("MainViewModel", "[EXPORT] shareId: $shareId")
viewModelScope.launch { viewModelScope.launch {
android.util.Log.d("MainViewModel", "[EXPORT] Setting loading state...")
_exportResult.value = ExportImportResult(isLoading = true) _exportResult.value = ExportImportResult(isLoading = true)
android.util.Log.d("MainViewModel", "[EXPORT] Calling repository.exportShareBackup...")
val result = repository.exportShareBackup(shareId) val result = repository.exportShareBackup(shareId)
android.util.Log.d("MainViewModel", "[EXPORT] Repository returned, isSuccess: ${result.isSuccess}")
result.fold( result.fold(
onSuccess = { json -> onSuccess = { json ->
android.util.Log.d("MainViewModel", "[EXPORT] Export succeeded, json length: ${json.length}")
android.util.Log.d("MainViewModel", "[EXPORT] Setting success state and calling onSuccess callback...")
_exportResult.value = ExportImportResult(isSuccess = true) _exportResult.value = ExportImportResult(isSuccess = true)
android.util.Log.d("MainViewModel", "[EXPORT] Calling onSuccess callback with json...")
onSuccess(json) onSuccess(json)
android.util.Log.d("MainViewModel", "[EXPORT] onSuccess callback completed")
}, },
onFailure = { e -> onFailure = { e ->
android.util.Log.e("MainViewModel", "[EXPORT] Export failed: ${e.message}", e)
_exportResult.value = ExportImportResult(error = e.message ?: "导出失败") _exportResult.value = ExportImportResult(error = e.message ?: "导出失败")
} }
) )
android.util.Log.d("MainViewModel", "[EXPORT] ========== exportShareBackup finished ==========")
} }
} }
@ -1104,46 +952,27 @@ class MainViewModel @Inject constructor(
* @param backupJson The backup JSON string to import * @param backupJson The backup JSON string to import
*/ */
fun importShareBackup(backupJson: String) { fun importShareBackup(backupJson: String) {
android.util.Log.d("MainViewModel", "[IMPORT] ========== importShareBackup called ==========")
android.util.Log.d("MainViewModel", "[IMPORT] JSON length: ${backupJson.length}")
android.util.Log.d("MainViewModel", "[IMPORT] JSON preview: ${backupJson.take(100)}...")
viewModelScope.launch { viewModelScope.launch {
android.util.Log.d("MainViewModel", "[IMPORT] Setting loading state...")
_importResult.value = ExportImportResult(isLoading = true) _importResult.value = ExportImportResult(isLoading = true)
android.util.Log.d("MainViewModel", "[IMPORT] Calling repository.importShareBackup...")
val result = repository.importShareBackup(backupJson) val result = repository.importShareBackup(backupJson)
android.util.Log.d("MainViewModel", "[IMPORT] Repository returned, isSuccess: ${result.isSuccess}")
result.fold( result.fold(
onSuccess = { share -> onSuccess = { share ->
android.util.Log.d("MainViewModel", "[IMPORT] Import succeeded:")
android.util.Log.d("MainViewModel", "[IMPORT] - id: ${share.id}")
android.util.Log.d("MainViewModel", "[IMPORT] - address: ${share.address}")
android.util.Log.d("MainViewModel", "[IMPORT] - partyId: ${share.partyId}")
_importResult.value = ExportImportResult( _importResult.value = ExportImportResult(
isSuccess = true, isSuccess = true,
message = "已成功导入钱包 (${share.address.take(10)}...)" message = "已成功导入钱包 (${share.address.take(10)}...)"
) )
// Update wallet count // Update wallet count
android.util.Log.d("MainViewModel", "[IMPORT] Updating wallet count...")
_appState.update { state -> _appState.update { state ->
state.copy(walletCount = state.walletCount + 1) state.copy(walletCount = state.walletCount + 1)
} }
// Fetch balance for the imported wallet // Fetch balance for the imported wallet
android.util.Log.d("MainViewModel", "[IMPORT] Fetching balance...")
fetchBalanceForShare(share) fetchBalanceForShare(share)
// Sync transaction history from blockchain (first-time import)
android.util.Log.d("MainViewModel", "[IMPORT] Starting transaction history sync...")
syncTransactionHistory(share.id, share.address)
android.util.Log.d("MainViewModel", "[IMPORT] Import complete!")
}, },
onFailure = { e -> onFailure = { e ->
android.util.Log.e("MainViewModel", "[IMPORT] Import failed: ${e.message}", e)
_importResult.value = ExportImportResult(error = e.message ?: "导入失败") _importResult.value = ExportImportResult(error = e.message ?: "导入失败")
} }
) )
android.util.Log.d("MainViewModel", "[IMPORT] ========== importShareBackup finished ==========")
} }
} }
@ -1453,95 +1282,9 @@ class MainViewModel @Inject constructor(
} }
} }
// ========== 2-of-3 server-participation option (new feature) ==========
// Added: 2026-01-27
// Reason: allow a 2-of-3 user who has lost one device to move assets out by having the server participate in signing
// Scope: purely additive; does not change the existing initiateSignSession
// Rollback: delete this method and the related UI code
/**
* Create a signing session with optional server participation.
*
* New method; the existing initiateSignSession is not modified.
* Only called when the UI determines the wallet is 2-of-3 and the user explicitly opts in.
*
* @param shareId wallet ID
* @param password wallet password
* @param initiatorName initiator display name
* @param includeServerBackup whether the server backup party participates (new parameter)
*
* Usage scenario:
* - A 2-of-3 user has lost one device
* - The user checks the "include server backup" option
* - Signing completes with the remaining device plus the server
*
* Security guarantees:
* - The UI shows this option only for 2-of-3 wallets
* - The user must explicitly opt in
* - The server holds only 1 key share, which is less than t=2
*/
fun initiateSignSessionWithOptions(
shareId: Long,
password: String,
initiatorName: String = "发起者",
includeServerBackup: Boolean = false // New parameter
) {
viewModelScope.launch {
_uiState.update { it.copy(isLoading = true, error = null) }
val tx = _preparedTx.value
if (tx == null) {
_uiState.update { it.copy(isLoading = false, error = "交易未准备") }
return@launch
}
android.util.Log.d("MainViewModel", "[SIGN-OPTIONS] Initiating sign session with includeServerBackup=$includeServerBackup")
// Call the new repository method
val result = repository.createSignSessionWithOptions(
shareId = shareId,
messageHash = tx.signHash,
password = password,
initiatorName = initiatorName,
includeServerBackup = includeServerBackup // Pass the new parameter through
)
result.fold(
onSuccess = { sessionResult ->
_signSessionId.value = sessionResult.sessionId
_signInviteCode.value = sessionResult.inviteCode
_signParticipants.value = listOf(initiatorName)
_uiState.update { it.copy(isLoading = false) }
pendingSignInitiatorInfo = PendingSignInitiatorInfo(
sessionId = sessionResult.sessionId,
shareId = shareId,
password = password
)
android.util.Log.d("MainViewModel", "[SIGN-OPTIONS] Sign session created with server=${includeServerBackup}, sessionId=${sessionResult.sessionId}")
if (sessionResult.sessionAlreadyInProgress) {
android.util.Log.d("MainViewModel", "[SIGN-OPTIONS] Session already in_progress, triggering sign immediately")
startSigningProcess(sessionResult.sessionId, shareId, password)
}
},
onFailure = { e ->
android.util.Log.e("MainViewModel", "[SIGN-OPTIONS] Failed to create sign session: ${e.message}")
_uiState.update { it.copy(isLoading = false, error = e.message) }
}
)
}
}
// ========== End of 2-of-3 server-participation option ==========
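For readers tracing how the removed method above was reached: only the 2-of-3 plus explicit opt-in case goes through it, and everything else stays on the existing path. A sketch of that call-site decision (SignActions is a stand-in interface invented for the example; only the method names and the 2-of-3 condition come from the diff):

interface SignActions {
    fun initiateSignSession(shareId: Long, password: String, initiatorName: String = "发起者")
    fun initiateSignSessionWithOptions(
        shareId: Long,
        password: String,
        initiatorName: String = "发起者",
        includeServerBackup: Boolean = false
    )
}

fun startSign(
    actions: SignActions,
    shareId: Long,
    password: String,
    thresholdT: Int,
    thresholdN: Int,
    userOptedIn: Boolean
) {
    if (thresholdT == 2 && thresholdN == 3 && userOptedIn) {
        // Only the 2-of-3 + explicit opt-in case takes the server-participation path
        actions.initiateSignSessionWithOptions(shareId, password, includeServerBackup = true)
    } else {
        actions.initiateSignSession(shareId, password)   // existing path, unchanged
    }
}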
/** /**
* Start sign as initiator (called when session_started event is received) * Start sign as initiator (called when session_started event is received)
* Matches Electron's handleCoSignStart for initiator * Matches Electron's handleCoSignStart for initiator
*
* CRITICAL: This method includes a re-entrancy check to prevent double execution
* Race condition fix: TssRepository may have already triggered signing via
* its session_started handler. This callback serves as a fallback.
*/ */
private fun startSignAsInitiator(selectedParties: List<String>) { private fun startSignAsInitiator(selectedParties: List<String>) {
val info = pendingSignInitiatorInfo val info = pendingSignInitiatorInfo
@ -1550,13 +1293,6 @@ class MainViewModel @Inject constructor(
return return
} }
// CRITICAL: Prevent double execution if TssRepository already started signing
// TssRepository sets signingTriggered=true when it auto-triggers from session_started
if (repository.isSigningTriggered()) {
android.util.Log.d("MainViewModel", "[RACE-FIX] Signing already triggered by TssRepository, skipping duplicate from MainViewModel")
return
}
android.util.Log.d("MainViewModel", "Starting sign as initiator: sessionId=${info.sessionId}, selectedParties=$selectedParties") android.util.Log.d("MainViewModel", "Starting sign as initiator: sessionId=${info.sessionId}, selectedParties=$selectedParties")
startSigningProcess(info.sessionId, info.shareId, info.password) startSigningProcess(info.sessionId, info.shareId, info.password)
} }
@ -1628,30 +1364,7 @@ class MainViewModel @Inject constructor(
onSuccess = { hash -> onSuccess = { hash ->
android.util.Log.d("MainViewModel", "[BROADCAST] SUCCESS! txHash=$hash") android.util.Log.d("MainViewModel", "[BROADCAST] SUCCESS! txHash=$hash")
_txHash.value = hash _txHash.value = hash
// Save the transaction record to the local database
val state = _transferState.value
android.util.Log.d("MainViewModel", "[BROADCAST] Saving transaction record: shareId=${state.shareId}, tokenType=${state.tokenType}")
try {
repository.saveTransactionRecord(
shareId = state.shareId,
fromAddress = tx.from,
toAddress = tx.to,
amount = state.amount,
tokenType = state.tokenType,
txHash = hash,
gasPrice = tx.gasPrice.toString()
)
android.util.Log.d("MainViewModel", "[BROADCAST] Transaction record saved successfully")
// Kick off background confirmation of the transaction status
confirmTransactionInBackground(hash, rpcUrl)
_uiState.update { it.copy(isLoading = false, successMessage = "交易已广播!") } _uiState.update { it.copy(isLoading = false, successMessage = "交易已广播!") }
} catch (e: Exception) {
android.util.Log.e("MainViewModel", "[BROADCAST] Failed to save transaction record: ${e.message}", e)
_uiState.update { it.copy(isLoading = false, error = "交易已广播但保存记录失败: ${e.message}") }
}
}, },
onFailure = { e -> onFailure = { e ->
android.util.Log.e("MainViewModel", "[BROADCAST] FAILED: ${e.message}", e) android.util.Log.e("MainViewModel", "[BROADCAST] FAILED: ${e.message}", e)
@ -1661,37 +1374,6 @@ class MainViewModel @Inject constructor(
} }
} }
/**
* Confirm transaction status in the background
* Polls every 3 seconds, up to 60 attempts (about 3 minutes)
*/
private fun confirmTransactionInBackground(txHash: String, rpcUrl: String) {
viewModelScope.launch {
android.util.Log.d("MainViewModel", "[TX-CONFIRM] Starting background confirmation for $txHash")
var attempts = 0
val maxAttempts = 60
while (attempts < maxAttempts) {
kotlinx.coroutines.delay(3000) // wait 3 seconds
attempts++
val result = repository.confirmTransaction(txHash, rpcUrl)
result.fold(
onSuccess = { confirmed ->
if (confirmed) {
android.util.Log.d("MainViewModel", "[TX-CONFIRM] Transaction confirmed after $attempts attempts")
return@launch
}
},
onFailure = { e ->
android.util.Log.w("MainViewModel", "[TX-CONFIRM] Error checking confirmation: ${e.message}")
}
)
}
android.util.Log.w("MainViewModel", "[TX-CONFIRM] Max attempts reached, transaction may still be pending")
}
}
/** /**
* Reset transfer state * Reset transfer state
*/ */
@ -1705,8 +1387,6 @@ class MainViewModel @Inject constructor(
_signature.value = null _signature.value = null
_txHash.value = null _txHash.value = null
pendingSignInitiatorInfo = null pendingSignInitiatorInfo = null
// Reset synchronous flag for fresh session
sessionStartedForSession = null
// Reset session status to WAITING for fresh start // Reset session status to WAITING for fresh start
repository.resetSessionStatus() repository.resetSessionStatus()
} }

View File

@ -1,10 +1,6 @@
package com.durian.tssparty.util package com.durian.tssparty.util
import com.durian.tssparty.domain.model.ERC20Selectors
import com.durian.tssparty.domain.model.EnergyPointsToken
import com.durian.tssparty.domain.model.FuturePointsToken
import com.durian.tssparty.domain.model.GreenPointsToken import com.durian.tssparty.domain.model.GreenPointsToken
import com.durian.tssparty.domain.model.TokenConfig
import com.durian.tssparty.domain.model.TokenType import com.durian.tssparty.domain.model.TokenType
import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext import kotlinx.coroutines.withContext
@ -23,50 +19,13 @@ import java.util.concurrent.TimeUnit
*/ */
object TransactionUtils { object TransactionUtils {
/**
* HTTP client for blockchain RPC calls
*
* Architecture/safety fix - configure a connection pool to prevent resource leaks
*
* Connection pool parameters limit resource usage:
* - maxIdleConnections: 5 (keep at most 5 idle connections)
* - keepAliveDuration: 5 minutes (how long idle connections stay alive)
*
* Note: TransactionUtils is a singleton object whose lifecycle matches the application.
* If the app needs to fully release resources, call cleanup().
*/
private val client = OkHttpClient.Builder() private val client = OkHttpClient.Builder()
.connectTimeout(30, TimeUnit.SECONDS) .connectTimeout(30, TimeUnit.SECONDS)
.readTimeout(30, TimeUnit.SECONDS) .readTimeout(30, TimeUnit.SECONDS)
.connectionPool(okhttp3.ConnectionPool(
maxIdleConnections = 5,
keepAliveDuration = 5,
timeUnit = TimeUnit.MINUTES
))
.build() .build()
private val jsonMediaType = "application/json; charset=utf-8".toMediaType() private val jsonMediaType = "application/json; charset=utf-8".toMediaType()
/**
* Cleanup OkHttpClient resources
*
* Architecture/safety fix - provide a resource-cleanup method
*
* Although TransactionUtils is a singleton object, this method allows:
* 1. Releasing resources in test environments
* 2. Releasing resources when the app exits completely
* 3. Proactive cleanup under memory pressure
*/
fun cleanup() {
try {
client.connectionPool.evictAll()
client.dispatcher.executorService.shutdown()
client.cache?.close()
} catch (e: Exception) {
// Fail silently; this is only a cleanup operation
}
}
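A hypothetical use of cleanup() matching the "test environment" case listed above, assuming JUnit 4 is on the classpath (the test class is invented; TransactionUtils.cleanup() is the method from the removed lines):

import org.junit.After

class TransactionUtilsResourceTest {
    @After
    fun tearDown() {
        // Evicts pooled connections and shuts down the dispatcher's executor between tests
        TransactionUtils.cleanup()
    }
}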
// Chain IDs // Chain IDs
const val KAVA_TESTNET_CHAIN_ID = 2221 const val KAVA_TESTNET_CHAIN_ID = 2221
const val KAVA_MAINNET_CHAIN_ID = 2222 const val KAVA_MAINNET_CHAIN_ID = 2222
@ -102,7 +61,7 @@ object TransactionUtils {
/** /**
* Prepare a transaction for signing * Prepare a transaction for signing
* Gets nonce, gas price, estimates gas, and calculates sign hash * Gets nonce, gas price, estimates gas, and calculates sign hash
* Supports both native KAVA transfers and ERC-20 token transfers (绿积分/积分股/积分值) * Supports both native KAVA transfers and ERC-20 token transfers (绿积分)
*/ */
suspend fun prepareTransaction(params: TransactionParams): Result<PreparedTransaction> = withContext(Dispatchers.IO) { suspend fun prepareTransaction(params: TransactionParams): Result<PreparedTransaction> = withContext(Dispatchers.IO) {
try { try {
@ -118,16 +77,13 @@ object TransactionUtils {
// Native KAVA transfer // Native KAVA transfer
Triple(params.to, kavaToWei(params.amount), ByteArray(0)) Triple(params.to, kavaToWei(params.amount), ByteArray(0))
} }
TokenType.GREEN_POINTS, TokenType.ENERGY_POINTS, TokenType.FUTURE_POINTS -> { TokenType.GREEN_POINTS -> {
// ERC-20 token transfer // ERC-20 token transfer (绿积分)
// To address is the contract, value is 0 // To address is the contract, value is 0
// Data is transfer(recipient, amount) encoded // Data is transfer(recipient, amount) encoded
val contractAddress = TokenConfig.getContractAddress(params.tokenType) val tokenAmount = greenPointsToRaw(params.amount)
?: return@withContext Result.failure(Exception("Invalid token type"))
val decimals = TokenConfig.getDecimals(params.tokenType)
val tokenAmount = tokenToRaw(params.amount, decimals)
val transferData = encodeErc20Transfer(params.to, tokenAmount) val transferData = encodeErc20Transfer(params.to, tokenAmount)
Triple(contractAddress, BigInteger.ZERO, transferData) Triple(GreenPointsToken.CONTRACT_ADDRESS, BigInteger.ZERO, transferData)
} }
} }
@ -142,7 +98,7 @@ object TransactionUtils {
// Default gas limits // Default gas limits
when (params.tokenType) { when (params.tokenType) {
TokenType.KAVA -> BigInteger.valueOf(21000) TokenType.KAVA -> BigInteger.valueOf(21000)
else -> BigInteger.valueOf(65000) // ERC-20 transfers need more gas TokenType.GREEN_POINTS -> BigInteger.valueOf(65000) // ERC-20 transfers need more gas
} }
} }
@ -183,7 +139,7 @@ object TransactionUtils {
*/ */
private fun encodeErc20Transfer(to: String, amount: BigInteger): ByteArray { private fun encodeErc20Transfer(to: String, amount: BigInteger): ByteArray {
// Function selector: transfer(address,uint256) = 0xa9059cbb // Function selector: transfer(address,uint256) = 0xa9059cbb
val selector = ERC20Selectors.TRANSFER.removePrefix("0x").hexToByteArray() val selector = GreenPointsToken.TRANSFER_SELECTOR.removePrefix("0x").hexToByteArray()
// Encode recipient address (padded to 32 bytes) // Encode recipient address (padded to 32 bytes)
val paddedAddress = to.removePrefix("0x").lowercase().padStart(64, '0').hexToByteArray() val paddedAddress = to.removePrefix("0x").lowercase().padStart(64, '0').hexToByteArray()
@ -196,43 +152,21 @@ object TransactionUtils {
} }
/** /**
* Convert token amount to raw units based on decimals * Convert Green Points amount to raw units (6 decimals)
* @param amount Human-readable amount (e.g., "100.5")
* @param decimals Token decimals (e.g., 6 for USDT-like tokens, 18 for native)
*/ */
fun tokenToRaw(amount: String, decimals: Int): BigInteger { fun greenPointsToRaw(amount: String): BigInteger {
val decimal = BigDecimal(amount) val decimal = BigDecimal(amount)
val multiplier = BigDecimal.TEN.pow(decimals) val rawDecimal = decimal.multiply(BigDecimal("1000000")) // 10^6
val rawDecimal = decimal.multiply(multiplier)
return rawDecimal.toBigInteger() return rawDecimal.toBigInteger()
} }
/**
* Convert raw units to human-readable token amount
* @param raw Raw amount in smallest units
* @param decimals Token decimals (e.g., 6 for USDT-like tokens, 18 for native)
*/
fun rawToToken(raw: BigInteger, decimals: Int): String {
val rawDecimal = BigDecimal(raw)
val divisor = BigDecimal.TEN.pow(decimals)
val displayDecimal = rawDecimal.divide(divisor, decimals, java.math.RoundingMode.DOWN)
return displayDecimal.toPlainString()
}
/**
* Convert Green Points amount to raw units (6 decimals)
* @deprecated Use tokenToRaw(amount, 6) instead
*/
fun greenPointsToRaw(amount: String): BigInteger {
return tokenToRaw(amount, GreenPointsToken.DECIMALS)
}
/** /**
* Convert raw units to Green Points display amount * Convert raw units to Green Points display amount
* @deprecated Use rawToToken(raw, 6) instead
*/ */
fun rawToGreenPoints(raw: BigInteger): String { fun rawToGreenPoints(raw: BigInteger): String {
return rawToToken(raw, GreenPointsToken.DECIMALS) val rawDecimal = BigDecimal(raw)
val displayDecimal = rawDecimal.divide(BigDecimal("1000000"), 6, java.math.RoundingMode.DOWN)
return displayDecimal.toPlainString()
} }
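The conversion and encoding changes in this file come down to two mechanical steps: scale the human-readable amount by 10^decimals, and assemble transfer() calldata as the 4-byte 0xa9059cbb selector followed by two 32-byte words. A self-contained worked sketch (the recipient address and amount are invented; the 6-decimal assumption and the selector come from the code above):

import java.math.BigDecimal

fun main() {
    val decimals = 6
    val human = "12.5"                                    // human-readable token amount
    val raw = BigDecimal(human)
        .multiply(BigDecimal.TEN.pow(decimals))
        .toBigInteger()                                   // 12500000 raw units

    val selector = "a9059cbb"                             // transfer(address,uint256)
    val to = "0x1111111111111111111111111111111111111111" // invented recipient
    val paddedTo = to.removePrefix("0x").lowercase().padStart(64, '0')
    val paddedAmount = raw.toString(16).padStart(64, '0')

    // 4-byte selector + 32-byte address word + 32-byte amount word
    println("0x$selector$paddedTo$paddedAmount")
}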
/** /**

View File

@ -75,20 +75,6 @@ echo [INFO] Using SDK from local.properties
type local.properties type local.properties
echo. echo.
:: Parse rebuild argument early - must happen BEFORE checking tsslib.aar
set REBUILD_REQUESTED=0
if "%1"=="rebuild" (
set REBUILD_REQUESTED=1
echo [INFO] Rebuild requested - deleting tsslib.aar to recompile Go code...
if exist "app\libs\tsslib.aar" (
del /f "app\libs\tsslib.aar"
echo [INFO] tsslib.aar deleted, will be rebuilt
) else (
echo [INFO] tsslib.aar not found, will be built fresh
)
echo.
)
:: Check and build tsslib.aar if needed :: Check and build tsslib.aar if needed
if not exist "app\libs\tsslib.aar" ( if not exist "app\libs\tsslib.aar" (
echo [INFO] tsslib.aar not found, attempting to build TSS library... echo [INFO] tsslib.aar not found, attempting to build TSS library...
@ -197,14 +183,8 @@ set BUILD_TYPE=all
if "%1"=="debug" set BUILD_TYPE=debug if "%1"=="debug" set BUILD_TYPE=debug
if "%1"=="release" set BUILD_TYPE=release if "%1"=="release" set BUILD_TYPE=release
if "%1"=="clean" set BUILD_TYPE=clean if "%1"=="clean" set BUILD_TYPE=clean
if "%1"=="rebuild" set BUILD_TYPE=rebuild
if "%1"=="help" goto :show_help if "%1"=="help" goto :show_help
:: Handle rebuild - aar deletion already done above, just set build type
if "%BUILD_TYPE%"=="rebuild" (
set BUILD_TYPE=all
)
:: Show build type :: Show build type
echo Build type: %BUILD_TYPE% echo Build type: %BUILD_TYPE%
echo. echo.
@ -295,16 +275,14 @@ echo Options:
echo debug - Build debug APK only echo debug - Build debug APK only
echo release - Build release APK only echo release - Build release APK only
echo all - Build both debug and release APKs (default) echo all - Build both debug and release APKs (default)
echo clean - Clean Gradle build files echo clean - Clean build files
echo rebuild - Delete tsslib.aar and rebuild everything (use after Go code changes)
echo help - Show this help message echo help - Show this help message
echo. echo.
echo Examples: echo Examples:
echo build-apk.bat - Build both APKs echo build-apk.bat - Build both APKs
echo build-apk.bat debug - Build debug APK only echo build-apk.bat debug - Build debug APK only
echo build-apk.bat release - Build release APK only echo build-apk.bat release - Build release APK only
echo build-apk.bat clean - Clean Gradle project echo build-apk.bat clean - Clean project
echo build-apk.bat rebuild - Recompile Go code and build APKs
echo. echo.
:end :end

View File

@ -1,147 +0,0 @@
@echo off
chcp 65001 >nul 2>&1
setlocal enabledelayedexpansion
echo ========================================
echo Build - Install - Launch - Debug
echo ========================================
echo.
:: Check for rebuild flag
if "%1"=="rebuild" (
echo [0/5] Rebuild requested - deleting tsslib.aar to recompile Go code...
if exist "app\libs\tsslib.aar" (
del /f "app\libs\tsslib.aar"
echo [INFO] tsslib.aar deleted, will be rebuilt
) else (
echo [INFO] tsslib.aar not found, will be built fresh
)
echo.
:: Build tsslib.aar
echo [0/5] Building tsslib.aar...
:: Get GOPATH for bin directory
for /f "tokens=*" %%G in ('go env GOPATH') do set "GOPATH_DIR=%%G"
if not defined GOPATH_DIR set "GOPATH_DIR=%USERPROFILE%\go"
set "GOBIN_DIR=!GOPATH_DIR!\bin"
:: Add GOPATH/bin to PATH if not already there
echo !PATH! | findstr /i /c:"!GOBIN_DIR!" >nul 2>nul
if !errorlevel! neq 0 (
set "PATH=!PATH!;!GOBIN_DIR!"
)
pushd tsslib
"!GOBIN_DIR!\gomobile.exe" bind -target=android -androidapi 21 -o "..\app\libs\tsslib.aar" .
if !errorlevel! neq 0 (
echo [ERROR] gomobile bind failed!
popd
pause
exit /b 1
)
popd
echo [SUCCESS] tsslib.aar rebuilt!
for %%F in ("app\libs\tsslib.aar") do echo Size: %%~zF bytes
echo.
)
:: Show help
if "%1"=="help" (
echo Usage: build-install-debug.bat [option]
echo.
echo Options:
echo rebuild - Delete and rebuild tsslib.aar before building APK
echo help - Show this help message
echo.
echo Examples:
echo build-install-debug.bat - Build and install debug APK
echo build-install-debug.bat rebuild - Rebuild Go code, then build and install
echo.
pause
exit /b 0
)
:: Step 1: Build Debug APK
echo [1/5] Building Debug APK...
call gradlew.bat assembleDebug --no-daemon
if %errorlevel% neq 0 (
echo [ERROR] Build failed!
pause
exit /b 1
)
echo [SUCCESS] Build completed!
echo.
:: Step 2: Check device connection
echo [2/5] Checking device connection...
adb devices
adb devices | find "device" | find /v "List" >nul
if %errorlevel% neq 0 (
echo [ERROR] No device detected! Please connect your phone and enable USB debugging.
pause
exit /b 1
)
echo [SUCCESS] Device connected!
echo.
:: Step 3: Uninstall old version (to avoid signature conflicts)
echo [3/5] Uninstalling old version (if exists)...
adb uninstall com.durian.tssparty 2>nul
echo Done!
echo.
:: Step 4: Install APK
echo [4/5] Installing APK...
adb install app\build\outputs\apk\debug\app-debug.apk
if %errorlevel% neq 0 (
echo [ERROR] Installation failed!
pause
exit /b 1
)
echo [SUCCESS] Installation completed!
echo.
:: Step 5: Launch app
echo [5/5] Launching app...
adb shell am start -n com.durian.tssparty/.MainActivity
if %errorlevel% neq 0 (
echo [ERROR] Launch failed!
pause
exit /b 1
)
echo [SUCCESS] App launched!
echo.
:: Clear old logs
echo Clearing old logs...
adb logcat -c
echo.
:: Show instructions
echo ========================================
echo App successfully launched!
echo ========================================
echo.
echo Starting log monitoring...
echo.
echo Key log tags:
echo - MainViewModel (ViewModel layer)
echo - TssRepository (Repository layer)
echo - GrpcClient (Network communication)
echo - TssNativeBridge (TSS native library)
echo - AndroidRuntime (Crash logs)
echo.
echo Press Ctrl+C to stop log monitoring
echo.
timeout /t 2 /nobreak >nul
:: Start monitoring logs
adb logcat -v time MainViewModel:D TssRepository:D GrpcClient:D TssNativeBridge:D AndroidRuntime:E *:S
:: If user stops log monitoring
echo.
echo Log monitoring stopped.
echo.
pause

View File

@ -393,17 +393,6 @@ func SendIncomingMessage(fromPartyIndex int, isBroadcast bool, payloadBase64 str
return fmt.Errorf("failed to parse message: %w", err) return fmt.Errorf("failed to parse message: %w", err)
} }
// Extract round from incoming message and update progress
// This ensures progress updates on both sending and receiving messages
totalRounds := 4 // GG20 keygen has 4 rounds
if !session.isKeygen {
totalRounds = 9 // GG20 signing has 9 rounds
}
currentRound := extractRoundFromMessageType(parsedMsg.Type())
if currentRound > 0 {
session.callback.OnProgress(currentRound, totalRounds)
}
go func() { go func() {
_, err := session.localParty.Update(parsedMsg) _, err := session.localParty.Update(parsedMsg)
if err != nil { if err != nil {

View File

@ -821,21 +821,6 @@ async function handleCoSignStart(event: {
// 标记签名开始 // 标记签名开始
signInProgressSessionId = event.sessionId; signInProgressSessionId = event.sessionId;
// CRITICAL: Get the original partyId from keygen (stored in share) for signing
// This is essential for backup/restore - the partyId must match what was used during keygen
const share = database?.getShare(activeCoSignSession.shareId, activeCoSignSession.sharePassword);
if (!share) {
debugLog.error('main', 'Failed to get share data');
mainWindow?.webContents.send(`cosign:events:${event.sessionId}`, {
type: 'failed',
error: 'Failed to get share data',
});
signInProgressSessionId = null;
return;
}
const signingPartyId = share.party_id || grpcClient?.getPartyId() || '';
debugLog.info('main', `Using signingPartyId=${signingPartyId} (currentDevicePartyId=${grpcClient?.getPartyId()})`);
// 打印当前 activeCoSignSession.participants 状态 // 打印当前 activeCoSignSession.participants 状态
console.log('[CO-SIGN] Current activeCoSignSession.participants before update:', console.log('[CO-SIGN] Current activeCoSignSession.participants before update:',
activeCoSignSession.participants.map(p => ({ activeCoSignSession.participants.map(p => ({
@ -847,9 +832,8 @@ async function handleCoSignStart(event: {
// 从 event.selectedParties 更新参与者列表 // 从 event.selectedParties 更新参与者列表
// 优先使用 activeCoSignSession.participants 中的 partyIndex来自 signingParties 或 other_parties // 优先使用 activeCoSignSession.participants 中的 partyIndex来自 signingParties 或 other_parties
// CRITICAL: Use signingPartyId (original from keygen) for identification
if (event.selectedParties && event.selectedParties.length > 0) { if (event.selectedParties && event.selectedParties.length > 0) {
const myPartyId = signingPartyId; const myPartyId = grpcClient?.getPartyId();
const updatedParticipants: Array<{ partyId: string; partyIndex: number; name: string }> = []; const updatedParticipants: Array<{ partyId: string; partyIndex: number; name: string }> = [];
event.selectedParties.forEach((partyId) => { event.selectedParties.forEach((partyId) => {
@ -885,11 +869,21 @@ async function handleCoSignStart(event: {
}))); })));
} }
// Note: share already fetched above for getting signingPartyId // 获取 share 数据
const share = database?.getShare(activeCoSignSession.shareId, activeCoSignSession.sharePassword);
if (!share) {
debugLog.error('main', 'Failed to get share data');
mainWindow?.webContents.send(`cosign:events:${event.sessionId}`, {
type: 'failed',
error: 'Failed to get share data',
});
signInProgressSessionId = null;
return;
}
console.log('[CO-SIGN] Calling tssHandler.participateSign with:', { console.log('[CO-SIGN] Calling tssHandler.participateSign with:', {
sessionId: activeCoSignSession.sessionId, sessionId: activeCoSignSession.sessionId,
partyId: signingPartyId, // CRITICAL: Use signingPartyId (original from keygen) partyId: grpcClient?.getPartyId(),
partyIndex: activeCoSignSession.partyIndex, partyIndex: activeCoSignSession.partyIndex,
participants: activeCoSignSession.participants.map(p => ({ partyId: p.partyId.substring(0, 8), partyIndex: p.partyIndex })), participants: activeCoSignSession.participants.map(p => ({ partyId: p.partyId.substring(0, 8), partyIndex: p.partyIndex })),
threshold: activeCoSignSession.threshold, threshold: activeCoSignSession.threshold,
@ -898,10 +892,9 @@ async function handleCoSignStart(event: {
debugLog.info('tss', `Starting sign for session ${event.sessionId}...`); debugLog.info('tss', `Starting sign for session ${event.sessionId}...`);
try { try {
// CRITICAL: Use signingPartyId (original partyId from keygen) for signing
const result = await (tssHandler as TSSHandler).participateSign( const result = await (tssHandler as TSSHandler).participateSign(
activeCoSignSession.sessionId, activeCoSignSession.sessionId,
signingPartyId, // CRITICAL: Use original partyId from keygen for backup/restore to work grpcClient?.getPartyId() || '',
activeCoSignSession.partyIndex, activeCoSignSession.partyIndex,
activeCoSignSession.participants, activeCoSignSession.participants,
activeCoSignSession.threshold, activeCoSignSession.threshold,
@ -1620,9 +1613,9 @@ function setupIpcHandlers() {
initiatorName?: string; initiatorName?: string;
}) => { }) => {
try { try {
// 获取当前 party ID (用于检查连接状态) // 获取当前 party ID
const currentDevicePartyId = grpcClient?.getPartyId(); const partyId = grpcClient?.getPartyId();
if (!currentDevicePartyId) { if (!partyId) {
return { success: false, error: '请先连接到消息路由器' }; return { success: false, error: '请先连接到消息路由器' };
} }
@ -1632,11 +1625,6 @@ function setupIpcHandlers() {
return { success: false, error: 'Share 不存在或密码错误' }; return { success: false, error: 'Share 不存在或密码错误' };
} }
// CRITICAL: Use the original partyId from keygen (stored in share) for signing
// This is essential for backup/restore - the partyId must match what was used during keygen
const partyId = share.party_id || currentDevicePartyId;
debugLog.info('main', `Initiator using partyId=${partyId} (currentDevicePartyId=${currentDevicePartyId})`);
// 从后端获取 keygen 会话的参与者信息(包含正确的 party_index // 从后端获取 keygen 会话的参与者信息(包含正确的 party_index
const keygenStatus = await accountClient?.getSessionStatus(share.session_id); const keygenStatus = await accountClient?.getSessionStatus(share.session_id);
if (!keygenStatus?.participants || keygenStatus.participants.length === 0) { if (!keygenStatus?.participants || keygenStatus.participants.length === 0) {
@ -1822,8 +1810,8 @@ function setupIpcHandlers() {
parties?: Array<{ party_id: string; party_index: number }>; parties?: Array<{ party_id: string; party_index: number }>;
}) => { }) => {
try { try {
const currentDevicePartyId = grpcClient?.getPartyId(); const partyId = grpcClient?.getPartyId();
if (!currentDevicePartyId) { if (!partyId) {
return { success: false, error: '请先连接到消息路由器' }; return { success: false, error: '请先连接到消息路由器' };
} }
@ -1833,12 +1821,9 @@ function setupIpcHandlers() {
return { success: false, error: 'Share 不存在或密码错误' }; return { success: false, error: 'Share 不存在或密码错误' };
} }
// CRITICAL: Use the original partyId from keygen (stored in share) for signing debugLog.info('grpc', `Joining co-sign session: sessionId=${params.sessionId}, partyId=${partyId}`);
// This is essential for backup/restore - the partyId must match what was used during keygen
const signingPartyId = share.party_id || currentDevicePartyId;
debugLog.info('grpc', `Joining co-sign session: sessionId=${params.sessionId}, signingPartyId=${signingPartyId} (currentDevicePartyId=${currentDevicePartyId})`);
const result = await grpcClient?.joinSession(params.sessionId, signingPartyId, params.joinToken); const result = await grpcClient?.joinSession(params.sessionId, partyId, params.joinToken);
if (result?.success) { if (result?.success) {
// 设置活跃的 Co-Sign 会话 // 设置活跃的 Co-Sign 会话
// 优先使用 params.parties来自 validateInviteCode包含所有预期参与者 // 优先使用 params.parties来自 validateInviteCode包含所有预期参与者
@ -1847,11 +1832,10 @@ function setupIpcHandlers() {
if (params.parties && params.parties.length > 0) { if (params.parties && params.parties.length > 0) {
// 使用完整的 parties 列表 // 使用完整的 parties 列表
// CRITICAL: Use signingPartyId (original from keygen) for identification
participants = params.parties.map(p => ({ participants = params.parties.map(p => ({
partyId: p.party_id, partyId: p.party_id,
partyIndex: p.party_index, partyIndex: p.party_index,
name: p.party_id === signingPartyId ? '我' : `参与方 ${p.party_index + 1}`, name: p.party_id === partyId ? '我' : `参与方 ${p.party_index + 1}`,
})); }));
console.log('[CO-SIGN] Participant using params.parties (complete list):', participants.map(p => ({ console.log('[CO-SIGN] Participant using params.parties (complete list):', participants.map(p => ({
partyId: p.partyId.substring(0, 8), partyId: p.partyId.substring(0, 8),
@ -1866,9 +1850,9 @@ function setupIpcHandlers() {
name: `参与方 ${idx + 1}`, name: `参与方 ${idx + 1}`,
})) || []; })) || [];
// 添加自己 - CRITICAL: Use signingPartyId (original from keygen) // 添加自己
participants.push({ participants.push({
partyId: signingPartyId, partyId: partyId,
partyIndex: result.party_index, partyIndex: result.party_index,
name: '我', name: '我',
}); });
@ -1902,11 +1886,11 @@ function setupIpcHandlers() {
messageHash: params.messageHash, messageHash: params.messageHash,
}); });
// 预订阅消息流 - CRITICAL: Use signingPartyId (original from keygen) // 预订阅消息流
if (tssHandler && 'prepareForSign' in tssHandler) { if (tssHandler && 'prepareForSign' in tssHandler) {
try { try {
debugLog.info('tss', `Preparing for sign: subscribing to messages for session ${params.sessionId}, signingPartyId=${signingPartyId}`); debugLog.info('tss', `Preparing for sign: subscribing to messages for session ${params.sessionId}`);
(tssHandler as TSSHandler).prepareForSign(params.sessionId, signingPartyId); (tssHandler as TSSHandler).prepareForSign(params.sessionId, partyId);
} catch (prepareErr) { } catch (prepareErr) {
debugLog.error('tss', `Failed to prepare for sign: ${(prepareErr as Error).message}`); debugLog.error('tss', `Failed to prepare for sign: ${(prepareErr as Error).message}`);
return { success: false, error: `消息订阅失败: ${(prepareErr as Error).message}` }; return { success: false, error: `消息订阅失败: ${(prepareErr as Error).message}` };
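The hunks above all hinge on one rule: TSS signing must reuse the partyId recorded at keygen time (stored with the share) rather than the partyId of the current device connection, otherwise a wallet restored from backup signs under the wrong identity and the protocol fails. A minimal sketch of that resolution order, with `StoredShare` and `GrpcClientLike` as assumed shapes inferred from the fields visible in this diff (`share.party_id`, `grpcClient.getPartyId()`):

```typescript
// Sketch only: StoredShare and GrpcClientLike are assumed shapes, inferred from this diff.
interface StoredShare {
  session_id: string;
  party_id?: string; // partyId recorded during keygen; survives backup/restore
}

interface GrpcClientLike {
  getPartyId(): string | undefined; // partyId of the current device connection
}

/**
 * Resolve the partyId to use for TSS signing.
 * Prefer the partyId stored with the share (from keygen); only fall back to the
 * device's current connection partyId when the share has none recorded.
 */
function resolveSigningPartyId(share: StoredShare, grpcClient: GrpcClientLike): string {
  const currentDevicePartyId = grpcClient.getPartyId();
  if (!currentDevicePartyId) {
    // Same guard as the handlers above: the device must be connected to the message router.
    throw new Error('not connected to the message router');
  }
  return share.party_id || currentDevicePartyId;
}
```

The same resolved value should then be passed to `participateSign`, `joinSession`, and `prepareForSign`, which is exactly what the `signingPartyId` side of the hunks above does.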

View File

@ -11,12 +11,7 @@ import {
getCurrentRpcUrl, getCurrentRpcUrl,
getGasPrice, getGasPrice,
fetchGreenPointsBalance, fetchGreenPointsBalance,
fetchEnergyPointsBalance,
fetchFuturePointsBalance,
GREEN_POINTS_TOKEN, GREEN_POINTS_TOKEN,
ENERGY_POINTS_TOKEN,
FUTURE_POINTS_TOKEN,
TOKEN_CONFIG,
type PreparedTransaction, type PreparedTransaction,
type TokenType, type TokenType,
} from '../utils/transaction'; } from '../utils/transaction';
@ -37,8 +32,6 @@ interface ShareWithAddress extends ShareItem {
evmAddress?: string; evmAddress?: string;
kavaBalance?: string; kavaBalance?: string;
greenPointsBalance?: string; greenPointsBalance?: string;
energyPointsBalance?: string;
futurePointsBalance?: string;
balanceLoading?: boolean; balanceLoading?: boolean;
} }
@ -96,30 +89,15 @@ export default function Home() {
const [isCalculatingMax, setIsCalculatingMax] = useState(false); const [isCalculatingMax, setIsCalculatingMax] = useState(false);
const [copySuccess, setCopySuccess] = useState(false); const [copySuccess, setCopySuccess] = useState(false);
// 获取当前选择代币的余额
const getTokenBalance = (share: ShareWithAddress | null, tokenType: TokenType): string => {
if (!share) return '0';
switch (tokenType) {
case 'KAVA':
return share.kavaBalance || '0';
case 'GREEN_POINTS':
return share.greenPointsBalance || '0';
case 'ENERGY_POINTS':
return share.energyPointsBalance || '0';
case 'FUTURE_POINTS':
return share.futurePointsBalance || '0';
}
};
// 计算扣除 Gas 费后的最大可转账金额 // 计算扣除 Gas 费后的最大可转账金额
const calculateMaxAmount = async () => { const calculateMaxAmount = async () => {
if (!transferShare?.evmAddress) return; if (!transferShare?.evmAddress) return;
setIsCalculatingMax(true); setIsCalculatingMax(true);
try { try {
if (TOKEN_CONFIG.isERC20(transferTokenType)) { if (transferTokenType === 'GREEN_POINTS') {
// For ERC-20 token transfers, use the full token balance (gas is paid in KAVA) // For token transfers, use the full token balance (gas is paid in KAVA)
const balance = getTokenBalance(transferShare, transferTokenType); const balance = transferShare.greenPointsBalance || '0';
setTransferAmount(balance); setTransferAmount(balance);
setTransferError(null); setTransferError(null);
} else { } else {
@ -153,8 +131,8 @@ export default function Home() {
} }
} catch (error) { } catch (error) {
console.error('Failed to calculate max amount:', error); console.error('Failed to calculate max amount:', error);
if (TOKEN_CONFIG.isERC20(transferTokenType)) { if (transferTokenType === 'GREEN_POINTS') {
setTransferAmount(getTokenBalance(transferShare, transferTokenType)); setTransferAmount(transferShare.greenPointsBalance || '0');
} else { } else {
// 如果获取 Gas 失败,使用默认估算 (1 gwei * 21000) // 如果获取 Gas 失败,使用默认估算 (1 gwei * 21000)
const defaultGasFee = 0.000021; // ~21000 * 1 gwei const defaultGasFee = 0.000021; // ~21000 * 1 gwei
@ -187,14 +165,12 @@ export default function Home() {
const updatedShares = await Promise.all( const updatedShares = await Promise.all(
sharesWithAddrs.map(async (share) => { sharesWithAddrs.map(async (share) => {
if (share.evmAddress) { if (share.evmAddress) {
// Fetch all balances in parallel // Fetch both balances in parallel
const [kavaBalance, greenPointsBalance, energyPointsBalance, futurePointsBalance] = await Promise.all([ const [kavaBalance, greenPointsBalance] = await Promise.all([
fetchKavaBalance(share.evmAddress), fetchKavaBalance(share.evmAddress),
fetchGreenPointsBalance(share.evmAddress), fetchGreenPointsBalance(share.evmAddress),
fetchEnergyPointsBalance(share.evmAddress),
fetchFuturePointsBalance(share.evmAddress),
]); ]);
return { ...share, kavaBalance, greenPointsBalance, energyPointsBalance, futurePointsBalance, balanceLoading: false }; return { ...share, kavaBalance, greenPointsBalance, balanceLoading: false };
} }
return { ...share, balanceLoading: false }; return { ...share, balanceLoading: false };
}) })
@ -339,7 +315,11 @@ export default function Home() {
return '转账金额无效'; return '转账金额无效';
} }
const amount = parseFloat(transferAmount); const amount = parseFloat(transferAmount);
const balance = parseFloat(getTokenBalance(transferShare, transferTokenType)); const balance = parseFloat(
transferTokenType === 'GREEN_POINTS'
? (transferShare?.greenPointsBalance || '0')
: (transferShare?.kavaBalance || '0')
);
if (amount > balance) { if (amount > balance) {
return '余额不足'; return '余额不足';
} }
@ -506,7 +486,7 @@ export default function Home() {
</div> </div>
)} )}
{/* 余额显示 - 所有代币 */} {/* 余额显示 - KAVA 和 绿积分 */}
{share.evmAddress && ( {share.evmAddress && (
<div className={styles.balanceSection}> <div className={styles.balanceSection}>
<div className={styles.balanceRow}> <div className={styles.balanceRow}>
@ -529,26 +509,6 @@ export default function Home() {
)} )}
</span> </span>
</div> </div>
<div className={styles.balanceRow}>
<span className={styles.balanceLabel} style={{ color: '#2196F3' }}>{ENERGY_POINTS_TOKEN.name}</span>
<span className={styles.balanceValue} style={{ color: '#2196F3' }}>
{share.balanceLoading ? (
<span className={styles.balanceLoading}>...</span>
) : (
<>{share.energyPointsBalance || '0'}</>
)}
</span>
</div>
<div className={styles.balanceRow}>
<span className={styles.balanceLabel} style={{ color: '#9C27B0' }}>{FUTURE_POINTS_TOKEN.name}</span>
<span className={styles.balanceValue} style={{ color: '#9C27B0' }}>
{share.balanceLoading ? (
<span className={styles.balanceLoading}>...</span>
) : (
<>{share.futurePointsBalance || '0'}</>
)}
</span>
</div>
</div> </div>
)} )}
@ -618,10 +578,7 @@ export default function Home() {
<div className={styles.transferWalletInfo}> <div className={styles.transferWalletInfo}>
<div className={styles.transferWalletName}>{transferShare.walletName}</div> <div className={styles.transferWalletName}>{transferShare.walletName}</div>
<div className={styles.transferWalletBalance}> <div className={styles.transferWalletBalance}>
KAVA: {transferShare.kavaBalance || '0'} | <span style={{color: '#4CAF50'}}>{GREEN_POINTS_TOKEN.name}: {transferShare.greenPointsBalance || '0'}</span> KAVA: {transferShare.kavaBalance || '0'} | {GREEN_POINTS_TOKEN.name}: {transferShare.greenPointsBalance || '0'}
</div>
<div className={styles.transferWalletBalance}>
<span style={{color: '#2196F3'}}>{ENERGY_POINTS_TOKEN.name}: {transferShare.energyPointsBalance || '0'}</span> | <span style={{color: '#9C27B0'}}>{FUTURE_POINTS_TOKEN.name}: {transferShare.futurePointsBalance || '0'}</span>
</div> </div>
<div className={styles.transferNetwork}> <div className={styles.transferNetwork}>
网络: Kava {getCurrentNetwork() === 'mainnet' ? '主网' : '测试网'} 网络: Kava {getCurrentNetwork() === 'mainnet' ? '主网' : '测试网'}
@ -648,22 +605,6 @@ export default function Home() {
{GREEN_POINTS_TOKEN.name} {GREEN_POINTS_TOKEN.name}
</button> </button>
</div> </div>
<div className={styles.tokenTypeSelector} style={{ marginTop: '8px' }}>
<button
className={`${styles.tokenTypeButton} ${transferTokenType === 'ENERGY_POINTS' ? styles.tokenTypeActive : ''}`}
onClick={() => { setTransferTokenType('ENERGY_POINTS'); setTransferAmount(''); }}
style={transferTokenType === 'ENERGY_POINTS' ? { backgroundColor: '#2196F3', borderColor: '#2196F3' } : {}}
>
{ENERGY_POINTS_TOKEN.name}
</button>
<button
className={`${styles.tokenTypeButton} ${transferTokenType === 'FUTURE_POINTS' ? styles.tokenTypeActive : ''}`}
onClick={() => { setTransferTokenType('FUTURE_POINTS'); setTransferAmount(''); }}
style={transferTokenType === 'FUTURE_POINTS' ? { backgroundColor: '#9C27B0', borderColor: '#9C27B0' } : {}}
>
{FUTURE_POINTS_TOKEN.name}
</button>
</div>
</div> </div>
{/* 收款地址 */} {/* 收款地址 */}
@ -681,7 +622,7 @@ export default function Home() {
{/* 转账金额 */} {/* 转账金额 */}
<div className={styles.transferInputGroup}> <div className={styles.transferInputGroup}>
<label className={styles.transferLabel}> <label className={styles.transferLabel}>
({TOKEN_CONFIG.getName(transferTokenType)}) ({transferTokenType === 'GREEN_POINTS' ? GREEN_POINTS_TOKEN.name : 'KAVA'})
</label> </label>
<div className={styles.transferAmountWrapper}> <div className={styles.transferAmountWrapper}>
<input <input
@ -748,8 +689,8 @@ export default function Home() {
<div className={styles.confirmDetails}> <div className={styles.confirmDetails}>
<div className={styles.confirmRow}> <div className={styles.confirmRow}>
<span className={styles.confirmLabel}></span> <span className={styles.confirmLabel}></span>
<span className={styles.confirmValue} style={TOKEN_CONFIG.isERC20(transferTokenType) ? { color: transferTokenType === 'GREEN_POINTS' ? '#4CAF50' : transferTokenType === 'ENERGY_POINTS' ? '#2196F3' : '#9C27B0' } : {}}> <span className={styles.confirmValue} style={transferTokenType === 'GREEN_POINTS' ? { color: '#4CAF50' } : {}}>
{TOKEN_CONFIG.getName(transferTokenType)} {transferTokenType === 'GREEN_POINTS' ? GREEN_POINTS_TOKEN.name : 'KAVA'}
</span> </span>
</div> </div>
<div className={styles.confirmRow}> <div className={styles.confirmRow}>
@ -758,8 +699,8 @@ export default function Home() {
</div> </div>
<div className={styles.confirmRow}> <div className={styles.confirmRow}>
<span className={styles.confirmLabel}></span> <span className={styles.confirmLabel}></span>
<span className={styles.confirmValue} style={TOKEN_CONFIG.isERC20(transferTokenType) ? { color: transferTokenType === 'GREEN_POINTS' ? '#4CAF50' : transferTokenType === 'ENERGY_POINTS' ? '#2196F3' : '#9C27B0' } : {}}> <span className={styles.confirmValue} style={transferTokenType === 'GREEN_POINTS' ? { color: '#4CAF50' } : {}}>
{transferAmount} {TOKEN_CONFIG.getName(transferTokenType)} {transferAmount} {transferTokenType === 'GREEN_POINTS' ? GREEN_POINTS_TOKEN.name : 'KAVA'}
</span> </span>
</div> </div>
<div className={styles.confirmRow}> <div className={styles.confirmRow}>
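The `calculateMaxAmount` change above encodes a simple rule: ERC-20 transfers can spend the full token balance because gas is always paid in native KAVA, while a native KAVA transfer must first subtract the estimated fee (falling back to 21000 gas at 1 gwei when estimation fails). A small illustrative sketch of that rule; the helper name is hypothetical:

```typescript
// Sketch only: illustrates the rule from calculateMaxAmount above; the helper name is hypothetical.
function computeMaxTransferable(params: {
  isErc20: boolean;          // ERC-20 tokens pay gas in KAVA, so the full token balance is spendable
  tokenBalance: string;      // balance of the selected token (human-readable)
  kavaBalance: string;       // native KAVA balance (human-readable)
  estimatedGasKava?: number; // estimated fee in KAVA; omit to use the 21000 * 1 gwei fallback
}): string {
  if (params.isErc20) {
    return params.tokenBalance;
  }
  const fallbackGasFee = 0.000021; // ~21000 gas * 1 gwei, same default as the diff
  const fee = params.estimatedGasKava ?? fallbackGasFee;
  const max = parseFloat(params.kavaBalance) - fee;
  return max > 0 ? max.toString() : '0';
}

// computeMaxTransferable({ isErc20: false, tokenBalance: '0', kavaBalance: '1.5' }) -> '1.499979'
```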

View File

@ -17,97 +17,17 @@ export const KAVA_RPC_URL = {
}; };
// Token types // Token types
export type TokenType = 'KAVA' | 'GREEN_POINTS' | 'ENERGY_POINTS' | 'FUTURE_POINTS'; export type TokenType = 'KAVA' | 'GREEN_POINTS';
// ERC-20 通用函数选择器 // Green Points (绿积分) Token Configuration
export const ERC20_SELECTORS = {
balanceOf: '0x70a08231', // balanceOf(address)
transfer: '0xa9059cbb', // transfer(address,uint256)
approve: '0x095ea7b3', // approve(address,uint256)
allowance: '0xdd62ed3e', // allowance(address,address)
totalSupply: '0x18160ddd', // totalSupply()
};
// Green Points (绿积分) Token Configuration - dUSDT
export const GREEN_POINTS_TOKEN = { export const GREEN_POINTS_TOKEN = {
contractAddress: '0xA9F3A35dBa8699c8C681D8db03F0c1A8CEB9D7c3', contractAddress: '0xA9F3A35dBa8699c8C681D8db03F0c1A8CEB9D7c3',
name: '绿积分', name: '绿积分',
symbol: 'dUSDT', symbol: 'dUSDT',
decimals: 6, decimals: 6,
// ERC-20 function selectors (kept for backward compatibility) // ERC-20 function selectors
balanceOfSelector: ERC20_SELECTORS.balanceOf, balanceOfSelector: '0x70a08231',
transferSelector: ERC20_SELECTORS.transfer, transferSelector: '0xa9059cbb',
};
// Energy Points (积分股) Token Configuration - eUSDT
export const ENERGY_POINTS_TOKEN = {
contractAddress: '0x7C3275D808eFbAE90C06C7E3A9AfDdcAa8563931',
name: '积分股',
symbol: 'eUSDT',
decimals: 6,
};
// Future Points (积分值) Token Configuration - fUSDT
export const FUTURE_POINTS_TOKEN = {
contractAddress: '0x14dc4f7d3E4197438d058C3D156dd9826A161134',
name: '积分值',
symbol: 'fUSDT',
decimals: 6,
};
// Token configuration utility
export const TOKEN_CONFIG = {
getContractAddress: (tokenType: TokenType): string | null => {
switch (tokenType) {
case 'KAVA':
return null; // Native token has no contract
case 'GREEN_POINTS':
return GREEN_POINTS_TOKEN.contractAddress;
case 'ENERGY_POINTS':
return ENERGY_POINTS_TOKEN.contractAddress;
case 'FUTURE_POINTS':
return FUTURE_POINTS_TOKEN.contractAddress;
}
},
getDecimals: (tokenType: TokenType): number => {
switch (tokenType) {
case 'KAVA':
return 18;
case 'GREEN_POINTS':
return GREEN_POINTS_TOKEN.decimals;
case 'ENERGY_POINTS':
return ENERGY_POINTS_TOKEN.decimals;
case 'FUTURE_POINTS':
return FUTURE_POINTS_TOKEN.decimals;
}
},
getName: (tokenType: TokenType): string => {
switch (tokenType) {
case 'KAVA':
return 'KAVA';
case 'GREEN_POINTS':
return GREEN_POINTS_TOKEN.name;
case 'ENERGY_POINTS':
return ENERGY_POINTS_TOKEN.name;
case 'FUTURE_POINTS':
return FUTURE_POINTS_TOKEN.name;
}
},
getSymbol: (tokenType: TokenType): string => {
switch (tokenType) {
case 'KAVA':
return 'KAVA';
case 'GREEN_POINTS':
return GREEN_POINTS_TOKEN.symbol;
case 'ENERGY_POINTS':
return ENERGY_POINTS_TOKEN.symbol;
case 'FUTURE_POINTS':
return FUTURE_POINTS_TOKEN.symbol;
}
},
isERC20: (tokenType: TokenType): boolean => {
return tokenType !== 'KAVA';
},
}; };
// 当前网络配置 (从 localStorage 读取或使用默认值) // 当前网络配置 (从 localStorage 读取或使用默认值)
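The 4-byte selectors collected in `ERC20_SELECTORS` above are simply the first four bytes of the keccak-256 hash of each function signature. A quick way to verify them, assuming ethers v6 (already used by this repo's deploy scripts) is available:

```typescript
import { id } from 'ethers'; // ethers v6: id() returns the keccak-256 hash of a UTF-8 string

// A selector is the first 4 bytes ('0x' + 8 hex chars) of the hashed signature.
const selector = (signature: string): string => id(signature).slice(0, 10);

console.log(selector('balanceOf(address)'));         // 0x70a08231
console.log(selector('transfer(address,uint256)'));  // 0xa9059cbb
console.log(selector('approve(address,uint256)'));   // 0x095ea7b3
console.log(selector('allowance(address,address)')); // 0xdd62ed3e
console.log(selector('totalSupply()'));              // 0x18160ddd
```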
@ -407,69 +327,44 @@ export function weiToKava(wei: bigint): string {
} }
/** /**
* Convert a token amount to raw units (smallest denomination) * Convert a Green Points amount to raw units (6 decimals)
* @param amount Human-readable amount
* @param decimals Token decimals (default 6 for USDT-like tokens)
*/ */
export function tokenToRaw(amount: string, decimals: number = 6): bigint { export function greenPointsToRaw(amount: string): bigint {
const parts = amount.split('.'); const parts = amount.split('.');
const whole = BigInt(parts[0] || '0'); const whole = BigInt(parts[0] || '0');
let fraction = parts[1] || ''; let fraction = parts[1] || '';
// 补齐或截断到指定位数 // 补齐或截断到 6 位
if (fraction.length > decimals) { if (fraction.length > 6) {
fraction = fraction.substring(0, decimals); fraction = fraction.substring(0, 6);
} else { } else {
fraction = fraction.padEnd(decimals, '0'); fraction = fraction.padEnd(6, '0');
} }
return whole * BigInt(10 ** decimals) + BigInt(fraction); return whole * BigInt(10 ** 6) + BigInt(fraction);
}
/**
* Convert raw units back to a human-readable token amount
* @param raw Raw amount in smallest units
* @param decimals Token decimals (default 6 for USDT-like tokens)
*/
export function rawToToken(raw: bigint, decimals: number = 6): string {
const rawStr = raw.toString().padStart(decimals + 1, '0');
const whole = rawStr.slice(0, -decimals) || '0';
const fraction = rawStr.slice(-decimals).replace(/0+$/, '');
return fraction ? `${whole}.${fraction}` : whole;
}
/**
* Convert a Green Points amount to raw units (6 decimals)
* @deprecated Use tokenToRaw(amount, 6) instead
*/
export function greenPointsToRaw(amount: string): bigint {
return tokenToRaw(amount, GREEN_POINTS_TOKEN.decimals);
} }
/** /**
* Convert raw units back to Green Points * Convert raw units back to Green Points
* @deprecated Use rawToToken(raw, 6) instead
*/ */
export function rawToGreenPoints(raw: bigint): string { export function rawToGreenPoints(raw: bigint): string {
return rawToToken(raw, GREEN_POINTS_TOKEN.decimals); const rawStr = raw.toString().padStart(7, '0');
const whole = rawStr.slice(0, -6) || '0';
const fraction = rawStr.slice(-6).replace(/0+$/, '');
return fraction ? `${whole}.${fraction}` : whole;
} }
/** /**
* Fetch an ERC-20 token balance * Fetch the Green Points balance (ERC-20)
* @param address Wallet address
* @param contractAddress Token contract address
* @param decimals Token decimals
*/ */
export async function fetchERC20Balance( export async function fetchGreenPointsBalance(address: string): Promise<string> {
address: string,
contractAddress: string,
decimals: number = 6
): Promise<string> {
try { try {
const rpcUrl = getCurrentRpcUrl(); const rpcUrl = getCurrentRpcUrl();
// Encode balanceOf(address) call data // Encode balanceOf(address) call data
// Function selector: 0x70a08231
// Address parameter: padded to 32 bytes
const paddedAddress = address.toLowerCase().replace('0x', '').padStart(64, '0'); const paddedAddress = address.toLowerCase().replace('0x', '').padStart(64, '0');
const callData = ERC20_SELECTORS.balanceOf + paddedAddress; const callData = GREEN_POINTS_TOKEN.balanceOfSelector + paddedAddress;
const response = await fetch(rpcUrl, { const response = await fetch(rpcUrl, {
method: 'POST', method: 'POST',
@ -479,7 +374,7 @@ export async function fetchERC20Balance(
method: 'eth_call', method: 'eth_call',
params: [ params: [
{ {
to: contractAddress, to: GREEN_POINTS_TOKEN.contractAddress,
data: callData, data: callData,
}, },
'latest', 'latest',
@ -491,65 +386,21 @@ export async function fetchERC20Balance(
const data = await response.json(); const data = await response.json();
if (data.result && data.result !== '0x') { if (data.result && data.result !== '0x') {
const balanceRaw = BigInt(data.result); const balanceRaw = BigInt(data.result);
return rawToToken(balanceRaw, decimals); return rawToGreenPoints(balanceRaw);
} }
return '0'; return '0';
} catch (error) { } catch (error) {
console.error('Failed to fetch ERC20 balance:', error); console.error('Failed to fetch Green Points balance:', error);
return '0'; return '0';
} }
} }
/**
* Fetch the Green Points balance (ERC-20)
*/
export async function fetchGreenPointsBalance(address: string): Promise<string> {
return fetchERC20Balance(address, GREEN_POINTS_TOKEN.contractAddress, GREEN_POINTS_TOKEN.decimals);
}
/**
* Fetch the Energy Points balance (eUSDT)
*/
export async function fetchEnergyPointsBalance(address: string): Promise<string> {
return fetchERC20Balance(address, ENERGY_POINTS_TOKEN.contractAddress, ENERGY_POINTS_TOKEN.decimals);
}
/**
* Fetch the Future Points balance (fUSDT)
*/
export async function fetchFuturePointsBalance(address: string): Promise<string> {
return fetchERC20Balance(address, FUTURE_POINTS_TOKEN.contractAddress, FUTURE_POINTS_TOKEN.decimals);
}
/**
* Fetch all ERC-20 token balances for an address
*/
export async function fetchAllTokenBalances(address: string): Promise<{
kava: string;
greenPoints: string;
energyPoints: string;
futurePoints: string;
}> {
const [greenPoints, energyPoints, futurePoints] = await Promise.all([
fetchGreenPointsBalance(address),
fetchEnergyPointsBalance(address),
fetchFuturePointsBalance(address),
]);
// Note: KAVA balance is fetched separately via eth_getBalance
return {
kava: '0', // Caller should fetch KAVA balance separately
greenPoints,
energyPoints,
futurePoints,
};
}
/** /**
* Encode ERC-20 transfer function call * Encode ERC-20 transfer function call
*/ */
function encodeErc20Transfer(to: string, amount: bigint): string { function encodeErc20Transfer(to: string, amount: bigint): string {
// Function selector: transfer(address,uint256) = 0xa9059cbb // Function selector: transfer(address,uint256) = 0xa9059cbb
const selector = ERC20_SELECTORS.transfer; const selector = GREEN_POINTS_TOKEN.transferSelector;
// Encode recipient address (padded to 32 bytes) // Encode recipient address (padded to 32 bytes)
const paddedAddress = to.toLowerCase().replace('0x', '').padStart(64, '0'); const paddedAddress = to.toLowerCase().replace('0x', '').padStart(64, '0');
// Encode amount (padded to 32 bytes) // Encode amount (padded to 32 bytes)
@ -625,15 +476,13 @@ export async function estimateGas(params: { from: string; to: string; value: str
// For token transfers, we need different params // For token transfers, we need different params
let txParams: { from: string; to: string; value: string; data?: string }; let txParams: { from: string; to: string; value: string; data?: string };
if (TOKEN_CONFIG.isERC20(tokenType)) { if (tokenType === 'GREEN_POINTS') {
// ERC-20 transfer: to is contract, value is 0, data is transfer call // ERC-20 transfer: to is contract, value is 0, data is transfer call
const contractAddress = TOKEN_CONFIG.getContractAddress(tokenType); const tokenAmount = greenPointsToRaw(params.value);
const decimals = TOKEN_CONFIG.getDecimals(tokenType);
const tokenAmount = tokenToRaw(params.value, decimals);
const transferData = encodeErc20Transfer(params.to, tokenAmount); const transferData = encodeErc20Transfer(params.to, tokenAmount);
txParams = { txParams = {
from: params.from, from: params.from,
to: contractAddress!, to: GREEN_POINTS_TOKEN.contractAddress,
value: '0x0', value: '0x0',
data: transferData, data: transferData,
}; };
@ -662,7 +511,7 @@ export async function estimateGas(params: { from: string; to: string; value: str
if (data.error) { if (data.error) {
// 如果估算失败,使用默认值 // 如果估算失败,使用默认值
console.warn('Gas 估算失败,使用默认值:', data.error); console.warn('Gas 估算失败,使用默认值:', data.error);
return TOKEN_CONFIG.isERC20(tokenType) ? BigInt(65000) : BigInt(21000); return tokenType === 'GREEN_POINTS' ? BigInt(65000) : BigInt(21000);
} }
return BigInt(data.result); return BigInt(data.result);
} }
@ -694,14 +543,12 @@ export async function prepareTransaction(params: TransactionParams): Promise<Pre
let value: bigint; let value: bigint;
let data: string; let data: string;
if (TOKEN_CONFIG.isERC20(tokenType)) { if (tokenType === 'GREEN_POINTS') {
// ERC-20 token transfer // ERC-20 token transfer
// To address is the contract, value is 0 // To address is the contract, value is 0
// Data is transfer(recipient, amount) encoded // Data is transfer(recipient, amount) encoded
const contractAddress = TOKEN_CONFIG.getContractAddress(tokenType); const tokenAmount = greenPointsToRaw(params.value);
const decimals = TOKEN_CONFIG.getDecimals(tokenType); toAddress = GREEN_POINTS_TOKEN.contractAddress.toLowerCase();
const tokenAmount = tokenToRaw(params.value, decimals);
toAddress = contractAddress!.toLowerCase();
value = BigInt(0); value = BigInt(0);
data = encodeErc20Transfer(params.to, tokenAmount); data = encodeErc20Transfer(params.to, tokenAmount);
} else { } else {
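Two details the hunks above rely on are worth spelling out: amounts are converted to raw integer units by string manipulation (avoiding floating-point rounding for 6-decimal tokens), and an ERC-20 transfer is encoded as the 4-byte selector followed by the 32-byte-padded recipient and amount. A self-contained sketch under those assumptions; the helper names here are illustrative, not the module's exports:

```typescript
// Sketch only: mirrors the decimal handling and calldata layout used in transaction.ts.

/** Convert a human-readable amount (e.g. "1.5") to raw units for a token with `decimals`. */
function toRawUnits(amount: string, decimals: number = 6): bigint {
  const parts = amount.split('.');
  const whole = BigInt(parts[0] || '0');
  const fraction = (parts[1] || '').slice(0, decimals).padEnd(decimals, '0');
  return whole * BigInt(10) ** BigInt(decimals) + BigInt(fraction || '0');
}

/** Encode transfer(address,uint256): selector + 32-byte padded recipient + 32-byte padded amount. */
function encodeTransfer(to: string, rawAmount: bigint): string {
  const selector = '0xa9059cbb';
  const paddedTo = to.toLowerCase().replace('0x', '').padStart(64, '0');
  const paddedAmount = rawAmount.toString(16).padStart(64, '0');
  return selector + paddedTo + paddedAmount;
}

// Round trip: "1.5" of a 6-decimal token (e.g. dUSDT) -> 1_500_000 raw units.
const raw = toRawUnits('1.5', 6); // 1500000n
// Calldata for sending those units to a hypothetical recipient address:
const data = encodeTransfer('0x0000000000000000000000000000000000000001', raw);
// data = '0xa9059cbb' + 64 hex chars of recipient + 64 hex chars of amount (0x16e360)
```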

View File

@ -149,8 +149,6 @@ func (c *MessageRouterClient) PublishSessionCreated(
} }
// PublishSessionStarted publishes a session_started event when all parties have joined // PublishSessionStarted publishes a session_started event when all parties have joined
// CRITICAL: participants contains the complete list of all parties with their indices
// Receivers should use this list for TSS protocol instead of JoinSession response
func (c *MessageRouterClient) PublishSessionStarted( func (c *MessageRouterClient) PublishSessionStarted(
ctx context.Context, ctx context.Context,
sessionID string, sessionID string,
@ -159,17 +157,7 @@ func (c *MessageRouterClient) PublishSessionStarted(
selectedParties []string, selectedParties []string,
joinTokens map[string]string, joinTokens map[string]string,
startedAt int64, startedAt int64,
participants []use_cases.SessionParticipantInfo,
) error { ) error {
// Convert participants to proto format
protoParticipants := make([]*router.PartyInfo, len(participants))
for i, p := range participants {
protoParticipants[i] = &router.PartyInfo{
PartyId: p.PartyID,
PartyIndex: p.PartyIndex,
}
}
event := &router.SessionEvent{ event := &router.SessionEvent{
EventId: uuid.New().String(), EventId: uuid.New().String(),
EventType: "session_started", EventType: "session_started",
@ -179,13 +167,8 @@ func (c *MessageRouterClient) PublishSessionStarted(
SelectedParties: selectedParties, SelectedParties: selectedParties,
JoinTokens: joinTokens, JoinTokens: joinTokens,
CreatedAt: startedAt, CreatedAt: startedAt,
Participants: protoParticipants,
} }
logger.Info("Publishing session_started event with participants",
zap.String("session_id", sessionID),
zap.Int("participant_count", len(participants)))
return c.PublishSessionEvent(ctx, event) return c.PublishSessionEvent(ctx, event)
} }

View File

@ -21,16 +21,8 @@ import (
// Maximum retries for optimistic lock conflicts during join session // Maximum retries for optimistic lock conflicts during join session
const joinSessionMaxRetries = 3 const joinSessionMaxRetries = 3
// SessionParticipantInfo contains party ID and index for session_started event
type SessionParticipantInfo struct {
PartyID string
PartyIndex int32
}
// JoinSessionMessageRouterClient defines the interface for publishing session events via gRPC // JoinSessionMessageRouterClient defines the interface for publishing session events via gRPC
type JoinSessionMessageRouterClient interface { type JoinSessionMessageRouterClient interface {
// PublishSessionStarted publishes session_started event with complete participants list
// CRITICAL: participants contains all parties with their indices for TSS protocol
PublishSessionStarted( PublishSessionStarted(
ctx context.Context, ctx context.Context,
sessionID string, sessionID string,
@ -39,7 +31,6 @@ type JoinSessionMessageRouterClient interface {
selectedParties []string, selectedParties []string,
joinTokens map[string]string, joinTokens map[string]string,
startedAt int64, startedAt int64,
participants []SessionParticipantInfo,
) error ) error
// PublishParticipantJoined broadcasts a participant_joined event to all parties in the session // PublishParticipantJoined broadcasts a participant_joined event to all parties in the session
@ -257,16 +248,6 @@ func (uc *JoinSessionUseCase) executeWithRetry(
// Build join tokens map (empty for session_started, parties already have tokens) // Build join tokens map (empty for session_started, parties already have tokens)
joinTokens := make(map[string]string) joinTokens := make(map[string]string)
// CRITICAL: Build complete participants list with party indices
// This ensures all parties have the same participant list for TSS protocol
participants := make([]SessionParticipantInfo, len(session.Participants))
for i, p := range session.Participants {
participants[i] = SessionParticipantInfo{
PartyID: p.PartyID.String(),
PartyIndex: int32(p.PartyIndex),
}
}
if err := uc.messageRouterClient.PublishSessionStarted( if err := uc.messageRouterClient.PublishSessionStarted(
ctx, ctx,
session.ID.String(), session.ID.String(),
@ -275,7 +256,6 @@ func (uc *JoinSessionUseCase) executeWithRetry(
selectedParties, selectedParties,
joinTokens, joinTokens,
startedAt, startedAt,
participants,
); err != nil { ); err != nil {
logger.Error("failed to publish session started event to message router", logger.Error("failed to publish session started event to message router",
zap.String("session_id", session.ID.String()), zap.String("session_id", session.ID.String()),
@ -283,8 +263,7 @@ func (uc *JoinSessionUseCase) executeWithRetry(
} else { } else {
logger.Info("published session started event to message router", logger.Info("published session started event to message router",
zap.String("session_id", session.ID.String()), zap.String("session_id", session.ID.String()),
zap.Int("party_count", len(selectedParties)), zap.Int("party_count", len(selectedParties)))
zap.Int("participant_count", len(participants)))
} }
} }
} }

View File

@ -1,6 +1,7 @@
-- ============================================================================ -- ============================================================================
-- auth-service 初始化 migration -- auth-service 初始化 migration
-- 合并自: 0001_init, 0002_add_transactional_idempotency -- 合并自: 20260111000000_init, 20260111083500_allow_nullable_phone_password,
-- 20260112110000_add_nickname_to_synced_legacy_users
-- ============================================================================ -- ============================================================================
-- CreateEnum -- CreateEnum
@ -240,26 +241,3 @@ ALTER TABLE "sms_logs" ADD CONSTRAINT "sms_logs_user_id_fkey" FOREIGN KEY ("user
-- AddForeignKey -- AddForeignKey
ALTER TABLE "login_logs" ADD CONSTRAINT "login_logs_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE; ALTER TABLE "login_logs" ADD CONSTRAINT "login_logs_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- ============================================================================
-- 事务性幂等消费支持 (从 0002_add_transactional_idempotency 合并)
-- 用于 1.0 -> 2.0 CDC 同步的 100% exactly-once 语义
-- ============================================================================
-- CreateTable
CREATE TABLE "processed_cdc_events" (
"id" BIGSERIAL NOT NULL,
"source_topic" TEXT NOT NULL,
"offset" BIGINT NOT NULL,
"table_name" TEXT NOT NULL,
"operation" TEXT NOT NULL,
"processed_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "processed_cdc_events_pkey" PRIMARY KEY ("id")
);
-- CreateIndex (复合唯一索引保证幂等性)
CREATE UNIQUE INDEX "processed_cdc_events_source_topic_offset_key" ON "processed_cdc_events"("source_topic", "offset");
-- CreateIndex (时间索引用于清理旧数据)
CREATE INDEX "processed_cdc_events_processed_at_idx" ON "processed_cdc_events"("processed_at");

View File

@ -1,27 +0,0 @@
-- CreateTable
CREATE TABLE "synced_wallet_addresses" (
"id" BIGSERIAL NOT NULL,
"legacy_address_id" BIGINT NOT NULL,
"legacy_user_id" BIGINT NOT NULL,
"chain_type" TEXT NOT NULL,
"address" TEXT NOT NULL,
"public_key" TEXT NOT NULL,
"status" TEXT NOT NULL DEFAULT 'ACTIVE',
"legacy_bound_at" TIMESTAMP(3) NOT NULL,
"source_sequence_num" BIGINT NOT NULL,
"synced_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "synced_wallet_addresses_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "synced_wallet_addresses_legacy_address_id_key" ON "synced_wallet_addresses"("legacy_address_id");
-- CreateIndex
CREATE UNIQUE INDEX "synced_wallet_addresses_legacy_user_id_chain_type_key" ON "synced_wallet_addresses"("legacy_user_id", "chain_type");
-- CreateIndex
CREATE INDEX "synced_wallet_addresses_legacy_user_id_idx" ON "synced_wallet_addresses"("legacy_user_id");
-- CreateIndex
CREATE INDEX "synced_wallet_addresses_chain_type_address_idx" ON "synced_wallet_addresses"("chain_type", "address");

View File

@ -0,0 +1,25 @@
-- ============================================================================
-- 添加事务性幂等消费支持
-- 用于 1.0 -> 2.0 CDC 同步的 100% exactly-once 语义
-- ============================================================================
-- 创建 processed_cdc_events 表(用于 CDC 事件幂等)
-- 唯一键: (source_topic, offset) - Kafka topic 名称 + 消息偏移量
-- 用于保证每个 CDC 事件只处理一次exactly-once 语义)
CREATE TABLE IF NOT EXISTS "processed_cdc_events" (
"id" BIGSERIAL NOT NULL,
"source_topic" VARCHAR(200) NOT NULL, -- Kafka topic 名称(如 cdc.identity.public.user_accounts
"offset" BIGINT NOT NULL, -- Kafka 消息偏移量(在 partition 内唯一)
"table_name" VARCHAR(100) NOT NULL, -- 源表名
"operation" VARCHAR(10) NOT NULL, -- CDC 操作类型: c(create), u(update), d(delete), r(snapshot read)
"processed_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "processed_cdc_events_pkey" PRIMARY KEY ("id")
);
-- 复合唯一索引:(source_topic, offset) 保证幂等性
-- 注意:这不是数据库自增 ID而是 Kafka 消息的唯一标识
CREATE UNIQUE INDEX "processed_cdc_events_source_topic_offset_key" ON "processed_cdc_events"("source_topic", "offset");
-- 时间索引用于清理旧数据
CREATE INDEX "processed_cdc_events_processed_at_idx" ON "processed_cdc_events"("processed_at");
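The unique index on (source_topic, offset) is what turns Kafka's at-least-once delivery into exactly-once processing: the consumer inserts one row per message in the same database transaction as the business write, so a redelivered message fails the insert with a unique-constraint violation (Prisma error P2002) and is skipped without re-applying the change. A minimal Prisma sketch of the pattern; `applyBusinessChange` is a placeholder for the table-specific upsert logic:

```typescript
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Same transaction-client shape the CDC consumers in this repo use.
type Tx = Omit<PrismaClient, '$connect' | '$disconnect' | '$on' | '$transaction' | '$use' | '$extends'>;

/** Process one CDC message exactly once: idempotency record + business write in one transaction. */
async function processOnce(
  topic: string,
  offset: bigint,
  tableName: string,
  operation: string,
  applyBusinessChange: (tx: Tx) => Promise<void>,
): Promise<void> {
  try {
    await prisma.$transaction(async (tx) => {
      // 1. Claim the (source_topic, offset) slot; the unique index rejects duplicates.
      await tx.processedCdcEvent.create({
        data: { sourceTopic: topic, offset, tableName, operation },
      });
      // 2. Apply the business change inside the same transaction.
      await applyBusinessChange(tx);
    });
  } catch (error: unknown) {
    if ((error as { code?: string }).code === 'P2002') return; // already processed: safe to skip
    throw error; // anything else should surface so the message is retried
  }
}
```

This is the same structure the WalletAddressCdcConsumer further down implements; resetting Kafka offsets therefore also requires truncating processed_cdc_events, as the migration script in this release does.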

View File

@ -104,33 +104,6 @@ model SyncedLegacyUser {
@@map("synced_legacy_users") @@map("synced_legacy_users")
} }
// ============================================================================
// CDC 同步的 1.0 钱包地址(只读)
// ============================================================================
model SyncedWalletAddress {
id BigInt @id @default(autoincrement())
// 1.0 钱包地址数据
legacyAddressId BigInt @unique @map("legacy_address_id") // 1.0 的 wallet_addresses.address_id
legacyUserId BigInt @map("legacy_user_id") // 1.0 的 wallet_addresses.user_id
chainType String @map("chain_type") // KAVA, BSC 等
address String // 钱包地址
publicKey String @map("public_key") // MPC 公钥
status String @default("ACTIVE") // ACTIVE, DELETED
legacyBoundAt DateTime @map("legacy_bound_at") // 1.0 绑定时间
// CDC 元数据
sourceSequenceNum BigInt @map("source_sequence_num")
syncedAt DateTime @default(now()) @map("synced_at")
@@unique([legacyUserId, chainType])
@@index([legacyUserId])
@@index([chainType, address])
@@map("synced_wallet_addresses")
}
// ============================================================================ // ============================================================================
// 刷新令牌 // 刷新令牌
// ============================================================================ // ============================================================================

View File

@ -9,7 +9,6 @@ import {
UserController, UserController,
HealthController, HealthController,
AdminController, AdminController,
InternalController,
} from './controllers'; } from './controllers';
import { ApplicationModule } from '@/application'; import { ApplicationModule } from '@/application';
import { JwtAuthGuard } from '@/shared/guards/jwt-auth.guard'; import { JwtAuthGuard } from '@/shared/guards/jwt-auth.guard';
@ -36,7 +35,6 @@ import { JwtAuthGuard } from '@/shared/guards/jwt-auth.guard';
UserController, UserController,
HealthController, HealthController,
AdminController, AdminController,
InternalController,
], ],
providers: [JwtAuthGuard], providers: [JwtAuthGuard],
}) })

View File

@ -5,4 +5,3 @@ export * from './kyc.controller';
export * from './user.controller'; export * from './user.controller';
export * from './health.controller'; export * from './health.controller';
export * from './admin.controller'; export * from './admin.controller';
export * from './internal.controller';

View File

@ -1,50 +0,0 @@
import { Controller, Get, Param, NotFoundException, Logger } from '@nestjs/common';
import { PrismaService } from '@/infrastructure/persistence/prisma/prisma.service';
/**
* Internal API - called by other 2.0 services; not a public JWT-authenticated endpoint
*/
@Controller('internal')
export class InternalController {
private readonly logger = new Logger(InternalController.name);
constructor(private readonly prisma: PrismaService) {}
/**
* Look up a user's KAVA wallet address by accountSequence
* Called by trading-service
*/
@Get('users/:accountSequence/kava-address')
async getUserKavaAddress(
@Param('accountSequence') accountSequence: string,
): Promise<{ kavaAddress: string }> {
// 1. 通过 SyncedLegacyUser 查找 legacyId
const legacyUser = await this.prisma.syncedLegacyUser.findUnique({
where: { accountSequence },
select: { legacyId: true },
});
if (!legacyUser) {
this.logger.warn(`[Internal] Legacy user not found: ${accountSequence}`);
throw new NotFoundException(`用户未找到: ${accountSequence}`);
}
// 2. 通过 legacyUserId + chainType 查找 KAVA 钱包地址
const walletAddress = await this.prisma.syncedWalletAddress.findUnique({
where: {
legacyUserId_chainType: {
legacyUserId: legacyUser.legacyId,
chainType: 'KAVA',
},
},
select: { address: true, status: true },
});
if (!walletAddress || walletAddress.status !== 'ACTIVE') {
this.logger.warn(`[Internal] Kava address not found for: ${accountSequence}`);
throw new NotFoundException(`未找到 Kava 钱包地址: ${accountSequence}`);
}
return { kavaAddress: walletAddress.address };
}
}
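The controller above exposes one internal route, `GET /internal/users/:accountSequence/kava-address`, returning `{ kavaAddress }` or a 404 when the user or an ACTIVE KAVA address is missing. A hedged sketch of how a 2.0 caller such as trading-service might use it; the base-URL variable is a placeholder rather than a documented config key, and any global route prefix is ignored here:

```typescript
// Sketch only: AUTH_SERVICE_URL is an assumed placeholder for the auth-service base URL.
const AUTH_SERVICE_URL = process.env.AUTH_SERVICE_URL ?? 'http://auth-service:3000';

/** Resolve a user's KAVA wallet address via the internal endpoint; returns null on 404. */
async function getKavaAddress(accountSequence: string): Promise<string | null> {
  const res = await fetch(
    `${AUTH_SERVICE_URL}/internal/users/${encodeURIComponent(accountSequence)}/kava-address`,
  );
  if (res.status === 404) return null; // user not found, or no ACTIVE KAVA address
  if (!res.ok) throw new Error(`auth-service responded ${res.status}`);
  const body = (await res.json()) as { kavaAddress: string };
  return body.kavaAddress;
}
```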

View File

@ -22,7 +22,7 @@ class ChangePasswordDto {
newPassword: string; newPassword: string;
} }
@Controller('auth/password') @Controller('password')
@UseGuards(ThrottlerGuard) @UseGuards(ThrottlerGuard)
export class PasswordController { export class PasswordController {
constructor(private readonly passwordService: PasswordService) {} constructor(private readonly passwordService: PasswordService) {}

View File

@ -21,7 +21,7 @@ class VerifySmsDto {
type: 'REGISTER' | 'LOGIN' | 'RESET_PASSWORD' | 'CHANGE_PHONE'; type: 'REGISTER' | 'LOGIN' | 'RESET_PASSWORD' | 'CHANGE_PHONE';
} }
@Controller('auth/sms') @Controller('sms')
@UseGuards(ThrottlerGuard) @UseGuards(ThrottlerGuard)
export class SmsController { export class SmsController {
constructor(private readonly smsService: SmsService) {} constructor(private readonly smsService: SmsService) {}

View File

@ -1,15 +1,13 @@
import { import {
Controller, Controller,
Get, Get,
Query,
UseGuards, UseGuards,
BadRequestException,
} from '@nestjs/common'; } from '@nestjs/common';
import { UserService, UserProfileResult } from '@/application/services'; import { UserService, UserProfileResult } from '@/application/services';
import { JwtAuthGuard } from '@/shared/guards/jwt-auth.guard'; import { JwtAuthGuard } from '@/shared/guards/jwt-auth.guard';
import { CurrentUser } from '@/shared/decorators/current-user.decorator'; import { CurrentUser } from '@/shared/decorators/current-user.decorator';
@Controller('auth/user') @Controller('user')
@UseGuards(JwtAuthGuard) @UseGuards(JwtAuthGuard)
export class UserController { export class UserController {
constructor(private readonly userService: UserService) {} constructor(private readonly userService: UserService) {}
@ -25,21 +23,4 @@ export class UserController {
const result = await this.userService.getProfile(user.accountSequence); const result = await this.userService.getProfile(user.accountSequence);
return { success: true, data: result }; return { success: true, data: result };
} }
/**
* Look up a user by phone number (used for P2P transfer verification)
* GET /user/lookup?phone=13800138000
*/
@Get('lookup')
async lookupByPhone(
@Query('phone') phone: string,
@CurrentUser() currentUser: { accountSequence: string },
): Promise<{ success: boolean; data: { exists: boolean; nickname?: string; accountSequence?: string } }> {
if (!phone || phone.length !== 11) {
throw new BadRequestException('请输入有效的11位手机号');
}
const result = await this.userService.lookupByPhone(phone);
return { success: true, data: result };
}
} }

View File

@ -9,7 +9,7 @@ import { InfrastructureModule } from './infrastructure/infrastructure.module';
// 配置模块 // 配置模块
ConfigModule.forRoot({ ConfigModule.forRoot({
isGlobal: true, isGlobal: true,
envFilePath: ['.env.local', '.env', '../.env'], envFilePath: ['.env.local', '.env'],
}), }),
// 限流模块 // 限流模块

View File

@ -48,24 +48,6 @@ export class UserService {
}; };
} }
/**
* Look up a user by phone number (used for P2P transfer verification)
*/
async lookupByPhone(phone: string): Promise<{ exists: boolean; accountSequence?: string; nickname?: string }> {
const phoneVO = Phone.create(phone);
const user = await this.userRepository.findByPhone(phoneVO);
if (!user || user.status !== 'ACTIVE') {
return { exists: false };
}
return {
exists: true,
accountSequence: user.accountSequence.value,
nickname: user.isKycVerified ? this.maskName(user.realName!) : user.phone.masked,
};
}
/** /**
* *
*/ */

View File

@ -7,7 +7,7 @@ import {
PrismaRefreshTokenRepository, PrismaRefreshTokenRepository,
PrismaSmsVerificationRepository, PrismaSmsVerificationRepository,
} from './persistence/repositories'; } from './persistence/repositories';
import { LegacyUserCdcConsumer, WalletAddressCdcConsumer } from './messaging/cdc'; import { LegacyUserCdcConsumer } from './messaging/cdc';
import { KafkaModule, KafkaProducerService } from './kafka'; import { KafkaModule, KafkaProducerService } from './kafka';
import { RedisService } from './redis'; import { RedisService } from './redis';
import { import {
@ -24,7 +24,6 @@ import { ApplicationModule } from '@/application/application.module';
providers: [ providers: [
// CDC // CDC
LegacyUserCdcConsumer, LegacyUserCdcConsumer,
WalletAddressCdcConsumer,
// Kafka Producer // Kafka Producer
KafkaProducerService, KafkaProducerService,

View File

@ -1,2 +1 @@
export * from './legacy-user-cdc.consumer'; export * from './legacy-user-cdc.consumer';
export * from './wallet-address-cdc.consumer';

View File

@ -1,243 +0,0 @@
import { Injectable, OnModuleInit, OnModuleDestroy, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { Kafka, Consumer, EachMessagePayload } from 'kafkajs';
import { Prisma, PrismaClient } from '@prisma/client';
import { PrismaService } from '@/infrastructure/persistence/prisma/prisma.service';
/** Prisma 事务客户端类型 */
type TransactionClient = Omit<
PrismaClient,
'$connect' | '$disconnect' | '$on' | '$transaction' | '$use' | '$extends'
>;
/**
* CDC payload shape after Debezium's ExtractNewRecordState transform
* Fields from the 1.0 identity-service wallet_addresses table plus Debezium metadata
*/
interface UnwrappedCdcWalletAddress {
// 1.0 identity-service wallet_addresses 表字段
address_id: number;
user_id: number;
chain_type: string;
address: string;
public_key: string;
address_digest: string;
mpc_signature_r: string;
mpc_signature_s: string;
mpc_signature_v: number;
status: string;
bound_at: number; // timestamp in milliseconds
// Debezium ExtractNewRecordState 添加的元数据字段
__op: 'c' | 'u' | 'd' | 'r';
__table: string;
__source_ts_ms: number;
__deleted?: string;
}
/**
* Wallet Address CDC Consumer - syncs 1.0 wallet addresses
* Consumes Debezium CDC events and writes them into synced_wallet_addresses
*
* Uses the Transactional Idempotent Consumer pattern:
* - each CDC event is processed exactly-once
* - processed offsets are recorded in processed_cdc_events
* - the idempotency record and the business write share one transaction
*/
@Injectable()
export class WalletAddressCdcConsumer implements OnModuleInit, OnModuleDestroy {
private readonly logger = new Logger(WalletAddressCdcConsumer.name);
private kafka: Kafka;
private consumer: Consumer;
private isConnected = false;
private topic: string;
constructor(
private readonly configService: ConfigService,
private readonly prisma: PrismaService,
) {
const brokers = this.configService.get<string>('KAFKA_BROKERS', 'localhost:9092').split(',');
this.kafka = new Kafka({
clientId: 'auth-service-cdc-wallet',
brokers,
});
this.consumer = this.kafka.consumer({
groupId: this.configService.get<string>('CDC_CONSUMER_GROUP', 'auth-service-cdc-group') + '-wallet',
});
this.topic = this.configService.get<string>(
'CDC_TOPIC_WALLET_ADDRESSES',
'cdc.identity.public.wallet_addresses',
);
}
async onModuleInit() {
if (this.configService.get('CDC_ENABLED', 'true') !== 'true') {
this.logger.log('Wallet Address CDC Consumer is disabled');
return;
}
try {
await this.consumer.connect();
this.isConnected = true;
await this.consumer.subscribe({ topic: this.topic, fromBeginning: true });
await this.consumer.run({
eachMessage: async (payload) => {
await this.handleMessage(payload);
},
});
this.logger.log(
`Wallet Address CDC Consumer started, listening to topic: ${this.topic}`,
);
} catch (error) {
this.logger.error('Failed to start Wallet Address CDC Consumer', error);
}
}
async onModuleDestroy() {
if (this.isConnected) {
await this.consumer.disconnect();
this.logger.log('Wallet Address CDC Consumer disconnected');
}
}
private async handleMessage(payload: EachMessagePayload) {
const { topic, partition, message } = payload;
if (!message.value) return;
const offset = BigInt(message.offset);
const idempotencyKey = `${topic}:${offset}`;
try {
const cdcEvent: UnwrappedCdcWalletAddress = JSON.parse(message.value.toString());
const op = cdcEvent.__op;
const tableName = cdcEvent.__table || 'wallet_addresses';
this.logger.log(`[CDC] Processing wallet address event: topic=${topic}, offset=${offset}, op=${op}`);
await this.processWithIdempotency(topic, offset, tableName, op, cdcEvent);
this.logger.log(`[CDC] Successfully processed wallet address event: ${idempotencyKey}`);
} catch (error: any) {
if (error.code === 'P2002') {
this.logger.debug(`[CDC] Skipping duplicate wallet address event: ${idempotencyKey}`);
return;
}
this.logger.error(
`[CDC] Failed to process wallet address message from ${topic}[${partition}], offset=${offset}`,
error,
);
}
}
/**
* Insert the idempotency record and run the business logic in a single transaction
*/
private async processWithIdempotency(
topic: string,
offset: bigint,
tableName: string,
operation: string,
event: UnwrappedCdcWalletAddress,
): Promise<void> {
await this.prisma.$transaction(async (tx) => {
// 1. 尝试插入幂等记录
try {
await tx.processedCdcEvent.create({
data: {
sourceTopic: topic,
offset: offset,
tableName: tableName,
operation: operation,
},
});
} catch (error: any) {
if (error.code === 'P2002') {
this.logger.debug(`[CDC] Wallet address event already processed: ${topic}:${offset}`);
return;
}
throw error;
}
// 2. 执行业务逻辑
await this.processCdcEvent(event, offset, tx);
}, {
isolationLevel: Prisma.TransactionIsolationLevel.Serializable,
timeout: 30000,
});
}
private async processCdcEvent(
event: UnwrappedCdcWalletAddress,
sequenceNum: bigint,
tx: TransactionClient,
): Promise<void> {
const op = event.__op;
const isDeleted = event.__deleted === 'true';
if (isDeleted || op === 'd') {
await this.deleteWalletAddress(event.address_id, tx);
return;
}
switch (op) {
case 'c':
case 'r':
case 'u':
await this.upsertWalletAddress(event, sequenceNum, tx);
break;
}
}
private async upsertWalletAddress(
walletAddress: UnwrappedCdcWalletAddress,
sequenceNum: bigint,
tx: TransactionClient,
): Promise<void> {
await tx.syncedWalletAddress.upsert({
where: { legacyAddressId: BigInt(walletAddress.address_id) },
update: {
legacyUserId: BigInt(walletAddress.user_id),
chainType: walletAddress.chain_type,
address: walletAddress.address,
publicKey: walletAddress.public_key,
status: walletAddress.status,
sourceSequenceNum: sequenceNum,
syncedAt: new Date(),
},
create: {
legacyAddressId: BigInt(walletAddress.address_id),
legacyUserId: BigInt(walletAddress.user_id),
chainType: walletAddress.chain_type,
address: walletAddress.address,
publicKey: walletAddress.public_key,
status: walletAddress.status,
legacyBoundAt: new Date(walletAddress.bound_at),
sourceSequenceNum: sequenceNum,
},
});
this.logger.debug(
`[CDC] Synced wallet address: addressId=${walletAddress.address_id}, chain=${walletAddress.chain_type}`,
);
}
private async deleteWalletAddress(addressId: number, tx: TransactionClient): Promise<void> {
try {
await tx.syncedWalletAddress.update({
where: { legacyAddressId: BigInt(addressId) },
data: { status: 'DELETED' },
});
this.logger.debug(`[CDC] Marked wallet address as deleted: ${addressId}`);
} catch (error) {
this.logger.error(`[CDC] Failed to mark wallet address as deleted: ${addressId}`, error);
}
}
}

View File

@ -1,78 +0,0 @@
// SPDX-License-Identifier: MIT
pragma solidity 0.8.19;
/**
* @title EnergyUSDT
* @dev Fixed supply ERC-20 token - NO MINTING CAPABILITY
* Total Supply: 10,002,000,000 (100.02 Billion) tokens with 6 decimals (matching USDT)
*
* IMPORTANT: This contract has NO mint function and NO way to increase supply.
* All tokens are minted to the deployer at construction time.
*/
contract EnergyUSDT {
string public constant name = "Energy USDT";
string public constant symbol = "eUSDT";
uint8 public constant decimals = 6;
// Fixed total supply: 100.02 billion tokens (10,002,000,000 * 10^6)
uint256 public constant totalSupply = 10_002_000_000 * 10**6;
mapping(address => uint256) private _balances;
mapping(address => mapping(address => uint256)) private _allowances;
event Transfer(address indexed from, address indexed to, uint256 value);
event Approval(address indexed owner, address indexed spender, uint256 value);
/**
* @dev Constructor - mints entire fixed supply to deployer
* No mint function exists - supply is permanently fixed
*/
constructor() {
_balances[msg.sender] = totalSupply;
emit Transfer(address(0), msg.sender, totalSupply);
}
function balanceOf(address account) public view returns (uint256) {
return _balances[account];
}
function transfer(address to, uint256 amount) public returns (bool) {
require(to != address(0), "Transfer to zero address");
require(_balances[msg.sender] >= amount, "Insufficient balance");
unchecked {
_balances[msg.sender] -= amount;
_balances[to] += amount;
}
emit Transfer(msg.sender, to, amount);
return true;
}
function allowance(address owner, address spender) public view returns (uint256) {
return _allowances[owner][spender];
}
function approve(address spender, uint256 amount) public returns (bool) {
require(spender != address(0), "Approve to zero address");
_allowances[msg.sender][spender] = amount;
emit Approval(msg.sender, spender, amount);
return true;
}
function transferFrom(address from, address to, uint256 amount) public returns (bool) {
require(from != address(0), "Transfer from zero address");
require(to != address(0), "Transfer to zero address");
require(_balances[from] >= amount, "Insufficient balance");
require(_allowances[from][msg.sender] >= amount, "Insufficient allowance");
unchecked {
_balances[from] -= amount;
_balances[to] += amount;
_allowances[from][msg.sender] -= amount;
}
emit Transfer(from, to, amount);
return true;
}
}

View File

@ -1,81 +0,0 @@
# eUSDT (Energy USDT)
## 代币信息
| 属性 | 值 |
|------|-----|
| 名称 | Energy USDT |
| 符号 | eUSDT |
| 精度 | 6 decimals |
| 总供应量 | 10,002,000,000 (100.02亿) |
| 标准 | ERC-20 |
| 部署链 | KAVA Mainnet (Chain ID: 2222) |
## 合约特性
- **固定供应量**100.02亿代币,部署时全部铸造给部署者
- **不可增发**:合约中没有 mint 函数,供应量永久固定
- **不可销毁**:合约层面无销毁功能
- **不可升级**:合约逻辑永久固定
- **标准ERC-20**完全兼容所有主流钱包和DEX
## 部署步骤
### 1. 安装依赖
```bash
cd backend/services/blockchain-service/contracts/eUSDT
npm install
```
### 2. 编译合约
```bash
node compile.mjs
```
编译后会在 `build/` 目录生成:
- `EnergyUSDT.abi` - 合约ABI
- `EnergyUSDT.bin` - 合约字节码
### 3. 部署合约
确保部署账户有足够的 KAVA 支付 gas 费(约 0.02 KAVA
```bash
node deploy.mjs
```
## 合约函数
| 函数 | 说明 |
|------|------|
| `name()` | 返回 "Energy USDT" |
| `symbol()` | 返回 "eUSDT" |
| `decimals()` | 返回 6 |
| `totalSupply()` | 返回 10,002,000,000 * 10^6 |
| `balanceOf(address)` | 查询账户余额 |
| `transfer(address, uint256)` | 转账 |
| `approve(address, uint256)` | 授权额度 |
| `transferFrom(address, address, uint256)` | 代理转账 |
| `allowance(address, address)` | 查询授权额度 |
## 事件
| 事件 | 说明 |
|------|------|
| `Transfer(from, to, value)` | 转账事件 |
| `Approval(owner, spender, value)` | 授权事件 |
## 部署信息
| 网络 | 合约地址 | 区块浏览器 |
|------|---------|-----------|
| KAVA Mainnet | `0x7C3275D808eFbAE90C06C7E3A9AfDdcAa8563931` | https://kavascan.com/address/0x7C3275D808eFbAE90C06C7E3A9AfDdcAa8563931 |
**部署详情:**
- 部署者/代币拥有者:`0x4F7E78d6B7C5FC502Ec7039848690f08c8970F1E`
- 私钥:`0x886ea4cffe76c386fecf3ff321ac9ae913737c46c17bc6ce2413752144668a2a`
- 初始持有量10,002,000,000 eUSDT全部代币
- 交易哈希:`0x5bebaa4a35378438ba5c891972024a1766935d2e01397a33502aa99e956a6b19`
- 部署时间2026-01-19
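Because the table above gives the mainnet contract address and the token follows the plain ERC-20 interface, reading a balance is a single eth_call through any EVM client. A short ethers v6 sketch against the Kava RPC used by the deploy script below; the holder shown is the deployer address from the table:

```typescript
import { Contract, JsonRpcProvider, formatUnits } from 'ethers';

const EUSDT_ADDRESS = '0x7C3275D808eFbAE90C06C7E3A9AfDdcAa8563931'; // KAVA mainnet address above
const EUSDT_ABI = [
  'function balanceOf(address) view returns (uint256)',
  'function transfer(address to, uint256 amount) returns (bool)',
];

async function readEusdtBalance(holder: string): Promise<string> {
  const provider = new JsonRpcProvider('https://evm.kava.io'); // Kava mainnet RPC (see deploy.mjs)
  const eusdt = new Contract(EUSDT_ADDRESS, EUSDT_ABI, provider);
  const raw: bigint = await eusdt.balanceOf(holder);
  return formatUnits(raw, 6); // eUSDT uses 6 decimals
}

// e.g. readEusdtBalance('0x4F7E78d6B7C5FC502Ec7039848690f08c8970F1E').then(console.log);
```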

View File

@ -1,51 +0,0 @@
import solc from 'solc';
import fs from 'fs';
const source = fs.readFileSync('EnergyUSDT.sol', 'utf8');
const input = {
language: 'Solidity',
sources: {
'EnergyUSDT.sol': {
content: source
}
},
settings: {
optimizer: {
enabled: true,
runs: 200
},
evmVersion: 'paris', // Use paris to avoid PUSH0
outputSelection: {
'*': {
'*': ['abi', 'evm.bytecode']
}
}
}
};
const output = JSON.parse(solc.compile(JSON.stringify(input)));
if (output.errors) {
output.errors.forEach(err => {
console.log(err.formattedMessage);
});
// Check for actual errors (not just warnings)
const hasErrors = output.errors.some(err => err.severity === 'error');
if (hasErrors) {
process.exit(1);
}
}
const contract = output.contracts['EnergyUSDT.sol']['EnergyUSDT'];
const bytecode = contract.evm.bytecode.object;
const abi = contract.abi;
fs.mkdirSync('build', { recursive: true });
fs.writeFileSync('build/EnergyUSDT.bin', bytecode);
fs.writeFileSync('build/EnergyUSDT.abi', JSON.stringify(abi, null, 2));
console.log('Compiled successfully!');
console.log('Bytecode length:', bytecode.length);
console.log('ABI functions:', abi.filter(x => x.type === 'function').map(x => x.name).join(', '));

View File

@ -1,86 +0,0 @@
import { ethers } from 'ethers';
import fs from 'fs';
// Same deployer account as dUSDT
const PRIVATE_KEY = '0x886ea4cffe76c386fecf3ff321ac9ae913737c46c17bc6ce2413752144668a2a';
const RPC_URL = 'https://evm.kava.io';
// Contract bytecode
const BYTECODE = '0x' + fs.readFileSync('build/EnergyUSDT.bin', 'utf8');
const ABI = JSON.parse(fs.readFileSync('build/EnergyUSDT.abi', 'utf8'));
async function deploy() {
// Connect to Kava mainnet
const provider = new ethers.JsonRpcProvider(RPC_URL);
const wallet = new ethers.Wallet(PRIVATE_KEY, provider);
console.log('Deployer address:', wallet.address);
// Check balance
const balance = await provider.getBalance(wallet.address);
console.log('Balance:', ethers.formatEther(balance), 'KAVA');
if (parseFloat(ethers.formatEther(balance)) < 0.01) {
console.error('Insufficient KAVA balance for deployment!');
process.exit(1);
}
// Get network info
const network = await provider.getNetwork();
console.log('Chain ID:', network.chainId.toString());
// Create contract factory
const factory = new ethers.ContractFactory(ABI, BYTECODE, wallet);
console.log('Deploying EnergyUSDT (eUSDT) contract...');
// Deploy
const contract = await factory.deploy();
console.log('Transaction hash:', contract.deploymentTransaction().hash);
// Wait for deployment
console.log('Waiting for confirmation...');
await contract.waitForDeployment();
const contractAddress = await contract.getAddress();
console.log('Contract deployed at:', contractAddress);
// Verify deployment
console.log('\nVerifying deployment...');
const name = await contract.name();
const symbol = await contract.symbol();
const decimals = await contract.decimals();
const totalSupply = await contract.totalSupply();
const ownerBalance = await contract.balanceOf(wallet.address);
console.log('Token name:', name);
console.log('Token symbol:', symbol);
console.log('Decimals:', decimals.toString());
console.log('Total supply:', ethers.formatUnits(totalSupply, 6), 'eUSDT');
console.log('Owner balance:', ethers.formatUnits(ownerBalance, 6), 'eUSDT');
console.log('\n=== DEPLOYMENT COMPLETE ===');
console.log('Contract Address:', contractAddress);
console.log('Explorer:', `https://kavascan.com/address/${contractAddress}`);
// Save deployment info
const deploymentInfo = {
network: 'KAVA Mainnet',
chainId: 2222,
contractAddress,
deployer: wallet.address,
transactionHash: contract.deploymentTransaction().hash,
deployedAt: new Date().toISOString(),
token: {
name,
symbol,
decimals: decimals.toString(),
totalSupply: totalSupply.toString()
}
};
fs.writeFileSync('deployment.json', JSON.stringify(deploymentInfo, null, 2));
console.log('\nDeployment info saved to deployment.json');
}
deploy().catch(console.error);

View File

@ -1,14 +0,0 @@
{
"network": "KAVA Mainnet",
"chainId": 2222,
"contractAddress": "0x7C3275D808eFbAE90C06C7E3A9AfDdcAa8563931",
"deployer": "0x4F7E78d6B7C5FC502Ec7039848690f08c8970F1E",
"transactionHash": "0x5bebaa4a35378438ba5c891972024a1766935d2e01397a33502aa99e956a6b19",
"deployedAt": "2026-01-19T13:25:28.071Z",
"token": {
"name": "Energy USDT",
"symbol": "eUSDT",
"decimals": "6",
"totalSupply": "10002000000000000"
}
}

View File

@ -1,222 +0,0 @@
{
"name": "eusdt-contract",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "eusdt-contract",
"version": "1.0.0",
"dependencies": {
"ethers": "^6.9.0",
"solc": "^0.8.19"
}
},
"node_modules/@adraffy/ens-normalize": {
"version": "1.10.1",
"resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz",
"integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==",
"license": "MIT"
},
"node_modules/@noble/curves": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.2.0.tgz",
"integrity": "sha512-oYclrNgRaM9SsBUBVbb8M6DTV7ZHRTKugureoYEncY5c65HOmRzvSiTE3y5CYaPYJA/GVkrhXEoF0M3Ya9PMnw==",
"license": "MIT",
"dependencies": {
"@noble/hashes": "1.3.2"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@noble/hashes": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.2.tgz",
"integrity": "sha512-MVC8EAQp7MvEcm30KWENFjgR+Mkmf+D189XJTkFIlwohU5hcBbn1ZkKq7KVTi2Hme3PMGF390DaL52beVrIihQ==",
"license": "MIT",
"engines": {
"node": ">= 16"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@types/node": {
"version": "22.7.5",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.5.tgz",
"integrity": "sha512-jML7s2NAzMWc//QSJ1a3prpk78cOPchGvXJsC3C6R6PSMoooztvRVQEz89gmBTBY1SPMaqo5teB4uNHPdetShQ==",
"license": "MIT",
"dependencies": {
"undici-types": "~6.19.2"
}
},
"node_modules/aes-js": {
"version": "4.0.0-beta.5",
"resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz",
"integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q==",
"license": "MIT"
},
"node_modules/command-exists": {
"version": "1.2.9",
"resolved": "https://registry.npmjs.org/command-exists/-/command-exists-1.2.9.tgz",
"integrity": "sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==",
"license": "MIT"
},
"node_modules/commander": {
"version": "8.3.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz",
"integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==",
"license": "MIT",
"engines": {
"node": ">= 12"
}
},
"node_modules/ethers": {
"version": "6.16.0",
"resolved": "https://registry.npmjs.org/ethers/-/ethers-6.16.0.tgz",
"integrity": "sha512-U1wulmetNymijEhpSEQ7Ct/P/Jw9/e7R1j5XIbPRydgV2DjLVMsULDlNksq3RQnFgKoLlZf88ijYtWEXcPa07A==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/ethers-io/"
},
{
"type": "individual",
"url": "https://www.buymeacoffee.com/ricmoo"
}
],
"license": "MIT",
"dependencies": {
"@adraffy/ens-normalize": "1.10.1",
"@noble/curves": "1.2.0",
"@noble/hashes": "1.3.2",
"@types/node": "22.7.5",
"aes-js": "4.0.0-beta.5",
"tslib": "2.7.0",
"ws": "8.17.1"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/follow-redirects": {
"version": "1.15.11",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
"integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"license": "MIT",
"engines": {
"node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
}
},
"node_modules/js-sha3": {
"version": "0.8.0",
"resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz",
"integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==",
"license": "MIT"
},
"node_modules/memorystream": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz",
"integrity": "sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw==",
"engines": {
"node": ">= 0.10.0"
}
},
"node_modules/os-tmpdir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
"integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/semver": {
"version": "5.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz",
"integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==",
"license": "ISC",
"bin": {
"semver": "bin/semver"
}
},
"node_modules/solc": {
"version": "0.8.19",
"resolved": "https://registry.npmjs.org/solc/-/solc-0.8.19.tgz",
"integrity": "sha512-yqurS3wzC4LdEvmMobODXqprV4MYJcVtinuxgrp61ac8K2zz40vXA0eSAskSHPgv8dQo7Nux39i3QBsHx4pqyA==",
"license": "MIT",
"dependencies": {
"command-exists": "^1.2.8",
"commander": "^8.1.0",
"follow-redirects": "^1.12.1",
"js-sha3": "0.8.0",
"memorystream": "^0.3.1",
"semver": "^5.5.0",
"tmp": "0.0.33"
},
"bin": {
"solcjs": "solc.js"
},
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/tmp": {
"version": "0.0.33",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
"integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
"license": "MIT",
"dependencies": {
"os-tmpdir": "~1.0.2"
},
"engines": {
"node": ">=0.6.0"
}
},
"node_modules/tslib": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.7.0.tgz",
"integrity": "sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==",
"license": "0BSD"
},
"node_modules/undici-types": {
"version": "6.19.8",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
"license": "MIT"
},
"node_modules/ws": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz",
"integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
}
}
}

View File

@ -1,14 +0,0 @@
{
"name": "eusdt-contract",
"version": "1.0.0",
"type": "module",
"description": "Energy USDT (eUSDT) ERC-20 Token Contract",
"scripts": {
"compile": "node compile.mjs",
"deploy": "node deploy.mjs"
},
"dependencies": {
"ethers": "^6.9.0",
"solc": "^0.8.19"
}
}

View File

@ -1,78 +0,0 @@
// SPDX-License-Identifier: MIT
pragma solidity 0.8.19;
/**
* @title FutureUSDT
* @dev Fixed supply ERC-20 token - NO MINTING CAPABILITY
* Total Supply: 1,000,000,000,000 (1 Trillion) tokens with 6 decimals (matching USDT)
*
* IMPORTANT: This contract has NO mint function and NO way to increase supply.
* All tokens are minted to the deployer at construction time.
*/
contract FutureUSDT {
string public constant name = "Future USDT";
string public constant symbol = "fUSDT";
uint8 public constant decimals = 6;
// Fixed total supply: 1 trillion tokens (1,000,000,000,000 * 10^6)
uint256 public constant totalSupply = 1_000_000_000_000 * 10**6;
mapping(address => uint256) private _balances;
mapping(address => mapping(address => uint256)) private _allowances;
event Transfer(address indexed from, address indexed to, uint256 value);
event Approval(address indexed owner, address indexed spender, uint256 value);
/**
* @dev Constructor - mints entire fixed supply to deployer
* No mint function exists - supply is permanently fixed
*/
constructor() {
_balances[msg.sender] = totalSupply;
emit Transfer(address(0), msg.sender, totalSupply);
}
function balanceOf(address account) public view returns (uint256) {
return _balances[account];
}
function transfer(address to, uint256 amount) public returns (bool) {
require(to != address(0), "Transfer to zero address");
require(_balances[msg.sender] >= amount, "Insufficient balance");
unchecked {
_balances[msg.sender] -= amount;
_balances[to] += amount;
}
emit Transfer(msg.sender, to, amount);
return true;
}
function allowance(address owner, address spender) public view returns (uint256) {
return _allowances[owner][spender];
}
function approve(address spender, uint256 amount) public returns (bool) {
require(spender != address(0), "Approve to zero address");
_allowances[msg.sender][spender] = amount;
emit Approval(msg.sender, spender, amount);
return true;
}
function transferFrom(address from, address to, uint256 amount) public returns (bool) {
require(from != address(0), "Transfer from zero address");
require(to != address(0), "Transfer to zero address");
require(_balances[from] >= amount, "Insufficient balance");
require(_allowances[from][msg.sender] >= amount, "Insufficient allowance");
unchecked {
_balances[from] -= amount;
_balances[to] += amount;
_allowances[from][msg.sender] -= amount;
}
emit Transfer(from, to, amount);
return true;
}
}

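As a quick cross-check (not part of the repository), the fixed supply constant in the contract above works out to 10^18 base units, which is exactly the `totalSupply` string recorded in the fUSDT `deployment.json` later in this diff:

```typescript
// Sanity check: 1,000,000,000,000 tokens with 6 decimals, expressed in base units.
const tokens = 1_000_000_000_000n;
const decimals = 6n;
const totalSupplyBaseUnits = tokens * 10n ** decimals;
console.log(totalSupplyBaseUnits.toString()); // "1000000000000000000" (10^18)
```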
View File

@ -1,81 +0,0 @@
# fUSDT (Future USDT)
## Token Information
| Property | Value |
|------|-----|
| Name | Future USDT |
| Symbol | fUSDT |
| Precision | 6 decimals |
| Total supply | 1,000,000,000,000 (1 trillion) |
| Standard | ERC-20 |
| Deployment chain | KAVA Mainnet (Chain ID: 2222) |
## Contract Properties
- **Fixed supply**: 1 trillion tokens, all minted to the deployer at deployment time
- **No inflation**: the contract has no mint function; the supply is permanently fixed
- **No burning**: there is no burn capability at the contract level
- **Not upgradeable**: the contract logic is permanently fixed
- **Standard ERC-20**: fully compatible with mainstream wallets and DEXes
## Deployment Steps
### 1. Install dependencies
```bash
cd backend/services/blockchain-service/contracts/fUSDT
npm install
```
### 2. Compile the contract
```bash
node compile.mjs
```
Compilation writes the following files into the `build/` directory:
- `FutureUSDT.abi` - contract ABI
- `FutureUSDT.bin` - contract bytecode
### 3. Deploy the contract
Make sure the deployer account holds enough KAVA to cover gas (roughly 0.02 KAVA):
```bash
node deploy.mjs
```
## Contract Functions
| Function | Description |
|------|------|
| `name()` | Returns "Future USDT" |
| `symbol()` | Returns "fUSDT" |
| `decimals()` | Returns 6 |
| `totalSupply()` | Returns 1,000,000,000,000 * 10^6 |
| `balanceOf(address)` | Returns an account's balance |
| `transfer(address, uint256)` | Transfers tokens |
| `approve(address, uint256)` | Approves a spending allowance |
| `transferFrom(address, address, uint256)` | Transfers on behalf of an approved owner |
| `allowance(address, address)` | Returns the remaining allowance |
## Events
| Event | Description |
|------|------|
| `Transfer(from, to, value)` | Emitted on transfers |
| `Approval(owner, spender, value)` | Emitted on approvals |
## Deployment Information
| Network | Contract Address | Block Explorer |
|------|---------|-----------|
| KAVA Mainnet | `0x14dc4f7d3E4197438d058C3D156dd9826A161134` | https://kavascan.com/address/0x14dc4f7d3E4197438d058C3D156dd9826A161134 |
**Deployment details:**
- Deployer / token owner: `0x4F7E78d6B7C5FC502Ec7039848690f08c8970F1E`
- Private key: `0x886ea4cffe76c386fecf3ff321ac9ae913737c46c17bc6ce2413752144668a2a`
- Initial holdings: 1,000,000,000,000 fUSDT (the entire supply)
- Transaction hash: `0x071f535971bc3a134dd26c182b6f05c53f0c3783e91fe6ef471d6c914e4cdb06`
- Deployed at: 2026-01-19

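For reference, a minimal TypeScript sketch of interacting with the deployed contract using ethers v6 (the same library the deploy script uses). The RPC URL, contract address, and holder address are the values listed above; everything else (ABI subset, env variable, recipient) is illustrative:

```typescript
import { ethers } from 'ethers';

// Read-only interaction with the deployed fUSDT contract via ethers v6.
const RPC_URL = 'https://evm.kava.io';
const FUSDT_ADDRESS = '0x14dc4f7d3E4197438d058C3D156dd9826A161134';
const ERC20_ABI = [
  'function decimals() view returns (uint8)',
  'function totalSupply() view returns (uint256)',
  'function balanceOf(address account) view returns (uint256)',
  'function transfer(address to, uint256 amount) returns (bool)',
];

async function main(): Promise<void> {
  const provider = new ethers.JsonRpcProvider(RPC_URL);
  const token = new ethers.Contract(FUSDT_ADDRESS, ERC20_ABI, provider);

  const owner = '0x4F7E78d6B7C5FC502Ec7039848690f08c8970F1E';
  const [supply, balance] = await Promise.all([token.totalSupply(), token.balanceOf(owner)]);
  console.log('Total supply:', ethers.formatUnits(supply, 6), 'fUSDT');
  console.log('Owner balance:', ethers.formatUnits(balance, 6), 'fUSDT');

  // To send tokens, attach a signer instead of the bare provider, e.g.:
  //   const wallet = new ethers.Wallet(process.env.PRIVATE_KEY!, provider);
  //   const writable = new ethers.Contract(FUSDT_ADDRESS, ERC20_ABI, wallet);
  //   await writable.transfer(recipient, ethers.parseUnits('100', 6));
}

main().catch(console.error);
```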
View File

@ -1,51 +0,0 @@
import solc from 'solc';
import fs from 'fs';
const source = fs.readFileSync('FutureUSDT.sol', 'utf8');
const input = {
language: 'Solidity',
sources: {
'FutureUSDT.sol': {
content: source
}
},
settings: {
optimizer: {
enabled: true,
runs: 200
},
evmVersion: 'paris', // Use paris to avoid PUSH0
outputSelection: {
'*': {
'*': ['abi', 'evm.bytecode']
}
}
}
};
const output = JSON.parse(solc.compile(JSON.stringify(input)));
if (output.errors) {
output.errors.forEach(err => {
console.log(err.formattedMessage);
});
// Check for actual errors (not just warnings)
const hasErrors = output.errors.some(err => err.severity === 'error');
if (hasErrors) {
process.exit(1);
}
}
const contract = output.contracts['FutureUSDT.sol']['FutureUSDT'];
const bytecode = contract.evm.bytecode.object;
const abi = contract.abi;
fs.mkdirSync('build', { recursive: true });
fs.writeFileSync('build/FutureUSDT.bin', bytecode);
fs.writeFileSync('build/FutureUSDT.abi', JSON.stringify(abi, null, 2));
console.log('Compiled successfully!');
console.log('Bytecode length:', bytecode.length);
console.log('ABI functions:', abi.filter(x => x.type === 'function').map(x => x.name).join(', '));

View File

@ -1,86 +0,0 @@
import { ethers } from 'ethers';
import fs from 'fs';
// Same deployer account as dUSDT
const PRIVATE_KEY = '0x886ea4cffe76c386fecf3ff321ac9ae913737c46c17bc6ce2413752144668a2a';
const RPC_URL = 'https://evm.kava.io';
// Contract bytecode
const BYTECODE = '0x' + fs.readFileSync('build/FutureUSDT.bin', 'utf8');
const ABI = JSON.parse(fs.readFileSync('build/FutureUSDT.abi', 'utf8'));
async function deploy() {
// Connect to Kava mainnet
const provider = new ethers.JsonRpcProvider(RPC_URL);
const wallet = new ethers.Wallet(PRIVATE_KEY, provider);
console.log('Deployer address:', wallet.address);
// Check balance
const balance = await provider.getBalance(wallet.address);
console.log('Balance:', ethers.formatEther(balance), 'KAVA');
if (parseFloat(ethers.formatEther(balance)) < 0.01) {
console.error('Insufficient KAVA balance for deployment!');
process.exit(1);
}
// Get network info
const network = await provider.getNetwork();
console.log('Chain ID:', network.chainId.toString());
// Create contract factory
const factory = new ethers.ContractFactory(ABI, BYTECODE, wallet);
console.log('Deploying FutureUSDT (fUSDT) contract...');
// Deploy
const contract = await factory.deploy();
console.log('Transaction hash:', contract.deploymentTransaction().hash);
// Wait for deployment
console.log('Waiting for confirmation...');
await contract.waitForDeployment();
const contractAddress = await contract.getAddress();
console.log('Contract deployed at:', contractAddress);
// Verify deployment
console.log('\nVerifying deployment...');
const name = await contract.name();
const symbol = await contract.symbol();
const decimals = await contract.decimals();
const totalSupply = await contract.totalSupply();
const ownerBalance = await contract.balanceOf(wallet.address);
console.log('Token name:', name);
console.log('Token symbol:', symbol);
console.log('Decimals:', decimals.toString());
console.log('Total supply:', ethers.formatUnits(totalSupply, 6), 'fUSDT');
console.log('Owner balance:', ethers.formatUnits(ownerBalance, 6), 'fUSDT');
console.log('\n=== DEPLOYMENT COMPLETE ===');
console.log('Contract Address:', contractAddress);
console.log('Explorer:', `https://kavascan.com/address/${contractAddress}`);
// Save deployment info
const deploymentInfo = {
network: 'KAVA Mainnet',
chainId: 2222,
contractAddress,
deployer: wallet.address,
transactionHash: contract.deploymentTransaction().hash,
deployedAt: new Date().toISOString(),
token: {
name,
symbol,
decimals: decimals.toString(),
totalSupply: totalSupply.toString()
}
};
fs.writeFileSync('deployment.json', JSON.stringify(deploymentInfo, null, 2));
console.log('\nDeployment info saved to deployment.json');
}
deploy().catch(console.error);

View File

@ -1,14 +0,0 @@
{
"network": "KAVA Mainnet",
"chainId": 2222,
"contractAddress": "0x14dc4f7d3E4197438d058C3D156dd9826A161134",
"deployer": "0x4F7E78d6B7C5FC502Ec7039848690f08c8970F1E",
"transactionHash": "0x071f535971bc3a134dd26c182b6f05c53f0c3783e91fe6ef471d6c914e4cdb06",
"deployedAt": "2026-01-19T13:26:05.111Z",
"token": {
"name": "Future USDT",
"symbol": "fUSDT",
"decimals": "6",
"totalSupply": "1000000000000000000"
}
}

View File

@ -1,222 +0,0 @@
{
"name": "fusdt-contract",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "fusdt-contract",
"version": "1.0.0",
"dependencies": {
"ethers": "^6.9.0",
"solc": "^0.8.19"
}
},
"node_modules/@adraffy/ens-normalize": {
"version": "1.10.1",
"resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz",
"integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==",
"license": "MIT"
},
"node_modules/@noble/curves": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.2.0.tgz",
"integrity": "sha512-oYclrNgRaM9SsBUBVbb8M6DTV7ZHRTKugureoYEncY5c65HOmRzvSiTE3y5CYaPYJA/GVkrhXEoF0M3Ya9PMnw==",
"license": "MIT",
"dependencies": {
"@noble/hashes": "1.3.2"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@noble/hashes": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.2.tgz",
"integrity": "sha512-MVC8EAQp7MvEcm30KWENFjgR+Mkmf+D189XJTkFIlwohU5hcBbn1ZkKq7KVTi2Hme3PMGF390DaL52beVrIihQ==",
"license": "MIT",
"engines": {
"node": ">= 16"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@types/node": {
"version": "22.7.5",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.5.tgz",
"integrity": "sha512-jML7s2NAzMWc//QSJ1a3prpk78cOPchGvXJsC3C6R6PSMoooztvRVQEz89gmBTBY1SPMaqo5teB4uNHPdetShQ==",
"license": "MIT",
"dependencies": {
"undici-types": "~6.19.2"
}
},
"node_modules/aes-js": {
"version": "4.0.0-beta.5",
"resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz",
"integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q==",
"license": "MIT"
},
"node_modules/command-exists": {
"version": "1.2.9",
"resolved": "https://registry.npmjs.org/command-exists/-/command-exists-1.2.9.tgz",
"integrity": "sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==",
"license": "MIT"
},
"node_modules/commander": {
"version": "8.3.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz",
"integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==",
"license": "MIT",
"engines": {
"node": ">= 12"
}
},
"node_modules/ethers": {
"version": "6.16.0",
"resolved": "https://registry.npmjs.org/ethers/-/ethers-6.16.0.tgz",
"integrity": "sha512-U1wulmetNymijEhpSEQ7Ct/P/Jw9/e7R1j5XIbPRydgV2DjLVMsULDlNksq3RQnFgKoLlZf88ijYtWEXcPa07A==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/ethers-io/"
},
{
"type": "individual",
"url": "https://www.buymeacoffee.com/ricmoo"
}
],
"license": "MIT",
"dependencies": {
"@adraffy/ens-normalize": "1.10.1",
"@noble/curves": "1.2.0",
"@noble/hashes": "1.3.2",
"@types/node": "22.7.5",
"aes-js": "4.0.0-beta.5",
"tslib": "2.7.0",
"ws": "8.17.1"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/follow-redirects": {
"version": "1.15.11",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
"integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"license": "MIT",
"engines": {
"node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
}
},
"node_modules/js-sha3": {
"version": "0.8.0",
"resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz",
"integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==",
"license": "MIT"
},
"node_modules/memorystream": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz",
"integrity": "sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw==",
"engines": {
"node": ">= 0.10.0"
}
},
"node_modules/os-tmpdir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
"integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/semver": {
"version": "5.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz",
"integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==",
"license": "ISC",
"bin": {
"semver": "bin/semver"
}
},
"node_modules/solc": {
"version": "0.8.19",
"resolved": "https://registry.npmjs.org/solc/-/solc-0.8.19.tgz",
"integrity": "sha512-yqurS3wzC4LdEvmMobODXqprV4MYJcVtinuxgrp61ac8K2zz40vXA0eSAskSHPgv8dQo7Nux39i3QBsHx4pqyA==",
"license": "MIT",
"dependencies": {
"command-exists": "^1.2.8",
"commander": "^8.1.0",
"follow-redirects": "^1.12.1",
"js-sha3": "0.8.0",
"memorystream": "^0.3.1",
"semver": "^5.5.0",
"tmp": "0.0.33"
},
"bin": {
"solcjs": "solc.js"
},
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/tmp": {
"version": "0.0.33",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
"integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
"license": "MIT",
"dependencies": {
"os-tmpdir": "~1.0.2"
},
"engines": {
"node": ">=0.6.0"
}
},
"node_modules/tslib": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.7.0.tgz",
"integrity": "sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==",
"license": "0BSD"
},
"node_modules/undici-types": {
"version": "6.19.8",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
"license": "MIT"
},
"node_modules/ws": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz",
"integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
}
}
}

View File

@ -1,14 +0,0 @@
{
"name": "fusdt-contract",
"version": "1.0.0",
"type": "module",
"description": "Future USDT (fUSDT) ERC-20 Token Contract",
"scripts": {
"compile": "node compile.mjs",
"deploy": "node deploy.mjs"
},
"dependencies": {
"ethers": "^6.9.0",
"solc": "^0.8.19"
}
}

View File

@ -1,6 +1,7 @@
-- ============================================================================ -- ============================================================================
-- contribution-service 初始化 migration -- contribution-service 初始化 migration
-- 合并自: 0001_init, 0002_add_transactional_idempotency, 20250120000001_add_region_to_system_accounts -- 合并自: 20260111000000_init, 20260111100000_add_referral_user_ids,
-- 20260112020000_fix_status_varchar_length, 20260112200000_add_adoption_province_city
-- ============================================================================ -- ============================================================================
-- ============================================ -- ============================================
@ -227,9 +228,8 @@ CREATE INDEX "unallocated_contributions_status_idx" ON "unallocated_contribution
CREATE TABLE "system_accounts" ( CREATE TABLE "system_accounts" (
"id" BIGSERIAL NOT NULL, "id" BIGSERIAL NOT NULL,
"account_type" TEXT NOT NULL, "account_type" VARCHAR(20) NOT NULL,
"region_code" TEXT, "name" VARCHAR(100) NOT NULL,
"name" TEXT NOT NULL,
"contribution_balance" DECIMAL(30,10) NOT NULL DEFAULT 0, "contribution_balance" DECIMAL(30,10) NOT NULL DEFAULT 0,
"contribution_never_expires" BOOLEAN NOT NULL DEFAULT false, "contribution_never_expires" BOOLEAN NOT NULL DEFAULT false,
"version" INTEGER NOT NULL DEFAULT 1, "version" INTEGER NOT NULL DEFAULT 1,
@ -239,26 +239,18 @@ CREATE TABLE "system_accounts" (
CONSTRAINT "system_accounts_pkey" PRIMARY KEY ("id") CONSTRAINT "system_accounts_pkey" PRIMARY KEY ("id")
); );
CREATE UNIQUE INDEX "system_accounts_account_type_region_code_key" ON "system_accounts"("account_type", "region_code"); CREATE UNIQUE INDEX "system_accounts_account_type_key" ON "system_accounts"("account_type");
CREATE INDEX "system_accounts_account_type_idx" ON "system_accounts"("account_type");
CREATE INDEX "system_accounts_region_code_idx" ON "system_accounts"("region_code");
CREATE TABLE "system_contribution_records" ( CREATE TABLE "system_contribution_records" (
"id" BIGSERIAL NOT NULL, "id" BIGSERIAL NOT NULL,
"system_account_id" BIGINT NOT NULL, "system_account_id" BIGINT NOT NULL,
"source_adoption_id" BIGINT NOT NULL, "source_adoption_id" BIGINT NOT NULL,
"source_account_sequence" VARCHAR(20) NOT NULL, "source_account_sequence" VARCHAR(20) NOT NULL,
-- 来源类型: FIXED_RATE(固定比例) / LEVEL_OVERFLOW(层级溢出) / LEVEL_NO_ANCESTOR(无上线) / BONUS_TIER_1/2/3(团队奖励未解锁)
"source_type" VARCHAR(30) NOT NULL,
-- 层级深度1-15仅对 LEVEL_OVERFLOW 和 LEVEL_NO_ANCESTOR 类型有效
"level_depth" INTEGER,
"distribution_rate" DECIMAL(10,6) NOT NULL, "distribution_rate" DECIMAL(10,6) NOT NULL,
"amount" DECIMAL(30,10) NOT NULL, "amount" DECIMAL(30,10) NOT NULL,
"effective_date" DATE NOT NULL, "effective_date" DATE NOT NULL,
"expire_date" DATE, "expire_date" DATE,
"is_expired" BOOLEAN NOT NULL DEFAULT false, "is_expired" BOOLEAN NOT NULL DEFAULT false,
-- 软删除时间戳
"deleted_at" TIMESTAMP(3),
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "system_contribution_records_pkey" PRIMARY KEY ("id") CONSTRAINT "system_contribution_records_pkey" PRIMARY KEY ("id")
@ -266,8 +258,6 @@ CREATE TABLE "system_contribution_records" (
CREATE INDEX "system_contribution_records_system_account_id_idx" ON "system_contribution_records"("system_account_id"); CREATE INDEX "system_contribution_records_system_account_id_idx" ON "system_contribution_records"("system_account_id");
CREATE INDEX "system_contribution_records_source_adoption_id_idx" ON "system_contribution_records"("source_adoption_id"); CREATE INDEX "system_contribution_records_source_adoption_id_idx" ON "system_contribution_records"("source_adoption_id");
CREATE INDEX "system_contribution_records_source_type_idx" ON "system_contribution_records"("source_type");
CREATE INDEX "system_contribution_records_deleted_at_idx" ON "system_contribution_records"("deleted_at");
ALTER TABLE "system_contribution_records" ADD CONSTRAINT "system_contribution_records_system_account_id_fkey" FOREIGN KEY ("system_account_id") REFERENCES "system_accounts"("id") ON DELETE RESTRICT ON UPDATE CASCADE; ALTER TABLE "system_contribution_records" ADD CONSTRAINT "system_contribution_records_system_account_id_fkey" FOREIGN KEY ("system_account_id") REFERENCES "system_accounts"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
@ -337,36 +327,20 @@ CREATE TABLE "cdc_sync_progress" (
CREATE UNIQUE INDEX "cdc_sync_progress_source_topic_key" ON "cdc_sync_progress"("source_topic"); CREATE UNIQUE INDEX "cdc_sync_progress_source_topic_key" ON "cdc_sync_progress"("source_topic");
-- 2.0 服务间 Outbox 事件幂等表
CREATE TABLE "processed_events" ( CREATE TABLE "processed_events" (
"id" BIGSERIAL NOT NULL, "id" BIGSERIAL NOT NULL,
"event_id" VARCHAR(100) NOT NULL, "event_id" VARCHAR(100) NOT NULL,
"event_type" VARCHAR(50) NOT NULL, "event_type" VARCHAR(50) NOT NULL,
"source_service" VARCHAR(100) NOT NULL, "source_service" VARCHAR(50),
"processed_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, "processed_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "processed_events_pkey" PRIMARY KEY ("id") CONSTRAINT "processed_events_pkey" PRIMARY KEY ("id")
); );
CREATE UNIQUE INDEX "processed_events_source_service_event_id_key" ON "processed_events"("source_service", "event_id"); CREATE UNIQUE INDEX "processed_events_event_id_key" ON "processed_events"("event_id");
CREATE INDEX "processed_events_event_type_idx" ON "processed_events"("event_type"); CREATE INDEX "processed_events_event_type_idx" ON "processed_events"("event_type");
CREATE INDEX "processed_events_processed_at_idx" ON "processed_events"("processed_at"); CREATE INDEX "processed_events_processed_at_idx" ON "processed_events"("processed_at");
-- 1.0 CDC 事件幂等表
CREATE TABLE "processed_cdc_events" (
"id" BIGSERIAL NOT NULL,
"source_topic" VARCHAR(200) NOT NULL,
"offset" BIGINT NOT NULL,
"table_name" VARCHAR(100) NOT NULL,
"operation" VARCHAR(10) NOT NULL,
"processed_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "processed_cdc_events_pkey" PRIMARY KEY ("id")
);
CREATE UNIQUE INDEX "processed_cdc_events_source_topic_offset_key" ON "processed_cdc_events"("source_topic", "offset");
CREATE INDEX "processed_cdc_events_processed_at_idx" ON "processed_cdc_events"("processed_at");
-- ============================================ -- ============================================
-- 9. 配置表 -- 9. 配置表
-- ============================================ -- ============================================

View File

@ -0,0 +1,45 @@
-- ============================================================================
-- Add transactional idempotent-consumption support
-- Provides 100% exactly-once semantics for the 1.0 -> 2.0 CDC sync
-- ============================================================================
-- 1. Create the processed_cdc_events table (idempotency for CDC events)
-- Unique key: (source_topic, offset) - Kafka topic name + message offset
-- Guarantees each CDC event is processed only once (exactly-once semantics)
CREATE TABLE IF NOT EXISTS "processed_cdc_events" (
"id" BIGSERIAL NOT NULL,
"source_topic" VARCHAR(200) NOT NULL, -- Kafka topic name (e.g. cdc.identity.public.user_accounts)
"offset" BIGINT NOT NULL, -- Kafka message offset (unique within a partition)
"table_name" VARCHAR(100) NOT NULL, -- source table name
"operation" VARCHAR(10) NOT NULL, -- CDC operation type: c(create), u(update), d(delete), r(snapshot read)
"processed_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "processed_cdc_events_pkey" PRIMARY KEY ("id")
);
-- Composite unique index: (source_topic, offset) enforces idempotency
-- Note: this is not a database auto-increment ID but the unique identity of a Kafka message
CREATE UNIQUE INDEX "processed_cdc_events_source_topic_offset_key" ON "processed_cdc_events"("source_topic", "offset");
-- Time index, used for purging old rows
CREATE INDEX "processed_cdc_events_processed_at_idx" ON "processed_cdc_events"("processed_at");
-- 2. Fix the processed_events table (idempotency for 2.0 inter-service Outbox events)
-- Unique key: (source_service, event_id) - service name + outbox table ID
-- Outbox IDs from different services can collide, so the service name must be part of the composite unique key
-- 2.1 Alter the source_service column: widen 50 -> 100 and make it NOT NULL
-- First backfill a default for existing NULL values
UPDATE "processed_events" SET "source_service" = 'unknown' WHERE "source_service" IS NULL;
-- Change the column type and constraint
ALTER TABLE "processed_events"
ALTER COLUMN "source_service" SET NOT NULL,
ALTER COLUMN "source_service" TYPE VARCHAR(100);
-- 2.2 Drop the old single-column unique index
DROP INDEX IF EXISTS "processed_events_event_id_key";
-- 2.3 Create the new composite unique index
-- The index name uses snake_case to stay consistent with the column names
CREATE UNIQUE INDEX IF NOT EXISTS "processed_events_source_service_event_id_key" ON "processed_events"("source_service", "event_id");

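To illustrate how the migration above is meant to be used, here is a sketch of an idempotent CDC consumer step with Prisma. This is not the service's actual consumer code; the model name `processedCdcEvent` and the compound-key accessor `sourceTopic_offset` are assumptions based on the schema conventions above.

```typescript
import { PrismaClient, Prisma } from '@prisma/client';

const prisma = new PrismaClient();

// Process one CDC message exactly once: the idempotency record and the
// business writes share a single transaction, so they commit or roll back together.
async function handleCdcMessage(
  topic: string,
  offset: bigint,
  tableName: string,
  operation: 'c' | 'u' | 'd' | 'r',
  apply: (tx: Prisma.TransactionClient) => Promise<void>,
): Promise<void> {
  await prisma.$transaction(async (tx) => {
    // Skip if this (topic, offset) pair was already recorded.
    const seen = await tx.processedCdcEvent.findUnique({
      where: { sourceTopic_offset: { sourceTopic: topic, offset } },
    });
    if (seen) return;

    // Record the event; the unique index added by this migration makes a
    // concurrent duplicate insert fail and roll back the whole transaction.
    await tx.processedCdcEvent.create({
      data: { sourceTopic: topic, offset, tableName, operation },
    });

    // Apply the projection/business changes in the same transaction.
    await apply(tx);
  });
}
```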
View File

@ -300,9 +300,8 @@ model UnallocatedContribution {
// 系统账户(运营/省/市/总部) // 系统账户(运营/省/市/总部)
model SystemAccount { model SystemAccount {
id BigInt @id @default(autoincrement()) id BigInt @id @default(autoincrement())
accountType String @map("account_type") // OPERATION / PROVINCE / CITY / HEADQUARTERS accountType String @unique @map("account_type") @db.VarChar(20) // OPERATION / PROVINCE / CITY / HEADQUARTERS
regionCode String? @map("region_code") // 省/市代码,如 440000, 440100 name String @db.VarChar(100)
name String
contributionBalance Decimal @default(0) @map("contribution_balance") @db.Decimal(30, 10) contributionBalance Decimal @default(0) @map("contribution_balance") @db.Decimal(30, 10)
contributionNeverExpires Boolean @default(false) @map("contribution_never_expires") contributionNeverExpires Boolean @default(false) @map("contribution_never_expires")
@ -314,9 +313,6 @@ model SystemAccount {
records SystemContributionRecord[] records SystemContributionRecord[]
@@unique([accountType, regionCode])
@@index([accountType])
@@index([regionCode])
@@map("system_accounts") @@map("system_accounts")
} }
@ -327,11 +323,6 @@ model SystemContributionRecord {
sourceAdoptionId BigInt @map("source_adoption_id") sourceAdoptionId BigInt @map("source_adoption_id")
sourceAccountSequence String @map("source_account_sequence") @db.VarChar(20) sourceAccountSequence String @map("source_account_sequence") @db.VarChar(20)
// 来源类型FIXED_RATE(固定比例分配) / LEVEL_OVERFLOW(层级溢出) / LEVEL_NO_ANCESTOR(无上线) / BONUS_TIER_1/2/3(团队奖励未解锁)
sourceType String @map("source_type") @db.VarChar(30)
// 层级深度:对于 LEVEL_OVERFLOW 和 LEVEL_NO_ANCESTOR 类型表示第几级1-15
levelDepth Int? @map("level_depth")
distributionRate Decimal @map("distribution_rate") @db.Decimal(10, 6) distributionRate Decimal @map("distribution_rate") @db.Decimal(10, 6)
amount Decimal @map("amount") @db.Decimal(30, 10) amount Decimal @map("amount") @db.Decimal(30, 10)
@ -340,14 +331,11 @@ model SystemContributionRecord {
isExpired Boolean @default(false) @map("is_expired") isExpired Boolean @default(false) @map("is_expired")
createdAt DateTime @default(now()) @map("created_at") createdAt DateTime @default(now()) @map("created_at")
deletedAt DateTime? @map("deleted_at") // 软删除标记
systemAccount SystemAccount @relation(fields: [systemAccountId], references: [id]) systemAccount SystemAccount @relation(fields: [systemAccountId], references: [id])
@@index([systemAccountId]) @@index([systemAccountId])
@@index([sourceAdoptionId]) @@index([sourceAdoptionId])
@@index([deletedAt])
@@index([sourceType])
@@map("system_contribution_records") @@map("system_contribution_records")
} }

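Illustrative only: with the schema change above, `account_type` becomes the sole unique key on `system_accounts`, so a system account can be addressed directly by type. A hedged Prisma sketch (the function name and increment amount are made up):

```typescript
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Credit contribution to a system account addressed by its (now unique) account_type.
async function creditSystemAccount(
  accountType: 'OPERATION' | 'PROVINCE' | 'CITY' | 'HEADQUARTERS',
  amount: string,
): Promise<void> {
  await prisma.systemAccount.update({
    where: { accountType },
    data: {
      contributionBalance: { increment: amount },
      version: { increment: 1 },
    },
  });
}
```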
View File

@ -10,8 +10,6 @@ import {
AdoptionSyncedEvent, AdoptionSyncedEvent,
ContributionRecordSyncedEvent, ContributionRecordSyncedEvent,
NetworkProgressUpdatedEvent, NetworkProgressUpdatedEvent,
SystemAccountSyncedEvent,
UnallocatedContributionSyncedEvent,
} from '../../domain/events'; } from '../../domain/events';
import { Public } from '../../shared/guards/jwt-auth.guard'; import { Public } from '../../shared/guards/jwt-auth.guard';
@ -422,190 +420,4 @@ export class AdminController {
}; };
} }
} }
@Post('system-accounts/publish-all')
@Public()
@ApiOperation({ summary: '发布所有系统账户算力事件到 outbox用于同步到 mining-service' })
async publishAllSystemAccounts(): Promise<{
success: boolean;
publishedCount: number;
message: string;
}> {
try {
const systemAccounts = await this.prisma.systemAccount.findMany();
await this.unitOfWork.executeInTransaction(async () => {
const events = systemAccounts.map((account) => {
const event = new SystemAccountSyncedEvent(
account.accountType,
account.regionCode,
account.name,
account.contributionBalance.toString(),
account.createdAt,
);
return {
aggregateType: SystemAccountSyncedEvent.AGGREGATE_TYPE,
aggregateId: `${account.accountType}:${account.regionCode || 'null'}`,
eventType: SystemAccountSyncedEvent.EVENT_TYPE,
payload: event.toPayload(),
};
});
await this.outboxRepository.saveMany(events);
});
this.logger.log(`Published ${systemAccounts.length} system account events`);
return {
success: true,
publishedCount: systemAccounts.length,
message: `Published ${systemAccounts.length} system account events`,
};
} catch (error) {
this.logger.error('Failed to publish system accounts', error);
return {
success: false,
publishedCount: 0,
message: `Failed: ${error.message}`,
};
}
}
@Get('system-accounts')
@Public()
@ApiOperation({ summary: '获取所有系统账户算力' })
async getSystemAccounts() {
const systemAccounts = await this.prisma.systemAccount.findMany();
return {
accounts: systemAccounts.map((a) => ({
accountType: a.accountType,
name: a.name,
contributionBalance: a.contributionBalance.toString(),
createdAt: a.createdAt,
updatedAt: a.updatedAt,
})),
total: systemAccounts.length,
};
}
@Get('unallocated-contributions')
@Public()
@ApiOperation({ summary: '获取所有未分配算力列表,供 mining-service 定时同步' })
async getUnallocatedContributions(): Promise<{
contributions: Array<{
sourceAdoptionId: string;
sourceAccountSequence: string;
wouldBeAccountSequence: string | null;
contributionType: string;
amount: string;
reason: string | null;
effectiveDate: string;
expireDate: string;
}>;
total: number;
}> {
const unallocatedContributions = await this.prisma.unallocatedContribution.findMany({
where: { status: 'PENDING' },
select: {
sourceAdoptionId: true,
sourceAccountSequence: true,
wouldBeAccountSequence: true,
unallocType: true,
amount: true,
reason: true,
effectiveDate: true,
expireDate: true,
},
});
return {
contributions: unallocatedContributions.map((uc) => ({
sourceAdoptionId: uc.sourceAdoptionId.toString(),
sourceAccountSequence: uc.sourceAccountSequence,
wouldBeAccountSequence: uc.wouldBeAccountSequence,
contributionType: uc.unallocType,
amount: uc.amount.toString(),
reason: uc.reason,
effectiveDate: uc.effectiveDate.toISOString(),
expireDate: uc.expireDate.toISOString(),
})),
total: unallocatedContributions.length,
};
}
@Post('unallocated-contributions/publish-all')
@Public()
@ApiOperation({ summary: '发布所有未分配算力事件到 outbox用于同步到 mining-service' })
async publishAllUnallocatedContributions(): Promise<{
success: boolean;
publishedCount: number;
failedCount: number;
message: string;
}> {
const unallocatedContributions = await this.prisma.unallocatedContribution.findMany({
where: { status: 'PENDING' },
select: {
id: true,
sourceAdoptionId: true,
sourceAccountSequence: true,
wouldBeAccountSequence: true,
unallocType: true,
amount: true,
reason: true,
effectiveDate: true,
expireDate: true,
},
});
let publishedCount = 0;
let failedCount = 0;
const batchSize = 100;
for (let i = 0; i < unallocatedContributions.length; i += batchSize) {
const batch = unallocatedContributions.slice(i, i + batchSize);
try {
await this.unitOfWork.executeInTransaction(async () => {
const events = batch.map((uc) => {
const event = new UnallocatedContributionSyncedEvent(
uc.sourceAdoptionId,
uc.sourceAccountSequence,
uc.wouldBeAccountSequence,
uc.unallocType,
uc.amount.toString(),
uc.reason,
uc.effectiveDate,
uc.expireDate,
);
return {
aggregateType: UnallocatedContributionSyncedEvent.AGGREGATE_TYPE,
aggregateId: `${uc.sourceAdoptionId}-${uc.unallocType}`,
eventType: UnallocatedContributionSyncedEvent.EVENT_TYPE,
payload: event.toPayload(),
};
});
await this.outboxRepository.saveMany(events);
});
publishedCount += batch.length;
this.logger.debug(`Published unallocated contribution batch ${Math.floor(i / batchSize) + 1}: ${batch.length} events`);
} catch (error) {
failedCount += batch.length;
this.logger.error(`Failed to publish unallocated contribution batch ${Math.floor(i / batchSize) + 1}`, error);
}
}
this.logger.log(`Published ${publishedCount} unallocated contribution events, ${failedCount} failed`);
return {
success: failedCount === 0,
publishedCount,
failedCount,
message: `Published ${publishedCount} events, ${failedCount} failed out of ${unallocatedContributions.length} total`,
};
}
} }

View File

@ -1,10 +1,8 @@
import { Controller, Get, Param, Query, NotFoundException } from '@nestjs/common'; import { Controller, Get, Param, Query, NotFoundException } from '@nestjs/common';
import { ApiTags, ApiOperation, ApiResponse, ApiParam, ApiQuery } from '@nestjs/swagger'; import { ApiTags, ApiOperation, ApiResponse, ApiParam } from '@nestjs/swagger';
import { GetContributionAccountQuery } from '../../application/queries/get-contribution-account.query'; import { GetContributionAccountQuery } from '../../application/queries/get-contribution-account.query';
import { GetContributionStatsQuery } from '../../application/queries/get-contribution-stats.query'; import { GetContributionStatsQuery } from '../../application/queries/get-contribution-stats.query';
import { GetContributionRankingQuery } from '../../application/queries/get-contribution-ranking.query'; import { GetContributionRankingQuery } from '../../application/queries/get-contribution-ranking.query';
import { GetPlantingLedgerQuery, PlantingLedgerDto } from '../../application/queries/get-planting-ledger.query';
import { GetTeamTreeQuery, DirectReferralsResponseDto, MyTeamInfoDto } from '../../application/queries/get-team-tree.query';
import { import {
ContributionAccountResponse, ContributionAccountResponse,
ContributionRecordsResponse, ContributionRecordsResponse,
@ -13,7 +11,6 @@ import {
import { ContributionStatsResponse } from '../dto/response/contribution-stats.response'; import { ContributionStatsResponse } from '../dto/response/contribution-stats.response';
import { ContributionRankingResponse, UserRankResponse } from '../dto/response/contribution-ranking.response'; import { ContributionRankingResponse, UserRankResponse } from '../dto/response/contribution-ranking.response';
import { GetContributionRecordsRequest } from '../dto/request/get-records.request'; import { GetContributionRecordsRequest } from '../dto/request/get-records.request';
import { Public } from '../../shared/guards/jwt-auth.guard';
@ApiTags('Contribution') @ApiTags('Contribution')
@Controller('contribution') @Controller('contribution')
@ -22,12 +19,9 @@ export class ContributionController {
private readonly getAccountQuery: GetContributionAccountQuery, private readonly getAccountQuery: GetContributionAccountQuery,
private readonly getStatsQuery: GetContributionStatsQuery, private readonly getStatsQuery: GetContributionStatsQuery,
private readonly getRankingQuery: GetContributionRankingQuery, private readonly getRankingQuery: GetContributionRankingQuery,
private readonly getPlantingLedgerQuery: GetPlantingLedgerQuery,
private readonly getTeamTreeQuery: GetTeamTreeQuery,
) {} ) {}
@Get('stats') @Get('stats')
@Public()
@ApiOperation({ summary: '获取算力统计数据' }) @ApiOperation({ summary: '获取算力统计数据' })
@ApiResponse({ status: 200, type: ContributionStatsResponse }) @ApiResponse({ status: 200, type: ContributionStatsResponse })
async getStats(): Promise<ContributionStatsResponse> { async getStats(): Promise<ContributionStatsResponse> {
@ -101,52 +95,4 @@ export class ContributionController {
} }
return result; return result;
} }
@Get('accounts/:accountSequence/planting-ledger')
@ApiOperation({ summary: '获取账户认种分类账' })
@ApiParam({ name: 'accountSequence', description: '账户序号' })
@ApiQuery({ name: 'page', required: false, type: Number, description: '页码' })
@ApiQuery({ name: 'pageSize', required: false, type: Number, description: '每页数量' })
@ApiResponse({ status: 200, description: '认种分类账' })
async getPlantingLedger(
@Param('accountSequence') accountSequence: string,
@Query('page') page?: number,
@Query('pageSize') pageSize?: number,
): Promise<PlantingLedgerDto> {
return this.getPlantingLedgerQuery.execute(
accountSequence,
page ?? 1,
pageSize ?? 20,
);
}
// ========== 团队树 API ==========
@Get('accounts/:accountSequence/team')
@ApiOperation({ summary: '获取账户团队信息' })
@ApiParam({ name: 'accountSequence', description: '账户序号' })
@ApiResponse({ status: 200, description: '团队信息' })
async getMyTeamInfo(
@Param('accountSequence') accountSequence: string,
): Promise<MyTeamInfoDto> {
return this.getTeamTreeQuery.getMyTeamInfo(accountSequence);
}
@Get('accounts/:accountSequence/team/direct-referrals')
@ApiOperation({ summary: '获取账户直推列表(用于伞下树懒加载)' })
@ApiParam({ name: 'accountSequence', description: '账户序号' })
@ApiQuery({ name: 'limit', required: false, type: Number, description: '每页数量' })
@ApiQuery({ name: 'offset', required: false, type: Number, description: '偏移量' })
@ApiResponse({ status: 200, description: '直推列表' })
async getDirectReferrals(
@Param('accountSequence') accountSequence: string,
@Query('limit') limit?: number,
@Query('offset') offset?: number,
): Promise<DirectReferralsResponseDto> {
return this.getTeamTreeQuery.getDirectReferrals(
accountSequence,
limit ?? 100,
offset ?? 0,
);
}
} }

View File

@ -2,7 +2,6 @@ import { Controller, Get } from '@nestjs/common';
import { ApiTags, ApiOperation, ApiResponse } from '@nestjs/swagger'; import { ApiTags, ApiOperation, ApiResponse } from '@nestjs/swagger';
import { PrismaService } from '../../infrastructure/persistence/prisma/prisma.service'; import { PrismaService } from '../../infrastructure/persistence/prisma/prisma.service';
import { RedisService } from '../../infrastructure/redis/redis.service'; import { RedisService } from '../../infrastructure/redis/redis.service';
import { CDCConsumerService } from '../../infrastructure/kafka/cdc-consumer.service';
import { Public } from '../../shared/guards/jwt-auth.guard'; import { Public } from '../../shared/guards/jwt-auth.guard';
interface HealthStatus { interface HealthStatus {
@ -21,7 +20,6 @@ export class HealthController {
constructor( constructor(
private readonly prisma: PrismaService, private readonly prisma: PrismaService,
private readonly redis: RedisService, private readonly redis: RedisService,
private readonly cdcConsumer: CDCConsumerService,
) {} ) {}
@Get() @Get()
@ -70,15 +68,4 @@ export class HealthController {
async live(): Promise<{ alive: boolean }> { async live(): Promise<{ alive: boolean }> {
return { alive: true }; return { alive: true };
} }
@Get('cdc-sync')
@ApiOperation({ summary: 'CDC 同步状态检查' })
@ApiResponse({ status: 200, description: 'CDC 同步状态' })
async cdcSyncStatus(): Promise<{
isRunning: boolean;
sequentialMode: boolean;
allPhasesCompleted: boolean;
}> {
return this.cdcConsumer.getSyncStatus();
}
} }

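For context, the removed `/health/cdc-sync` endpoint above was what deployment scripts could poll until the initial CDC sync finished. A minimal sketch of such a poller (the base URL/port is an assumption; the response fields mirror the handler above):

```typescript
// Poll the CDC sync status endpoint until all sync phases have completed.
interface CdcSyncStatus {
  isRunning: boolean;
  sequentialMode: boolean;
  allPhasesCompleted: boolean;
}

async function waitForCdcSync(baseUrl = 'http://localhost:3000'): Promise<void> {
  for (;;) {
    const res = await fetch(`${baseUrl}/health/cdc-sync`);
    const status = (await res.json()) as CdcSyncStatus;
    if (status.allPhasesCompleted) return;
    await new Promise((resolve) => setTimeout(resolve, 5_000)); // check every 5s
  }
}

waitForCdcSync().catch(console.error);
```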
View File

@ -16,7 +16,6 @@ import { JwtAuthGuard } from './shared/guards/jwt-auth.guard';
envFilePath: [ envFilePath: [
`.env.${process.env.NODE_ENV || 'development'}`, `.env.${process.env.NODE_ENV || 'development'}`,
'.env', '.env',
'../.env', // 父目录共享 .env
], ],
ignoreEnvFile: false, ignoreEnvFile: false,
}), }),

View File

@ -12,15 +12,12 @@ import { CDCEventDispatcher } from './event-handlers/cdc-event-dispatcher';
import { ContributionCalculationService } from './services/contribution-calculation.service'; import { ContributionCalculationService } from './services/contribution-calculation.service';
import { ContributionDistributionPublisherService } from './services/contribution-distribution-publisher.service'; import { ContributionDistributionPublisherService } from './services/contribution-distribution-publisher.service';
import { ContributionRateService } from './services/contribution-rate.service'; import { ContributionRateService } from './services/contribution-rate.service';
import { BonusClaimService } from './services/bonus-claim.service';
import { SnapshotService } from './services/snapshot.service'; import { SnapshotService } from './services/snapshot.service';
// Queries // Queries
import { GetContributionAccountQuery } from './queries/get-contribution-account.query'; import { GetContributionAccountQuery } from './queries/get-contribution-account.query';
import { GetContributionStatsQuery } from './queries/get-contribution-stats.query'; import { GetContributionStatsQuery } from './queries/get-contribution-stats.query';
import { GetContributionRankingQuery } from './queries/get-contribution-ranking.query'; import { GetContributionRankingQuery } from './queries/get-contribution-ranking.query';
import { GetPlantingLedgerQuery } from './queries/get-planting-ledger.query';
import { GetTeamTreeQuery } from './queries/get-team-tree.query';
// Schedulers // Schedulers
import { ContributionScheduler } from './schedulers/contribution.scheduler'; import { ContributionScheduler } from './schedulers/contribution.scheduler';
@ -41,15 +38,12 @@ import { ContributionScheduler } from './schedulers/contribution.scheduler';
ContributionCalculationService, ContributionCalculationService,
ContributionDistributionPublisherService, ContributionDistributionPublisherService,
ContributionRateService, ContributionRateService,
BonusClaimService,
SnapshotService, SnapshotService,
// Queries // Queries
GetContributionAccountQuery, GetContributionAccountQuery,
GetContributionStatsQuery, GetContributionStatsQuery,
GetContributionRankingQuery, GetContributionRankingQuery,
GetPlantingLedgerQuery,
GetTeamTreeQuery,
// Schedulers // Schedulers
ContributionScheduler, ContributionScheduler,
@ -61,8 +55,6 @@ import { ContributionScheduler } from './schedulers/contribution.scheduler';
GetContributionAccountQuery, GetContributionAccountQuery,
GetContributionStatsQuery, GetContributionStatsQuery,
GetContributionRankingQuery, GetContributionRankingQuery,
GetPlantingLedgerQuery,
GetTeamTreeQuery,
], ],
}) })
export class ApplicationModule {} export class ApplicationModule {}

View File

@ -2,7 +2,6 @@ import { Injectable, Logger } from '@nestjs/common';
import Decimal from 'decimal.js'; import Decimal from 'decimal.js';
import { CDCEvent, TransactionClient } from '../../infrastructure/kafka/cdc-consumer.service'; import { CDCEvent, TransactionClient } from '../../infrastructure/kafka/cdc-consumer.service';
import { ContributionCalculationService } from '../services/contribution-calculation.service'; import { ContributionCalculationService } from '../services/contribution-calculation.service';
import { ContributionRateService } from '../services/contribution-rate.service';
/** /**
* *
@ -16,11 +15,19 @@ export interface AdoptionSyncResult {
* CDC * CDC
* 1.0 planting-service同步过来的planting_orders数据 * 1.0 planting-service同步过来的planting_orders数据
* *
* *
* =========================================== * ===========================================
* - handle() 100% * - handle() synced_adoptions
* - status MINING_ENABLED * - AdoptionSyncResultID
* - Serializable * - calculateForAdoption
*
* calculateForAdoption
* 1. calculateForAdoption 使
* 2. Serializable
* 3. "Adoption not found" synced_adoptions
*
* Kafka Idempotent Consumer & Transactional Outbox Pattern
* https://www.lydtechconsulting.com/blog/kafka-idempotent-consumer-transactional-outbox
*/ */
@Injectable() @Injectable()
export class AdoptionSyncedHandler { export class AdoptionSyncedHandler {
@ -28,7 +35,6 @@ export class AdoptionSyncedHandler {
constructor( constructor(
private readonly contributionCalculationService: ContributionCalculationService, private readonly contributionCalculationService: ContributionCalculationService,
private readonly contributionRateService: ContributionRateService,
) {} ) {}
/** /**
@ -42,28 +48,13 @@ export class AdoptionSyncedHandler {
this.logger.log(`[CDC] Adoption event received: op=${op}, seq=${event.sequenceNum}`); this.logger.log(`[CDC] Adoption event received: op=${op}, seq=${event.sequenceNum}`);
this.logger.debug(`[CDC] Adoption event payload: ${JSON.stringify(after || before)}`); this.logger.debug(`[CDC] Adoption event payload: ${JSON.stringify(after || before)}`);
// 获取认种日期,用于查询当日贡献值
const data = after || before;
const adoptionDate = data?.created_at || data?.createdAt || data?.paid_at || data?.paidAt;
// 在事务外获取当日每棵树的贡献值
let contributionPerTree = new Decimal('22617'); // 默认值
if (adoptionDate) {
try {
contributionPerTree = await this.contributionRateService.getContributionPerTree(new Date(adoptionDate));
this.logger.log(`[CDC] Got contributionPerTree for ${adoptionDate}: ${contributionPerTree.toString()}`);
} catch (error) {
this.logger.warn(`[CDC] Failed to get contributionPerTree, using default 22617`, error);
}
}
try { try {
switch (op) { switch (op) {
case 'c': // create case 'c': // create
case 'r': // read (snapshot) case 'r': // read (snapshot)
return await this.handleCreate(after, event.sequenceNum, tx, contributionPerTree); return await this.handleCreate(after, event.sequenceNum, tx);
case 'u': // update case 'u': // update
return await this.handleUpdate(after, before, event.sequenceNum, tx, contributionPerTree); return await this.handleUpdate(after, before, event.sequenceNum, tx);
case 'd': // delete case 'd': // delete
await this.handleDelete(before); await this.handleDelete(before);
return null; return null;
@ -95,21 +86,21 @@ export class AdoptionSyncedHandler {
} }
} }
private async handleCreate(data: any, sequenceNum: bigint, tx: TransactionClient, contributionPerTree: Decimal): Promise<AdoptionSyncResult | null> { private async handleCreate(data: any, sequenceNum: bigint, tx: TransactionClient): Promise<AdoptionSyncResult | null> {
if (!data) { if (!data) {
this.logger.warn(`[CDC] Adoption create: empty data received`); this.logger.warn(`[CDC] Adoption create: empty data received`);
return null; return null;
} }
// planting_orders表字段: order_id, account_sequence, tree_count, created_at, status, selected_province, selected_city
const orderId = data.order_id || data.id; const orderId = data.order_id || data.id;
const accountSequence = data.account_sequence || data.accountSequence; const accountSequence = data.account_sequence || data.accountSequence;
const treeCount = data.tree_count || data.treeCount; const treeCount = data.tree_count || data.treeCount;
const createdAt = data.created_at || data.createdAt || data.paid_at || data.paidAt; const createdAt = data.created_at || data.createdAt || data.paid_at || data.paidAt;
const selectedProvince = data.selected_province || data.selectedProvince || null; const selectedProvince = data.selected_province || data.selectedProvince || null;
const selectedCity = data.selected_city || data.selectedCity || null; const selectedCity = data.selected_city || data.selectedCity || null;
const status = data.status ?? null;
this.logger.log(`[CDC] Adoption create: orderId=${orderId}, account=${accountSequence}, trees=${treeCount}, status=${status}, contributionPerTree=${contributionPerTree.toString()}`); this.logger.log(`[CDC] Adoption create: orderId=${orderId}, account=${accountSequence}, trees=${treeCount}, province=${selectedProvince}, city=${selectedCity}`);
if (!orderId || !accountSequence) { if (!orderId || !accountSequence) {
this.logger.warn(`[CDC] Invalid adoption data: missing order_id or account_sequence`, { data }); this.logger.warn(`[CDC] Invalid adoption data: missing order_id or account_sequence`, { data });
@ -118,7 +109,8 @@ export class AdoptionSyncedHandler {
const originalAdoptionId = BigInt(orderId); const originalAdoptionId = BigInt(orderId);
// 100%同步数据,使用真实的每棵树贡献值 // 在事务中保存同步的认种订单数据
this.logger.log(`[CDC] Upserting synced adoption: ${orderId}`);
await tx.syncedAdoption.upsert({ await tx.syncedAdoption.upsert({
where: { originalAdoptionId }, where: { originalAdoptionId },
create: { create: {
@ -126,10 +118,10 @@ export class AdoptionSyncedHandler {
accountSequence, accountSequence,
treeCount, treeCount,
adoptionDate: new Date(createdAt), adoptionDate: new Date(createdAt),
status, status: data.status ?? null,
selectedProvince, selectedProvince,
selectedCity, selectedCity,
contributionPerTree, contributionPerTree: new Decimal('1'), // 每棵树1算力
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
@ -137,26 +129,25 @@ export class AdoptionSyncedHandler {
accountSequence, accountSequence,
treeCount, treeCount,
adoptionDate: new Date(createdAt), adoptionDate: new Date(createdAt),
status, status: data.status ?? undefined,
selectedProvince, selectedProvince: selectedProvince ?? undefined,
selectedCity, selectedCity: selectedCity ?? undefined,
contributionPerTree, contributionPerTree: new Decimal('1'),
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
}); });
this.logger.log(`[CDC] Adoption synced: orderId=${orderId}, status=${status}`); this.logger.log(`[CDC] Adoption synced successfully: orderId=${orderId}, account=${accountSequence}, trees=${treeCount}`);
// 只有 MINING_ENABLED 状态才触发算力计算 // 返回结果,供事务提交后计算算力
const needsCalculation = status === 'MINING_ENABLED';
return { return {
originalAdoptionId, originalAdoptionId,
needsCalculation, needsCalculation: true,
}; };
} }
private async handleUpdate(after: any, before: any, sequenceNum: bigint, tx: TransactionClient, contributionPerTree: Decimal): Promise<AdoptionSyncResult | null> { private async handleUpdate(after: any, before: any, sequenceNum: bigint, tx: TransactionClient): Promise<AdoptionSyncResult | null> {
if (!after) { if (!after) {
this.logger.warn(`[CDC] Adoption update: empty after data received`); this.logger.warn(`[CDC] Adoption update: empty after data received`);
return null; return null;
@ -164,22 +155,37 @@ export class AdoptionSyncedHandler {
const orderId = after.order_id || after.id; const orderId = after.order_id || after.id;
const originalAdoptionId = BigInt(orderId); const originalAdoptionId = BigInt(orderId);
this.logger.log(`[CDC] Adoption update: orderId=${orderId}`);
// 检查是否已经处理过(使用事务客户端)
const existingAdoption = await tx.syncedAdoption.findUnique({
where: { originalAdoptionId },
});
if (existingAdoption?.contributionDistributed) {
// 如果树数量发生变化,需要重新计算(这种情况较少)
const newTreeCount = after.tree_count || after.treeCount;
if (existingAdoption.treeCount !== newTreeCount) {
this.logger.warn(
`[CDC] Adoption tree count changed after processing: ${originalAdoptionId}, old=${existingAdoption.treeCount}, new=${newTreeCount}. This requires special handling.`,
);
// TODO: 实现树数量变化的处理逻辑
} else {
this.logger.debug(`[CDC] Adoption ${orderId} already distributed, skipping update`);
}
return null;
}
const accountSequence = after.account_sequence || after.accountSequence; const accountSequence = after.account_sequence || after.accountSequence;
const treeCount = after.tree_count || after.treeCount; const treeCount = after.tree_count || after.treeCount;
const createdAt = after.created_at || after.createdAt || after.paid_at || after.paidAt; const createdAt = after.created_at || after.createdAt || after.paid_at || after.paidAt;
const selectedProvince = after.selected_province || after.selectedProvince || null; const selectedProvince = after.selected_province || after.selectedProvince || null;
const selectedCity = after.selected_city || after.selectedCity || null; const selectedCity = after.selected_city || after.selectedCity || null;
const newStatus = after.status ?? null;
const oldStatus = before?.status ?? null;
this.logger.log(`[CDC] Adoption update: orderId=${orderId}, status=${oldStatus} -> ${newStatus}, contributionPerTree=${contributionPerTree.toString()}`); this.logger.log(`[CDC] Adoption update data: account=${accountSequence}, trees=${treeCount}, province=${selectedProvince}, city=${selectedCity}`);
// 查询现有记录 // 在事务中保存同步的认种订单数据
const existingAdoption = await tx.syncedAdoption.findUnique({
where: { originalAdoptionId },
});
// 100%同步数据,使用真实的每棵树贡献值
await tx.syncedAdoption.upsert({ await tx.syncedAdoption.upsert({
where: { originalAdoptionId }, where: { originalAdoptionId },
create: { create: {
@ -187,10 +193,10 @@ export class AdoptionSyncedHandler {
accountSequence, accountSequence,
treeCount, treeCount,
adoptionDate: new Date(createdAt), adoptionDate: new Date(createdAt),
status: newStatus, status: after.status ?? null,
selectedProvince, selectedProvince,
selectedCity, selectedCity,
contributionPerTree, contributionPerTree: new Decimal('1'),
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
@ -198,24 +204,21 @@ export class AdoptionSyncedHandler {
accountSequence, accountSequence,
treeCount, treeCount,
adoptionDate: new Date(createdAt), adoptionDate: new Date(createdAt),
status: newStatus, status: after.status ?? undefined,
selectedProvince, selectedProvince: selectedProvince ?? undefined,
selectedCity, selectedCity: selectedCity ?? undefined,
contributionPerTree, contributionPerTree: new Decimal('1'),
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
}); });
this.logger.log(`[CDC] Adoption synced: orderId=${orderId}, status=${newStatus}`); this.logger.log(`[CDC] Adoption updated successfully: ${originalAdoptionId}`);
// Only trigger contribution calculation when status changes to MINING_ENABLED and it has not been calculated yet
const statusChangedToMiningEnabled = newStatus === 'MINING_ENABLED' && oldStatus !== 'MINING_ENABLED';
const needsCalculation = statusChangedToMiningEnabled && !existingAdoption?.contributionDistributed;
// Only adoptions whose contribution has not been distributed yet need calculation
return { return {
originalAdoptionId, originalAdoptionId,
needsCalculation, needsCalculation: !existingAdoption?.contributionDistributed,
}; };
} }
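
A minimal sketch of the status gate shown in this hunk: recalculation only fires when an order transitions into MINING_ENABLED and its contribution has not been distributed yet. The function name and the 'PAID' status used in the examples are illustrative, not taken from this repository.

type AdoptionStatus = string | null;

function shouldTriggerCalculation(
  oldStatus: AdoptionStatus,
  newStatus: AdoptionStatus,
  contributionDistributed: boolean,
): boolean {
  const becameMiningEnabled =
    newStatus === 'MINING_ENABLED' && oldStatus !== 'MINING_ENABLED';
  return becameMiningEnabled && !contributionDistributed;
}

// shouldTriggerCalculation('PAID', 'MINING_ENABLED', false)           -> true  (mining enabled for the first time)
// shouldTriggerCalculation('MINING_ENABLED', 'MINING_ENABLED', false) -> false (no status transition)
// shouldTriggerCalculation('PAID', 'MINING_ENABLED', true)            -> false (contribution already distributed)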

View File

@ -51,17 +51,14 @@ export class CDCEventDispatcher implements OnModuleInit {
this.handleAdoptionPostCommit.bind(this), this.handleAdoptionPostCommit.bind(this),
); );
// Start the CDC consumer without blocking // Start the CDC consumer
// Let the HTTP server come up first; CDC sync runs in the background try {
// Scripts poll the /health/cdc-sync API for the sync status await this.cdcConsumer.start();
this.cdcConsumer.start()
.then(() => {
this.logger.log('CDC event dispatcher started with transactional idempotency'); this.logger.log('CDC event dispatcher started with transactional idempotency');
}) } catch (error) {
.catch((error) => {
this.logger.error('Failed to start CDC event dispatcher', error); this.logger.error('Failed to start CDC event dispatcher', error);
// Do not rethrow: allow the service to start without Kafka (for local development)
}); }
} }
private async handleUserEvent(event: CDCEvent, tx: TransactionClient): Promise<void> { private async handleUserEvent(event: CDCEvent, tx: TransactionClient): Promise<void> {
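
The non-blocking start above lets the HTTP server come up while CDC sync continues in the background, with scripts polling /health/cdc-sync. A sketch of such a polling step, assuming Node 18+ global fetch and a hypothetical { synced: boolean } response shape (the real payload is not shown in this diff):

interface CdcSyncHealth {
  synced: boolean; // assumed field name; adjust to the real /health/cdc-sync payload
}

async function waitForCdcSync(baseUrl: string, timeoutMs = 10 * 60 * 1000): Promise<void> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      const res = await fetch(`${baseUrl}/health/cdc-sync`);
      if (res.ok && ((await res.json()) as CdcSyncHealth).synced) {
        return; // CDC sync has caught up
      }
    } catch {
      // service may still be booting; keep polling
    }
    await new Promise((resolve) => setTimeout(resolve, 5_000));
  }
  throw new Error('CDC sync did not complete within the timeout');
}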

View File

@ -5,7 +5,22 @@ import { CDCEvent, TransactionClient } from '../../infrastructure/kafka/cdc-cons
 * CDC handler
 * Syncs the referral_relationships data pushed from the 1.0 referral-service
 *
 * 1.0 source fields (referral_relationships):
 * - user_id: BigInt (user ID)
 * - account_sequence: String (account sequence)
 * - referrer_id: BigInt (referrer's user ID, not an account_sequence)
 * - ancestor_path: BigInt[] (ancestor chain of user_ids)
 * - depth: Int (referral depth)
 *
 * 2.0 mapping:
 * - original_user_id (1.0 user_id)
 * - referrer_user_id (1.0 referrer_id)
 * - referrer account_sequence is resolved from already-synced referrals
 * - ancestor_path is stored as a comma-separated string
 *
 * Every write in this handler goes through the transaction client (tx)
 * supplied by the CDC dispatcher, so the upsert and the idempotency record
 * commit atomically.
 */
@Injectable() @Injectable()
export class ReferralSyncedHandler { export class ReferralSyncedHandler {
@ -46,11 +61,12 @@ export class ReferralSyncedHandler {
return; return;
} }
// 1.0 field mapping
const accountSequence = data.account_sequence || data.accountSequence; const accountSequence = data.account_sequence || data.accountSequence;
const originalUserId = data.user_id || data.userId; const originalUserId = data.user_id || data.userId;
const referrerUserId = data.referrer_id || data.referrerId; const referrerUserId = data.referrer_id || data.referrerId;
const ancestorPathArray = data.ancestor_path || data.ancestorPath; const ancestorPathArray = data.ancestor_path || data.ancestorPath;
const depth = data.depth ?? 0; const depth = data.depth || 0;
this.logger.log(`[CDC] Referral create: account=${accountSequence}, userId=${originalUserId}, referrerId=${referrerUserId}, depth=${depth}`); this.logger.log(`[CDC] Referral create: account=${accountSequence}, userId=${originalUserId}, referrerId=${referrerUserId}, depth=${depth}`);
@ -59,9 +75,11 @@ export class ReferralSyncedHandler {
return; return;
} }
// Convert BigInt[] to a comma-separated string
const ancestorPath = this.convertAncestorPath(ancestorPathArray); const ancestorPath = this.convertAncestorPath(ancestorPathArray);
this.logger.debug(`[CDC] Referral ancestorPath converted: ${ancestorPath}`);
// Try to resolve the referrer's account_sequence // Try to resolve the referrer's account_sequence (using the transaction client)
let referrerAccountSequence: string | null = null; let referrerAccountSequence: string | null = null;
if (referrerUserId) { if (referrerUserId) {
const referrer = await tx.syncedReferral.findFirst({ const referrer = await tx.syncedReferral.findFirst({
@ -69,10 +87,14 @@ export class ReferralSyncedHandler {
}); });
if (referrer) { if (referrer) {
referrerAccountSequence = referrer.accountSequence; referrerAccountSequence = referrer.accountSequence;
this.logger.debug(`[CDC] Found referrer account_sequence: ${referrerAccountSequence} for referrer_id: ${referrerUserId}`);
} else {
this.logger.log(`[CDC] Referrer user_id ${referrerUserId} not found yet for ${accountSequence}, will resolve later`);
} }
} }
// Mirror the source data 1:1 // Run every operation on the external transaction client
this.logger.log(`[CDC] Upserting synced referral: ${accountSequence}`);
await tx.syncedReferral.upsert({ await tx.syncedReferral.upsert({
where: { accountSequence }, where: { accountSequence },
create: { create: {
@ -86,17 +108,17 @@ export class ReferralSyncedHandler {
syncedAt: new Date(), syncedAt: new Date(),
}, },
update: { update: {
referrerAccountSequence, referrerAccountSequence: referrerAccountSequence ?? undefined,
referrerUserId: referrerUserId ? BigInt(referrerUserId) : null, referrerUserId: referrerUserId ? BigInt(referrerUserId) : undefined,
originalUserId: originalUserId ? BigInt(originalUserId) : null, originalUserId: originalUserId ? BigInt(originalUserId) : undefined,
ancestorPath, ancestorPath: ancestorPath ?? undefined,
depth, depth: depth ?? undefined,
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
}); });
this.logger.log(`[CDC] Referral synced: ${accountSequence}, referrerId=${referrerUserId || 'none'}, depth=${depth}`); this.logger.log(`[CDC] Referral synced successfully: ${accountSequence} (user_id: ${originalUserId}) -> referrer_id: ${referrerUserId || 'none'}, depth: ${depth}`);
} }
private async handleUpdate(data: any, sequenceNum: bigint, tx: TransactionClient): Promise<void> { private async handleUpdate(data: any, sequenceNum: bigint, tx: TransactionClient): Promise<void> {
@ -109,7 +131,7 @@ export class ReferralSyncedHandler {
const originalUserId = data.user_id || data.userId; const originalUserId = data.user_id || data.userId;
const referrerUserId = data.referrer_id || data.referrerId; const referrerUserId = data.referrer_id || data.referrerId;
const ancestorPathArray = data.ancestor_path || data.ancestorPath; const ancestorPathArray = data.ancestor_path || data.ancestorPath;
const depth = data.depth ?? 0; const depth = data.depth || 0;
this.logger.log(`[CDC] Referral update: account=${accountSequence}, referrerId=${referrerUserId}, depth=${depth}`); this.logger.log(`[CDC] Referral update: account=${accountSequence}, referrerId=${referrerUserId}, depth=${depth}`);
@ -120,7 +142,7 @@ export class ReferralSyncedHandler {
const ancestorPath = this.convertAncestorPath(ancestorPathArray); const ancestorPath = this.convertAncestorPath(ancestorPathArray);
// Try to resolve the referrer's account_sequence // Try to resolve the referrer's account_sequence (using the transaction client)
let referrerAccountSequence: string | null = null; let referrerAccountSequence: string | null = null;
if (referrerUserId) { if (referrerUserId) {
const referrer = await tx.syncedReferral.findFirst({ const referrer = await tx.syncedReferral.findFirst({
@ -128,10 +150,10 @@ export class ReferralSyncedHandler {
}); });
if (referrer) { if (referrer) {
referrerAccountSequence = referrer.accountSequence; referrerAccountSequence = referrer.accountSequence;
this.logger.debug(`[CDC] Found referrer account_sequence: ${referrerAccountSequence}`);
} }
} }
// Mirror the source data 1:1
await tx.syncedReferral.upsert({ await tx.syncedReferral.upsert({
where: { accountSequence }, where: { accountSequence },
create: { create: {
@ -145,17 +167,17 @@ export class ReferralSyncedHandler {
syncedAt: new Date(), syncedAt: new Date(),
}, },
update: { update: {
referrerAccountSequence, referrerAccountSequence: referrerAccountSequence ?? undefined,
referrerUserId: referrerUserId ? BigInt(referrerUserId) : null, referrerUserId: referrerUserId ? BigInt(referrerUserId) : undefined,
originalUserId: originalUserId ? BigInt(originalUserId) : null, originalUserId: originalUserId ? BigInt(originalUserId) : undefined,
ancestorPath, ancestorPath: ancestorPath ?? undefined,
depth, depth: depth ?? undefined,
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
}); });
this.logger.log(`[CDC] Referral synced: ${accountSequence}`); this.logger.log(`[CDC] Referral updated successfully: ${accountSequence}`);
} }
private async handleDelete(data: any): Promise<void> { private async handleDelete(data: any): Promise<void> {
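
A self-contained sketch of the 1.0 -> 2.0 mapping this handler performs. convertAncestorPath below is a guess at the private helper's behaviour, based only on the "convert BigInt[] to a comma-separated string" comment above; the row and return shapes are illustrative.

interface ReferralCdcRow {
  user_id: string | number;
  account_sequence: string;
  referrer_id?: string | number | null;
  ancestor_path?: Array<string | number> | null;
  depth?: number;
}

function convertAncestorPath(path?: Array<string | number> | null): string | null {
  if (!path || path.length === 0) return null;
  return path.map((id) => id.toString()).join(',');
}

function mapToSyncedReferral(row: ReferralCdcRow) {
  return {
    accountSequence: row.account_sequence,
    originalUserId: BigInt(row.user_id),                              // 1.0 user_id
    referrerUserId: row.referrer_id ? BigInt(row.referrer_id) : null, // 1.0 referrer_id
    ancestorPath: convertAncestorPath(row.ancestor_path),
    depth: row.depth ?? 0,
  };
}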

View File

@ -6,7 +6,9 @@ import { ContributionAccountAggregate } from '../../domain/aggregates/contributi
 * CDC handler for synced user data
 *
 * Mirrors 1.0 user records 1:1 and creates the matching contribution account.
 * Every operation in this handler runs on the transaction client (tx)
 * supplied by the CDC dispatcher.
 */
@Injectable() @Injectable()
export class UserSyncedHandler { export class UserSyncedHandler {
@ -47,19 +49,22 @@ export class UserSyncedHandler {
return; return;
} }
// Tolerate both field naming styles (CDC uses snake_case)
const userId = data.user_id ?? data.id; const userId = data.user_id ?? data.id;
const accountSequence = data.account_sequence ?? data.accountSequence; const accountSequence = data.account_sequence ?? data.accountSequence;
const phone = data.phone_number ?? data.phone ?? null; const phone = data.phone_number ?? data.phone ?? null;
const status = data.status ?? null; const status = data.status ?? 'ACTIVE';
this.logger.log(`[CDC] User create: userId=${userId}, accountSequence=${accountSequence}, status=${status}`); this.logger.log(`[CDC] User create: userId=${userId}, accountSequence=${accountSequence}, phone=${phone}, status=${status}`);
if (!userId || !accountSequence) { if (!userId || !accountSequence) {
this.logger.warn(`[CDC] Invalid user data: missing user_id or account_sequence`, { data }); this.logger.warn(`[CDC] Invalid user data: missing user_id or account_sequence`, { data });
return; return;
} }
// Mirror the source data 1:1 // Run every operation on the external transaction client
// Persist the synced user data
this.logger.log(`[CDC] Upserting synced user: ${accountSequence}`);
await tx.syncedUser.upsert({ await tx.syncedUser.upsert({
where: { accountSequence }, where: { accountSequence },
create: { create: {
@ -71,9 +76,8 @@ export class UserSyncedHandler {
syncedAt: new Date(), syncedAt: new Date(),
}, },
update: { update: {
originalUserId: BigInt(userId), phone: phone ?? undefined,
phone, status: status ?? undefined,
status,
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
@ -91,9 +95,11 @@ export class UserSyncedHandler {
data: persistData, data: persistData,
}); });
this.logger.log(`[CDC] Created contribution account for user: ${accountSequence}`); this.logger.log(`[CDC] Created contribution account for user: ${accountSequence}`);
} else {
this.logger.debug(`[CDC] Contribution account already exists for user: ${accountSequence}`);
} }
this.logger.log(`[CDC] User synced: ${accountSequence}`); this.logger.log(`[CDC] User synced successfully: ${accountSequence}`);
} }
private async handleUpdate(data: any, sequenceNum: bigint, tx: TransactionClient): Promise<void> { private async handleUpdate(data: any, sequenceNum: bigint, tx: TransactionClient): Promise<void> {
@ -102,10 +108,11 @@ export class UserSyncedHandler {
return; return;
} }
// Tolerate both field naming styles (CDC uses snake_case)
const userId = data.user_id ?? data.id; const userId = data.user_id ?? data.id;
const accountSequence = data.account_sequence ?? data.accountSequence; const accountSequence = data.account_sequence ?? data.accountSequence;
const phone = data.phone_number ?? data.phone ?? null; const phone = data.phone_number ?? data.phone ?? null;
const status = data.status ?? null; const status = data.status ?? 'ACTIVE';
this.logger.log(`[CDC] User update: userId=${userId}, accountSequence=${accountSequence}, status=${status}`); this.logger.log(`[CDC] User update: userId=${userId}, accountSequence=${accountSequence}, status=${status}`);
@ -114,7 +121,6 @@ export class UserSyncedHandler {
return; return;
} }
// Mirror the source data 1:1
await tx.syncedUser.upsert({ await tx.syncedUser.upsert({
where: { accountSequence }, where: { accountSequence },
create: { create: {
@ -126,15 +132,14 @@ export class UserSyncedHandler {
syncedAt: new Date(), syncedAt: new Date(),
}, },
update: { update: {
originalUserId: BigInt(userId), phone: phone ?? undefined,
phone, status: status ?? undefined,
status,
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
}); });
this.logger.log(`[CDC] User synced: ${accountSequence}`); this.logger.log(`[CDC] User updated successfully: ${accountSequence}`);
} }
private async handleDelete(data: any): Promise<void> { private async handleDelete(data: any): Promise<void> {
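
The `?? undefined` pattern in the update branches above relies on Prisma's documented treatment of undefined (skip the field) versus null (write SQL NULL), so a CDC event with missing fields cannot wipe existing values. A small sketch of building such an update payload; the function name is illustrative.

function buildUserUpdatePayload(
  phone: string | null,
  status: string | null,
  sequenceNum: bigint,
) {
  return {
    phone: phone ?? undefined,   // undefined => Prisma leaves the column unchanged
    status: status ?? undefined, // null would overwrite the column with NULL
    sourceSequenceNum: sequenceNum,
    syncedAt: new Date(),
  };
}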

View File

@ -183,16 +183,16 @@ export class GetContributionAccountQuery {
private toRecordDto(record: any): ContributionRecordDto { private toRecordDto(record: any): ContributionRecordDto {
return { return {
id: record.id?.toString() ?? '', id: record.id,
sourceType: record.sourceType, sourceType: record.sourceType,
sourceAdoptionId: record.sourceAdoptionId?.toString() ?? '', sourceAdoptionId: record.sourceAdoptionId,
sourceAccountSequence: record.sourceAccountSequence, sourceAccountSequence: record.sourceAccountSequence,
treeCount: record.treeCount, treeCount: record.treeCount,
baseContribution: record.baseContribution?.value?.toString() ?? '0', baseContribution: record.baseContribution.value.toString(),
distributionRate: record.distributionRate?.value?.toString() ?? '0', distributionRate: record.distributionRate.value.toString(),
levelDepth: record.levelDepth, levelDepth: record.levelDepth,
bonusTier: record.bonusTier, bonusTier: record.bonusTier,
finalContribution: record.amount?.value?.toString() ?? '0', finalContribution: record.finalContribution.value.toString(),
effectiveDate: record.effectiveDate, effectiveDate: record.effectiveDate,
expireDate: record.expireDate, expireDate: record.expireDate,
isExpired: record.isExpired, isExpired: record.isExpired,

View File

@ -1,5 +1,4 @@
import { Injectable } from '@nestjs/common'; import { Injectable } from '@nestjs/common';
import Decimal from 'decimal.js';
import { ContributionAccountRepository } from '../../infrastructure/persistence/repositories/contribution-account.repository'; import { ContributionAccountRepository } from '../../infrastructure/persistence/repositories/contribution-account.repository';
import { ContributionRecordRepository } from '../../infrastructure/persistence/repositories/contribution-record.repository'; import { ContributionRecordRepository } from '../../infrastructure/persistence/repositories/contribution-record.repository';
import { UnallocatedContributionRepository } from '../../infrastructure/persistence/repositories/unallocated-contribution.repository'; import { UnallocatedContributionRepository } from '../../infrastructure/persistence/repositories/unallocated-contribution.repository';
@ -7,15 +6,6 @@ import { SystemAccountRepository } from '../../infrastructure/persistence/reposi
import { SyncedDataRepository } from '../../infrastructure/persistence/repositories/synced-data.repository'; import { SyncedDataRepository } from '../../infrastructure/persistence/repositories/synced-data.repository';
import { ContributionSourceType } from '../../domain/aggregates/contribution-account.aggregate'; import { ContributionSourceType } from '../../domain/aggregates/contribution-account.aggregate';
// Base contribution constants
const BASE_CONTRIBUTION_PER_TREE = new Decimal('22617');
const RATE_PERSONAL = new Decimal('0.70');
const RATE_OPERATION = new Decimal('0.12');
const RATE_PROVINCE = new Decimal('0.01');
const RATE_CITY = new Decimal('0.02');
const RATE_LEVEL_TOTAL = new Decimal('0.075');
const RATE_BONUS_TOTAL = new Decimal('0.075');
export interface ContributionStatsDto { export interface ContributionStatsDto {
// User statistics
totalUsers: number; totalUsers: number;
@ -26,57 +16,17 @@ export interface ContributionStatsDto {
totalAdoptions: number; totalAdoptions: number;
processedAdoptions: number; processedAdoptions: number;
unprocessedAdoptions: number; unprocessedAdoptions: number;
totalTrees: number;
// Contribution statistics
totalContribution: string; totalContribution: string;
// Contribution breakdown (basic) // Contribution breakdown
contributionByType: { contributionByType: {
personal: string; personal: string;
teamLevel: string; teamLevel: string;
teamBonus: string; teamBonus: string;
}; };
// ========== Detailed contribution breakdown (per user requirements) ==========
// Network-wide contribution = total adopted trees * 22617
networkTotalContribution: string;
// Total personal-user contribution = total adopted trees * (22617 * 70%)
personalTotalContribution: string;
// Operation account total = total adopted trees * (22617 * 12%)
operationTotalContribution: string;
// Province company total = total adopted trees * (22617 * 1%)
provinceTotalContribution: string;
// City company total = total adopted trees * (22617 * 2%)
cityTotalContribution: string;
// Level contribution details (7.5%)
levelContribution: {
total: string;
unlocked: string;
pending: string;
byTier: {
// Tier 1: levels 1-5
tier1: { unlocked: string; pending: string };
// Tier 2: levels 6-10
tier2: { unlocked: string; pending: string };
// Tier 3: levels 11-15
tier3: { unlocked: string; pending: string };
};
};
// Team bonus contribution details (7.5%)
bonusContribution: {
total: string;
unlocked: string;
pending: string;
byTier: {
tier1: { unlocked: string; pending: string };
tier2: { unlocked: string; pending: string };
tier3: { unlocked: string; pending: string };
};
};
// System accounts
systemAccounts: { systemAccounts: {
accountType: string; accountType: string;
@ -111,10 +61,6 @@ export class GetContributionStatsQuery {
systemAccounts, systemAccounts,
totalUnallocated, totalUnallocated,
unallocatedByType, unallocatedByType,
detailedStats,
unallocatedByLevelTier,
unallocatedByBonusTier,
totalTrees,
] = await Promise.all([ ] = await Promise.all([
this.syncedDataRepository.countUsers(), this.syncedDataRepository.countUsers(),
this.accountRepository.countAccounts(), this.accountRepository.countAccounts(),
@ -126,33 +72,8 @@ export class GetContributionStatsQuery {
this.systemAccountRepository.findAll(), this.systemAccountRepository.findAll(),
this.unallocatedRepository.getTotalUnallocated(), this.unallocatedRepository.getTotalUnallocated(),
this.unallocatedRepository.getTotalUnallocatedByType(), this.unallocatedRepository.getTotalUnallocatedByType(),
this.accountRepository.getDetailedContributionStats(),
this.unallocatedRepository.getUnallocatedByLevelTier(),
this.unallocatedRepository.getUnallocatedByBonusTier(),
this.syncedDataRepository.getTotalTrees(),
]); ]);
// Compute theoretical contribution (total adopted trees * base contribution per tree)
const networkTotal = BASE_CONTRIBUTION_PER_TREE.mul(totalTrees);
const personalTotal = networkTotal.mul(RATE_PERSONAL);
const operationTotal = networkTotal.mul(RATE_OPERATION);
const provinceTotal = networkTotal.mul(RATE_PROVINCE);
const cityTotal = networkTotal.mul(RATE_CITY);
const levelTotal = networkTotal.mul(RATE_LEVEL_TOTAL);
const bonusTotal = networkTotal.mul(RATE_BONUS_TOTAL);
// Level contribution: unlocked + pending
const levelUnlocked = new Decimal(detailedStats.levelUnlocked);
const levelPending = new Decimal(unallocatedByLevelTier.tier1)
.plus(unallocatedByLevelTier.tier2)
.plus(unallocatedByLevelTier.tier3);
// Team bonus contribution: unlocked + pending
const bonusUnlocked = new Decimal(detailedStats.bonusUnlocked);
const bonusPending = new Decimal(unallocatedByBonusTier.tier1)
.plus(unallocatedByBonusTier.tier2)
.plus(unallocatedByBonusTier.tier3);
return { return {
totalUsers, totalUsers,
totalAccounts, totalAccounts,
@ -160,63 +81,12 @@ export class GetContributionStatsQuery {
totalAdoptions, totalAdoptions,
processedAdoptions: totalAdoptions - undistributedAdoptions, processedAdoptions: totalAdoptions - undistributedAdoptions,
unprocessedAdoptions: undistributedAdoptions, unprocessedAdoptions: undistributedAdoptions,
totalTrees,
totalContribution: totalContribution.value.toString(), totalContribution: totalContribution.value.toString(),
contributionByType: { contributionByType: {
personal: (contributionByType.get(ContributionSourceType.PERSONAL)?.value || 0).toString(), personal: (contributionByType.get(ContributionSourceType.PERSONAL)?.value || 0).toString(),
teamLevel: (contributionByType.get(ContributionSourceType.TEAM_LEVEL)?.value || 0).toString(), teamLevel: (contributionByType.get(ContributionSourceType.TEAM_LEVEL)?.value || 0).toString(),
teamBonus: (contributionByType.get(ContributionSourceType.TEAM_BONUS)?.value || 0).toString(), teamBonus: (contributionByType.get(ContributionSourceType.TEAM_BONUS)?.value || 0).toString(),
}, },
// Detailed contribution breakdown
networkTotalContribution: networkTotal.toString(),
personalTotalContribution: personalTotal.toString(),
operationTotalContribution: operationTotal.toString(),
provinceTotalContribution: provinceTotal.toString(),
cityTotalContribution: cityTotal.toString(),
// Level contribution details
levelContribution: {
total: levelTotal.toString(),
unlocked: levelUnlocked.toString(),
pending: levelPending.toString(),
byTier: {
tier1: {
unlocked: detailedStats.levelByTier.tier1.unlocked,
pending: unallocatedByLevelTier.tier1,
},
tier2: {
unlocked: detailedStats.levelByTier.tier2.unlocked,
pending: unallocatedByLevelTier.tier2,
},
tier3: {
unlocked: detailedStats.levelByTier.tier3.unlocked,
pending: unallocatedByLevelTier.tier3,
},
},
},
// Team bonus contribution details
bonusContribution: {
total: bonusTotal.toString(),
unlocked: bonusUnlocked.toString(),
pending: bonusPending.toString(),
byTier: {
tier1: {
unlocked: detailedStats.bonusByTier.tier1.unlocked,
pending: unallocatedByBonusTier.tier1,
},
tier2: {
unlocked: detailedStats.bonusByTier.tier2.unlocked,
pending: unallocatedByBonusTier.tier2,
},
tier3: {
unlocked: detailedStats.bonusByTier.tier3.unlocked,
pending: unallocatedByBonusTier.tier3,
},
},
},
systemAccounts: systemAccounts.map((a) => ({ systemAccounts: systemAccounts.map((a) => ({
accountType: a.accountType, accountType: a.accountType,
name: a.name, name: a.name,
@ -228,5 +98,4 @@ export class GetContributionStatsQuery {
), ),
}; };
} }
} }
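
The constants and rate split removed above imply a fixed decomposition: network total = trees x 22617, then 70% personal, 12% operation, 1% province, 2% city, 7.5% level and 7.5% bonus (the six rates sum to 1). A worked example with decimal.js for 100 trees, as a sketch rather than a restatement of the deleted query:

import Decimal from 'decimal.js';

const BASE_CONTRIBUTION_PER_TREE = new Decimal('22617');
const RATES = {
  personal: new Decimal('0.70'),
  operation: new Decimal('0.12'),
  province: new Decimal('0.01'),
  city: new Decimal('0.02'),
  level: new Decimal('0.075'),
  bonus: new Decimal('0.075'),
};

const totalTrees = 100;
const networkTotal = BASE_CONTRIBUTION_PER_TREE.mul(totalTrees); // 2261700

for (const [name, rate] of Object.entries(RATES)) {
  console.log(name, networkTotal.mul(rate).toString());
}
// personal 1583190, operation 271404, province 22617, city 45234,
// level 169627.5, bonus 169627.5 — the parts add back up to 2261700.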

View File

@ -1,85 +0,0 @@
import { Injectable } from '@nestjs/common';
import { SyncedDataRepository } from '../../infrastructure/persistence/repositories/synced-data.repository';
import { ContributionAccountRepository } from '../../infrastructure/persistence/repositories/contribution-account.repository';
export interface PlantingRecordDto {
orderId: string;
orderNo: string;
originalAdoptionId: string;
treeCount: number;
contributionPerTree: string;
totalContribution: string;
status: string;
adoptionDate: string | null;
createdAt: string;
}
export interface PlantingSummaryDto {
totalOrders: number;
totalTreeCount: number;
totalAmount: string;
effectiveTreeCount: number;
/** The user's actual effective contribution (personal contribution) */
effectiveContribution: string;
firstPlantingAt: string | null;
lastPlantingAt: string | null;
}
export interface PlantingLedgerDto {
summary: PlantingSummaryDto;
items: PlantingRecordDto[];
total: number;
page: number;
pageSize: number;
totalPages: number;
}
@Injectable()
export class GetPlantingLedgerQuery {
constructor(
private readonly syncedDataRepository: SyncedDataRepository,
private readonly contributionAccountRepository: ContributionAccountRepository,
) {}
async execute(
accountSequence: string,
page: number = 1,
pageSize: number = 20,
): Promise<PlantingLedgerDto> {
const [summary, ledger, contributionAccount] = await Promise.all([
this.syncedDataRepository.getPlantingSummary(accountSequence),
this.syncedDataRepository.getPlantingLedger(accountSequence, page, pageSize),
this.contributionAccountRepository.findByAccountSequence(accountSequence),
]);
// Get the user's actual effective contribution (personal contribution)
const effectiveContribution = contributionAccount?.personalContribution.toString() || '0';
return {
summary: {
totalOrders: summary.totalOrders,
totalTreeCount: summary.totalTreeCount,
totalAmount: summary.totalAmount,
effectiveTreeCount: summary.effectiveTreeCount,
effectiveContribution,
firstPlantingAt: summary.firstPlantingAt?.toISOString() || null,
lastPlantingAt: summary.lastPlantingAt?.toISOString() || null,
},
items: ledger.items.map((item) => ({
orderId: item.id.toString(),
orderNo: `ORD-${item.originalAdoptionId}`,
originalAdoptionId: item.originalAdoptionId.toString(),
treeCount: item.treeCount,
contributionPerTree: item.contributionPerTree.toString(),
totalContribution: item.contributionPerTree.mul(item.treeCount).toString(),
status: item.status || 'UNKNOWN',
adoptionDate: item.adoptionDate?.toISOString() || null,
createdAt: item.createdAt.toISOString(),
})),
total: ledger.total,
page: ledger.page,
pageSize: ledger.pageSize,
totalPages: ledger.totalPages,
};
}
}
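
In the deleted query above, each ledger row's total is contributionPerTree x treeCount, while the summary's effectiveContribution is read from the contribution account rather than summed from the ledger. A one-line sketch of the per-row total with decimal.js; the function name is illustrative.

import Decimal from 'decimal.js';

function orderTotalContribution(contributionPerTree: Decimal, treeCount: number): string {
  return contributionPerTree.mul(treeCount).toString();
}

// orderTotalContribution(new Decimal('22617'), 3) === '67851'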

View File

@ -1,121 +0,0 @@
import { Injectable, Inject } from '@nestjs/common';
import {
ISyncedDataRepository,
SYNCED_DATA_REPOSITORY,
} from '../../domain/repositories/synced-data.repository.interface';
/**
 * Team member info
 */
export interface TeamMemberDto {
accountSequence: string;
personalPlantingCount: number;
teamPlantingCount: number;
directReferralCount: number;
}
/**
 * Direct referrals response
 */
export interface DirectReferralsResponseDto {
referrals: TeamMemberDto[];
total: number;
hasMore: boolean;
}
/**
 * Current user's team info
 */
export interface MyTeamInfoDto {
accountSequence: string;
personalPlantingCount: number;
teamPlantingCount: number;
directReferralCount: number;
}
@Injectable()
export class GetTeamTreeQuery {
constructor(
@Inject(SYNCED_DATA_REPOSITORY)
private readonly syncedDataRepository: ISyncedDataRepository,
) {}
/**
 * Get the current user's team overview
 */
async getMyTeamInfo(accountSequence: string): Promise<MyTeamInfoDto> {
// Get the user's personal adopted-tree count
const personalPlantingCount = await this.syncedDataRepository.getTotalTreesByAccountSequence(accountSequence);
// Get the direct referral count
const directReferrals = await this.syncedDataRepository.findDirectReferrals(accountSequence);
// Get the team adopted-tree count (sum across all downline levels)
const teamTreesByLevel = await this.syncedDataRepository.getTeamTreesByLevel(accountSequence, 15);
let teamPlantingCount = 0;
teamTreesByLevel.forEach((count) => {
teamPlantingCount += count;
});
return {
accountSequence,
personalPlantingCount,
teamPlantingCount,
directReferralCount: directReferrals.length,
};
}
/**
 * Get direct referrals with pagination
 */
async getDirectReferrals(
accountSequence: string,
limit: number = 100,
offset: number = 0,
): Promise<DirectReferralsResponseDto> {
// Fetch all direct referrals
const allDirectReferrals = await this.syncedDataRepository.findDirectReferrals(accountSequence);
// Paginate
const total = allDirectReferrals.length;
const paginatedReferrals = allDirectReferrals.slice(offset, offset + limit);
// Load details for each direct referral member
const referrals: TeamMemberDto[] = await Promise.all(
paginatedReferrals.map(async (ref) => {
return this.getTeamMemberInfo(ref.accountSequence);
}),
);
return {
referrals,
total,
hasMore: offset + limit < total,
};
}
/**
 * Build the info for a single team member
 */
private async getTeamMemberInfo(accountSequence: string): Promise<TeamMemberDto> {
// Get the member's personal adopted-tree count
const personalPlantingCount = await this.syncedDataRepository.getTotalTreesByAccountSequence(accountSequence);
// Get the direct referral count
const directReferrals = await this.syncedDataRepository.findDirectReferrals(accountSequence);
// Get the team adopted-tree count
const teamTreesByLevel = await this.syncedDataRepository.getTeamTreesByLevel(accountSequence, 15);
let teamPlantingCount = 0;
teamTreesByLevel.forEach((count) => {
teamPlantingCount += count;
});
return {
accountSequence,
personalPlantingCount,
teamPlantingCount,
directReferralCount: directReferrals.length,
};
}
}
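
getMyTeamInfo above derives the team planting count by summing trees across the first 15 referral levels. A minimal sketch of that aggregation; the Map<number, number> return type of getTeamTreesByLevel is an assumption inferred from the forEach usage above.

async function sumTeamTrees(
  getTeamTreesByLevel: (accountSequence: string, maxLevel: number) => Promise<Map<number, number>>,
  accountSequence: string,
): Promise<number> {
  const byLevel = await getTeamTreesByLevel(accountSequence, 15);
  let teamPlantingCount = 0;
  byLevel.forEach((count) => {
    teamPlantingCount += count;
  });
  return teamPlantingCount;
}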

View File

@ -2,13 +2,10 @@ import { Injectable, Logger, OnModuleInit } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule'; import { Cron, CronExpression } from '@nestjs/schedule';
import { ContributionCalculationService } from '../services/contribution-calculation.service'; import { ContributionCalculationService } from '../services/contribution-calculation.service';
import { SnapshotService } from '../services/snapshot.service'; import { SnapshotService } from '../services/snapshot.service';
import { BonusClaimService } from '../services/bonus-claim.service';
import { ContributionRecordRepository } from '../../infrastructure/persistence/repositories/contribution-record.repository'; import { ContributionRecordRepository } from '../../infrastructure/persistence/repositories/contribution-record.repository';
import { ContributionAccountRepository } from '../../infrastructure/persistence/repositories/contribution-account.repository';
import { OutboxRepository } from '../../infrastructure/persistence/repositories/outbox.repository'; import { OutboxRepository } from '../../infrastructure/persistence/repositories/outbox.repository';
import { KafkaProducerService } from '../../infrastructure/kafka/kafka-producer.service'; import { KafkaProducerService } from '../../infrastructure/kafka/kafka-producer.service';
import { RedisService } from '../../infrastructure/redis/redis.service'; import { RedisService } from '../../infrastructure/redis/redis.service';
import { ContributionAccountUpdatedEvent } from '../../domain/events';
/** /**
* *
@ -21,9 +18,7 @@ export class ContributionScheduler implements OnModuleInit {
constructor( constructor(
private readonly calculationService: ContributionCalculationService, private readonly calculationService: ContributionCalculationService,
private readonly snapshotService: SnapshotService, private readonly snapshotService: SnapshotService,
private readonly bonusClaimService: BonusClaimService,
private readonly contributionRecordRepository: ContributionRecordRepository, private readonly contributionRecordRepository: ContributionRecordRepository,
private readonly contributionAccountRepository: ContributionAccountRepository,
private readonly outboxRepository: OutboxRepository, private readonly outboxRepository: OutboxRepository,
private readonly kafkaProducer: KafkaProducerService, private readonly kafkaProducer: KafkaProducerService,
private readonly redis: RedisService, private readonly redis: RedisService,
@ -179,181 +174,4 @@ export class ContributionScheduler implements OnModuleInit {
await this.redis.releaseLock(`${this.LOCK_KEY}:cleanup`, lockValue); await this.redis.releaseLock(`${this.LOCK_KEY}:cleanup`, lockValue);
} }
} }
/**
 * Runs every 10 minutes.
 * Publishes contribution accounts updated within the last 15 minutes.
 */
@Cron('*/10 * * * *')
async publishRecentlyUpdatedAccounts(): Promise<void> {
const lockValue = await this.redis.acquireLock(`${this.LOCK_KEY}:incremental-sync`, 540); // 9分钟锁
if (!lockValue) {
return;
}
try {
// Find accounts updated in the past 15 minutes (5 minutes more than the 10-minute schedule, to avoid missing boundary cases)
const fifteenMinutesAgo = new Date(Date.now() - 15 * 60 * 1000);
const accounts = await this.contributionAccountRepository.findRecentlyUpdated(fifteenMinutesAgo, 500);
if (accounts.length === 0) {
return;
}
const events = accounts.map((account) => {
const event = new ContributionAccountUpdatedEvent(
account.accountSequence,
account.personalContribution.value.toString(),
account.totalLevelPending.value.toString(),
account.totalBonusPending.value.toString(),
account.effectiveContribution.value.toString(),
account.effectiveContribution.value.toString(),
account.hasAdopted,
account.directReferralAdoptedCount,
account.unlockedLevelDepth,
account.unlockedBonusTiers,
account.createdAt,
);
return {
aggregateType: ContributionAccountUpdatedEvent.AGGREGATE_TYPE,
aggregateId: account.accountSequence,
eventType: ContributionAccountUpdatedEvent.EVENT_TYPE,
payload: event.toPayload(),
};
});
await this.outboxRepository.saveMany(events);
this.logger.log(`Incremental sync: published ${accounts.length} recently updated accounts`);
} catch (error) {
this.logger.error('Failed to publish recently updated accounts', error);
} finally {
await this.redis.releaseLock(`${this.LOCK_KEY}:incremental-sync`, lockValue);
}
}
/**
 * Runs every 10 minutes.
 * Backfills level/bonus contributions for accounts with incomplete unlock status.
 */
@Cron('*/10 * * * *')
async processContributionBackfill(): Promise<void> {
const lockValue = await this.redis.acquireLock(`${this.LOCK_KEY}:backfill`, 540); // 9分钟锁
if (!lockValue) {
return;
}
try {
this.logger.log('Starting contribution backfill scan...');
// Find accounts with incomplete unlock status (adopted but level depth < 15 or bonus tiers < 3)
const accounts = await this.contributionAccountRepository.findAccountsWithIncompleteUnlock(100);
if (accounts.length === 0) {
this.logger.debug('No accounts with incomplete unlock status found');
return;
}
this.logger.log(`Found ${accounts.length} accounts with incomplete unlock status`);
let backfilledCount = 0;
let errorCount = 0;
for (const account of accounts) {
try {
const hasBackfill = await this.bonusClaimService.processBackfillForAccount(account.accountSequence);
if (hasBackfill) {
backfilledCount++;
}
} catch (error) {
errorCount++;
this.logger.error(
`Failed to process backfill for account ${account.accountSequence}`,
error,
);
// Continue with the next account
}
}
this.logger.log(
`Contribution backfill completed: ${backfilledCount} accounts backfilled, ${errorCount} errors`,
);
} catch (error) {
this.logger.error('Failed to process contribution backfill', error);
} finally {
await this.redis.releaseLock(`${this.LOCK_KEY}:backfill`, lockValue);
}
}
/**
 * Runs daily at 04:00.
 * Publishes a full sync of all contribution accounts.
 */
@Cron('0 4 * * *')
async publishAllAccountUpdates(): Promise<void> {
const lockValue = await this.redis.acquireLock(`${this.LOCK_KEY}:full-sync`, 3600); // 1小时锁
if (!lockValue) {
return;
}
try {
this.logger.log('Starting daily full sync of contribution accounts...');
let page = 1;
const pageSize = 100;
let totalPublished = 0;
while (true) {
const { items: accounts, total } = await this.contributionAccountRepository.findMany({
page,
limit: pageSize,
orderBy: 'effectiveContribution',
order: 'desc',
});
if (accounts.length === 0) {
break;
}
const events = accounts.map((account) => {
const event = new ContributionAccountUpdatedEvent(
account.accountSequence,
account.personalContribution.value.toString(),
account.totalLevelPending.value.toString(),
account.totalBonusPending.value.toString(),
account.effectiveContribution.value.toString(),
account.effectiveContribution.value.toString(),
account.hasAdopted,
account.directReferralAdoptedCount,
account.unlockedLevelDepth,
account.unlockedBonusTiers,
account.createdAt,
);
return {
aggregateType: ContributionAccountUpdatedEvent.AGGREGATE_TYPE,
aggregateId: account.accountSequence,
eventType: ContributionAccountUpdatedEvent.EVENT_TYPE,
payload: event.toPayload(),
};
});
await this.outboxRepository.saveMany(events);
totalPublished += accounts.length;
if (accounts.length < pageSize || page * pageSize >= total) {
break;
}
page++;
}
this.logger.log(`Daily full sync completed: published ${totalPublished} contribution account events`);
} catch (error) {
this.logger.error('Failed to publish all account updates', error);
} finally {
await this.redis.releaseLock(`${this.LOCK_KEY}:full-sync`, lockValue);
}
}
} }
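
Every job in this scheduler follows the same pattern: acquire a Redis lock keyed per job, do the work, release in finally, and silently skip the tick if another instance holds the lock. A reusable sketch of that wrapper; the DistributedLock interface mirrors the acquireLock/releaseLock calls used above and is otherwise an assumption.

interface DistributedLock {
  acquireLock(key: string, ttlSeconds: number): Promise<string | null>;
  releaseLock(key: string, token: string): Promise<void>;
}

async function runExclusive(
  redis: DistributedLock,
  key: string,
  ttlSeconds: number,
  job: () => Promise<void>,
): Promise<boolean> {
  const token = await redis.acquireLock(key, ttlSeconds);
  if (!token) {
    return false; // another instance is running this job; skip the tick
  }
  try {
    await job();
    return true;
  } finally {
    await redis.releaseLock(key, token);
  }
}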

View File

@ -1,623 +0,0 @@
import { Injectable, Logger } from '@nestjs/common';
import { UnallocatedContributionRepository, UnallocatedContribution } from '../../infrastructure/persistence/repositories/unallocated-contribution.repository';
import { ContributionAccountRepository } from '../../infrastructure/persistence/repositories/contribution-account.repository';
import { ContributionRecordRepository } from '../../infrastructure/persistence/repositories/contribution-record.repository';
import { SystemAccountRepository } from '../../infrastructure/persistence/repositories/system-account.repository';
import { OutboxRepository } from '../../infrastructure/persistence/repositories/outbox.repository';
import { SyncedDataRepository } from '../../infrastructure/persistence/repositories/synced-data.repository';
import { UnitOfWork } from '../../infrastructure/persistence/unit-of-work/unit-of-work';
import { ContributionRecordAggregate } from '../../domain/aggregates/contribution-record.aggregate';
import { ContributionAccountAggregate, ContributionSourceType } from '../../domain/aggregates/contribution-account.aggregate';
import { ContributionAmount } from '../../domain/value-objects/contribution-amount.vo';
import { DistributionRate } from '../../domain/value-objects/distribution-rate.vo';
import { ContributionCalculatorService } from '../../domain/services/contribution-calculator.service';
import { ContributionRecordSyncedEvent, SystemAccountSyncedEvent, ContributionAccountUpdatedEvent } from '../../domain/events';
/**
 * Bonus claim service
 * Claims and backfills pending team level/bonus contributions once unlock conditions are met
 */
@Injectable()
export class BonusClaimService {
private readonly logger = new Logger(BonusClaimService.name);
constructor(
private readonly unallocatedContributionRepository: UnallocatedContributionRepository,
private readonly contributionAccountRepository: ContributionAccountRepository,
private readonly contributionRecordRepository: ContributionRecordRepository,
private readonly systemAccountRepository: SystemAccountRepository,
private readonly outboxRepository: OutboxRepository,
private readonly syncedDataRepository: SyncedDataRepository,
private readonly unitOfWork: UnitOfWork,
) {}
/**
 * Check whether new bonus tiers were unlocked and claim them
 *
 * @param accountSequence the account to check
 * @param previousCount the previous direct-referral adopted count
 * @param newCount the new direct-referral adopted count
 */
async checkAndClaimBonus(
accountSequence: string,
previousCount: number,
newCount: number,
): Promise<void> {
// Check whether new unlock thresholds were reached
const tiersToClaimList: number[] = [];
// T2: unlocks when >= 2 direct referrals have adopted
if (previousCount < 2 && newCount >= 2) {
tiersToClaimList.push(2);
}
// T3: unlocks when >= 4 direct referrals have adopted
if (previousCount < 4 && newCount >= 4) {
tiersToClaimList.push(3);
}
if (tiersToClaimList.length === 0) {
return;
}
this.logger.log(
`User ${accountSequence} unlocked bonus tiers: ${tiersToClaimList.join(', ')} ` +
`(directReferralAdoptedCount: ${previousCount} -> ${newCount})`,
);
// Check whether we are already inside a transaction (when called by ContributionCalculationService)
// If so, run directly to avoid nested-transaction timeouts
if (this.unitOfWork.isInTransaction()) {
for (const tier of tiersToClaimList) {
await this.claimBonusTier(accountSequence, tier);
}
} else {
// When called standalone, open a new transaction
await this.unitOfWork.executeInTransaction(async () => {
for (const tier of tiersToClaimList) {
await this.claimBonusTier(accountSequence, tier);
}
});
}
}
/**
 * Claim all pending records of one bonus tier for an account
 */
private async claimBonusTier(accountSequence: string, bonusTier: number): Promise<void> {
// 1. Load the pending records
const pendingRecords = await this.unallocatedContributionRepository.findPendingBonusByAccountSequence(
accountSequence,
bonusTier,
);
if (pendingRecords.length === 0) {
this.logger.debug(`No pending T${bonusTier} bonus records for ${accountSequence}`);
return;
}
this.logger.log(
`Claiming ${pendingRecords.length} T${bonusTier} bonus records for ${accountSequence}`,
);
// 2. Load the original adoption data for treeCount and baseContribution
const adoptionDataMap = new Map<string, { treeCount: number; baseContribution: ContributionAmount }>();
for (const pending of pendingRecords) {
const adoptionIdStr = pending.sourceAdoptionId.toString();
if (!adoptionDataMap.has(adoptionIdStr)) {
const adoption = await this.syncedDataRepository.findSyncedAdoptionByOriginalId(pending.sourceAdoptionId);
if (adoption) {
adoptionDataMap.set(adoptionIdStr, {
treeCount: adoption.treeCount,
baseContribution: new ContributionAmount(adoption.contributionPerTree),
});
} else {
// If the original adoption is missing, fall back to defaults and log a warning
this.logger.warn(`Adoption not found for sourceAdoptionId: ${pending.sourceAdoptionId}, using default values`);
adoptionDataMap.set(adoptionIdStr, {
treeCount: 0,
baseContribution: new ContributionAmount(0),
});
}
}
}
// 3. Create the contribution records
const contributionRecords: ContributionRecordAggregate[] = [];
for (const pending of pendingRecords) {
const adoptionData = adoptionDataMap.get(pending.sourceAdoptionId.toString())!;
const record = new ContributionRecordAggregate({
accountSequence: accountSequence,
sourceType: ContributionSourceType.TEAM_BONUS,
sourceAdoptionId: pending.sourceAdoptionId,
sourceAccountSequence: pending.sourceAccountSequence,
treeCount: adoptionData.treeCount,
baseContribution: adoptionData.baseContribution,
distributionRate: DistributionRate.BONUS_PER,
bonusTier: bonusTier,
amount: pending.amount,
effectiveDate: pending.effectiveDate,
expireDate: pending.expireDate,
});
contributionRecords.push(record);
}
// 4. Save the contribution records
const savedRecords = await this.contributionRecordRepository.saveMany(contributionRecords);
// 5. Update the user's contribution account
let totalAmount = new ContributionAmount(0);
for (const pending of pendingRecords) {
totalAmount = new ContributionAmount(totalAmount.value.plus(pending.amount.value));
}
await this.contributionAccountRepository.updateContribution(
accountSequence,
ContributionSourceType.TEAM_BONUS,
totalAmount,
null,
bonusTier,
);
// 6. Mark the pending records as allocated
const pendingIds = pendingRecords.map((r) => r.id);
await this.unallocatedContributionRepository.claimBonusRecords(pendingIds, accountSequence);
// 7. Subtract the contribution from HEADQUARTERS and delete its detail records
await this.systemAccountRepository.subtractContribution('HEADQUARTERS', null, totalAmount);
for (const pending of pendingRecords) {
await this.systemAccountRepository.deleteContributionRecordsByAdoption(
'HEADQUARTERS',
null,
pending.sourceAdoptionId,
pending.sourceAccountSequence,
);
}
// 8. Publish the HEADQUARTERS account update event
const headquartersAccount = await this.systemAccountRepository.findByTypeAndRegion('HEADQUARTERS', null);
if (headquartersAccount) {
const hqEvent = new SystemAccountSyncedEvent(
'HEADQUARTERS',
null,
headquartersAccount.name,
headquartersAccount.contributionBalance.value.toString(),
headquartersAccount.createdAt,
);
await this.outboxRepository.save({
aggregateType: SystemAccountSyncedEvent.AGGREGATE_TYPE,
aggregateId: 'HEADQUARTERS',
eventType: SystemAccountSyncedEvent.EVENT_TYPE,
payload: hqEvent.toPayload(),
});
}
// 9. Publish events to Kafka (via the outbox)
await this.publishBonusClaimEvents(accountSequence, savedRecords, pendingRecords);
this.logger.log(
`Claimed T${bonusTier} bonus for ${accountSequence}: ` +
`${pendingRecords.length} records, total amount: ${totalAmount.value.toString()}`,
);
}
/**
 * Publish bonus-claim events through the outbox
 */
private async publishBonusClaimEvents(
accountSequence: string,
savedRecords: ContributionRecordAggregate[],
pendingRecords: UnallocatedContribution[],
): Promise<void> {
// 1. Publish contribution-record sync events (for mining-admin-service CDC)
for (const record of savedRecords) {
const event = new ContributionRecordSyncedEvent(
record.id!,
record.accountSequence,
record.sourceType,
record.sourceAdoptionId,
record.sourceAccountSequence,
record.treeCount,
record.baseContribution.value.toString(),
record.distributionRate.value.toString(),
record.levelDepth,
record.bonusTier,
record.amount.value.toString(),
record.effectiveDate,
record.expireDate,
record.isExpired,
record.createdAt,
);
await this.outboxRepository.save({
aggregateType: ContributionRecordSyncedEvent.AGGREGATE_TYPE,
aggregateId: record.id!.toString(),
eventType: ContributionRecordSyncedEvent.EVENT_TYPE,
payload: event.toPayload(),
});
}
// 2. Publish the backfill event to mining-wallet-service
const userContributions = savedRecords.map((record, index) => ({
accountSequence: record.accountSequence,
contributionType: 'TEAM_BONUS',
amount: record.amount.value.toString(),
bonusTier: record.bonusTier,
effectiveDate: record.effectiveDate.toISOString(),
expireDate: record.expireDate.toISOString(),
sourceAdoptionId: record.sourceAdoptionId.toString(),
sourceAccountSequence: record.sourceAccountSequence,
isBackfill: true, // 标记为补发
}));
const eventId = `bonus-claim-${accountSequence}-${Date.now()}`;
const payload = {
eventType: 'BonusClaimed',
eventId,
timestamp: new Date().toISOString(),
payload: {
accountSequence,
bonusTier: savedRecords[0]?.bonusTier,
claimedCount: savedRecords.length,
userContributions,
},
};
await this.outboxRepository.save({
eventType: 'BonusClaimed',
topic: 'contribution.bonus.claimed',
key: accountSequence,
payload,
aggregateId: accountSequence,
aggregateType: 'ContributionAccount',
});
}
// ========== Scheduled backfill logic ==========
private readonly domainCalculator = new ContributionCalculatorService();
/**
 * Recompute unlock status for one account and backfill any missing contributions
 *
 * @returns whether any backfill was performed
 */
async processBackfillForAccount(accountSequence: string): Promise<boolean> {
const account = await this.contributionAccountRepository.findByAccountSequence(accountSequence);
if (!account) {
return false;
}
// Recompute the direct-referral adopted user count
const currentDirectReferralAdoptedCount = await this.syncedDataRepository.getDirectReferralAdoptedCount(
accountSequence,
);
// Compute the level depth and bonus tiers that should be unlocked
const expectedLevelDepth = this.domainCalculator.calculateUnlockedLevelDepth(currentDirectReferralAdoptedCount);
const expectedBonusTiers = this.domainCalculator.calculateUnlockedBonusTiers(
account.hasAdopted,
currentDirectReferralAdoptedCount,
);
let hasBackfill = false;
// Check whether level contributions need backfilling
if (expectedLevelDepth > account.unlockedLevelDepth) {
this.logger.log(
`[Backfill] Account ${accountSequence} level unlock: ${account.unlockedLevelDepth} -> ${expectedLevelDepth} ` +
`(directReferralAdoptedCount: ${account.directReferralAdoptedCount} -> ${currentDirectReferralAdoptedCount})`,
);
await this.unitOfWork.executeInTransaction(async () => {
// Backfill level contributions
const levelClaimed = await this.claimLevelContributions(
accountSequence,
account.unlockedLevelDepth + 1,
expectedLevelDepth,
);
if (levelClaimed > 0) {
hasBackfill = true;
}
// Update the account's direct-referral adopted count and unlock status
await this.updateAccountUnlockStatus(
account,
currentDirectReferralAdoptedCount,
expectedLevelDepth,
expectedBonusTiers,
);
});
}
// Check whether bonus tiers need backfilling
if (expectedBonusTiers > account.unlockedBonusTiers) {
this.logger.log(
`[Backfill] Account ${accountSequence} bonus unlock: ${account.unlockedBonusTiers} -> ${expectedBonusTiers} ` +
`(directReferralAdoptedCount: ${account.directReferralAdoptedCount} -> ${currentDirectReferralAdoptedCount})`,
);
// Reuse the existing checkAndClaimBonus to backfill the bonus
await this.checkAndClaimBonus(
accountSequence,
account.directReferralAdoptedCount,
currentDirectReferralAdoptedCount,
);
hasBackfill = true;
// If only bonus tiers needed backfilling (levels already current), the account status still needs updating
if (expectedLevelDepth <= account.unlockedLevelDepth) {
await this.unitOfWork.executeInTransaction(async () => {
await this.updateAccountUnlockStatus(
account,
currentDirectReferralAdoptedCount,
expectedLevelDepth,
expectedBonusTiers,
);
});
}
}
return hasBackfill;
}
/**
 * Claim pending level contributions within a level range
 * @param accountSequence the account to credit
 * @param minLevel lowest level to claim (inclusive)
 * @param maxLevel highest level to claim (inclusive)
 * @returns the number of records claimed
 */
private async claimLevelContributions(
accountSequence: string,
minLevel: number,
maxLevel: number,
): Promise<number> {
// 1. Load the pending level contribution records
const pendingRecords = await this.unallocatedContributionRepository.findPendingLevelByAccountSequence(
accountSequence,
minLevel,
maxLevel,
);
if (pendingRecords.length === 0) {
this.logger.debug(`[Backfill] No pending level records for ${accountSequence} (levels ${minLevel}-${maxLevel})`);
return 0;
}
this.logger.log(
`[Backfill] Claiming ${pendingRecords.length} level records for ${accountSequence} (levels ${minLevel}-${maxLevel})`,
);
// 2. Load the original adoption data for treeCount and baseContribution
const adoptionDataMap = new Map<string, { treeCount: number; baseContribution: ContributionAmount }>();
for (const pending of pendingRecords) {
const adoptionIdStr = pending.sourceAdoptionId.toString();
if (!adoptionDataMap.has(adoptionIdStr)) {
const adoption = await this.syncedDataRepository.findSyncedAdoptionByOriginalId(pending.sourceAdoptionId);
if (adoption) {
adoptionDataMap.set(adoptionIdStr, {
treeCount: adoption.treeCount,
baseContribution: new ContributionAmount(adoption.contributionPerTree),
});
} else {
this.logger.warn(`[Backfill] Adoption not found for sourceAdoptionId: ${pending.sourceAdoptionId}`);
adoptionDataMap.set(adoptionIdStr, {
treeCount: 0,
baseContribution: new ContributionAmount(0),
});
}
}
}
// 3. Create the contribution records
const contributionRecords: ContributionRecordAggregate[] = [];
for (const pending of pendingRecords) {
const adoptionData = adoptionDataMap.get(pending.sourceAdoptionId.toString())!;
const record = new ContributionRecordAggregate({
accountSequence: accountSequence,
sourceType: ContributionSourceType.TEAM_LEVEL,
sourceAdoptionId: pending.sourceAdoptionId,
sourceAccountSequence: pending.sourceAccountSequence,
treeCount: adoptionData.treeCount,
baseContribution: adoptionData.baseContribution,
distributionRate: DistributionRate.LEVEL_PER,
levelDepth: pending.levelDepth!,
amount: pending.amount,
effectiveDate: pending.effectiveDate,
expireDate: pending.expireDate,
});
contributionRecords.push(record);
}
// 4. Save the contribution records
const savedRecords = await this.contributionRecordRepository.saveMany(contributionRecords);
// 5. Update the user's contribution account (per level)
for (const pending of pendingRecords) {
await this.contributionAccountRepository.updateContribution(
accountSequence,
ContributionSourceType.TEAM_LEVEL,
pending.amount,
pending.levelDepth,
null,
);
}
// 6. Mark the pending records as allocated
const pendingIds = pendingRecords.map((r) => r.id);
await this.unallocatedContributionRepository.claimLevelRecords(pendingIds, accountSequence);
// 7. Compute the total amount to subtract from HEADQUARTERS
let totalAmount = new ContributionAmount(0);
for (const pending of pendingRecords) {
totalAmount = new ContributionAmount(totalAmount.value.plus(pending.amount.value));
}
// 8. Subtract the contribution from HEADQUARTERS and delete its detail records
await this.systemAccountRepository.subtractContribution('HEADQUARTERS', null, totalAmount);
for (const pending of pendingRecords) {
await this.systemAccountRepository.deleteContributionRecordsByAdoption(
'HEADQUARTERS',
null,
pending.sourceAdoptionId,
pending.sourceAccountSequence,
);
}
// 9. Publish the HEADQUARTERS account update event
const headquartersAccount = await this.systemAccountRepository.findByTypeAndRegion('HEADQUARTERS', null);
if (headquartersAccount) {
const hqEvent = new SystemAccountSyncedEvent(
'HEADQUARTERS',
null,
headquartersAccount.name,
headquartersAccount.contributionBalance.value.toString(),
headquartersAccount.createdAt,
);
await this.outboxRepository.save({
aggregateType: SystemAccountSyncedEvent.AGGREGATE_TYPE,
aggregateId: 'HEADQUARTERS',
eventType: SystemAccountSyncedEvent.EVENT_TYPE,
payload: hqEvent.toPayload(),
});
}
// 10. Publish contribution-record sync events
await this.publishLevelClaimEvents(accountSequence, savedRecords, pendingRecords);
this.logger.log(
`[Backfill] Claimed level contributions for ${accountSequence}: ` +
`${pendingRecords.length} records, total amount: ${totalAmount.value.toString()}`,
);
return pendingRecords.length;
}
/**
 * Update the account's direct-referral adopted count and unlock status
 */
private async updateAccountUnlockStatus(
account: ContributionAccountAggregate,
newDirectReferralAdoptedCount: number,
expectedLevelDepth: number,
expectedBonusTiers: number,
): Promise<void> {
// Incrementally update the direct-referral adopted count
const previousCount = account.directReferralAdoptedCount;
if (newDirectReferralAdoptedCount > previousCount) {
for (let i = previousCount; i < newDirectReferralAdoptedCount; i++) {
account.incrementDirectReferralAdoptedCount();
}
}
await this.contributionAccountRepository.save(account);
// Publish the account update event
await this.publishContributionAccountUpdatedEvent(account);
}
/**
 * Publish level-claim events through the outbox
 */
private async publishLevelClaimEvents(
accountSequence: string,
savedRecords: ContributionRecordAggregate[],
pendingRecords: UnallocatedContribution[],
): Promise<void> {
// 1. Publish contribution-record sync events (for mining-admin-service CDC)
for (const record of savedRecords) {
const event = new ContributionRecordSyncedEvent(
record.id!,
record.accountSequence,
record.sourceType,
record.sourceAdoptionId,
record.sourceAccountSequence,
record.treeCount,
record.baseContribution.value.toString(),
record.distributionRate.value.toString(),
record.levelDepth,
record.bonusTier,
record.amount.value.toString(),
record.effectiveDate,
record.expireDate,
record.isExpired,
record.createdAt,
);
await this.outboxRepository.save({
aggregateType: ContributionRecordSyncedEvent.AGGREGATE_TYPE,
aggregateId: record.id!.toString(),
eventType: ContributionRecordSyncedEvent.EVENT_TYPE,
payload: event.toPayload(),
});
}
// 2. Publish the backfill event to mining-wallet-service
const userContributions = savedRecords.map((record) => ({
accountSequence: record.accountSequence,
contributionType: 'TEAM_LEVEL',
amount: record.amount.value.toString(),
levelDepth: record.levelDepth,
effectiveDate: record.effectiveDate.toISOString(),
expireDate: record.expireDate.toISOString(),
sourceAdoptionId: record.sourceAdoptionId.toString(),
sourceAccountSequence: record.sourceAccountSequence,
isBackfill: true, // 标记为补发
}));
const eventId = `level-claim-${accountSequence}-${Date.now()}`;
const payload = {
eventType: 'LevelClaimed',
eventId,
timestamp: new Date().toISOString(),
payload: {
accountSequence,
claimedCount: savedRecords.length,
userContributions,
},
};
await this.outboxRepository.save({
eventType: 'LevelClaimed',
topic: 'contribution.level.claimed',
key: accountSequence,
payload,
aggregateId: accountSequence,
aggregateType: 'ContributionAccount',
});
}
/**
 * Publish the contribution-account-updated event
 */
private async publishContributionAccountUpdatedEvent(
account: ContributionAccountAggregate,
): Promise<void> {
const totalContribution = account.personalContribution.value
.plus(account.totalLevelPending.value)
.plus(account.totalBonusPending.value);
const event = new ContributionAccountUpdatedEvent(
account.accountSequence,
account.personalContribution.value.toString(),
account.totalLevelPending.value.toString(),
account.totalBonusPending.value.toString(),
totalContribution.toString(),
account.effectiveContribution.value.toString(),
account.hasAdopted,
account.directReferralAdoptedCount,
account.unlockedLevelDepth,
account.unlockedBonusTiers,
account.createdAt,
);
await this.outboxRepository.save({
aggregateType: ContributionAccountUpdatedEvent.AGGREGATE_TYPE,
aggregateId: account.accountSequence,
eventType: ContributionAccountUpdatedEvent.EVENT_TYPE,
payload: event.toPayload(),
});
}
}
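
The unlock thresholds in the deleted checkAndClaimBonus reduce to a small pure rule: tier 2 opens once the direct-referral adopted count reaches 2, tier 3 once it reaches 4, and only tiers crossed by the previous -> new transition are claimed. Restated as a standalone sketch (the function name is illustrative):

function tiersToClaim(previousCount: number, newCount: number): number[] {
  const tiers: number[] = [];
  if (previousCount < 2 && newCount >= 2) tiers.push(2); // T2: >= 2 direct referrals adopted
  if (previousCount < 4 && newCount >= 4) tiers.push(3); // T3: >= 4 direct referrals adopted
  return tiers;
}

// tiersToClaim(1, 2) -> [2]
// tiersToClaim(1, 5) -> [2, 3]
// tiersToClaim(2, 3) -> []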

View File

@ -9,12 +9,10 @@ import { OutboxRepository } from '../../infrastructure/persistence/repositories/
import { UnitOfWork } from '../../infrastructure/persistence/unit-of-work/unit-of-work'; import { UnitOfWork } from '../../infrastructure/persistence/unit-of-work/unit-of-work';
import { ContributionAccountAggregate, ContributionSourceType } from '../../domain/aggregates/contribution-account.aggregate'; import { ContributionAccountAggregate, ContributionSourceType } from '../../domain/aggregates/contribution-account.aggregate';
import { ContributionRecordAggregate } from '../../domain/aggregates/contribution-record.aggregate'; import { ContributionRecordAggregate } from '../../domain/aggregates/contribution-record.aggregate';
import { ContributionAmount } from '../../domain/value-objects/contribution-amount.vo';
import { SyncedReferral } from '../../domain/repositories/synced-data.repository.interface'; import { SyncedReferral } from '../../domain/repositories/synced-data.repository.interface';
import { ContributionDistributionPublisherService } from './contribution-distribution-publisher.service'; import { ContributionDistributionPublisherService } from './contribution-distribution-publisher.service';
import { ContributionRateService } from './contribution-rate.service'; import { ContributionRateService } from './contribution-rate.service';
import { BonusClaimService } from './bonus-claim.service'; import { ContributionRecordSyncedEvent, NetworkProgressUpdatedEvent } from '../../domain/events';
import { ContributionRecordSyncedEvent, NetworkProgressUpdatedEvent, ContributionAccountUpdatedEvent, SystemAccountSyncedEvent, SystemContributionRecordCreatedEvent, UnallocatedContributionSyncedEvent } from '../../domain/events';
/** /**
* *
@ -35,7 +33,6 @@ export class ContributionCalculationService {
private readonly unitOfWork: UnitOfWork, private readonly unitOfWork: UnitOfWork,
private readonly distributionPublisher: ContributionDistributionPublisherService, private readonly distributionPublisher: ContributionDistributionPublisherService,
private readonly contributionRateService: ContributionRateService, private readonly contributionRateService: ContributionRateService,
private readonly bonusClaimService: BonusClaimService,
) {} ) {}
/** /**
@ -114,49 +111,6 @@ export class ContributionCalculationService {
`teamBonus=${result.teamBonusRecords.length}, ` + `teamBonus=${result.teamBonusRecords.length}, ` +
`unallocated=${result.unallocatedContributions.length}`, `unallocated=${result.unallocatedContributions.length}`,
); );
// Update the network-wide adoption progress (the NetworkAdoptionProgress table)
// A user with no prior account record counts as a newly adopting user
const isNewUser = !adopterAccount;
await this.contributionRateService.updateNetworkProgress(
adoption.treeCount,
adoption.adoptionDate,
adoption.originalAdoptionId,
isNewUser,
);
// Publish the network progress update event (for mining-service to sync the network-wide theoretical contribution)
await this.publishNetworkProgressEvent();
}
/**
 * Publish the network-wide adoption progress event
 */
private async publishNetworkProgressEvent(): Promise<void> {
try {
const progress = await this.contributionRateService.getNetworkProgress();
const event = new NetworkProgressUpdatedEvent(
progress.totalTreeCount,
progress.totalAdoptionOrders,
progress.totalAdoptedUsers,
progress.currentUnit,
progress.currentMultiplier.toString(),
progress.currentContributionPerTree.toString(),
progress.nextUnitTreeCount,
);
await this.outboxRepository.save({
aggregateType: NetworkProgressUpdatedEvent.AGGREGATE_TYPE,
aggregateId: 'network',
eventType: NetworkProgressUpdatedEvent.EVENT_TYPE,
payload: event.toPayload(),
});
this.logger.debug(`Published NetworkProgressUpdatedEvent: trees=${progress.totalTreeCount}`);
} catch (error) {
this.logger.error('Failed to publish NetworkProgressUpdatedEvent', error);
}
} }
/** /**
@ -210,8 +164,6 @@ export class ContributionCalculationService {
): Promise<void> { ): Promise<void> {
// Collect the saved records (with IDs) for event publishing
const savedRecords: ContributionRecordAggregate[] = []; const savedRecords: ContributionRecordAggregate[] = [];
// Collect the account sequences that were updated (for account-update events)
const updatedAccountSequences = new Set<string>();
// 1. Save the personal contribution record
const savedPersonalRecord = await this.contributionRecordRepository.save(result.personalRecord); const savedPersonalRecord = await this.contributionRecordRepository.save(result.personalRecord);
@ -226,7 +178,6 @@ export class ContributionCalculationService {
} }
account.addPersonalContribution(result.personalRecord.amount); account.addPersonalContribution(result.personalRecord.amount);
await this.contributionAccountRepository.save(account); await this.contributionAccountRepository.save(account);
updatedAccountSequences.add(result.personalRecord.accountSequence);
// 2. Save the team level contribution records
if (result.teamLevelRecords.length > 0) { if (result.teamLevelRecords.length > 0) {
@ -242,7 +193,6 @@ export class ContributionCalculationService {
record.levelDepth, // 传递层级深度 record.levelDepth, // 传递层级深度
null, null,
); );
updatedAccountSequences.add(record.accountSequence);
} }
} }
@ -260,7 +210,6 @@ export class ContributionCalculationService {
null, null,
record.bonusTier, // 传递加成档位 record.bonusTier, // 传递加成档位
); );
updatedAccountSequences.add(record.accountSequence);
} }
} }
@ -268,7 +217,7 @@ export class ContributionCalculationService {
const effectiveDate = result.personalRecord.effectiveDate; const effectiveDate = result.personalRecord.effectiveDate;
const expireDate = result.personalRecord.expireDate; const expireDate = result.personalRecord.expireDate;
// 4. 保存未分配算力并发布同步事件 // 4. 保存未分配算力
if (result.unallocatedContributions.length > 0) { if (result.unallocatedContributions.length > 0) {
await this.unallocatedContributionRepository.saveMany( await this.unallocatedContributionRepository.saveMany(
result.unallocatedContributions.map((u) => ({ result.unallocatedContributions.map((u) => ({
@ -279,189 +228,28 @@ export class ContributionCalculationService {
expireDate, expireDate,
})), })),
); );
// 汇总未分配算力到 HEADQUARTERS总部账户
const totalUnallocatedAmount = result.unallocatedContributions.reduce(
(sum, u) => sum.add(u.amount),
new ContributionAmount(0),
);
await this.systemAccountRepository.addContribution(
'HEADQUARTERS',
null,
totalUnallocatedAmount,
);
// 为每笔未分配算力创建 HEADQUARTERS 明细记录
for (const unallocated of result.unallocatedContributions) {
// 确定来源类型和层级深度
const sourceType = unallocated.type as string; // LEVEL_OVERFLOW / LEVEL_NO_ANCESTOR / BONUS_TIER_1/2/3
const levelDepth = unallocated.levelDepth;
const savedRecord = await this.systemAccountRepository.saveContributionRecord({
accountType: 'HEADQUARTERS',
regionCode: null,
sourceAdoptionId,
sourceAccountSequence,
sourceType,
levelDepth,
distributionRate: 0, // 未分配算力没有固定比例
amount: unallocated.amount,
effectiveDate,
expireDate: null,
});
// 发布 HEADQUARTERS 算力明细事件
const recordEvent = new SystemContributionRecordCreatedEvent(
savedRecord.id,
'HEADQUARTERS',
null,
sourceAdoptionId,
sourceAccountSequence,
sourceType as any,
levelDepth,
0,
unallocated.amount.value.toString(),
effectiveDate,
null,
savedRecord.createdAt,
);
await this.outboxRepository.save({
aggregateType: SystemContributionRecordCreatedEvent.AGGREGATE_TYPE,
aggregateId: savedRecord.id.toString(),
eventType: SystemContributionRecordCreatedEvent.EVENT_TYPE,
payload: recordEvent.toPayload(),
});
} }
// 发布 HEADQUARTERS 账户同步事件 // 5. 保存系统账户算力
const headquartersAccount = await this.systemAccountRepository.findByTypeAndRegion('HEADQUARTERS', null);
if (headquartersAccount) {
const hqEvent = new SystemAccountSyncedEvent(
'HEADQUARTERS',
null, // 区域代码(总部没有区域)
headquartersAccount.name,
headquartersAccount.contributionBalance.value.toString(),
headquartersAccount.createdAt,
);
await this.outboxRepository.save({
aggregateType: SystemAccountSyncedEvent.AGGREGATE_TYPE,
aggregateId: 'HEADQUARTERS',
eventType: SystemAccountSyncedEvent.EVENT_TYPE,
payload: hqEvent.toPayload(),
});
}
// 发布未分配算力同步事件(用于 mining-service 同步待解锁算力)
for (const unallocated of result.unallocatedContributions) {
const event = new UnallocatedContributionSyncedEvent(
sourceAdoptionId,
sourceAccountSequence,
unallocated.wouldBeAccountSequence,
unallocated.type,
unallocated.amount.value.toString(),
unallocated.reason,
effectiveDate,
expireDate,
);
await this.outboxRepository.save({
aggregateType: UnallocatedContributionSyncedEvent.AGGREGATE_TYPE,
aggregateId: `${sourceAdoptionId}-${unallocated.type}`,
eventType: UnallocatedContributionSyncedEvent.EVENT_TYPE,
payload: event.toPayload(),
});
}
}
// 5. 保存系统账户算力并发布同步事件
if (result.systemContributions.length > 0) { if (result.systemContributions.length > 0) {
await this.systemAccountRepository.ensureSystemAccountsExist(); await this.systemAccountRepository.ensureSystemAccountsExist();
for (const sys of result.systemContributions) { for (const sys of result.systemContributions) {
// 动态创建/更新系统账户 await this.systemAccountRepository.addContribution(sys.accountType, sys.amount);
await this.systemAccountRepository.addContribution( await this.systemAccountRepository.saveContributionRecord({
sys.accountType, systemAccountType: sys.accountType,
sys.regionCode,
sys.amount,
);
// 保存算力明细记录
const savedRecord = await this.systemAccountRepository.saveContributionRecord({
accountType: sys.accountType,
regionCode: sys.regionCode,
sourceAdoptionId, sourceAdoptionId,
sourceAccountSequence, sourceAccountSequence,
sourceType: 'FIXED_RATE', // 固定比例分配
levelDepth: null,
distributionRate: sys.rate.value.toNumber(), distributionRate: sys.rate.value.toNumber(),
amount: sys.amount, amount: sys.amount,
effectiveDate, effectiveDate,
expireDate: null, expireDate: null, // System account contributions never expire based on the schema's contributionNeverExpires field
}); });
// 发布系统账户同步事件(用于 mining-service 同步系统账户算力)
const systemAccount = await this.systemAccountRepository.findByTypeAndRegion(
sys.accountType,
sys.regionCode,
);
if (systemAccount) {
const event = new SystemAccountSyncedEvent(
sys.accountType,
sys.regionCode,
systemAccount.name,
systemAccount.contributionBalance.value.toString(),
systemAccount.createdAt,
);
await this.outboxRepository.save({
aggregateType: SystemAccountSyncedEvent.AGGREGATE_TYPE,
aggregateId: `${sys.accountType}:${sys.regionCode || 'null'}`,
eventType: SystemAccountSyncedEvent.EVENT_TYPE,
payload: event.toPayload(),
});
// 发布系统账户算力明细事件(用于 mining-admin-service 同步明细记录)
const recordEvent = new SystemContributionRecordCreatedEvent(
savedRecord.id,
sys.accountType,
sys.regionCode, // 传递区域代码
sourceAdoptionId,
sourceAccountSequence,
'FIXED_RATE', // 固定比例分配
null, // 无层级深度
sys.rate.value.toNumber(),
sys.amount.value.toString(),
effectiveDate,
null,
savedRecord.createdAt,
);
await this.outboxRepository.save({
aggregateType: SystemContributionRecordCreatedEvent.AGGREGATE_TYPE,
aggregateId: savedRecord.id.toString(),
eventType: SystemContributionRecordCreatedEvent.EVENT_TYPE,
payload: recordEvent.toPayload(),
});
}
} }
} }
// 6. 发布算力记录同步事件(用于 mining-admin-service- 使用保存后带 ID 的记录 // 6. 发布算力记录同步事件(用于 mining-admin-service- 使用保存后带 ID 的记录
await this.publishContributionRecordEvents(savedRecords); await this.publishContributionRecordEvents(savedRecords);
// 7. 发布所有被更新账户的事件(用于 CDC 同步到 mining-admin-service
await this.publishUpdatedAccountEvents(updatedAccountSequences);
}
/**
 * Publish update events for every account touched by this distribution
 * (used for CDC sync to mining-admin-service).
 */
private async publishUpdatedAccountEvents(accountSequences: Set<string>): Promise<void> {
if (accountSequences.size === 0) return;
for (const accountSequence of accountSequences) {
const account = await this.contributionAccountRepository.findByAccountSequence(accountSequence);
if (account) {
await this.publishContributionAccountUpdatedEvent(account);
}
}
} }
/** /**
@ -512,15 +300,11 @@ export class ContributionCalculationService {
if (!account.hasAdopted) { if (!account.hasAdopted) {
account.markAsAdopted(); account.markAsAdopted();
await this.contributionAccountRepository.save(account); await this.contributionAccountRepository.save(account);
// 发布账户更新事件到 outbox用于 CDC 同步到 mining-admin-service
await this.publishContributionAccountUpdatedEvent(account);
} }
} }
/**
 * Update the referrer's (direct upline's) unlock status based on how many of
 * their direct referrals have adopted.
 */
private async updateReferrerUnlockStatus(referrerAccountSequence: string): Promise<void> { private async updateReferrerUnlockStatus(referrerAccountSequence: string): Promise<void> {
const account = await this.contributionAccountRepository.findByAccountSequence(referrerAccountSequence); const account = await this.contributionAccountRepository.findByAccountSequence(referrerAccountSequence);
@ -532,27 +316,16 @@ export class ContributionCalculationService {
); );
// 更新解锁状态 // 更新解锁状态
const previousCount = account.directReferralAdoptedCount; const currentCount = account.directReferralAdoptedCount;
if (directReferralAdoptedCount > previousCount) { if (directReferralAdoptedCount > currentCount) {
// 需要增量更新 // 需要增量更新
for (let i = previousCount; i < directReferralAdoptedCount; i++) { for (let i = currentCount; i < directReferralAdoptedCount; i++) {
account.incrementDirectReferralAdoptedCount(); account.incrementDirectReferralAdoptedCount();
} }
await this.contributionAccountRepository.save(account); await this.contributionAccountRepository.save(account);
// 发布账户更新事件到 outbox用于 CDC 同步到 mining-admin-service
await this.publishContributionAccountUpdatedEvent(account);
this.logger.debug( this.logger.debug(
`Updated referrer ${referrerAccountSequence} unlock status: level=${account.unlockedLevelDepth}, bonus=${account.unlockedBonusTiers}`, `Updated referrer ${referrerAccountSequence} unlock status: level=${account.unlockedLevelDepth}, bonus=${account.unlockedBonusTiers}`,
); );
// 检查并处理奖励补发T2: 直推≥2人, T3: 直推≥4人
await this.bonusClaimService.checkAndClaimBonus(
referrerAccountSequence,
previousCount,
directReferralAdoptedCount,
);
} }
} }
@ -620,43 +393,4 @@ export class ContributionCalculationService {
}, },
}; };
} }
/**
 * Publish a ContributionAccountUpdatedEvent to the outbox for CDC sync to
 * mining-admin-service.
 */
private async publishContributionAccountUpdatedEvent(
account: ContributionAccountAggregate,
): Promise<void> {
// 总算力 = 个人算力 + 层级待解锁 + 加成待解锁
const totalContribution = account.personalContribution.value
.plus(account.totalLevelPending.value)
.plus(account.totalBonusPending.value);
const event = new ContributionAccountUpdatedEvent(
account.accountSequence,
account.personalContribution.value.toString(),
account.totalLevelPending.value.toString(),
account.totalBonusPending.value.toString(),
totalContribution.toString(),
account.effectiveContribution.value.toString(),
account.hasAdopted,
account.directReferralAdoptedCount,
account.unlockedLevelDepth,
account.unlockedBonusTiers,
account.createdAt,
);
await this.outboxRepository.save({
aggregateType: ContributionAccountUpdatedEvent.AGGREGATE_TYPE,
aggregateId: account.accountSequence,
eventType: ContributionAccountUpdatedEvent.EVENT_TYPE,
payload: event.toPayload(),
});
this.logger.debug(
`Published ContributionAccountUpdatedEvent for ${account.accountSequence}: ` +
`directReferralAdoptedCount=${account.directReferralAdoptedCount}, ` +
`hasAdopted=${account.hasAdopted}`,
);
}
} }

View File

@ -121,16 +121,11 @@ export class ContributionDistributionPublisherService {
return result.systemContributions.map((sys) => ({ return result.systemContributions.map((sys) => ({
accountType: sys.accountType, accountType: sys.accountType,
amount: sys.amount.value.toString(), amount: sys.amount.value.toString(),
// 省份代码PROVINCE 用自己的 regionCodeCITY 需要传递省份代码用于创建省份
provinceCode: provinceCode:
sys.accountType === 'PROVINCE' sys.accountType === 'PROVINCE' || sys.accountType === 'CITY'
? sys.regionCode || provinceCode ? provinceCode
: sys.accountType === 'CITY'
? provinceCode // CITY 需要省份代码来创建省份(如果省份不存在)
: undefined, : undefined,
// 城市代码:只有 CITY 类型有 cityCode: sys.accountType === 'CITY' ? cityCode : undefined,
cityCode:
sys.accountType === 'CITY' ? sys.regionCode || cityCode : undefined,
neverExpires: sys.accountType === 'OPERATION', // 运营账户永不过期 neverExpires: sys.accountType === 'OPERATION', // 运营账户永不过期
})); }));
} }

View File

@ -1,40 +0,0 @@
/**
 * Contribution account updated event.
 * Carries directReferralAdoptedCount, unlockedLevelDepth, unlockedBonusTiers and
 * the contribution totals, and is synced to mining-admin-service.
 */
export class ContributionAccountUpdatedEvent {
static readonly EVENT_TYPE = 'ContributionAccountUpdated';
static readonly AGGREGATE_TYPE = 'ContributionAccount';
constructor(
public readonly accountSequence: string,
public readonly personalContribution: string,
public readonly teamLevelContribution: string,
public readonly teamBonusContribution: string,
public readonly totalContribution: string,
public readonly effectiveContribution: string,
public readonly hasAdopted: boolean,
public readonly directReferralAdoptedCount: number,
public readonly unlockedLevelDepth: number,
public readonly unlockedBonusTiers: number,
public readonly createdAt: Date,
) {}
toPayload(): Record<string, any> {
return {
eventType: ContributionAccountUpdatedEvent.EVENT_TYPE,
accountSequence: this.accountSequence,
personalContribution: this.personalContribution,
teamLevelContribution: this.teamLevelContribution,
teamBonusContribution: this.teamBonusContribution,
totalContribution: this.totalContribution,
effectiveContribution: this.effectiveContribution,
hasAdopted: this.hasAdopted,
directReferralAdoptedCount: this.directReferralAdoptedCount,
unlockedLevelDepth: this.unlockedLevelDepth,
unlockedBonusTiers: this.unlockedBonusTiers,
createdAt: this.createdAt.toISOString(),
};
}
}

View File

@ -1,11 +1,7 @@
export * from './contribution-calculated.event'; export * from './contribution-calculated.event';
export * from './daily-snapshot-created.event'; export * from './daily-snapshot-created.event';
export * from './contribution-account-synced.event'; export * from './contribution-account-synced.event';
export * from './contribution-account-updated.event';
export * from './referral-synced.event'; export * from './referral-synced.event';
export * from './adoption-synced.event'; export * from './adoption-synced.event';
export * from './contribution-record-synced.event'; export * from './contribution-record-synced.event';
export * from './network-progress-updated.event'; export * from './network-progress-updated.event';
export * from './system-account-synced.event';
export * from './system-contribution-record-created.event';
export * from './unallocated-contribution-synced.event';

View File

@ -1,27 +0,0 @@
/**
 * System account synced event (consumed by mining-service).
 */
export class SystemAccountSyncedEvent {
static readonly EVENT_TYPE = 'SystemAccountSynced';
static readonly AGGREGATE_TYPE = 'SystemAccount';
constructor(
public readonly accountType: string, // OPERATION / PROVINCE / CITY / HEADQUARTERS
public readonly regionCode: string | null, // 省/市代码,如 440000, 440100
public readonly name: string,
public readonly contributionBalance: string,
public readonly createdAt: Date,
) {}
toPayload(): Record<string, any> {
return {
eventType: SystemAccountSyncedEvent.EVENT_TYPE,
accountType: this.accountType,
regionCode: this.regionCode,
name: this.name,
contributionBalance: this.contributionBalance,
createdAt: this.createdAt.toISOString(),
};
}
}

View File

@ -1,56 +0,0 @@
/**
 * Source types of system-account contribution:
 * - FIXED_RATE: fixed-rate distribution (OPERATION 12%, PROVINCE 1%, CITY 2%)
 * - LEVEL_OVERFLOW: level contribution beyond the unlocked depth, routed to headquarters
 * - LEVEL_NO_ANCESTOR: no upline at that level, routed to headquarters
 * - BONUS_TIER_1/2/3: team bonus not yet unlocked, routed to headquarters
 */
export type SystemContributionSourceType =
| 'FIXED_RATE'
| 'LEVEL_OVERFLOW'
| 'LEVEL_NO_ANCESTOR'
| 'BONUS_TIER_1'
| 'BONUS_TIER_2'
| 'BONUS_TIER_3';
/**
 * System contribution record created event (synced to mining-admin-service).
 */
export class SystemContributionRecordCreatedEvent {
static readonly EVENT_TYPE = 'SystemContributionRecordCreated';
static readonly AGGREGATE_TYPE = 'SystemContributionRecord';
constructor(
public readonly recordId: bigint, // 明细记录ID
public readonly accountType: string, // 系统账户类型OPERATION/PROVINCE/CITY/HEADQUARTERS
public readonly regionCode: string | null, // 区域代码(省/市代码,如 440000, 440100
public readonly sourceAdoptionId: bigint, // 来源认种ID
public readonly sourceAccountSequence: string, // 认种人账号
public readonly sourceType: SystemContributionSourceType, // 来源类型
public readonly levelDepth: number | null, // 层级深度1-15仅对 LEVEL_OVERFLOW/LEVEL_NO_ANCESTOR 有效
public readonly distributionRate: number, // 分配比例
public readonly amount: string, // 算力金额
public readonly effectiveDate: Date, // 生效日期
public readonly expireDate: Date | null, // 过期日期
public readonly createdAt: Date, // 创建时间
) {}
toPayload(): Record<string, any> {
return {
eventType: SystemContributionRecordCreatedEvent.EVENT_TYPE,
recordId: this.recordId.toString(),
accountType: this.accountType,
regionCode: this.regionCode,
sourceAdoptionId: this.sourceAdoptionId.toString(),
sourceAccountSequence: this.sourceAccountSequence,
sourceType: this.sourceType,
levelDepth: this.levelDepth,
distributionRate: this.distributionRate,
amount: this.amount,
effectiveDate: this.effectiveDate.toISOString(),
expireDate: this.expireDate?.toISOString() ?? null,
createdAt: this.createdAt.toISOString(),
};
}
}

View File

@ -1,33 +0,0 @@
/**
 * Unallocated contribution synced event (consumed by mining-service to track
 * contribution that is pending unlock).
 */
export class UnallocatedContributionSyncedEvent {
static readonly EVENT_TYPE = 'UnallocatedContributionSynced';
static readonly AGGREGATE_TYPE = 'UnallocatedContribution';
constructor(
public readonly sourceAdoptionId: bigint,
public readonly sourceAccountSequence: string,
public readonly wouldBeAccountSequence: string | null,
public readonly contributionType: string, // LEVEL_NO_ANCESTOR, LEVEL_OVERFLOW, BONUS_TIER_1, BONUS_TIER_2, BONUS_TIER_3
public readonly amount: string,
public readonly reason: string | null,
public readonly effectiveDate: Date,
public readonly expireDate: Date,
) {}
toPayload(): Record<string, any> {
return {
eventType: UnallocatedContributionSyncedEvent.EVENT_TYPE,
sourceAdoptionId: this.sourceAdoptionId.toString(),
sourceAccountSequence: this.sourceAccountSequence,
wouldBeAccountSequence: this.wouldBeAccountSequence,
contributionType: this.contributionType,
amount: this.amount,
reason: this.reason,
effectiveDate: this.effectiveDate.toISOString(),
expireDate: this.expireDate.toISOString(),
};
}
}

View File

@ -5,16 +5,6 @@ import { ContributionAccountAggregate, ContributionSourceType } from '../aggrega
import { ContributionRecordAggregate } from '../aggregates/contribution-record.aggregate'; import { ContributionRecordAggregate } from '../aggregates/contribution-record.aggregate';
import { SyncedAdoption, SyncedReferral } from '../repositories/synced-data.repository.interface'; import { SyncedAdoption, SyncedReferral } from '../repositories/synced-data.repository.interface';
/**
 * A contribution allocation to a system account (supports per-province/city breakdown).
 */
export interface SystemContributionAllocation {
accountType: 'OPERATION' | 'PROVINCE' | 'CITY' | 'HEADQUARTERS';
regionCode: string | null; // 省市代码,如 440000、440100
rate: DistributionRate;
amount: ContributionAmount;
}
/**
 * Result of a contribution distribution calculation.
 */
@ -37,8 +27,12 @@ export interface ContributionDistributionResult {
reason: string; reason: string;
}[]; }[];
// 系统账户贡献值(支持按省市细分) // 系统账户贡献值
systemContributions: SystemContributionAllocation[]; systemContributions: {
accountType: 'OPERATION' | 'PROVINCE' | 'CITY';
rate: DistributionRate;
amount: ContributionAmount;
}[];
} }
/** /**
@ -91,31 +85,23 @@ export class ContributionCalculatorService {
}); });
// 2. 系统账户贡献值 (15%) // 2. 系统账户贡献值 (15%)
// 运营账户(全国)- 12% result.systemContributions = [
result.systemContributions.push({ {
accountType: 'OPERATION', accountType: 'OPERATION',
regionCode: null,
rate: DistributionRate.OPERATION, rate: DistributionRate.OPERATION,
amount: totalContribution.multiply(DistributionRate.OPERATION.value), amount: totalContribution.multiply(DistributionRate.OPERATION.value),
}); },
{
// 省公司账户 - 1%(按认种选择的省份)
const provinceCode = adoption.selectedProvince;
result.systemContributions.push({
accountType: 'PROVINCE', accountType: 'PROVINCE',
regionCode: provinceCode || null,
rate: DistributionRate.PROVINCE, rate: DistributionRate.PROVINCE,
amount: totalContribution.multiply(DistributionRate.PROVINCE.value), amount: totalContribution.multiply(DistributionRate.PROVINCE.value),
}); },
{
// 市公司账户 - 2%(按认种选择的城市)
const cityCode = adoption.selectedCity;
result.systemContributions.push({
accountType: 'CITY', accountType: 'CITY',
regionCode: cityCode || null,
rate: DistributionRate.CITY, rate: DistributionRate.CITY,
amount: totalContribution.multiply(DistributionRate.CITY.value), amount: totalContribution.multiply(DistributionRate.CITY.value),
}); },
];
// 3. 团队贡献值 (15%) // 3. 团队贡献值 (15%)
this.distributeTeamContribution( this.distributeTeamContribution(

View File

@ -13,11 +13,11 @@ import { KafkaModule } from './kafka/kafka.module';
import { KafkaProducerService } from './kafka/kafka-producer.service'; import { KafkaProducerService } from './kafka/kafka-producer.service';
import { CDCConsumerService } from './kafka/cdc-consumer.service'; import { CDCConsumerService } from './kafka/cdc-consumer.service';
import { RedisModule } from './redis/redis.module'; import { RedisModule } from './redis/redis.module';
import { SYNCED_DATA_REPOSITORY } from '../domain/repositories/synced-data.repository.interface';
// Repository injection tokens // Repository injection tokens
export const CONTRIBUTION_ACCOUNT_REPOSITORY = 'CONTRIBUTION_ACCOUNT_REPOSITORY'; export const CONTRIBUTION_ACCOUNT_REPOSITORY = 'CONTRIBUTION_ACCOUNT_REPOSITORY';
export const CONTRIBUTION_RECORD_REPOSITORY = 'CONTRIBUTION_RECORD_REPOSITORY'; export const CONTRIBUTION_RECORD_REPOSITORY = 'CONTRIBUTION_RECORD_REPOSITORY';
export const SYNCED_DATA_REPOSITORY = 'SYNCED_DATA_REPOSITORY';
@Module({ @Module({
imports: [PrismaModule, KafkaModule, RedisModule], imports: [PrismaModule, KafkaModule, RedisModule],

View File

@ -53,21 +53,6 @@ export type TransactionalCDCHandlerWithResult<T> = (event: CDCEvent, tx: Transac
/** 事务提交后的回调函数 */ /** 事务提交后的回调函数 */
export type PostCommitCallback<T> = (result: T) => Promise<void>; export type PostCommitCallback<T> = (result: T) => Promise<void>;
/** Topic consumption phase configuration */
export interface TopicPhase {
topic: string;
tableName: string;
}
/**
 * A Kafka message collected in memory (together with its sort key) before
 * being processed in order.
 */
interface CollectedMessage {
payload: EachMessagePayload;
orderId: bigint; // 用于排序的 order_id
}
@Injectable() @Injectable()
export class CDCConsumerService implements OnModuleInit, OnModuleDestroy { export class CDCConsumerService implements OnModuleInit, OnModuleDestroy {
private readonly logger = new Logger(CDCConsumerService.name); private readonly logger = new Logger(CDCConsumerService.name);
@ -76,14 +61,6 @@ export class CDCConsumerService implements OnModuleInit, OnModuleDestroy {
private handlers: Map<string, CDCHandler> = new Map(); private handlers: Map<string, CDCHandler> = new Map();
private isRunning = false; private isRunning = false;
// 分阶段消费配置
private topicPhases: TopicPhase[] = [];
private currentPhaseIndex = 0;
private sequentialMode = false;
// 初始同步完成标记(只有顺序同步全部完成后才为 true
private initialSyncCompleted = false;
constructor( constructor(
private readonly configService: ConfigService, private readonly configService: ConfigService,
private readonly prisma: PrismaService, private readonly prisma: PrismaService,
@ -270,14 +247,7 @@ export class CDCConsumerService implements OnModuleInit, OnModuleDestroy {
} }
/**
 * Start the CDC consumer.
 *
 * Topics are consumed in sequential phases:
 * 1. User accounts (user_accounts)
 * 2. Referral relationships (referral_relationships) - depends on user accounts
 * 3. Adoption orders (planting_orders) - depends on users and referrals
 *
 * This guarantees that data dependencies are satisfied during the initial sync.
 */
async start(): Promise<void> { async start(): Promise<void> {
if (this.isRunning) { if (this.isRunning) {
@ -289,337 +259,21 @@ export class CDCConsumerService implements OnModuleInit, OnModuleDestroy {
await this.consumer.connect(); await this.consumer.connect();
this.logger.log('CDC consumer connected'); this.logger.log('CDC consumer connected');
// 配置顺序消费阶段(顺序很重要!) // 订阅 Debezium CDC topics (从1.0服务全量同步)
this.topicPhases = [ const topics = [
{ // 用户账户表 (identity-service: user_accounts)
topic: this.configService.get<string>('CDC_TOPIC_USERS', 'cdc.identity.public.user_accounts'), this.configService.get<string>('CDC_TOPIC_USERS', 'cdc.identity.public.user_accounts'),
tableName: 'user_accounts', // 认种订单表 (planting-service: planting_orders)
}, this.configService.get<string>('CDC_TOPIC_ADOPTIONS', 'cdc.planting.public.planting_orders'),
{ // 推荐关系表 (referral-service: referral_relationships)
topic: this.configService.get<string>('CDC_TOPIC_REFERRALS', 'cdc.referral.public.referral_relationships'), this.configService.get<string>('CDC_TOPIC_REFERRALS', 'cdc.referral.public.referral_relationships'),
tableName: 'referral_relationships',
},
{
topic: this.configService.get<string>('CDC_TOPIC_ADOPTIONS', 'cdc.planting.public.planting_orders'),
tableName: 'planting_orders',
},
]; ];
this.currentPhaseIndex = 0;
this.sequentialMode = true;
this.isRunning = true;
// 开始顺序消费(阻塞直到完成,确保数据依赖顺序正确)
await this.startSequentialConsumption();
this.logger.log('CDC consumer started with sequential phase consumption');
} catch (error) {
this.logger.error('Failed to start CDC consumer', error);
// 不抛出错误,允许服务在没有 Kafka 的情况下启动(用于本地开发)
}
}
/**
 * Consume each configured topic phase to completion, in order.
 */
private async startSequentialConsumption(): Promise<void> {
for (let i = 0; i < this.topicPhases.length; i++) {
this.currentPhaseIndex = i;
const phase = this.topicPhases[i];
this.logger.log(`[CDC] Starting phase ${i + 1}/${this.topicPhases.length}: ${phase.tableName} (${phase.topic})`);
// 消费当前阶段直到追上最新
await this.consumePhaseToEnd(phase);
this.logger.log(`[CDC] Completed phase ${i + 1}/${this.topicPhases.length}: ${phase.tableName}`);
}
this.logger.log('[CDC] All phases completed. Switching to continuous mode...');
// 所有阶段完成后,切换到持续消费模式(同时监听所有 topic
await this.startContinuousMode();
}
/**
 * Consume a single phase until it has caught up with the topic's high watermark.
 *
 * For planting_orders a "collect - sort - process" strategy is used:
 * 1. collect every message without processing it
 * 2. sort the collected messages by order_id
 * 3. process them in the sorted order
 *
 * This is needed because Debezium snapshot order is not guaranteed to follow
 * the primary-key order of the source table.
 */
private async consumePhaseToEnd(phase: TopicPhase): Promise<void> {
const admin = this.kafka.admin();
await admin.connect();
// 获取 topic 的高水位线和最早 offset
const topicOffsets = await admin.fetchTopicOffsets(phase.topic);
const highWatermarks: Map<number, string> = new Map();
const earliestOffsets: Map<number, string> = new Map();
for (const partitionOffset of topicOffsets) {
highWatermarks.set(partitionOffset.partition, partitionOffset.high);
earliestOffsets.set(partitionOffset.partition, partitionOffset.low);
}
this.logger.log(`[CDC] Phase ${phase.tableName}: High watermarks = ${JSON.stringify(Object.fromEntries(highWatermarks))}`);
// 检查是否 topic 为空
const allEmpty = Array.from(highWatermarks.values()).every(hw => hw === '0');
if (allEmpty) {
this.logger.log(`[CDC] Phase ${phase.tableName}: Topic is empty, skipping`);
await admin.disconnect();
return;
}
// 使用固定的 group id
const phaseGroupId = `contribution-service-cdc-phase-${phase.tableName}`;
// 重置 consumer group 的 offset 到最早位置
// 使用 admin.resetOffsets 而不是 setOffsets更简洁且专门用于重置到 earliest/latest
// 这确保每次服务启动都会从头开始消费,不受之前 committed offset 影响
// 参考: https://kafka.js.org/docs/admin#a-name-reset-offsets-a-resetoffsets
this.logger.log(`[CDC] Phase ${phase.tableName}: Resetting consumer group ${phaseGroupId} offsets to earliest`);
try {
await admin.resetOffsets({
groupId: phaseGroupId,
topic: phase.topic,
earliest: true,
});
this.logger.log(`[CDC] Phase ${phase.tableName}: Consumer group offsets reset successfully`);
} catch (resetError: any) {
// 如果 consumer group 不存在resetOffsets 会失败,这是正常的(首次运行)
// fromBeginning: true 会在这种情况下生效
this.logger.log(`[CDC] Phase ${phase.tableName}: Could not reset offsets (may be first run): ${resetError.message}`);
}
const phaseConsumer = this.kafka.consumer({
groupId: phaseGroupId,
});
try {
await phaseConsumer.connect();
// 订阅单个 topicfromBeginning 对新 group 有效
await phaseConsumer.subscribe({
topic: phase.topic,
fromBeginning: true,
});
// 判断是否需要使用"收集-排序-处理"模式
const needsSorting = phase.tableName === 'planting_orders';
if (needsSorting) {
// planting_orders 阶段:使用"收集-排序-处理"模式
await this.consumePhaseWithSorting(phaseConsumer, phase, highWatermarks);
} else {
// 其他阶段:使用原有的"边消费边处理"模式
await this.consumePhaseDirectly(phaseConsumer, phase, highWatermarks);
}
// 停止消费
await phaseConsumer.stop();
await phaseConsumer.disconnect();
await admin.disconnect();
} catch (error) {
this.logger.error(`[CDC] Error in phase ${phase.tableName}`, error);
await phaseConsumer.disconnect();
await admin.disconnect();
throw error;
}
}
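The offset reset above goes through the kafkajs admin client. Below is a standalone sketch of the same call; the broker address, group id and topic name are placeholders, not values from this repository.

import { Kafka } from 'kafkajs';

// Sketch: reset a consumer group's committed offsets to the earliest position.
async function resetGroupToEarliest(groupId: string, topic: string): Promise<void> {
  const kafka = new Kafka({ clientId: 'cdc-tools', brokers: ['localhost:9092'] });
  const admin = kafka.admin();
  await admin.connect();
  try {
    // Fails if the group still has active members; run it while the consumer is stopped.
    await admin.resetOffsets({ groupId, topic, earliest: true });
  } finally {
    await admin.disconnect();
  }
}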
/**
 * Consume a phase directly (process messages as they are consumed).
 * Used for user_accounts and referral_relationships.
 */
private async consumePhaseDirectly(
phaseConsumer: Consumer,
phase: TopicPhase,
highWatermarks: Map<number, string>,
): Promise<void> {
let processedOffsets: Map<number, bigint> = new Map();
let isComplete = false;
for (const partition of highWatermarks.keys()) {
processedOffsets.set(partition, BigInt(-1));
}
// 开始消费
await phaseConsumer.run({
eachMessage: async (payload: EachMessagePayload) => {
await this.handleMessage(payload);
// 更新已处理的 offset
processedOffsets.set(payload.partition, BigInt(payload.message.offset));
// 检查是否所有 partition 都已追上高水位线
let allCaughtUp = true;
for (const [partition, highWatermark] of highWatermarks) {
const processed = processedOffsets.get(partition) ?? BigInt(-1);
// 高水位线是下一个将被写入的 offset所以已处理的 offset 需要 >= highWatermark - 1
if (processed < BigInt(highWatermark) - BigInt(1)) {
allCaughtUp = false;
break;
}
}
if (allCaughtUp && !isComplete) {
isComplete = true;
this.logger.log(`[CDC] Phase ${phase.tableName}: Caught up with all partitions`);
}
},
});
// 等待追上高水位线
while (!isComplete) {
await new Promise(resolve => setTimeout(resolve, 100));
// 每秒检查一次进度
const currentProgress = Array.from(processedOffsets.entries())
.map(([p, o]) => `P${p}:${o}/${highWatermarks.get(p)}`)
.join(', ');
this.logger.debug(`[CDC] Phase ${phase.tableName} progress: ${currentProgress}`);
}
}
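The catch-up test above hinges on one Kafka detail: the high watermark returned by fetchTopicOffsets is the offset of the next message to be written, so a partition is caught up once the last processed offset reaches high - 1. A small helper expressing just that check (a sketch with the same semantics as the loop above):

// Sketch: true once every partition's last processed offset has reached
// (high watermark - 1), i.e. the last message that existed when the phase started.
function isCaughtUp(
  highWatermarks: Map<number, string>,
  processedOffsets: Map<number, bigint>,
): boolean {
  for (const [partition, high] of highWatermarks) {
    const processed = processedOffsets.get(partition) ?? BigInt(-1);
    if (processed < BigInt(high) - BigInt(1)) {
      return false;
    }
  }
  return true;
}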
/**
 * Collect - sort - process consumption for planting_orders, ordered by order_id.
 *
 * Debezium snapshots do not guarantee PostgreSQL insertion order, and processing
 * adoptions out of order would produce wrong unlocked_level_depth results.
 */
private async consumePhaseWithSorting(
phaseConsumer: Consumer,
phase: TopicPhase,
highWatermarks: Map<number, string>,
): Promise<void> {
const collectedMessages: CollectedMessage[] = [];
let processedOffsets: Map<number, bigint> = new Map();
let isComplete = false;
for (const partition of highWatermarks.keys()) {
processedOffsets.set(partition, BigInt(-1));
}
this.logger.log(`[CDC] Phase ${phase.tableName}: Using collect-sort-process mode`);
// 第一步:收集所有消息(不处理)
await phaseConsumer.run({
eachMessage: async (payload: EachMessagePayload) => {
// 解析消息获取 order_id 用于排序
const orderId = this.extractOrderIdFromPayload(payload);
collectedMessages.push({
payload,
orderId,
});
// 更新已处理的 offset
processedOffsets.set(payload.partition, BigInt(payload.message.offset));
// 检查是否所有 partition 都已追上高水位线
let allCaughtUp = true;
for (const [partition, highWatermark] of highWatermarks) {
const processed = processedOffsets.get(partition) ?? BigInt(-1);
if (processed < BigInt(highWatermark) - BigInt(1)) {
allCaughtUp = false;
break;
}
}
if (allCaughtUp && !isComplete) {
isComplete = true;
this.logger.log(`[CDC] Phase ${phase.tableName}: Collected all ${collectedMessages.length} messages`);
}
},
});
// 等待收集完成
while (!isComplete) {
await new Promise(resolve => setTimeout(resolve, 100));
// 每秒检查一次进度
const currentProgress = Array.from(processedOffsets.entries())
.map(([p, o]) => `P${p}:${o}/${highWatermarks.get(p)}`)
.join(', ');
this.logger.debug(`[CDC] Phase ${phase.tableName} collecting: ${currentProgress}, collected: ${collectedMessages.length}`);
}
// 第二步:按 order_id 升序排序
this.logger.log(`[CDC] Phase ${phase.tableName}: Sorting ${collectedMessages.length} messages by order_id`);
collectedMessages.sort((a, b) => {
if (a.orderId < b.orderId) return -1;
if (a.orderId > b.orderId) return 1;
return 0;
});
// 记录排序前后的变化(用于调试)
if (collectedMessages.length > 0) {
const firstFive = collectedMessages.slice(0, 5).map(m => m.orderId.toString()).join(', ');
const lastFive = collectedMessages.slice(-5).map(m => m.orderId.toString()).join(', ');
this.logger.log(`[CDC] Phase ${phase.tableName}: Sorted order_ids: first=[${firstFive}], last=[${lastFive}]`);
}
// 第三步:按排序后的顺序处理消息
this.logger.log(`[CDC] Phase ${phase.tableName}: Processing ${collectedMessages.length} messages in sorted order`);
let processedCount = 0;
for (const collected of collectedMessages) {
await this.handleMessage(collected.payload);
processedCount++;
// 每处理 100 条记录日志一次进度
if (processedCount % 100 === 0) {
this.logger.log(`[CDC] Phase ${phase.tableName}: Processed ${processedCount}/${collectedMessages.length} messages`);
}
}
this.logger.log(`[CDC] Phase ${phase.tableName}: Completed processing all ${collectedMessages.length} messages in order_id order`);
}
/**
 * Extract the order_id (used as the sort key) from a Kafka message payload.
 */
private extractOrderIdFromPayload(payload: EachMessagePayload): bigint {
try {
if (!payload.message.value) {
return BigInt(0);
}
const rawData = JSON.parse(payload.message.value.toString());
// order_id 是源表的主键字段
const orderId = rawData.order_id || rawData.id || 0;
// 转换为 bigint 用于比较
return BigInt(orderId);
} catch (error) {
this.logger.warn(`[CDC] Failed to extract order_id from message, using 0`, error);
return BigInt(0);
}
}
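One detail in the sort step is easy to get wrong: Array.prototype.sort expects a number from its comparator, and a BigInt difference cannot be returned directly, so the comparison has to go through explicit < / > checks, exactly as the code above does. A self-contained sketch of that comparator (the KeyedMessage shape mirrors the CollectedMessage interface above):

// Sketch: sort collected CDC messages by a 64-bit primary key without
// relying on numeric subtraction (BigInt cannot be returned from a comparator).
interface KeyedMessage {
  orderId: bigint;
  raw: unknown;
}

function sortByOrderId(messages: KeyedMessage[]): KeyedMessage[] {
  return [...messages].sort((a, b) =>
    a.orderId < b.orderId ? -1 : a.orderId > b.orderId ? 1 : 0,
  );
}

// Example: ids arriving in snapshot order 3, 1, 2 come back as 1, 2, 3.
const sorted = sortByOrderId([
  { orderId: BigInt(3), raw: null },
  { orderId: BigInt(1), raw: null },
  { orderId: BigInt(2), raw: null },
]);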
/**
 * Switch to continuous mode: subscribe to all topics and consume them in parallel.
 */
private async startContinuousMode(): Promise<void> {
this.sequentialMode = false;
this.initialSyncCompleted = true; // 标记初始同步完成
const topics = this.topicPhases.map(p => p.topic);
await this.consumer.subscribe({ await this.consumer.subscribe({
topics, topics,
fromBeginning: false, // 从上次消费的位置继续(不是从头开始) fromBeginning: true, // 首次启动时全量同步历史数据
}); });
this.logger.log(`Subscribed to topics: ${topics.join(', ')}`);
this.logger.log(`[CDC] Continuous mode: Subscribed to topics: ${topics.join(', ')}`);
await this.consumer.run({ await this.consumer.run({
eachMessage: async (payload: EachMessagePayload) => { eachMessage: async (payload: EachMessagePayload) => {
@ -627,19 +281,12 @@ export class CDCConsumerService implements OnModuleInit, OnModuleDestroy {
}, },
}); });
this.logger.log('[CDC] Continuous mode started - all topics being consumed in parallel'); this.isRunning = true;
this.logger.log('CDC consumer started with transactional idempotency protection');
} catch (error) {
this.logger.error('Failed to start CDC consumer', error);
// 不抛出错误,允许服务在没有 Kafka 的情况下启动(用于本地开发)
} }
/**
 * Report the CDC sync status.
 * - initialSyncCompleted = true only after all sequential phases have completed.
 */
getSyncStatus(): { isRunning: boolean; sequentialMode: boolean; allPhasesCompleted: boolean } {
return {
isRunning: this.isRunning,
sequentialMode: this.sequentialMode,
allPhasesCompleted: this.initialSyncCompleted,
};
} }
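getSyncStatus() exposes whether the initial sequential sync has finished; a natural consumer would be a readiness probe that keeps the service out of rotation until allPhasesCompleted is true. The controller below is hypothetical (it is not part of this diff) and assumes the import path of the consumer service; it only illustrates how the status could be used.

import { Controller, Get, ServiceUnavailableException } from '@nestjs/common';
// Hypothetical controller; CDCConsumerService is the service defined above.
import { CDCConsumerService } from '../kafka/cdc-consumer.service';

@Controller('health')
export class SyncReadinessController {
  constructor(private readonly cdcConsumer: CDCConsumerService) {}

  @Get('ready')
  ready() {
    const status = this.cdcConsumer.getSyncStatus();
    if (!status.allPhasesCompleted) {
      // Not ready until the sequential CDC phases have all caught up.
      throw new ServiceUnavailableException('initial CDC sync in progress');
    }
    return status;
  }
}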
/** /**

View File

@ -223,142 +223,6 @@ export class ContributionAccountRepository implements IContributionAccountReposi
}); });
} }
async findRecentlyUpdated(since: Date, limit: number = 500): Promise<ContributionAccountAggregate[]> {
const records = await this.client.contributionAccount.findMany({
where: { updatedAt: { gte: since } },
orderBy: { updatedAt: 'desc' },
take: limit,
});
return records.map((r) => this.toDomain(r));
}
/**
 * Find adopted accounts whose unlock state is still incomplete
 * (unlockedLevelDepth < 15 or unlockedBonusTiers < 3).
 * @param limit maximum number of accounts to return
 * @returns accounts ordered by least recently updated first
 */
async findAccountsWithIncompleteUnlock(limit: number = 100): Promise<ContributionAccountAggregate[]> {
// 查找已认种但未达到满解锁状态的账户:
// - unlockedLevelDepth < 15 或
// - unlockedBonusTiers < 3
const records = await this.client.contributionAccount.findMany({
where: {
hasAdopted: true,
OR: [
{ unlockedLevelDepth: { lt: 15 } },
{ unlockedBonusTiers: { lt: 3 } },
],
},
orderBy: { updatedAt: 'asc' }, // 优先处理最久未更新的
take: limit,
});
return records.map((r) => this.toDomain(r));
}
/**
 * Aggregate detailed contribution statistics across all accounts.
 */
async getDetailedContributionStats(): Promise<{
// 个人算力总计
personalTotal: string;
// 层级算力 - 已解锁(已分配给上线)
levelUnlocked: string;
// 层级算力 - 未解锁待解锁的pending
levelPending: string;
// 层级按档位分解
levelByTier: {
tier1: { unlocked: string; pending: string }; // 1-5级
tier2: { unlocked: string; pending: string }; // 6-10级
tier3: { unlocked: string; pending: string }; // 11-15级
};
// 团队奖励算力 - 已解锁
bonusUnlocked: string;
// 团队奖励算力 - 未解锁
bonusPending: string;
// 团队奖励按档位分解
bonusByTier: {
tier1: { unlocked: string; pending: string };
tier2: { unlocked: string; pending: string };
tier3: { unlocked: string; pending: string };
};
}> {
const result = await this.client.contributionAccount.aggregate({
_sum: {
personalContribution: true,
// 层级 1-5
level1Pending: true,
level2Pending: true,
level3Pending: true,
level4Pending: true,
level5Pending: true,
// 层级 6-10
level6Pending: true,
level7Pending: true,
level8Pending: true,
level9Pending: true,
level10Pending: true,
// 层级 11-15
level11Pending: true,
level12Pending: true,
level13Pending: true,
level14Pending: true,
level15Pending: true,
// 团队奖励
bonusTier1Pending: true,
bonusTier2Pending: true,
bonusTier3Pending: true,
// 汇总
totalLevelPending: true,
totalBonusPending: true,
totalUnlocked: true,
},
});
const sum = result._sum;
// 层级 1-5 已解锁在pending字段中存储的是已分配给该用户的层级算力
const level1to5 = new Decimal(sum.level1Pending || 0)
.plus(sum.level2Pending || 0)
.plus(sum.level3Pending || 0)
.plus(sum.level4Pending || 0)
.plus(sum.level5Pending || 0);
// 层级 6-10
const level6to10 = new Decimal(sum.level6Pending || 0)
.plus(sum.level7Pending || 0)
.plus(sum.level8Pending || 0)
.plus(sum.level9Pending || 0)
.plus(sum.level10Pending || 0);
// 层级 11-15
const level11to15 = new Decimal(sum.level11Pending || 0)
.plus(sum.level12Pending || 0)
.plus(sum.level13Pending || 0)
.plus(sum.level14Pending || 0)
.plus(sum.level15Pending || 0);
return {
personalTotal: (sum.personalContribution || new Decimal(0)).toString(),
levelUnlocked: (sum.totalLevelPending || new Decimal(0)).toString(),
levelPending: '0', // 未解锁的存储在 unallocated 表中
levelByTier: {
tier1: { unlocked: level1to5.toString(), pending: '0' },
tier2: { unlocked: level6to10.toString(), pending: '0' },
tier3: { unlocked: level11to15.toString(), pending: '0' },
},
bonusUnlocked: (sum.totalBonusPending || new Decimal(0)).toString(),
bonusPending: '0', // 未解锁的存储在 unallocated 表中
bonusByTier: {
tier1: { unlocked: (sum.bonusTier1Pending || new Decimal(0)).toString(), pending: '0' },
tier2: { unlocked: (sum.bonusTier2Pending || new Decimal(0)).toString(), pending: '0' },
tier3: { unlocked: (sum.bonusTier3Pending || new Decimal(0)).toString(), pending: '0' },
},
};
}
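The statistics query above leans on two building blocks: Prisma's aggregate with a _sum selection, and decimal.js arithmetic to combine the sums without floating-point drift. A reduced sketch of the same pattern, using the model and field names from the query above but only three of the columns:

import { PrismaClient } from '@prisma/client';
import Decimal from 'decimal.js';

// Sketch: sum a few Decimal columns with Prisma aggregate, then combine them
// with decimal.js so no precision is lost on the way to a string result.
async function sumLevelTier1(prisma: PrismaClient): Promise<string> {
  const result = await prisma.contributionAccount.aggregate({
    _sum: {
      level1Pending: true,
      level2Pending: true,
      level3Pending: true,
    },
  });
  const sum = result._sum;
  const tier1 = new Decimal(sum.level1Pending ?? 0)
    .plus(sum.level2Pending ?? 0)
    .plus(sum.level3Pending ?? 0);
  return tier1.toString();
}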
private toDomain(record: any): ContributionAccountAggregate { private toDomain(record: any): ContributionAccountAggregate {
return ContributionAccountAggregate.fromPersistence({ return ContributionAccountAggregate.fromPersistence({
id: record.id, id: record.id,

View File

@ -136,10 +136,7 @@ export class SyncedDataRepository implements ISyncedDataRepository {
async findUndistributedAdoptions(limit: number = 100): Promise<SyncedAdoption[]> { async findUndistributedAdoptions(limit: number = 100): Promise<SyncedAdoption[]> {
const records = await this.client.syncedAdoption.findMany({ const records = await this.client.syncedAdoption.findMany({
where: { where: { contributionDistributed: false },
contributionDistributed: false,
status: 'MINING_ENABLED', // 只处理最终成功的认种订单
},
orderBy: { adoptionDate: 'asc' }, orderBy: { adoptionDate: 'asc' },
take: limit, take: limit,
}); });
@ -174,10 +171,7 @@ export class SyncedDataRepository implements ISyncedDataRepository {
async getTotalTreesByAccountSequence(accountSequence: string): Promise<number> { async getTotalTreesByAccountSequence(accountSequence: string): Promise<number> {
const result = await this.client.syncedAdoption.aggregate({ const result = await this.client.syncedAdoption.aggregate({
where: { where: { accountSequence },
accountSequence,
status: 'MINING_ENABLED', // 只统计最终成功的认种订单
},
_sum: { treeCount: true }, _sum: { treeCount: true },
}); });
return result._sum.treeCount ?? 0; return result._sum.treeCount ?? 0;
@ -291,12 +285,8 @@ export class SyncedDataRepository implements ISyncedDataRepository {
const accountSequences = directReferrals.map((r) => r.accountSequence); const accountSequences = directReferrals.map((r) => r.accountSequence);
// 只统计有 MINING_ENABLED 状态认种记录的直推用户数
const adoptedCount = await this.client.syncedAdoption.findMany({ const adoptedCount = await this.client.syncedAdoption.findMany({
where: { where: { accountSequence: { in: accountSequences } },
accountSequence: { in: accountSequences },
status: 'MINING_ENABLED', // 只统计最终成功的认种订单
},
distinct: ['accountSequence'], distinct: ['accountSequence'],
}); });
@ -318,10 +308,7 @@ export class SyncedDataRepository implements ISyncedDataRepository {
const adoptions = await this.client.syncedAdoption.groupBy({ const adoptions = await this.client.syncedAdoption.groupBy({
by: ['accountSequence'], by: ['accountSequence'],
where: { where: { accountSequence: { in: sequences } },
accountSequence: { in: sequences },
status: 'MINING_ENABLED', // 只统计最终成功的认种订单
},
_sum: { treeCount: true }, _sum: { treeCount: true },
}); });
@ -359,89 +346,6 @@ export class SyncedDataRepository implements ISyncedDataRepository {
return result; return result;
} }
// ========== 认种分类账查询 ==========
async getPlantingLedger(
accountSequence: string,
page: number = 1,
pageSize: number = 20,
): Promise<{
items: SyncedAdoption[];
total: number;
page: number;
pageSize: number;
totalPages: number;
}> {
const skip = (page - 1) * pageSize;
// 只返回 MINING_ENABLED 状态的认种记录
const whereClause = { accountSequence, status: 'MINING_ENABLED' };
const [items, total] = await Promise.all([
this.client.syncedAdoption.findMany({
where: whereClause,
orderBy: { adoptionDate: 'desc' },
skip,
take: pageSize,
}),
this.client.syncedAdoption.count({
where: whereClause,
}),
]);
return {
items: items.map((r) => this.toSyncedAdoption(r)),
total,
page,
pageSize,
totalPages: Math.ceil(total / pageSize),
};
}
async getPlantingSummary(accountSequence: string): Promise<{
totalOrders: number;
totalTreeCount: number;
totalAmount: string;
effectiveTreeCount: number;
firstPlantingAt: Date | null;
lastPlantingAt: Date | null;
}> {
// 只统计 MINING_ENABLED 状态的认种记录
const adoptions = await this.client.syncedAdoption.findMany({
where: { accountSequence, status: 'MINING_ENABLED' },
orderBy: { adoptionDate: 'asc' },
});
if (adoptions.length === 0) {
return {
totalOrders: 0,
totalTreeCount: 0,
totalAmount: '0',
effectiveTreeCount: 0,
firstPlantingAt: null,
lastPlantingAt: null,
};
}
const totalOrders = adoptions.length;
const totalTreeCount = adoptions.reduce((sum, a) => sum + a.treeCount, 0);
// 计算总金额treeCount * contributionPerTree
let totalAmount = new Decimal(0);
for (const adoption of adoptions) {
const amount = new Decimal(adoption.contributionPerTree).mul(adoption.treeCount);
totalAmount = totalAmount.add(amount);
}
return {
totalOrders,
totalTreeCount,
totalAmount: totalAmount.toString(),
effectiveTreeCount: totalTreeCount, // 全部都是有效的 MINING_ENABLED
firstPlantingAt: adoptions[0]?.adoptionDate || null,
lastPlantingAt: adoptions[adoptions.length - 1]?.adoptionDate || null,
};
}
// ========== 统计方法(用于查询服务)========== // ========== 统计方法(用于查询服务)==========
async countUsers(): Promise<number> { async countUsers(): Promise<number> {
@ -454,23 +358,10 @@ export class SyncedDataRepository implements ISyncedDataRepository {
async countUndistributedAdoptions(): Promise<number> { async countUndistributedAdoptions(): Promise<number> {
return this.client.syncedAdoption.count({ return this.client.syncedAdoption.count({
where: { where: { contributionDistributed: false },
contributionDistributed: false,
status: 'MINING_ENABLED', // 只统计最终成功的认种订单
},
}); });
} }
async getTotalTrees(): Promise<number> {
const result = await this.client.syncedAdoption.aggregate({
where: {
status: 'MINING_ENABLED', // 只统计最终成功的认种订单
},
_sum: { treeCount: true },
});
return result._sum.treeCount ?? 0;
}
// ========== 私有方法 ========== // ========== 私有方法 ==========
private toSyncedUser(record: any): SyncedUser { private toSyncedUser(record: any): SyncedUser {

View File

@ -7,7 +7,6 @@ export type SystemAccountType = 'OPERATION' | 'PROVINCE' | 'CITY' | 'HEADQUARTER
export interface SystemAccount { export interface SystemAccount {
id: bigint; id: bigint;
accountType: SystemAccountType; accountType: SystemAccountType;
regionCode: string | null; // 省/市代码
name: string; name: string;
contributionBalance: ContributionAmount; contributionBalance: ContributionAmount;
contributionNeverExpires: boolean; contributionNeverExpires: boolean;
@ -21,8 +20,6 @@ export interface SystemContributionRecord {
systemAccountId: bigint; systemAccountId: bigint;
sourceAdoptionId: bigint; sourceAdoptionId: bigint;
sourceAccountSequence: string; sourceAccountSequence: string;
sourceType: string; // 来源类型FIXED_RATE / LEVEL_OVERFLOW / LEVEL_NO_ANCESTOR / BONUS_TIER_1/2/3
levelDepth: number | null; // 层级深度1-15仅对层级相关类型有效
distributionRate: number; distributionRate: number;
amount: ContributionAmount; amount: ContributionAmount;
effectiveDate: Date; effectiveDate: Date;
@ -39,19 +36,9 @@ export class SystemAccountRepository {
return this.unitOfWork.getClient(); return this.unitOfWork.getClient();
} }
/** async findByType(accountType: SystemAccountType): Promise<SystemAccount | null> {
* Look up a system account by accountType + regionCode. const record = await this.client.systemAccount.findUnique({
* regionCode is nullable, so findFirst is used instead of findUnique. where: { accountType },
*/
async findByTypeAndRegion(
accountType: SystemAccountType,
regionCode: string | null,
): Promise<SystemAccount | null> {
const record = await this.client.systemAccount.findFirst({
where: {
accountType,
regionCode: regionCode === null ? { equals: null } : regionCode,
},
}); });
if (!record) { if (!record) {
@ -61,225 +48,123 @@ export class SystemAccountRepository {
return this.toSystemAccount(record); return this.toSystemAccount(record);
} }
/**
 * Find all system accounts of a given type (e.g. every CITY account).
 */
async findByType(accountType: SystemAccountType): Promise<SystemAccount[]> {
const records = await this.client.systemAccount.findMany({
where: { accountType },
orderBy: { regionCode: 'asc' },
});
return records.map((r) => this.toSystemAccount(r));
}
async findAll(): Promise<SystemAccount[]> { async findAll(): Promise<SystemAccount[]> {
const records = await this.client.systemAccount.findMany({ const records = await this.client.systemAccount.findMany({
orderBy: [{ accountType: 'asc' }, { regionCode: 'asc' }], orderBy: { accountType: 'asc' },
}); });
return records.map((r) => this.toSystemAccount(r)); return records.map((r) => this.toSystemAccount(r));
} }
/**
 * Ensure the base (region-less) system accounts exist.
 */
async ensureSystemAccountsExist(): Promise<void> { async ensureSystemAccountsExist(): Promise<void> {
const accounts: { accountType: SystemAccountType; name: string }[] = [ const accounts: { accountType: SystemAccountType; name: string }[] = [
{ accountType: 'OPERATION', name: '运营账户' }, { accountType: 'OPERATION', name: '运营账户' },
{ accountType: 'PROVINCE', name: '省公司账户' },
{ accountType: 'CITY', name: '市公司账户' },
{ accountType: 'HEADQUARTERS', name: '总部账户' }, { accountType: 'HEADQUARTERS', name: '总部账户' },
]; ];
for (const account of accounts) { for (const account of accounts) {
// 由于 regionCode 是 nullable使用 findFirst + create 替代 upsert await this.client.systemAccount.upsert({
const existing = await this.client.systemAccount.findFirst({ where: { accountType: account.accountType },
where: { create: {
accountType: account.accountType, accountType: account.accountType,
regionCode: { equals: null },
},
});
if (!existing) {
await this.client.systemAccount.create({
data: {
accountType: account.accountType,
regionCode: null,
name: account.name, name: account.name,
contributionBalance: 0, contributionBalance: 0,
contributionNeverExpires: true,
}, },
update: {},
}); });
} }
} }
}
/**
 * Add contribution to a system account, creating the account if it does not exist.
 */
async addContribution( async addContribution(
accountType: SystemAccountType, accountType: SystemAccountType,
regionCode: string | null,
amount: ContributionAmount, amount: ContributionAmount,
): Promise<void> { ): Promise<void> {
const name = this.getAccountName(accountType, regionCode);
// 由于 regionCode 是 nullable使用 findFirst + create/update 替代 upsert
const existing = await this.client.systemAccount.findFirst({
where: {
accountType,
regionCode: regionCode === null ? { equals: null } : regionCode,
},
});
if (existing) {
await this.client.systemAccount.update({ await this.client.systemAccount.update({
where: { id: existing.id }, where: { accountType },
data: { data: {
contributionBalance: { increment: amount.value }, contributionBalance: { increment: amount.value },
}, },
}); });
} else {
await this.client.systemAccount.create({
data: {
accountType,
regionCode,
name,
contributionBalance: amount.value,
contributionNeverExpires: true,
},
});
}
}
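The findFirst + create/update pairs above replace upsert because the lookup key includes a nullable regionCode: Prisma's upsert needs a unique where input, and a NULL member of a compound unique key cannot be addressed that way. A condensed sketch of that pattern follows; it assumes accountType is stored as a plain string-like value and simplifies the display-name logic.

import { PrismaClient } from '@prisma/client';
import Decimal from 'decimal.js';

type SystemAccountType = 'OPERATION' | 'PROVINCE' | 'CITY' | 'HEADQUARTERS';

// Sketch: "upsert" keyed by (accountType, regionCode) where regionCode may be NULL.
async function addSystemContribution(
  prisma: PrismaClient,
  accountType: SystemAccountType,
  regionCode: string | null,
  amount: Decimal,
): Promise<void> {
  const existing = await prisma.systemAccount.findFirst({
    where: {
      accountType,
      regionCode: regionCode === null ? { equals: null } : regionCode,
    },
  });
  if (existing) {
    await prisma.systemAccount.update({
      where: { id: existing.id },
      data: { contributionBalance: { increment: amount } },
    });
  } else {
    await prisma.systemAccount.create({
      data: {
        accountType,
        regionCode,
        name: regionCode ? `${regionCode}账户` : accountType, // display name simplified for the sketch
        contributionBalance: amount,
        contributionNeverExpires: true,
      },
    });
  }
}

The read-then-write pair is not atomic on its own; in the repository it goes through unitOfWork.getClient(), and a partial unique index (for example on account_type where region_code is null) would be the usual guard against a concurrent duplicate insert.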
/**
 * Resolve a display name for a system account.
 */
private getAccountName(accountType: SystemAccountType, regionCode: string | null): string {
if (!regionCode) {
const names: Record<SystemAccountType, string> = {
OPERATION: '运营账户',
PROVINCE: '省公司账户',
CITY: '市公司账户',
HEADQUARTERS: '总部账户',
};
return names[accountType] || accountType;
}
return `${regionCode}账户`;
}
/**
 * Subtract contribution from a system account.
 */
async subtractContribution(
accountType: SystemAccountType,
regionCode: string | null,
amount: ContributionAmount,
): Promise<void> {
const existing = await this.client.systemAccount.findFirst({
where: {
accountType,
regionCode: regionCode === null ? { equals: null } : regionCode,
},
});
if (existing) {
await this.client.systemAccount.update({
where: { id: existing.id },
data: {
contributionBalance: { decrement: amount.value },
},
});
}
}
/**
 * Soft-delete the system contribution records that originated from a given adoption.
 */
async deleteContributionRecordsByAdoption(
accountType: SystemAccountType,
regionCode: string | null,
sourceAdoptionId: bigint,
sourceAccountSequence: string,
): Promise<number> {
const systemAccount = await this.findByTypeAndRegion(accountType, regionCode);
if (!systemAccount) {
return 0;
}
const result = await this.client.systemContributionRecord.updateMany({
where: {
systemAccountId: systemAccount.id,
sourceAdoptionId,
sourceAccountSequence,
deletedAt: null, // 只软删除未删除的记录
},
data: {
deletedAt: new Date(),
},
});
return result.count;
} }
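deleteContributionRecordsByAdoption implements a soft delete: rows are stamped with deletedAt instead of being removed, and every read path filters on deletedAt: null (as findContributionRecords below does). A minimal sketch of the two halves of that contract, using the model and field names from the repository above:

import { PrismaClient } from '@prisma/client';

// Sketch: soft-delete contract. Writes stamp deletedAt; reads exclude stamped rows.
async function softDeleteByAdoption(
  prisma: PrismaClient,
  systemAccountId: bigint,
  sourceAdoptionId: bigint,
): Promise<number> {
  const result = await prisma.systemContributionRecord.updateMany({
    where: { systemAccountId, sourceAdoptionId, deletedAt: null },
    data: { deletedAt: new Date() },
  });
  return result.count; // rows newly soft-deleted; already-deleted rows are untouched
}

async function listLiveRecords(prisma: PrismaClient, systemAccountId: bigint) {
  return prisma.systemContributionRecord.findMany({
    where: { systemAccountId, deletedAt: null },
    orderBy: { createdAt: 'desc' },
  });
}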
async saveContributionRecord(record: { async saveContributionRecord(record: {
accountType: SystemAccountType; systemAccountType: SystemAccountType;
regionCode: string | null;
sourceAdoptionId: bigint; sourceAdoptionId: bigint;
sourceAccountSequence: string; sourceAccountSequence: string;
sourceType: string; // 来源类型
levelDepth?: number | null; // 层级深度
distributionRate: number; distributionRate: number;
amount: ContributionAmount; amount: ContributionAmount;
effectiveDate: Date; effectiveDate: Date;
expireDate?: Date | null; expireDate?: Date | null;
}): Promise<SystemContributionRecord> { }): Promise<void> {
const systemAccount = await this.findByTypeAndRegion(record.accountType, record.regionCode); const systemAccount = await this.findByType(record.systemAccountType);
if (!systemAccount) { if (!systemAccount) {
throw new Error(`System account ${record.accountType}:${record.regionCode} not found`); throw new Error(`System account ${record.systemAccountType} not found`);
} }
const created = await this.client.systemContributionRecord.create({ await this.client.systemContributionRecord.create({
data: { data: {
systemAccountId: systemAccount.id, systemAccountId: systemAccount.id,
sourceAdoptionId: record.sourceAdoptionId, sourceAdoptionId: record.sourceAdoptionId,
sourceAccountSequence: record.sourceAccountSequence, sourceAccountSequence: record.sourceAccountSequence,
sourceType: record.sourceType,
levelDepth: record.levelDepth ?? null,
distributionRate: record.distributionRate, distributionRate: record.distributionRate,
amount: record.amount.value, amount: record.amount.value,
effectiveDate: record.effectiveDate, effectiveDate: record.effectiveDate,
expireDate: record.expireDate ?? null, expireDate: record.expireDate ?? null,
}, },
}); });
}
return this.toContributionRecord(created); async saveContributionRecords(records: {
systemAccountType: SystemAccountType;
sourceAdoptionId: bigint;
sourceAccountSequence: string;
distributionRate: number;
amount: ContributionAmount;
effectiveDate: Date;
expireDate?: Date | null;
}[]): Promise<void> {
if (records.length === 0) return;
const systemAccounts = await this.findAll();
const accountMap = new Map<SystemAccountType, bigint>();
for (const account of systemAccounts) {
accountMap.set(account.accountType, account.id);
}
await this.client.systemContributionRecord.createMany({
data: records.map((r) => ({
systemAccountId: accountMap.get(r.systemAccountType)!,
sourceAdoptionId: r.sourceAdoptionId,
sourceAccountSequence: r.sourceAccountSequence,
distributionRate: r.distributionRate,
amount: r.amount.value,
effectiveDate: r.effectiveDate,
expireDate: r.expireDate ?? null,
})),
});
} }
async findContributionRecords( async findContributionRecords(
accountType: SystemAccountType, systemAccountType: SystemAccountType,
regionCode: string | null,
page: number, page: number,
pageSize: number, pageSize: number,
): Promise<{ data: SystemContributionRecord[]; total: number }> { ): Promise<{ data: SystemContributionRecord[]; total: number }> {
const systemAccount = await this.findByTypeAndRegion(accountType, regionCode); const systemAccount = await this.findByType(systemAccountType);
if (!systemAccount) { if (!systemAccount) {
return { data: [], total: 0 }; return { data: [], total: 0 };
} }
const whereClause = {
systemAccountId: systemAccount.id,
deletedAt: null, // 过滤已软删除的记录
};
const [records, total] = await Promise.all([ const [records, total] = await Promise.all([
this.client.systemContributionRecord.findMany({ this.client.systemContributionRecord.findMany({
where: whereClause, where: { systemAccountId: systemAccount.id },
skip: (page - 1) * pageSize, skip: (page - 1) * pageSize,
take: pageSize, take: pageSize,
orderBy: { createdAt: 'desc' }, orderBy: { createdAt: 'desc' },
}), }),
this.client.systemContributionRecord.count({ this.client.systemContributionRecord.count({
where: whereClause, where: { systemAccountId: systemAccount.id },
}), }),
]); ]);
@ -293,7 +178,6 @@ export class SystemAccountRepository {
return { return {
id: record.id, id: record.id,
accountType: record.accountType as SystemAccountType, accountType: record.accountType as SystemAccountType,
regionCode: record.regionCode,
name: record.name, name: record.name,
contributionBalance: new ContributionAmount(record.contributionBalance), contributionBalance: new ContributionAmount(record.contributionBalance),
contributionNeverExpires: record.contributionNeverExpires, contributionNeverExpires: record.contributionNeverExpires,
@ -309,8 +193,6 @@ export class SystemAccountRepository {
systemAccountId: record.systemAccountId, systemAccountId: record.systemAccountId,
sourceAdoptionId: record.sourceAdoptionId, sourceAdoptionId: record.sourceAdoptionId,
sourceAccountSequence: record.sourceAccountSequence, sourceAccountSequence: record.sourceAccountSequence,
sourceType: record.sourceType,
levelDepth: record.levelDepth,
distributionRate: record.distributionRate, distributionRate: record.distributionRate,
amount: new ContributionAmount(record.amount), amount: new ContributionAmount(record.amount),
effectiveDate: record.effectiveDate, effectiveDate: record.effectiveDate,

View File

@ -7,16 +7,14 @@ export interface UnallocatedContribution {
unallocType: string; unallocType: string;
wouldBeAccountSequence: string | null; wouldBeAccountSequence: string | null;
levelDepth: number | null; levelDepth: number | null;
bonusTier: number | null;
amount: ContributionAmount; amount: ContributionAmount;
reason: string | null; reason: string | null;
sourceAdoptionId: bigint; sourceAdoptionId: bigint;
sourceAccountSequence: string; sourceAccountSequence: string;
effectiveDate: Date; effectiveDate: Date;
expireDate: Date; expireDate: Date;
status: string; allocatedToHeadquarters: boolean;
allocatedAt: Date | null; allocatedAt: Date | null;
allocatedToAccountSequence: string | null;
createdAt: Date; createdAt: Date;
} }
@ -132,205 +130,20 @@ export class UnallocatedContributionRepository {
}; };
} }
/**
 * Find pending bonus contributions reserved for an account.
 * @param accountSequence the would-be recipient's account sequence
 * @param bonusTier bonus tier (2 or 3)
 */
async findPendingBonusByAccountSequence(
accountSequence: string,
bonusTier: number,
): Promise<UnallocatedContribution[]> {
const records = await this.client.unallocatedContribution.findMany({
where: {
wouldBeAccountSequence: accountSequence,
unallocType: `BONUS_TIER_${bonusTier}`,
status: 'PENDING',
},
orderBy: { createdAt: 'asc' },
});
return records.map((r) => this.toDomain(r));
}
/**
 * Claim bonus records - mark them as allocated to the user.
 * @param ids record IDs to claim
 * @param accountSequence the receiving account sequence
 */
async claimBonusRecords(ids: bigint[], accountSequence: string): Promise<void> {
if (ids.length === 0) return;
await this.client.unallocatedContribution.updateMany({
where: {
id: { in: ids },
status: 'PENDING',
},
data: {
status: 'ALLOCATED_TO_USER',
allocatedAt: new Date(),
allocatedToAccountSequence: accountSequence,
},
});
}
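claimBonusRecords (and claimLevelRecords below) move rows from PENDING to ALLOCATED_TO_USER with a single conditional updateMany, so a record that was already claimed elsewhere is simply skipped rather than double-counted. A sketch of the same status-transition pattern, using the model and field names from this repository:

import { PrismaClient } from '@prisma/client';

// Sketch: claim a batch of pending rows with one conditional updateMany.
// Rows that are no longer PENDING are skipped, so retries and concurrent
// claims cannot double-allocate the same contribution.
async function claimPending(
  prisma: PrismaClient,
  ids: bigint[],
  accountSequence: string,
): Promise<number> {
  if (ids.length === 0) return 0;
  const result = await prisma.unallocatedContribution.updateMany({
    where: { id: { in: ids }, status: 'PENDING' },
    data: {
      status: 'ALLOCATED_TO_USER',
      allocatedAt: new Date(),
      allocatedToAccountSequence: accountSequence,
    },
  });
  return result.count; // how many rows were actually claimed by this call
}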
/**
 * Find all pending bonus contributions reserved for an account (any tier).
 */
async findAllPendingBonusByAccountSequence(
accountSequence: string,
): Promise<UnallocatedContribution[]> {
const records = await this.client.unallocatedContribution.findMany({
where: {
wouldBeAccountSequence: accountSequence,
unallocType: { startsWith: 'BONUS_TIER_' },
status: 'PENDING',
},
orderBy: { createdAt: 'asc' },
});
return records.map((r) => this.toDomain(r));
}
/**
 * Find pending level-overflow contributions reserved for an account within a level range.
 * @param accountSequence the would-be recipient's account sequence
 * @param minLevel minimum level depth (inclusive)
 * @param maxLevel maximum level depth (inclusive)
 */
async findPendingLevelByAccountSequence(
accountSequence: string,
minLevel: number,
maxLevel: number,
): Promise<UnallocatedContribution[]> {
const records = await this.client.unallocatedContribution.findMany({
where: {
wouldBeAccountSequence: accountSequence,
unallocType: 'LEVEL_OVERFLOW',
levelDepth: {
gte: minLevel,
lte: maxLevel,
},
status: 'PENDING',
},
orderBy: { levelDepth: 'asc' },
});
return records.map((r) => this.toDomain(r));
}
/**
 * Claim level records - mark them as allocated to the user.
 * @param ids record IDs to claim
 * @param accountSequence the receiving account sequence
 */
async claimLevelRecords(ids: bigint[], accountSequence: string): Promise<void> {
if (ids.length === 0) return;
await this.client.unallocatedContribution.updateMany({
where: {
id: { in: ids },
status: 'PENDING',
},
data: {
status: 'ALLOCATED_TO_USER',
allocatedAt: new Date(),
allocatedToAccountSequence: accountSequence,
},
});
}
/**
 * Sum pending unallocated level contributions grouped by tier (levels 1-5, 6-10, 11-15).
 */
async getUnallocatedByLevelTier(): Promise<{
tier1: string; // 1-5级未分配
tier2: string; // 6-10级未分配
tier3: string; // 11-15级未分配
}> {
const results = await this.client.unallocatedContribution.groupBy({
by: ['levelDepth'],
where: {
levelDepth: { not: null },
status: 'PENDING',
},
_sum: { amount: true },
});
let tier1 = new ContributionAmount(0);
let tier2 = new ContributionAmount(0);
let tier3 = new ContributionAmount(0);
for (const item of results) {
const depth = item.levelDepth!;
const amount = new ContributionAmount(item._sum.amount || 0);
if (depth >= 1 && depth <= 5) {
tier1 = tier1.add(amount);
} else if (depth >= 6 && depth <= 10) {
tier2 = tier2.add(amount);
} else if (depth >= 11 && depth <= 15) {
tier3 = tier3.add(amount);
}
}
return {
tier1: tier1.value.toString(),
tier2: tier2.value.toString(),
tier3: tier3.value.toString(),
};
}
/**
 * Aggregate pending unallocated contributions by bonus tier
 */
async getUnallocatedByBonusTier(): Promise<{
tier1: string;
tier2: string;
tier3: string;
}> {
const results = await this.client.unallocatedContribution.groupBy({
by: ['unallocType'],
where: {
unallocType: { startsWith: 'BONUS_TIER_' },
status: 'PENDING',
},
_sum: { amount: true },
});
let tier1 = '0';
let tier2 = '0';
let tier3 = '0';
for (const item of results) {
const amount = (item._sum.amount || 0).toString();
if (item.unallocType === 'BONUS_TIER_1') {
tier1 = amount;
} else if (item.unallocType === 'BONUS_TIER_2') {
tier2 = amount;
} else if (item.unallocType === 'BONUS_TIER_3') {
tier3 = amount;
}
}
return { tier1, tier2, tier3 };
}
private toDomain(record: any): UnallocatedContribution { private toDomain(record: any): UnallocatedContribution {
return { return {
id: record.id, id: record.id,
unallocType: record.unallocType, unallocType: record.unallocType,
wouldBeAccountSequence: record.wouldBeAccountSequence, wouldBeAccountSequence: record.wouldBeAccountSequence,
levelDepth: record.levelDepth, levelDepth: record.levelDepth,
bonusTier: record.bonusTier,
amount: new ContributionAmount(record.amount), amount: new ContributionAmount(record.amount),
reason: record.reason, reason: record.reason,
sourceAdoptionId: record.sourceAdoptionId, sourceAdoptionId: record.sourceAdoptionId,
sourceAccountSequence: record.sourceAccountSequence, sourceAccountSequence: record.sourceAccountSequence,
effectiveDate: record.effectiveDate, effectiveDate: record.effectiveDate,
expireDate: record.expireDate, expireDate: record.expireDate,
status: record.status, allocatedToHeadquarters: record.allocatedToHeadquarters,
allocatedAt: record.allocatedAt, allocatedAt: record.allocatedAt,
allocatedToAccountSequence: record.allocatedToAccountSequence,
createdAt: record.createdAt, createdAt: record.createdAt,
}; };
} }

View File

@ -25,7 +25,6 @@
# CDC & Sync: # CDC & Sync:
# ./deploy-mining.sh sync-reset # Reset CDC consumer offsets to beginning # ./deploy-mining.sh sync-reset # Reset CDC consumer offsets to beginning
# ./deploy-mining.sh sync-status # Show CDC consumer group status # ./deploy-mining.sh sync-status # Show CDC consumer group status
# ./deploy-mining.sh cdc-resnapshot # Force Debezium to re-snapshot (use when Kafka data lost)
# #
# Full Reset (for development/testing): # Full Reset (for development/testing):
# ./deploy-mining.sh full-reset # Complete reset: stop services, drop DBs, recreate, resync # ./deploy-mining.sh full-reset # Complete reset: stop services, drop DBs, recreate, resync
@ -41,7 +40,6 @@
# admin -> mining-admin-service # admin -> mining-admin-service
# auth -> auth-service # auth -> auth-service
# wallet -> mining-wallet-service # wallet -> mining-wallet-service
# blockchain -> mining-blockchain-service
# #
set -e set -e
@ -61,7 +59,6 @@ MINING_SERVICES=(
"mining-admin-service" "mining-admin-service"
"auth-service" "auth-service"
"mining-wallet-service" "mining-wallet-service"
"mining-blockchain-service"
) )
# Service Aliases # Service Aliases
@ -73,7 +70,6 @@ declare -A SERVICE_ALIASES=(
["admin"]="mining-admin-service" ["admin"]="mining-admin-service"
["auth"]="auth-service" ["auth"]="auth-service"
["wallet"]="mining-wallet-service" ["wallet"]="mining-wallet-service"
["blockchain"]="mining-blockchain-service"
) )
# 2.0 Databases # 2.0 Databases
@ -84,7 +80,6 @@ MINING_DATABASES=(
"rwa_mining_admin" "rwa_mining_admin"
"rwa_auth" "rwa_auth"
"rwa_mining_wallet" "rwa_mining_wallet"
"rwa_blockchain"
) )
# Service to Database mapping # Service to Database mapping
@ -95,7 +90,6 @@ declare -A SERVICE_DB=(
["mining-admin-service"]="rwa_mining_admin" ["mining-admin-service"]="rwa_mining_admin"
["auth-service"]="rwa_auth" ["auth-service"]="rwa_auth"
["mining-wallet-service"]="rwa_mining_wallet" ["mining-wallet-service"]="rwa_mining_wallet"
["mining-blockchain-service"]="rwa_blockchain"
) )
# 2.0 Ports # 2.0 Ports
@ -106,19 +100,12 @@ declare -A SERVICE_PORTS=(
["mining-admin-service"]="3023" ["mining-admin-service"]="3023"
["auth-service"]="3024" ["auth-service"]="3024"
["mining-wallet-service"]="3025" ["mining-wallet-service"]="3025"
["mining-blockchain-service"]="3026"
) )
# CDC Consumer Groups (all groups that need to be reset during full-reset) # CDC Consumer Groups (all groups that need to be reset during full-reset)
# NOTE: contribution-service uses sequential phase consumption with separate consumer groups
# for each table (user_accounts, referral_relationships, planting_orders)
CDC_CONSUMER_GROUPS=( CDC_CONSUMER_GROUPS=(
"contribution-service-cdc-group" "contribution-service-cdc-group"
"contribution-service-cdc-phase-user_accounts"
"contribution-service-cdc-phase-referral_relationships"
"contribution-service-cdc-phase-planting_orders"
"auth-service-cdc-group" "auth-service-cdc-group"
"auth-service-cdc-group-wallet"
"mining-admin-service-cdc-group" "mining-admin-service-cdc-group"
) )
@ -132,14 +119,6 @@ OUTBOX_CONNECTORS=(
"mining-wallet-outbox-connector" "mining-wallet-outbox-connector"
) )
# Debezium CDC Postgres Connectors (for 1.0 -> 2.0 data sync)
# These connectors capture changes from 1.0 service databases
CDC_POSTGRES_CONNECTORS=(
"identity-postgres-connector"
"referral-postgres-connector"
"planting-postgres-connector"
)
# Debezium Connect URL (default port 8084 as mapped in docker-compose) # Debezium Connect URL (default port 8084 as mapped in docker-compose)
DEBEZIUM_CONNECT_URL="${DEBEZIUM_CONNECT_URL:-http://localhost:8084}" DEBEZIUM_CONNECT_URL="${DEBEZIUM_CONNECT_URL:-http://localhost:8084}"
@ -729,150 +708,6 @@ sync_reset() {
log_info "Run: ./deploy-mining.sh up contribution-service && ./deploy-mining.sh up auth-service" log_info "Run: ./deploy-mining.sh up contribution-service && ./deploy-mining.sh up auth-service"
} }
# Trigger Debezium CDC connectors to re-snapshot
# This is needed when Kafka topic messages are deleted (due to retention or manual cleanup)
# and the connector needs to re-export all data from the source database
cdc_resnapshot() {
print_section "Triggering CDC Connectors Re-Snapshot"
local connect_url="$DEBEZIUM_CONNECT_URL"
# Check if Debezium Connect is available
if ! curl -s "$connect_url" &>/dev/null; then
log_error "Debezium Connect not available at $connect_url"
return 1
fi
echo -e "${YELLOW}WARNING: This will delete and recreate CDC Postgres connectors.${NC}"
echo -e "${YELLOW}All connectors will re-snapshot their source tables.${NC}"
echo ""
echo "Connectors to be re-created:"
for connector in "${CDC_POSTGRES_CONNECTORS[@]}"; do
echo " - $connector"
done
echo ""
read -p "Continue? (y/n): " confirm
if [ "$confirm" != "y" ]; then
log_warn "Aborted"
return 1
fi
# Stop CDC consumer services first
log_step "Stopping CDC consumer services..."
service_stop "contribution-service"
service_stop "auth-service"
# Wait for consumer groups to become inactive
log_info "Waiting 10 seconds for consumers to disconnect..."
sleep 10
# Delete consumer groups to ensure fresh consumption
log_step "Deleting consumer groups..."
for group in "${CDC_CONSUMER_GROUPS[@]}"; do
log_info "Deleting consumer group: $group"
if docker ps --format '{{.Names}}' 2>/dev/null | grep -q "^${KAFKA_CONTAINER}$"; then
docker exec "$KAFKA_CONTAINER" kafka-consumer-groups --bootstrap-server localhost:9092 \
--delete --group "$group" 2>/dev/null && log_success "Deleted $group" || log_warn "Could not delete $group"
fi
done
# Clear processed_cdc_events table
log_step "Clearing processed CDC events..."
if run_psql "rwa_contribution" "TRUNCATE TABLE processed_cdc_events;" 2>/dev/null; then
log_success "Truncated processed_cdc_events in rwa_contribution"
else
log_warn "Could not truncate processed_cdc_events (table may not exist)"
fi
# For each CDC Postgres connector, save config, delete, and recreate
log_step "Re-creating CDC Postgres connectors..."
local scripts_dir="$SCRIPT_DIR/scripts/debezium"
for connector in "${CDC_POSTGRES_CONNECTORS[@]}"; do
log_info "Processing connector: $connector"
# Get current config from running connector
local config
config=$(curl -s "$connect_url/connectors/$connector/config" 2>/dev/null)
local config_file=""
local use_file_config=false
# If connector doesn't exist, try to find config file
if [ -z "$config" ] || echo "$config" | grep -q "error_code"; then
log_warn "Connector $connector not found, looking for config file..."
# Map connector name to config file
case "$connector" in
"identity-postgres-connector")
config_file="$scripts_dir/identity-connector.json"
;;
"referral-postgres-connector")
config_file="$scripts_dir/referral-connector.json"
;;
"planting-postgres-connector")
config_file="$scripts_dir/planting-connector.json"
;;
esac
if [ -n "$config_file" ] && [ -f "$config_file" ]; then
log_info "Found config file: $config_file"
use_file_config=true
else
log_error "No config available for $connector, skipping"
continue
fi
else
# Delete existing connector
log_info "Deleting connector: $connector"
curl -s -X DELETE "$connect_url/connectors/$connector" &>/dev/null
sleep 2
fi
# Create connector
log_info "Creating connector: $connector with snapshot.mode=always"
local result
if [ "$use_file_config" = true ]; then
# Use config file, replace snapshot.mode with always
local json_config
json_config=$(cat "$config_file" | envsubst | sed 's/"snapshot.mode": "initial"/"snapshot.mode": "always"/')
result=$(echo "$json_config" | curl -s -X POST "$connect_url/connectors" \
-H "Content-Type: application/json" \
-d @- 2>/dev/null)
else
# Use config from running connector, but change snapshot.mode to always
local modified_config
modified_config=$(echo "$config" | sed 's/"snapshot.mode":"initial"/"snapshot.mode":"always"/' | sed 's/"snapshot.mode": "initial"/"snapshot.mode": "always"/')
result=$(curl -s -X POST "$connect_url/connectors" \
-H "Content-Type: application/json" \
-d "{\"name\":\"$connector\",\"config\":$modified_config}" 2>/dev/null)
fi
if echo "$result" | grep -q '"name"'; then
log_success "Created connector: $connector"
else
log_error "Failed to create connector $connector: $result"
fi
# Wait between connectors
sleep 3
done
# Wait for snapshots to complete
log_step "Waiting 30 seconds for Debezium snapshots to complete..."
sleep 30
# Start services
log_step "Starting CDC consumer services..."
service_start "contribution-service"
service_start "auth-service"
log_success "CDC re-snapshot completed!"
log_info "Monitor sync progress with: ./deploy-mining.sh sync-status"
}
sync_status() { sync_status() {
print_section "CDC Sync Status" print_section "CDC Sync Status"
@ -1267,47 +1102,9 @@ full_reset() {
service_start "$service" service_start "$service"
done done
log_step "Step 10/18: Waiting for contribution-service CDC sync to complete..." log_step "Step 10/18: Waiting for services to be ready and sync from 1.0..."
log_info "Waiting for contribution-service to complete CDC sync (user_accounts -> referral_relationships -> planting_orders)..." log_info "Waiting 30 seconds for all services to start and sync data from 1.0 CDC..."
sleep 30
# Wait for contribution-service to finish its sequential CDC sync
# Check the sync status via the /health/cdc-sync API
local max_wait=600 # wait at most 10 minutes
local wait_count=0
local sync_completed=false
local cdc_sync_url="http://localhost:3020/api/v2/health/cdc-sync"
while [ "$wait_count" -lt "$max_wait" ] && [ "$sync_completed" = false ]; do
# Query the API for the current sync status
local sync_status
sync_status=$(curl -s "$cdc_sync_url" 2>/dev/null || echo '{}')
if echo "$sync_status" | grep -q '"allPhasesCompleted":true'; then
sync_completed=true
log_success "CDC sync completed - all phases finished"
else
# Show the current status
local is_running
local sequential_mode
is_running=$(echo "$sync_status" | grep -o '"isRunning":[^,}]*' | cut -d':' -f2)
sequential_mode=$(echo "$sync_status" | grep -o '"sequentialMode":[^,}]*' | cut -d':' -f2)
if [ "$is_running" = "true" ] && [ "$sequential_mode" = "true" ]; then
log_info "CDC sync in progress (sequential mode)... (waited ${wait_count}s)"
elif [ "$is_running" = "true" ]; then
log_info "CDC consumer running... (waited ${wait_count}s)"
else
log_info "Waiting for CDC consumer to start... (waited ${wait_count}s)"
fi
sleep 5
wait_count=$((wait_count + 5))
fi
done
if [ "$sync_completed" = false ]; then
log_warn "CDC sync did not complete within ${max_wait}s, proceeding anyway..."
log_info "You may need to wait longer or check: curl $cdc_sync_url"
fi
log_step "Step 11/18: Registering Debezium outbox connectors..." log_step "Step 11/18: Registering Debezium outbox connectors..."
# Register outbox connectors AFTER services are running and have synced data # Register outbox connectors AFTER services are running and have synced data
@ -1530,7 +1327,6 @@ show_help() {
echo -e "${BOLD}CDC / Sync Management:${NC}" echo -e "${BOLD}CDC / Sync Management:${NC}"
echo " sync-reset Reset CDC consumer to read from beginning" echo " sync-reset Reset CDC consumer to read from beginning"
echo " sync-status Show CDC consumer group status" echo " sync-status Show CDC consumer group status"
echo " cdc-resnapshot Force Debezium CDC connectors to re-snapshot ${YELLOW}(use when Kafka data lost)${NC}"
echo " outbox-register Register all Debezium outbox connectors" echo " outbox-register Register all Debezium outbox connectors"
echo " outbox-status Show outbox connector status" echo " outbox-status Show outbox connector status"
echo " outbox-delete Delete all outbox connectors" echo " outbox-delete Delete all outbox connectors"
@ -1555,7 +1351,6 @@ show_help() {
echo " admin -> mining-admin-service" echo " admin -> mining-admin-service"
echo " auth -> auth-service" echo " auth -> auth-service"
echo " wallet -> mining-wallet-service" echo " wallet -> mining-wallet-service"
echo " blockchain -> mining-blockchain-service"
echo "" echo ""
echo -e "${BOLD}Examples:${NC}" echo -e "${BOLD}Examples:${NC}"
echo " $0 up # Start all services" echo " $0 up # Start all services"
@ -1634,10 +1429,6 @@ main() {
sync-status) sync-status)
sync_status sync_status
;; ;;
cdc-resnapshot)
print_header
cdc_resnapshot
;;
# Outbox connector commands # Outbox connector commands
outbox-register) outbox-register)

View File

@ -76,10 +76,6 @@ services:
REDIS_DB: 11 REDIS_DB: 11
# Kafka # Kafka
KAFKA_BROKERS: kafka:29092 KAFKA_BROKERS: kafka:29092
# JWT configuration (shares the secret with auth-service to verify tokens)
JWT_SECRET: ${JWT_SECRET:-your-jwt-secret-change-in-production}
# 2.0 internal service calls
CONTRIBUTION_SERVICE_URL: http://contribution-service:3020
ports: ports:
- "3021:3021" - "3021:3021"
healthcheck: healthcheck:
@ -112,9 +108,6 @@ services:
KAFKA_BROKERS: kafka:29092 KAFKA_BROKERS: kafka:29092
# 2.0 internal service calls # 2.0 internal service calls
MINING_SERVICE_URL: http://mining-service:3021 MINING_SERVICE_URL: http://mining-service:3021
AUTH_SERVICE_URL: http://auth-service:3024
# JWT configuration (shares the secret with auth-service to verify tokens)
JWT_SECRET: ${JWT_SECRET:-your-jwt-secret-change-in-production}
ports: ports:
- "3022:3022" - "3022:3022"
healthcheck: healthcheck:
@ -189,7 +182,6 @@ services:
KAFKA_BROKERS: kafka:29092 KAFKA_BROKERS: kafka:29092
CDC_ENABLED: "true" CDC_ENABLED: "true"
CDC_TOPIC_USERS: ${CDC_TOPIC_USERS:-cdc.identity.public.user_accounts} CDC_TOPIC_USERS: ${CDC_TOPIC_USERS:-cdc.identity.public.user_accounts}
CDC_TOPIC_WALLET_ADDRESSES: ${CDC_TOPIC_WALLET_ADDRESSES:-cdc.identity.public.wallet_addresses}
CDC_CONSUMER_GROUP: auth-service-cdc-group CDC_CONSUMER_GROUP: auth-service-cdc-group
# JWT configuration # JWT configuration
JWT_SECRET: ${JWT_SECRET:-your-jwt-secret-change-in-production} JWT_SECRET: ${JWT_SECRET:-your-jwt-secret-change-in-production}
@ -247,64 +239,6 @@ services:
networks: networks:
- rwa-network - rwa-network
mining-blockchain-service:
build:
context: ./mining-blockchain-service
dockerfile: Dockerfile
container_name: rwa-mining-blockchain-service
environment:
NODE_ENV: production
TZ: Asia/Shanghai
PORT: 3026
# PostgreSQL - uses a dedicated database
DATABASE_URL: postgresql://${POSTGRES_USER:-postgres}:${POSTGRES_PASSWORD:-postgres}@postgres:5432/rwa_blockchain?schema=public
# Redis - isolated on DB 8 (valid range 0-15)
REDIS_HOST: redis
REDIS_PORT: 6379
REDIS_PASSWORD: ${REDIS_PASSWORD:-}
REDIS_DB: 8
# Kafka - used for MPC signing communication and event publishing
KAFKA_BROKERS: kafka:29092
# JWT configuration
JWT_SECRET: ${JWT_SECRET:-your-jwt-secret-change-in-production}
# Blockchain configuration
NETWORK_MODE: ${NETWORK_MODE:-mainnet}
# KAVA configuration
KAVA_RPC_URL: ${KAVA_RPC_URL:-https://evm.kava.io}
KAVA_CHAIN_ID: ${KAVA_CHAIN_ID:-2222}
KAVA_USDT_CONTRACT: ${KAVA_USDT_CONTRACT:-0xA9F3A35dBa8699c8C681D8db03F0c1A8CEB9D7c3}
# Point-share contract (eUSDT - Energy USDT)
KAVA_EUSDT_CONTRACT: ${KAVA_EUSDT_CONTRACT:-0x7C3275D808eFbAE90C06C7E3A9AfDdcAa8563931}
# Point-value contract (fUSDT - Future USDT)
KAVA_FUSDT_CONTRACT: ${KAVA_FUSDT_CONTRACT:-0x14dc4f7d3E4197438d058C3D156dd9826A161134}
# BSC configuration
BSC_RPC_URL: ${BSC_RPC_URL:-https://bsc-dataseed.binance.org}
BSC_CHAIN_ID: ${BSC_CHAIN_ID:-56}
BSC_USDT_CONTRACT: ${BSC_USDT_CONTRACT:-0x55d398326f99059fF775485246999027B3197955}
# C2C Bot hot wallet (MPC)
HOT_WALLET_USERNAME: ${HOT_WALLET_USERNAME:-}
HOT_WALLET_ADDRESS: ${HOT_WALLET_ADDRESS:-}
# eUSDT (point-share) market-maker wallet (MPC)
EUSDT_MARKET_MAKER_USERNAME: ${EUSDT_MARKET_MAKER_USERNAME:-}
EUSDT_MARKET_MAKER_ADDRESS: ${EUSDT_MARKET_MAKER_ADDRESS:-}
# fUSDT (point-value) market-maker wallet (MPC)
FUSDT_MARKET_MAKER_USERNAME: ${FUSDT_MARKET_MAKER_USERNAME:-}
FUSDT_MARKET_MAKER_ADDRESS: ${FUSDT_MARKET_MAKER_ADDRESS:-}
# Block scanning configuration
BLOCK_SCAN_INTERVAL_MS: ${BLOCK_SCAN_INTERVAL_MS:-5000}
BLOCK_CONFIRMATIONS_REQUIRED: ${BLOCK_CONFIRMATIONS_REQUIRED:-12}
ports:
- "3026:3026"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:3026/api/v2/health"]
interval: 30s
timeout: 10s
retries: 3
start_period: 60s
restart: unless-stopped
networks:
- rwa-network
# =========================================================================== # ===========================================================================
# Frontend Services (2.0) # Frontend Services (2.0)
# =========================================================================== # ===========================================================================

View File

@ -171,9 +171,6 @@ export class UserApplicationService {
// 7. Save the account // 7. Save the account
await this.userRepository.save(account); await this.userRepository.save(account);
// 7.1 Update userId in the buffered events (the real userId only exists after save)
account.updateEventsUserId();
// 8. Generate tokens // 8. Generate tokens
const tokens = await this.tokenService.generateTokenPair({ const tokens = await this.tokenService.generateTokenPair({
userId: account.userId.toString(), userId: account.userId.toString(),
@ -355,10 +352,7 @@ export class UserApplicationService {
}); });
this.logger.log(`[REGISTER] Tokens generated`); this.logger.log(`[REGISTER] Tokens generated`);
// 13. Update userId in the buffered events (the real userId only exists after save) // 13. Publish domain events
account.updateEventsUserId();
// 14. Publish domain events
this.logger.log( this.logger.log(
`[REGISTER] Publishing ${account.domainEvents.length} domain events...`, `[REGISTER] Publishing ${account.domainEvents.length} domain events...`,
); );
@ -529,10 +523,7 @@ export class UserApplicationService {
}); });
this.logger.log(`[REGISTER_NO_SMS] Tokens generated`); this.logger.log(`[REGISTER_NO_SMS] Tokens generated`);
// 11. Update userId in the buffered events (the real userId only exists after save) // 11. Publish domain events
account.updateEventsUserId();
// 12. Publish domain events
this.logger.log( this.logger.log(
`[REGISTER_NO_SMS] Publishing ${account.domainEvents.length} domain events...`, `[REGISTER_NO_SMS] Publishing ${account.domainEvents.length} domain events...`,
); );
@ -873,10 +864,6 @@ export class UserApplicationService {
await this.userRepository.save(account); await this.userRepository.save(account);
await this.redisService.delete(`sms:register:${phoneNumber.value}`); await this.redisService.delete(`sms:register:${phoneNumber.value}`);
// Update userId in the buffered events (the real userId only exists after save)
account.updateEventsUserId();
await this.eventPublisher.publishAll(account.domainEvents); await this.eventPublisher.publishAll(account.domainEvents);
account.clearDomainEvents(); account.clearDomainEvents();

View File

@ -498,22 +498,6 @@ export class UserAccount {
this._domainEvents = []; this._domainEvents = [];
} }
/**
 * Update the userId carried by buffered domain events
 *
 * [2026-01-29] A newly created aggregate records userId 0 in its events;
 * the real userId only exists after repository.save()
 */
updateEventsUserId(): void {
const realUserId = this._userId.toString();
// Only update events whose userId is '0' (the temporary value set at creation)
for (const event of this._domainEvents) {
if ('payload' in event && (event as any).payload?.userId === '0') {
(event as any).payload.userId = realUserId;
}
}
}
/** /**
* *
* *

View File

@ -8,28 +8,22 @@
"name": "mining-admin-service", "name": "mining-admin-service",
"version": "1.0.0", "version": "1.0.0",
"dependencies": { "dependencies": {
"@nestjs/axios": "^3.1.3",
"@nestjs/common": "^10.3.0", "@nestjs/common": "^10.3.0",
"@nestjs/config": "^3.1.1", "@nestjs/config": "^3.1.1",
"@nestjs/core": "^10.3.0", "@nestjs/core": "^10.3.0",
"@nestjs/platform-express": "^10.3.0", "@nestjs/platform-express": "^10.3.0",
"@nestjs/swagger": "^7.1.17", "@nestjs/swagger": "^7.1.17",
"@prisma/client": "^5.7.1", "@prisma/client": "^5.7.1",
"adbkit-apkreader": "^3.2.0",
"axios": "^1.13.2",
"bcrypt": "^5.1.1", "bcrypt": "^5.1.1",
"class-transformer": "^0.5.1", "class-transformer": "^0.5.1",
"class-validator": "^0.14.0", "class-validator": "^0.14.0",
"decimal.js": "^10.4.3", "decimal.js": "^10.4.3",
"ioredis": "^5.3.2", "ioredis": "^5.3.2",
"jsonwebtoken": "^9.0.2", "jsonwebtoken": "^9.0.2",
"jszip": "^3.10.1",
"kafkajs": "^2.2.4", "kafkajs": "^2.2.4",
"plist": "^3.1.0",
"reflect-metadata": "^0.1.14", "reflect-metadata": "^0.1.14",
"rxjs": "^7.8.1", "rxjs": "^7.8.1",
"swagger-ui-express": "^5.0.0", "swagger-ui-express": "^5.0.0"
"xlsx": "^0.18.5"
}, },
"devDependencies": { "devDependencies": {
"@nestjs/cli": "^10.2.1", "@nestjs/cli": "^10.2.1",
@ -38,9 +32,7 @@
"@types/bcrypt": "^6.0.0", "@types/bcrypt": "^6.0.0",
"@types/express": "^4.17.21", "@types/express": "^4.17.21",
"@types/jsonwebtoken": "^9.0.10", "@types/jsonwebtoken": "^9.0.10",
"@types/multer": "^1.4.13",
"@types/node": "^20.10.5", "@types/node": "^20.10.5",
"@types/plist": "^3.0.5",
"eslint": "^8.56.0", "eslint": "^8.56.0",
"prettier": "^3.1.1", "prettier": "^3.1.1",
"prisma": "^5.7.1", "prisma": "^5.7.1",
@ -635,17 +627,6 @@
"integrity": "sha512-4aErSrCR/On/e5G2hDP0wjooqDdauzEbIq8hIkIe5pXV0rtWJZvdCEKL0ykZxex+IxIwBp0eGeV48hQN07dXtw==", "integrity": "sha512-4aErSrCR/On/e5G2hDP0wjooqDdauzEbIq8hIkIe5pXV0rtWJZvdCEKL0ykZxex+IxIwBp0eGeV48hQN07dXtw==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/@nestjs/axios": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/@nestjs/axios/-/axios-3.1.3.tgz",
"integrity": "sha512-RZ/63c1tMxGLqyG3iOCVt7A72oy4x1eM6QEhd4KzCYpaVWW0igq0WSREeRoEZhIxRcZfDfIIkvsOMiM7yfVGZQ==",
"license": "MIT",
"peerDependencies": {
"@nestjs/common": "^7.0.0 || ^8.0.0 || ^9.0.0 || ^10.0.0",
"axios": "^1.3.1",
"rxjs": "^6.0.0 || ^7.0.0"
}
},
"node_modules/@nestjs/cli": { "node_modules/@nestjs/cli": {
"version": "10.4.9", "version": "10.4.9",
"resolved": "https://registry.npmjs.org/@nestjs/cli/-/cli-10.4.9.tgz", "resolved": "https://registry.npmjs.org/@nestjs/cli/-/cli-10.4.9.tgz",
@ -1225,16 +1206,6 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/@types/multer": {
"version": "1.4.13",
"resolved": "https://registry.npmjs.org/@types/multer/-/multer-1.4.13.tgz",
"integrity": "sha512-bhhdtPw7JqCiEfC9Jimx5LqX9BDIPJEh2q/fQ4bqbBPtyEZYr3cvF22NwG0DmPZNYA0CAf2CnqDB4KIGGpJcaw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/express": "*"
}
},
"node_modules/@types/node": { "node_modules/@types/node": {
"version": "20.19.28", "version": "20.19.28",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.28.tgz", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.28.tgz",
@ -1245,17 +1216,6 @@
"undici-types": "~6.21.0" "undici-types": "~6.21.0"
} }
}, },
"node_modules/@types/plist": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/@types/plist/-/plist-3.0.5.tgz",
"integrity": "sha512-E6OCaRmAe4WDmWNsL/9RMqdkkzDCY1etutkflWk4c+AcjDU07Pcz1fQwTX0TQz+Pxqn9i4L1TU3UFpjnrcDgxA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*",
"xmlbuilder": ">=11.0.1"
}
},
"node_modules/@types/qs": { "node_modules/@types/qs": {
"version": "6.14.0", "version": "6.14.0",
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz",
@ -1477,15 +1437,6 @@
"@xtuc/long": "4.2.2" "@xtuc/long": "4.2.2"
} }
}, },
"node_modules/@xmldom/xmldom": {
"version": "0.8.11",
"resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.11.tgz",
"integrity": "sha512-cQzWCtO6C8TQiYl1ruKNn2U6Ao4o4WBBcbL61yJl84x+j5sOWWFU9X7DpND8XZG3daDppSsigMdfAIl2upQBRw==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/@xtuc/ieee754": { "node_modules/@xtuc/ieee754": {
"version": "1.2.0", "version": "1.2.0",
"resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz",
@ -1543,45 +1494,6 @@
"acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
} }
}, },
"node_modules/adbkit-apkreader": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/adbkit-apkreader/-/adbkit-apkreader-3.2.0.tgz",
"integrity": "sha512-QwsxPYCqWSmCAiW/A4gq0eytb4jtZc7WNbECIhLCRfGEB38oXzIV/YkTpkOTQFKSg3S4Svb6y///qOUH7UrWWw==",
"license": "Apache-2.0",
"dependencies": {
"bluebird": "^3.4.7",
"debug": "~4.1.1",
"yauzl": "^2.7.0"
},
"engines": {
"node": ">= 4"
}
},
"node_modules/adbkit-apkreader/node_modules/debug": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
"integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==",
"deprecated": "Debug versions >=3.2.0 <3.2.7 || >=4 <4.3.1 have a low-severity ReDos regression when used in a Node.js environment. It is recommended you upgrade to 3.2.7 or 4.3.1. (https://github.com/visionmedia/debug/issues/797)",
"license": "MIT",
"dependencies": {
"ms": "^2.1.1"
}
},
"node_modules/adbkit-apkreader/node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"license": "MIT"
},
"node_modules/adler-32": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/adler-32/-/adler-32-1.3.1.tgz",
"integrity": "sha512-ynZ4w/nUUv5rrsR8UUGoe1VC9hZj6V5hU9Qw1HlMDJGEJw5S7TfTErWTjMys6M7vr0YWcPqs3qAr4ss0nDfP+A==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/agent-base": { "node_modules/agent-base": {
"version": "6.0.2", "version": "6.0.2",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
@ -1822,24 +1734,6 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
"license": "MIT"
},
"node_modules/axios": {
"version": "1.13.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.13.2.tgz",
"integrity": "sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==",
"license": "MIT",
"peer": true,
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.4",
"proxy-from-env": "^1.1.0"
}
},
"node_modules/balanced-match": { "node_modules/balanced-match": {
"version": "1.0.2", "version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
@ -1850,6 +1744,7 @@
"version": "1.5.1", "version": "1.5.1",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
"dev": true,
"funding": [ "funding": [
{ {
"type": "github", "type": "github",
@ -1915,12 +1810,6 @@
"readable-stream": "^3.4.0" "readable-stream": "^3.4.0"
} }
}, },
"node_modules/bluebird": {
"version": "3.7.2",
"resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz",
"integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==",
"license": "MIT"
},
"node_modules/body-parser": { "node_modules/body-parser": {
"version": "1.20.4", "version": "1.20.4",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz",
@ -2028,15 +1917,6 @@
"ieee754": "^1.1.13" "ieee754": "^1.1.13"
} }
}, },
"node_modules/buffer-crc32": {
"version": "0.2.13",
"resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
"integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==",
"license": "MIT",
"engines": {
"node": "*"
}
},
"node_modules/buffer-equal-constant-time": { "node_modules/buffer-equal-constant-time": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
@ -2148,19 +2028,6 @@
], ],
"license": "CC-BY-4.0" "license": "CC-BY-4.0"
}, },
"node_modules/cfb": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/cfb/-/cfb-1.2.2.tgz",
"integrity": "sha512-KfdUZsSOw19/ObEWasvBP/Ac4reZvAGauZhs6S/gqNhXhI7cKwvlH7ulj+dOEYnca4bm4SGo8C1bTAQvnTjgQA==",
"license": "Apache-2.0",
"dependencies": {
"adler-32": "~1.3.0",
"crc-32": "~1.2.0"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/chalk": { "node_modules/chalk": {
"version": "4.1.2", "version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
@ -2318,15 +2185,6 @@
"node": ">=0.10.0" "node": ">=0.10.0"
} }
}, },
"node_modules/codepage": {
"version": "1.15.0",
"resolved": "https://registry.npmjs.org/codepage/-/codepage-1.15.0.tgz",
"integrity": "sha512-3g6NUTPd/YtuuGrhMnOMRjFc+LJw/bnMp3+0r/Wcz3IXUuCosKRJvMphm5+Q+bvTVGcJJuRvVLuYba+WojaFaA==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/color-convert": { "node_modules/color-convert": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
@ -2354,18 +2212,6 @@
"color-support": "bin.js" "color-support": "bin.js"
} }
}, },
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"license": "MIT",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/commander": { "node_modules/commander": {
"version": "4.1.1", "version": "4.1.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz",
@ -2466,6 +2312,7 @@
"version": "1.0.3", "version": "1.0.3",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz",
"integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==",
"dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/cors": { "node_modules/cors": {
@ -2508,18 +2355,6 @@
} }
} }
}, },
"node_modules/crc-32": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz",
"integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==",
"license": "Apache-2.0",
"bin": {
"crc32": "bin/crc32.njs"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/cross-spawn": { "node_modules/cross-spawn": {
"version": "7.0.6", "version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@ -2598,15 +2433,6 @@
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
} }
}, },
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"license": "MIT",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/delegates": { "node_modules/delegates": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
@ -2803,21 +2629,6 @@
"node": ">= 0.4" "node": ">= 0.4"
} }
}, },
"node_modules/es-set-tostringtag": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.6",
"has-tostringtag": "^1.0.2",
"hasown": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/escalade": { "node_modules/escalade": {
"version": "3.2.0", "version": "3.2.0",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
@ -3192,15 +3003,6 @@
"reusify": "^1.0.4" "reusify": "^1.0.4"
} }
}, },
"node_modules/fd-slicer": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz",
"integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==",
"license": "MIT",
"dependencies": {
"pend": "~1.2.0"
}
},
"node_modules/fflate": { "node_modules/fflate": {
"version": "0.8.2", "version": "0.8.2",
"resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz",
@ -3334,26 +3136,6 @@
"dev": true, "dev": true,
"license": "ISC" "license": "ISC"
}, },
"node_modules/follow-redirects": {
"version": "1.15.11",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
"integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"license": "MIT",
"engines": {
"node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
}
},
"node_modules/foreground-child": { "node_modules/foreground-child": {
"version": "3.3.1", "version": "3.3.1",
"resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz",
@ -3400,22 +3182,6 @@
"webpack": "^5.11.0" "webpack": "^5.11.0"
} }
}, },
"node_modules/form-data": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz",
"integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/forwarded": { "node_modules/forwarded": {
"version": "0.2.0", "version": "0.2.0",
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
@ -3425,15 +3191,6 @@
"node": ">= 0.6" "node": ">= 0.6"
} }
}, },
"node_modules/frac": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/frac/-/frac-1.1.2.tgz",
"integrity": "sha512-w/XBfkibaTl3YDqASwfDUqkna4Z2p9cFSr1aHDt0WoMTECnRfBOv2WArlZILlqgWlmdIlALXGpM2AOhEk5W3IA==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/fresh": { "node_modules/fresh": {
"version": "0.5.2", "version": "0.5.2",
"resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
@ -3736,21 +3493,6 @@
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
} }
}, },
"node_modules/has-tostringtag": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
"license": "MIT",
"dependencies": {
"has-symbols": "^1.0.3"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-unicode": { "node_modules/has-unicode": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
@ -3867,12 +3609,6 @@
"node": ">= 4" "node": ">= 4"
} }
}, },
"node_modules/immediate": {
"version": "3.0.6",
"resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz",
"integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==",
"license": "MIT"
},
"node_modules/import-fresh": { "node_modules/import-fresh": {
"version": "3.3.1", "version": "3.3.1",
"resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz",
@ -4095,12 +3831,6 @@
"url": "https://github.com/sponsors/sindresorhus" "url": "https://github.com/sponsors/sindresorhus"
} }
}, },
"node_modules/isarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==",
"license": "MIT"
},
"node_modules/isexe": { "node_modules/isexe": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
@ -4272,48 +4002,6 @@
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/jszip": {
"version": "3.10.1",
"resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz",
"integrity": "sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==",
"license": "(MIT OR GPL-3.0-or-later)",
"dependencies": {
"lie": "~3.3.0",
"pako": "~1.0.2",
"readable-stream": "~2.3.6",
"setimmediate": "^1.0.5"
}
},
"node_modules/jszip/node_modules/readable-stream": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz",
"integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==",
"license": "MIT",
"dependencies": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.3",
"isarray": "~1.0.0",
"process-nextick-args": "~2.0.0",
"safe-buffer": "~5.1.1",
"string_decoder": "~1.1.1",
"util-deprecate": "~1.0.1"
}
},
"node_modules/jszip/node_modules/safe-buffer": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==",
"license": "MIT"
},
"node_modules/jszip/node_modules/string_decoder": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
"license": "MIT",
"dependencies": {
"safe-buffer": "~5.1.0"
}
},
"node_modules/jwa": { "node_modules/jwa": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.1.tgz", "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.1.tgz",
@ -4374,15 +4062,6 @@
"integrity": "sha512-r9kw4OA6oDO4dPXkOrXTkArQAafIKAU71hChInV4FxZ69dxCfbwQGDPzqR5/vea94wU705/3AZroEbSoeVWrQw==", "integrity": "sha512-r9kw4OA6oDO4dPXkOrXTkArQAafIKAU71hChInV4FxZ69dxCfbwQGDPzqR5/vea94wU705/3AZroEbSoeVWrQw==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/lie": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz",
"integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==",
"license": "MIT",
"dependencies": {
"immediate": "~3.0.5"
}
},
"node_modules/lines-and-columns": { "node_modules/lines-and-columns": {
"version": "1.2.4", "version": "1.2.4",
"resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
@ -5006,12 +4685,6 @@
"dev": true, "dev": true,
"license": "BlueOak-1.0.0" "license": "BlueOak-1.0.0"
}, },
"node_modules/pako": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz",
"integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==",
"license": "(MIT AND Zlib)"
},
"node_modules/parent-module": { "node_modules/parent-module": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@ -5115,12 +4788,6 @@
"node": ">=8" "node": ">=8"
} }
}, },
"node_modules/pend": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz",
"integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==",
"license": "MIT"
},
"node_modules/picocolors": { "node_modules/picocolors": {
"version": "1.1.1", "version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
@ -5141,20 +4808,6 @@
"url": "https://github.com/sponsors/jonschlinkert" "url": "https://github.com/sponsors/jonschlinkert"
} }
}, },
"node_modules/plist": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/plist/-/plist-3.1.0.tgz",
"integrity": "sha512-uysumyrvkUX0rX/dEVqt8gC3sTBzd4zoWfLeS29nb53imdaXVvLINYXTI2GNqzaMuvacNx4uJQ8+b3zXR0pkgQ==",
"license": "MIT",
"dependencies": {
"@xmldom/xmldom": "^0.8.8",
"base64-js": "^1.5.1",
"xmlbuilder": "^15.1.1"
},
"engines": {
"node": ">=10.4.0"
}
},
"node_modules/pluralize": { "node_modules/pluralize": {
"version": "8.0.0", "version": "8.0.0",
"resolved": "https://registry.npmjs.org/pluralize/-/pluralize-8.0.0.tgz", "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-8.0.0.tgz",
@ -5212,12 +4865,6 @@
"fsevents": "2.3.3" "fsevents": "2.3.3"
} }
}, },
"node_modules/process-nextick-args": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
"integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==",
"license": "MIT"
},
"node_modules/proxy-addr": { "node_modules/proxy-addr": {
"version": "2.0.7", "version": "2.0.7",
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
@ -5231,12 +4878,6 @@
"node": ">= 0.10" "node": ">= 0.10"
} }
}, },
"node_modules/proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
"license": "MIT"
},
"node_modules/punycode": { "node_modules/punycode": {
"version": "2.3.1", "version": "2.3.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
@ -5664,12 +5305,6 @@
"node": ">= 0.4" "node": ">= 0.4"
} }
}, },
"node_modules/setimmediate": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz",
"integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==",
"license": "MIT"
},
"node_modules/setprototypeof": { "node_modules/setprototypeof": {
"version": "1.2.0", "version": "1.2.0",
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
@ -5815,18 +5450,6 @@
"node": ">=0.10.0" "node": ">=0.10.0"
} }
}, },
"node_modules/ssf": {
"version": "0.11.2",
"resolved": "https://registry.npmjs.org/ssf/-/ssf-0.11.2.tgz",
"integrity": "sha512-+idbmIXoYET47hH+d7dfm2epdOMUDjqcB4648sTZ+t2JwoyBFL/insLfB/racrDmsKB3diwsDA696pZMieAC5g==",
"license": "Apache-2.0",
"dependencies": {
"frac": "~1.1.2"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/standard-as-callback": { "node_modules/standard-as-callback": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz", "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz",
@ -6629,24 +6252,6 @@
"string-width": "^1.0.2 || 2 || 3 || 4" "string-width": "^1.0.2 || 2 || 3 || 4"
} }
}, },
"node_modules/wmf": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wmf/-/wmf-1.0.2.tgz",
"integrity": "sha512-/p9K7bEh0Dj6WbXg4JG0xvLQmIadrner1bi45VMJTfnbVHsc7yIajZyoSoK60/dtVBs12Fm6WkUI5/3WAVsNMw==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/word": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/word/-/word-0.3.0.tgz",
"integrity": "sha512-OELeY0Q61OXpdUfTp+oweA/vtLVg5VDOXh+3he3PNzLGG/y0oylSOC1xRVj0+l4vQ3tj/bB1HVHv1ocXkQceFA==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/word-wrap": { "node_modules/word-wrap": {
"version": "1.2.5", "version": "1.2.5",
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
@ -6697,36 +6302,6 @@
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
"license": "ISC" "license": "ISC"
}, },
"node_modules/xlsx": {
"version": "0.18.5",
"resolved": "https://registry.npmjs.org/xlsx/-/xlsx-0.18.5.tgz",
"integrity": "sha512-dmg3LCjBPHZnQp5/F/+nnTa+miPJxUXB6vtk42YjBBKayDNagxGEeIdWApkYPOf3Z3pm3k62Knjzp7lMeTEtFQ==",
"license": "Apache-2.0",
"dependencies": {
"adler-32": "~1.3.0",
"cfb": "~1.2.1",
"codepage": "~1.15.0",
"crc-32": "~1.2.1",
"ssf": "~0.11.2",
"wmf": "~1.0.1",
"word": "~0.3.0"
},
"bin": {
"xlsx": "bin/xlsx.njs"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/xmlbuilder": {
"version": "15.1.1",
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-15.1.1.tgz",
"integrity": "sha512-yMqGBqtXyeN1e3TGYvgNgDVZ3j84W4cwkOXQswghol6APgZWaff9lnbvN7MHYJOiXsvGPXtjTYJEiC9J2wv9Eg==",
"license": "MIT",
"engines": {
"node": ">=8.0"
}
},
"node_modules/xtend": { "node_modules/xtend": {
"version": "4.0.2", "version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
@ -6752,16 +6327,6 @@
"node": ">=12" "node": ">=12"
} }
}, },
"node_modules/yauzl": {
"version": "2.10.0",
"resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
"integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==",
"license": "MIT",
"dependencies": {
"buffer-crc32": "~0.2.3",
"fd-slicer": "~1.1.0"
}
},
"node_modules/yocto-queue": { "node_modules/yocto-queue": {
"version": "0.1.0", "version": "0.1.0",
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",

View File

@ -15,28 +15,22 @@
"prisma:migrate": "prisma migrate dev" "prisma:migrate": "prisma migrate dev"
}, },
"dependencies": { "dependencies": {
"@nestjs/axios": "^3.1.3",
"@nestjs/common": "^10.3.0", "@nestjs/common": "^10.3.0",
"@nestjs/config": "^3.1.1", "@nestjs/config": "^3.1.1",
"@nestjs/core": "^10.3.0", "@nestjs/core": "^10.3.0",
"@nestjs/platform-express": "^10.3.0", "@nestjs/platform-express": "^10.3.0",
"@nestjs/swagger": "^7.1.17", "@nestjs/swagger": "^7.1.17",
"@prisma/client": "^5.7.1", "@prisma/client": "^5.7.1",
"adbkit-apkreader": "^3.2.0",
"axios": "^1.13.2",
"bcrypt": "^5.1.1", "bcrypt": "^5.1.1",
"class-transformer": "^0.5.1", "class-transformer": "^0.5.1",
"class-validator": "^0.14.0", "class-validator": "^0.14.0",
"decimal.js": "^10.4.3", "decimal.js": "^10.4.3",
"ioredis": "^5.3.2", "ioredis": "^5.3.2",
"jsonwebtoken": "^9.0.2", "jsonwebtoken": "^9.0.2",
"jszip": "^3.10.1",
"kafkajs": "^2.2.4", "kafkajs": "^2.2.4",
"plist": "^3.1.0",
"reflect-metadata": "^0.1.14", "reflect-metadata": "^0.1.14",
"rxjs": "^7.8.1", "rxjs": "^7.8.1",
"swagger-ui-express": "^5.0.0", "swagger-ui-express": "^5.0.0"
"xlsx": "^0.18.5"
}, },
"devDependencies": { "devDependencies": {
"@nestjs/cli": "^10.2.1", "@nestjs/cli": "^10.2.1",
@ -45,9 +39,7 @@
"@types/bcrypt": "^6.0.0", "@types/bcrypt": "^6.0.0",
"@types/express": "^4.17.21", "@types/express": "^4.17.21",
"@types/jsonwebtoken": "^9.0.10", "@types/jsonwebtoken": "^9.0.10",
"@types/multer": "^1.4.13",
"@types/node": "^20.10.5", "@types/node": "^20.10.5",
"@types/plist": "^3.0.5",
"eslint": "^8.56.0", "eslint": "^8.56.0",
"prettier": "^3.1.1", "prettier": "^3.1.1",
"prisma": "^5.7.1", "prisma": "^5.7.1",

View File

@ -1,8 +1,8 @@
-- ============================================================================ -- ============================================================================
-- mining-admin-service initialization migration -- mining-admin-service initialization migration
-- Merged from: 0001_init, 0002_fix_processed_event_composite_key, -- Merged from: 20260111000000_init, 20260112110000_add_referral_adoption_nickname,
-- 20250120000001_add_region_to_synced_system_contributions, -- 20260112150000_add_unlocked_bonus_tiers, 20260112200000_add_contribution_records_network_progress,
-- 20250120000002_add_synced_system_contribution_records -- 20260113000000_use_prisma_relation_mode, 20260113100000_add_distribution_summary
-- Note: uses Prisma relationMode = "prisma"; no FK constraints are created at the database layer -- Note: uses Prisma relationMode = "prisma"; no FK constraints are created at the database layer
-- ============================================================================ -- ============================================================================
@ -302,11 +302,10 @@ CREATE TABLE "synced_circulation_pools" (
CONSTRAINT "synced_circulation_pools_pkey" PRIMARY KEY ("id") CONSTRAINT "synced_circulation_pools_pkey" PRIMARY KEY ("id")
); );
-- CreateTable: system account contributions (from contribution-service) -- CreateTable
CREATE TABLE "synced_system_contributions" ( CREATE TABLE "synced_system_contributions" (
"id" TEXT NOT NULL, "id" TEXT NOT NULL,
"accountType" TEXT NOT NULL, "accountType" TEXT NOT NULL,
"region_code" TEXT,
"name" TEXT NOT NULL, "name" TEXT NOT NULL,
"contributionBalance" DECIMAL(30,8) NOT NULL DEFAULT 0, "contributionBalance" DECIMAL(30,8) NOT NULL DEFAULT 0,
"contributionNeverExpires" BOOLEAN NOT NULL DEFAULT false, "contributionNeverExpires" BOOLEAN NOT NULL DEFAULT false,
@ -688,12 +687,8 @@ CREATE UNIQUE INDEX "synced_daily_mining_stats_statDate_key" ON "synced_daily_mi
-- CreateIndex -- CreateIndex
CREATE UNIQUE INDEX "synced_day_klines_klineDate_key" ON "synced_day_klines"("klineDate"); CREATE UNIQUE INDEX "synced_day_klines_klineDate_key" ON "synced_day_klines"("klineDate");
-- CreateIndex: synced_system_contributions -- CreateIndex
-- Composite unique key on accountType + COALESCE(region_code, '__NULL__') CREATE UNIQUE INDEX "synced_system_contributions_accountType_key" ON "synced_system_contributions"("accountType");
-- Note: in PostgreSQL NULL != NULL, so a plain unique index on region_code cannot prevent duplicate (OPERATION, NULL) rows
CREATE UNIQUE INDEX "synced_system_contributions_accountType_region_code_key" ON "synced_system_contributions"("accountType", COALESCE(region_code, '__NULL__'));
CREATE INDEX "synced_system_contributions_accountType_idx" ON "synced_system_contributions"("accountType");
CREATE INDEX "synced_system_contributions_region_code_idx" ON "synced_system_contributions"("region_code");
-- CreateIndex -- CreateIndex
CREATE UNIQUE INDEX "cdc_sync_progress_sourceTopic_key" ON "cdc_sync_progress"("sourceTopic"); CREATE UNIQUE INDEX "cdc_sync_progress_sourceTopic_key" ON "cdc_sync_progress"("sourceTopic");
@ -701,8 +696,11 @@ CREATE UNIQUE INDEX "cdc_sync_progress_sourceTopic_key" ON "cdc_sync_progress"("
-- CreateIndex -- CreateIndex
CREATE INDEX "cdc_sync_progress_sourceService_idx" ON "cdc_sync_progress"("sourceService"); CREATE INDEX "cdc_sync_progress_sourceService_idx" ON "cdc_sync_progress"("sourceService");
-- CreateIndex (composite unique key replaces the standalone eventId-only unique constraint) -- CreateIndex
CREATE UNIQUE INDEX "processed_events_sourceService_eventId_key" ON "processed_events"("sourceService", "eventId"); CREATE UNIQUE INDEX "processed_events_eventId_key" ON "processed_events"("eventId");
-- CreateIndex
CREATE INDEX "processed_events_sourceService_idx" ON "processed_events"("sourceService");
-- CreateIndex -- CreateIndex
CREATE INDEX "processed_events_processedAt_idx" ON "processed_events"("processedAt"); CREATE INDEX "processed_events_processedAt_idx" ON "processed_events"("processedAt");
@ -862,40 +860,3 @@ CREATE UNIQUE INDEX "synced_fee_configs_fee_type_key" ON "synced_fee_configs"("f
-- AddForeignKey (keep the admin-related foreign keys) -- AddForeignKey (keep the admin-related foreign keys)
ALTER TABLE "audit_logs" ADD CONSTRAINT "audit_logs_adminId_fkey" FOREIGN KEY ("adminId") REFERENCES "admin_users"("id") ON DELETE RESTRICT ON UPDATE CASCADE; ALTER TABLE "audit_logs" ADD CONSTRAINT "audit_logs_adminId_fkey" FOREIGN KEY ("adminId") REFERENCES "admin_users"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- ============================================================================
-- Synced system account contribution detail table
-- Stores per-source details of system account contributions synced from contribution-service
-- ============================================================================
-- CreateTable: system account contribution details (from contribution-service)
CREATE TABLE "synced_system_contribution_records" (
"id" TEXT NOT NULL,
"original_record_id" BIGINT NOT NULL,
"account_type" TEXT NOT NULL,
"region_code" TEXT,
"source_adoption_id" BIGINT NOT NULL,
"source_account_sequence" TEXT NOT NULL,
-- Source type: FIXED_RATE (fixed ratio) / LEVEL_OVERFLOW (level overflow) / LEVEL_NO_ANCESTOR (no upline) / BONUS_TIER_1/2/3 (team bonus not unlocked)
"source_type" VARCHAR(30) NOT NULL,
-- Level depth (1-15), only meaningful for LEVEL_OVERFLOW and LEVEL_NO_ANCESTOR types
"level_depth" INTEGER,
"distribution_rate" DECIMAL(10,6) NOT NULL,
"amount" DECIMAL(30,10) NOT NULL,
"effective_date" DATE NOT NULL,
"expire_date" DATE,
"is_expired" BOOLEAN NOT NULL DEFAULT false,
"created_at" TIMESTAMP(3) NOT NULL,
"syncedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "synced_system_contribution_records_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "synced_system_contribution_records_original_record_id_key" ON "synced_system_contribution_records"("original_record_id");
CREATE INDEX "synced_system_contribution_records_account_type_region_code_idx" ON "synced_system_contribution_records"("account_type", "region_code");
CREATE INDEX "synced_system_contribution_records_source_adoption_id_idx" ON "synced_system_contribution_records"("source_adoption_id");
CREATE INDEX "synced_system_contribution_records_source_account_sequence_idx" ON "synced_system_contribution_records"("source_account_sequence");
CREATE INDEX "synced_system_contribution_records_source_type_idx" ON "synced_system_contribution_records"("source_type");
CREATE INDEX "synced_system_contribution_records_created_at_idx" ON "synced_system_contribution_records"("created_at" DESC);

View File

@ -0,0 +1,26 @@
-- ============================================================================
-- Fix the idempotency key of the processed_events table
-- Used for 100% exactly-once semantics of Outbox events between 2.0 services
-- ============================================================================
--
-- Problem: eventId alone was the unique key, but outbox IDs from different services can collide
-- Fix: use (sourceService, eventId) as a composite unique key
--
-- Notes on the key:
-- - sourceService: name of the service that emitted the event (e.g. "auth-service", "contribution-service")
-- - eventId: auto-increment ID from the sender's outbox table (a database auto-increment primary key, not a UUID)
-- - The combination is globally unique and can be used to trace the exact origin of an event
-- ============================================================================
-- Clear existing rows first (earlier data may conflict with the new constraint)
TRUNCATE TABLE "processed_events";
-- Drop the old unique index (eventId only)
DROP INDEX IF EXISTS "processed_events_eventId_key";
-- Drop the old plain index on sourceService
DROP INDEX IF EXISTS "processed_events_sourceService_idx";
-- Create the new composite unique index: (sourceService, eventId)
-- This combination guarantees uniqueness across services
CREATE UNIQUE INDEX "processed_events_sourceService_eventId_key" ON "processed_events"("sourceService", "eventId");


@@ -1,31 +0,0 @@
-- CreateEnum
CREATE TYPE "Platform" AS ENUM ('ANDROID', 'IOS');
-- CreateTable
CREATE TABLE "app_versions" (
"id" TEXT NOT NULL,
"platform" "Platform" NOT NULL,
"version_code" INTEGER NOT NULL,
"version_name" TEXT NOT NULL,
"build_number" TEXT NOT NULL,
"download_url" TEXT NOT NULL,
"file_size" BIGINT NOT NULL,
"file_sha256" TEXT NOT NULL,
"min_os_version" TEXT,
"changelog" TEXT NOT NULL,
"is_force_update" BOOLEAN NOT NULL DEFAULT false,
"is_enabled" BOOLEAN NOT NULL DEFAULT true,
"release_date" TIMESTAMP(3),
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP(3) NOT NULL,
"created_by" TEXT NOT NULL,
"updated_by" TEXT,
CONSTRAINT "app_versions_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE INDEX "app_versions_platform_is_enabled_idx" ON "app_versions"("platform", "is_enabled");
-- CreateIndex
CREATE INDEX "app_versions_platform_version_code_idx" ON "app_versions"("platform", "version_code");


@@ -422,62 +422,16 @@ model SyncedCirculationPool {
model SyncedSystemContribution {
id String @id @default(uuid())
- accountType String // OPERATION / PROVINCE / CITY / HEADQUARTERS
+ accountType String @unique // OPERATION, PROVINCE, CITY, HEADQUARTERS
regionCode String? @map("region_code") // province/city code, e.g. 440000, 440100
name String
contributionBalance Decimal @db.Decimal(30, 8) @default(0)
contributionNeverExpires Boolean @default(false)
syncedAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@unique([accountType, regionCode])
@@index([accountType])
@@index([regionCode])
@@map("synced_system_contributions") @@map("synced_system_contributions")
} }
// =============================================================================
// CDC sync table - system-account contribution details (from contribution-service)
// =============================================================================
model SyncedSystemContributionRecord {
id String @id @default(uuid())
originalRecordId BigInt @unique @map("original_record_id") // original ID in contribution-service
// System-account info (stored redundantly for easier querying)
accountType String @map("account_type") // OPERATION / PROVINCE / CITY / HEADQUARTERS
regionCode String? @map("region_code") // province/city code
// Source info
sourceAdoptionId BigInt @map("source_adoption_id") // source adoption ID
sourceAccountSequence String @map("source_account_sequence") // adopter account sequence
// Source type: FIXED_RATE (fixed ratio) / LEVEL_OVERFLOW (level overflow) / LEVEL_NO_ANCESTOR (no upline) / BONUS_TIER_1/2/3 (team bonus not yet unlocked)
sourceType String @map("source_type") @db.VarChar(30)
// Level depth (1-15), only meaningful for LEVEL_OVERFLOW and LEVEL_NO_ANCESTOR source types
levelDepth Int? @map("level_depth")
// Distribution parameters
distributionRate Decimal @map("distribution_rate") @db.Decimal(10, 6) // distribution rate
amount Decimal @map("amount") @db.Decimal(30, 10) // contribution amount
// Validity window
effectiveDate DateTime @map("effective_date") @db.Date // effective date
expireDate DateTime? @map("expire_date") @db.Date // expire date (usually null for system accounts, i.e. never expires)
isExpired Boolean @default(false) @map("is_expired")
createdAt DateTime @map("created_at") // creation time of the original record
syncedAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@index([accountType, regionCode])
@@index([sourceAdoptionId])
@@index([sourceAccountSequence])
@@index([sourceType])
@@index([createdAt(sort: Desc)])
@@map("synced_system_contribution_records")
}
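A minimal sketch (not part of the diff) of how a CDC consumer might project a contribution-service row into this model, assuming a Prisma client generated from this schema; the Debezium-style payload shape and its field names are assumptions:

```typescript
// Sketch only: idempotent projection of a CDC change event into SyncedSystemContributionRecord.
// The payload shape below is an assumed flattened "after" image, not taken from this diff.
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

interface ContributionRecordCdcPayload {
  id: bigint;                 // becomes original_record_id on the sync side
  account_type: string;
  region_code: string | null;
  source_adoption_id: bigint;
  source_account_sequence: string;
  source_type: string;
  level_depth: number | null;
  distribution_rate: string;  // decimals assumed to arrive as strings
  amount: string;
  effective_date: string;
  expire_date: string | null;
  is_expired: boolean;
  created_at: string;
}

async function projectContributionRecord(after: ContributionRecordCdcPayload): Promise<void> {
  const data = {
    accountType: after.account_type,
    regionCode: after.region_code,
    sourceAdoptionId: after.source_adoption_id,
    sourceAccountSequence: after.source_account_sequence,
    sourceType: after.source_type,
    levelDepth: after.level_depth,
    distributionRate: after.distribution_rate,
    amount: after.amount,
    effectiveDate: new Date(after.effective_date),
    expireDate: after.expire_date ? new Date(after.expire_date) : null,
    isExpired: after.is_expired,
    createdAt: new Date(after.created_at),
  };

  // originalRecordId is @unique, so replays of the same change event converge on one row.
  await prisma.syncedSystemContributionRecord.upsert({
    where: { originalRecordId: after.id },
    create: { originalRecordId: after.id, ...data },
    update: data,
  });
}
```

Keying the upsert on originalRecordId keeps the projection idempotent even when the same CDC event is delivered more than once.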
// =============================================================================
// CDC sync progress tracking
// =============================================================================
@@ -891,38 +845,3 @@ model SyncedFeeConfig {
@@map("synced_fee_configs") @@map("synced_fee_configs")
} }
// =============================================================================
// App version management (Mining App Upgrade)
// =============================================================================
/// Platform type
enum Platform {
ANDROID
IOS
}
/// App version
model AppVersion {
id String @id @default(uuid())
platform Platform
versionCode Int @map("version_code") // Android: versionCode, iOS: CFBundleVersion
versionName String @map("version_name") // user-visible version string, e.g. "1.2.3"
buildNumber String @map("build_number") // build number
downloadUrl String @map("download_url") // APK/IPA download URL
fileSize BigInt @map("file_size") // file size in bytes
fileSha256 String @map("file_sha256") // SHA-256 checksum of the file
minOsVersion String? @map("min_os_version") // minimum required OS version
changelog String @db.Text // changelog
isForceUpdate Boolean @default(false) @map("is_force_update") // whether the update is forced
isEnabled Boolean @default(true) @map("is_enabled") // whether this version is enabled
releaseDate DateTime? @map("release_date") // release date
createdAt DateTime @default(now()) @map("created_at")
updatedAt DateTime @updatedAt @map("updated_at")
createdBy String @map("created_by") // creator admin ID
updatedBy String? @map("updated_by") // ID of the last admin to update
@@index([platform, isEnabled])
@@index([platform, versionCode])
@@map("app_versions")
}
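A minimal sketch (not part of the diff) of an update-check query against the AppVersion model above, assuming a Prisma client generated from it; the function name and response shape are illustrative:

```typescript
// Sketch only: find the newest enabled build for a platform that is ahead of the client.
import { PrismaClient, Platform } from '@prisma/client';

const prisma = new PrismaClient();

async function checkForUpdate(platform: Platform, currentVersionCode: number) {
  const latest = await prisma.appVersion.findFirst({
    where: { platform, isEnabled: true, versionCode: { gt: currentVersionCode } },
    orderBy: { versionCode: 'desc' },
  });

  if (!latest) return { updateAvailable: false as const };

  return {
    updateAvailable: true as const,
    forceUpdate: latest.isForceUpdate,
    versionName: latest.versionName,
    downloadUrl: latest.downloadUrl,
    fileSha256: latest.fileSha256, // client should verify this before installing
    changelog: latest.changelog,
  };
}
```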


@@ -1,45 +1,25 @@
import { Module } from '@nestjs/common';
import { MulterModule } from '@nestjs/platform-express';
import { ApplicationModule } from '../application/application.module';
import { AuthController } from './controllers/auth.controller';
import { DashboardController } from './controllers/dashboard.controller';
import { ConfigController } from './controllers/config.controller';
import { InitializationController } from './controllers/initialization.controller';
import { AuditController } from './controllers/audit.controller';
import { HealthController } from './controllers/health.controller';
import { UsersController } from './controllers/users.controller';
import { SystemAccountsController } from './controllers/system-accounts.controller';
import { ReportsController } from './controllers/reports.controller';
import { ManualMiningController } from './controllers/manual-mining.controller';
import { PendingContributionsController } from './controllers/pending-contributions.controller';
import { BatchMiningController } from './controllers/batch-mining.controller';
import { VersionController } from './controllers/version.controller';
import { UpgradeVersionController } from './controllers/upgrade-version.controller';
import { MobileVersionController } from './controllers/mobile-version.controller';
@Module({
- imports: [
+ imports: [ApplicationModule],
ApplicationModule,
MulterModule.register({
limits: {
fileSize: 500 * 1024 * 1024, // 500MB
},
}),
],
controllers: [
AuthController,
DashboardController,
ConfigController,
InitializationController,
AuditController,
HealthController,
UsersController,
SystemAccountsController,
ReportsController,
ManualMiningController,
PendingContributionsController,
BatchMiningController,
VersionController,
UpgradeVersionController,
MobileVersionController,
],
})
export class ApiModule {}
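For context on the 500MB MulterModule limit registered on one side of this diff: a hypothetical upload endpoint for APK/IPA packages. The controller, route, and response below are illustrative; they are not the actual VersionController/UpgradeVersionController from the import list, whose implementations are not shown here.

```typescript
// Sketch only: a package-upload endpoint that would rely on the module-level
// MulterModule fileSize limit (500MB). Requires @types/multer for Express.Multer.File.
import { Controller, Post, UploadedFile, UseInterceptors } from '@nestjs/common';
import { FileInterceptor } from '@nestjs/platform-express';
import { createHash } from 'crypto';

@Controller('admin/app-versions')
export class PackageUploadController {
  @Post('upload')
  @UseInterceptors(FileInterceptor('file')) // inherits the 500MB limit registered on the module
  async upload(@UploadedFile() file: Express.Multer.File) {
    // Compute the checksum that a record like app_versions.file_sha256 would store.
    const fileSha256 = createHash('sha256').update(file.buffer).digest('hex');
    return {
      originalName: file.originalname,
      fileSize: file.size,
      fileSha256,
    };
  }
}
```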

Some files were not shown because too many files have changed in this diff.