Compare commits

..

No commits in common. "main" and "v2.0.0-cdc-sync-fix" have entirely different histories.

370 changed files with 4018 additions and 48090 deletions

View File

@ -767,38 +767,7 @@
"Bash(git -C \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\" commit -m \"$\\(cat <<''EOF''\nfix\\(mining-app\\): update splash page theme and fix token refresh\n\n- Update splash_page.dart to orange theme \\(#FF6B00\\) matching other pages\n- Change app name from \"榴莲挖矿\" to \"榴莲生态\"\n- Fix refreshTokenIfNeeded to properly throw on failure instead of\n silently calling logout \\(which caused Riverpod ref errors\\)\n- Clear local storage directly on refresh failure without remote API call\n\nCo-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>\nEOF\n\\)\")", "Bash(git -C \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\" commit -m \"$\\(cat <<''EOF''\nfix\\(mining-app\\): update splash page theme and fix token refresh\n\n- Update splash_page.dart to orange theme \\(#FF6B00\\) matching other pages\n- Change app name from \"榴莲挖矿\" to \"榴莲生态\"\n- Fix refreshTokenIfNeeded to properly throw on failure instead of\n silently calling logout \\(which caused Riverpod ref errors\\)\n- Clear local storage directly on refresh failure without remote API call\n\nCo-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>\nEOF\n\\)\")",
"Bash(python3 -c \" import sys content = sys.stdin.read\\(\\) old = '''''' done # 清空 processed_cdc_events 表(因为 migration 时可能已经消费了一些消息) # 这是事务性幂等消费的关键:重置 Kafka offset 后必须同时清空幂等记录 log_info \"\"Truncating processed_cdc_events tables to allow re-consumption...\"\" for db in \"\"rwa_contribution\"\" \"\"rwa_auth\"\"; do if run_psql \"\"$db\"\" \"\"TRUNCATE TABLE processed_cdc_events;\"\" 2>/dev/null; then log_success \"\"Truncated processed_cdc_events in $db\"\" else log_warn \"\"Could not truncate processed_cdc_events in $db \\(table may not exist yet\\)\"\" fi done log_step \"\"Step 9/18: Starting 2.0 services...\"\"'''''' new = '''''' done # 清空 processed_cdc_events 表(因为 migration 时可能已经消费了一些消息) # 这是事务性幂等消费的关键:重置 Kafka offset 后必须同时清空幂等记录 log_info \"\"Truncating processed_cdc_events tables to allow re-consumption...\"\" for db in \"\"rwa_contribution\"\" \"\"rwa_auth\"\"; do if run_psql \"\"$db\"\" \"\"TRUNCATE TABLE processed_cdc_events;\"\" 2>/dev/null; then log_success \"\"Truncated processed_cdc_events in $db\"\" else log_warn \"\"Could not truncate processed_cdc_events in $db \\(table may not exist yet\\)\"\" fi done log_step \"\"Step 9/18: Starting 2.0 services...\"\"'''''' print\\(content.replace\\(old, new\\)\\) \")", "Bash(python3 -c \" import sys content = sys.stdin.read\\(\\) old = '''''' done # 清空 processed_cdc_events 表(因为 migration 时可能已经消费了一些消息) # 这是事务性幂等消费的关键:重置 Kafka offset 后必须同时清空幂等记录 log_info \"\"Truncating processed_cdc_events tables to allow re-consumption...\"\" for db in \"\"rwa_contribution\"\" \"\"rwa_auth\"\"; do if run_psql \"\"$db\"\" \"\"TRUNCATE TABLE processed_cdc_events;\"\" 2>/dev/null; then log_success \"\"Truncated processed_cdc_events in $db\"\" else log_warn \"\"Could not truncate processed_cdc_events in $db \\(table may not exist yet\\)\"\" fi done log_step \"\"Step 9/18: Starting 2.0 services...\"\"'''''' new = '''''' done # 清空 processed_cdc_events 表(因为 migration 时可能已经消费了一些消息) # 这是事务性幂等消费的关键:重置 Kafka offset 后必须同时清空幂等记录 log_info \"\"Truncating processed_cdc_events tables to allow re-consumption...\"\" for db in \"\"rwa_contribution\"\" \"\"rwa_auth\"\"; do if run_psql \"\"$db\"\" \"\"TRUNCATE TABLE processed_cdc_events;\"\" 2>/dev/null; then log_success \"\"Truncated processed_cdc_events in $db\"\" else log_warn \"\"Could not truncate processed_cdc_events in $db \\(table may not exist yet\\)\"\" fi done log_step \"\"Step 9/18: Starting 2.0 services...\"\"'''''' print\\(content.replace\\(old, new\\)\\) \")",
"Bash(git rm:*)", "Bash(git rm:*)",
"Bash(echo \"请在服务器运行以下命令检查 outbox 事件:\n\ndocker exec -it rwa-postgres psql -U rwa_user -d rwa_contribution -c \"\"\nSELECT id, event_type, aggregate_id, \n payload->>''sourceType'' as source_type,\n payload->>''accountSequence'' as account_seq,\n payload->>''sourceAccountSequence'' as source_account_seq,\n payload->>''bonusTier'' as bonus_tier\nFROM outbox_events \nWHERE payload->>''accountSequence'' = ''D25122900007''\nORDER BY id;\n\"\"\")", "Bash(echo \"请在服务器运行以下命令检查 outbox 事件:\n\ndocker exec -it rwa-postgres psql -U rwa_user -d rwa_contribution -c \"\"\nSELECT id, event_type, aggregate_id, \n payload->>''sourceType'' as source_type,\n payload->>''accountSequence'' as account_seq,\n payload->>''sourceAccountSequence'' as source_account_seq,\n payload->>''bonusTier'' as bonus_tier\nFROM outbox_events \nWHERE payload->>''accountSequence'' = ''D25122900007''\nORDER BY id;\n\"\"\")"
"Bash(ssh -o ConnectTimeout=10 ceshi@14.215.128.96 'find /home/ceshi/rwadurian/frontend/mining-admin-web -name \"\"*.tsx\"\" -o -name \"\"*.ts\"\" | xargs grep -l \"\"用户管理\\\\|users\"\" 2>/dev/null | head -10')",
"Bash(dir /s /b \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\")",
"Bash(dir /b \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\backend\\\\services\")",
"Bash(ssh -J ceshi@103.39.231.231 ceshi@192.168.1.111 \"curl -s http://localhost:3021/api/v2/admin/status\")",
"Bash(del \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\frontend\\\\mining-app\\\\lib\\\\domain\\\\usecases\\\\trading\\\\buy_shares.dart\")",
"Bash(del \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\frontend\\\\mining-app\\\\lib\\\\domain\\\\usecases\\\\trading\\\\sell_shares.dart\")",
"Bash(ls -la \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\frontend\\\\mining-app\\\\lib\\\\presentation\\\\pages\"\" 2>/dev/null || dir /b \"c:UsersdongDesktoprwadurianfrontendmining-applibpresentationpages \")",
"Bash(cd:*)",
"Bash(ssh -o StrictHostKeyChecking=no -J ceshi@103.39.231.231 ceshi@192.168.1.111 \"curl -s http://localhost:3020/api/v1/ | head -100\")",
"Bash(ssh -o StrictHostKeyChecking=no -J ceshi@103.39.231.231 ceshi@192.168.1.111:*)",
"Bash(bc:*)",
"Bash(DATABASE_URL=\"postgresql://postgres:password@localhost:5432/mining_db?schema=public\" npx prisma migrate diff:*)",
"Bash(git status:*)",
"Bash(xargs cat:*)",
"Bash(ssh -o ProxyJump=ceshi@103.39.231.231 ceshi@192.168.1.111 \"docker ps | grep mining\")",
"Bash(dir /b \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\backend\\\\services\\\\trading-service\\\\src\\\\application\\\\services\")",
"Bash(DATABASE_URL=\"postgresql://postgres:password@localhost:5432/trading_db?schema=public\" npx prisma migrate dev:*)",
"Bash(dir /b \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\backend\\\\services\\\\mining-admin-service\\\\src\")",
"Bash(ssh -o ProxyJump=ceshi@103.39.231.231 ceshi@192.168.1.111 \"cd /home/ceshi/rwadurian/backend/service && ls -la\")",
"Bash(ssh -o ProxyJump=ceshi@103.39.231.231 ceshi@192.168.1.111 \"ls -la /home/ceshi/rwadurian/backend/\")",
"Bash(ssh -o ProxyJump=ceshi@103.39.231.231 ceshi@192.168.1.111 \"ls -la /home/ceshi/rwadurian/backend/services/\")",
"Bash(where:*)",
"Bash(npx md-to-pdf:*)",
"Bash(ssh -J ceshi@103.39.231.231 ceshi@192.168.1.111 \"curl -s ''http://localhost:3000/api/price/klines?period=1h&limit=5'' | head -500\")",
"Bash(dir /b /ad \"c:\\\\Users\\\\dong\\\\Desktop\\\\rwadurian\\\\backend\")",
"Bash(timeout 30 cat:*)",
"Bash(npm run lint)",
"Bash(ssh -o ProxyCommand=\"ssh -W %h:%p ceshi@103.39.231.231\" -o StrictHostKeyChecking=no ceshi@192.168.1.111 \"cat /home/ceshi/rwadurian/backend/services/mining-service/src/application/services/batch-mining.service.ts | head -250\")",
"Bash(ssh -o ProxyCommand=\"ssh -W %h:%p ceshi@103.39.231.231\" -o StrictHostKeyChecking=no ceshi@192.168.1.111 \"docker logs rwa-mining-admin-service --tail 50 2>&1 | grep ''第一条数据\\\\|最后一条数据''\")",
"Bash(npx xlsx-cli 挖矿.xlsx)",
"Bash(DATABASE_URL=\"postgresql://postgres:password@localhost:5432/mining_db?schema=public\" npx prisma migrate dev:*)"
], ],
"deny": [], "deny": [],
"ask": [] "ask": []

View File

@ -309,42 +309,24 @@ services:
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Trading Service 2.0 - 交易服务 # Trading Service 2.0 - 交易服务
# 前端路径: /api/v2/trading/... -> 后端路径: /api/v2/...
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
- name: trading-service-v2 - name: trading-service-v2
url: http://192.168.1.111:3022/api/v2 url: http://192.168.1.111:3022
routes: routes:
- name: trading-v2-api - name: trading-v2-api
paths: paths:
- /api/v2/trading - /api/v2/trading
strip_path: true strip_path: false
- name: trading-v2-health - name: trading-v2-health
paths: paths:
- /api/v2/trading/health - /api/v2/trading/health
strip_path: true strip_path: false
# ---------------------------------------------------------------------------
# Trading Service WebSocket - 价格实时推送
# WebSocket 连接: wss://api.xxx.com/ws/price -> ws://192.168.1.111:3022/price
# Kong 会自动处理 HTTP -> WebSocket 升级,所以 protocols 只需要 http/https
# ---------------------------------------------------------------------------
- name: trading-ws-service
url: http://192.168.1.111:3022
routes:
- name: trading-ws-price
paths:
- /ws/price
strip_path: true
protocols:
- http
- https
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Mining Admin Service 2.0 - 挖矿管理后台服务 # Mining Admin Service 2.0 - 挖矿管理后台服务
# 前端路径: /api/v2/mining-admin/... -> 后端路径: /api/v2/...
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
- name: mining-admin-service - name: mining-admin-service
url: http://192.168.1.111:3023/api/v2 url: http://192.168.1.111:3023/api/v1
routes: routes:
- name: mining-admin-api - name: mining-admin-api
paths: paths:
@ -374,19 +356,18 @@ services:
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Mining Wallet Service 2.0 - 挖矿钱包服务 # Mining Wallet Service 2.0 - 挖矿钱包服务
# 前端路径: /api/v2/mining-wallet/... -> 后端路径: /api/v2/...
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
- name: mining-wallet-service - name: mining-wallet-service
url: http://192.168.1.111:3025/api/v2 url: http://192.168.1.111:3025
routes: routes:
- name: mining-wallet-api - name: mining-wallet-api
paths: paths:
- /api/v2/mining-wallet - /api/v2/mining-wallet
strip_path: true strip_path: false
- name: mining-wallet-health - name: mining-wallet-health
paths: paths:
- /api/v2/mining-wallet/health - /api/v2/mining-wallet/health
strip_path: true strip_path: false
# ============================================================================= # =============================================================================
# Plugins - 全局插件配置 # Plugins - 全局插件配置

View File

@ -39,9 +39,8 @@ android {
} }
// NDK configuration for TSS native library // NDK configuration for TSS native library
// Only include ARM ABIs for real devices (x86_64 is for emulators only)
ndk { ndk {
abiFilters += listOf("arm64-v8a", "armeabi-v7a") abiFilters += listOf("arm64-v8a", "armeabi-v7a", "x86_64")
} }
} }

View File

@ -29,9 +29,6 @@ data class ShareRecordEntity(
@ColumnInfo(name = "party_index") @ColumnInfo(name = "party_index")
val partyIndex: Int, val partyIndex: Int,
@ColumnInfo(name = "party_id")
val partyId: String, // The original partyId used during keygen - required for signing
@ColumnInfo(name = "address") @ColumnInfo(name = "address")
val address: String, val address: String,
@ -93,159 +90,15 @@ interface AppSettingDao {
suspend fun setValue(setting: AppSettingEntity) suspend fun setValue(setting: AppSettingEntity)
} }
/**
* 转账记录数据库实体
* Entity for storing transaction history records
*/
@Entity(
tableName = "transaction_records",
foreignKeys = [
ForeignKey(
entity = ShareRecordEntity::class,
parentColumns = ["id"],
childColumns = ["share_id"],
onDelete = ForeignKey.CASCADE // 删除钱包时自动删除关联的转账记录
)
],
indices = [
Index(value = ["share_id"]),
Index(value = ["tx_hash"], unique = true),
Index(value = ["from_address"]),
Index(value = ["to_address"]),
Index(value = ["created_at"])
]
)
data class TransactionRecordEntity(
@PrimaryKey(autoGenerate = true)
val id: Long = 0,
@ColumnInfo(name = "share_id")
val shareId: Long, // 关联的钱包ID
@ColumnInfo(name = "from_address")
val fromAddress: String, // 发送方地址
@ColumnInfo(name = "to_address")
val toAddress: String, // 接收方地址
@ColumnInfo(name = "amount")
val amount: String, // 转账金额(人类可读格式)
@ColumnInfo(name = "token_type")
val tokenType: String, // 代币类型KAVA, GREEN_POINTS, ENERGY_POINTS, FUTURE_POINTS
@ColumnInfo(name = "tx_hash")
val txHash: String, // 交易哈希
@ColumnInfo(name = "gas_price")
val gasPrice: String, // Gas 价格Wei
@ColumnInfo(name = "gas_used")
val gasUsed: String = "", // 实际消耗的 Gas
@ColumnInfo(name = "tx_fee")
val txFee: String = "", // 交易手续费
@ColumnInfo(name = "status")
val status: String, // 交易状态PENDING, CONFIRMED, FAILED
@ColumnInfo(name = "direction")
val direction: String, // 交易方向SENT, RECEIVED
@ColumnInfo(name = "note")
val note: String = "", // 备注
@ColumnInfo(name = "created_at")
val createdAt: Long = System.currentTimeMillis(),
@ColumnInfo(name = "confirmed_at")
val confirmedAt: Long? = null, // 确认时间
@ColumnInfo(name = "block_number")
val blockNumber: Long? = null // 区块高度
)
/**
* 转账记录 DAO
* Data Access Object for transaction records
*/
@Dao
interface TransactionRecordDao {
@Insert(onConflict = OnConflictStrategy.REPLACE)
suspend fun insertRecord(record: TransactionRecordEntity): Long
@Query("SELECT * FROM transaction_records WHERE id = :id")
suspend fun getRecordById(id: Long): TransactionRecordEntity?
@Query("SELECT * FROM transaction_records WHERE tx_hash = :txHash")
suspend fun getRecordByTxHash(txHash: String): TransactionRecordEntity?
@Query("SELECT * FROM transaction_records WHERE share_id = :shareId ORDER BY created_at DESC")
fun getRecordsForShare(shareId: Long): Flow<List<TransactionRecordEntity>>
@Query("SELECT * FROM transaction_records WHERE share_id = :shareId ORDER BY created_at DESC LIMIT :limit OFFSET :offset")
suspend fun getRecordsForSharePaged(shareId: Long, limit: Int, offset: Int): List<TransactionRecordEntity>
@Query("SELECT * FROM transaction_records WHERE share_id = :shareId AND token_type = :tokenType ORDER BY created_at DESC")
fun getRecordsForShareByToken(shareId: Long, tokenType: String): Flow<List<TransactionRecordEntity>>
@Query("SELECT * FROM transaction_records WHERE status = 'PENDING' ORDER BY created_at ASC")
suspend fun getPendingRecords(): List<TransactionRecordEntity>
@Query("UPDATE transaction_records SET status = :status, confirmed_at = :confirmedAt, block_number = :blockNumber, gas_used = :gasUsed, tx_fee = :txFee WHERE id = :id")
suspend fun updateStatus(id: Long, status: String, confirmedAt: Long?, blockNumber: Long?, gasUsed: String, txFee: String)
@Query("""
SELECT
COUNT(*) as total_count,
SUM(CASE WHEN direction = 'SENT' THEN 1 ELSE 0 END) as sent_count,
SUM(CASE WHEN direction = 'RECEIVED' THEN 1 ELSE 0 END) as received_count
FROM transaction_records
WHERE share_id = :shareId AND token_type = :tokenType
""")
suspend fun getTransactionStats(shareId: Long, tokenType: String): TransactionStats
@Query("SELECT COALESCE(SUM(CAST(amount AS REAL)), 0) FROM transaction_records WHERE share_id = :shareId AND token_type = :tokenType AND direction = 'SENT' AND status = 'CONFIRMED'")
suspend fun getTotalSentAmount(shareId: Long, tokenType: String): Double
@Query("SELECT COALESCE(SUM(CAST(amount AS REAL)), 0) FROM transaction_records WHERE share_id = :shareId AND token_type = :tokenType AND direction = 'RECEIVED' AND status = 'CONFIRMED'")
suspend fun getTotalReceivedAmount(shareId: Long, tokenType: String): Double
@Query("SELECT COALESCE(SUM(CAST(tx_fee AS REAL)), 0) FROM transaction_records WHERE share_id = :shareId AND direction = 'SENT' AND status = 'CONFIRMED'")
suspend fun getTotalTxFee(shareId: Long): Double
@Query("DELETE FROM transaction_records WHERE id = :id")
suspend fun deleteRecordById(id: Long)
@Query("DELETE FROM transaction_records WHERE share_id = :shareId")
suspend fun deleteRecordsForShare(shareId: Long)
@Query("SELECT COUNT(*) FROM transaction_records WHERE share_id = :shareId")
suspend fun getRecordCount(shareId: Long): Int
}
/**
* 转账统计数据类
*/
data class TransactionStats(
@ColumnInfo(name = "total_count")
val totalCount: Int,
@ColumnInfo(name = "sent_count")
val sentCount: Int,
@ColumnInfo(name = "received_count")
val receivedCount: Int
)
/** /**
* Room database * Room database
*/ */
@Database( @Database(
entities = [ShareRecordEntity::class, AppSettingEntity::class, TransactionRecordEntity::class], entities = [ShareRecordEntity::class, AppSettingEntity::class],
version = 4, // Version 4: added transaction_records table for transfer history version = 2,
exportSchema = false exportSchema = false
) )
abstract class TssDatabase : RoomDatabase() { abstract class TssDatabase : RoomDatabase() {
abstract fun shareRecordDao(): ShareRecordDao abstract fun shareRecordDao(): ShareRecordDao
abstract fun appSettingDao(): AppSettingDao abstract fun appSettingDao(): AppSettingDao
abstract fun transactionRecordDao(): TransactionRecordDao
} }

View File

@ -5,8 +5,6 @@ import com.durian.tssparty.data.local.AppSettingDao
import com.durian.tssparty.data.local.AppSettingEntity import com.durian.tssparty.data.local.AppSettingEntity
import com.durian.tssparty.data.local.ShareRecordDao import com.durian.tssparty.data.local.ShareRecordDao
import com.durian.tssparty.data.local.ShareRecordEntity import com.durian.tssparty.data.local.ShareRecordEntity
import com.durian.tssparty.data.local.TransactionRecordDao
import com.durian.tssparty.data.local.TransactionRecordEntity
import com.durian.tssparty.data.local.TssNativeBridge import com.durian.tssparty.data.local.TssNativeBridge
import com.durian.tssparty.data.remote.GrpcClient import com.durian.tssparty.data.remote.GrpcClient
import com.durian.tssparty.data.remote.GrpcConnectionEvent import com.durian.tssparty.data.remote.GrpcConnectionEvent
@ -33,8 +31,7 @@ class TssRepository @Inject constructor(
private val grpcClient: GrpcClient, private val grpcClient: GrpcClient,
private val tssNativeBridge: TssNativeBridge, private val tssNativeBridge: TssNativeBridge,
private val shareRecordDao: ShareRecordDao, private val shareRecordDao: ShareRecordDao,
private val appSettingDao: AppSettingDao, private val appSettingDao: AppSettingDao
private val transactionRecordDao: TransactionRecordDao
) { ) {
private val _currentSession = MutableStateFlow<TssSession?>(null) private val _currentSession = MutableStateFlow<TssSession?>(null)
val currentSession: StateFlow<TssSession?> = _currentSession.asStateFlow() val currentSession: StateFlow<TssSession?> = _currentSession.asStateFlow()
@ -51,12 +48,6 @@ class TssRepository @Inject constructor(
// partyId is loaded once from database in registerParty() and cached here // partyId is loaded once from database in registerParty() and cached here
// This matches Electron's getOrCreatePartyId() pattern // This matches Electron's getOrCreatePartyId() pattern
private lateinit var partyId: String private lateinit var partyId: String
// currentSigningPartyId: The partyId to use for the current signing session
// This may differ from partyId when signing with a restored wallet backup
// CRITICAL: For backup/restore to work, signing must use the original partyId from keygen
private var currentSigningPartyId: String? = null
private var messageCollectionJob: Job? = null private var messageCollectionJob: Job? = null
private var sessionEventJob: Job? = null private var sessionEventJob: Job? = null
@ -1060,7 +1051,6 @@ class TssRepository @Inject constructor(
val address = AddressUtils.deriveEvmAddress(publicKeyBytes) val address = AddressUtils.deriveEvmAddress(publicKeyBytes)
// Save share record (use actual thresholds and party index from backend) // Save share record (use actual thresholds and party index from backend)
// CRITICAL: Save partyId - this is required for signing after backup/restore
val shareEntity = ShareRecordEntity( val shareEntity = ShareRecordEntity(
sessionId = sessionId, sessionId = sessionId,
publicKey = result.publicKey, publicKey = result.publicKey,
@ -1068,7 +1058,6 @@ class TssRepository @Inject constructor(
thresholdT = actualThresholdT, thresholdT = actualThresholdT,
thresholdN = actualThresholdN, thresholdN = actualThresholdN,
partyIndex = actualPartyIndex, partyIndex = actualPartyIndex,
partyId = partyId,
address = address address = address
) )
val id = shareRecordDao.insertShare(shareEntity) val id = shareRecordDao.insertShare(shareEntity)
@ -1126,26 +1115,14 @@ class TssRepository @Inject constructor(
// Note: Password is verified during actual sign execution, same as Electron // Note: Password is verified during actual sign execution, same as Electron
// CRITICAL: Use the original partyId from the share (keygen time) for signing
// This is essential for backup/restore - the partyId must match what was used during keygen
// If shareEntity.partyId is empty (legacy data), fall back to current device's partyId
val signingPartyId = if (shareEntity.partyId.isNotEmpty()) {
shareEntity.partyId
} else {
android.util.Log.w("TssRepository", "Share has no partyId (legacy data), using current device partyId")
partyId
}
currentSigningPartyId = signingPartyId // Save for later use in this flow
android.util.Log.d("TssRepository", "Using signingPartyId=$signingPartyId (current device partyId=$partyId)")
// CRITICAL: Set pendingSessionId BEFORE joinSession to avoid race condition // CRITICAL: Set pendingSessionId BEFORE joinSession to avoid race condition
// This ensures session_started events can be matched even if they arrive // This ensures session_started events can be matched even if they arrive
// before _currentSession is set // before _currentSession is set
pendingSessionId = sessionId pendingSessionId = sessionId
android.util.Log.d("TssRepository", "Set pendingSessionId=$sessionId for event matching (sign joiner)") android.util.Log.d("TssRepository", "Set pendingSessionId=$sessionId for event matching (sign joiner)")
// Join session via gRPC using the original partyId from keygen (CRITICAL for backup/restore) // Join session via gRPC (matching Electron's grpcClient.joinSession)
val joinResult = grpcClient.joinSession(sessionId, signingPartyId, joinToken) val joinResult = grpcClient.joinSession(sessionId, partyId, joinToken)
if (joinResult.isFailure) { if (joinResult.isFailure) {
android.util.Log.e("TssRepository", "gRPC sign join failed", joinResult.exceptionOrNull()) android.util.Log.e("TssRepository", "gRPC sign join failed", joinResult.exceptionOrNull())
return@withContext Result.failure(joinResult.exceptionOrNull()!!) return@withContext Result.failure(joinResult.exceptionOrNull()!!)
@ -1160,13 +1137,12 @@ class TssRepository @Inject constructor(
// Build participants list (matching Electron's logic) // Build participants list (matching Electron's logic)
// Prefer using parties from validateInviteCode (complete list) // Prefer using parties from validateInviteCode (complete list)
// CRITICAL: Use signingPartyId (original partyId from keygen) for participant identification
val participants = if (parties.isNotEmpty()) { val participants = if (parties.isNotEmpty()) {
parties.toMutableList() parties.toMutableList()
} else { } else {
// Fallback: use other_parties + self // Fallback: use other_parties + self
val list = sessionData.participants.toMutableList() val list = sessionData.participants.toMutableList()
list.add(Participant(signingPartyId, myPartyIndex, "")) list.add(Participant(partyId, myPartyIndex, ""))
list.sortBy { it.partyIndex } list.sortBy { it.partyIndex }
list list
} }
@ -1246,14 +1222,10 @@ class TssRepository @Inject constructor(
} else { } else {
messageHash messageHash
} }
// CRITICAL: Use shareEntity.partyId (original partyId from keygen) for signing android.util.Log.d("TssRepository", "Starting TSS sign with cleanMessageHash=${cleanMessageHash.take(20)}...")
// This is required for backup/restore to work - the partyId must match what was used during keygen
val signingPartyId = shareEntity.partyId
currentSigningPartyId = signingPartyId // Save for later use in this flow
android.util.Log.d("TssRepository", "Starting TSS sign with cleanMessageHash=${cleanMessageHash.take(20)}..., signingPartyId=$signingPartyId")
val startResult = tssNativeBridge.startSign( val startResult = tssNativeBridge.startSign(
sessionId = sessionId, sessionId = sessionId,
partyId = signingPartyId, partyId = partyId,
partyIndex = partyIndex, partyIndex = partyIndex,
thresholdT = thresholdT, thresholdT = thresholdT,
thresholdN = shareEntity.thresholdN, // Use original N from keygen thresholdN = shareEntity.thresholdN, // Use original N from keygen
@ -1271,8 +1243,8 @@ class TssRepository @Inject constructor(
// Start collecting progress from native bridge // Start collecting progress from native bridge
startProgressCollection() startProgressCollection()
// Mark ready - use signingPartyId (original partyId from keygen) // Mark ready
grpcClient.markPartyReady(sessionId, signingPartyId) grpcClient.markPartyReady(sessionId, partyId)
// Wait for sign result // Wait for sign result
val signResult = tssNativeBridge.waitForSignResult() val signResult = tssNativeBridge.waitForSignResult()
@ -1284,15 +1256,14 @@ class TssRepository @Inject constructor(
val result = signResult.getOrThrow() val result = signResult.getOrThrow()
// Report completion - use signingPartyId (original partyId from keygen) // Report completion
val signatureBytes = android.util.Base64.decode(result.signature, android.util.Base64.NO_WRAP) val signatureBytes = android.util.Base64.decode(result.signature, android.util.Base64.NO_WRAP)
grpcClient.reportCompletion(sessionId, signingPartyId, signature = signatureBytes) grpcClient.reportCompletion(sessionId, partyId, signature = signatureBytes)
stopProgressCollection() stopProgressCollection()
_sessionStatus.value = SessionStatus.COMPLETED _sessionStatus.value = SessionStatus.COMPLETED
pendingSessionId = null // Clear pending session ID on completion pendingSessionId = null // Clear pending session ID on completion
messageCollectionJob?.cancel() messageCollectionJob?.cancel()
currentSigningPartyId = null // Clear after signing completes
android.util.Log.d("TssRepository", "Sign as joiner completed: signature=${result.signature.take(20)}...") android.util.Log.d("TssRepository", "Sign as joiner completed: signature=${result.signature.take(20)}...")
@ -1303,7 +1274,6 @@ class TssRepository @Inject constructor(
stopProgressCollection() stopProgressCollection()
_sessionStatus.value = SessionStatus.FAILED _sessionStatus.value = SessionStatus.FAILED
pendingSessionId = null // Clear pending session ID on failure pendingSessionId = null // Clear pending session ID on failure
currentSigningPartyId = null // Clear on failure too
Result.failure(e) Result.failure(e)
} }
} }
@ -1396,7 +1366,6 @@ class TssRepository @Inject constructor(
val address = AddressUtils.deriveEvmAddress(publicKeyBytes) val address = AddressUtils.deriveEvmAddress(publicKeyBytes)
// Save share record // Save share record
// CRITICAL: Save partyId - this is required for signing after backup/restore
val shareEntity = ShareRecordEntity( val shareEntity = ShareRecordEntity(
sessionId = apiJoinData.sessionId, sessionId = apiJoinData.sessionId,
publicKey = result.publicKey, publicKey = result.publicKey,
@ -1404,7 +1373,6 @@ class TssRepository @Inject constructor(
thresholdT = apiJoinData.thresholdT, thresholdT = apiJoinData.thresholdT,
thresholdN = apiJoinData.thresholdN, thresholdN = apiJoinData.thresholdN,
partyIndex = myPartyIndex, partyIndex = myPartyIndex,
partyId = partyId,
address = address address = address
) )
val id = shareRecordDao.insertShare(shareEntity) val id = shareRecordDao.insertShare(shareEntity)
@ -1548,15 +1516,12 @@ class TssRepository @Inject constructor(
_sessionStatus.value = SessionStatus.WAITING _sessionStatus.value = SessionStatus.WAITING
// Add self to participants // Add self to participants
// CRITICAL: Use shareEntity.partyId (original partyId from keygen) for signing val allParticipants = sessionData.participants + Participant(partyId, myPartyIndex)
val signingPartyId = shareEntity.partyId
currentSigningPartyId = signingPartyId // Save for later use in this flow
val allParticipants = sessionData.participants + Participant(signingPartyId, myPartyIndex)
// Start TSS sign // Start TSS sign
val startResult = tssNativeBridge.startSign( val startResult = tssNativeBridge.startSign(
sessionId = apiJoinData.sessionId, sessionId = apiJoinData.sessionId,
partyId = signingPartyId, partyId = partyId,
partyIndex = myPartyIndex, partyIndex = myPartyIndex,
thresholdT = apiJoinData.thresholdT, thresholdT = apiJoinData.thresholdT,
thresholdN = shareEntity.thresholdN, // Use original N from keygen thresholdN = shareEntity.thresholdN, // Use original N from keygen
@ -1575,8 +1540,8 @@ class TssRepository @Inject constructor(
// Start message routing // Start message routing
startMessageRouting(apiJoinData.sessionId, myPartyIndex) startMessageRouting(apiJoinData.sessionId, myPartyIndex)
// Mark ready - use signingPartyId (original partyId from keygen) // Mark ready
grpcClient.markPartyReady(apiJoinData.sessionId, signingPartyId) grpcClient.markPartyReady(apiJoinData.sessionId, partyId)
// Wait for sign result // Wait for sign result
val signResult = tssNativeBridge.waitForSignResult() val signResult = tssNativeBridge.waitForSignResult()
@ -1587,20 +1552,18 @@ class TssRepository @Inject constructor(
val result = signResult.getOrThrow() val result = signResult.getOrThrow()
// Report completion - use signingPartyId (original partyId from keygen) // Report completion
val signatureBytes = Base64.decode(result.signature, Base64.NO_WRAP) val signatureBytes = Base64.decode(result.signature, Base64.NO_WRAP)
grpcClient.reportCompletion(apiJoinData.sessionId, signingPartyId, signature = signatureBytes) grpcClient.reportCompletion(apiJoinData.sessionId, partyId, signature = signatureBytes)
_sessionStatus.value = SessionStatus.COMPLETED _sessionStatus.value = SessionStatus.COMPLETED
messageCollectionJob?.cancel() messageCollectionJob?.cancel()
currentSigningPartyId = null // Clear after signing completes
Result.success(result) Result.success(result)
} catch (e: Exception) { } catch (e: Exception) {
android.util.Log.e("TssRepository", "Join sign session failed", e) android.util.Log.e("TssRepository", "Join sign session failed", e)
_sessionStatus.value = SessionStatus.FAILED _sessionStatus.value = SessionStatus.FAILED
currentSigningPartyId = null // Clear on failure too
Result.failure(e) Result.failure(e)
} }
} }
@ -1822,7 +1785,6 @@ class TssRepository @Inject constructor(
val address = AddressUtils.deriveEvmAddress(publicKeyBytes) val address = AddressUtils.deriveEvmAddress(publicKeyBytes)
// Save share record (use actual thresholds from backend) // Save share record (use actual thresholds from backend)
// CRITICAL: Save partyId - this is required for signing after backup/restore
val shareEntity = ShareRecordEntity( val shareEntity = ShareRecordEntity(
sessionId = sessionId, sessionId = sessionId,
publicKey = result.publicKey, publicKey = result.publicKey,
@ -1830,7 +1792,6 @@ class TssRepository @Inject constructor(
thresholdT = actualThresholdT, thresholdT = actualThresholdT,
thresholdN = actualThresholdN, thresholdN = actualThresholdN,
partyIndex = myPartyIndex, partyIndex = myPartyIndex,
partyId = partyId,
address = address address = address
) )
val id = shareRecordDao.insertShare(shareEntity) val id = shareRecordDao.insertShare(shareEntity)
@ -1939,7 +1900,6 @@ class TssRepository @Inject constructor(
} }
// Convert to entity and save // Convert to entity and save
// CRITICAL: Preserve the original partyId from backup - this is required for signing
val shareRecord = backup.toShareRecord() val shareRecord = backup.toShareRecord()
val entity = ShareRecordEntity( val entity = ShareRecordEntity(
sessionId = shareRecord.sessionId, sessionId = shareRecord.sessionId,
@ -1948,7 +1908,6 @@ class TssRepository @Inject constructor(
thresholdT = shareRecord.thresholdT, thresholdT = shareRecord.thresholdT,
thresholdN = shareRecord.thresholdN, thresholdN = shareRecord.thresholdN,
partyIndex = shareRecord.partyIndex, partyIndex = shareRecord.partyIndex,
partyId = shareRecord.partyId,
address = shareRecord.address, address = shareRecord.address,
createdAt = shareRecord.createdAt createdAt = shareRecord.createdAt
) )
@ -1956,7 +1915,7 @@ class TssRepository @Inject constructor(
val newId = shareRecordDao.insertShare(entity) val newId = shareRecordDao.insertShare(entity)
val savedShare = shareRecord.copy(id = newId) val savedShare = shareRecord.copy(id = newId)
android.util.Log.d("TssRepository", "Imported share backup for address: ${backup.address}, partyId: ${backup.partyId}") android.util.Log.d("TssRepository", "Imported share backup for address: ${backup.address}")
Result.success(savedShare) Result.success(savedShare)
} catch (e: com.google.gson.JsonSyntaxException) { } catch (e: com.google.gson.JsonSyntaxException) {
android.util.Log.e("TssRepository", "Invalid JSON format in backup", e) android.util.Log.e("TssRepository", "Invalid JSON format in backup", e)
@ -2012,19 +1971,10 @@ class TssRepository @Inject constructor(
} }
/** /**
* Get ERC-20 token balance for an address * Get Green Points (绿积分/dUSDT) token balance for an address
* Uses eth_call to call balanceOf(address) on the ERC-20 contract * Uses eth_call to call balanceOf(address) on the ERC-20 contract
* @param address The wallet address
* @param rpcUrl The RPC endpoint URL
* @param contractAddress The ERC-20 token contract address
* @param decimals The token decimals (default 6 for USDT-like tokens)
*/ */
suspend fun getERC20Balance( suspend fun getGreenPointsBalance(address: String, rpcUrl: String): Result<String> {
address: String,
rpcUrl: String,
contractAddress: String,
decimals: Int = 6
): Result<String> {
return withContext(Dispatchers.IO) { return withContext(Dispatchers.IO) {
try { try {
val client = okhttp3.OkHttpClient() val client = okhttp3.OkHttpClient()
@ -2034,14 +1984,14 @@ class TssRepository @Inject constructor(
// Function selector: 0x70a08231 // Function selector: 0x70a08231
// Address parameter: padded to 32 bytes // Address parameter: padded to 32 bytes
val paddedAddress = address.removePrefix("0x").lowercase().padStart(64, '0') val paddedAddress = address.removePrefix("0x").lowercase().padStart(64, '0')
val callData = "${ERC20Selectors.BALANCE_OF}$paddedAddress" val callData = "${GreenPointsToken.BALANCE_OF_SELECTOR}$paddedAddress"
val requestBody = """ val requestBody = """
{ {
"jsonrpc": "2.0", "jsonrpc": "2.0",
"method": "eth_call", "method": "eth_call",
"params": [{ "params": [{
"to": "$contractAddress", "to": "${GreenPointsToken.CONTRACT_ADDRESS}",
"data": "$callData" "data": "$callData"
}, "latest"], }, "latest"],
"id": 1 "id": 1
@ -2063,88 +2013,42 @@ class TssRepository @Inject constructor(
} }
val hexBalance = json.get("result").asString val hexBalance = json.get("result").asString
// Convert hex to decimal, then apply decimals // Convert hex to decimal, then apply 6 decimals (dUSDT uses 6 decimals like USDT)
val rawBalance = java.math.BigInteger(hexBalance.removePrefix("0x"), 16) val rawBalance = java.math.BigInteger(hexBalance.removePrefix("0x"), 16)
val divisor = java.math.BigDecimal.TEN.pow(decimals)
val tokenBalance = java.math.BigDecimal(rawBalance).divide( val tokenBalance = java.math.BigDecimal(rawBalance).divide(
divisor, java.math.BigDecimal("1000000"), // 10^6 for 6 decimals
decimals, 6,
java.math.RoundingMode.DOWN java.math.RoundingMode.DOWN
) )
Result.success(tokenBalance.toPlainString()) Result.success(tokenBalance.toPlainString())
} catch (e: Exception) { } catch (e: Exception) {
android.util.Log.e("TssRepository", "Failed to get ERC20 balance for $contractAddress: ${e.message}") android.util.Log.e("TssRepository", "Failed to get Green Points balance: ${e.message}")
Result.failure(e) Result.failure(e)
} }
} }
} }
/** /**
* Get Green Points (绿积分/dUSDT) token balance for an address * Get both KAVA and Green Points balances for an address
* Uses eth_call to call balanceOf(address) on the ERC-20 contract
*/
suspend fun getGreenPointsBalance(address: String, rpcUrl: String): Result<String> {
return getERC20Balance(
address = address,
rpcUrl = rpcUrl,
contractAddress = GreenPointsToken.CONTRACT_ADDRESS,
decimals = GreenPointsToken.DECIMALS
)
}
/**
* Get Energy Points (积分股/eUSDT) token balance for an address
*/
suspend fun getEnergyPointsBalance(address: String, rpcUrl: String): Result<String> {
return getERC20Balance(
address = address,
rpcUrl = rpcUrl,
contractAddress = EnergyPointsToken.CONTRACT_ADDRESS,
decimals = EnergyPointsToken.DECIMALS
)
}
/**
* Get Future Points (积分值/fUSDT) token balance for an address
*/
suspend fun getFuturePointsBalance(address: String, rpcUrl: String): Result<String> {
return getERC20Balance(
address = address,
rpcUrl = rpcUrl,
contractAddress = FuturePointsToken.CONTRACT_ADDRESS,
decimals = FuturePointsToken.DECIMALS
)
}
/**
* Get all token balances for an address (KAVA + all ERC-20 tokens)
*/ */
suspend fun getWalletBalance(address: String, rpcUrl: String): Result<WalletBalance> { suspend fun getWalletBalance(address: String, rpcUrl: String): Result<WalletBalance> {
return withContext(Dispatchers.IO) { return withContext(Dispatchers.IO) {
try { try {
// Fetch all balances in parallel // Fetch both balances in parallel
val kavaDeferred = async { getBalance(address, rpcUrl) } val kavaDeferred = async { getBalance(address, rpcUrl) }
val greenPointsDeferred = async { getGreenPointsBalance(address, rpcUrl) } val greenPointsDeferred = async { getGreenPointsBalance(address, rpcUrl) }
val energyPointsDeferred = async { getEnergyPointsBalance(address, rpcUrl) }
val futurePointsDeferred = async { getFuturePointsBalance(address, rpcUrl) }
val kavaResult = kavaDeferred.await() val kavaResult = kavaDeferred.await()
val greenPointsResult = greenPointsDeferred.await() val greenPointsResult = greenPointsDeferred.await()
val energyPointsResult = energyPointsDeferred.await()
val futurePointsResult = futurePointsDeferred.await()
val kavaBalance = kavaResult.getOrDefault("0") val kavaBalance = kavaResult.getOrDefault("0")
val greenPointsBalance = greenPointsResult.getOrDefault("0") val greenPointsBalance = greenPointsResult.getOrDefault("0")
val energyPointsBalance = energyPointsResult.getOrDefault("0")
val futurePointsBalance = futurePointsResult.getOrDefault("0")
Result.success(WalletBalance( Result.success(WalletBalance(
address = address, address = address,
kavaBalance = kavaBalance, kavaBalance = kavaBalance,
greenPointsBalance = greenPointsBalance, greenPointsBalance = greenPointsBalance
energyPointsBalance = energyPointsBalance,
futurePointsBalance = futurePointsBalance
)) ))
} catch (e: Exception) { } catch (e: Exception) {
Result.failure(e) Result.failure(e)
@ -2408,12 +2312,8 @@ class TssRepository @Inject constructor(
val shareEntity = shareRecordDao.getShareById(shareId) val shareEntity = shareRecordDao.getShareById(shareId)
?: return@withContext Result.failure(Exception("Share not found")) ?: return@withContext Result.failure(Exception("Share not found"))
// CRITICAL: Use shareEntity.partyId (original partyId from keygen) for signing
// This is required for backup/restore to work - the partyId must match what was used during keygen
val signingPartyId = shareEntity.partyId
currentSigningPartyId = signingPartyId // Save for waitForSignature
android.util.Log.d("TssRepository", "[CO-SIGN] startSigning: participants=${session.participants.size}") android.util.Log.d("TssRepository", "[CO-SIGN] startSigning: participants=${session.participants.size}")
android.util.Log.d("TssRepository", "[CO-SIGN] startSigning: sessionId=$sessionId, signingPartyId=$signingPartyId, partyIndex=${shareEntity.partyIndex}") android.util.Log.d("TssRepository", "[CO-SIGN] startSigning: sessionId=$sessionId, partyId=$partyId, partyIndex=${shareEntity.partyIndex}")
android.util.Log.d("TssRepository", "[CO-SIGN] startSigning: thresholdT=${session.thresholdT}, thresholdN=${shareEntity.thresholdN}") android.util.Log.d("TssRepository", "[CO-SIGN] startSigning: thresholdT=${session.thresholdT}, thresholdN=${shareEntity.thresholdN}")
android.util.Log.d("TssRepository", "[CO-SIGN] startSigning: messageHash=${session.messageHash?.take(20)}...") android.util.Log.d("TssRepository", "[CO-SIGN] startSigning: messageHash=${session.messageHash?.take(20)}...")
session.participants.forEachIndexed { idx, p -> session.participants.forEachIndexed { idx, p ->
@ -2428,10 +2328,10 @@ class TssRepository @Inject constructor(
} else { } else {
rawMessageHash rawMessageHash
} }
android.util.Log.d("TssRepository", "[CO-SIGN] Calling tssNativeBridge.startSign with cleanMessageHash=${cleanMessageHash.take(20)}..., signingPartyId=$signingPartyId") android.util.Log.d("TssRepository", "[CO-SIGN] Calling tssNativeBridge.startSign with cleanMessageHash=${cleanMessageHash.take(20)}...")
val startResult = tssNativeBridge.startSign( val startResult = tssNativeBridge.startSign(
sessionId = sessionId, sessionId = sessionId,
partyId = signingPartyId, partyId = partyId,
partyIndex = shareEntity.partyIndex, partyIndex = shareEntity.partyIndex,
thresholdT = session.thresholdT, thresholdT = session.thresholdT,
thresholdN = shareEntity.thresholdN, thresholdN = shareEntity.thresholdN,
@ -2459,8 +2359,8 @@ class TssRepository @Inject constructor(
startMessageRouting(sessionId, shareEntity.partyIndex) startMessageRouting(sessionId, shareEntity.partyIndex)
} }
// Mark ready - use signingPartyId (original partyId from keygen) // Mark ready
grpcClient.markPartyReady(sessionId, signingPartyId) grpcClient.markPartyReady(sessionId, partyId)
Result.success(Unit) Result.success(Unit)
} catch (e: Exception) { } catch (e: Exception) {
@ -2486,18 +2386,16 @@ class TssRepository @Inject constructor(
val result = signResult.getOrThrow() val result = signResult.getOrThrow()
// Report completion - use currentSigningPartyId (original partyId from keygen) // Report completion
val signatureBytes = Base64.decode(result.signature, Base64.NO_WRAP) val signatureBytes = Base64.decode(result.signature, Base64.NO_WRAP)
val session = _currentSession.value val session = _currentSession.value
val signingPartyId = currentSigningPartyId ?: partyId
if (session != null) { if (session != null) {
grpcClient.reportCompletion(session.sessionId, signingPartyId, signature = signatureBytes) grpcClient.reportCompletion(session.sessionId, partyId, signature = signatureBytes)
} }
stopProgressCollection() stopProgressCollection()
_sessionStatus.value = SessionStatus.COMPLETED _sessionStatus.value = SessionStatus.COMPLETED
messageCollectionJob?.cancel() messageCollectionJob?.cancel()
currentSigningPartyId = null // Clear after signing completes
Result.success(result) Result.success(result)
} catch (e: Exception) { } catch (e: Exception) {
@ -2861,7 +2759,6 @@ private fun ShareRecordEntity.toShareRecord() = ShareRecord(
thresholdT = thresholdT, thresholdT = thresholdT,
thresholdN = thresholdN, thresholdN = thresholdN,
partyIndex = partyIndex, partyIndex = partyIndex,
partyId = partyId,
address = address, address = address,
createdAt = createdAt createdAt = createdAt
) )

View File

@ -6,7 +6,6 @@ import androidx.room.migration.Migration
import androidx.sqlite.db.SupportSQLiteDatabase import androidx.sqlite.db.SupportSQLiteDatabase
import com.durian.tssparty.data.local.AppSettingDao import com.durian.tssparty.data.local.AppSettingDao
import com.durian.tssparty.data.local.ShareRecordDao import com.durian.tssparty.data.local.ShareRecordDao
import com.durian.tssparty.data.local.TransactionRecordDao
import com.durian.tssparty.data.local.TssDatabase import com.durian.tssparty.data.local.TssDatabase
import com.durian.tssparty.data.local.TssNativeBridge import com.durian.tssparty.data.local.TssNativeBridge
import com.durian.tssparty.data.remote.GrpcClient import com.durian.tssparty.data.remote.GrpcClient
@ -35,53 +34,6 @@ object AppModule {
} }
} }
// Migration from version 2 to 3: add party_id column to share_records
// This is critical for backup/restore - the partyId must be preserved for signing to work
private val MIGRATION_2_3 = object : Migration(2, 3) {
override fun migrate(database: SupportSQLiteDatabase) {
// Add party_id column with empty default (existing records will need to be re-exported)
database.execSQL(
"ALTER TABLE `share_records` ADD COLUMN `party_id` TEXT NOT NULL DEFAULT ''"
)
}
}
// Migration from version 3 to 4: add transaction_records table for transfer history
// 添加转账记录表,用于存储交易历史和分类账
private val MIGRATION_3_4 = object : Migration(3, 4) {
override fun migrate(database: SupportSQLiteDatabase) {
// 创建转账记录表
database.execSQL("""
CREATE TABLE IF NOT EXISTS `transaction_records` (
`id` INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
`share_id` INTEGER NOT NULL,
`from_address` TEXT NOT NULL,
`to_address` TEXT NOT NULL,
`amount` TEXT NOT NULL,
`token_type` TEXT NOT NULL,
`tx_hash` TEXT NOT NULL,
`gas_price` TEXT NOT NULL,
`gas_used` TEXT NOT NULL DEFAULT '',
`tx_fee` TEXT NOT NULL DEFAULT '',
`status` TEXT NOT NULL,
`direction` TEXT NOT NULL,
`note` TEXT NOT NULL DEFAULT '',
`created_at` INTEGER NOT NULL,
`confirmed_at` INTEGER,
`block_number` INTEGER,
FOREIGN KEY(`share_id`) REFERENCES `share_records`(`id`) ON DELETE CASCADE
)
""".trimIndent())
// 创建索引以优化查询性能
database.execSQL("CREATE INDEX IF NOT EXISTS `index_transaction_records_share_id` ON `transaction_records` (`share_id`)")
database.execSQL("CREATE UNIQUE INDEX IF NOT EXISTS `index_transaction_records_tx_hash` ON `transaction_records` (`tx_hash`)")
database.execSQL("CREATE INDEX IF NOT EXISTS `index_transaction_records_from_address` ON `transaction_records` (`from_address`)")
database.execSQL("CREATE INDEX IF NOT EXISTS `index_transaction_records_to_address` ON `transaction_records` (`to_address`)")
database.execSQL("CREATE INDEX IF NOT EXISTS `index_transaction_records_created_at` ON `transaction_records` (`created_at`)")
}
}
@Provides @Provides
@Singleton @Singleton
fun provideGson(): Gson { fun provideGson(): Gson {
@ -96,7 +48,7 @@ object AppModule {
TssDatabase::class.java, TssDatabase::class.java,
"tss_party.db" "tss_party.db"
) )
.addMigrations(MIGRATION_1_2, MIGRATION_2_3, MIGRATION_3_4) .addMigrations(MIGRATION_1_2)
.build() .build()
} }
@ -112,12 +64,6 @@ object AppModule {
return database.appSettingDao() return database.appSettingDao()
} }
@Provides
@Singleton
fun provideTransactionRecordDao(database: TssDatabase): TransactionRecordDao {
return database.transactionRecordDao()
}
@Provides @Provides
@Singleton @Singleton
fun provideGrpcClient(): GrpcClient { fun provideGrpcClient(): GrpcClient {
@ -136,9 +82,8 @@ object AppModule {
grpcClient: GrpcClient, grpcClient: GrpcClient,
tssNativeBridge: TssNativeBridge, tssNativeBridge: TssNativeBridge,
shareRecordDao: ShareRecordDao, shareRecordDao: ShareRecordDao,
appSettingDao: AppSettingDao, appSettingDao: AppSettingDao
transactionRecordDao: TransactionRecordDao
): TssRepository { ): TssRepository {
return TssRepository(grpcClient, tssNativeBridge, shareRecordDao, appSettingDao, transactionRecordDao) return TssRepository(grpcClient, tssNativeBridge, shareRecordDao, appSettingDao)
} }
} }

View File

@ -86,7 +86,6 @@ data class ShareRecord(
val thresholdT: Int, val thresholdT: Int,
val thresholdN: Int, val thresholdN: Int,
val partyIndex: Int, val partyIndex: Int,
val partyId: String, // The original partyId used during keygen - required for signing
val address: String, val address: String,
val createdAt: Long = System.currentTimeMillis() val createdAt: Long = System.currentTimeMillis()
) )
@ -130,21 +129,7 @@ enum class NetworkType {
*/ */
enum class TokenType { enum class TokenType {
KAVA, // Native KAVA token KAVA, // Native KAVA token
GREEN_POINTS, // 绿积分 (dUSDT) ERC-20 token GREEN_POINTS // 绿积分 (dUSDT) ERC-20 token
ENERGY_POINTS, // 积分股 (eUSDT) ERC-20 token
FUTURE_POINTS // 积分值 (fUSDT) ERC-20 token
}
/**
* ERC-20 通用函数签名keccak256 哈希的前4字节
* Common ERC-20 function selectors
*/
object ERC20Selectors {
const val BALANCE_OF = "0x70a08231" // balanceOf(address)
const val TRANSFER = "0xa9059cbb" // transfer(address,uint256)
const val APPROVE = "0x095ea7b3" // approve(address,uint256)
const val ALLOWANCE = "0xdd62ed3e" // allowance(address,address)
const val TOTAL_SUPPLY = "0x18160ddd" // totalSupply()
} }
/** /**
@ -157,122 +142,22 @@ object GreenPointsToken {
const val SYMBOL = "dUSDT" const val SYMBOL = "dUSDT"
const val DECIMALS = 6 const val DECIMALS = 6
// ERC-20 function signatures (kept for backward compatibility) // ERC-20 function signatures (first 4 bytes of keccak256 hash)
const val BALANCE_OF_SELECTOR = ERC20Selectors.BALANCE_OF const val BALANCE_OF_SELECTOR = "0x70a08231" // balanceOf(address)
const val TRANSFER_SELECTOR = ERC20Selectors.TRANSFER const val TRANSFER_SELECTOR = "0xa9059cbb" // transfer(address,uint256)
const val APPROVE_SELECTOR = ERC20Selectors.APPROVE const val APPROVE_SELECTOR = "0x095ea7b3" // approve(address,uint256)
const val ALLOWANCE_SELECTOR = ERC20Selectors.ALLOWANCE const val ALLOWANCE_SELECTOR = "0xdd62ed3e" // allowance(address,address)
const val TOTAL_SUPPLY_SELECTOR = ERC20Selectors.TOTAL_SUPPLY const val TOTAL_SUPPLY_SELECTOR = "0x18160ddd" // totalSupply()
} }
/** /**
* Energy Points (积分股) Token Contract Configuration * Wallet balance containing both native and token balances
* eUSDT - ERC-20 token on Kava EVM
* 总供应量100.02亿 (10,002,000,000)
*/
object EnergyPointsToken {
const val CONTRACT_ADDRESS = "0x7C3275D808eFbAE90C06C7E3A9AfDdcAa8563931"
const val NAME = "积分股"
const val SYMBOL = "eUSDT"
const val DECIMALS = 6 // 与 dUSDT 相同的精度
}
/**
* Future Points (积分值) Token Contract Configuration
* fUSDT - ERC-20 token on Kava EVM
* 总供应量1万亿 (1,000,000,000,000)
*/
object FuturePointsToken {
const val CONTRACT_ADDRESS = "0x14dc4f7d3E4197438d058C3D156dd9826A161134"
const val NAME = "积分值"
const val SYMBOL = "fUSDT"
const val DECIMALS = 6 // 与 dUSDT 相同的精度
}
/**
* 代币配置工具类
* Token configuration utility
*/
object TokenConfig {
/**
* 获取代币合约地址
*/
fun getContractAddress(tokenType: TokenType): String? {
return when (tokenType) {
TokenType.KAVA -> null // 原生代币无合约地址
TokenType.GREEN_POINTS -> GreenPointsToken.CONTRACT_ADDRESS
TokenType.ENERGY_POINTS -> EnergyPointsToken.CONTRACT_ADDRESS
TokenType.FUTURE_POINTS -> FuturePointsToken.CONTRACT_ADDRESS
}
}
/**
* 获取代币精度
*/
fun getDecimals(tokenType: TokenType): Int {
return when (tokenType) {
TokenType.KAVA -> 18 // KAVA 原生代币精度
TokenType.GREEN_POINTS -> GreenPointsToken.DECIMALS
TokenType.ENERGY_POINTS -> EnergyPointsToken.DECIMALS
TokenType.FUTURE_POINTS -> FuturePointsToken.DECIMALS
}
}
/**
* 获取代币名称
*/
fun getName(tokenType: TokenType): String {
return when (tokenType) {
TokenType.KAVA -> "KAVA"
TokenType.GREEN_POINTS -> GreenPointsToken.NAME
TokenType.ENERGY_POINTS -> EnergyPointsToken.NAME
TokenType.FUTURE_POINTS -> FuturePointsToken.NAME
}
}
/**
* 获取代币符号
*/
fun getSymbol(tokenType: TokenType): String {
return when (tokenType) {
TokenType.KAVA -> "KAVA"
TokenType.GREEN_POINTS -> GreenPointsToken.SYMBOL
TokenType.ENERGY_POINTS -> EnergyPointsToken.SYMBOL
TokenType.FUTURE_POINTS -> FuturePointsToken.SYMBOL
}
}
/**
* 判断是否为 ERC-20 代币
*/
fun isERC20(tokenType: TokenType): Boolean {
return tokenType != TokenType.KAVA
}
}
/**
* Wallet balance containing native and all token balances
* 钱包余额包含原生代币和所有 ERC-20 代币余额
*/ */
data class WalletBalance( data class WalletBalance(
val address: String, val address: String,
val kavaBalance: String = "0", // Native KAVA balance val kavaBalance: String = "0", // Native KAVA balance
val greenPointsBalance: String = "0", // 绿积分 (dUSDT) balance val greenPointsBalance: String = "0" // 绿积分 (dUSDT) balance
val energyPointsBalance: String = "0", // 积分股 (eUSDT) balance )
val futurePointsBalance: String = "0" // 积分值 (fUSDT) balance
) {
/**
* 根据代币类型获取余额
*/
fun getBalance(tokenType: TokenType): String {
return when (tokenType) {
TokenType.KAVA -> kavaBalance
TokenType.GREEN_POINTS -> greenPointsBalance
TokenType.ENERGY_POINTS -> energyPointsBalance
TokenType.FUTURE_POINTS -> futurePointsBalance
}
}
}
/** /**
* Share backup data for export/import * Share backup data for export/import
@ -280,7 +165,7 @@ data class WalletBalance(
*/ */
data class ShareBackup( data class ShareBackup(
@SerializedName("version") @SerializedName("version")
val version: Int = 2, // Version 2: added partyId field for proper backup/restore val version: Int = 1, // Backup format version for future compatibility
@SerializedName("sessionId") @SerializedName("sessionId")
val sessionId: String, val sessionId: String,
@ -300,9 +185,6 @@ data class ShareBackup(
@SerializedName("partyIndex") @SerializedName("partyIndex")
val partyIndex: Int, val partyIndex: Int,
@SerializedName("partyId")
val partyId: String, // The original partyId used during keygen - CRITICAL for signing after restore
@SerializedName("address") @SerializedName("address")
val address: String, val address: String,
@ -327,7 +209,6 @@ data class ShareBackup(
thresholdT = share.thresholdT, thresholdT = share.thresholdT,
thresholdN = share.thresholdN, thresholdN = share.thresholdN,
partyIndex = share.partyIndex, partyIndex = share.partyIndex,
partyId = share.partyId,
address = share.address, address = share.address,
createdAt = share.createdAt createdAt = share.createdAt
) )
@ -346,7 +227,6 @@ data class ShareBackup(
thresholdT = thresholdT, thresholdT = thresholdT,
thresholdN = thresholdN, thresholdN = thresholdN,
partyIndex = partyIndex, partyIndex = partyIndex,
partyId = partyId,
address = address, address = address,
createdAt = createdAt createdAt = createdAt
) )

View File

@ -27,13 +27,10 @@ import android.graphics.Bitmap
import androidx.compose.foundation.Image import androidx.compose.foundation.Image
import androidx.compose.foundation.background import androidx.compose.foundation.background
import androidx.compose.ui.graphics.asImageBitmap import androidx.compose.ui.graphics.asImageBitmap
import com.durian.tssparty.domain.model.EnergyPointsToken
import com.durian.tssparty.domain.model.FuturePointsToken
import com.durian.tssparty.domain.model.GreenPointsToken import com.durian.tssparty.domain.model.GreenPointsToken
import com.durian.tssparty.domain.model.NetworkType import com.durian.tssparty.domain.model.NetworkType
import com.durian.tssparty.domain.model.SessionStatus import com.durian.tssparty.domain.model.SessionStatus
import com.durian.tssparty.domain.model.ShareRecord import com.durian.tssparty.domain.model.ShareRecord
import com.durian.tssparty.domain.model.TokenConfig
import com.durian.tssparty.domain.model.TokenType import com.durian.tssparty.domain.model.TokenType
import com.durian.tssparty.domain.model.WalletBalance import com.durian.tssparty.domain.model.WalletBalance
import com.durian.tssparty.util.TransactionUtils import com.durian.tssparty.util.TransactionUtils
@ -159,8 +156,10 @@ fun TransferScreen(
rpcUrl = rpcUrl, rpcUrl = rpcUrl,
onSubmit = { onSubmit = {
// Get current balance for the selected token type // Get current balance for the selected token type
val currentBalance = walletBalance?.getBalance(selectedTokenType) val currentBalance = when (selectedTokenType) {
?: if (selectedTokenType == TokenType.KAVA) balance else null TokenType.KAVA -> walletBalance?.kavaBalance ?: balance
TokenType.GREEN_POINTS -> walletBalance?.greenPointsBalance
}
when { when {
toAddress.isBlank() -> validationError = "请输入收款地址" toAddress.isBlank() -> validationError = "请输入收款地址"
!toAddress.startsWith("0x") || toAddress.length != 42 -> validationError = "地址格式不正确" !toAddress.startsWith("0x") || toAddress.length != 42 -> validationError = "地址格式不正确"
@ -258,9 +257,14 @@ private fun TransferInputScreen(
var isCalculatingMax by remember { mutableStateOf(false) } var isCalculatingMax by remember { mutableStateOf(false) }
// Get current balance for the selected token type // Get current balance for the selected token type
val currentBalance = walletBalance?.getBalance(selectedTokenType) val currentBalance = when (selectedTokenType) {
?: if (selectedTokenType == TokenType.KAVA) balance else null TokenType.KAVA -> walletBalance?.kavaBalance ?: balance
val tokenSymbol = TokenConfig.getName(selectedTokenType) TokenType.GREEN_POINTS -> walletBalance?.greenPointsBalance
}
val tokenSymbol = when (selectedTokenType) {
TokenType.KAVA -> "KAVA"
TokenType.GREEN_POINTS -> GreenPointsToken.NAME
}
Column( Column(
modifier = Modifier modifier = Modifier
@ -289,74 +293,38 @@ private fun TransferInputScreen(
) )
Spacer(modifier = Modifier.height(8.dp)) Spacer(modifier = Modifier.height(8.dp))
// Show all token balances in a 2x2 grid // Show both balances
Column { Row(
Row( modifier = Modifier.fillMaxWidth(),
modifier = Modifier.fillMaxWidth(), horizontalArrangement = Arrangement.SpaceBetween
horizontalArrangement = Arrangement.SpaceBetween ) {
) { // KAVA balance
// KAVA balance Column {
Column { Text(
Text( text = "KAVA",
text = "KAVA", style = MaterialTheme.typography.labelSmall,
style = MaterialTheme.typography.labelSmall, color = MaterialTheme.colorScheme.onSurfaceVariant
color = MaterialTheme.colorScheme.onSurfaceVariant )
) Text(
Text( text = walletBalance?.kavaBalance ?: balance ?: "加载中...",
text = walletBalance?.kavaBalance ?: balance ?: "加载中...", style = MaterialTheme.typography.bodySmall,
style = MaterialTheme.typography.bodySmall, fontWeight = FontWeight.Medium,
fontWeight = FontWeight.Medium, color = MaterialTheme.colorScheme.primary
color = MaterialTheme.colorScheme.primary )
)
}
// Green Points balance (绿积分)
Column(horizontalAlignment = Alignment.End) {
Text(
text = GreenPointsToken.NAME,
style = MaterialTheme.typography.labelSmall,
color = MaterialTheme.colorScheme.onSurfaceVariant
)
Text(
text = walletBalance?.greenPointsBalance ?: "加载中...",
style = MaterialTheme.typography.bodySmall,
fontWeight = FontWeight.Medium,
color = Color(0xFF4CAF50)
)
}
} }
Spacer(modifier = Modifier.height(4.dp)) // Green Points balance
Row( Column(horizontalAlignment = Alignment.End) {
modifier = Modifier.fillMaxWidth(), Text(
horizontalArrangement = Arrangement.SpaceBetween text = GreenPointsToken.NAME,
) { style = MaterialTheme.typography.labelSmall,
// Energy Points balance (积分股) color = MaterialTheme.colorScheme.onSurfaceVariant
Column { )
Text( Text(
text = EnergyPointsToken.NAME, text = walletBalance?.greenPointsBalance ?: "加载中...",
style = MaterialTheme.typography.labelSmall, style = MaterialTheme.typography.bodySmall,
color = MaterialTheme.colorScheme.onSurfaceVariant fontWeight = FontWeight.Medium,
) color = Color(0xFF4CAF50)
Text( )
text = walletBalance?.energyPointsBalance ?: "加载中...",
style = MaterialTheme.typography.bodySmall,
fontWeight = FontWeight.Medium,
color = Color(0xFF2196F3) // Blue
)
}
// Future Points balance (积分值)
Column(horizontalAlignment = Alignment.End) {
Text(
text = FuturePointsToken.NAME,
style = MaterialTheme.typography.labelSmall,
color = MaterialTheme.colorScheme.onSurfaceVariant
)
Text(
text = walletBalance?.futurePointsBalance ?: "加载中...",
style = MaterialTheme.typography.bodySmall,
fontWeight = FontWeight.Medium,
color = Color(0xFF9C27B0) // Purple
)
}
} }
} }
} }
@ -371,7 +339,6 @@ private fun TransferInputScreen(
color = MaterialTheme.colorScheme.onSurfaceVariant color = MaterialTheme.colorScheme.onSurfaceVariant
) )
Spacer(modifier = Modifier.height(8.dp)) Spacer(modifier = Modifier.height(8.dp))
// First row: KAVA and Green Points
Row( Row(
modifier = Modifier.fillMaxWidth(), modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.spacedBy(8.dp) horizontalArrangement = Arrangement.spacedBy(8.dp)
@ -392,7 +359,7 @@ private fun TransferInputScreen(
}, },
modifier = Modifier.weight(1f) modifier = Modifier.weight(1f)
) )
// Green Points option (绿积分) // Green Points option
FilterChip( FilterChip(
selected = selectedTokenType == TokenType.GREEN_POINTS, selected = selectedTokenType == TokenType.GREEN_POINTS,
onClick = { onTokenTypeChange(TokenType.GREEN_POINTS) }, onClick = { onTokenTypeChange(TokenType.GREEN_POINTS) },
@ -413,53 +380,6 @@ private fun TransferInputScreen(
modifier = Modifier.weight(1f) modifier = Modifier.weight(1f)
) )
} }
Spacer(modifier = Modifier.height(8.dp))
// Second row: Energy Points and Future Points
Row(
modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.spacedBy(8.dp)
) {
// Energy Points option (积分股)
FilterChip(
selected = selectedTokenType == TokenType.ENERGY_POINTS,
onClick = { onTokenTypeChange(TokenType.ENERGY_POINTS) },
label = { Text(EnergyPointsToken.NAME) },
leadingIcon = {
if (selectedTokenType == TokenType.ENERGY_POINTS) {
Icon(
Icons.Default.Check,
contentDescription = null,
modifier = Modifier.size(18.dp)
)
}
},
colors = FilterChipDefaults.filterChipColors(
selectedContainerColor = Color(0xFF2196F3).copy(alpha = 0.2f),
selectedLabelColor = Color(0xFF2196F3)
),
modifier = Modifier.weight(1f)
)
// Future Points option (积分值)
FilterChip(
selected = selectedTokenType == TokenType.FUTURE_POINTS,
onClick = { onTokenTypeChange(TokenType.FUTURE_POINTS) },
label = { Text(FuturePointsToken.NAME) },
leadingIcon = {
if (selectedTokenType == TokenType.FUTURE_POINTS) {
Icon(
Icons.Default.Check,
contentDescription = null,
modifier = Modifier.size(18.dp)
)
}
},
colors = FilterChipDefaults.filterChipColors(
selectedContainerColor = Color(0xFF9C27B0).copy(alpha = 0.2f),
selectedLabelColor = Color(0xFF9C27B0)
),
modifier = Modifier.weight(1f)
)
}
Spacer(modifier = Modifier.height(16.dp)) Spacer(modifier = Modifier.height(16.dp))
@ -498,14 +418,9 @@ private fun TransferInputScreen(
keyboardOptions = KeyboardOptions(keyboardType = KeyboardType.Decimal), keyboardOptions = KeyboardOptions(keyboardType = KeyboardType.Decimal),
leadingIcon = { leadingIcon = {
Icon( Icon(
if (selectedTokenType == TokenType.KAVA) Icons.Default.AttachMoney else Icons.Default.Stars, if (selectedTokenType == TokenType.GREEN_POINTS) Icons.Default.Stars else Icons.Default.AttachMoney,
contentDescription = null, contentDescription = null,
tint = when (selectedTokenType) { tint = if (selectedTokenType == TokenType.GREEN_POINTS) Color(0xFF4CAF50) else MaterialTheme.colorScheme.onSurfaceVariant
TokenType.KAVA -> MaterialTheme.colorScheme.onSurfaceVariant
TokenType.GREEN_POINTS -> Color(0xFF4CAF50)
TokenType.ENERGY_POINTS -> Color(0xFF2196F3)
TokenType.FUTURE_POINTS -> Color(0xFF9C27B0)
}
) )
}, },
trailingIcon = { trailingIcon = {
@ -524,7 +439,7 @@ private fun TransferInputScreen(
onAmountChange(currentBalance) onAmountChange(currentBalance)
} }
} else { } else {
// For ERC-20 tokens (dUSDT, eUSDT, fUSDT), use the full balance // For tokens, use the full balance
onAmountChange(currentBalance) onAmountChange(currentBalance)
} }
isCalculatingMax = false isCalculatingMax = false
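
The "Max" logic above differs by asset: for native KAVA the gas fee (gasPrice × gasLimit) is subtracted from the balance, while for the ERC-20 points tokens the full token balance can be sent because gas is paid in KAVA. A rough TypeScript sketch of that rule (the 21000 gas limit and 1 gwei fallback follow the web client's defaults; `isErc20` is an assumed flag, not the app's API):

```typescript
// Sketch, not the app's implementation: compute the maximum transferable amount.
// ERC-20 tokens can spend the whole token balance (gas is paid in native KAVA);
// native transfers must reserve gasPrice * gasLimit.
function maxTransferable(
  balance: number,   // human-readable balance of the selected asset
  isErc20: boolean,  // assumed flag: true for the points tokens (dUSDT/eUSDT/fUSDT style)
  gasPriceGwei = 1,  // fallback used when gas estimation fails
  gasLimit = 21000   // native transfer gas limit
): number {
  if (isErc20) return balance;
  const gasFeeKava = (gasPriceGwei * gasLimit) / 1e9; // 21000 * 1 gwei ≈ 0.000021 KAVA
  return Math.max(balance - gasFeeKava, 0);
}
```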

View File

@ -35,8 +35,6 @@ import androidx.compose.ui.unit.sp
import androidx.compose.ui.window.Dialog import androidx.compose.ui.window.Dialog
import android.content.Intent import android.content.Intent
import android.net.Uri import android.net.Uri
import com.durian.tssparty.domain.model.EnergyPointsToken
import com.durian.tssparty.domain.model.FuturePointsToken
import com.durian.tssparty.domain.model.GreenPointsToken import com.durian.tssparty.domain.model.GreenPointsToken
import com.durian.tssparty.domain.model.NetworkType import com.durian.tssparty.domain.model.NetworkType
import com.durian.tssparty.domain.model.ShareRecord import com.durian.tssparty.domain.model.ShareRecord
@ -283,123 +281,62 @@ private fun WalletItemCard(
Spacer(modifier = Modifier.height(12.dp)) Spacer(modifier = Modifier.height(12.dp))
// Balance display - shows all token balances in a 2x2 grid // Balance display - now shows both KAVA and Green Points
Column { Row(
Row( modifier = Modifier.fillMaxWidth(),
modifier = Modifier.fillMaxWidth(), horizontalArrangement = Arrangement.SpaceBetween
horizontalArrangement = Arrangement.SpaceBetween ) {
) { // KAVA balance
// KAVA balance Column {
Column { Text(
Text( text = "KAVA",
text = "KAVA", style = MaterialTheme.typography.labelSmall,
style = MaterialTheme.typography.labelSmall, color = MaterialTheme.colorScheme.outline
color = MaterialTheme.colorScheme.outline )
Row(verticalAlignment = Alignment.CenterVertically) {
Icon(
Icons.Default.AccountBalance,
contentDescription = null,
modifier = Modifier.size(16.dp),
tint = MaterialTheme.colorScheme.primary
) )
Row(verticalAlignment = Alignment.CenterVertically) { Spacer(modifier = Modifier.width(4.dp))
Icon(
Icons.Default.AccountBalance,
contentDescription = null,
modifier = Modifier.size(16.dp),
tint = MaterialTheme.colorScheme.primary
)
Spacer(modifier = Modifier.width(4.dp))
Text(
text = walletBalance?.kavaBalance ?: balance ?: "加载中...",
style = MaterialTheme.typography.bodyMedium,
color = if (walletBalance != null || balance != null)
MaterialTheme.colorScheme.primary
else
MaterialTheme.colorScheme.outline,
fontWeight = FontWeight.Medium
)
}
}
// Green Points (绿积分) balance
Column(horizontalAlignment = Alignment.End) {
Text( Text(
text = GreenPointsToken.NAME, text = walletBalance?.kavaBalance ?: balance ?: "加载中...",
style = MaterialTheme.typography.labelSmall, style = MaterialTheme.typography.bodyMedium,
color = MaterialTheme.colorScheme.outline color = if (walletBalance != null || balance != null)
MaterialTheme.colorScheme.primary
else
MaterialTheme.colorScheme.outline,
fontWeight = FontWeight.Medium
) )
Row(verticalAlignment = Alignment.CenterVertically) {
Icon(
Icons.Default.Stars,
contentDescription = null,
modifier = Modifier.size(16.dp),
tint = Color(0xFF4CAF50)
)
Spacer(modifier = Modifier.width(4.dp))
Text(
text = walletBalance?.greenPointsBalance ?: "加载中...",
style = MaterialTheme.typography.bodyMedium,
color = if (walletBalance != null)
Color(0xFF4CAF50)
else
MaterialTheme.colorScheme.outline,
fontWeight = FontWeight.Medium
)
}
} }
} }
Spacer(modifier = Modifier.height(8.dp))
Row(
modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.SpaceBetween
) {
// Energy Points (积分股) balance
Column {
Text(
text = EnergyPointsToken.NAME,
style = MaterialTheme.typography.labelSmall,
color = MaterialTheme.colorScheme.outline
)
Row(verticalAlignment = Alignment.CenterVertically) {
Icon(
Icons.Default.Stars,
contentDescription = null,
modifier = Modifier.size(16.dp),
tint = Color(0xFF2196F3) // Blue
)
Spacer(modifier = Modifier.width(4.dp))
Text(
text = walletBalance?.energyPointsBalance ?: "加载中...",
style = MaterialTheme.typography.bodyMedium,
color = if (walletBalance != null)
Color(0xFF2196F3)
else
MaterialTheme.colorScheme.outline,
fontWeight = FontWeight.Medium
)
}
}
// Future Points (积分值) balance // Green Points (绿积分) balance
Column(horizontalAlignment = Alignment.End) { Column(horizontalAlignment = Alignment.End) {
Text( Text(
text = FuturePointsToken.NAME, text = GreenPointsToken.NAME,
style = MaterialTheme.typography.labelSmall, style = MaterialTheme.typography.labelSmall,
color = MaterialTheme.colorScheme.outline color = MaterialTheme.colorScheme.outline
)
Row(verticalAlignment = Alignment.CenterVertically) {
Icon(
Icons.Default.Stars,
contentDescription = null,
modifier = Modifier.size(16.dp),
tint = Color(0xFF4CAF50) // Green color for Green Points
)
Spacer(modifier = Modifier.width(4.dp))
Text(
text = walletBalance?.greenPointsBalance ?: "加载中...",
style = MaterialTheme.typography.bodyMedium,
color = if (walletBalance != null)
Color(0xFF4CAF50)
else
MaterialTheme.colorScheme.outline,
fontWeight = FontWeight.Medium
) )
Row(verticalAlignment = Alignment.CenterVertically) {
Icon(
Icons.Default.Stars,
contentDescription = null,
modifier = Modifier.size(16.dp),
tint = Color(0xFF9C27B0) // Purple
)
Spacer(modifier = Modifier.width(4.dp))
Text(
text = walletBalance?.futurePointsBalance ?: "加载中...",
style = MaterialTheme.typography.bodyMedium,
color = if (walletBalance != null)
Color(0xFF9C27B0)
else
MaterialTheme.colorScheme.outline,
fontWeight = FontWeight.Medium
)
}
} }
} }
} }
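
On the "main" side the card reads per-token fields from a single `WalletBalance` model (via `getBalance(tokenType)` in the transfer screen); the new branch drops the energy/future fields. A TypeScript sketch of the richer shape implied by the removed code (field and type names follow the diff; the accessor is an assumption about how the Kotlin model works):

```typescript
// Sketch of the multi-token balance model implied by the removed UI code.
type TokenType = 'KAVA' | 'GREEN_POINTS' | 'ENERGY_POINTS' | 'FUTURE_POINTS';

interface WalletBalance {
  kavaBalance?: string;
  greenPointsBalance?: string;
  energyPointsBalance?: string;
  futurePointsBalance?: string;
}

// Assumed accessor, mirroring walletBalance?.getBalance(selectedTokenType) on the Kotlin side.
const BALANCE_FIELD: Record<TokenType, keyof WalletBalance> = {
  KAVA: 'kavaBalance',
  GREEN_POINTS: 'greenPointsBalance',
  ENERGY_POINTS: 'energyPointsBalance',
  FUTURE_POINTS: 'futurePointsBalance',
};

function getBalance(b: WalletBalance, token: TokenType): string | undefined {
  return b[BALANCE_FIELD[token]];
}
```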

View File

@ -1,10 +1,6 @@
package com.durian.tssparty.util package com.durian.tssparty.util
import com.durian.tssparty.domain.model.ERC20Selectors
import com.durian.tssparty.domain.model.EnergyPointsToken
import com.durian.tssparty.domain.model.FuturePointsToken
import com.durian.tssparty.domain.model.GreenPointsToken import com.durian.tssparty.domain.model.GreenPointsToken
import com.durian.tssparty.domain.model.TokenConfig
import com.durian.tssparty.domain.model.TokenType import com.durian.tssparty.domain.model.TokenType
import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext import kotlinx.coroutines.withContext
@ -65,7 +61,7 @@ object TransactionUtils {
/** /**
* Prepare a transaction for signing * Prepare a transaction for signing
* Gets nonce, gas price, estimates gas, and calculates sign hash * Gets nonce, gas price, estimates gas, and calculates sign hash
* Supports both native KAVA transfers and ERC-20 token transfers (绿积分/积分股/积分值) * Supports both native KAVA transfers and ERC-20 token transfers (绿积分)
*/ */
suspend fun prepareTransaction(params: TransactionParams): Result<PreparedTransaction> = withContext(Dispatchers.IO) { suspend fun prepareTransaction(params: TransactionParams): Result<PreparedTransaction> = withContext(Dispatchers.IO) {
try { try {
@ -81,16 +77,13 @@ object TransactionUtils {
// Native KAVA transfer // Native KAVA transfer
Triple(params.to, kavaToWei(params.amount), ByteArray(0)) Triple(params.to, kavaToWei(params.amount), ByteArray(0))
} }
TokenType.GREEN_POINTS, TokenType.ENERGY_POINTS, TokenType.FUTURE_POINTS -> { TokenType.GREEN_POINTS -> {
// ERC-20 token transfer // ERC-20 token transfer (绿积分)
// To address is the contract, value is 0 // To address is the contract, value is 0
// Data is transfer(recipient, amount) encoded // Data is transfer(recipient, amount) encoded
val contractAddress = TokenConfig.getContractAddress(params.tokenType) val tokenAmount = greenPointsToRaw(params.amount)
?: return@withContext Result.failure(Exception("Invalid token type"))
val decimals = TokenConfig.getDecimals(params.tokenType)
val tokenAmount = tokenToRaw(params.amount, decimals)
val transferData = encodeErc20Transfer(params.to, tokenAmount) val transferData = encodeErc20Transfer(params.to, tokenAmount)
Triple(contractAddress, BigInteger.ZERO, transferData) Triple(GreenPointsToken.CONTRACT_ADDRESS, BigInteger.ZERO, transferData)
} }
} }
@ -105,7 +98,7 @@ object TransactionUtils {
// Default gas limits // Default gas limits
when (params.tokenType) { when (params.tokenType) {
TokenType.KAVA -> BigInteger.valueOf(21000) TokenType.KAVA -> BigInteger.valueOf(21000)
else -> BigInteger.valueOf(65000) // ERC-20 transfers need more gas TokenType.GREEN_POINTS -> BigInteger.valueOf(65000) // ERC-20 transfers need more gas
} }
} }
@ -146,7 +139,7 @@ object TransactionUtils {
*/ */
private fun encodeErc20Transfer(to: String, amount: BigInteger): ByteArray { private fun encodeErc20Transfer(to: String, amount: BigInteger): ByteArray {
// Function selector: transfer(address,uint256) = 0xa9059cbb // Function selector: transfer(address,uint256) = 0xa9059cbb
val selector = ERC20Selectors.TRANSFER.removePrefix("0x").hexToByteArray() val selector = GreenPointsToken.TRANSFER_SELECTOR.removePrefix("0x").hexToByteArray()
// Encode recipient address (padded to 32 bytes) // Encode recipient address (padded to 32 bytes)
val paddedAddress = to.removePrefix("0x").lowercase().padStart(64, '0').hexToByteArray() val paddedAddress = to.removePrefix("0x").lowercase().padStart(64, '0').hexToByteArray()
@ -159,43 +152,21 @@ object TransactionUtils {
} }
/** /**
* Convert token amount to raw units based on decimals * Convert Green Points amount to raw units (6 decimals)
* @param amount Human-readable amount (e.g., "100.5")
* @param decimals Token decimals (e.g., 6 for USDT-like tokens, 18 for native)
*/ */
fun tokenToRaw(amount: String, decimals: Int): BigInteger { fun greenPointsToRaw(amount: String): BigInteger {
val decimal = BigDecimal(amount) val decimal = BigDecimal(amount)
val multiplier = BigDecimal.TEN.pow(decimals) val rawDecimal = decimal.multiply(BigDecimal("1000000")) // 10^6
val rawDecimal = decimal.multiply(multiplier)
return rawDecimal.toBigInteger() return rawDecimal.toBigInteger()
} }
/**
* Convert raw units to human-readable token amount
* @param raw Raw amount in smallest units
* @param decimals Token decimals (e.g., 6 for USDT-like tokens, 18 for native)
*/
fun rawToToken(raw: BigInteger, decimals: Int): String {
val rawDecimal = BigDecimal(raw)
val divisor = BigDecimal.TEN.pow(decimals)
val displayDecimal = rawDecimal.divide(divisor, decimals, java.math.RoundingMode.DOWN)
return displayDecimal.toPlainString()
}
/**
* Convert Green Points amount to raw units (6 decimals)
* @deprecated Use tokenToRaw(amount, 6) instead
*/
fun greenPointsToRaw(amount: String): BigInteger {
return tokenToRaw(amount, GreenPointsToken.DECIMALS)
}
/** /**
* Convert raw units to Green Points display amount * Convert raw units to Green Points display amount
* @deprecated Use rawToToken(raw, 6) instead
*/ */
fun rawToGreenPoints(raw: BigInteger): String { fun rawToGreenPoints(raw: BigInteger): String {
return rawToToken(raw, GreenPointsToken.DECIMALS) val rawDecimal = BigDecimal(raw)
val displayDecimal = rawDecimal.divide(BigDecimal("1000000"), 6, java.math.RoundingMode.DOWN)
return displayDecimal.toPlainString()
} }
/** /**
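
Both the Kotlin helpers and the web utilities convert between human-readable amounts and raw integer units by scaling with 10^decimals (6 for the USDT-style points tokens, 18 for native KAVA). A small TypeScript illustration of the round trip, assuming well-formed decimal input; this mirrors `tokenToRaw`/`rawToToken` from the diff rather than reproducing them verbatim:

```typescript
// Illustration of the decimal scaling used by tokenToRaw / rawToToken.
function tokenToRaw(amount: string, decimals = 6): bigint {
  const [whole, fraction = ''] = amount.split('.');
  const frac = fraction.slice(0, decimals).padEnd(decimals, '0'); // truncate or pad to `decimals`
  return BigInt(whole || '0') * BigInt(10) ** BigInt(decimals) + BigInt(frac || '0');
}

function rawToToken(raw: bigint, decimals = 6): string {
  const s = raw.toString().padStart(decimals + 1, '0');
  const whole = s.slice(0, -decimals) || '0';
  const fraction = s.slice(-decimals).replace(/0+$/, '');
  return fraction ? `${whole}.${fraction}` : whole;
}

// "100.5" with 6 decimals -> 100500000n, and back to "100.5"
console.log(tokenToRaw('100.5', 6), rawToToken(100500000n, 6));
```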

View File

@ -821,21 +821,6 @@ async function handleCoSignStart(event: {
// 标记签名开始 // 标记签名开始
signInProgressSessionId = event.sessionId; signInProgressSessionId = event.sessionId;
// CRITICAL: Get the original partyId from keygen (stored in share) for signing
// This is essential for backup/restore - the partyId must match what was used during keygen
const share = database?.getShare(activeCoSignSession.shareId, activeCoSignSession.sharePassword);
if (!share) {
debugLog.error('main', 'Failed to get share data');
mainWindow?.webContents.send(`cosign:events:${event.sessionId}`, {
type: 'failed',
error: 'Failed to get share data',
});
signInProgressSessionId = null;
return;
}
const signingPartyId = share.party_id || grpcClient?.getPartyId() || '';
debugLog.info('main', `Using signingPartyId=${signingPartyId} (currentDevicePartyId=${grpcClient?.getPartyId()})`);
// 打印当前 activeCoSignSession.participants 状态 // 打印当前 activeCoSignSession.participants 状态
console.log('[CO-SIGN] Current activeCoSignSession.participants before update:', console.log('[CO-SIGN] Current activeCoSignSession.participants before update:',
activeCoSignSession.participants.map(p => ({ activeCoSignSession.participants.map(p => ({
@ -847,9 +832,8 @@ async function handleCoSignStart(event: {
// 从 event.selectedParties 更新参与者列表 // 从 event.selectedParties 更新参与者列表
// 优先使用 activeCoSignSession.participants 中的 partyIndex(来自 signingParties 或 other_parties) // 优先使用 activeCoSignSession.participants 中的 partyIndex(来自 signingParties 或 other_parties)
// CRITICAL: Use signingPartyId (original from keygen) for identification
if (event.selectedParties && event.selectedParties.length > 0) { if (event.selectedParties && event.selectedParties.length > 0) {
const myPartyId = signingPartyId; const myPartyId = grpcClient?.getPartyId();
const updatedParticipants: Array<{ partyId: string; partyIndex: number; name: string }> = []; const updatedParticipants: Array<{ partyId: string; partyIndex: number; name: string }> = [];
event.selectedParties.forEach((partyId) => { event.selectedParties.forEach((partyId) => {
@ -885,11 +869,21 @@ async function handleCoSignStart(event: {
}))); })));
} }
// Note: share already fetched above for getting signingPartyId // 获取 share 数据
const share = database?.getShare(activeCoSignSession.shareId, activeCoSignSession.sharePassword);
if (!share) {
debugLog.error('main', 'Failed to get share data');
mainWindow?.webContents.send(`cosign:events:${event.sessionId}`, {
type: 'failed',
error: 'Failed to get share data',
});
signInProgressSessionId = null;
return;
}
console.log('[CO-SIGN] Calling tssHandler.participateSign with:', { console.log('[CO-SIGN] Calling tssHandler.participateSign with:', {
sessionId: activeCoSignSession.sessionId, sessionId: activeCoSignSession.sessionId,
partyId: signingPartyId, // CRITICAL: Use signingPartyId (original from keygen) partyId: grpcClient?.getPartyId(),
partyIndex: activeCoSignSession.partyIndex, partyIndex: activeCoSignSession.partyIndex,
participants: activeCoSignSession.participants.map(p => ({ partyId: p.partyId.substring(0, 8), partyIndex: p.partyIndex })), participants: activeCoSignSession.participants.map(p => ({ partyId: p.partyId.substring(0, 8), partyIndex: p.partyIndex })),
threshold: activeCoSignSession.threshold, threshold: activeCoSignSession.threshold,
@ -898,10 +892,9 @@ async function handleCoSignStart(event: {
debugLog.info('tss', `Starting sign for session ${event.sessionId}...`); debugLog.info('tss', `Starting sign for session ${event.sessionId}...`);
try { try {
// CRITICAL: Use signingPartyId (original partyId from keygen) for signing
const result = await (tssHandler as TSSHandler).participateSign( const result = await (tssHandler as TSSHandler).participateSign(
activeCoSignSession.sessionId, activeCoSignSession.sessionId,
signingPartyId, // CRITICAL: Use original partyId from keygen for backup/restore to work grpcClient?.getPartyId() || '',
activeCoSignSession.partyIndex, activeCoSignSession.partyIndex,
activeCoSignSession.participants, activeCoSignSession.participants,
activeCoSignSession.threshold, activeCoSignSession.threshold,
@ -1620,9 +1613,9 @@ function setupIpcHandlers() {
initiatorName?: string; initiatorName?: string;
}) => { }) => {
try { try {
// 获取当前 party ID (用于检查连接状态) // 获取当前 party ID
const currentDevicePartyId = grpcClient?.getPartyId(); const partyId = grpcClient?.getPartyId();
if (!currentDevicePartyId) { if (!partyId) {
return { success: false, error: '请先连接到消息路由器' }; return { success: false, error: '请先连接到消息路由器' };
} }
@ -1632,11 +1625,6 @@ function setupIpcHandlers() {
return { success: false, error: 'Share 不存在或密码错误' }; return { success: false, error: 'Share 不存在或密码错误' };
} }
// CRITICAL: Use the original partyId from keygen (stored in share) for signing
// This is essential for backup/restore - the partyId must match what was used during keygen
const partyId = share.party_id || currentDevicePartyId;
debugLog.info('main', `Initiator using partyId=${partyId} (currentDevicePartyId=${currentDevicePartyId})`);
// 从后端获取 keygen 会话的参与者信息(包含正确的 party_index) // 从后端获取 keygen 会话的参与者信息(包含正确的 party_index)
const keygenStatus = await accountClient?.getSessionStatus(share.session_id); const keygenStatus = await accountClient?.getSessionStatus(share.session_id);
if (!keygenStatus?.participants || keygenStatus.participants.length === 0) { if (!keygenStatus?.participants || keygenStatus.participants.length === 0) {
@ -1822,8 +1810,8 @@ function setupIpcHandlers() {
parties?: Array<{ party_id: string; party_index: number }>; parties?: Array<{ party_id: string; party_index: number }>;
}) => { }) => {
try { try {
const currentDevicePartyId = grpcClient?.getPartyId(); const partyId = grpcClient?.getPartyId();
if (!currentDevicePartyId) { if (!partyId) {
return { success: false, error: '请先连接到消息路由器' }; return { success: false, error: '请先连接到消息路由器' };
} }
@ -1833,12 +1821,9 @@ function setupIpcHandlers() {
return { success: false, error: 'Share 不存在或密码错误' }; return { success: false, error: 'Share 不存在或密码错误' };
} }
// CRITICAL: Use the original partyId from keygen (stored in share) for signing debugLog.info('grpc', `Joining co-sign session: sessionId=${params.sessionId}, partyId=${partyId}`);
// This is essential for backup/restore - the partyId must match what was used during keygen
const signingPartyId = share.party_id || currentDevicePartyId;
debugLog.info('grpc', `Joining co-sign session: sessionId=${params.sessionId}, signingPartyId=${signingPartyId} (currentDevicePartyId=${currentDevicePartyId})`);
const result = await grpcClient?.joinSession(params.sessionId, signingPartyId, params.joinToken); const result = await grpcClient?.joinSession(params.sessionId, partyId, params.joinToken);
if (result?.success) { if (result?.success) {
// 设置活跃的 Co-Sign 会话 // 设置活跃的 Co-Sign 会话
// 优先使用 params.parties(来自 validateInviteCode,包含所有预期参与者) // 优先使用 params.parties(来自 validateInviteCode,包含所有预期参与者)
@ -1847,11 +1832,10 @@ function setupIpcHandlers() {
if (params.parties && params.parties.length > 0) { if (params.parties && params.parties.length > 0) {
// 使用完整的 parties 列表 // 使用完整的 parties 列表
// CRITICAL: Use signingPartyId (original from keygen) for identification
participants = params.parties.map(p => ({ participants = params.parties.map(p => ({
partyId: p.party_id, partyId: p.party_id,
partyIndex: p.party_index, partyIndex: p.party_index,
name: p.party_id === signingPartyId ? '我' : `参与方 ${p.party_index + 1}`, name: p.party_id === partyId ? '我' : `参与方 ${p.party_index + 1}`,
})); }));
console.log('[CO-SIGN] Participant using params.parties (complete list):', participants.map(p => ({ console.log('[CO-SIGN] Participant using params.parties (complete list):', participants.map(p => ({
partyId: p.partyId.substring(0, 8), partyId: p.partyId.substring(0, 8),
@ -1866,9 +1850,9 @@ function setupIpcHandlers() {
name: `参与方 ${idx + 1}`, name: `参与方 ${idx + 1}`,
})) || []; })) || [];
// 添加自己 - CRITICAL: Use signingPartyId (original from keygen) // 添加自己
participants.push({ participants.push({
partyId: signingPartyId, partyId: partyId,
partyIndex: result.party_index, partyIndex: result.party_index,
name: '我', name: '我',
}); });
@ -1902,11 +1886,11 @@ function setupIpcHandlers() {
messageHash: params.messageHash, messageHash: params.messageHash,
}); });
// 预订阅消息流 - CRITICAL: Use signingPartyId (original from keygen) // 预订阅消息流
if (tssHandler && 'prepareForSign' in tssHandler) { if (tssHandler && 'prepareForSign' in tssHandler) {
try { try {
debugLog.info('tss', `Preparing for sign: subscribing to messages for session ${params.sessionId}, signingPartyId=${signingPartyId}`); debugLog.info('tss', `Preparing for sign: subscribing to messages for session ${params.sessionId}`);
(tssHandler as TSSHandler).prepareForSign(params.sessionId, signingPartyId); (tssHandler as TSSHandler).prepareForSign(params.sessionId, partyId);
} catch (prepareErr) { } catch (prepareErr) {
debugLog.error('tss', `Failed to prepare for sign: ${(prepareErr as Error).message}`); debugLog.error('tss', `Failed to prepare for sign: ${(prepareErr as Error).message}`);
return { success: false, error: `消息订阅失败: ${(prepareErr as Error).message}` }; return { success: false, error: `消息订阅失败: ${(prepareErr as Error).message}` };
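
The lines removed from this file encode one rule: when a share has been restored from backup, signing must use the partyId stored with the share (the keygen-time identity), falling back to the currently connected device's partyId only when no stored value exists. A condensed TypeScript sketch of that resolution, with the share shape narrowed to what the diff shows:

```typescript
// Condensed from the removed "main"-branch logic; types are narrowed for illustration.
interface StoredShare {
  party_id?: string;   // keygen-time partyId persisted with the share
  session_id: string;
}

function resolveSigningPartyId(
  share: StoredShare,
  currentDevicePartyId: string | undefined
): string {
  // CRITICAL on the main branch: prefer the keygen-time partyId so restored shares can still sign.
  return share.party_id || currentDevicePartyId || '';
}

// Usage as in handleCoSignStart: fetch and decrypt the share first, fail fast if that errors,
// then pass the resolved id to participateSign / prepareForSign instead of the device partyId.
```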

View File

@ -11,12 +11,7 @@ import {
getCurrentRpcUrl, getCurrentRpcUrl,
getGasPrice, getGasPrice,
fetchGreenPointsBalance, fetchGreenPointsBalance,
fetchEnergyPointsBalance,
fetchFuturePointsBalance,
GREEN_POINTS_TOKEN, GREEN_POINTS_TOKEN,
ENERGY_POINTS_TOKEN,
FUTURE_POINTS_TOKEN,
TOKEN_CONFIG,
type PreparedTransaction, type PreparedTransaction,
type TokenType, type TokenType,
} from '../utils/transaction'; } from '../utils/transaction';
@ -37,8 +32,6 @@ interface ShareWithAddress extends ShareItem {
evmAddress?: string; evmAddress?: string;
kavaBalance?: string; kavaBalance?: string;
greenPointsBalance?: string; greenPointsBalance?: string;
energyPointsBalance?: string;
futurePointsBalance?: string;
balanceLoading?: boolean; balanceLoading?: boolean;
} }
@ -96,30 +89,15 @@ export default function Home() {
const [isCalculatingMax, setIsCalculatingMax] = useState(false); const [isCalculatingMax, setIsCalculatingMax] = useState(false);
const [copySuccess, setCopySuccess] = useState(false); const [copySuccess, setCopySuccess] = useState(false);
// 获取当前选择代币的余额
const getTokenBalance = (share: ShareWithAddress | null, tokenType: TokenType): string => {
if (!share) return '0';
switch (tokenType) {
case 'KAVA':
return share.kavaBalance || '0';
case 'GREEN_POINTS':
return share.greenPointsBalance || '0';
case 'ENERGY_POINTS':
return share.energyPointsBalance || '0';
case 'FUTURE_POINTS':
return share.futurePointsBalance || '0';
}
};
// 计算扣除 Gas 费后的最大可转账金额 // 计算扣除 Gas 费后的最大可转账金额
const calculateMaxAmount = async () => { const calculateMaxAmount = async () => {
if (!transferShare?.evmAddress) return; if (!transferShare?.evmAddress) return;
setIsCalculatingMax(true); setIsCalculatingMax(true);
try { try {
if (TOKEN_CONFIG.isERC20(transferTokenType)) { if (transferTokenType === 'GREEN_POINTS') {
// For ERC-20 token transfers, use the full token balance (gas is paid in KAVA) // For token transfers, use the full token balance (gas is paid in KAVA)
const balance = getTokenBalance(transferShare, transferTokenType); const balance = transferShare.greenPointsBalance || '0';
setTransferAmount(balance); setTransferAmount(balance);
setTransferError(null); setTransferError(null);
} else { } else {
@ -153,8 +131,8 @@ export default function Home() {
} }
} catch (error) { } catch (error) {
console.error('Failed to calculate max amount:', error); console.error('Failed to calculate max amount:', error);
if (TOKEN_CONFIG.isERC20(transferTokenType)) { if (transferTokenType === 'GREEN_POINTS') {
setTransferAmount(getTokenBalance(transferShare, transferTokenType)); setTransferAmount(transferShare.greenPointsBalance || '0');
} else { } else {
// 如果获取 Gas 失败,使用默认估算 (1 gwei * 21000) // 如果获取 Gas 失败,使用默认估算 (1 gwei * 21000)
const defaultGasFee = 0.000021; // ~21000 * 1 gwei const defaultGasFee = 0.000021; // ~21000 * 1 gwei
@ -187,14 +165,12 @@ export default function Home() {
const updatedShares = await Promise.all( const updatedShares = await Promise.all(
sharesWithAddrs.map(async (share) => { sharesWithAddrs.map(async (share) => {
if (share.evmAddress) { if (share.evmAddress) {
// Fetch all balances in parallel // Fetch both balances in parallel
const [kavaBalance, greenPointsBalance, energyPointsBalance, futurePointsBalance] = await Promise.all([ const [kavaBalance, greenPointsBalance] = await Promise.all([
fetchKavaBalance(share.evmAddress), fetchKavaBalance(share.evmAddress),
fetchGreenPointsBalance(share.evmAddress), fetchGreenPointsBalance(share.evmAddress),
fetchEnergyPointsBalance(share.evmAddress),
fetchFuturePointsBalance(share.evmAddress),
]); ]);
return { ...share, kavaBalance, greenPointsBalance, energyPointsBalance, futurePointsBalance, balanceLoading: false }; return { ...share, kavaBalance, greenPointsBalance, balanceLoading: false };
} }
return { ...share, balanceLoading: false }; return { ...share, balanceLoading: false };
}) })
@ -339,7 +315,11 @@ export default function Home() {
return '转账金额无效'; return '转账金额无效';
} }
const amount = parseFloat(transferAmount); const amount = parseFloat(transferAmount);
const balance = parseFloat(getTokenBalance(transferShare, transferTokenType)); const balance = parseFloat(
transferTokenType === 'GREEN_POINTS'
? (transferShare?.greenPointsBalance || '0')
: (transferShare?.kavaBalance || '0')
);
if (amount > balance) { if (amount > balance) {
return '余额不足'; return '余额不足';
} }
@ -506,7 +486,7 @@ export default function Home() {
</div> </div>
)} )}
{/* 余额显示 - 所有代币 */} {/* 余额显示 - KAVA 和 绿积分 */}
{share.evmAddress && ( {share.evmAddress && (
<div className={styles.balanceSection}> <div className={styles.balanceSection}>
<div className={styles.balanceRow}> <div className={styles.balanceRow}>
@ -529,26 +509,6 @@ export default function Home() {
)} )}
</span> </span>
</div> </div>
<div className={styles.balanceRow}>
<span className={styles.balanceLabel} style={{ color: '#2196F3' }}>{ENERGY_POINTS_TOKEN.name}</span>
<span className={styles.balanceValue} style={{ color: '#2196F3' }}>
{share.balanceLoading ? (
<span className={styles.balanceLoading}>...</span>
) : (
<>{share.energyPointsBalance || '0'}</>
)}
</span>
</div>
<div className={styles.balanceRow}>
<span className={styles.balanceLabel} style={{ color: '#9C27B0' }}>{FUTURE_POINTS_TOKEN.name}</span>
<span className={styles.balanceValue} style={{ color: '#9C27B0' }}>
{share.balanceLoading ? (
<span className={styles.balanceLoading}>...</span>
) : (
<>{share.futurePointsBalance || '0'}</>
)}
</span>
</div>
</div> </div>
)} )}
@ -618,10 +578,7 @@ export default function Home() {
<div className={styles.transferWalletInfo}> <div className={styles.transferWalletInfo}>
<div className={styles.transferWalletName}>{transferShare.walletName}</div> <div className={styles.transferWalletName}>{transferShare.walletName}</div>
<div className={styles.transferWalletBalance}> <div className={styles.transferWalletBalance}>
KAVA: {transferShare.kavaBalance || '0'} | <span style={{color: '#4CAF50'}}>{GREEN_POINTS_TOKEN.name}: {transferShare.greenPointsBalance || '0'}</span> KAVA: {transferShare.kavaBalance || '0'} | {GREEN_POINTS_TOKEN.name}: {transferShare.greenPointsBalance || '0'}
</div>
<div className={styles.transferWalletBalance}>
<span style={{color: '#2196F3'}}>{ENERGY_POINTS_TOKEN.name}: {transferShare.energyPointsBalance || '0'}</span> | <span style={{color: '#9C27B0'}}>{FUTURE_POINTS_TOKEN.name}: {transferShare.futurePointsBalance || '0'}</span>
</div> </div>
<div className={styles.transferNetwork}> <div className={styles.transferNetwork}>
网络: Kava {getCurrentNetwork() === 'mainnet' ? '主网' : '测试网'} 网络: Kava {getCurrentNetwork() === 'mainnet' ? '主网' : '测试网'}
@ -648,22 +605,6 @@ export default function Home() {
{GREEN_POINTS_TOKEN.name} {GREEN_POINTS_TOKEN.name}
</button> </button>
</div> </div>
<div className={styles.tokenTypeSelector} style={{ marginTop: '8px' }}>
<button
className={`${styles.tokenTypeButton} ${transferTokenType === 'ENERGY_POINTS' ? styles.tokenTypeActive : ''}`}
onClick={() => { setTransferTokenType('ENERGY_POINTS'); setTransferAmount(''); }}
style={transferTokenType === 'ENERGY_POINTS' ? { backgroundColor: '#2196F3', borderColor: '#2196F3' } : {}}
>
{ENERGY_POINTS_TOKEN.name}
</button>
<button
className={`${styles.tokenTypeButton} ${transferTokenType === 'FUTURE_POINTS' ? styles.tokenTypeActive : ''}`}
onClick={() => { setTransferTokenType('FUTURE_POINTS'); setTransferAmount(''); }}
style={transferTokenType === 'FUTURE_POINTS' ? { backgroundColor: '#9C27B0', borderColor: '#9C27B0' } : {}}
>
{FUTURE_POINTS_TOKEN.name}
</button>
</div>
</div> </div>
{/* 收款地址 */} {/* 收款地址 */}
@ -681,7 +622,7 @@ export default function Home() {
{/* 转账金额 */} {/* 转账金额 */}
<div className={styles.transferInputGroup}> <div className={styles.transferInputGroup}>
<label className={styles.transferLabel}> <label className={styles.transferLabel}>
转账金额 ({TOKEN_CONFIG.getName(transferTokenType)}) 转账金额 ({transferTokenType === 'GREEN_POINTS' ? GREEN_POINTS_TOKEN.name : 'KAVA'})
</label> </label>
<div className={styles.transferAmountWrapper}> <div className={styles.transferAmountWrapper}>
<input <input
@ -748,8 +689,8 @@ export default function Home() {
<div className={styles.confirmDetails}> <div className={styles.confirmDetails}>
<div className={styles.confirmRow}> <div className={styles.confirmRow}>
<span className={styles.confirmLabel}></span> <span className={styles.confirmLabel}></span>
<span className={styles.confirmValue} style={TOKEN_CONFIG.isERC20(transferTokenType) ? { color: transferTokenType === 'GREEN_POINTS' ? '#4CAF50' : transferTokenType === 'ENERGY_POINTS' ? '#2196F3' : '#9C27B0' } : {}}> <span className={styles.confirmValue} style={transferTokenType === 'GREEN_POINTS' ? { color: '#4CAF50' } : {}}>
{TOKEN_CONFIG.getName(transferTokenType)} {transferTokenType === 'GREEN_POINTS' ? GREEN_POINTS_TOKEN.name : 'KAVA'}
</span> </span>
</div> </div>
<div className={styles.confirmRow}> <div className={styles.confirmRow}>
@ -758,8 +699,8 @@ export default function Home() {
</div> </div>
<div className={styles.confirmRow}> <div className={styles.confirmRow}>
<span className={styles.confirmLabel}></span> <span className={styles.confirmLabel}></span>
<span className={styles.confirmValue} style={TOKEN_CONFIG.isERC20(transferTokenType) ? { color: transferTokenType === 'GREEN_POINTS' ? '#4CAF50' : transferTokenType === 'ENERGY_POINTS' ? '#2196F3' : '#9C27B0' } : {}}> <span className={styles.confirmValue} style={transferTokenType === 'GREEN_POINTS' ? { color: '#4CAF50' } : {}}>
{transferAmount} {TOKEN_CONFIG.getName(transferTokenType)} {transferAmount} {transferTokenType === 'GREEN_POINTS' ? GREEN_POINTS_TOKEN.name : 'KAVA'}
</span> </span>
</div> </div>
<div className={styles.confirmRow}> <div className={styles.confirmRow}>
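
Both clients run the same pre-submit checks before preparing a transaction: the recipient must be a 42-character `0x` address, the amount must parse to a positive number, and it must not exceed the selected token's balance. A short TypeScript sketch of that validation (not the exact app code; the error strings are the ones shown in the diff):

```typescript
// Sketch of the pre-submit checks used by both the Android and web clients.
function validateTransfer(to: string, amount: string, balance: string): string | null {
  if (!to) return '请输入收款地址';
  if (!to.startsWith('0x') || to.length !== 42) return '地址格式不正确';
  const value = parseFloat(amount);
  if (!Number.isFinite(value) || value <= 0) return '转账金额无效';
  if (value > parseFloat(balance || '0')) return '余额不足';
  return null; // null means the form is valid
}
```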

View File

@ -17,97 +17,17 @@ export const KAVA_RPC_URL = {
}; };
// Token types // Token types
export type TokenType = 'KAVA' | 'GREEN_POINTS' | 'ENERGY_POINTS' | 'FUTURE_POINTS'; export type TokenType = 'KAVA' | 'GREEN_POINTS';
// ERC-20 通用函数选择器 // Green Points (绿积分) Token Configuration
export const ERC20_SELECTORS = {
balanceOf: '0x70a08231', // balanceOf(address)
transfer: '0xa9059cbb', // transfer(address,uint256)
approve: '0x095ea7b3', // approve(address,uint256)
allowance: '0xdd62ed3e', // allowance(address,address)
totalSupply: '0x18160ddd', // totalSupply()
};
// Green Points (绿积分) Token Configuration - dUSDT
export const GREEN_POINTS_TOKEN = { export const GREEN_POINTS_TOKEN = {
contractAddress: '0xA9F3A35dBa8699c8C681D8db03F0c1A8CEB9D7c3', contractAddress: '0xA9F3A35dBa8699c8C681D8db03F0c1A8CEB9D7c3',
name: '绿积分', name: '绿积分',
symbol: 'dUSDT', symbol: 'dUSDT',
decimals: 6, decimals: 6,
// ERC-20 function selectors (kept for backward compatibility) // ERC-20 function selectors
balanceOfSelector: ERC20_SELECTORS.balanceOf, balanceOfSelector: '0x70a08231',
transferSelector: ERC20_SELECTORS.transfer, transferSelector: '0xa9059cbb',
};
// Energy Points (积分股) Token Configuration - eUSDT
export const ENERGY_POINTS_TOKEN = {
contractAddress: '0x7C3275D808eFbAE90C06C7E3A9AfDdcAa8563931',
name: '积分股',
symbol: 'eUSDT',
decimals: 6,
};
// Future Points (积分值) Token Configuration - fUSDT
export const FUTURE_POINTS_TOKEN = {
contractAddress: '0x14dc4f7d3E4197438d058C3D156dd9826A161134',
name: '积分值',
symbol: 'fUSDT',
decimals: 6,
};
// Token configuration utility
export const TOKEN_CONFIG = {
getContractAddress: (tokenType: TokenType): string | null => {
switch (tokenType) {
case 'KAVA':
return null; // Native token has no contract
case 'GREEN_POINTS':
return GREEN_POINTS_TOKEN.contractAddress;
case 'ENERGY_POINTS':
return ENERGY_POINTS_TOKEN.contractAddress;
case 'FUTURE_POINTS':
return FUTURE_POINTS_TOKEN.contractAddress;
}
},
getDecimals: (tokenType: TokenType): number => {
switch (tokenType) {
case 'KAVA':
return 18;
case 'GREEN_POINTS':
return GREEN_POINTS_TOKEN.decimals;
case 'ENERGY_POINTS':
return ENERGY_POINTS_TOKEN.decimals;
case 'FUTURE_POINTS':
return FUTURE_POINTS_TOKEN.decimals;
}
},
getName: (tokenType: TokenType): string => {
switch (tokenType) {
case 'KAVA':
return 'KAVA';
case 'GREEN_POINTS':
return GREEN_POINTS_TOKEN.name;
case 'ENERGY_POINTS':
return ENERGY_POINTS_TOKEN.name;
case 'FUTURE_POINTS':
return FUTURE_POINTS_TOKEN.name;
}
},
getSymbol: (tokenType: TokenType): string => {
switch (tokenType) {
case 'KAVA':
return 'KAVA';
case 'GREEN_POINTS':
return GREEN_POINTS_TOKEN.symbol;
case 'ENERGY_POINTS':
return ENERGY_POINTS_TOKEN.symbol;
case 'FUTURE_POINTS':
return FUTURE_POINTS_TOKEN.symbol;
}
},
isERC20: (tokenType: TokenType): boolean => {
return tokenType !== 'KAVA';
},
}; };
// 当前网络配置 (从 localStorage 读取或使用默认值) // 当前网络配置 (从 localStorage 读取或使用默认值)
@ -407,69 +327,44 @@ export function weiToKava(wei: bigint): string {
} }
/** /**
* Convert token amount to raw units * Convert Green Points amount to raw units (6 decimals)
* @param amount Human-readable amount
* @param decimals Token decimals (default 6 for USDT-like tokens)
*/ */
export function tokenToRaw(amount: string, decimals: number = 6): bigint { export function greenPointsToRaw(amount: string): bigint {
const parts = amount.split('.'); const parts = amount.split('.');
const whole = BigInt(parts[0] || '0'); const whole = BigInt(parts[0] || '0');
let fraction = parts[1] || ''; let fraction = parts[1] || '';
// 补齐或截断到指定位数 // 补齐或截断到 6 位
if (fraction.length > decimals) { if (fraction.length > 6) {
fraction = fraction.substring(0, decimals); fraction = fraction.substring(0, 6);
} else { } else {
fraction = fraction.padEnd(decimals, '0'); fraction = fraction.padEnd(6, '0');
} }
return whole * BigInt(10 ** decimals) + BigInt(fraction); return whole * BigInt(10 ** 6) + BigInt(fraction);
}
/**
* Convert raw units to a human-readable token amount
* @param raw Raw amount in smallest units
* @param decimals Token decimals (default 6 for USDT-like tokens)
*/
export function rawToToken(raw: bigint, decimals: number = 6): string {
const rawStr = raw.toString().padStart(decimals + 1, '0');
const whole = rawStr.slice(0, -decimals) || '0';
const fraction = rawStr.slice(-decimals).replace(/0+$/, '');
return fraction ? `${whole}.${fraction}` : whole;
}
/**
* Convert Green Points amount to raw units (6 decimals)
* @deprecated Use tokenToRaw(amount, 6) instead
*/
export function greenPointsToRaw(amount: string): bigint {
return tokenToRaw(amount, GREEN_POINTS_TOKEN.decimals);
} }
/** /**
* Convert raw units to Green Points display amount * Convert raw units to Green Points display amount
* @deprecated Use rawToToken(raw, 6) instead
*/ */
export function rawToGreenPoints(raw: bigint): string { export function rawToGreenPoints(raw: bigint): string {
return rawToToken(raw, GREEN_POINTS_TOKEN.decimals); const rawStr = raw.toString().padStart(7, '0');
const whole = rawStr.slice(0, -6) || '0';
const fraction = rawStr.slice(-6).replace(/0+$/, '');
return fraction ? `${whole}.${fraction}` : whole;
} }
/** /**
* Fetch ERC-20 token balance * Fetch Green Points balance (ERC-20)
* @param address Wallet address
* @param contractAddress Token contract address
* @param decimals Token decimals
*/ */
export async function fetchERC20Balance( export async function fetchGreenPointsBalance(address: string): Promise<string> {
address: string,
contractAddress: string,
decimals: number = 6
): Promise<string> {
try { try {
const rpcUrl = getCurrentRpcUrl(); const rpcUrl = getCurrentRpcUrl();
// Encode balanceOf(address) call data // Encode balanceOf(address) call data
// Function selector: 0x70a08231
// Address parameter: padded to 32 bytes
const paddedAddress = address.toLowerCase().replace('0x', '').padStart(64, '0'); const paddedAddress = address.toLowerCase().replace('0x', '').padStart(64, '0');
const callData = ERC20_SELECTORS.balanceOf + paddedAddress; const callData = GREEN_POINTS_TOKEN.balanceOfSelector + paddedAddress;
const response = await fetch(rpcUrl, { const response = await fetch(rpcUrl, {
method: 'POST', method: 'POST',
@ -479,7 +374,7 @@ export async function fetchERC20Balance(
method: 'eth_call', method: 'eth_call',
params: [ params: [
{ {
to: contractAddress, to: GREEN_POINTS_TOKEN.contractAddress,
data: callData, data: callData,
}, },
'latest', 'latest',
@ -491,65 +386,21 @@ export async function fetchERC20Balance(
const data = await response.json(); const data = await response.json();
if (data.result && data.result !== '0x') { if (data.result && data.result !== '0x') {
const balanceRaw = BigInt(data.result); const balanceRaw = BigInt(data.result);
return rawToToken(balanceRaw, decimals); return rawToGreenPoints(balanceRaw);
} }
return '0'; return '0';
} catch (error) { } catch (error) {
console.error('Failed to fetch ERC20 balance:', error); console.error('Failed to fetch Green Points balance:', error);
return '0'; return '0';
} }
} }
/**
* Fetch Green Points balance (ERC-20)
*/
export async function fetchGreenPointsBalance(address: string): Promise<string> {
return fetchERC20Balance(address, GREEN_POINTS_TOKEN.contractAddress, GREEN_POINTS_TOKEN.decimals);
}
/**
* Fetch Energy Points balance (eUSDT)
*/
export async function fetchEnergyPointsBalance(address: string): Promise<string> {
return fetchERC20Balance(address, ENERGY_POINTS_TOKEN.contractAddress, ENERGY_POINTS_TOKEN.decimals);
}
/**
* Fetch Future Points balance (fUSDT)
*/
export async function fetchFuturePointsBalance(address: string): Promise<string> {
return fetchERC20Balance(address, FUTURE_POINTS_TOKEN.contractAddress, FUTURE_POINTS_TOKEN.decimals);
}
/**
* Fetch all token balances for an address
*/
export async function fetchAllTokenBalances(address: string): Promise<{
kava: string;
greenPoints: string;
energyPoints: string;
futurePoints: string;
}> {
const [greenPoints, energyPoints, futurePoints] = await Promise.all([
fetchGreenPointsBalance(address),
fetchEnergyPointsBalance(address),
fetchFuturePointsBalance(address),
]);
// Note: KAVA balance is fetched separately via eth_getBalance
return {
kava: '0', // Caller should fetch KAVA balance separately
greenPoints,
energyPoints,
futurePoints,
};
}
/** /**
* Encode ERC-20 transfer function call * Encode ERC-20 transfer function call
*/ */
function encodeErc20Transfer(to: string, amount: bigint): string { function encodeErc20Transfer(to: string, amount: bigint): string {
// Function selector: transfer(address,uint256) = 0xa9059cbb // Function selector: transfer(address,uint256) = 0xa9059cbb
const selector = ERC20_SELECTORS.transfer; const selector = GREEN_POINTS_TOKEN.transferSelector;
// Encode recipient address (padded to 32 bytes) // Encode recipient address (padded to 32 bytes)
const paddedAddress = to.toLowerCase().replace('0x', '').padStart(64, '0'); const paddedAddress = to.toLowerCase().replace('0x', '').padStart(64, '0');
// Encode amount (padded to 32 bytes) // Encode amount (padded to 32 bytes)
@ -625,15 +476,13 @@ export async function estimateGas(params: { from: string; to: string; value: str
// For token transfers, we need different params // For token transfers, we need different params
let txParams: { from: string; to: string; value: string; data?: string }; let txParams: { from: string; to: string; value: string; data?: string };
if (TOKEN_CONFIG.isERC20(tokenType)) { if (tokenType === 'GREEN_POINTS') {
// ERC-20 transfer: to is contract, value is 0, data is transfer call // ERC-20 transfer: to is contract, value is 0, data is transfer call
const contractAddress = TOKEN_CONFIG.getContractAddress(tokenType); const tokenAmount = greenPointsToRaw(params.value);
const decimals = TOKEN_CONFIG.getDecimals(tokenType);
const tokenAmount = tokenToRaw(params.value, decimals);
const transferData = encodeErc20Transfer(params.to, tokenAmount); const transferData = encodeErc20Transfer(params.to, tokenAmount);
txParams = { txParams = {
from: params.from, from: params.from,
to: contractAddress!, to: GREEN_POINTS_TOKEN.contractAddress,
value: '0x0', value: '0x0',
data: transferData, data: transferData,
}; };
@ -662,7 +511,7 @@ export async function estimateGas(params: { from: string; to: string; value: str
if (data.error) { if (data.error) {
// 如果估算失败,使用默认值 // 如果估算失败,使用默认值
console.warn('Gas 估算失败,使用默认值:', data.error); console.warn('Gas 估算失败,使用默认值:', data.error);
return TOKEN_CONFIG.isERC20(tokenType) ? BigInt(65000) : BigInt(21000); return tokenType === 'GREEN_POINTS' ? BigInt(65000) : BigInt(21000);
} }
return BigInt(data.result); return BigInt(data.result);
} }
@ -694,14 +543,12 @@ export async function prepareTransaction(params: TransactionParams): Promise<Pre
let value: bigint; let value: bigint;
let data: string; let data: string;
if (TOKEN_CONFIG.isERC20(tokenType)) { if (tokenType === 'GREEN_POINTS') {
// ERC-20 token transfer // ERC-20 token transfer
// To address is the contract, value is 0 // To address is the contract, value is 0
// Data is transfer(recipient, amount) encoded // Data is transfer(recipient, amount) encoded
const contractAddress = TOKEN_CONFIG.getContractAddress(tokenType); const tokenAmount = greenPointsToRaw(params.value);
const decimals = TOKEN_CONFIG.getDecimals(tokenType); toAddress = GREEN_POINTS_TOKEN.contractAddress.toLowerCase();
const tokenAmount = tokenToRaw(params.value, decimals);
toAddress = contractAddress!.toLowerCase();
value = BigInt(0); value = BigInt(0);
data = encodeErc20Transfer(params.to, tokenAmount); data = encodeErc20Transfer(params.to, tokenAmount);
} else { } else {
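
Both clients build raw ERC-20 calldata by hand: a 4-byte function selector followed by 32-byte ABI-padded arguments (`0xa9059cbb` for `transfer(address,uint256)`, `0x70a08231` for `balanceOf(address)`). A compact TypeScript illustration of that encoding, matching the selectors in the diff (the recipient address below is a placeholder):

```typescript
// Illustration of the manual ABI encoding used by encodeErc20Transfer and the balanceOf eth_call.
const TRANSFER_SELECTOR = '0xa9059cbb';   // transfer(address,uint256)
const BALANCE_OF_SELECTOR = '0x70a08231'; // balanceOf(address)

// Left-pad a hex value (address or amount) to a 32-byte ABI word.
const pad32 = (hex: string) => hex.toLowerCase().replace('0x', '').padStart(64, '0');

function encodeErc20Transfer(to: string, amountRaw: bigint): string {
  // selector + recipient padded to 32 bytes + amount padded to 32 bytes
  return TRANSFER_SELECTOR + pad32(to) + pad32(amountRaw.toString(16));
}

function encodeBalanceOf(owner: string): string {
  return BALANCE_OF_SELECTOR + pad32(owner);
}

// e.g. transferring 100.5 of a 6-decimals token encodes the raw amount 100500000
console.log(encodeErc20Transfer('0x0000000000000000000000000000000000000001', 100500000n));
```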

View File

@ -1,6 +1,7 @@
-- ============================================================================ -- ============================================================================
-- auth-service 初始化 migration -- auth-service 初始化 migration
-- 合并自: 0001_init, 0002_add_transactional_idempotency -- 合并自: 20260111000000_init, 20260111083500_allow_nullable_phone_password,
-- 20260112110000_add_nickname_to_synced_legacy_users
-- ============================================================================ -- ============================================================================
-- CreateEnum -- CreateEnum
@ -240,26 +241,3 @@ ALTER TABLE "sms_logs" ADD CONSTRAINT "sms_logs_user_id_fkey" FOREIGN KEY ("user
-- AddForeignKey -- AddForeignKey
ALTER TABLE "login_logs" ADD CONSTRAINT "login_logs_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE; ALTER TABLE "login_logs" ADD CONSTRAINT "login_logs_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- ============================================================================
-- 事务性幂等消费支持 (从 0002_add_transactional_idempotency 合并)
-- 用于 1.0 -> 2.0 CDC 同步的 100% exactly-once 语义
-- ============================================================================
-- CreateTable
CREATE TABLE "processed_cdc_events" (
"id" BIGSERIAL NOT NULL,
"source_topic" TEXT NOT NULL,
"offset" BIGINT NOT NULL,
"table_name" TEXT NOT NULL,
"operation" TEXT NOT NULL,
"processed_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "processed_cdc_events_pkey" PRIMARY KEY ("id")
);
-- CreateIndex (复合唯一索引保证幂等性)
CREATE UNIQUE INDEX "processed_cdc_events_source_topic_offset_key" ON "processed_cdc_events"("source_topic", "offset");
-- CreateIndex (时间索引用于清理旧数据)
CREATE INDEX "processed_cdc_events_processed_at_idx" ON "processed_cdc_events"("processed_at");

View File

@ -0,0 +1,25 @@
-- ============================================================================
-- 添加事务性幂等消费支持
-- 用于 1.0 -> 2.0 CDC 同步的 100% exactly-once 语义
-- ============================================================================
-- 创建 processed_cdc_events 表(用于 CDC 事件幂等)
-- 唯一键: (source_topic, offset) - Kafka topic 名称 + 消息偏移量
-- 用于保证每个 CDC 事件只处理一次(exactly-once 语义)
CREATE TABLE IF NOT EXISTS "processed_cdc_events" (
"id" BIGSERIAL NOT NULL,
"source_topic" VARCHAR(200) NOT NULL, -- Kafka topic 名称(如 cdc.identity.public.user_accounts
"offset" BIGINT NOT NULL, -- Kafka 消息偏移量(在 partition 内唯一)
"table_name" VARCHAR(100) NOT NULL, -- 源表名
"operation" VARCHAR(10) NOT NULL, -- CDC 操作类型: c(create), u(update), d(delete), r(snapshot read)
"processed_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "processed_cdc_events_pkey" PRIMARY KEY ("id")
);
-- 复合唯一索引:(source_topic, offset) 保证幂等性
-- 注意:这不是数据库自增 ID,而是 Kafka 消息的唯一标识
CREATE UNIQUE INDEX "processed_cdc_events_source_topic_offset_key" ON "processed_cdc_events"("source_topic", "offset");
-- 时间索引用于清理旧数据
CREATE INDEX "processed_cdc_events_processed_at_idx" ON "processed_cdc_events"("processed_at");
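
The `(source_topic, offset)` unique index is what makes exactly-once consumption work: the consumer records the Kafka coordinates and applies the row change inside the same database transaction, so a replayed message hits the unique constraint and is skipped. A hedged node-postgres sketch of that pattern (the `applyChange` callback and connection setup are placeholders, not code from this repo):

```typescript
import { Pool, PoolClient } from 'pg';

const pool = new Pool(); // connection settings come from the environment; placeholder here

// Process one CDC event exactly once: the marker insert and the data change
// commit or roll back together. A duplicate (source_topic, offset) hits the
// unique index created by this migration and the event is skipped.
async function processCdcEvent(
  topic: string,
  offset: bigint,
  tableName: string,
  operation: 'c' | 'u' | 'd' | 'r',
  applyChange: (client: PoolClient) => Promise<void> // placeholder for the real upsert/delete
): Promise<void> {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    const marker = await client.query(
      `INSERT INTO processed_cdc_events (source_topic, "offset", table_name, operation)
       VALUES ($1, $2, $3, $4)
       ON CONFLICT (source_topic, "offset") DO NOTHING
       RETURNING id`,
      [topic, offset.toString(), tableName, operation]
    );
    if (marker.rowCount === 0) {
      await client.query('ROLLBACK'); // already processed earlier
      return;
    }
    await applyChange(client);
    await client.query('COMMIT');
  } catch (err) {
    await client.query('ROLLBACK');
    throw err;
  } finally {
    client.release();
  }
}
```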

View File

@ -22,7 +22,7 @@ class ChangePasswordDto {
newPassword: string; newPassword: string;
} }
@Controller('auth/password') @Controller('password')
@UseGuards(ThrottlerGuard) @UseGuards(ThrottlerGuard)
export class PasswordController { export class PasswordController {
constructor(private readonly passwordService: PasswordService) {} constructor(private readonly passwordService: PasswordService) {}

View File

@ -21,7 +21,7 @@ class VerifySmsDto {
type: 'REGISTER' | 'LOGIN' | 'RESET_PASSWORD' | 'CHANGE_PHONE'; type: 'REGISTER' | 'LOGIN' | 'RESET_PASSWORD' | 'CHANGE_PHONE';
} }
@Controller('auth/sms') @Controller('sms')
@UseGuards(ThrottlerGuard) @UseGuards(ThrottlerGuard)
export class SmsController { export class SmsController {
constructor(private readonly smsService: SmsService) {} constructor(private readonly smsService: SmsService) {}

View File

@ -7,7 +7,7 @@ import { UserService, UserProfileResult } from '@/application/services';
import { JwtAuthGuard } from '@/shared/guards/jwt-auth.guard'; import { JwtAuthGuard } from '@/shared/guards/jwt-auth.guard';
import { CurrentUser } from '@/shared/decorators/current-user.decorator'; import { CurrentUser } from '@/shared/decorators/current-user.decorator';
@Controller('auth/user') @Controller('user')
@UseGuards(JwtAuthGuard) @UseGuards(JwtAuthGuard)
export class UserController { export class UserController {
constructor(private readonly userService: UserService) {} constructor(private readonly userService: UserService) {}

View File

@ -9,7 +9,7 @@ import { InfrastructureModule } from './infrastructure/infrastructure.module';
// 配置模块 // 配置模块
ConfigModule.forRoot({ ConfigModule.forRoot({
isGlobal: true, isGlobal: true,
envFilePath: ['.env.local', '.env', '../.env'], envFilePath: ['.env.local', '.env'],
}), }),
// 限流模块 // 限流模块

View File

@ -1,78 +0,0 @@
// SPDX-License-Identifier: MIT
pragma solidity 0.8.19;
/**
* @title EnergyUSDT
* @dev Fixed supply ERC-20 token - NO MINTING CAPABILITY
* Total Supply: 10,002,000,000 (100.02 Billion) tokens with 6 decimals (matching USDT)
*
* IMPORTANT: This contract has NO mint function and NO way to increase supply.
* All tokens are minted to the deployer at construction time.
*/
contract EnergyUSDT {
string public constant name = "Energy USDT";
string public constant symbol = "eUSDT";
uint8 public constant decimals = 6;
// Fixed total supply: 100.02 billion tokens (10,002,000,000 * 10^6)
uint256 public constant totalSupply = 10_002_000_000 * 10**6;
mapping(address => uint256) private _balances;
mapping(address => mapping(address => uint256)) private _allowances;
event Transfer(address indexed from, address indexed to, uint256 value);
event Approval(address indexed owner, address indexed spender, uint256 value);
/**
* @dev Constructor - mints entire fixed supply to deployer
* No mint function exists - supply is permanently fixed
*/
constructor() {
_balances[msg.sender] = totalSupply;
emit Transfer(address(0), msg.sender, totalSupply);
}
function balanceOf(address account) public view returns (uint256) {
return _balances[account];
}
function transfer(address to, uint256 amount) public returns (bool) {
require(to != address(0), "Transfer to zero address");
require(_balances[msg.sender] >= amount, "Insufficient balance");
unchecked {
_balances[msg.sender] -= amount;
_balances[to] += amount;
}
emit Transfer(msg.sender, to, amount);
return true;
}
function allowance(address owner, address spender) public view returns (uint256) {
return _allowances[owner][spender];
}
function approve(address spender, uint256 amount) public returns (bool) {
require(spender != address(0), "Approve to zero address");
_allowances[msg.sender][spender] = amount;
emit Approval(msg.sender, spender, amount);
return true;
}
function transferFrom(address from, address to, uint256 amount) public returns (bool) {
require(from != address(0), "Transfer from zero address");
require(to != address(0), "Transfer to zero address");
require(_balances[from] >= amount, "Insufficient balance");
require(_allowances[from][msg.sender] >= amount, "Insufficient allowance");
unchecked {
_balances[from] -= amount;
_balances[to] += amount;
_allowances[from][msg.sender] -= amount;
}
emit Transfer(from, to, amount);
return true;
}
}

View File

@ -1,81 +0,0 @@
# eUSDT (Energy USDT)
## 代币信息
| 属性 | 值 |
|------|-----|
| 名称 | Energy USDT |
| 符号 | eUSDT |
| 精度 | 6 decimals |
| 总供应量 | 10,002,000,000 (100.02亿) |
| 标准 | ERC-20 |
| 部署链 | KAVA Mainnet (Chain ID: 2222) |
## 合约特性
- **固定供应量**:100.02亿代币,部署时全部铸造给部署者
- **不可增发**:合约中没有 mint 函数,供应量永久固定
- **不可销毁**:合约层面无销毁功能
- **不可升级**:合约逻辑永久固定
- **标准ERC-20**:完全兼容所有主流钱包和DEX
## 部署步骤
### 1. 安装依赖
```bash
cd backend/services/blockchain-service/contracts/eUSDT
npm install
```
### 2. 编译合约
```bash
node compile.mjs
```
编译后会在 `build/` 目录生成:
- `EnergyUSDT.abi` - 合约ABI
- `EnergyUSDT.bin` - 合约字节码
### 3. 部署合约
确保部署账户有足够的 KAVA 支付 gas 费(约 0.02 KAVA)
```bash
node deploy.mjs
```
## 合约函数
| 函数 | 说明 |
|------|------|
| `name()` | 返回 "Energy USDT" |
| `symbol()` | 返回 "eUSDT" |
| `decimals()` | 返回 6 |
| `totalSupply()` | 返回 10,002,000,000 * 10^6 |
| `balanceOf(address)` | 查询账户余额 |
| `transfer(address, uint256)` | 转账 |
| `approve(address, uint256)` | 授权额度 |
| `transferFrom(address, address, uint256)` | 代理转账 |
| `allowance(address, address)` | 查询授权额度 |
## 事件
| 事件 | 说明 |
|------|------|
| `Transfer(from, to, value)` | 转账事件 |
| `Approval(owner, spender, value)` | 授权事件 |
## 部署信息
| 网络 | 合约地址 | 区块浏览器 |
|------|---------|-----------|
| KAVA Mainnet | `0x7C3275D808eFbAE90C06C7E3A9AfDdcAa8563931` | https://kavascan.com/address/0x7C3275D808eFbAE90C06C7E3A9AfDdcAa8563931 |
**部署详情:**
- 部署者/代币拥有者:`0x4F7E78d6B7C5FC502Ec7039848690f08c8970F1E`
- 私钥:`0x886ea4cffe76c386fecf3ff321ac9ae913737c46c17bc6ce2413752144668a2a`
- 初始持有量:10,002,000,000 eUSDT(全部代币)
- 交易哈希:`0x5bebaa4a35378438ba5c891972024a1766935d2e01397a33502aa99e956a6b19`
- 部署时间:2026-01-19
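
For reference, the deployed contract can be queried with a few lines of ethers v6 against the address in the table above. This is a usage sketch, not part of the repository; the minimal ABI is written inline and the RPC URL is the one used by the deploy script.

```typescript
import { ethers } from 'ethers';

// Usage sketch for the deployed eUSDT contract (address from the deployment table above).
const RPC_URL = 'https://evm.kava.io';
const EUSDT_ADDRESS = '0x7C3275D808eFbAE90C06C7E3A9AfDdcAa8563931';
const ERC20_ABI = [
  'function balanceOf(address) view returns (uint256)',
  'function symbol() view returns (string)',
];

async function printBalance(holder: string): Promise<void> {
  const provider = new ethers.JsonRpcProvider(RPC_URL);
  const token = new ethers.Contract(EUSDT_ADDRESS, ERC20_ABI, provider);
  const [raw, symbol] = await Promise.all([token.balanceOf(holder), token.symbol()]);
  console.log(`${ethers.formatUnits(raw, 6)} ${symbol}`); // eUSDT uses 6 decimals
}
```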

View File

@ -1,51 +0,0 @@
import solc from 'solc';
import fs from 'fs';
const source = fs.readFileSync('EnergyUSDT.sol', 'utf8');
const input = {
language: 'Solidity',
sources: {
'EnergyUSDT.sol': {
content: source
}
},
settings: {
optimizer: {
enabled: true,
runs: 200
},
evmVersion: 'paris', // Use paris to avoid PUSH0
outputSelection: {
'*': {
'*': ['abi', 'evm.bytecode']
}
}
}
};
const output = JSON.parse(solc.compile(JSON.stringify(input)));
if (output.errors) {
output.errors.forEach(err => {
console.log(err.formattedMessage);
});
// Check for actual errors (not just warnings)
const hasErrors = output.errors.some(err => err.severity === 'error');
if (hasErrors) {
process.exit(1);
}
}
const contract = output.contracts['EnergyUSDT.sol']['EnergyUSDT'];
const bytecode = contract.evm.bytecode.object;
const abi = contract.abi;
fs.mkdirSync('build', { recursive: true });
fs.writeFileSync('build/EnergyUSDT.bin', bytecode);
fs.writeFileSync('build/EnergyUSDT.abi', JSON.stringify(abi, null, 2));
console.log('Compiled successfully!');
console.log('Bytecode length:', bytecode.length);
console.log('ABI functions:', abi.filter(x => x.type === 'function').map(x => x.name).join(', '));

View File

@ -1,86 +0,0 @@
import { ethers } from 'ethers';
import fs from 'fs';
// Same deployer account as dUSDT
const PRIVATE_KEY = '0x886ea4cffe76c386fecf3ff321ac9ae913737c46c17bc6ce2413752144668a2a';
const RPC_URL = 'https://evm.kava.io';
// Contract bytecode
const BYTECODE = '0x' + fs.readFileSync('build/EnergyUSDT.bin', 'utf8');
const ABI = JSON.parse(fs.readFileSync('build/EnergyUSDT.abi', 'utf8'));
async function deploy() {
// Connect to Kava mainnet
const provider = new ethers.JsonRpcProvider(RPC_URL);
const wallet = new ethers.Wallet(PRIVATE_KEY, provider);
console.log('Deployer address:', wallet.address);
// Check balance
const balance = await provider.getBalance(wallet.address);
console.log('Balance:', ethers.formatEther(balance), 'KAVA');
if (parseFloat(ethers.formatEther(balance)) < 0.01) {
console.error('Insufficient KAVA balance for deployment!');
process.exit(1);
}
// Get network info
const network = await provider.getNetwork();
console.log('Chain ID:', network.chainId.toString());
// Create contract factory
const factory = new ethers.ContractFactory(ABI, BYTECODE, wallet);
console.log('Deploying EnergyUSDT (eUSDT) contract...');
// Deploy
const contract = await factory.deploy();
console.log('Transaction hash:', contract.deploymentTransaction().hash);
// Wait for deployment
console.log('Waiting for confirmation...');
await contract.waitForDeployment();
const contractAddress = await contract.getAddress();
console.log('Contract deployed at:', contractAddress);
// Verify deployment
console.log('\nVerifying deployment...');
const name = await contract.name();
const symbol = await contract.symbol();
const decimals = await contract.decimals();
const totalSupply = await contract.totalSupply();
const ownerBalance = await contract.balanceOf(wallet.address);
console.log('Token name:', name);
console.log('Token symbol:', symbol);
console.log('Decimals:', decimals.toString());
console.log('Total supply:', ethers.formatUnits(totalSupply, 6), 'eUSDT');
console.log('Owner balance:', ethers.formatUnits(ownerBalance, 6), 'eUSDT');
console.log('\n=== DEPLOYMENT COMPLETE ===');
console.log('Contract Address:', contractAddress);
console.log('Explorer:', `https://kavascan.com/address/${contractAddress}`);
// Save deployment info
const deploymentInfo = {
network: 'KAVA Mainnet',
chainId: 2222,
contractAddress,
deployer: wallet.address,
transactionHash: contract.deploymentTransaction().hash,
deployedAt: new Date().toISOString(),
token: {
name,
symbol,
decimals: decimals.toString(),
totalSupply: totalSupply.toString()
}
};
fs.writeFileSync('deployment.json', JSON.stringify(deploymentInfo, null, 2));
console.log('\nDeployment info saved to deployment.json');
}
deploy().catch(console.error);

View File

@ -1,14 +0,0 @@
{
"network": "KAVA Mainnet",
"chainId": 2222,
"contractAddress": "0x7C3275D808eFbAE90C06C7E3A9AfDdcAa8563931",
"deployer": "0x4F7E78d6B7C5FC502Ec7039848690f08c8970F1E",
"transactionHash": "0x5bebaa4a35378438ba5c891972024a1766935d2e01397a33502aa99e956a6b19",
"deployedAt": "2026-01-19T13:25:28.071Z",
"token": {
"name": "Energy USDT",
"symbol": "eUSDT",
"decimals": "6",
"totalSupply": "10002000000000000"
}
}

View File

@ -1,222 +0,0 @@
{
"name": "eusdt-contract",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "eusdt-contract",
"version": "1.0.0",
"dependencies": {
"ethers": "^6.9.0",
"solc": "^0.8.19"
}
},
"node_modules/@adraffy/ens-normalize": {
"version": "1.10.1",
"resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz",
"integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==",
"license": "MIT"
},
"node_modules/@noble/curves": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.2.0.tgz",
"integrity": "sha512-oYclrNgRaM9SsBUBVbb8M6DTV7ZHRTKugureoYEncY5c65HOmRzvSiTE3y5CYaPYJA/GVkrhXEoF0M3Ya9PMnw==",
"license": "MIT",
"dependencies": {
"@noble/hashes": "1.3.2"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@noble/hashes": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.2.tgz",
"integrity": "sha512-MVC8EAQp7MvEcm30KWENFjgR+Mkmf+D189XJTkFIlwohU5hcBbn1ZkKq7KVTi2Hme3PMGF390DaL52beVrIihQ==",
"license": "MIT",
"engines": {
"node": ">= 16"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@types/node": {
"version": "22.7.5",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.5.tgz",
"integrity": "sha512-jML7s2NAzMWc//QSJ1a3prpk78cOPchGvXJsC3C6R6PSMoooztvRVQEz89gmBTBY1SPMaqo5teB4uNHPdetShQ==",
"license": "MIT",
"dependencies": {
"undici-types": "~6.19.2"
}
},
"node_modules/aes-js": {
"version": "4.0.0-beta.5",
"resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz",
"integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q==",
"license": "MIT"
},
"node_modules/command-exists": {
"version": "1.2.9",
"resolved": "https://registry.npmjs.org/command-exists/-/command-exists-1.2.9.tgz",
"integrity": "sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==",
"license": "MIT"
},
"node_modules/commander": {
"version": "8.3.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz",
"integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==",
"license": "MIT",
"engines": {
"node": ">= 12"
}
},
"node_modules/ethers": {
"version": "6.16.0",
"resolved": "https://registry.npmjs.org/ethers/-/ethers-6.16.0.tgz",
"integrity": "sha512-U1wulmetNymijEhpSEQ7Ct/P/Jw9/e7R1j5XIbPRydgV2DjLVMsULDlNksq3RQnFgKoLlZf88ijYtWEXcPa07A==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/ethers-io/"
},
{
"type": "individual",
"url": "https://www.buymeacoffee.com/ricmoo"
}
],
"license": "MIT",
"dependencies": {
"@adraffy/ens-normalize": "1.10.1",
"@noble/curves": "1.2.0",
"@noble/hashes": "1.3.2",
"@types/node": "22.7.5",
"aes-js": "4.0.0-beta.5",
"tslib": "2.7.0",
"ws": "8.17.1"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/follow-redirects": {
"version": "1.15.11",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
"integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"license": "MIT",
"engines": {
"node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
}
},
"node_modules/js-sha3": {
"version": "0.8.0",
"resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz",
"integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==",
"license": "MIT"
},
"node_modules/memorystream": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz",
"integrity": "sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw==",
"engines": {
"node": ">= 0.10.0"
}
},
"node_modules/os-tmpdir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
"integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/semver": {
"version": "5.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz",
"integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==",
"license": "ISC",
"bin": {
"semver": "bin/semver"
}
},
"node_modules/solc": {
"version": "0.8.19",
"resolved": "https://registry.npmjs.org/solc/-/solc-0.8.19.tgz",
"integrity": "sha512-yqurS3wzC4LdEvmMobODXqprV4MYJcVtinuxgrp61ac8K2zz40vXA0eSAskSHPgv8dQo7Nux39i3QBsHx4pqyA==",
"license": "MIT",
"dependencies": {
"command-exists": "^1.2.8",
"commander": "^8.1.0",
"follow-redirects": "^1.12.1",
"js-sha3": "0.8.0",
"memorystream": "^0.3.1",
"semver": "^5.5.0",
"tmp": "0.0.33"
},
"bin": {
"solcjs": "solc.js"
},
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/tmp": {
"version": "0.0.33",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
"integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
"license": "MIT",
"dependencies": {
"os-tmpdir": "~1.0.2"
},
"engines": {
"node": ">=0.6.0"
}
},
"node_modules/tslib": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.7.0.tgz",
"integrity": "sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==",
"license": "0BSD"
},
"node_modules/undici-types": {
"version": "6.19.8",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
"license": "MIT"
},
"node_modules/ws": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz",
"integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
}
}
}

View File

@ -1,14 +0,0 @@
{
"name": "eusdt-contract",
"version": "1.0.0",
"type": "module",
"description": "Energy USDT (eUSDT) ERC-20 Token Contract",
"scripts": {
"compile": "node compile.mjs",
"deploy": "node deploy.mjs"
},
"dependencies": {
"ethers": "^6.9.0",
"solc": "^0.8.19"
}
}

View File

@ -1,78 +0,0 @@
// SPDX-License-Identifier: MIT
pragma solidity 0.8.19;
/**
* @title FutureUSDT
* @dev Fixed supply ERC-20 token - NO MINTING CAPABILITY
* Total Supply: 1,000,000,000,000 (1 Trillion) tokens with 6 decimals (matching USDT)
*
* IMPORTANT: This contract has NO mint function and NO way to increase supply.
* All tokens are minted to the deployer at construction time.
*/
contract FutureUSDT {
string public constant name = "Future USDT";
string public constant symbol = "fUSDT";
uint8 public constant decimals = 6;
// Fixed total supply: 1 trillion tokens (1,000,000,000,000 * 10^6)
uint256 public constant totalSupply = 1_000_000_000_000 * 10**6;
mapping(address => uint256) private _balances;
mapping(address => mapping(address => uint256)) private _allowances;
event Transfer(address indexed from, address indexed to, uint256 value);
event Approval(address indexed owner, address indexed spender, uint256 value);
/**
* @dev Constructor - mints entire fixed supply to deployer
* No mint function exists - supply is permanently fixed
*/
constructor() {
_balances[msg.sender] = totalSupply;
emit Transfer(address(0), msg.sender, totalSupply);
}
function balanceOf(address account) public view returns (uint256) {
return _balances[account];
}
function transfer(address to, uint256 amount) public returns (bool) {
require(to != address(0), "Transfer to zero address");
require(_balances[msg.sender] >= amount, "Insufficient balance");
unchecked {
_balances[msg.sender] -= amount;
_balances[to] += amount;
}
emit Transfer(msg.sender, to, amount);
return true;
}
function allowance(address owner, address spender) public view returns (uint256) {
return _allowances[owner][spender];
}
function approve(address spender, uint256 amount) public returns (bool) {
require(spender != address(0), "Approve to zero address");
_allowances[msg.sender][spender] = amount;
emit Approval(msg.sender, spender, amount);
return true;
}
function transferFrom(address from, address to, uint256 amount) public returns (bool) {
require(from != address(0), "Transfer from zero address");
require(to != address(0), "Transfer to zero address");
require(_balances[from] >= amount, "Insufficient balance");
require(_allowances[from][msg.sender] >= amount, "Insufficient allowance");
unchecked {
_balances[from] -= amount;
_balances[to] += amount;
_allowances[from][msg.sender] -= amount;
}
emit Transfer(from, to, amount);
return true;
}
}

View File

@ -1,81 +0,0 @@
# fUSDT (Future USDT)
## Token Information
| Property | Value |
|------|-----|
| Name | Future USDT |
| Symbol | fUSDT |
| Decimals | 6 decimals |
| Total supply | 1,000,000,000,000 (1 trillion) |
| Standard | ERC-20 |
| Deployment chain | KAVA Mainnet (Chain ID: 2222) |
## Contract Characteristics
- **Fixed supply**: 1 trillion tokens, all minted to the deployer at deployment time
- **No minting**: the contract has no mint function, so the supply is permanently fixed
- **No burning**: no burn capability at the contract level
- **No upgrades**: the contract logic is permanently fixed
- **Standard ERC-20**: fully compatible with mainstream wallets and DEXes
## Deployment Steps
### 1. Install dependencies
```bash
cd backend/services/blockchain-service/contracts/fUSDT
npm install
```
### 2. Compile the contract
```bash
node compile.mjs
```
Compilation writes the following to the `build/` directory:
- `FutureUSDT.abi` - contract ABI
- `FutureUSDT.bin` - contract bytecode
### 3. Deploy the contract
Make sure the deployer account holds enough KAVA to cover gas (about 0.02 KAVA):
```bash
node deploy.mjs
```
## Contract Functions
| Function | Description |
|------|------|
| `name()` | Returns "Future USDT" |
| `symbol()` | Returns "fUSDT" |
| `decimals()` | Returns 6 |
| `totalSupply()` | Returns 1,000,000,000,000 * 10^6 |
| `balanceOf(address)` | Returns an account's balance |
| `transfer(address, uint256)` | Transfers tokens |
| `approve(address, uint256)` | Approves an allowance |
| `transferFrom(address, address, uint256)` | Delegated transfer |
| `allowance(address, address)` | Returns an approved allowance |
## Events
| Event | Description |
|------|------|
| `Transfer(from, to, value)` | Transfer event |
| `Approval(owner, spender, value)` | Approval event |
## Deployment Information
| Network | Contract Address | Block Explorer |
|------|---------|-----------|
| KAVA Mainnet | `0x14dc4f7d3E4197438d058C3D156dd9826A161134` | https://kavascan.com/address/0x14dc4f7d3E4197438d058C3D156dd9826A161134 |
**Deployment details:**
- Deployer / token owner: `0x4F7E78d6B7C5FC502Ec7039848690f08c8970F1E`
- Private key: `0x886ea4cffe76c386fecf3ff321ac9ae913737c46c17bc6ce2413752144668a2a`
- Initial holding: 1,000,000,000,000 fUSDT (the entire supply)
- Transaction hash: `0x071f535971bc3a134dd26c182b6f05c53f0c3783e91fe6ef471d6c914e4cdb06`
- Deployed on: 2026-01-19
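## Usage Example (sketch)
As a hedged illustration of the `approve` / `transferFrom` pair listed above (the delegated-transfer flow), the sketch below assumes ethers v6 as in `deploy.mjs`; `OWNER_KEY`, `SPENDER_KEY`, and `RECIPIENT` are hypothetical placeholders, not real project values.
```typescript
import { ethers } from 'ethers';

const RPC_URL = 'https://evm.kava.io';
const TOKEN = '0x14dc4f7d3E4197438d058C3D156dd9826A161134'; // fUSDT address from the table above
const OWNER_KEY = '0x...';   // holds the fUSDT being spent (placeholder)
const SPENDER_KEY = '0x...'; // account allowed to spend on the owner's behalf (placeholder)
const RECIPIENT = '0x...';   // destination address (placeholder)

const ERC20_ABI = [
  'function approve(address spender, uint256 amount) returns (bool)',
  'function allowance(address owner, address spender) view returns (uint256)',
  'function transferFrom(address from, address to, uint256 amount) returns (bool)',
];

async function main(): Promise<void> {
  const provider = new ethers.JsonRpcProvider(RPC_URL);
  const owner = new ethers.Wallet(OWNER_KEY, provider);
  const spender = new ethers.Wallet(SPENDER_KEY, provider);

  const amount = ethers.parseUnits('100', 6); // fUSDT has 6 decimals

  // 1. Owner grants the spender an allowance of 100 fUSDT
  const asOwner = new ethers.Contract(TOKEN, ERC20_ABI, owner);
  await (await asOwner.approve(spender.address, amount)).wait();

  // 2. Spender moves tokens from the owner to the recipient within that allowance
  const asSpender = new ethers.Contract(TOKEN, ERC20_ABI, spender);
  console.log('Allowance:', (await asSpender.allowance(owner.address, spender.address)).toString());
  await (await asSpender.transferFrom(owner.address, RECIPIENT, amount)).wait();
}

main().catch(console.error);
```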

View File

@ -1,51 +0,0 @@
import solc from 'solc';
import fs from 'fs';
const source = fs.readFileSync('FutureUSDT.sol', 'utf8');
const input = {
language: 'Solidity',
sources: {
'FutureUSDT.sol': {
content: source
}
},
settings: {
optimizer: {
enabled: true,
runs: 200
},
evmVersion: 'paris', // Use paris to avoid PUSH0
outputSelection: {
'*': {
'*': ['abi', 'evm.bytecode']
}
}
}
};
const output = JSON.parse(solc.compile(JSON.stringify(input)));
if (output.errors) {
output.errors.forEach(err => {
console.log(err.formattedMessage);
});
// Check for actual errors (not just warnings)
const hasErrors = output.errors.some(err => err.severity === 'error');
if (hasErrors) {
process.exit(1);
}
}
const contract = output.contracts['FutureUSDT.sol']['FutureUSDT'];
const bytecode = contract.evm.bytecode.object;
const abi = contract.abi;
fs.mkdirSync('build', { recursive: true });
fs.writeFileSync('build/FutureUSDT.bin', bytecode);
fs.writeFileSync('build/FutureUSDT.abi', JSON.stringify(abi, null, 2));
console.log('Compiled successfully!');
console.log('Bytecode length:', bytecode.length);
console.log('ABI functions:', abi.filter(x => x.type === 'function').map(x => x.name).join(', '));

View File

@ -1,86 +0,0 @@
import { ethers } from 'ethers';
import fs from 'fs';
// Same deployer account as dUSDT
const PRIVATE_KEY = '0x886ea4cffe76c386fecf3ff321ac9ae913737c46c17bc6ce2413752144668a2a';
const RPC_URL = 'https://evm.kava.io';
// Contract bytecode
const BYTECODE = '0x' + fs.readFileSync('build/FutureUSDT.bin', 'utf8');
const ABI = JSON.parse(fs.readFileSync('build/FutureUSDT.abi', 'utf8'));
async function deploy() {
// Connect to Kava mainnet
const provider = new ethers.JsonRpcProvider(RPC_URL);
const wallet = new ethers.Wallet(PRIVATE_KEY, provider);
console.log('Deployer address:', wallet.address);
// Check balance
const balance = await provider.getBalance(wallet.address);
console.log('Balance:', ethers.formatEther(balance), 'KAVA');
if (parseFloat(ethers.formatEther(balance)) < 0.01) {
console.error('Insufficient KAVA balance for deployment!');
process.exit(1);
}
// Get network info
const network = await provider.getNetwork();
console.log('Chain ID:', network.chainId.toString());
// Create contract factory
const factory = new ethers.ContractFactory(ABI, BYTECODE, wallet);
console.log('Deploying FutureUSDT (fUSDT) contract...');
// Deploy
const contract = await factory.deploy();
console.log('Transaction hash:', contract.deploymentTransaction().hash);
// Wait for deployment
console.log('Waiting for confirmation...');
await contract.waitForDeployment();
const contractAddress = await contract.getAddress();
console.log('Contract deployed at:', contractAddress);
// Verify deployment
console.log('\nVerifying deployment...');
const name = await contract.name();
const symbol = await contract.symbol();
const decimals = await contract.decimals();
const totalSupply = await contract.totalSupply();
const ownerBalance = await contract.balanceOf(wallet.address);
console.log('Token name:', name);
console.log('Token symbol:', symbol);
console.log('Decimals:', decimals.toString());
console.log('Total supply:', ethers.formatUnits(totalSupply, 6), 'fUSDT');
console.log('Owner balance:', ethers.formatUnits(ownerBalance, 6), 'fUSDT');
console.log('\n=== DEPLOYMENT COMPLETE ===');
console.log('Contract Address:', contractAddress);
console.log('Explorer:', `https://kavascan.com/address/${contractAddress}`);
// Save deployment info
const deploymentInfo = {
network: 'KAVA Mainnet',
chainId: 2222,
contractAddress,
deployer: wallet.address,
transactionHash: contract.deploymentTransaction().hash,
deployedAt: new Date().toISOString(),
token: {
name,
symbol,
decimals: decimals.toString(),
totalSupply: totalSupply.toString()
}
};
fs.writeFileSync('deployment.json', JSON.stringify(deploymentInfo, null, 2));
console.log('\nDeployment info saved to deployment.json');
}
deploy().catch(console.error);

View File

@ -1,14 +0,0 @@
{
"network": "KAVA Mainnet",
"chainId": 2222,
"contractAddress": "0x14dc4f7d3E4197438d058C3D156dd9826A161134",
"deployer": "0x4F7E78d6B7C5FC502Ec7039848690f08c8970F1E",
"transactionHash": "0x071f535971bc3a134dd26c182b6f05c53f0c3783e91fe6ef471d6c914e4cdb06",
"deployedAt": "2026-01-19T13:26:05.111Z",
"token": {
"name": "Future USDT",
"symbol": "fUSDT",
"decimals": "6",
"totalSupply": "1000000000000000000"
}
}

View File

@ -1,222 +0,0 @@
{
"name": "fusdt-contract",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "fusdt-contract",
"version": "1.0.0",
"dependencies": {
"ethers": "^6.9.0",
"solc": "^0.8.19"
}
},
"node_modules/@adraffy/ens-normalize": {
"version": "1.10.1",
"resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz",
"integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==",
"license": "MIT"
},
"node_modules/@noble/curves": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.2.0.tgz",
"integrity": "sha512-oYclrNgRaM9SsBUBVbb8M6DTV7ZHRTKugureoYEncY5c65HOmRzvSiTE3y5CYaPYJA/GVkrhXEoF0M3Ya9PMnw==",
"license": "MIT",
"dependencies": {
"@noble/hashes": "1.3.2"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@noble/hashes": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.2.tgz",
"integrity": "sha512-MVC8EAQp7MvEcm30KWENFjgR+Mkmf+D189XJTkFIlwohU5hcBbn1ZkKq7KVTi2Hme3PMGF390DaL52beVrIihQ==",
"license": "MIT",
"engines": {
"node": ">= 16"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@types/node": {
"version": "22.7.5",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.5.tgz",
"integrity": "sha512-jML7s2NAzMWc//QSJ1a3prpk78cOPchGvXJsC3C6R6PSMoooztvRVQEz89gmBTBY1SPMaqo5teB4uNHPdetShQ==",
"license": "MIT",
"dependencies": {
"undici-types": "~6.19.2"
}
},
"node_modules/aes-js": {
"version": "4.0.0-beta.5",
"resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz",
"integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q==",
"license": "MIT"
},
"node_modules/command-exists": {
"version": "1.2.9",
"resolved": "https://registry.npmjs.org/command-exists/-/command-exists-1.2.9.tgz",
"integrity": "sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==",
"license": "MIT"
},
"node_modules/commander": {
"version": "8.3.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz",
"integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==",
"license": "MIT",
"engines": {
"node": ">= 12"
}
},
"node_modules/ethers": {
"version": "6.16.0",
"resolved": "https://registry.npmjs.org/ethers/-/ethers-6.16.0.tgz",
"integrity": "sha512-U1wulmetNymijEhpSEQ7Ct/P/Jw9/e7R1j5XIbPRydgV2DjLVMsULDlNksq3RQnFgKoLlZf88ijYtWEXcPa07A==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/ethers-io/"
},
{
"type": "individual",
"url": "https://www.buymeacoffee.com/ricmoo"
}
],
"license": "MIT",
"dependencies": {
"@adraffy/ens-normalize": "1.10.1",
"@noble/curves": "1.2.0",
"@noble/hashes": "1.3.2",
"@types/node": "22.7.5",
"aes-js": "4.0.0-beta.5",
"tslib": "2.7.0",
"ws": "8.17.1"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/follow-redirects": {
"version": "1.15.11",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
"integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"license": "MIT",
"engines": {
"node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
}
},
"node_modules/js-sha3": {
"version": "0.8.0",
"resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz",
"integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==",
"license": "MIT"
},
"node_modules/memorystream": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz",
"integrity": "sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw==",
"engines": {
"node": ">= 0.10.0"
}
},
"node_modules/os-tmpdir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
"integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/semver": {
"version": "5.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz",
"integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==",
"license": "ISC",
"bin": {
"semver": "bin/semver"
}
},
"node_modules/solc": {
"version": "0.8.19",
"resolved": "https://registry.npmjs.org/solc/-/solc-0.8.19.tgz",
"integrity": "sha512-yqurS3wzC4LdEvmMobODXqprV4MYJcVtinuxgrp61ac8K2zz40vXA0eSAskSHPgv8dQo7Nux39i3QBsHx4pqyA==",
"license": "MIT",
"dependencies": {
"command-exists": "^1.2.8",
"commander": "^8.1.0",
"follow-redirects": "^1.12.1",
"js-sha3": "0.8.0",
"memorystream": "^0.3.1",
"semver": "^5.5.0",
"tmp": "0.0.33"
},
"bin": {
"solcjs": "solc.js"
},
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/tmp": {
"version": "0.0.33",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
"integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
"license": "MIT",
"dependencies": {
"os-tmpdir": "~1.0.2"
},
"engines": {
"node": ">=0.6.0"
}
},
"node_modules/tslib": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.7.0.tgz",
"integrity": "sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==",
"license": "0BSD"
},
"node_modules/undici-types": {
"version": "6.19.8",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
"license": "MIT"
},
"node_modules/ws": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz",
"integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
}
}
}

View File

@ -1,14 +0,0 @@
{
"name": "fusdt-contract",
"version": "1.0.0",
"type": "module",
"description": "Future USDT (fUSDT) ERC-20 Token Contract",
"scripts": {
"compile": "node compile.mjs",
"deploy": "node deploy.mjs"
},
"dependencies": {
"ethers": "^6.9.0",
"solc": "^0.8.19"
}
}

View File

@ -1,6 +1,7 @@
-- ============================================================================ -- ============================================================================
-- contribution-service 初始化 migration -- contribution-service 初始化 migration
-- 合并自: 0001_init, 0002_add_transactional_idempotency, 20250120000001_add_region_to_system_accounts -- 合并自: 20260111000000_init, 20260111100000_add_referral_user_ids,
-- 20260112020000_fix_status_varchar_length, 20260112200000_add_adoption_province_city
-- ============================================================================ -- ============================================================================
-- ============================================ -- ============================================
@ -227,9 +228,8 @@ CREATE INDEX "unallocated_contributions_status_idx" ON "unallocated_contribution
CREATE TABLE "system_accounts" ( CREATE TABLE "system_accounts" (
"id" BIGSERIAL NOT NULL, "id" BIGSERIAL NOT NULL,
"account_type" TEXT NOT NULL, "account_type" VARCHAR(20) NOT NULL,
"region_code" TEXT, "name" VARCHAR(100) NOT NULL,
"name" TEXT NOT NULL,
"contribution_balance" DECIMAL(30,10) NOT NULL DEFAULT 0, "contribution_balance" DECIMAL(30,10) NOT NULL DEFAULT 0,
"contribution_never_expires" BOOLEAN NOT NULL DEFAULT false, "contribution_never_expires" BOOLEAN NOT NULL DEFAULT false,
"version" INTEGER NOT NULL DEFAULT 1, "version" INTEGER NOT NULL DEFAULT 1,
@ -239,26 +239,18 @@ CREATE TABLE "system_accounts" (
CONSTRAINT "system_accounts_pkey" PRIMARY KEY ("id") CONSTRAINT "system_accounts_pkey" PRIMARY KEY ("id")
); );
CREATE UNIQUE INDEX "system_accounts_account_type_region_code_key" ON "system_accounts"("account_type", "region_code"); CREATE UNIQUE INDEX "system_accounts_account_type_key" ON "system_accounts"("account_type");
CREATE INDEX "system_accounts_account_type_idx" ON "system_accounts"("account_type");
CREATE INDEX "system_accounts_region_code_idx" ON "system_accounts"("region_code");
CREATE TABLE "system_contribution_records" ( CREATE TABLE "system_contribution_records" (
"id" BIGSERIAL NOT NULL, "id" BIGSERIAL NOT NULL,
"system_account_id" BIGINT NOT NULL, "system_account_id" BIGINT NOT NULL,
"source_adoption_id" BIGINT NOT NULL, "source_adoption_id" BIGINT NOT NULL,
"source_account_sequence" VARCHAR(20) NOT NULL, "source_account_sequence" VARCHAR(20) NOT NULL,
-- 来源类型: FIXED_RATE(固定比例) / LEVEL_OVERFLOW(层级溢出) / LEVEL_NO_ANCESTOR(无上线) / BONUS_TIER_1/2/3(团队奖励未解锁)
"source_type" VARCHAR(30) NOT NULL,
-- 层级深度1-15仅对 LEVEL_OVERFLOW 和 LEVEL_NO_ANCESTOR 类型有效
"level_depth" INTEGER,
"distribution_rate" DECIMAL(10,6) NOT NULL, "distribution_rate" DECIMAL(10,6) NOT NULL,
"amount" DECIMAL(30,10) NOT NULL, "amount" DECIMAL(30,10) NOT NULL,
"effective_date" DATE NOT NULL, "effective_date" DATE NOT NULL,
"expire_date" DATE, "expire_date" DATE,
"is_expired" BOOLEAN NOT NULL DEFAULT false, "is_expired" BOOLEAN NOT NULL DEFAULT false,
-- 软删除时间戳
"deleted_at" TIMESTAMP(3),
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "system_contribution_records_pkey" PRIMARY KEY ("id") CONSTRAINT "system_contribution_records_pkey" PRIMARY KEY ("id")
@ -266,8 +258,6 @@ CREATE TABLE "system_contribution_records" (
CREATE INDEX "system_contribution_records_system_account_id_idx" ON "system_contribution_records"("system_account_id"); CREATE INDEX "system_contribution_records_system_account_id_idx" ON "system_contribution_records"("system_account_id");
CREATE INDEX "system_contribution_records_source_adoption_id_idx" ON "system_contribution_records"("source_adoption_id"); CREATE INDEX "system_contribution_records_source_adoption_id_idx" ON "system_contribution_records"("source_adoption_id");
CREATE INDEX "system_contribution_records_source_type_idx" ON "system_contribution_records"("source_type");
CREATE INDEX "system_contribution_records_deleted_at_idx" ON "system_contribution_records"("deleted_at");
ALTER TABLE "system_contribution_records" ADD CONSTRAINT "system_contribution_records_system_account_id_fkey" FOREIGN KEY ("system_account_id") REFERENCES "system_accounts"("id") ON DELETE RESTRICT ON UPDATE CASCADE; ALTER TABLE "system_contribution_records" ADD CONSTRAINT "system_contribution_records_system_account_id_fkey" FOREIGN KEY ("system_account_id") REFERENCES "system_accounts"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
@ -337,36 +327,20 @@ CREATE TABLE "cdc_sync_progress" (
CREATE UNIQUE INDEX "cdc_sync_progress_source_topic_key" ON "cdc_sync_progress"("source_topic"); CREATE UNIQUE INDEX "cdc_sync_progress_source_topic_key" ON "cdc_sync_progress"("source_topic");
-- 2.0 服务间 Outbox 事件幂等表
CREATE TABLE "processed_events" ( CREATE TABLE "processed_events" (
"id" BIGSERIAL NOT NULL, "id" BIGSERIAL NOT NULL,
"event_id" VARCHAR(100) NOT NULL, "event_id" VARCHAR(100) NOT NULL,
"event_type" VARCHAR(50) NOT NULL, "event_type" VARCHAR(50) NOT NULL,
"source_service" VARCHAR(100) NOT NULL, "source_service" VARCHAR(50),
"processed_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, "processed_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "processed_events_pkey" PRIMARY KEY ("id") CONSTRAINT "processed_events_pkey" PRIMARY KEY ("id")
); );
CREATE UNIQUE INDEX "processed_events_source_service_event_id_key" ON "processed_events"("source_service", "event_id"); CREATE UNIQUE INDEX "processed_events_event_id_key" ON "processed_events"("event_id");
CREATE INDEX "processed_events_event_type_idx" ON "processed_events"("event_type"); CREATE INDEX "processed_events_event_type_idx" ON "processed_events"("event_type");
CREATE INDEX "processed_events_processed_at_idx" ON "processed_events"("processed_at"); CREATE INDEX "processed_events_processed_at_idx" ON "processed_events"("processed_at");
-- 1.0 CDC 事件幂等表
CREATE TABLE "processed_cdc_events" (
"id" BIGSERIAL NOT NULL,
"source_topic" VARCHAR(200) NOT NULL,
"offset" BIGINT NOT NULL,
"table_name" VARCHAR(100) NOT NULL,
"operation" VARCHAR(10) NOT NULL,
"processed_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "processed_cdc_events_pkey" PRIMARY KEY ("id")
);
CREATE UNIQUE INDEX "processed_cdc_events_source_topic_offset_key" ON "processed_cdc_events"("source_topic", "offset");
CREATE INDEX "processed_cdc_events_processed_at_idx" ON "processed_cdc_events"("processed_at");
-- ============================================ -- ============================================
-- 9. 配置表 -- 9. 配置表
-- ============================================ -- ============================================

View File

@ -0,0 +1,45 @@
-- ============================================================================
-- Add support for transactional idempotent consumption
-- Used for 100% exactly-once semantics in the 1.0 -> 2.0 CDC sync
-- ============================================================================
-- 1. Create the processed_cdc_events table (CDC event idempotency)
-- Unique key: (source_topic, offset) - Kafka topic name + message offset
-- Guarantees that each CDC event is processed exactly once
CREATE TABLE IF NOT EXISTS "processed_cdc_events" (
"id" BIGSERIAL NOT NULL,
"source_topic" VARCHAR(200) NOT NULL, -- Kafka topic name (e.g. cdc.identity.public.user_accounts)
"offset" BIGINT NOT NULL, -- Kafka message offset (unique within a partition)
"table_name" VARCHAR(100) NOT NULL, -- source table name
"operation" VARCHAR(10) NOT NULL, -- CDC operation type: c (create), u (update), d (delete), r (snapshot read)
"processed_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "processed_cdc_events_pkey" PRIMARY KEY ("id")
);
-- Composite unique index: (source_topic, offset) guarantees idempotency
-- Note: this is not a database auto-increment ID but the unique identity of a Kafka message
CREATE UNIQUE INDEX "processed_cdc_events_source_topic_offset_key" ON "processed_cdc_events"("source_topic", "offset");
-- Timestamp index for purging old rows
CREATE INDEX "processed_cdc_events_processed_at_idx" ON "processed_cdc_events"("processed_at");
-- 2. Fix the processed_events table (2.0 inter-service Outbox event idempotency)
-- Unique key: (source_service, event_id) - service name + outbox table ID
-- Outbox IDs may collide across services, so the service name must be part of the composite unique key
-- 2.1 Alter the source_service column: widen 50 -> 100 and make it NOT NULL
-- First backfill existing NULL values with a default
UPDATE "processed_events" SET "source_service" = 'unknown' WHERE "source_service" IS NULL;
-- Change the column type and constraint
ALTER TABLE "processed_events"
ALTER COLUMN "source_service" SET NOT NULL,
ALTER COLUMN "source_service" TYPE VARCHAR(100);
-- 2.2 Drop the old single-column unique index
DROP INDEX IF EXISTS "processed_events_event_id_key";
-- 2.3 Create the new composite unique index
-- The index name uses snake_case to stay consistent with the column names
CREATE UNIQUE INDEX IF NOT EXISTS "processed_events_source_service_event_id_key" ON "processed_events"("source_service", "event_id");
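The consumer side of this migration is expected to insert the (source_topic, offset) pair and apply the projected change inside one database transaction, so a redelivered Kafka message trips the unique index and is simply skipped. A minimal sketch of that pattern, assuming a Prisma client whose model for processed_cdc_events is named `processedCdcEvent` and a hypothetical `applyChange` projection callback (both names are illustrative, not taken from the repository):
```typescript
import { Prisma, PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Hypothetical shape of a decoded Debezium CDC message
interface CdcMessage {
  topic: string;
  offset: bigint;
  table: string;
  op: 'c' | 'u' | 'd' | 'r';
  after: unknown;
}

// applyChange stands in for the service's real projection logic
async function handleCdcMessage(
  msg: CdcMessage,
  applyChange: (tx: Prisma.TransactionClient, msg: CdcMessage) => Promise<void>,
): Promise<void> {
  try {
    await prisma.$transaction(async (tx) => {
      // 1. Claim the (source_topic, offset) slot; the composite unique index enforces exactly-once
      await tx.processedCdcEvent.create({
        data: {
          sourceTopic: msg.topic,
          offset: msg.offset,
          tableName: msg.table,
          operation: msg.op,
        },
      });
      // 2. Apply the projected state change in the same transaction
      await applyChange(tx, msg);
    });
  } catch (err) {
    // P2002 = unique constraint violation => this offset was already processed; skip silently
    if (err instanceof Prisma.PrismaClientKnownRequestError && err.code === 'P2002') {
      return;
    }
    throw err; // anything else should go through the normal retry / DLQ path
  }
}
```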

View File

@ -299,10 +299,9 @@ model UnallocatedContribution {
// 系统账户(运营/省/市/总部) // 系统账户(运营/省/市/总部)
model SystemAccount { model SystemAccount {
id BigInt @id @default(autoincrement()) id BigInt @id @default(autoincrement())
accountType String @map("account_type") // OPERATION / PROVINCE / CITY / HEADQUARTERS accountType String @unique @map("account_type") @db.VarChar(20) // OPERATION / PROVINCE / CITY / HEADQUARTERS
regionCode String? @map("region_code") // 省/市代码,如 440000, 440100 name String @db.VarChar(100)
name String
contributionBalance Decimal @default(0) @map("contribution_balance") @db.Decimal(30, 10) contributionBalance Decimal @default(0) @map("contribution_balance") @db.Decimal(30, 10)
contributionNeverExpires Boolean @default(false) @map("contribution_never_expires") contributionNeverExpires Boolean @default(false) @map("contribution_never_expires")
@ -314,9 +313,6 @@ model SystemAccount {
records SystemContributionRecord[] records SystemContributionRecord[]
@@unique([accountType, regionCode])
@@index([accountType])
@@index([regionCode])
@@map("system_accounts") @@map("system_accounts")
} }
@ -327,11 +323,6 @@ model SystemContributionRecord {
sourceAdoptionId BigInt @map("source_adoption_id") sourceAdoptionId BigInt @map("source_adoption_id")
sourceAccountSequence String @map("source_account_sequence") @db.VarChar(20) sourceAccountSequence String @map("source_account_sequence") @db.VarChar(20)
// 来源类型FIXED_RATE(固定比例分配) / LEVEL_OVERFLOW(层级溢出) / LEVEL_NO_ANCESTOR(无上线) / BONUS_TIER_1/2/3(团队奖励未解锁)
sourceType String @map("source_type") @db.VarChar(30)
// 层级深度:对于 LEVEL_OVERFLOW 和 LEVEL_NO_ANCESTOR 类型表示第几级1-15
levelDepth Int? @map("level_depth")
distributionRate Decimal @map("distribution_rate") @db.Decimal(10, 6) distributionRate Decimal @map("distribution_rate") @db.Decimal(10, 6)
amount Decimal @map("amount") @db.Decimal(30, 10) amount Decimal @map("amount") @db.Decimal(30, 10)
@ -339,15 +330,12 @@ model SystemContributionRecord {
expireDate DateTime? @map("expire_date") @db.Date expireDate DateTime? @map("expire_date") @db.Date
isExpired Boolean @default(false) @map("is_expired") isExpired Boolean @default(false) @map("is_expired")
createdAt DateTime @default(now()) @map("created_at") createdAt DateTime @default(now()) @map("created_at")
deletedAt DateTime? @map("deleted_at") // 软删除标记
systemAccount SystemAccount @relation(fields: [systemAccountId], references: [id]) systemAccount SystemAccount @relation(fields: [systemAccountId], references: [id])
@@index([systemAccountId]) @@index([systemAccountId])
@@index([sourceAdoptionId]) @@index([sourceAdoptionId])
@@index([deletedAt])
@@index([sourceType])
@@map("system_contribution_records") @@map("system_contribution_records")
} }

View File

@ -10,8 +10,6 @@ import {
AdoptionSyncedEvent, AdoptionSyncedEvent,
ContributionRecordSyncedEvent, ContributionRecordSyncedEvent,
NetworkProgressUpdatedEvent, NetworkProgressUpdatedEvent,
SystemAccountSyncedEvent,
UnallocatedContributionSyncedEvent,
} from '../../domain/events'; } from '../../domain/events';
import { Public } from '../../shared/guards/jwt-auth.guard'; import { Public } from '../../shared/guards/jwt-auth.guard';
@ -422,190 +420,4 @@ export class AdminController {
}; };
} }
} }
@Post('system-accounts/publish-all')
@Public()
@ApiOperation({ summary: '发布所有系统账户算力事件到 outbox用于同步到 mining-service' })
async publishAllSystemAccounts(): Promise<{
success: boolean;
publishedCount: number;
message: string;
}> {
try {
const systemAccounts = await this.prisma.systemAccount.findMany();
await this.unitOfWork.executeInTransaction(async () => {
const events = systemAccounts.map((account) => {
const event = new SystemAccountSyncedEvent(
account.accountType,
account.regionCode,
account.name,
account.contributionBalance.toString(),
account.createdAt,
);
return {
aggregateType: SystemAccountSyncedEvent.AGGREGATE_TYPE,
aggregateId: `${account.accountType}:${account.regionCode || 'null'}`,
eventType: SystemAccountSyncedEvent.EVENT_TYPE,
payload: event.toPayload(),
};
});
await this.outboxRepository.saveMany(events);
});
this.logger.log(`Published ${systemAccounts.length} system account events`);
return {
success: true,
publishedCount: systemAccounts.length,
message: `Published ${systemAccounts.length} system account events`,
};
} catch (error) {
this.logger.error('Failed to publish system accounts', error);
return {
success: false,
publishedCount: 0,
message: `Failed: ${error.message}`,
};
}
}
@Get('system-accounts')
@Public()
@ApiOperation({ summary: '获取所有系统账户算力' })
async getSystemAccounts() {
const systemAccounts = await this.prisma.systemAccount.findMany();
return {
accounts: systemAccounts.map((a) => ({
accountType: a.accountType,
name: a.name,
contributionBalance: a.contributionBalance.toString(),
createdAt: a.createdAt,
updatedAt: a.updatedAt,
})),
total: systemAccounts.length,
};
}
@Get('unallocated-contributions')
@Public()
@ApiOperation({ summary: '获取所有未分配算力列表,供 mining-service 定时同步' })
async getUnallocatedContributions(): Promise<{
contributions: Array<{
sourceAdoptionId: string;
sourceAccountSequence: string;
wouldBeAccountSequence: string | null;
contributionType: string;
amount: string;
reason: string | null;
effectiveDate: string;
expireDate: string;
}>;
total: number;
}> {
const unallocatedContributions = await this.prisma.unallocatedContribution.findMany({
where: { status: 'PENDING' },
select: {
sourceAdoptionId: true,
sourceAccountSequence: true,
wouldBeAccountSequence: true,
unallocType: true,
amount: true,
reason: true,
effectiveDate: true,
expireDate: true,
},
});
return {
contributions: unallocatedContributions.map((uc) => ({
sourceAdoptionId: uc.sourceAdoptionId.toString(),
sourceAccountSequence: uc.sourceAccountSequence,
wouldBeAccountSequence: uc.wouldBeAccountSequence,
contributionType: uc.unallocType,
amount: uc.amount.toString(),
reason: uc.reason,
effectiveDate: uc.effectiveDate.toISOString(),
expireDate: uc.expireDate.toISOString(),
})),
total: unallocatedContributions.length,
};
}
@Post('unallocated-contributions/publish-all')
@Public()
@ApiOperation({ summary: '发布所有未分配算力事件到 outbox用于同步到 mining-service' })
async publishAllUnallocatedContributions(): Promise<{
success: boolean;
publishedCount: number;
failedCount: number;
message: string;
}> {
const unallocatedContributions = await this.prisma.unallocatedContribution.findMany({
where: { status: 'PENDING' },
select: {
id: true,
sourceAdoptionId: true,
sourceAccountSequence: true,
wouldBeAccountSequence: true,
unallocType: true,
amount: true,
reason: true,
effectiveDate: true,
expireDate: true,
},
});
let publishedCount = 0;
let failedCount = 0;
const batchSize = 100;
for (let i = 0; i < unallocatedContributions.length; i += batchSize) {
const batch = unallocatedContributions.slice(i, i + batchSize);
try {
await this.unitOfWork.executeInTransaction(async () => {
const events = batch.map((uc) => {
const event = new UnallocatedContributionSyncedEvent(
uc.sourceAdoptionId,
uc.sourceAccountSequence,
uc.wouldBeAccountSequence,
uc.unallocType,
uc.amount.toString(),
uc.reason,
uc.effectiveDate,
uc.expireDate,
);
return {
aggregateType: UnallocatedContributionSyncedEvent.AGGREGATE_TYPE,
aggregateId: `${uc.sourceAdoptionId}-${uc.unallocType}`,
eventType: UnallocatedContributionSyncedEvent.EVENT_TYPE,
payload: event.toPayload(),
};
});
await this.outboxRepository.saveMany(events);
});
publishedCount += batch.length;
this.logger.debug(`Published unallocated contribution batch ${Math.floor(i / batchSize) + 1}: ${batch.length} events`);
} catch (error) {
failedCount += batch.length;
this.logger.error(`Failed to publish unallocated contribution batch ${Math.floor(i / batchSize) + 1}`, error);
}
}
this.logger.log(`Published ${publishedCount} unallocated contribution events, ${failedCount} failed`);
return {
success: failedCount === 0,
publishedCount,
failedCount,
message: `Published ${publishedCount} events, ${failedCount} failed out of ${unallocatedContributions.length} total`,
};
}
} }

View File

@ -1,10 +1,8 @@
import { Controller, Get, Param, Query, NotFoundException } from '@nestjs/common'; import { Controller, Get, Param, Query, NotFoundException } from '@nestjs/common';
import { ApiTags, ApiOperation, ApiResponse, ApiParam, ApiQuery } from '@nestjs/swagger'; import { ApiTags, ApiOperation, ApiResponse, ApiParam } from '@nestjs/swagger';
import { GetContributionAccountQuery } from '../../application/queries/get-contribution-account.query'; import { GetContributionAccountQuery } from '../../application/queries/get-contribution-account.query';
import { GetContributionStatsQuery } from '../../application/queries/get-contribution-stats.query'; import { GetContributionStatsQuery } from '../../application/queries/get-contribution-stats.query';
import { GetContributionRankingQuery } from '../../application/queries/get-contribution-ranking.query'; import { GetContributionRankingQuery } from '../../application/queries/get-contribution-ranking.query';
import { GetPlantingLedgerQuery, PlantingLedgerDto } from '../../application/queries/get-planting-ledger.query';
import { GetTeamTreeQuery, DirectReferralsResponseDto, MyTeamInfoDto } from '../../application/queries/get-team-tree.query';
import { import {
ContributionAccountResponse, ContributionAccountResponse,
ContributionRecordsResponse, ContributionRecordsResponse,
@ -13,7 +11,6 @@ import {
import { ContributionStatsResponse } from '../dto/response/contribution-stats.response'; import { ContributionStatsResponse } from '../dto/response/contribution-stats.response';
import { ContributionRankingResponse, UserRankResponse } from '../dto/response/contribution-ranking.response'; import { ContributionRankingResponse, UserRankResponse } from '../dto/response/contribution-ranking.response';
import { GetContributionRecordsRequest } from '../dto/request/get-records.request'; import { GetContributionRecordsRequest } from '../dto/request/get-records.request';
import { Public } from '../../shared/guards/jwt-auth.guard';
@ApiTags('Contribution') @ApiTags('Contribution')
@Controller('contribution') @Controller('contribution')
@ -22,12 +19,9 @@ export class ContributionController {
private readonly getAccountQuery: GetContributionAccountQuery, private readonly getAccountQuery: GetContributionAccountQuery,
private readonly getStatsQuery: GetContributionStatsQuery, private readonly getStatsQuery: GetContributionStatsQuery,
private readonly getRankingQuery: GetContributionRankingQuery, private readonly getRankingQuery: GetContributionRankingQuery,
private readonly getPlantingLedgerQuery: GetPlantingLedgerQuery,
private readonly getTeamTreeQuery: GetTeamTreeQuery,
) {} ) {}
@Get('stats') @Get('stats')
@Public()
@ApiOperation({ summary: '获取算力统计数据' }) @ApiOperation({ summary: '获取算力统计数据' })
@ApiResponse({ status: 200, type: ContributionStatsResponse }) @ApiResponse({ status: 200, type: ContributionStatsResponse })
async getStats(): Promise<ContributionStatsResponse> { async getStats(): Promise<ContributionStatsResponse> {
@ -101,52 +95,4 @@ export class ContributionController {
} }
return result; return result;
} }
@Get('accounts/:accountSequence/planting-ledger')
@ApiOperation({ summary: '获取账户认种分类账' })
@ApiParam({ name: 'accountSequence', description: '账户序号' })
@ApiQuery({ name: 'page', required: false, type: Number, description: '页码' })
@ApiQuery({ name: 'pageSize', required: false, type: Number, description: '每页数量' })
@ApiResponse({ status: 200, description: '认种分类账' })
async getPlantingLedger(
@Param('accountSequence') accountSequence: string,
@Query('page') page?: number,
@Query('pageSize') pageSize?: number,
): Promise<PlantingLedgerDto> {
return this.getPlantingLedgerQuery.execute(
accountSequence,
page ?? 1,
pageSize ?? 20,
);
}
// ========== 团队树 API ==========
@Get('accounts/:accountSequence/team')
@ApiOperation({ summary: '获取账户团队信息' })
@ApiParam({ name: 'accountSequence', description: '账户序号' })
@ApiResponse({ status: 200, description: '团队信息' })
async getMyTeamInfo(
@Param('accountSequence') accountSequence: string,
): Promise<MyTeamInfoDto> {
return this.getTeamTreeQuery.getMyTeamInfo(accountSequence);
}
@Get('accounts/:accountSequence/team/direct-referrals')
@ApiOperation({ summary: '获取账户直推列表(用于伞下树懒加载)' })
@ApiParam({ name: 'accountSequence', description: '账户序号' })
@ApiQuery({ name: 'limit', required: false, type: Number, description: '每页数量' })
@ApiQuery({ name: 'offset', required: false, type: Number, description: '偏移量' })
@ApiResponse({ status: 200, description: '直推列表' })
async getDirectReferrals(
@Param('accountSequence') accountSequence: string,
@Query('limit') limit?: number,
@Query('offset') offset?: number,
): Promise<DirectReferralsResponseDto> {
return this.getTeamTreeQuery.getDirectReferrals(
accountSequence,
limit ?? 100,
offset ?? 0,
);
}
} }

View File

@ -2,7 +2,6 @@ import { Controller, Get } from '@nestjs/common';
import { ApiTags, ApiOperation, ApiResponse } from '@nestjs/swagger'; import { ApiTags, ApiOperation, ApiResponse } from '@nestjs/swagger';
import { PrismaService } from '../../infrastructure/persistence/prisma/prisma.service'; import { PrismaService } from '../../infrastructure/persistence/prisma/prisma.service';
import { RedisService } from '../../infrastructure/redis/redis.service'; import { RedisService } from '../../infrastructure/redis/redis.service';
import { CDCConsumerService } from '../../infrastructure/kafka/cdc-consumer.service';
import { Public } from '../../shared/guards/jwt-auth.guard'; import { Public } from '../../shared/guards/jwt-auth.guard';
interface HealthStatus { interface HealthStatus {
@ -21,7 +20,6 @@ export class HealthController {
constructor( constructor(
private readonly prisma: PrismaService, private readonly prisma: PrismaService,
private readonly redis: RedisService, private readonly redis: RedisService,
private readonly cdcConsumer: CDCConsumerService,
) {} ) {}
@Get() @Get()
@ -70,15 +68,4 @@ export class HealthController {
async live(): Promise<{ alive: boolean }> { async live(): Promise<{ alive: boolean }> {
return { alive: true }; return { alive: true };
} }
@Get('cdc-sync')
@ApiOperation({ summary: 'CDC 同步状态检查' })
@ApiResponse({ status: 200, description: 'CDC 同步状态' })
async cdcSyncStatus(): Promise<{
isRunning: boolean;
sequentialMode: boolean;
allPhasesCompleted: boolean;
}> {
return this.cdcConsumer.getSyncStatus();
}
} }

View File

@ -16,7 +16,6 @@ import { JwtAuthGuard } from './shared/guards/jwt-auth.guard';
envFilePath: [ envFilePath: [
`.env.${process.env.NODE_ENV || 'development'}`, `.env.${process.env.NODE_ENV || 'development'}`,
'.env', '.env',
'../.env', // 父目录共享 .env
], ],
ignoreEnvFile: false, ignoreEnvFile: false,
}), }),

View File

@ -12,15 +12,12 @@ import { CDCEventDispatcher } from './event-handlers/cdc-event-dispatcher';
import { ContributionCalculationService } from './services/contribution-calculation.service'; import { ContributionCalculationService } from './services/contribution-calculation.service';
import { ContributionDistributionPublisherService } from './services/contribution-distribution-publisher.service'; import { ContributionDistributionPublisherService } from './services/contribution-distribution-publisher.service';
import { ContributionRateService } from './services/contribution-rate.service'; import { ContributionRateService } from './services/contribution-rate.service';
import { BonusClaimService } from './services/bonus-claim.service';
import { SnapshotService } from './services/snapshot.service'; import { SnapshotService } from './services/snapshot.service';
// Queries // Queries
import { GetContributionAccountQuery } from './queries/get-contribution-account.query'; import { GetContributionAccountQuery } from './queries/get-contribution-account.query';
import { GetContributionStatsQuery } from './queries/get-contribution-stats.query'; import { GetContributionStatsQuery } from './queries/get-contribution-stats.query';
import { GetContributionRankingQuery } from './queries/get-contribution-ranking.query'; import { GetContributionRankingQuery } from './queries/get-contribution-ranking.query';
import { GetPlantingLedgerQuery } from './queries/get-planting-ledger.query';
import { GetTeamTreeQuery } from './queries/get-team-tree.query';
// Schedulers // Schedulers
import { ContributionScheduler } from './schedulers/contribution.scheduler'; import { ContributionScheduler } from './schedulers/contribution.scheduler';
@ -41,15 +38,12 @@ import { ContributionScheduler } from './schedulers/contribution.scheduler';
ContributionCalculationService, ContributionCalculationService,
ContributionDistributionPublisherService, ContributionDistributionPublisherService,
ContributionRateService, ContributionRateService,
BonusClaimService,
SnapshotService, SnapshotService,
// Queries // Queries
GetContributionAccountQuery, GetContributionAccountQuery,
GetContributionStatsQuery, GetContributionStatsQuery,
GetContributionRankingQuery, GetContributionRankingQuery,
GetPlantingLedgerQuery,
GetTeamTreeQuery,
// Schedulers // Schedulers
ContributionScheduler, ContributionScheduler,
@ -61,8 +55,6 @@ import { ContributionScheduler } from './schedulers/contribution.scheduler';
GetContributionAccountQuery, GetContributionAccountQuery,
GetContributionStatsQuery, GetContributionStatsQuery,
GetContributionRankingQuery, GetContributionRankingQuery,
GetPlantingLedgerQuery,
GetTeamTreeQuery,
], ],
}) })
export class ApplicationModule {} export class ApplicationModule {}

View File

@ -2,7 +2,6 @@ import { Injectable, Logger } from '@nestjs/common';
import Decimal from 'decimal.js'; import Decimal from 'decimal.js';
import { CDCEvent, TransactionClient } from '../../infrastructure/kafka/cdc-consumer.service'; import { CDCEvent, TransactionClient } from '../../infrastructure/kafka/cdc-consumer.service';
import { ContributionCalculationService } from '../services/contribution-calculation.service'; import { ContributionCalculationService } from '../services/contribution-calculation.service';
import { ContributionRateService } from '../services/contribution-rate.service';
/** /**
* *
@ -16,11 +15,19 @@ export interface AdoptionSyncResult {
* CDC * CDC
* 1.0 planting-service同步过来的planting_orders数据 * 1.0 planting-service同步过来的planting_orders数据
* *
* *
* =========================================== * ===========================================
* - handle() 100% * - handle() synced_adoptions
* - status MINING_ENABLED * - AdoptionSyncResultID
* - Serializable * - calculateForAdoption
*
* calculateForAdoption
* 1. calculateForAdoption 使
* 2. Serializable
* 3. "Adoption not found" synced_adoptions
*
* Kafka Idempotent Consumer & Transactional Outbox Pattern
* https://www.lydtechconsulting.com/blog/kafka-idempotent-consumer-transactional-outbox
*/ */
@Injectable() @Injectable()
export class AdoptionSyncedHandler { export class AdoptionSyncedHandler {
@ -28,7 +35,6 @@ export class AdoptionSyncedHandler {
constructor( constructor(
private readonly contributionCalculationService: ContributionCalculationService, private readonly contributionCalculationService: ContributionCalculationService,
private readonly contributionRateService: ContributionRateService,
) {} ) {}
/** /**
@ -42,28 +48,13 @@ export class AdoptionSyncedHandler {
this.logger.log(`[CDC] Adoption event received: op=${op}, seq=${event.sequenceNum}`); this.logger.log(`[CDC] Adoption event received: op=${op}, seq=${event.sequenceNum}`);
this.logger.debug(`[CDC] Adoption event payload: ${JSON.stringify(after || before)}`); this.logger.debug(`[CDC] Adoption event payload: ${JSON.stringify(after || before)}`);
// 获取认种日期,用于查询当日贡献值
const data = after || before;
const adoptionDate = data?.created_at || data?.createdAt || data?.paid_at || data?.paidAt;
// 在事务外获取当日每棵树的贡献值
let contributionPerTree = new Decimal('22617'); // 默认值
if (adoptionDate) {
try {
contributionPerTree = await this.contributionRateService.getContributionPerTree(new Date(adoptionDate));
this.logger.log(`[CDC] Got contributionPerTree for ${adoptionDate}: ${contributionPerTree.toString()}`);
} catch (error) {
this.logger.warn(`[CDC] Failed to get contributionPerTree, using default 22617`, error);
}
}
try { try {
switch (op) { switch (op) {
case 'c': // create case 'c': // create
case 'r': // read (snapshot) case 'r': // read (snapshot)
return await this.handleCreate(after, event.sequenceNum, tx, contributionPerTree); return await this.handleCreate(after, event.sequenceNum, tx);
case 'u': // update case 'u': // update
return await this.handleUpdate(after, before, event.sequenceNum, tx, contributionPerTree); return await this.handleUpdate(after, before, event.sequenceNum, tx);
case 'd': // delete case 'd': // delete
await this.handleDelete(before); await this.handleDelete(before);
return null; return null;
@ -95,21 +86,21 @@ export class AdoptionSyncedHandler {
} }
} }
private async handleCreate(data: any, sequenceNum: bigint, tx: TransactionClient, contributionPerTree: Decimal): Promise<AdoptionSyncResult | null> { private async handleCreate(data: any, sequenceNum: bigint, tx: TransactionClient): Promise<AdoptionSyncResult | null> {
if (!data) { if (!data) {
this.logger.warn(`[CDC] Adoption create: empty data received`); this.logger.warn(`[CDC] Adoption create: empty data received`);
return null; return null;
} }
// planting_orders表字段: order_id, account_sequence, tree_count, created_at, status, selected_province, selected_city
const orderId = data.order_id || data.id; const orderId = data.order_id || data.id;
const accountSequence = data.account_sequence || data.accountSequence; const accountSequence = data.account_sequence || data.accountSequence;
const treeCount = data.tree_count || data.treeCount; const treeCount = data.tree_count || data.treeCount;
const createdAt = data.created_at || data.createdAt || data.paid_at || data.paidAt; const createdAt = data.created_at || data.createdAt || data.paid_at || data.paidAt;
const selectedProvince = data.selected_province || data.selectedProvince || null; const selectedProvince = data.selected_province || data.selectedProvince || null;
const selectedCity = data.selected_city || data.selectedCity || null; const selectedCity = data.selected_city || data.selectedCity || null;
const status = data.status ?? null;
this.logger.log(`[CDC] Adoption create: orderId=${orderId}, account=${accountSequence}, trees=${treeCount}, status=${status}, contributionPerTree=${contributionPerTree.toString()}`); this.logger.log(`[CDC] Adoption create: orderId=${orderId}, account=${accountSequence}, trees=${treeCount}, province=${selectedProvince}, city=${selectedCity}`);
if (!orderId || !accountSequence) { if (!orderId || !accountSequence) {
this.logger.warn(`[CDC] Invalid adoption data: missing order_id or account_sequence`, { data }); this.logger.warn(`[CDC] Invalid adoption data: missing order_id or account_sequence`, { data });
@ -118,7 +109,8 @@ export class AdoptionSyncedHandler {
const originalAdoptionId = BigInt(orderId); const originalAdoptionId = BigInt(orderId);
// 100%同步数据,使用真实的每棵树贡献值 // 在事务中保存同步的认种订单数据
this.logger.log(`[CDC] Upserting synced adoption: ${orderId}`);
await tx.syncedAdoption.upsert({ await tx.syncedAdoption.upsert({
where: { originalAdoptionId }, where: { originalAdoptionId },
create: { create: {
@ -126,10 +118,10 @@ export class AdoptionSyncedHandler {
accountSequence, accountSequence,
treeCount, treeCount,
adoptionDate: new Date(createdAt), adoptionDate: new Date(createdAt),
status, status: data.status ?? null,
selectedProvince, selectedProvince,
selectedCity, selectedCity,
contributionPerTree, contributionPerTree: new Decimal('1'), // 每棵树1算力
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
@ -137,26 +129,25 @@ export class AdoptionSyncedHandler {
accountSequence, accountSequence,
treeCount, treeCount,
adoptionDate: new Date(createdAt), adoptionDate: new Date(createdAt),
status, status: data.status ?? undefined,
selectedProvince, selectedProvince: selectedProvince ?? undefined,
selectedCity, selectedCity: selectedCity ?? undefined,
contributionPerTree, contributionPerTree: new Decimal('1'),
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
}); });
this.logger.log(`[CDC] Adoption synced: orderId=${orderId}, status=${status}`); this.logger.log(`[CDC] Adoption synced successfully: orderId=${orderId}, account=${accountSequence}, trees=${treeCount}`);
// 只有 MINING_ENABLED 状态才触发算力计算 // 返回结果,供事务提交后计算算力
const needsCalculation = status === 'MINING_ENABLED';
return { return {
originalAdoptionId, originalAdoptionId,
needsCalculation, needsCalculation: true,
}; };
} }
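One side of this hunk gates needsCalculation on the synced status, so a freshly inserted order only triggers post-commit contribution calculation when it is already MINING_ENABLED; the other side always returns true. A small sketch of the stricter gate (names are illustrative):

// Sketch: only mining-enabled orders are flagged for immediate calculation.
interface AdoptionSyncResultSketch {
  originalAdoptionId: bigint;
  needsCalculation: boolean;
}

function buildCreateResult(originalAdoptionId: bigint, status: string | null): AdoptionSyncResultSketch {
  return {
    originalAdoptionId,
    // Orders still awaiting payment or activation are synced but not counted yet;
    // a later update event that flips the status picks them up.
    needsCalculation: status === 'MINING_ENABLED',
  };
}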
private async handleUpdate(after: any, before: any, sequenceNum: bigint, tx: TransactionClient, contributionPerTree: Decimal): Promise<AdoptionSyncResult | null> { private async handleUpdate(after: any, before: any, sequenceNum: bigint, tx: TransactionClient): Promise<AdoptionSyncResult | null> {
if (!after) { if (!after) {
this.logger.warn(`[CDC] Adoption update: empty after data received`); this.logger.warn(`[CDC] Adoption update: empty after data received`);
return null; return null;
@ -164,22 +155,37 @@ export class AdoptionSyncedHandler {
const orderId = after.order_id || after.id; const orderId = after.order_id || after.id;
const originalAdoptionId = BigInt(orderId); const originalAdoptionId = BigInt(orderId);
this.logger.log(`[CDC] Adoption update: orderId=${orderId}`);
// 检查是否已经处理过(使用事务客户端)
const existingAdoption = await tx.syncedAdoption.findUnique({
where: { originalAdoptionId },
});
if (existingAdoption?.contributionDistributed) {
// 如果树数量发生变化,需要重新计算(这种情况较少)
const newTreeCount = after.tree_count || after.treeCount;
if (existingAdoption.treeCount !== newTreeCount) {
this.logger.warn(
`[CDC] Adoption tree count changed after processing: ${originalAdoptionId}, old=${existingAdoption.treeCount}, new=${newTreeCount}. This requires special handling.`,
);
// TODO: 实现树数量变化的处理逻辑
} else {
this.logger.debug(`[CDC] Adoption ${orderId} already distributed, skipping update`);
}
return null;
}
const accountSequence = after.account_sequence || after.accountSequence; const accountSequence = after.account_sequence || after.accountSequence;
const treeCount = after.tree_count || after.treeCount; const treeCount = after.tree_count || after.treeCount;
const createdAt = after.created_at || after.createdAt || after.paid_at || after.paidAt; const createdAt = after.created_at || after.createdAt || after.paid_at || after.paidAt;
const selectedProvince = after.selected_province || after.selectedProvince || null; const selectedProvince = after.selected_province || after.selectedProvince || null;
const selectedCity = after.selected_city || after.selectedCity || null; const selectedCity = after.selected_city || after.selectedCity || null;
const newStatus = after.status ?? null;
const oldStatus = before?.status ?? null;
this.logger.log(`[CDC] Adoption update: orderId=${orderId}, status=${oldStatus} -> ${newStatus}, contributionPerTree=${contributionPerTree.toString()}`); this.logger.log(`[CDC] Adoption update data: account=${accountSequence}, trees=${treeCount}, province=${selectedProvince}, city=${selectedCity}`);
// 查询现有记录 // 在事务中保存同步的认种订单数据
const existingAdoption = await tx.syncedAdoption.findUnique({
where: { originalAdoptionId },
});
// 100%同步数据,使用真实的每棵树贡献值
await tx.syncedAdoption.upsert({ await tx.syncedAdoption.upsert({
where: { originalAdoptionId }, where: { originalAdoptionId },
create: { create: {
@ -187,10 +193,10 @@ export class AdoptionSyncedHandler {
accountSequence, accountSequence,
treeCount, treeCount,
adoptionDate: new Date(createdAt), adoptionDate: new Date(createdAt),
status: newStatus, status: after.status ?? null,
selectedProvince, selectedProvince,
selectedCity, selectedCity,
contributionPerTree, contributionPerTree: new Decimal('1'),
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
@ -198,24 +204,21 @@ export class AdoptionSyncedHandler {
accountSequence, accountSequence,
treeCount, treeCount,
adoptionDate: new Date(createdAt), adoptionDate: new Date(createdAt),
status: newStatus, status: after.status ?? undefined,
selectedProvince, selectedProvince: selectedProvince ?? undefined,
selectedCity, selectedCity: selectedCity ?? undefined,
contributionPerTree, contributionPerTree: new Decimal('1'),
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
}); });
this.logger.log(`[CDC] Adoption synced: orderId=${orderId}, status=${newStatus}`); this.logger.log(`[CDC] Adoption updated successfully: ${originalAdoptionId}`);
// 只有当 status 变为 MINING_ENABLED 且尚未计算过算力时,才触发算力计算
const statusChangedToMiningEnabled = newStatus === 'MINING_ENABLED' && oldStatus !== 'MINING_ENABLED';
const needsCalculation = statusChangedToMiningEnabled && !existingAdoption?.contributionDistributed;
// 只有尚未分配算力的认种才需要计算
return { return {
originalAdoptionId, originalAdoptionId,
needsCalculation, needsCalculation: !existingAdoption?.contributionDistributed,
}; };
} }
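For updates, the stricter variant triggers calculation only on the transition into MINING_ENABLED and only if the adoption's contribution has not been distributed yet, while the other variant checks just the contributionDistributed flag. The transition check as a standalone sketch:

// Sketch: fire the calculation exactly once, on the status transition.
function shouldCalculateOnUpdate(
  oldStatus: string | null,
  newStatus: string | null,
  alreadyDistributed: boolean,
): boolean {
  const becameMiningEnabled = newStatus === 'MINING_ENABLED' && oldStatus !== 'MINING_ENABLED';
  return becameMiningEnabled && !alreadyDistributed;
}

// shouldCalculateOnUpdate('PENDING', 'MINING_ENABLED', false) === true
// shouldCalculateOnUpdate('MINING_ENABLED', 'MINING_ENABLED', false) === false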

View File

@ -51,17 +51,14 @@ export class CDCEventDispatcher implements OnModuleInit {
this.handleAdoptionPostCommit.bind(this), this.handleAdoptionPostCommit.bind(this),
); );
// 非阻塞启动 CDC 消费者 // 启动 CDC 消费者
// 让 HTTP 服务器先启动CDC 同步在后台进行 try {
// 脚本通过 /health/cdc-sync API 轮询同步状态 await this.cdcConsumer.start();
this.cdcConsumer.start() this.logger.log('CDC event dispatcher started with transactional idempotency');
.then(() => { } catch (error) {
this.logger.log('CDC event dispatcher started with transactional idempotency'); this.logger.error('Failed to start CDC event dispatcher', error);
}) // 不抛出错误,允许服务在没有 Kafka 的情况下启动(用于本地开发)
.catch((error) => { }
this.logger.error('Failed to start CDC event dispatcher', error);
// 不抛出错误,允许服务在没有 Kafka 的情况下启动(用于本地开发)
});
} }
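The non-blocking variant lets the HTTP server come up before the consumer has caught up, and the comment above says a script polls /health/cdc-sync for sync status. That endpoint is not part of this hunk; a hedged sketch of one plausible shape for it, assuming the consumer reports progress through a shared status object:

// Sketch only: the controller and status object below are assumptions, not code from this diff.
import { Controller, Get } from '@nestjs/common';

// Hypothetical in-memory status the CDC consumer would update as it processes batches.
export const cdcSyncStatus = {
  started: false,
  caughtUp: false,
  lastSequenceNum: '0',
};

@Controller('health')
export class CdcSyncHealthController {
  @Get('cdc-sync')
  getCdcSyncStatus() {
    // A migration script can poll this until caughtUp is true before cutting over.
    return { ...cdcSyncStatus };
  }
}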
private async handleUserEvent(event: CDCEvent, tx: TransactionClient): Promise<void> { private async handleUserEvent(event: CDCEvent, tx: TransactionClient): Promise<void> {

View File

@ -5,7 +5,22 @@ import { CDCEvent, TransactionClient } from '../../infrastructure/kafka/cdc-cons
 * CDC handler for referral data
 * Syncs the referral_relationships rows pushed over from the 1.0 referral-service.
 *
 * 1.0 source fields (referral_relationships):
 * - user_id: BigInt (user ID)
 * - account_sequence: String (account sequence)
 * - referrer_id: BigInt (referrer's user ID, not an account_sequence)
 * - ancestor_path: BigInt[] (chain of ancestor user_ids)
 * - depth: Int (referral depth)
 *
 * 2.0 mapping:
 * - original_user_id (from 1.0 user_id)
 * - referrer_user_id (from 1.0 referrer_id)
 * - referrer account_sequence is resolved from already-synced rows
 * - ancestor_path is stored as a comma-separated string
 *
 * All writes in this handler go through the external transaction client (tx)
 * so they commit together with the idempotency record.
 */

@Injectable() @Injectable()
export class ReferralSyncedHandler { export class ReferralSyncedHandler {
@ -46,11 +61,12 @@ export class ReferralSyncedHandler {
return; return;
} }
// 1.0 字段映射
const accountSequence = data.account_sequence || data.accountSequence; const accountSequence = data.account_sequence || data.accountSequence;
const originalUserId = data.user_id || data.userId; const originalUserId = data.user_id || data.userId;
const referrerUserId = data.referrer_id || data.referrerId; const referrerUserId = data.referrer_id || data.referrerId;
const ancestorPathArray = data.ancestor_path || data.ancestorPath; const ancestorPathArray = data.ancestor_path || data.ancestorPath;
const depth = data.depth ?? 0; const depth = data.depth || 0;
this.logger.log(`[CDC] Referral create: account=${accountSequence}, userId=${originalUserId}, referrerId=${referrerUserId}, depth=${depth}`); this.logger.log(`[CDC] Referral create: account=${accountSequence}, userId=${originalUserId}, referrerId=${referrerUserId}, depth=${depth}`);
@ -59,9 +75,11 @@ export class ReferralSyncedHandler {
return; return;
} }
// 将 BigInt[] 转换为逗号分隔的字符串
const ancestorPath = this.convertAncestorPath(ancestorPathArray); const ancestorPath = this.convertAncestorPath(ancestorPathArray);
this.logger.debug(`[CDC] Referral ancestorPath converted: ${ancestorPath}`);
// 尝试查找推荐人的 account_sequence // 尝试查找推荐人的 account_sequence(使用事务客户端)
let referrerAccountSequence: string | null = null; let referrerAccountSequence: string | null = null;
if (referrerUserId) { if (referrerUserId) {
const referrer = await tx.syncedReferral.findFirst({ const referrer = await tx.syncedReferral.findFirst({
@ -69,10 +87,14 @@ export class ReferralSyncedHandler {
}); });
if (referrer) { if (referrer) {
referrerAccountSequence = referrer.accountSequence; referrerAccountSequence = referrer.accountSequence;
this.logger.debug(`[CDC] Found referrer account_sequence: ${referrerAccountSequence} for referrer_id: ${referrerUserId}`);
} else {
this.logger.log(`[CDC] Referrer user_id ${referrerUserId} not found yet for ${accountSequence}, will resolve later`);
} }
} }
// 100%同步数据 // 使用外部事务客户端执行所有操作
this.logger.log(`[CDC] Upserting synced referral: ${accountSequence}`);
await tx.syncedReferral.upsert({ await tx.syncedReferral.upsert({
where: { accountSequence }, where: { accountSequence },
create: { create: {
@ -86,17 +108,17 @@ export class ReferralSyncedHandler {
syncedAt: new Date(), syncedAt: new Date(),
}, },
update: { update: {
referrerAccountSequence, referrerAccountSequence: referrerAccountSequence ?? undefined,
referrerUserId: referrerUserId ? BigInt(referrerUserId) : null, referrerUserId: referrerUserId ? BigInt(referrerUserId) : undefined,
originalUserId: originalUserId ? BigInt(originalUserId) : null, originalUserId: originalUserId ? BigInt(originalUserId) : undefined,
ancestorPath, ancestorPath: ancestorPath ?? undefined,
depth, depth: depth ?? undefined,
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
}); });
this.logger.log(`[CDC] Referral synced: ${accountSequence}, referrerId=${referrerUserId || 'none'}, depth=${depth}`); this.logger.log(`[CDC] Referral synced successfully: ${accountSequence} (user_id: ${originalUserId}) -> referrer_id: ${referrerUserId || 'none'}, depth: ${depth}`);
} }
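convertAncestorPath is what turns the 1.0 ancestor_path BigInt array into the comma-separated string stored on the 2.0 side. Its body is not shown in this hunk; a minimal sketch of what such a helper can look like:

// Sketch: tolerate bigint, number and string elements; empty input maps to null.
function convertAncestorPath(path: Array<bigint | number | string> | null | undefined): string | null {
  if (!path || path.length === 0) {
    return null;
  }
  return path.map((id) => id.toString()).join(',');
}

// convertAncestorPath([1n, 42n, 1337n]) === '1,42,1337'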
private async handleUpdate(data: any, sequenceNum: bigint, tx: TransactionClient): Promise<void> { private async handleUpdate(data: any, sequenceNum: bigint, tx: TransactionClient): Promise<void> {
@ -109,7 +131,7 @@ export class ReferralSyncedHandler {
const originalUserId = data.user_id || data.userId; const originalUserId = data.user_id || data.userId;
const referrerUserId = data.referrer_id || data.referrerId; const referrerUserId = data.referrer_id || data.referrerId;
const ancestorPathArray = data.ancestor_path || data.ancestorPath; const ancestorPathArray = data.ancestor_path || data.ancestorPath;
const depth = data.depth ?? 0; const depth = data.depth || 0;
this.logger.log(`[CDC] Referral update: account=${accountSequence}, referrerId=${referrerUserId}, depth=${depth}`); this.logger.log(`[CDC] Referral update: account=${accountSequence}, referrerId=${referrerUserId}, depth=${depth}`);
@ -120,7 +142,7 @@ export class ReferralSyncedHandler {
const ancestorPath = this.convertAncestorPath(ancestorPathArray); const ancestorPath = this.convertAncestorPath(ancestorPathArray);
// 尝试查找推荐人的 account_sequence // 尝试查找推荐人的 account_sequence(使用事务客户端)
let referrerAccountSequence: string | null = null; let referrerAccountSequence: string | null = null;
if (referrerUserId) { if (referrerUserId) {
const referrer = await tx.syncedReferral.findFirst({ const referrer = await tx.syncedReferral.findFirst({
@ -128,10 +150,10 @@ export class ReferralSyncedHandler {
}); });
if (referrer) { if (referrer) {
referrerAccountSequence = referrer.accountSequence; referrerAccountSequence = referrer.accountSequence;
this.logger.debug(`[CDC] Found referrer account_sequence: ${referrerAccountSequence}`);
} }
} }
// 100%同步数据
await tx.syncedReferral.upsert({ await tx.syncedReferral.upsert({
where: { accountSequence }, where: { accountSequence },
create: { create: {
@ -145,17 +167,17 @@ export class ReferralSyncedHandler {
syncedAt: new Date(), syncedAt: new Date(),
}, },
update: { update: {
referrerAccountSequence, referrerAccountSequence: referrerAccountSequence ?? undefined,
referrerUserId: referrerUserId ? BigInt(referrerUserId) : null, referrerUserId: referrerUserId ? BigInt(referrerUserId) : undefined,
originalUserId: originalUserId ? BigInt(originalUserId) : null, originalUserId: originalUserId ? BigInt(originalUserId) : undefined,
ancestorPath, ancestorPath: ancestorPath ?? undefined,
depth, depth: depth ?? undefined,
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
}); });
this.logger.log(`[CDC] Referral synced: ${accountSequence}`); this.logger.log(`[CDC] Referral updated successfully: ${accountSequence}`);
} }
private async handleDelete(data: any): Promise<void> { private async handleDelete(data: any): Promise<void> {

View File

@ -6,7 +6,9 @@ import { ContributionAccountAggregate } from '../../domain/aggregates/contributi
 * CDC handler for user data
 * Syncs user rows pushed over from 1.0 and creates the matching
 * contribution account when one does not exist yet.
 *
 * All writes in this handler go through the external transaction client (tx).
 */
@Injectable() @Injectable()
export class UserSyncedHandler { export class UserSyncedHandler {
@ -47,19 +49,22 @@ export class UserSyncedHandler {
return; return;
} }
// 兼容不同的字段命名CDC 使用 snake_case
const userId = data.user_id ?? data.id; const userId = data.user_id ?? data.id;
const accountSequence = data.account_sequence ?? data.accountSequence; const accountSequence = data.account_sequence ?? data.accountSequence;
const phone = data.phone_number ?? data.phone ?? null; const phone = data.phone_number ?? data.phone ?? null;
const status = data.status ?? null; const status = data.status ?? 'ACTIVE';
this.logger.log(`[CDC] User create: userId=${userId}, accountSequence=${accountSequence}, status=${status}`); this.logger.log(`[CDC] User create: userId=${userId}, accountSequence=${accountSequence}, phone=${phone}, status=${status}`);
if (!userId || !accountSequence) { if (!userId || !accountSequence) {
this.logger.warn(`[CDC] Invalid user data: missing user_id or account_sequence`, { data }); this.logger.warn(`[CDC] Invalid user data: missing user_id or account_sequence`, { data });
return; return;
} }
// 100%同步数据 // 使用外部事务客户端执行所有操作
// 保存同步的用户数据
this.logger.log(`[CDC] Upserting synced user: ${accountSequence}`);
await tx.syncedUser.upsert({ await tx.syncedUser.upsert({
where: { accountSequence }, where: { accountSequence },
create: { create: {
@ -71,9 +76,8 @@ export class UserSyncedHandler {
syncedAt: new Date(), syncedAt: new Date(),
}, },
update: { update: {
originalUserId: BigInt(userId), phone: phone ?? undefined,
phone, status: status ?? undefined,
status,
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
@ -91,9 +95,11 @@ export class UserSyncedHandler {
data: persistData, data: persistData,
}); });
this.logger.log(`[CDC] Created contribution account for user: ${accountSequence}`); this.logger.log(`[CDC] Created contribution account for user: ${accountSequence}`);
} else {
this.logger.debug(`[CDC] Contribution account already exists for user: ${accountSequence}`);
} }
this.logger.log(`[CDC] User synced: ${accountSequence}`); this.logger.log(`[CDC] User synced successfully: ${accountSequence}`);
} }
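Every handler reads each field under its snake_case CDC name first and falls back to the camelCase spelling. A tiny generic helper sketch for that pattern (illustrative, not part of this change):

// Sketch: return the first defined, non-null value among several candidate keys.
function pick<T>(row: Record<string, unknown>, ...keys: string[]): T | undefined {
  for (const key of keys) {
    const value = row[key];
    if (value !== undefined && value !== null) {
      return value as T;
    }
  }
  return undefined;
}

// const accountSequence = pick<string>(data, 'account_sequence', 'accountSequence');
// const phone = pick<string>(data, 'phone_number', 'phone') ?? null;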
private async handleUpdate(data: any, sequenceNum: bigint, tx: TransactionClient): Promise<void> { private async handleUpdate(data: any, sequenceNum: bigint, tx: TransactionClient): Promise<void> {
@ -102,10 +108,11 @@ export class UserSyncedHandler {
return; return;
} }
// 兼容不同的字段命名CDC 使用 snake_case
const userId = data.user_id ?? data.id; const userId = data.user_id ?? data.id;
const accountSequence = data.account_sequence ?? data.accountSequence; const accountSequence = data.account_sequence ?? data.accountSequence;
const phone = data.phone_number ?? data.phone ?? null; const phone = data.phone_number ?? data.phone ?? null;
const status = data.status ?? null; const status = data.status ?? 'ACTIVE';
this.logger.log(`[CDC] User update: userId=${userId}, accountSequence=${accountSequence}, status=${status}`); this.logger.log(`[CDC] User update: userId=${userId}, accountSequence=${accountSequence}, status=${status}`);
@ -114,7 +121,6 @@ export class UserSyncedHandler {
return; return;
} }
// 100%同步数据
await tx.syncedUser.upsert({ await tx.syncedUser.upsert({
where: { accountSequence }, where: { accountSequence },
create: { create: {
@ -126,15 +132,14 @@ export class UserSyncedHandler {
syncedAt: new Date(), syncedAt: new Date(),
}, },
update: { update: {
originalUserId: BigInt(userId), phone: phone ?? undefined,
phone, status: status ?? undefined,
status,
sourceSequenceNum: sequenceNum, sourceSequenceNum: sequenceNum,
syncedAt: new Date(), syncedAt: new Date(),
}, },
}); });
this.logger.log(`[CDC] User synced: ${accountSequence}`); this.logger.log(`[CDC] User updated successfully: ${accountSequence}`);
} }
private async handleDelete(data: any): Promise<void> { private async handleDelete(data: any): Promise<void> {

View File

@ -183,16 +183,16 @@ export class GetContributionAccountQuery {
private toRecordDto(record: any): ContributionRecordDto { private toRecordDto(record: any): ContributionRecordDto {
return { return {
id: record.id?.toString() ?? '', id: record.id,
sourceType: record.sourceType, sourceType: record.sourceType,
sourceAdoptionId: record.sourceAdoptionId?.toString() ?? '', sourceAdoptionId: record.sourceAdoptionId,
sourceAccountSequence: record.sourceAccountSequence, sourceAccountSequence: record.sourceAccountSequence,
treeCount: record.treeCount, treeCount: record.treeCount,
baseContribution: record.baseContribution?.value?.toString() ?? '0', baseContribution: record.baseContribution.value.toString(),
distributionRate: record.distributionRate?.value?.toString() ?? '0', distributionRate: record.distributionRate.value.toString(),
levelDepth: record.levelDepth, levelDepth: record.levelDepth,
bonusTier: record.bonusTier, bonusTier: record.bonusTier,
finalContribution: record.amount?.value?.toString() ?? '0', finalContribution: record.finalContribution.value.toString(),
effectiveDate: record.effectiveDate, effectiveDate: record.effectiveDate,
expireDate: record.expireDate, expireDate: record.expireDate,
isExpired: record.isExpired, isExpired: record.isExpired,
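One version of toRecordDto converts BigInt ids and Decimal value objects with optional chaining plus explicit fallbacks, so a partially hydrated record serialises instead of throwing. A reduced sketch of that defensive mapping (field names follow the DTO above):

// Sketch: defensive string conversion for BigInt ids and Decimal-like value objects.
interface DecimalLike { toString(): string }
interface AmountVo { value: DecimalLike }

function stringOr(value: DecimalLike | null | undefined, fallback: string): string {
  return value != null ? value.toString() : fallback;
}

function mapRecordAmounts(record: { id?: bigint; baseContribution?: AmountVo; amount?: AmountVo }) {
  return {
    id: stringOr(record.id, ''),
    baseContribution: stringOr(record.baseContribution?.value, '0'),
    finalContribution: stringOr(record.amount?.value, '0'),
  };
}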

View File

@ -1,5 +1,4 @@
import { Injectable } from '@nestjs/common'; import { Injectable } from '@nestjs/common';
import Decimal from 'decimal.js';
import { ContributionAccountRepository } from '../../infrastructure/persistence/repositories/contribution-account.repository'; import { ContributionAccountRepository } from '../../infrastructure/persistence/repositories/contribution-account.repository';
import { ContributionRecordRepository } from '../../infrastructure/persistence/repositories/contribution-record.repository'; import { ContributionRecordRepository } from '../../infrastructure/persistence/repositories/contribution-record.repository';
import { UnallocatedContributionRepository } from '../../infrastructure/persistence/repositories/unallocated-contribution.repository'; import { UnallocatedContributionRepository } from '../../infrastructure/persistence/repositories/unallocated-contribution.repository';
@ -7,15 +6,6 @@ import { SystemAccountRepository } from '../../infrastructure/persistence/reposi
import { SyncedDataRepository } from '../../infrastructure/persistence/repositories/synced-data.repository'; import { SyncedDataRepository } from '../../infrastructure/persistence/repositories/synced-data.repository';
import { ContributionSourceType } from '../../domain/aggregates/contribution-account.aggregate'; import { ContributionSourceType } from '../../domain/aggregates/contribution-account.aggregate';
// 基准算力常量
const BASE_CONTRIBUTION_PER_TREE = new Decimal('22617');
const RATE_PERSONAL = new Decimal('0.70');
const RATE_OPERATION = new Decimal('0.12');
const RATE_PROVINCE = new Decimal('0.01');
const RATE_CITY = new Decimal('0.02');
const RATE_LEVEL_TOTAL = new Decimal('0.075');
const RATE_BONUS_TOTAL = new Decimal('0.075');
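These constants encode a per-tree base of 22617 split into buckets that sum to exactly 100%: 70% personal, 12% operation, 1% province, 2% city, 7.5% level and 7.5% bonus. A short worked sketch of the split using decimal.js:

import Decimal from 'decimal.js';

// Sketch: the same split expressed as a check that the buckets exhaust the base.
const base = new Decimal('22617');
const rates = {
  personal: new Decimal('0.70'),  // 15831.9 per tree
  operation: new Decimal('0.12'), //  2714.04
  province: new Decimal('0.01'),  //   226.17
  city: new Decimal('0.02'),      //   452.34
  level: new Decimal('0.075'),    //  1696.275
  bonus: new Decimal('0.075'),    //  1696.275
};

const rateSum = Object.values(rates).reduce((sum, r) => sum.plus(r), new Decimal(0));
// rateSum.equals(1) === true, so the six buckets account for the full 22617 per tree.
const personalPerTree = base.mul(rates.personal); // Decimal('15831.9')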
export interface ContributionStatsDto { export interface ContributionStatsDto {
// 用户统计 // 用户统计
totalUsers: number; totalUsers: number;
@ -26,57 +16,17 @@ export interface ContributionStatsDto {
totalAdoptions: number; totalAdoptions: number;
processedAdoptions: number; processedAdoptions: number;
unprocessedAdoptions: number; unprocessedAdoptions: number;
totalTrees: number;
// 算力统计 // 算力统计
totalContribution: string; totalContribution: string;
// 算力分布(基础) // 算力分布
contributionByType: { contributionByType: {
personal: string; personal: string;
teamLevel: string; teamLevel: string;
teamBonus: string; teamBonus: string;
}; };
// ========== 详细算力分解(按用户需求) ==========
// 全网算力 = 总认种树 * 22617
networkTotalContribution: string;
// 个人用户总算力 = 总认种树 * (22617 * 70%)
personalTotalContribution: string;
// 运营账户总算力 = 总认种树 * (22617 * 12%)
operationTotalContribution: string;
// 省公司总算力 = 总认种树 * (22617 * 1%)
provinceTotalContribution: string;
// 市公司总算力 = 总认种树 * (22617 * 2%)
cityTotalContribution: string;
// 层级算力详情 (7.5%)
levelContribution: {
total: string;
unlocked: string;
pending: string;
byTier: {
// 1档: 1-5级
tier1: { unlocked: string; pending: string };
// 2档: 6-10级
tier2: { unlocked: string; pending: string };
// 3档: 11-15级
tier3: { unlocked: string; pending: string };
};
};
// 团队奖励算力详情 (7.5%)
bonusContribution: {
total: string;
unlocked: string;
pending: string;
byTier: {
tier1: { unlocked: string; pending: string };
tier2: { unlocked: string; pending: string };
tier3: { unlocked: string; pending: string };
};
};
// 系统账户 // 系统账户
systemAccounts: { systemAccounts: {
accountType: string; accountType: string;
@ -111,10 +61,6 @@ export class GetContributionStatsQuery {
systemAccounts, systemAccounts,
totalUnallocated, totalUnallocated,
unallocatedByType, unallocatedByType,
detailedStats,
unallocatedByLevelTier,
unallocatedByBonusTier,
totalTrees,
] = await Promise.all([ ] = await Promise.all([
this.syncedDataRepository.countUsers(), this.syncedDataRepository.countUsers(),
this.accountRepository.countAccounts(), this.accountRepository.countAccounts(),
@ -126,33 +72,8 @@ export class GetContributionStatsQuery {
this.systemAccountRepository.findAll(), this.systemAccountRepository.findAll(),
this.unallocatedRepository.getTotalUnallocated(), this.unallocatedRepository.getTotalUnallocated(),
this.unallocatedRepository.getTotalUnallocatedByType(), this.unallocatedRepository.getTotalUnallocatedByType(),
this.accountRepository.getDetailedContributionStats(),
this.unallocatedRepository.getUnallocatedByLevelTier(),
this.unallocatedRepository.getUnallocatedByBonusTier(),
this.syncedDataRepository.getTotalTrees(),
]); ]);
// 计算理论算力(基于总认种树 * 基准算力)
const networkTotal = BASE_CONTRIBUTION_PER_TREE.mul(totalTrees);
const personalTotal = networkTotal.mul(RATE_PERSONAL);
const operationTotal = networkTotal.mul(RATE_OPERATION);
const provinceTotal = networkTotal.mul(RATE_PROVINCE);
const cityTotal = networkTotal.mul(RATE_CITY);
const levelTotal = networkTotal.mul(RATE_LEVEL_TOTAL);
const bonusTotal = networkTotal.mul(RATE_BONUS_TOTAL);
// 层级算力: 已解锁 + 未解锁
const levelUnlocked = new Decimal(detailedStats.levelUnlocked);
const levelPending = new Decimal(unallocatedByLevelTier.tier1)
.plus(unallocatedByLevelTier.tier2)
.plus(unallocatedByLevelTier.tier3);
// 团队奖励算力: 已解锁 + 未解锁
const bonusUnlocked = new Decimal(detailedStats.bonusUnlocked);
const bonusPending = new Decimal(unallocatedByBonusTier.tier1)
.plus(unallocatedByBonusTier.tier2)
.plus(unallocatedByBonusTier.tier3);
return { return {
totalUsers, totalUsers,
totalAccounts, totalAccounts,
@ -160,63 +81,12 @@ export class GetContributionStatsQuery {
totalAdoptions, totalAdoptions,
processedAdoptions: totalAdoptions - undistributedAdoptions, processedAdoptions: totalAdoptions - undistributedAdoptions,
unprocessedAdoptions: undistributedAdoptions, unprocessedAdoptions: undistributedAdoptions,
totalTrees,
totalContribution: totalContribution.value.toString(), totalContribution: totalContribution.value.toString(),
contributionByType: { contributionByType: {
personal: (contributionByType.get(ContributionSourceType.PERSONAL)?.value || 0).toString(), personal: (contributionByType.get(ContributionSourceType.PERSONAL)?.value || 0).toString(),
teamLevel: (contributionByType.get(ContributionSourceType.TEAM_LEVEL)?.value || 0).toString(), teamLevel: (contributionByType.get(ContributionSourceType.TEAM_LEVEL)?.value || 0).toString(),
teamBonus: (contributionByType.get(ContributionSourceType.TEAM_BONUS)?.value || 0).toString(), teamBonus: (contributionByType.get(ContributionSourceType.TEAM_BONUS)?.value || 0).toString(),
}, },
// 详细算力分解
networkTotalContribution: networkTotal.toString(),
personalTotalContribution: personalTotal.toString(),
operationTotalContribution: operationTotal.toString(),
provinceTotalContribution: provinceTotal.toString(),
cityTotalContribution: cityTotal.toString(),
// 层级算力详情
levelContribution: {
total: levelTotal.toString(),
unlocked: levelUnlocked.toString(),
pending: levelPending.toString(),
byTier: {
tier1: {
unlocked: detailedStats.levelByTier.tier1.unlocked,
pending: unallocatedByLevelTier.tier1,
},
tier2: {
unlocked: detailedStats.levelByTier.tier2.unlocked,
pending: unallocatedByLevelTier.tier2,
},
tier3: {
unlocked: detailedStats.levelByTier.tier3.unlocked,
pending: unallocatedByLevelTier.tier3,
},
},
},
// 团队奖励算力详情
bonusContribution: {
total: bonusTotal.toString(),
unlocked: bonusUnlocked.toString(),
pending: bonusPending.toString(),
byTier: {
tier1: {
unlocked: detailedStats.bonusByTier.tier1.unlocked,
pending: unallocatedByBonusTier.tier1,
},
tier2: {
unlocked: detailedStats.bonusByTier.tier2.unlocked,
pending: unallocatedByBonusTier.tier2,
},
tier3: {
unlocked: detailedStats.bonusByTier.tier3.unlocked,
pending: unallocatedByBonusTier.tier3,
},
},
},
systemAccounts: systemAccounts.map((a) => ({ systemAccounts: systemAccounts.map((a) => ({
accountType: a.accountType, accountType: a.accountType,
name: a.name, name: a.name,
@ -228,5 +98,4 @@ export class GetContributionStatsQuery {
), ),
}; };
} }
} }

View File

@ -1,85 +0,0 @@
import { Injectable } from '@nestjs/common';
import { SyncedDataRepository } from '../../infrastructure/persistence/repositories/synced-data.repository';
import { ContributionAccountRepository } from '../../infrastructure/persistence/repositories/contribution-account.repository';
export interface PlantingRecordDto {
orderId: string;
orderNo: string;
originalAdoptionId: string;
treeCount: number;
contributionPerTree: string;
totalContribution: string;
status: string;
adoptionDate: string | null;
createdAt: string;
}
export interface PlantingSummaryDto {
totalOrders: number;
totalTreeCount: number;
totalAmount: string;
effectiveTreeCount: number;
/** 用户实际的有效贡献值(个人算力) */
effectiveContribution: string;
firstPlantingAt: string | null;
lastPlantingAt: string | null;
}
export interface PlantingLedgerDto {
summary: PlantingSummaryDto;
items: PlantingRecordDto[];
total: number;
page: number;
pageSize: number;
totalPages: number;
}
@Injectable()
export class GetPlantingLedgerQuery {
constructor(
private readonly syncedDataRepository: SyncedDataRepository,
private readonly contributionAccountRepository: ContributionAccountRepository,
) {}
async execute(
accountSequence: string,
page: number = 1,
pageSize: number = 20,
): Promise<PlantingLedgerDto> {
const [summary, ledger, contributionAccount] = await Promise.all([
this.syncedDataRepository.getPlantingSummary(accountSequence),
this.syncedDataRepository.getPlantingLedger(accountSequence, page, pageSize),
this.contributionAccountRepository.findByAccountSequence(accountSequence),
]);
// 获取用户实际的有效贡献值(个人算力)
const effectiveContribution = contributionAccount?.personalContribution.toString() || '0';
return {
summary: {
totalOrders: summary.totalOrders,
totalTreeCount: summary.totalTreeCount,
totalAmount: summary.totalAmount,
effectiveTreeCount: summary.effectiveTreeCount,
effectiveContribution,
firstPlantingAt: summary.firstPlantingAt?.toISOString() || null,
lastPlantingAt: summary.lastPlantingAt?.toISOString() || null,
},
items: ledger.items.map((item) => ({
orderId: item.id.toString(),
orderNo: `ORD-${item.originalAdoptionId}`,
originalAdoptionId: item.originalAdoptionId.toString(),
treeCount: item.treeCount,
contributionPerTree: item.contributionPerTree.toString(),
totalContribution: item.contributionPerTree.mul(item.treeCount).toString(),
status: item.status || 'UNKNOWN',
adoptionDate: item.adoptionDate?.toISOString() || null,
createdAt: item.createdAt.toISOString(),
})),
total: ledger.total,
page: ledger.page,
pageSize: ledger.pageSize,
totalPages: ledger.totalPages,
};
}
}
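As a usage note for the deleted query above: it merges the synced planting summary with the user's actual personal contribution taken from the contribution account. A hedged example of calling it from an async context (the account sequence is made up):

// Sketch: paging through a user's planting ledger.
const ledger = await getPlantingLedgerQuery.execute('ACC-000123', 1, 20);

console.log(ledger.summary.totalTreeCount, ledger.summary.effectiveContribution);
for (const item of ledger.items) {
  // totalContribution = contributionPerTree * treeCount, as computed in the mapper above
  console.log(item.orderNo, item.treeCount, item.totalContribution);
}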

View File

@ -1,121 +0,0 @@
import { Injectable, Inject } from '@nestjs/common';
import {
ISyncedDataRepository,
SYNCED_DATA_REPOSITORY,
} from '../../domain/repositories/synced-data.repository.interface';
/**
* Team member info (per direct referral)
*/
export interface TeamMemberDto {
accountSequence: string;
personalPlantingCount: number;
teamPlantingCount: number;
directReferralCount: number;
}
/**
* Paginated response for a direct-referral listing
*/
export interface DirectReferralsResponseDto {
referrals: TeamMemberDto[];
total: number;
hasMore: boolean;
}
/**
* The current user's own team overview
*/
export interface MyTeamInfoDto {
accountSequence: string;
personalPlantingCount: number;
teamPlantingCount: number;
directReferralCount: number;
}
@Injectable()
export class GetTeamTreeQuery {
constructor(
@Inject(SYNCED_DATA_REPOSITORY)
private readonly syncedDataRepository: ISyncedDataRepository,
) {}
/**
* Get the current user's team overview (personal trees, team trees, direct referrals)
*/
async getMyTeamInfo(accountSequence: string): Promise<MyTeamInfoDto> {
// 获取个人认种棵数
const personalPlantingCount = await this.syncedDataRepository.getTotalTreesByAccountSequence(accountSequence);
// 获取直推数量
const directReferrals = await this.syncedDataRepository.findDirectReferrals(accountSequence);
// 获取团队认种棵数(伞下各级总和)
const teamTreesByLevel = await this.syncedDataRepository.getTeamTreesByLevel(accountSequence, 15);
let teamPlantingCount = 0;
teamTreesByLevel.forEach((count) => {
teamPlantingCount += count;
});
return {
accountSequence,
personalPlantingCount,
teamPlantingCount,
directReferralCount: directReferrals.length,
};
}
/**
* Get the user's direct referrals with pagination
*/
async getDirectReferrals(
accountSequence: string,
limit: number = 100,
offset: number = 0,
): Promise<DirectReferralsResponseDto> {
// 获取所有直推
const allDirectReferrals = await this.syncedDataRepository.findDirectReferrals(accountSequence);
// 分页
const total = allDirectReferrals.length;
const paginatedReferrals = allDirectReferrals.slice(offset, offset + limit);
// 获取每个直推成员的详细信息
const referrals: TeamMemberDto[] = await Promise.all(
paginatedReferrals.map(async (ref) => {
return this.getTeamMemberInfo(ref.accountSequence);
}),
);
return {
referrals,
total,
hasMore: offset + limit < total,
};
}
/**
* Build the team info for a single member
*/
private async getTeamMemberInfo(accountSequence: string): Promise<TeamMemberDto> {
// 获取个人认种棵数
const personalPlantingCount = await this.syncedDataRepository.getTotalTreesByAccountSequence(accountSequence);
// 获取直推数量
const directReferrals = await this.syncedDataRepository.findDirectReferrals(accountSequence);
// 获取团队认种棵数
const teamTreesByLevel = await this.syncedDataRepository.getTeamTreesByLevel(accountSequence, 15);
let teamPlantingCount = 0;
teamTreesByLevel.forEach((count) => {
teamPlantingCount += count;
});
return {
accountSequence,
personalPlantingCount,
teamPlantingCount,
directReferralCount: directReferrals.length,
};
}
}
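getDirectReferrals slices the full direct-referral list in memory and derives hasMore from offset + limit, so paging looks like this (sketch inside an async context, account sequence made up):

// Sketch: walk the direct referrals page by page until hasMore is false.
let offset = 0;
const limit = 100;
for (;;) {
  const page = await getTeamTreeQuery.getDirectReferrals('ACC-000123', limit, offset);
  page.referrals.forEach((m) => console.log(m.accountSequence, m.teamPlantingCount));
  if (!page.hasMore) break;
  offset += limit;
}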

View File

@ -3,11 +3,9 @@ import { Cron, CronExpression } from '@nestjs/schedule';
import { ContributionCalculationService } from '../services/contribution-calculation.service'; import { ContributionCalculationService } from '../services/contribution-calculation.service';
import { SnapshotService } from '../services/snapshot.service'; import { SnapshotService } from '../services/snapshot.service';
import { ContributionRecordRepository } from '../../infrastructure/persistence/repositories/contribution-record.repository'; import { ContributionRecordRepository } from '../../infrastructure/persistence/repositories/contribution-record.repository';
import { ContributionAccountRepository } from '../../infrastructure/persistence/repositories/contribution-account.repository';
import { OutboxRepository } from '../../infrastructure/persistence/repositories/outbox.repository'; import { OutboxRepository } from '../../infrastructure/persistence/repositories/outbox.repository';
import { KafkaProducerService } from '../../infrastructure/kafka/kafka-producer.service'; import { KafkaProducerService } from '../../infrastructure/kafka/kafka-producer.service';
import { RedisService } from '../../infrastructure/redis/redis.service'; import { RedisService } from '../../infrastructure/redis/redis.service';
import { ContributionAccountUpdatedEvent } from '../../domain/events';
/** /**
* *
@ -21,7 +19,6 @@ export class ContributionScheduler implements OnModuleInit {
private readonly calculationService: ContributionCalculationService, private readonly calculationService: ContributionCalculationService,
private readonly snapshotService: SnapshotService, private readonly snapshotService: SnapshotService,
private readonly contributionRecordRepository: ContributionRecordRepository, private readonly contributionRecordRepository: ContributionRecordRepository,
private readonly contributionAccountRepository: ContributionAccountRepository,
private readonly outboxRepository: OutboxRepository, private readonly outboxRepository: OutboxRepository,
private readonly kafkaProducer: KafkaProducerService, private readonly kafkaProducer: KafkaProducerService,
private readonly redis: RedisService, private readonly redis: RedisService,
@ -177,128 +174,4 @@ export class ContributionScheduler implements OnModuleInit {
await this.redis.releaseLock(`${this.LOCK_KEY}:cleanup`, lockValue); await this.redis.releaseLock(`${this.LOCK_KEY}:cleanup`, lockValue);
} }
} }
/**
* Incremental sync: runs every 10 minutes and publishes contribution accounts
* updated within the last 15 minutes (the extra 5 minutes of overlap avoids missing boundary updates)
*/
@Cron('*/10 * * * *')
async publishRecentlyUpdatedAccounts(): Promise<void> {
const lockValue = await this.redis.acquireLock(`${this.LOCK_KEY}:incremental-sync`, 540); // 9分钟锁
if (!lockValue) {
return;
}
try {
// 查找过去15分钟内更新的账户比10分钟多5分钟余量避免遗漏边界情况
const fifteenMinutesAgo = new Date(Date.now() - 15 * 60 * 1000);
const accounts = await this.contributionAccountRepository.findRecentlyUpdated(fifteenMinutesAgo, 500);
if (accounts.length === 0) {
return;
}
const events = accounts.map((account) => {
const event = new ContributionAccountUpdatedEvent(
account.accountSequence,
account.personalContribution.value.toString(),
account.totalLevelPending.value.toString(),
account.totalBonusPending.value.toString(),
account.effectiveContribution.value.toString(),
account.effectiveContribution.value.toString(),
account.hasAdopted,
account.directReferralAdoptedCount,
account.unlockedLevelDepth,
account.unlockedBonusTiers,
account.createdAt,
);
return {
aggregateType: ContributionAccountUpdatedEvent.AGGREGATE_TYPE,
aggregateId: account.accountSequence,
eventType: ContributionAccountUpdatedEvent.EVENT_TYPE,
payload: event.toPayload(),
};
});
await this.outboxRepository.saveMany(events);
this.logger.log(`Incremental sync: published ${accounts.length} recently updated accounts`);
} catch (error) {
this.logger.error('Failed to publish recently updated accounts', error);
} finally {
await this.redis.releaseLock(`${this.LOCK_KEY}:incremental-sync`, lockValue);
}
}
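Each of these cron jobs follows the same single-flight pattern: take a Redis lock whose TTL expires before the next tick, skip the run if another instance already holds it, and release the lock in finally. The pattern distilled into a sketch (the 540-second TTL is the one used above):

// Sketch: run a job at most once across instances per tick.
async function runLockedJob(
  redis: {
    acquireLock(key: string, ttlSeconds: number): Promise<string | null>;
    releaseLock(key: string, value: string): Promise<void>;
  },
  key: string,
  work: () => Promise<void>,
): Promise<void> {
  const lockValue = await redis.acquireLock(key, 540);
  if (!lockValue) {
    return; // another instance already owns this run
  }
  try {
    await work();
  } finally {
    await redis.releaseLock(key, lockValue);
  }
}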
/**
* Full sync: runs daily at 4 AM and publishes every contribution account,
* page by page, through the outbox
*/
@Cron('0 4 * * *')
async publishAllAccountUpdates(): Promise<void> {
const lockValue = await this.redis.acquireLock(`${this.LOCK_KEY}:full-sync`, 3600); // 1小时锁
if (!lockValue) {
return;
}
try {
this.logger.log('Starting daily full sync of contribution accounts...');
let page = 1;
const pageSize = 100;
let totalPublished = 0;
while (true) {
const { items: accounts, total } = await this.contributionAccountRepository.findMany({
page,
limit: pageSize,
orderBy: 'effectiveContribution',
order: 'desc',
});
if (accounts.length === 0) {
break;
}
const events = accounts.map((account) => {
const event = new ContributionAccountUpdatedEvent(
account.accountSequence,
account.personalContribution.value.toString(),
account.totalLevelPending.value.toString(),
account.totalBonusPending.value.toString(),
account.effectiveContribution.value.toString(),
account.effectiveContribution.value.toString(),
account.hasAdopted,
account.directReferralAdoptedCount,
account.unlockedLevelDepth,
account.unlockedBonusTiers,
account.createdAt,
);
return {
aggregateType: ContributionAccountUpdatedEvent.AGGREGATE_TYPE,
aggregateId: account.accountSequence,
eventType: ContributionAccountUpdatedEvent.EVENT_TYPE,
payload: event.toPayload(),
};
});
await this.outboxRepository.saveMany(events);
totalPublished += accounts.length;
if (accounts.length < pageSize || page * pageSize >= total) {
break;
}
page++;
}
this.logger.log(`Daily full sync completed: published ${totalPublished} contribution account events`);
} catch (error) {
this.logger.error('Failed to publish all account updates', error);
} finally {
await this.redis.releaseLock(`${this.LOCK_KEY}:full-sync`, lockValue);
}
}
} }

View File

@ -1,274 +0,0 @@
import { Injectable, Logger } from '@nestjs/common';
import { UnallocatedContributionRepository, UnallocatedContribution } from '../../infrastructure/persistence/repositories/unallocated-contribution.repository';
import { ContributionAccountRepository } from '../../infrastructure/persistence/repositories/contribution-account.repository';
import { ContributionRecordRepository } from '../../infrastructure/persistence/repositories/contribution-record.repository';
import { SystemAccountRepository } from '../../infrastructure/persistence/repositories/system-account.repository';
import { OutboxRepository } from '../../infrastructure/persistence/repositories/outbox.repository';
import { SyncedDataRepository } from '../../infrastructure/persistence/repositories/synced-data.repository';
import { UnitOfWork } from '../../infrastructure/persistence/unit-of-work/unit-of-work';
import { ContributionRecordAggregate } from '../../domain/aggregates/contribution-record.aggregate';
import { ContributionSourceType } from '../../domain/aggregates/contribution-account.aggregate';
import { ContributionAmount } from '../../domain/value-objects/contribution-amount.vo';
import { DistributionRate } from '../../domain/value-objects/distribution-rate.vo';
import { ContributionRecordSyncedEvent, SystemAccountSyncedEvent } from '../../domain/events';
/**
* Bonus claim (back-fill) service
* When a referrer crosses a direct-referral threshold, claims the team-bonus
* contributions that were parked as unallocated under HEADQUARTERS
*/
@Injectable()
export class BonusClaimService {
private readonly logger = new Logger(BonusClaimService.name);
constructor(
private readonly unallocatedContributionRepository: UnallocatedContributionRepository,
private readonly contributionAccountRepository: ContributionAccountRepository,
private readonly contributionRecordRepository: ContributionRecordRepository,
private readonly systemAccountRepository: SystemAccountRepository,
private readonly outboxRepository: OutboxRepository,
private readonly syncedDataRepository: SyncedDataRepository,
private readonly unitOfWork: UnitOfWork,
) {}
/**
* Check whether new bonus tiers were unlocked and, if so, claim the pending bonus
*
* @param accountSequence the referrer's account sequence
* @param previousCount direct-referral adopted count before this event
* @param newCount direct-referral adopted count after this event
*/
async checkAndClaimBonus(
accountSequence: string,
previousCount: number,
newCount: number,
): Promise<void> {
// 检查是否达到新的解锁条件
const tiersToClaimList: number[] = [];
// T2: 直推≥2人认种时解锁
if (previousCount < 2 && newCount >= 2) {
tiersToClaimList.push(2);
}
// T3: 直推≥4人认种时解锁
if (previousCount < 4 && newCount >= 4) {
tiersToClaimList.push(3);
}
if (tiersToClaimList.length === 0) {
return;
}
this.logger.log(
`User ${accountSequence} unlocked bonus tiers: ${tiersToClaimList.join(', ')} ` +
`(directReferralAdoptedCount: ${previousCount} -> ${newCount})`,
);
// 检查是否已在事务中(被 ContributionCalculationService 调用时)
// 如果已在事务中,直接执行,避免嵌套事务导致超时
if (this.unitOfWork.isInTransaction()) {
for (const tier of tiersToClaimList) {
await this.claimBonusTier(accountSequence, tier);
}
} else {
// 独立调用时,开启新事务
await this.unitOfWork.executeInTransaction(async () => {
for (const tier of tiersToClaimList) {
await this.claimBonusTier(accountSequence, tier);
}
});
}
}
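The tier check fires only when a threshold is crossed between the previous and the new count, so jumping from 1 to 5 adopted direct referrals unlocks T2 and T3 together, while staying above a threshold never re-triggers it. The same rule as a standalone sketch:

// Sketch: bonus tiers unlocked by the change in directReferralAdoptedCount.
const BONUS_TIER_THRESHOLDS: Array<{ tier: number; minCount: number }> = [
  { tier: 2, minCount: 2 }, // T2 unlocks at >= 2 adopted direct referrals
  { tier: 3, minCount: 4 }, // T3 unlocks at >= 4
];

function tiersUnlockedBy(previousCount: number, newCount: number): number[] {
  return BONUS_TIER_THRESHOLDS
    .filter(({ minCount }) => previousCount < minCount && newCount >= minCount)
    .map(({ tier }) => tier);
}

// tiersUnlockedBy(1, 5) => [2, 3]; tiersUnlockedBy(2, 3) => []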
/**
* Claim all pending bonus records of the given tier for this account
*/
private async claimBonusTier(accountSequence: string, bonusTier: number): Promise<void> {
// 1. 查询待领取的记录
const pendingRecords = await this.unallocatedContributionRepository.findPendingBonusByAccountSequence(
accountSequence,
bonusTier,
);
if (pendingRecords.length === 0) {
this.logger.debug(`No pending T${bonusTier} bonus records for ${accountSequence}`);
return;
}
this.logger.log(
`Claiming ${pendingRecords.length} T${bonusTier} bonus records for ${accountSequence}`,
);
// 2. 查询原始认种数据,获取 treeCount 和 baseContribution
const adoptionDataMap = new Map<string, { treeCount: number; baseContribution: ContributionAmount }>();
for (const pending of pendingRecords) {
const adoptionIdStr = pending.sourceAdoptionId.toString();
if (!adoptionDataMap.has(adoptionIdStr)) {
const adoption = await this.syncedDataRepository.findSyncedAdoptionByOriginalId(pending.sourceAdoptionId);
if (adoption) {
adoptionDataMap.set(adoptionIdStr, {
treeCount: adoption.treeCount,
baseContribution: new ContributionAmount(adoption.contributionPerTree),
});
} else {
// 如果找不到原始认种数据,使用默认值并记录警告
this.logger.warn(`Adoption not found for sourceAdoptionId: ${pending.sourceAdoptionId}, using default values`);
adoptionDataMap.set(adoptionIdStr, {
treeCount: 0,
baseContribution: new ContributionAmount(0),
});
}
}
}
// 3. 创建贡献值记录
const contributionRecords: ContributionRecordAggregate[] = [];
for (const pending of pendingRecords) {
const adoptionData = adoptionDataMap.get(pending.sourceAdoptionId.toString())!;
const record = new ContributionRecordAggregate({
accountSequence: accountSequence,
sourceType: ContributionSourceType.TEAM_BONUS,
sourceAdoptionId: pending.sourceAdoptionId,
sourceAccountSequence: pending.sourceAccountSequence,
treeCount: adoptionData.treeCount,
baseContribution: adoptionData.baseContribution,
distributionRate: DistributionRate.BONUS_PER,
bonusTier: bonusTier,
amount: pending.amount,
effectiveDate: pending.effectiveDate,
expireDate: pending.expireDate,
});
contributionRecords.push(record);
}
// 4. 保存贡献值记录
const savedRecords = await this.contributionRecordRepository.saveMany(contributionRecords);
// 5. 更新用户的贡献值账户
let totalAmount = new ContributionAmount(0);
for (const pending of pendingRecords) {
totalAmount = new ContributionAmount(totalAmount.value.plus(pending.amount.value));
}
await this.contributionAccountRepository.updateContribution(
accountSequence,
ContributionSourceType.TEAM_BONUS,
totalAmount,
null,
bonusTier,
);
// 6. 标记待领取记录为已分配
const pendingIds = pendingRecords.map((r) => r.id);
await this.unallocatedContributionRepository.claimBonusRecords(pendingIds, accountSequence);
// 7. 从 HEADQUARTERS 减少算力并删除明细记录
await this.systemAccountRepository.subtractContribution('HEADQUARTERS', null, totalAmount);
for (const pending of pendingRecords) {
await this.systemAccountRepository.deleteContributionRecordsByAdoption(
'HEADQUARTERS',
null,
pending.sourceAdoptionId,
pending.sourceAccountSequence,
);
}
// 8. 发布 HEADQUARTERS 账户更新事件
const headquartersAccount = await this.systemAccountRepository.findByTypeAndRegion('HEADQUARTERS', null);
if (headquartersAccount) {
const hqEvent = new SystemAccountSyncedEvent(
'HEADQUARTERS',
null,
headquartersAccount.name,
headquartersAccount.contributionBalance.value.toString(),
headquartersAccount.createdAt,
);
await this.outboxRepository.save({
aggregateType: SystemAccountSyncedEvent.AGGREGATE_TYPE,
aggregateId: 'HEADQUARTERS',
eventType: SystemAccountSyncedEvent.EVENT_TYPE,
payload: hqEvent.toPayload(),
});
}
// 9. 发布事件到 Kafka通过 Outbox
await this.publishBonusClaimEvents(accountSequence, savedRecords, pendingRecords);
this.logger.log(
`Claimed T${bonusTier} bonus for ${accountSequence}: ` +
`${pendingRecords.length} records, total amount: ${totalAmount.value.toString()}`,
);
}
/**
* Publish bonus-claim events through the outbox
*/
private async publishBonusClaimEvents(
accountSequence: string,
savedRecords: ContributionRecordAggregate[],
pendingRecords: UnallocatedContribution[],
): Promise<void> {
// 1. 发布贡献值记录同步事件(用于 mining-admin-service CDC
for (const record of savedRecords) {
const event = new ContributionRecordSyncedEvent(
record.id!,
record.accountSequence,
record.sourceType,
record.sourceAdoptionId,
record.sourceAccountSequence,
record.treeCount,
record.baseContribution.value.toString(),
record.distributionRate.value.toString(),
record.levelDepth,
record.bonusTier,
record.amount.value.toString(),
record.effectiveDate,
record.expireDate,
record.isExpired,
record.createdAt,
);
await this.outboxRepository.save({
aggregateType: ContributionRecordSyncedEvent.AGGREGATE_TYPE,
aggregateId: record.id!.toString(),
eventType: ContributionRecordSyncedEvent.EVENT_TYPE,
payload: event.toPayload(),
});
}
// 2. 发布补发事件到 mining-wallet-service
const userContributions = savedRecords.map((record, index) => ({
accountSequence: record.accountSequence,
contributionType: 'TEAM_BONUS',
amount: record.amount.value.toString(),
bonusTier: record.bonusTier,
effectiveDate: record.effectiveDate.toISOString(),
expireDate: record.expireDate.toISOString(),
sourceAdoptionId: record.sourceAdoptionId.toString(),
sourceAccountSequence: record.sourceAccountSequence,
isBackfill: true, // 标记为补发
}));
const eventId = `bonus-claim-${accountSequence}-${Date.now()}`;
const payload = {
eventType: 'BonusClaimed',
eventId,
timestamp: new Date().toISOString(),
payload: {
accountSequence,
bonusTier: savedRecords[0]?.bonusTier,
claimedCount: savedRecords.length,
userContributions,
},
};
await this.outboxRepository.save({
eventType: 'BonusClaimed',
topic: 'contribution.bonus.claimed',
key: accountSequence,
payload,
aggregateId: accountSequence,
aggregateType: 'ContributionAccount',
});
}
}

View File

@ -9,12 +9,10 @@ import { OutboxRepository } from '../../infrastructure/persistence/repositories/
import { UnitOfWork } from '../../infrastructure/persistence/unit-of-work/unit-of-work'; import { UnitOfWork } from '../../infrastructure/persistence/unit-of-work/unit-of-work';
import { ContributionAccountAggregate, ContributionSourceType } from '../../domain/aggregates/contribution-account.aggregate'; import { ContributionAccountAggregate, ContributionSourceType } from '../../domain/aggregates/contribution-account.aggregate';
import { ContributionRecordAggregate } from '../../domain/aggregates/contribution-record.aggregate'; import { ContributionRecordAggregate } from '../../domain/aggregates/contribution-record.aggregate';
import { ContributionAmount } from '../../domain/value-objects/contribution-amount.vo';
import { SyncedReferral } from '../../domain/repositories/synced-data.repository.interface'; import { SyncedReferral } from '../../domain/repositories/synced-data.repository.interface';
import { ContributionDistributionPublisherService } from './contribution-distribution-publisher.service'; import { ContributionDistributionPublisherService } from './contribution-distribution-publisher.service';
import { ContributionRateService } from './contribution-rate.service'; import { ContributionRateService } from './contribution-rate.service';
import { BonusClaimService } from './bonus-claim.service'; import { ContributionRecordSyncedEvent, NetworkProgressUpdatedEvent } from '../../domain/events';
import { ContributionRecordSyncedEvent, NetworkProgressUpdatedEvent, ContributionAccountUpdatedEvent, SystemAccountSyncedEvent, SystemContributionRecordCreatedEvent, UnallocatedContributionSyncedEvent } from '../../domain/events';
/** /**
* *
@ -35,7 +33,6 @@ export class ContributionCalculationService {
private readonly unitOfWork: UnitOfWork, private readonly unitOfWork: UnitOfWork,
private readonly distributionPublisher: ContributionDistributionPublisherService, private readonly distributionPublisher: ContributionDistributionPublisherService,
private readonly contributionRateService: ContributionRateService, private readonly contributionRateService: ContributionRateService,
private readonly bonusClaimService: BonusClaimService,
) {} ) {}
/** /**
@ -114,49 +111,6 @@ export class ContributionCalculationService {
`teamBonus=${result.teamBonusRecords.length}, ` + `teamBonus=${result.teamBonusRecords.length}, ` +
`unallocated=${result.unallocatedContributions.length}`, `unallocated=${result.unallocatedContributions.length}`,
); );
// 更新全网认种进度(更新 NetworkAdoptionProgress 表)
// 判断是否为新认种用户:之前没有账户记录即为新用户
const isNewUser = !adopterAccount;
await this.contributionRateService.updateNetworkProgress(
adoption.treeCount,
adoption.adoptionDate,
adoption.originalAdoptionId,
isNewUser,
);
// 发布全网进度更新事件(用于 mining-service 同步全网理论算力)
await this.publishNetworkProgressEvent();
}
/**
* Publish the network adoption progress event (consumed by mining-service)
*/
private async publishNetworkProgressEvent(): Promise<void> {
try {
const progress = await this.contributionRateService.getNetworkProgress();
const event = new NetworkProgressUpdatedEvent(
progress.totalTreeCount,
progress.totalAdoptionOrders,
progress.totalAdoptedUsers,
progress.currentUnit,
progress.currentMultiplier.toString(),
progress.currentContributionPerTree.toString(),
progress.nextUnitTreeCount,
);
await this.outboxRepository.save({
aggregateType: NetworkProgressUpdatedEvent.AGGREGATE_TYPE,
aggregateId: 'network',
eventType: NetworkProgressUpdatedEvent.EVENT_TYPE,
payload: event.toPayload(),
});
this.logger.debug(`Published NetworkProgressUpdatedEvent: trees=${progress.totalTreeCount}`);
} catch (error) {
this.logger.error('Failed to publish NetworkProgressUpdatedEvent', error);
}
} }
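publishNetworkProgressEvent uses the transactional outbox: the event is written to the outbox table in the same database, and a separate relay is expected to publish it to Kafka later, so a Kafka outage does not lose the update. A reduced sketch of that flow (the payload fields are a subset and partly illustrative):

// Sketch: persist a domain event through the outbox instead of calling Kafka directly.
interface OutboxRecord {
  aggregateType: string;
  aggregateId: string;
  eventType: string;
  payload: Record<string, unknown>;
}

async function publishNetworkProgress(
  outbox: { save(record: OutboxRecord): Promise<void> },
  progress: { totalTreeCount: number; currentContributionPerTree: string },
): Promise<void> {
  await outbox.save({
    aggregateType: 'NetworkProgress', // illustrative; the real constants live on the event class
    aggregateId: 'network',
    eventType: 'NetworkProgressUpdated',
    payload: {
      totalTreeCount: progress.totalTreeCount,
      contributionPerTree: progress.currentContributionPerTree,
      occurredAt: new Date().toISOString(),
    },
  });
}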
/** /**
@ -210,8 +164,6 @@ export class ContributionCalculationService {
): Promise<void> { ): Promise<void> {
// 收集所有保存后的记录带ID用于发布事件 // 收集所有保存后的记录带ID用于发布事件
const savedRecords: ContributionRecordAggregate[] = []; const savedRecords: ContributionRecordAggregate[] = [];
// 收集所有被更新的账户序列号(用于发布账户更新事件)
const updatedAccountSequences = new Set<string>();
// 1. 保存个人算力记录 // 1. 保存个人算力记录
const savedPersonalRecord = await this.contributionRecordRepository.save(result.personalRecord); const savedPersonalRecord = await this.contributionRecordRepository.save(result.personalRecord);
@ -226,7 +178,6 @@ export class ContributionCalculationService {
} }
account.addPersonalContribution(result.personalRecord.amount); account.addPersonalContribution(result.personalRecord.amount);
await this.contributionAccountRepository.save(account); await this.contributionAccountRepository.save(account);
updatedAccountSequences.add(result.personalRecord.accountSequence);
// 2. 保存团队层级算力记录 // 2. 保存团队层级算力记录
if (result.teamLevelRecords.length > 0) { if (result.teamLevelRecords.length > 0) {
@ -242,7 +193,6 @@ export class ContributionCalculationService {
record.levelDepth, // 传递层级深度 record.levelDepth, // 传递层级深度
null, null,
); );
updatedAccountSequences.add(record.accountSequence);
} }
} }
@ -260,7 +210,6 @@ export class ContributionCalculationService {
null, null,
record.bonusTier, // 传递加成档位 record.bonusTier, // 传递加成档位
); );
updatedAccountSequences.add(record.accountSequence);
} }
} }
@ -268,7 +217,7 @@ export class ContributionCalculationService {
const effectiveDate = result.personalRecord.effectiveDate; const effectiveDate = result.personalRecord.effectiveDate;
const expireDate = result.personalRecord.expireDate; const expireDate = result.personalRecord.expireDate;
// 4. 保存未分配算力并发布同步事件 // 4. 保存未分配算力
if (result.unallocatedContributions.length > 0) { if (result.unallocatedContributions.length > 0) {
await this.unallocatedContributionRepository.saveMany( await this.unallocatedContributionRepository.saveMany(
result.unallocatedContributions.map((u) => ({ result.unallocatedContributions.map((u) => ({
@ -279,189 +228,28 @@ export class ContributionCalculationService {
expireDate, expireDate,
})), })),
); );
// 汇总未分配算力到 HEADQUARTERS总部账户
const totalUnallocatedAmount = result.unallocatedContributions.reduce(
(sum, u) => sum.add(u.amount),
new ContributionAmount(0),
);
await this.systemAccountRepository.addContribution(
'HEADQUARTERS',
null,
totalUnallocatedAmount,
);
// 为每笔未分配算力创建 HEADQUARTERS 明细记录
for (const unallocated of result.unallocatedContributions) {
// 确定来源类型和层级深度
const sourceType = unallocated.type as string; // LEVEL_OVERFLOW / LEVEL_NO_ANCESTOR / BONUS_TIER_1/2/3
const levelDepth = unallocated.levelDepth;
const savedRecord = await this.systemAccountRepository.saveContributionRecord({
accountType: 'HEADQUARTERS',
regionCode: null,
sourceAdoptionId,
sourceAccountSequence,
sourceType,
levelDepth,
distributionRate: 0, // 未分配算力没有固定比例
amount: unallocated.amount,
effectiveDate,
expireDate: null,
});
// 发布 HEADQUARTERS 算力明细事件
const recordEvent = new SystemContributionRecordCreatedEvent(
savedRecord.id,
'HEADQUARTERS',
null,
sourceAdoptionId,
sourceAccountSequence,
sourceType as any,
levelDepth,
0,
unallocated.amount.value.toString(),
effectiveDate,
null,
savedRecord.createdAt,
);
await this.outboxRepository.save({
aggregateType: SystemContributionRecordCreatedEvent.AGGREGATE_TYPE,
aggregateId: savedRecord.id.toString(),
eventType: SystemContributionRecordCreatedEvent.EVENT_TYPE,
payload: recordEvent.toPayload(),
});
}
// 发布 HEADQUARTERS 账户同步事件
const headquartersAccount = await this.systemAccountRepository.findByTypeAndRegion('HEADQUARTERS', null);
if (headquartersAccount) {
const hqEvent = new SystemAccountSyncedEvent(
'HEADQUARTERS',
null, // 区域代码(总部没有区域)
headquartersAccount.name,
headquartersAccount.contributionBalance.value.toString(),
headquartersAccount.createdAt,
);
await this.outboxRepository.save({
aggregateType: SystemAccountSyncedEvent.AGGREGATE_TYPE,
aggregateId: 'HEADQUARTERS',
eventType: SystemAccountSyncedEvent.EVENT_TYPE,
payload: hqEvent.toPayload(),
});
}
// 发布未分配算力同步事件(用于 mining-service 同步待解锁算力)
for (const unallocated of result.unallocatedContributions) {
const event = new UnallocatedContributionSyncedEvent(
sourceAdoptionId,
sourceAccountSequence,
unallocated.wouldBeAccountSequence,
unallocated.type,
unallocated.amount.value.toString(),
unallocated.reason,
effectiveDate,
expireDate,
);
await this.outboxRepository.save({
aggregateType: UnallocatedContributionSyncedEvent.AGGREGATE_TYPE,
aggregateId: `${sourceAdoptionId}-${unallocated.type}`,
eventType: UnallocatedContributionSyncedEvent.EVENT_TYPE,
payload: event.toPayload(),
});
}
} }
// 5. 保存系统账户算力并发布同步事件 // 5. 保存系统账户算力
if (result.systemContributions.length > 0) { if (result.systemContributions.length > 0) {
await this.systemAccountRepository.ensureSystemAccountsExist(); await this.systemAccountRepository.ensureSystemAccountsExist();
for (const sys of result.systemContributions) { for (const sys of result.systemContributions) {
// 动态创建/更新系统账户 await this.systemAccountRepository.addContribution(sys.accountType, sys.amount);
await this.systemAccountRepository.addContribution( await this.systemAccountRepository.saveContributionRecord({
sys.accountType, systemAccountType: sys.accountType,
sys.regionCode,
sys.amount,
);
// 保存算力明细记录
const savedRecord = await this.systemAccountRepository.saveContributionRecord({
accountType: sys.accountType,
regionCode: sys.regionCode,
sourceAdoptionId, sourceAdoptionId,
sourceAccountSequence, sourceAccountSequence,
sourceType: 'FIXED_RATE', // 固定比例分配
levelDepth: null,
distributionRate: sys.rate.value.toNumber(), distributionRate: sys.rate.value.toNumber(),
amount: sys.amount, amount: sys.amount,
effectiveDate, effectiveDate,
expireDate: null, expireDate: null, // System account contributions never expire based on the schema's contributionNeverExpires field
}); });
// 发布系统账户同步事件(用于 mining-service 同步系统账户算力)
const systemAccount = await this.systemAccountRepository.findByTypeAndRegion(
sys.accountType,
sys.regionCode,
);
if (systemAccount) {
const event = new SystemAccountSyncedEvent(
sys.accountType,
sys.regionCode,
systemAccount.name,
systemAccount.contributionBalance.value.toString(),
systemAccount.createdAt,
);
await this.outboxRepository.save({
aggregateType: SystemAccountSyncedEvent.AGGREGATE_TYPE,
aggregateId: `${sys.accountType}:${sys.regionCode || 'null'}`,
eventType: SystemAccountSyncedEvent.EVENT_TYPE,
payload: event.toPayload(),
});
// 发布系统账户算力明细事件(用于 mining-admin-service 同步明细记录)
const recordEvent = new SystemContributionRecordCreatedEvent(
savedRecord.id,
sys.accountType,
sys.regionCode, // 传递区域代码
sourceAdoptionId,
sourceAccountSequence,
'FIXED_RATE', // 固定比例分配
null, // 无层级深度
sys.rate.value.toNumber(),
sys.amount.value.toString(),
effectiveDate,
null,
savedRecord.createdAt,
);
await this.outboxRepository.save({
aggregateType: SystemContributionRecordCreatedEvent.AGGREGATE_TYPE,
aggregateId: savedRecord.id.toString(),
eventType: SystemContributionRecordCreatedEvent.EVENT_TYPE,
payload: recordEvent.toPayload(),
});
}
} }
} }
// 6. 发布算力记录同步事件(用于 mining-admin-service- 使用保存后带 ID 的记录 // 6. 发布算力记录同步事件(用于 mining-admin-service- 使用保存后带 ID 的记录
await this.publishContributionRecordEvents(savedRecords); await this.publishContributionRecordEvents(savedRecords);
// 7. 发布所有被更新账户的事件(用于 CDC 同步到 mining-admin-service
await this.publishUpdatedAccountEvents(updatedAccountSequences);
}
/**
* Publish update events for every account touched during this distribution (for CDC sync to mining-admin-service)
*/
private async publishUpdatedAccountEvents(accountSequences: Set<string>): Promise<void> {
if (accountSequences.size === 0) return;
for (const accountSequence of accountSequences) {
const account = await this.contributionAccountRepository.findByAccountSequence(accountSequence);
if (account) {
await this.publishContributionAccountUpdatedEvent(account);
}
}
} }
/** /**
@ -512,15 +300,11 @@ export class ContributionCalculationService {
if (!account.hasAdopted) { if (!account.hasAdopted) {
account.markAsAdopted(); account.markAsAdopted();
await this.contributionAccountRepository.save(account); await this.contributionAccountRepository.save(account);
// 发布账户更新事件到 outbox用于 CDC 同步到 mining-admin-service
await this.publishContributionAccountUpdatedEvent(account);
} }
} }
/** /**
* Update the referrer's unlock status based on how many direct referrals have adopted * Update the referrer's unlock status based on how many direct referrals have adopted
* Also checks for retroactive team bonus claims once new tiers unlock
*/ */
private async updateReferrerUnlockStatus(referrerAccountSequence: string): Promise<void> { private async updateReferrerUnlockStatus(referrerAccountSequence: string): Promise<void> {
const account = await this.contributionAccountRepository.findByAccountSequence(referrerAccountSequence); const account = await this.contributionAccountRepository.findByAccountSequence(referrerAccountSequence);
@ -532,27 +316,16 @@ export class ContributionCalculationService {
); );
// 更新解锁状态 // 更新解锁状态
const previousCount = account.directReferralAdoptedCount; const currentCount = account.directReferralAdoptedCount;
if (directReferralAdoptedCount > previousCount) { if (directReferralAdoptedCount > currentCount) {
// 需要增量更新 // 需要增量更新
for (let i = previousCount; i < directReferralAdoptedCount; i++) { for (let i = currentCount; i < directReferralAdoptedCount; i++) {
account.incrementDirectReferralAdoptedCount(); account.incrementDirectReferralAdoptedCount();
} }
await this.contributionAccountRepository.save(account); await this.contributionAccountRepository.save(account);
// 发布账户更新事件到 outbox用于 CDC 同步到 mining-admin-service
await this.publishContributionAccountUpdatedEvent(account);
this.logger.debug( this.logger.debug(
`Updated referrer ${referrerAccountSequence} unlock status: level=${account.unlockedLevelDepth}, bonus=${account.unlockedBonusTiers}`, `Updated referrer ${referrerAccountSequence} unlock status: level=${account.unlockedLevelDepth}, bonus=${account.unlockedBonusTiers}`,
); );
// 检查并处理奖励补发T2: 直推≥2人, T3: 直推≥4人
await this.bonusClaimService.checkAndClaimBonus(
referrerAccountSequence,
previousCount,
directReferralAdoptedCount,
);
} }
} }
@ -620,43 +393,4 @@ export class ContributionCalculationService {
}, },
}; };
} }
/**
* Publish a ContributionAccountUpdated event to the outbox, for CDC sync to mining-admin-service
*/
private async publishContributionAccountUpdatedEvent(
account: ContributionAccountAggregate,
): Promise<void> {
// 总算力 = 个人算力 + 层级待解锁 + 加成待解锁
const totalContribution = account.personalContribution.value
.plus(account.totalLevelPending.value)
.plus(account.totalBonusPending.value);
const event = new ContributionAccountUpdatedEvent(
account.accountSequence,
account.personalContribution.value.toString(),
account.totalLevelPending.value.toString(),
account.totalBonusPending.value.toString(),
totalContribution.toString(),
account.effectiveContribution.value.toString(),
account.hasAdopted,
account.directReferralAdoptedCount,
account.unlockedLevelDepth,
account.unlockedBonusTiers,
account.createdAt,
);
await this.outboxRepository.save({
aggregateType: ContributionAccountUpdatedEvent.AGGREGATE_TYPE,
aggregateId: account.accountSequence,
eventType: ContributionAccountUpdatedEvent.EVENT_TYPE,
payload: event.toPayload(),
});
this.logger.debug(
`Published ContributionAccountUpdatedEvent for ${account.accountSequence}: ` +
`directReferralAdoptedCount=${account.directReferralAdoptedCount}, ` +
`hasAdopted=${account.hasAdopted}`,
);
}
} }
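The publishContributionAccountUpdatedEvent helper above derives total contribution as personal contribution plus pending level and pending bonus amounts before writing the outbox row. A minimal sketch of that arithmetic, assuming the decimal.js dependency the repositories already import as Decimal; the balances are made-up values:

import Decimal from 'decimal.js';

// Illustrative balances only; in the service these come from the account aggregate's value objects.
const personalContribution = new Decimal('120.5');
const totalLevelPending = new Decimal('30.25');
const totalBonusPending = new Decimal('10');

// totalContribution = personal + pending level + pending bonus, serialized as a string for the event payload
const totalContribution = personalContribution.plus(totalLevelPending).plus(totalBonusPending);
console.log(totalContribution.toString()); // "160.75"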
View File
@ -121,16 +121,11 @@ export class ContributionDistributionPublisherService {
return result.systemContributions.map((sys) => ({ return result.systemContributions.map((sys) => ({
accountType: sys.accountType, accountType: sys.accountType,
amount: sys.amount.value.toString(), amount: sys.amount.value.toString(),
// 省份代码PROVINCE 用自己的 regionCodeCITY 需要传递省份代码用于创建省份
provinceCode: provinceCode:
sys.accountType === 'PROVINCE' sys.accountType === 'PROVINCE' || sys.accountType === 'CITY'
? sys.regionCode || provinceCode ? provinceCode
: sys.accountType === 'CITY' : undefined,
? provinceCode // CITY 需要省份代码来创建省份(如果省份不存在) cityCode: sys.accountType === 'CITY' ? cityCode : undefined,
: undefined,
// 城市代码:只有 CITY 类型有
cityCode:
sys.accountType === 'CITY' ? sys.regionCode || cityCode : undefined,
neverExpires: sys.accountType === 'OPERATION', // 运营账户永不过期 neverExpires: sys.accountType === 'OPERATION', // 运营账户永不过期
})); }));
} }
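The regionCode-aware side of this hunk resolves provinceCode and cityCode differently per system account type. A standalone sketch of that mapping; the helper name is invented for illustration, and the sample region codes 440000/440100 are the ones used in comments elsewhere in this compare:

type SystemAccountType = 'OPERATION' | 'PROVINCE' | 'CITY' | 'HEADQUARTERS';

interface RegionCodes {
  provinceCode?: string;
  cityCode?: string;
}

// PROVINCE carries its own region code; CITY carries the adoption's province code
// (so the province can be created if it does not exist yet) plus its own city code;
// other account types carry neither.
function resolveRegionCodes(
  accountType: SystemAccountType,
  regionCode: string | null,
  provinceCode: string | undefined,
  cityCode: string | undefined,
): RegionCodes {
  if (accountType === 'PROVINCE') {
    return { provinceCode: regionCode ?? provinceCode };
  }
  if (accountType === 'CITY') {
    return { provinceCode, cityCode: regionCode ?? cityCode };
  }
  return {};
}

// Example: a CITY allocation for 440100 (city) under 440000 (province)
console.log(resolveRegionCodes('CITY', '440100', '440000', '440100'));
// -> { provinceCode: '440000', cityCode: '440100' }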
View File
@ -1,40 +0,0 @@
/**
* Contribution account updated event
* Published whenever directReferralAdoptedCount, unlockedLevelDepth, or unlockedBonusTiers change,
* so account state can be synced to mining-admin-service
*/
export class ContributionAccountUpdatedEvent {
static readonly EVENT_TYPE = 'ContributionAccountUpdated';
static readonly AGGREGATE_TYPE = 'ContributionAccount';
constructor(
public readonly accountSequence: string,
public readonly personalContribution: string,
public readonly teamLevelContribution: string,
public readonly teamBonusContribution: string,
public readonly totalContribution: string,
public readonly effectiveContribution: string,
public readonly hasAdopted: boolean,
public readonly directReferralAdoptedCount: number,
public readonly unlockedLevelDepth: number,
public readonly unlockedBonusTiers: number,
public readonly createdAt: Date,
) {}
toPayload(): Record<string, any> {
return {
eventType: ContributionAccountUpdatedEvent.EVENT_TYPE,
accountSequence: this.accountSequence,
personalContribution: this.personalContribution,
teamLevelContribution: this.teamLevelContribution,
teamBonusContribution: this.teamBonusContribution,
totalContribution: this.totalContribution,
effectiveContribution: this.effectiveContribution,
hasAdopted: this.hasAdopted,
directReferralAdoptedCount: this.directReferralAdoptedCount,
unlockedLevelDepth: this.unlockedLevelDepth,
unlockedBonusTiers: this.unlockedBonusTiers,
createdAt: this.createdAt.toISOString(),
};
}
}
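A brief usage sketch of the event class above; the account sequence, balances, and dates are made-up values:

// Assumes the ContributionAccountUpdatedEvent class defined above; all values are illustrative.
const event = new ContributionAccountUpdatedEvent(
  'ACC-000123',      // accountSequence
  '120.5',           // personalContribution
  '30.25',           // teamLevelContribution
  '10',              // teamBonusContribution
  '160.75',          // totalContribution
  '120.5',           // effectiveContribution
  true,              // hasAdopted
  3,                 // directReferralAdoptedCount
  10,                // unlockedLevelDepth
  2,                 // unlockedBonusTiers
  new Date('2025-01-01T00:00:00Z'),
);

// toPayload() produces the JSON body stored in the outbox row
const payload = event.toPayload();
console.log(payload.eventType); // "ContributionAccountUpdated"
console.log(payload.createdAt); // "2025-01-01T00:00:00.000Z"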
View File
@ -1,11 +1,7 @@
export * from './contribution-calculated.event'; export * from './contribution-calculated.event';
export * from './daily-snapshot-created.event'; export * from './daily-snapshot-created.event';
export * from './contribution-account-synced.event'; export * from './contribution-account-synced.event';
export * from './contribution-account-updated.event';
export * from './referral-synced.event'; export * from './referral-synced.event';
export * from './adoption-synced.event'; export * from './adoption-synced.event';
export * from './contribution-record-synced.event'; export * from './contribution-record-synced.event';
export * from './network-progress-updated.event'; export * from './network-progress-updated.event';
export * from './system-account-synced.event';
export * from './system-contribution-record-created.event';
export * from './unallocated-contribution-synced.event';
View File
@ -1,27 +0,0 @@
/**
* System account synced event
* Used by mining-service to sync system account contribution balances
*/
export class SystemAccountSyncedEvent {
static readonly EVENT_TYPE = 'SystemAccountSynced';
static readonly AGGREGATE_TYPE = 'SystemAccount';
constructor(
public readonly accountType: string, // OPERATION / PROVINCE / CITY / HEADQUARTERS
public readonly regionCode: string | null, // 省/市代码,如 440000, 440100
public readonly name: string,
public readonly contributionBalance: string,
public readonly createdAt: Date,
) {}
toPayload(): Record<string, any> {
return {
eventType: SystemAccountSyncedEvent.EVENT_TYPE,
accountType: this.accountType,
regionCode: this.regionCode,
name: this.name,
contributionBalance: this.contributionBalance,
createdAt: this.createdAt.toISOString(),
};
}
}
View File
@ -1,56 +0,0 @@
/**
* Source types for system contribution records:
* - FIXED_RATE: fixed-rate allocation (OPERATION 12%, PROVINCE 1%, CITY 2%)
* - LEVEL_OVERFLOW: level contribution overflow, routed to headquarters
* - LEVEL_NO_ANCESTOR: no upline exists at that level, routed to headquarters
* - BONUS_TIER_1/2/3: team bonus for a tier that is not yet unlocked, routed to headquarters
*/
export type SystemContributionSourceType =
| 'FIXED_RATE'
| 'LEVEL_OVERFLOW'
| 'LEVEL_NO_ANCESTOR'
| 'BONUS_TIER_1'
| 'BONUS_TIER_2'
| 'BONUS_TIER_3';
/**
* System contribution record created event
* Used by mining-admin-service to sync per-record contribution details
*/
export class SystemContributionRecordCreatedEvent {
static readonly EVENT_TYPE = 'SystemContributionRecordCreated';
static readonly AGGREGATE_TYPE = 'SystemContributionRecord';
constructor(
public readonly recordId: bigint, // 明细记录ID
public readonly accountType: string, // 系统账户类型OPERATION/PROVINCE/CITY/HEADQUARTERS
public readonly regionCode: string | null, // 区域代码(省/市代码,如 440000, 440100
public readonly sourceAdoptionId: bigint, // 来源认种ID
public readonly sourceAccountSequence: string, // 认种人账号
public readonly sourceType: SystemContributionSourceType, // 来源类型
public readonly levelDepth: number | null, // 层级深度1-15仅对 LEVEL_OVERFLOW/LEVEL_NO_ANCESTOR 有效
public readonly distributionRate: number, // 分配比例
public readonly amount: string, // 算力金额
public readonly effectiveDate: Date, // 生效日期
public readonly expireDate: Date | null, // 过期日期
public readonly createdAt: Date, // 创建时间
) {}
toPayload(): Record<string, any> {
return {
eventType: SystemContributionRecordCreatedEvent.EVENT_TYPE,
recordId: this.recordId.toString(),
accountType: this.accountType,
regionCode: this.regionCode,
sourceAdoptionId: this.sourceAdoptionId.toString(),
sourceAccountSequence: this.sourceAccountSequence,
sourceType: this.sourceType,
levelDepth: this.levelDepth,
distributionRate: this.distributionRate,
amount: this.amount,
effectiveDate: this.effectiveDate.toISOString(),
expireDate: this.expireDate?.toISOString() ?? null,
createdAt: this.createdAt.toISOString(),
};
}
}
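To make the FIXED_RATE percentages above concrete: 12% + 1% + 2% is the 15% system-account share of each adoption's total contribution. A worked example with decimal.js and an assumed total of 1000:

import Decimal from 'decimal.js';

// Rates from the doc comment above: OPERATION 12%, PROVINCE 1%, CITY 2%.
const totalContribution = new Decimal('1000'); // assumed total for the example

const operation = totalContribution.mul('0.12'); // 120
const province = totalContribution.mul('0.01');  // 10
const city = totalContribution.mul('0.02');      // 20

console.log(operation.toString(), province.toString(), city.toString()); // 120 10 20
console.log(operation.plus(province).plus(city).toString());             // 150, i.e. 15% of 1000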
View File
@ -1,33 +0,0 @@
/**
* Unallocated (pending-unlock) contribution synced event
* Used by mining-service to sync pending-unlock contributions
*/
export class UnallocatedContributionSyncedEvent {
static readonly EVENT_TYPE = 'UnallocatedContributionSynced';
static readonly AGGREGATE_TYPE = 'UnallocatedContribution';
constructor(
public readonly sourceAdoptionId: bigint,
public readonly sourceAccountSequence: string,
public readonly wouldBeAccountSequence: string | null,
public readonly contributionType: string, // LEVEL_NO_ANCESTOR, LEVEL_OVERFLOW, BONUS_TIER_1, BONUS_TIER_2, BONUS_TIER_3
public readonly amount: string,
public readonly reason: string | null,
public readonly effectiveDate: Date,
public readonly expireDate: Date,
) {}
toPayload(): Record<string, any> {
return {
eventType: UnallocatedContributionSyncedEvent.EVENT_TYPE,
sourceAdoptionId: this.sourceAdoptionId.toString(),
sourceAccountSequence: this.sourceAccountSequence,
wouldBeAccountSequence: this.wouldBeAccountSequence,
contributionType: this.contributionType,
amount: this.amount,
reason: this.reason,
effectiveDate: this.effectiveDate.toISOString(),
expireDate: this.expireDate.toISOString(),
};
}
}
View File
@ -5,16 +5,6 @@ import { ContributionAccountAggregate, ContributionSourceType } from '../aggrega
import { ContributionRecordAggregate } from '../aggregates/contribution-record.aggregate'; import { ContributionRecordAggregate } from '../aggregates/contribution-record.aggregate';
import { SyncedAdoption, SyncedReferral } from '../repositories/synced-data.repository.interface'; import { SyncedAdoption, SyncedReferral } from '../repositories/synced-data.repository.interface';
/**
* System account contribution allocation (per account type and region)
*/
export interface SystemContributionAllocation {
accountType: 'OPERATION' | 'PROVINCE' | 'CITY' | 'HEADQUARTERS';
regionCode: string | null; // 省市代码,如 440000、440100
rate: DistributionRate;
amount: ContributionAmount;
}
/** /**
* *
*/ */
@ -37,8 +27,12 @@ export interface ContributionDistributionResult {
reason: string; reason: string;
}[]; }[];
// 系统账户贡献值(支持按省市细分) // 系统账户贡献值
systemContributions: SystemContributionAllocation[]; systemContributions: {
accountType: 'OPERATION' | 'PROVINCE' | 'CITY';
rate: DistributionRate;
amount: ContributionAmount;
}[];
} }
/** /**
@ -91,31 +85,23 @@ export class ContributionCalculatorService {
}); });
// 2. 系统账户贡献值 (15%) // 2. 系统账户贡献值 (15%)
// 运营账户(全国)- 12% result.systemContributions = [
result.systemContributions.push({ {
accountType: 'OPERATION', accountType: 'OPERATION',
regionCode: null, rate: DistributionRate.OPERATION,
rate: DistributionRate.OPERATION, amount: totalContribution.multiply(DistributionRate.OPERATION.value),
amount: totalContribution.multiply(DistributionRate.OPERATION.value), },
}); {
accountType: 'PROVINCE',
// 省公司账户 - 1%(按认种选择的省份) rate: DistributionRate.PROVINCE,
const provinceCode = adoption.selectedProvince; amount: totalContribution.multiply(DistributionRate.PROVINCE.value),
result.systemContributions.push({ },
accountType: 'PROVINCE', {
regionCode: provinceCode || null, accountType: 'CITY',
rate: DistributionRate.PROVINCE, rate: DistributionRate.CITY,
amount: totalContribution.multiply(DistributionRate.PROVINCE.value), amount: totalContribution.multiply(DistributionRate.CITY.value),
}); },
];
// 市公司账户 - 2%(按认种选择的城市)
const cityCode = adoption.selectedCity;
result.systemContributions.push({
accountType: 'CITY',
regionCode: cityCode || null,
rate: DistributionRate.CITY,
amount: totalContribution.multiply(DistributionRate.CITY.value),
});
// 3. 团队贡献值 (15%) // 3. 团队贡献值 (15%)
this.distributeTeamContribution( this.distributeTeamContribution(
View File
@ -13,11 +13,11 @@ import { KafkaModule } from './kafka/kafka.module';
import { KafkaProducerService } from './kafka/kafka-producer.service'; import { KafkaProducerService } from './kafka/kafka-producer.service';
import { CDCConsumerService } from './kafka/cdc-consumer.service'; import { CDCConsumerService } from './kafka/cdc-consumer.service';
import { RedisModule } from './redis/redis.module'; import { RedisModule } from './redis/redis.module';
import { SYNCED_DATA_REPOSITORY } from '../domain/repositories/synced-data.repository.interface';
// Repository injection tokens // Repository injection tokens
export const CONTRIBUTION_ACCOUNT_REPOSITORY = 'CONTRIBUTION_ACCOUNT_REPOSITORY'; export const CONTRIBUTION_ACCOUNT_REPOSITORY = 'CONTRIBUTION_ACCOUNT_REPOSITORY';
export const CONTRIBUTION_RECORD_REPOSITORY = 'CONTRIBUTION_RECORD_REPOSITORY'; export const CONTRIBUTION_RECORD_REPOSITORY = 'CONTRIBUTION_RECORD_REPOSITORY';
export const SYNCED_DATA_REPOSITORY = 'SYNCED_DATA_REPOSITORY';
@Module({ @Module({
imports: [PrismaModule, KafkaModule, RedisModule], imports: [PrismaModule, KafkaModule, RedisModule],
View File
@ -53,21 +53,6 @@ export type TransactionalCDCHandlerWithResult<T> = (event: CDCEvent, tx: Transac
/** 事务提交后的回调函数 */ /** 事务提交后的回调函数 */
export type PostCommitCallback<T> = (result: T) => Promise<void>; export type PostCommitCallback<T> = (result: T) => Promise<void>;
/** Topic 消费阶段配置 */
export interface TopicPhase {
topic: string;
tableName: string;
}
/**
* A collected CDC message awaiting ordered processing
* Keeps the raw Kafka payload together with the order_id used for sorting
*/
interface CollectedMessage {
payload: EachMessagePayload;
orderId: bigint; // 用于排序的 order_id
}
@Injectable() @Injectable()
export class CDCConsumerService implements OnModuleInit, OnModuleDestroy { export class CDCConsumerService implements OnModuleInit, OnModuleDestroy {
private readonly logger = new Logger(CDCConsumerService.name); private readonly logger = new Logger(CDCConsumerService.name);
@ -76,14 +61,6 @@ export class CDCConsumerService implements OnModuleInit, OnModuleDestroy {
private handlers: Map<string, CDCHandler> = new Map(); private handlers: Map<string, CDCHandler> = new Map();
private isRunning = false; private isRunning = false;
// 分阶段消费配置
private topicPhases: TopicPhase[] = [];
private currentPhaseIndex = 0;
private sequentialMode = false;
// 初始同步完成标记(只有顺序同步全部完成后才为 true
private initialSyncCompleted = false;
constructor( constructor(
private readonly configService: ConfigService, private readonly configService: ConfigService,
private readonly prisma: PrismaService, private readonly prisma: PrismaService,
@ -270,14 +247,7 @@ export class CDCConsumerService implements OnModuleInit, OnModuleDestroy {
} }
/** /**
* Start the CDC consumer * Start the CDC consumer
*
* Topics are consumed in sequential phases because later tables depend on earlier ones:
* 1. User accounts (user_accounts)
* 2. Referral relationships (referral_relationships) - depends on user accounts
* 3. Adoption orders (planting_orders) - depends on users and referral relationships
*
* Only after every phase has caught up does the consumer switch to continuous mode.
*/ */
async start(): Promise<void> { async start(): Promise<void> {
if (this.isRunning) { if (this.isRunning) {
@ -289,359 +259,36 @@ export class CDCConsumerService implements OnModuleInit, OnModuleDestroy {
await this.consumer.connect(); await this.consumer.connect();
this.logger.log('CDC consumer connected'); this.logger.log('CDC consumer connected');
// 配置顺序消费阶段(顺序很重要!) // 订阅 Debezium CDC topics (从1.0服务全量同步)
this.topicPhases = [ const topics = [
{ // 用户账户表 (identity-service: user_accounts)
topic: this.configService.get<string>('CDC_TOPIC_USERS', 'cdc.identity.public.user_accounts'), this.configService.get<string>('CDC_TOPIC_USERS', 'cdc.identity.public.user_accounts'),
tableName: 'user_accounts', // 认种订单表 (planting-service: planting_orders)
}, this.configService.get<string>('CDC_TOPIC_ADOPTIONS', 'cdc.planting.public.planting_orders'),
{ // 推荐关系表 (referral-service: referral_relationships)
topic: this.configService.get<string>('CDC_TOPIC_REFERRALS', 'cdc.referral.public.referral_relationships'), this.configService.get<string>('CDC_TOPIC_REFERRALS', 'cdc.referral.public.referral_relationships'),
tableName: 'referral_relationships',
},
{
topic: this.configService.get<string>('CDC_TOPIC_ADOPTIONS', 'cdc.planting.public.planting_orders'),
tableName: 'planting_orders',
},
]; ];
this.currentPhaseIndex = 0; await this.consumer.subscribe({
this.sequentialMode = true; topics,
fromBeginning: true, // 首次启动时全量同步历史数据
});
this.logger.log(`Subscribed to topics: ${topics.join(', ')}`);
await this.consumer.run({
eachMessage: async (payload: EachMessagePayload) => {
await this.handleMessage(payload);
},
});
this.isRunning = true; this.isRunning = true;
this.logger.log('CDC consumer started with transactional idempotency protection');
// 开始顺序消费(阻塞直到完成,确保数据依赖顺序正确)
await this.startSequentialConsumption();
this.logger.log('CDC consumer started with sequential phase consumption');
} catch (error) { } catch (error) {
this.logger.error('Failed to start CDC consumer', error); this.logger.error('Failed to start CDC consumer', error);
// 不抛出错误,允许服务在没有 Kafka 的情况下启动(用于本地开发) // 不抛出错误,允许服务在没有 Kafka 的情况下启动(用于本地开发)
} }
} }
/**
* Consume each configured phase in order, then switch to continuous mode
*/
private async startSequentialConsumption(): Promise<void> {
for (let i = 0; i < this.topicPhases.length; i++) {
this.currentPhaseIndex = i;
const phase = this.topicPhases[i];
this.logger.log(`[CDC] Starting phase ${i + 1}/${this.topicPhases.length}: ${phase.tableName} (${phase.topic})`);
// 消费当前阶段直到追上最新
await this.consumePhaseToEnd(phase);
this.logger.log(`[CDC] Completed phase ${i + 1}/${this.topicPhases.length}: ${phase.tableName}`);
}
this.logger.log('[CDC] All phases completed. Switching to continuous mode...');
// 所有阶段完成后,切换到持续消费模式(同时监听所有 topic
await this.startContinuousMode();
}
/**
* Consume a single phase (topic) until it catches up with the high watermark
*
* The planting_orders phase uses a "collect-sort-process" strategy:
* 1. Collect all messages without processing them
* 2. Sort them by order_id
* 3. Process them in sorted order
*
* This is required because Debezium snapshot messages are not guaranteed to arrive
* in source-table order
*/
private async consumePhaseToEnd(phase: TopicPhase): Promise<void> {
const admin = this.kafka.admin();
await admin.connect();
// 获取 topic 的高水位线和最早 offset
const topicOffsets = await admin.fetchTopicOffsets(phase.topic);
const highWatermarks: Map<number, string> = new Map();
const earliestOffsets: Map<number, string> = new Map();
for (const partitionOffset of topicOffsets) {
highWatermarks.set(partitionOffset.partition, partitionOffset.high);
earliestOffsets.set(partitionOffset.partition, partitionOffset.low);
}
this.logger.log(`[CDC] Phase ${phase.tableName}: High watermarks = ${JSON.stringify(Object.fromEntries(highWatermarks))}`);
// 检查是否 topic 为空
const allEmpty = Array.from(highWatermarks.values()).every(hw => hw === '0');
if (allEmpty) {
this.logger.log(`[CDC] Phase ${phase.tableName}: Topic is empty, skipping`);
await admin.disconnect();
return;
}
// 使用固定的 group id
const phaseGroupId = `contribution-service-cdc-phase-${phase.tableName}`;
// 重置 consumer group 的 offset 到最早位置
// 使用 admin.resetOffsets 而不是 setOffsets更简洁且专门用于重置到 earliest/latest
// 这确保每次服务启动都会从头开始消费,不受之前 committed offset 影响
// 参考: https://kafka.js.org/docs/admin#a-name-reset-offsets-a-resetoffsets
this.logger.log(`[CDC] Phase ${phase.tableName}: Resetting consumer group ${phaseGroupId} offsets to earliest`);
try {
await admin.resetOffsets({
groupId: phaseGroupId,
topic: phase.topic,
earliest: true,
});
this.logger.log(`[CDC] Phase ${phase.tableName}: Consumer group offsets reset successfully`);
} catch (resetError: any) {
// 如果 consumer group 不存在resetOffsets 会失败,这是正常的(首次运行)
// fromBeginning: true 会在这种情况下生效
this.logger.log(`[CDC] Phase ${phase.tableName}: Could not reset offsets (may be first run): ${resetError.message}`);
}
const phaseConsumer = this.kafka.consumer({
groupId: phaseGroupId,
});
try {
await phaseConsumer.connect();
// 订阅单个 topicfromBeginning 对新 group 有效
await phaseConsumer.subscribe({
topic: phase.topic,
fromBeginning: true,
});
// 判断是否需要使用"收集-排序-处理"模式
const needsSorting = phase.tableName === 'planting_orders';
if (needsSorting) {
// planting_orders 阶段:使用"收集-排序-处理"模式
await this.consumePhaseWithSorting(phaseConsumer, phase, highWatermarks);
} else {
// 其他阶段:使用原有的"边消费边处理"模式
await this.consumePhaseDirectly(phaseConsumer, phase, highWatermarks);
}
// 停止消费
await phaseConsumer.stop();
await phaseConsumer.disconnect();
await admin.disconnect();
} catch (error) {
this.logger.error(`[CDC] Error in phase ${phase.tableName}`, error);
await phaseConsumer.disconnect();
await admin.disconnect();
throw error;
}
}
/**
* Directly consume a phase, processing messages as they arrive
* Used for user_accounts and referral_relationships, which need no re-ordering
*/
private async consumePhaseDirectly(
phaseConsumer: Consumer,
phase: TopicPhase,
highWatermarks: Map<number, string>,
): Promise<void> {
let processedOffsets: Map<number, bigint> = new Map();
let isComplete = false;
for (const partition of highWatermarks.keys()) {
processedOffsets.set(partition, BigInt(-1));
}
// 开始消费
await phaseConsumer.run({
eachMessage: async (payload: EachMessagePayload) => {
await this.handleMessage(payload);
// 更新已处理的 offset
processedOffsets.set(payload.partition, BigInt(payload.message.offset));
// 检查是否所有 partition 都已追上高水位线
let allCaughtUp = true;
for (const [partition, highWatermark] of highWatermarks) {
const processed = processedOffsets.get(partition) ?? BigInt(-1);
// 高水位线是下一个将被写入的 offset所以已处理的 offset 需要 >= highWatermark - 1
if (processed < BigInt(highWatermark) - BigInt(1)) {
allCaughtUp = false;
break;
}
}
if (allCaughtUp && !isComplete) {
isComplete = true;
this.logger.log(`[CDC] Phase ${phase.tableName}: Caught up with all partitions`);
}
},
});
// 等待追上高水位线
while (!isComplete) {
await new Promise(resolve => setTimeout(resolve, 100));
// 每秒检查一次进度
const currentProgress = Array.from(processedOffsets.entries())
.map(([p, o]) => `P${p}:${o}/${highWatermarks.get(p)}`)
.join(', ');
this.logger.debug(`[CDC] Phase ${phase.tableName} progress: ${currentProgress}`);
}
}
/**
* Collect-sort-process mode for the planting_orders phase: collect everything,
* sort by order_id, then process in order
*
* Debezium snapshots do not preserve PostgreSQL insertion order, and processing
* adoptions out of order corrupts derived state such as unlocked_level_depth
*/
private async consumePhaseWithSorting(
phaseConsumer: Consumer,
phase: TopicPhase,
highWatermarks: Map<number, string>,
): Promise<void> {
const collectedMessages: CollectedMessage[] = [];
let processedOffsets: Map<number, bigint> = new Map();
let isComplete = false;
for (const partition of highWatermarks.keys()) {
processedOffsets.set(partition, BigInt(-1));
}
this.logger.log(`[CDC] Phase ${phase.tableName}: Using collect-sort-process mode`);
// 第一步:收集所有消息(不处理)
await phaseConsumer.run({
eachMessage: async (payload: EachMessagePayload) => {
// 解析消息获取 order_id 用于排序
const orderId = this.extractOrderIdFromPayload(payload);
collectedMessages.push({
payload,
orderId,
});
// 更新已处理的 offset
processedOffsets.set(payload.partition, BigInt(payload.message.offset));
// 检查是否所有 partition 都已追上高水位线
let allCaughtUp = true;
for (const [partition, highWatermark] of highWatermarks) {
const processed = processedOffsets.get(partition) ?? BigInt(-1);
if (processed < BigInt(highWatermark) - BigInt(1)) {
allCaughtUp = false;
break;
}
}
if (allCaughtUp && !isComplete) {
isComplete = true;
this.logger.log(`[CDC] Phase ${phase.tableName}: Collected all ${collectedMessages.length} messages`);
}
},
});
// 等待收集完成
while (!isComplete) {
await new Promise(resolve => setTimeout(resolve, 100));
// 每秒检查一次进度
const currentProgress = Array.from(processedOffsets.entries())
.map(([p, o]) => `P${p}:${o}/${highWatermarks.get(p)}`)
.join(', ');
this.logger.debug(`[CDC] Phase ${phase.tableName} collecting: ${currentProgress}, collected: ${collectedMessages.length}`);
}
// 第二步:按 order_id 升序排序
this.logger.log(`[CDC] Phase ${phase.tableName}: Sorting ${collectedMessages.length} messages by order_id`);
collectedMessages.sort((a, b) => {
if (a.orderId < b.orderId) return -1;
if (a.orderId > b.orderId) return 1;
return 0;
});
// 记录排序前后的变化(用于调试)
if (collectedMessages.length > 0) {
const firstFive = collectedMessages.slice(0, 5).map(m => m.orderId.toString()).join(', ');
const lastFive = collectedMessages.slice(-5).map(m => m.orderId.toString()).join(', ');
this.logger.log(`[CDC] Phase ${phase.tableName}: Sorted order_ids: first=[${firstFive}], last=[${lastFive}]`);
}
// 第三步:按排序后的顺序处理消息
this.logger.log(`[CDC] Phase ${phase.tableName}: Processing ${collectedMessages.length} messages in sorted order`);
let processedCount = 0;
for (const collected of collectedMessages) {
await this.handleMessage(collected.payload);
processedCount++;
// 每处理 100 条记录日志一次进度
if (processedCount % 100 === 0) {
this.logger.log(`[CDC] Phase ${phase.tableName}: Processed ${processedCount}/${collectedMessages.length} messages`);
}
}
this.logger.log(`[CDC] Phase ${phase.tableName}: Completed processing all ${collectedMessages.length} messages in order_id order`);
}
/**
* Extract the order_id from a raw Kafka CDC message, used for sorting
*/
private extractOrderIdFromPayload(payload: EachMessagePayload): bigint {
try {
if (!payload.message.value) {
return BigInt(0);
}
const rawData = JSON.parse(payload.message.value.toString());
// order_id 是源表的主键字段
const orderId = rawData.order_id || rawData.id || 0;
// 转换为 bigint 用于比较
return BigInt(orderId);
} catch (error) {
this.logger.warn(`[CDC] Failed to extract order_id from message, using 0`, error);
return BigInt(0);
}
}
/**
* Continuous mode: subscribe to all topics and consume them in parallel
*/
private async startContinuousMode(): Promise<void> {
this.sequentialMode = false;
this.initialSyncCompleted = true; // 标记初始同步完成
const topics = this.topicPhases.map(p => p.topic);
await this.consumer.subscribe({
topics,
fromBeginning: false, // 从上次消费的位置继续(不是从头开始)
});
this.logger.log(`[CDC] Continuous mode: Subscribed to topics: ${topics.join(', ')}`);
await this.consumer.run({
eachMessage: async (payload: EachMessagePayload) => {
await this.handleMessage(payload);
},
});
this.logger.log('[CDC] Continuous mode started - all topics being consumed in parallel');
}
/**
* Get the CDC sync status
* - initialSyncCompleted = true: the initial sequential sync has fully finished
*/
getSyncStatus(): { isRunning: boolean; sequentialMode: boolean; allPhasesCompleted: boolean } {
return {
isRunning: this.isRunning,
sequentialMode: this.sequentialMode,
allPhasesCompleted: this.initialSyncCompleted,
};
}
/** /**
* *
*/ */
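The phase consumers in this file decide they have caught up once every partition's processed offset reaches highWatermark - 1 (the high watermark being the next offset to be written). A condensed kafkajs sketch of that check; the broker address, client id, and group id are placeholders, and the empty-topic and error paths handled by the real service are omitted:

import { Kafka, EachMessagePayload } from 'kafkajs';

async function consumeToHighWatermark(topic: string): Promise<void> {
  const kafka = new Kafka({ clientId: 'catch-up-sketch', brokers: ['localhost:9092'] }); // placeholder broker

  // Read each partition's high watermark (the next offset that will be written).
  const admin = kafka.admin();
  await admin.connect();
  const offsets = await admin.fetchTopicOffsets(topic);
  const highWatermarks = new Map(offsets.map((o) => [o.partition, BigInt(o.high)] as [number, bigint]));
  await admin.disconnect();

  const processed = new Map<number, bigint>();
  const consumer = kafka.consumer({ groupId: 'catch-up-sketch-group' }); // placeholder group id
  await consumer.connect();
  await consumer.subscribe({ topics: [topic], fromBeginning: true });

  // A partition is caught up when its last processed offset >= high watermark - 1.
  await new Promise<void>((resolve) => {
    void consumer.run({
      eachMessage: async ({ partition, message }: EachMessagePayload) => {
        processed.set(partition, BigInt(message.offset));
        const caughtUp = [...highWatermarks].every(
          ([part, high]) => (processed.get(part) ?? BigInt(-1)) >= high - BigInt(1),
        );
        if (caughtUp) resolve();
      },
    });
  });

  await consumer.stop();
  await consumer.disconnect();
}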
View File
@ -223,117 +223,6 @@ export class ContributionAccountRepository implements IContributionAccountReposi
}); });
} }
async findRecentlyUpdated(since: Date, limit: number = 500): Promise<ContributionAccountAggregate[]> {
const records = await this.client.contributionAccount.findMany({
where: { updatedAt: { gte: since } },
orderBy: { updatedAt: 'desc' },
take: limit,
});
return records.map((r) => this.toDomain(r));
}
/**
* Detailed contribution statistics, broken down by level and bonus tiers
*/
async getDetailedContributionStats(): Promise<{
// 个人算力总计
personalTotal: string;
// 层级算力 - 已解锁(已分配给上线)
levelUnlocked: string;
// 层级算力 - 未解锁待解锁的pending
levelPending: string;
// 层级按档位分解
levelByTier: {
tier1: { unlocked: string; pending: string }; // 1-5级
tier2: { unlocked: string; pending: string }; // 6-10级
tier3: { unlocked: string; pending: string }; // 11-15级
};
// 团队奖励算力 - 已解锁
bonusUnlocked: string;
// 团队奖励算力 - 未解锁
bonusPending: string;
// 团队奖励按档位分解
bonusByTier: {
tier1: { unlocked: string; pending: string };
tier2: { unlocked: string; pending: string };
tier3: { unlocked: string; pending: string };
};
}> {
const result = await this.client.contributionAccount.aggregate({
_sum: {
personalContribution: true,
// 层级 1-5
level1Pending: true,
level2Pending: true,
level3Pending: true,
level4Pending: true,
level5Pending: true,
// 层级 6-10
level6Pending: true,
level7Pending: true,
level8Pending: true,
level9Pending: true,
level10Pending: true,
// 层级 11-15
level11Pending: true,
level12Pending: true,
level13Pending: true,
level14Pending: true,
level15Pending: true,
// 团队奖励
bonusTier1Pending: true,
bonusTier2Pending: true,
bonusTier3Pending: true,
// 汇总
totalLevelPending: true,
totalBonusPending: true,
totalUnlocked: true,
},
});
const sum = result._sum;
// 层级 1-5 已解锁在pending字段中存储的是已分配给该用户的层级算力
const level1to5 = new Decimal(sum.level1Pending || 0)
.plus(sum.level2Pending || 0)
.plus(sum.level3Pending || 0)
.plus(sum.level4Pending || 0)
.plus(sum.level5Pending || 0);
// 层级 6-10
const level6to10 = new Decimal(sum.level6Pending || 0)
.plus(sum.level7Pending || 0)
.plus(sum.level8Pending || 0)
.plus(sum.level9Pending || 0)
.plus(sum.level10Pending || 0);
// 层级 11-15
const level11to15 = new Decimal(sum.level11Pending || 0)
.plus(sum.level12Pending || 0)
.plus(sum.level13Pending || 0)
.plus(sum.level14Pending || 0)
.plus(sum.level15Pending || 0);
return {
personalTotal: (sum.personalContribution || new Decimal(0)).toString(),
levelUnlocked: (sum.totalLevelPending || new Decimal(0)).toString(),
levelPending: '0', // 未解锁的存储在 unallocated 表中
levelByTier: {
tier1: { unlocked: level1to5.toString(), pending: '0' },
tier2: { unlocked: level6to10.toString(), pending: '0' },
tier3: { unlocked: level11to15.toString(), pending: '0' },
},
bonusUnlocked: (sum.totalBonusPending || new Decimal(0)).toString(),
bonusPending: '0', // 未解锁的存储在 unallocated 表中
bonusByTier: {
tier1: { unlocked: (sum.bonusTier1Pending || new Decimal(0)).toString(), pending: '0' },
tier2: { unlocked: (sum.bonusTier2Pending || new Decimal(0)).toString(), pending: '0' },
tier3: { unlocked: (sum.bonusTier3Pending || new Decimal(0)).toString(), pending: '0' },
},
};
}
private toDomain(record: any): ContributionAccountAggregate { private toDomain(record: any): ContributionAccountAggregate {
return ContributionAccountAggregate.fromPersistence({ return ContributionAccountAggregate.fromPersistence({
id: record.id, id: record.id,
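The statistics method above folds the fifteen per-level pending columns into three tiers (levels 1-5, 6-10, 11-15). The bucketing rule in isolation, with made-up per-level amounts:

type LevelTier = 'tier1' | 'tier2' | 'tier3';

// Levels 1-5 -> tier1, 6-10 -> tier2, 11-15 -> tier3 (same boundaries as the aggregation above).
function tierOfLevel(levelDepth: number): LevelTier {
  if (levelDepth >= 1 && levelDepth <= 5) return 'tier1';
  if (levelDepth >= 6 && levelDepth <= 10) return 'tier2';
  if (levelDepth >= 11 && levelDepth <= 15) return 'tier3';
  throw new Error(`level depth out of range: ${levelDepth}`);
}

// Example: fold per-level amounts into tier totals.
const perLevel: Record<number, number> = { 1: 10, 6: 5, 12: 2.5 }; // illustrative values
const totals: Record<LevelTier, number> = { tier1: 0, tier2: 0, tier3: 0 };
for (const [level, amount] of Object.entries(perLevel)) {
  totals[tierOfLevel(Number(level))] += amount;
}
console.log(totals); // { tier1: 10, tier2: 5, tier3: 2.5 }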
View File
@ -136,10 +136,7 @@ export class SyncedDataRepository implements ISyncedDataRepository {
async findUndistributedAdoptions(limit: number = 100): Promise<SyncedAdoption[]> { async findUndistributedAdoptions(limit: number = 100): Promise<SyncedAdoption[]> {
const records = await this.client.syncedAdoption.findMany({ const records = await this.client.syncedAdoption.findMany({
where: { where: { contributionDistributed: false },
contributionDistributed: false,
status: 'MINING_ENABLED', // 只处理最终成功的认种订单
},
orderBy: { adoptionDate: 'asc' }, orderBy: { adoptionDate: 'asc' },
take: limit, take: limit,
}); });
@ -174,10 +171,7 @@ export class SyncedDataRepository implements ISyncedDataRepository {
async getTotalTreesByAccountSequence(accountSequence: string): Promise<number> { async getTotalTreesByAccountSequence(accountSequence: string): Promise<number> {
const result = await this.client.syncedAdoption.aggregate({ const result = await this.client.syncedAdoption.aggregate({
where: { where: { accountSequence },
accountSequence,
status: 'MINING_ENABLED', // 只统计最终成功的认种订单
},
_sum: { treeCount: true }, _sum: { treeCount: true },
}); });
return result._sum.treeCount ?? 0; return result._sum.treeCount ?? 0;
@ -291,12 +285,8 @@ export class SyncedDataRepository implements ISyncedDataRepository {
const accountSequences = directReferrals.map((r) => r.accountSequence); const accountSequences = directReferrals.map((r) => r.accountSequence);
// 只统计有 MINING_ENABLED 状态认种记录的直推用户数
const adoptedCount = await this.client.syncedAdoption.findMany({ const adoptedCount = await this.client.syncedAdoption.findMany({
where: { where: { accountSequence: { in: accountSequences } },
accountSequence: { in: accountSequences },
status: 'MINING_ENABLED', // 只统计最终成功的认种订单
},
distinct: ['accountSequence'], distinct: ['accountSequence'],
}); });
@ -318,10 +308,7 @@ export class SyncedDataRepository implements ISyncedDataRepository {
const adoptions = await this.client.syncedAdoption.groupBy({ const adoptions = await this.client.syncedAdoption.groupBy({
by: ['accountSequence'], by: ['accountSequence'],
where: { where: { accountSequence: { in: sequences } },
accountSequence: { in: sequences },
status: 'MINING_ENABLED', // 只统计最终成功的认种订单
},
_sum: { treeCount: true }, _sum: { treeCount: true },
}); });
@ -359,89 +346,6 @@ export class SyncedDataRepository implements ISyncedDataRepository {
return result; return result;
} }
// ========== 认种分类账查询 ==========
async getPlantingLedger(
accountSequence: string,
page: number = 1,
pageSize: number = 20,
): Promise<{
items: SyncedAdoption[];
total: number;
page: number;
pageSize: number;
totalPages: number;
}> {
const skip = (page - 1) * pageSize;
// 只返回 MINING_ENABLED 状态的认种记录
const whereClause = { accountSequence, status: 'MINING_ENABLED' };
const [items, total] = await Promise.all([
this.client.syncedAdoption.findMany({
where: whereClause,
orderBy: { adoptionDate: 'desc' },
skip,
take: pageSize,
}),
this.client.syncedAdoption.count({
where: whereClause,
}),
]);
return {
items: items.map((r) => this.toSyncedAdoption(r)),
total,
page,
pageSize,
totalPages: Math.ceil(total / pageSize),
};
}
async getPlantingSummary(accountSequence: string): Promise<{
totalOrders: number;
totalTreeCount: number;
totalAmount: string;
effectiveTreeCount: number;
firstPlantingAt: Date | null;
lastPlantingAt: Date | null;
}> {
// 只统计 MINING_ENABLED 状态的认种记录
const adoptions = await this.client.syncedAdoption.findMany({
where: { accountSequence, status: 'MINING_ENABLED' },
orderBy: { adoptionDate: 'asc' },
});
if (adoptions.length === 0) {
return {
totalOrders: 0,
totalTreeCount: 0,
totalAmount: '0',
effectiveTreeCount: 0,
firstPlantingAt: null,
lastPlantingAt: null,
};
}
const totalOrders = adoptions.length;
const totalTreeCount = adoptions.reduce((sum, a) => sum + a.treeCount, 0);
// 计算总金额treeCount * contributionPerTree
let totalAmount = new Decimal(0);
for (const adoption of adoptions) {
const amount = new Decimal(adoption.contributionPerTree).mul(adoption.treeCount);
totalAmount = totalAmount.add(amount);
}
return {
totalOrders,
totalTreeCount,
totalAmount: totalAmount.toString(),
effectiveTreeCount: totalTreeCount, // 全部都是有效的 MINING_ENABLED
firstPlantingAt: adoptions[0]?.adoptionDate || null,
lastPlantingAt: adoptions[adoptions.length - 1]?.adoptionDate || null,
};
}
// ========== 统计方法(用于查询服务)========== // ========== 统计方法(用于查询服务)==========
async countUsers(): Promise<number> { async countUsers(): Promise<number> {
@ -454,23 +358,10 @@ export class SyncedDataRepository implements ISyncedDataRepository {
async countUndistributedAdoptions(): Promise<number> { async countUndistributedAdoptions(): Promise<number> {
return this.client.syncedAdoption.count({ return this.client.syncedAdoption.count({
where: { where: { contributionDistributed: false },
contributionDistributed: false,
status: 'MINING_ENABLED', // 只统计最终成功的认种订单
},
}); });
} }
async getTotalTrees(): Promise<number> {
const result = await this.client.syncedAdoption.aggregate({
where: {
status: 'MINING_ENABLED', // 只统计最终成功的认种订单
},
_sum: { treeCount: true },
});
return result._sum.treeCount ?? 0;
}
// ========== 私有方法 ========== // ========== 私有方法 ==========
private toSyncedUser(record: any): SyncedUser { private toSyncedUser(record: any): SyncedUser {
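The tree-count queries in this file differ between the two sides mainly in whether they add a status: 'MINING_ENABLED' filter. A minimal sketch of the filtered aggregate, assuming the generated Prisma client and the syncedAdoption model named in the diff:

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Total trees for one account, counting only adoptions whose status reached MINING_ENABLED.
async function totalMiningEnabledTrees(accountSequence: string): Promise<number> {
  const result = await prisma.syncedAdoption.aggregate({
    where: { accountSequence, status: 'MINING_ENABLED' },
    _sum: { treeCount: true },
  });
  return result._sum.treeCount ?? 0;
}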
View File
@ -7,7 +7,6 @@ export type SystemAccountType = 'OPERATION' | 'PROVINCE' | 'CITY' | 'HEADQUARTER
export interface SystemAccount { export interface SystemAccount {
id: bigint; id: bigint;
accountType: SystemAccountType; accountType: SystemAccountType;
regionCode: string | null; // 省/市代码
name: string; name: string;
contributionBalance: ContributionAmount; contributionBalance: ContributionAmount;
contributionNeverExpires: boolean; contributionNeverExpires: boolean;
@ -21,8 +20,6 @@ export interface SystemContributionRecord {
systemAccountId: bigint; systemAccountId: bigint;
sourceAdoptionId: bigint; sourceAdoptionId: bigint;
sourceAccountSequence: string; sourceAccountSequence: string;
sourceType: string; // 来源类型FIXED_RATE / LEVEL_OVERFLOW / LEVEL_NO_ANCESTOR / BONUS_TIER_1/2/3
levelDepth: number | null; // 层级深度1-15仅对层级相关类型有效
distributionRate: number; distributionRate: number;
amount: ContributionAmount; amount: ContributionAmount;
effectiveDate: Date; effectiveDate: Date;
@ -39,19 +36,9 @@ export class SystemAccountRepository {
return this.unitOfWork.getClient(); return this.unitOfWork.getClient();
} }
/** async findByType(accountType: SystemAccountType): Promise<SystemAccount | null> {
* Find a system account by accountType + regionCode const record = await this.client.systemAccount.findUnique({
* Because regionCode is nullable, findFirst is used instead of findUnique where: { accountType },
*/
async findByTypeAndRegion(
accountType: SystemAccountType,
regionCode: string | null,
): Promise<SystemAccount | null> {
const record = await this.client.systemAccount.findFirst({
where: {
accountType,
regionCode: regionCode === null ? { equals: null } : regionCode,
},
}); });
if (!record) { if (!record) {
@ -61,225 +48,123 @@ export class SystemAccountRepository {
return this.toSystemAccount(record); return this.toSystemAccount(record);
} }
/**
* Find all system accounts of a given type (e.g. every CITY account)
*/
async findByType(accountType: SystemAccountType): Promise<SystemAccount[]> {
const records = await this.client.systemAccount.findMany({
where: { accountType },
orderBy: { regionCode: 'asc' },
});
return records.map((r) => this.toSystemAccount(r));
}
async findAll(): Promise<SystemAccount[]> { async findAll(): Promise<SystemAccount[]> {
const records = await this.client.systemAccount.findMany({ const records = await this.client.systemAccount.findMany({
orderBy: [{ accountType: 'asc' }, { regionCode: 'asc' }], orderBy: { accountType: 'asc' },
}); });
return records.map((r) => this.toSystemAccount(r)); return records.map((r) => this.toSystemAccount(r));
} }
/**
* Ensure the default system accounts exist
*/
async ensureSystemAccountsExist(): Promise<void> { async ensureSystemAccountsExist(): Promise<void> {
const accounts: { accountType: SystemAccountType; name: string }[] = [ const accounts: { accountType: SystemAccountType; name: string }[] = [
{ accountType: 'OPERATION', name: '运营账户' }, { accountType: 'OPERATION', name: '运营账户' },
{ accountType: 'PROVINCE', name: '省公司账户' },
{ accountType: 'CITY', name: '市公司账户' },
{ accountType: 'HEADQUARTERS', name: '总部账户' }, { accountType: 'HEADQUARTERS', name: '总部账户' },
]; ];
for (const account of accounts) { for (const account of accounts) {
// 由于 regionCode 是 nullable使用 findFirst + create 替代 upsert await this.client.systemAccount.upsert({
const existing = await this.client.systemAccount.findFirst({ where: { accountType: account.accountType },
where: { create: {
accountType: account.accountType, accountType: account.accountType,
regionCode: { equals: null }, name: account.name,
contributionBalance: 0,
}, },
update: {},
}); });
if (!existing) {
await this.client.systemAccount.create({
data: {
accountType: account.accountType,
regionCode: null,
name: account.name,
contributionBalance: 0,
contributionNeverExpires: true,
},
});
}
} }
} }
/**
* Add contribution to a system account, creating the account on first use
*/
async addContribution( async addContribution(
accountType: SystemAccountType, accountType: SystemAccountType,
regionCode: string | null,
amount: ContributionAmount, amount: ContributionAmount,
): Promise<void> { ): Promise<void> {
const name = this.getAccountName(accountType, regionCode); await this.client.systemAccount.update({
where: { accountType },
// 由于 regionCode 是 nullable使用 findFirst + create/update 替代 upsert
const existing = await this.client.systemAccount.findFirst({
where: {
accountType,
regionCode: regionCode === null ? { equals: null } : regionCode,
},
});
if (existing) {
await this.client.systemAccount.update({
where: { id: existing.id },
data: {
contributionBalance: { increment: amount.value },
},
});
} else {
await this.client.systemAccount.create({
data: {
accountType,
regionCode,
name,
contributionBalance: amount.value,
contributionNeverExpires: true,
},
});
}
}
/**
* Build a display name for a system account
*/
private getAccountName(accountType: SystemAccountType, regionCode: string | null): string {
if (!regionCode) {
const names: Record<SystemAccountType, string> = {
OPERATION: '运营账户',
PROVINCE: '省公司账户',
CITY: '市公司账户',
HEADQUARTERS: '总部账户',
};
return names[accountType] || accountType;
}
return `${regionCode}账户`;
}
/**
* Subtract contribution from a system account balance
*/
async subtractContribution(
accountType: SystemAccountType,
regionCode: string | null,
amount: ContributionAmount,
): Promise<void> {
const existing = await this.client.systemAccount.findFirst({
where: {
accountType,
regionCode: regionCode === null ? { equals: null } : regionCode,
},
});
if (existing) {
await this.client.systemAccount.update({
where: { id: existing.id },
data: {
contributionBalance: { decrement: amount.value },
},
});
}
}
/**
* Soft-delete the contribution records that originated from a given adoption
*/
async deleteContributionRecordsByAdoption(
accountType: SystemAccountType,
regionCode: string | null,
sourceAdoptionId: bigint,
sourceAccountSequence: string,
): Promise<number> {
const systemAccount = await this.findByTypeAndRegion(accountType, regionCode);
if (!systemAccount) {
return 0;
}
const result = await this.client.systemContributionRecord.updateMany({
where: {
systemAccountId: systemAccount.id,
sourceAdoptionId,
sourceAccountSequence,
deletedAt: null, // 只软删除未删除的记录
},
data: { data: {
deletedAt: new Date(), contributionBalance: { increment: amount.value },
}, },
}); });
return result.count;
} }
async saveContributionRecord(record: { async saveContributionRecord(record: {
accountType: SystemAccountType; systemAccountType: SystemAccountType;
regionCode: string | null;
sourceAdoptionId: bigint; sourceAdoptionId: bigint;
sourceAccountSequence: string; sourceAccountSequence: string;
sourceType: string; // 来源类型
levelDepth?: number | null; // 层级深度
distributionRate: number; distributionRate: number;
amount: ContributionAmount; amount: ContributionAmount;
effectiveDate: Date; effectiveDate: Date;
expireDate?: Date | null; expireDate?: Date | null;
}): Promise<SystemContributionRecord> { }): Promise<void> {
const systemAccount = await this.findByTypeAndRegion(record.accountType, record.regionCode); const systemAccount = await this.findByType(record.systemAccountType);
if (!systemAccount) { if (!systemAccount) {
throw new Error(`System account ${record.accountType}:${record.regionCode} not found`); throw new Error(`System account ${record.systemAccountType} not found`);
} }
const created = await this.client.systemContributionRecord.create({ await this.client.systemContributionRecord.create({
data: { data: {
systemAccountId: systemAccount.id, systemAccountId: systemAccount.id,
sourceAdoptionId: record.sourceAdoptionId, sourceAdoptionId: record.sourceAdoptionId,
sourceAccountSequence: record.sourceAccountSequence, sourceAccountSequence: record.sourceAccountSequence,
sourceType: record.sourceType,
levelDepth: record.levelDepth ?? null,
distributionRate: record.distributionRate, distributionRate: record.distributionRate,
amount: record.amount.value, amount: record.amount.value,
effectiveDate: record.effectiveDate, effectiveDate: record.effectiveDate,
expireDate: record.expireDate ?? null, expireDate: record.expireDate ?? null,
}, },
}); });
}
return this.toContributionRecord(created); async saveContributionRecords(records: {
systemAccountType: SystemAccountType;
sourceAdoptionId: bigint;
sourceAccountSequence: string;
distributionRate: number;
amount: ContributionAmount;
effectiveDate: Date;
expireDate?: Date | null;
}[]): Promise<void> {
if (records.length === 0) return;
const systemAccounts = await this.findAll();
const accountMap = new Map<SystemAccountType, bigint>();
for (const account of systemAccounts) {
accountMap.set(account.accountType, account.id);
}
await this.client.systemContributionRecord.createMany({
data: records.map((r) => ({
systemAccountId: accountMap.get(r.systemAccountType)!,
sourceAdoptionId: r.sourceAdoptionId,
sourceAccountSequence: r.sourceAccountSequence,
distributionRate: r.distributionRate,
amount: r.amount.value,
effectiveDate: r.effectiveDate,
expireDate: r.expireDate ?? null,
})),
});
} }
async findContributionRecords( async findContributionRecords(
accountType: SystemAccountType, systemAccountType: SystemAccountType,
regionCode: string | null,
page: number, page: number,
pageSize: number, pageSize: number,
): Promise<{ data: SystemContributionRecord[]; total: number }> { ): Promise<{ data: SystemContributionRecord[]; total: number }> {
const systemAccount = await this.findByTypeAndRegion(accountType, regionCode); const systemAccount = await this.findByType(systemAccountType);
if (!systemAccount) { if (!systemAccount) {
return { data: [], total: 0 }; return { data: [], total: 0 };
} }
const whereClause = {
systemAccountId: systemAccount.id,
deletedAt: null, // 过滤已软删除的记录
};
const [records, total] = await Promise.all([ const [records, total] = await Promise.all([
this.client.systemContributionRecord.findMany({ this.client.systemContributionRecord.findMany({
where: whereClause, where: { systemAccountId: systemAccount.id },
skip: (page - 1) * pageSize, skip: (page - 1) * pageSize,
take: pageSize, take: pageSize,
orderBy: { createdAt: 'desc' }, orderBy: { createdAt: 'desc' },
}), }),
this.client.systemContributionRecord.count({ this.client.systemContributionRecord.count({
where: whereClause, where: { systemAccountId: systemAccount.id },
}), }),
]); ]);
@ -293,7 +178,6 @@ export class SystemAccountRepository {
return { return {
id: record.id, id: record.id,
accountType: record.accountType as SystemAccountType, accountType: record.accountType as SystemAccountType,
regionCode: record.regionCode,
name: record.name, name: record.name,
contributionBalance: new ContributionAmount(record.contributionBalance), contributionBalance: new ContributionAmount(record.contributionBalance),
contributionNeverExpires: record.contributionNeverExpires, contributionNeverExpires: record.contributionNeverExpires,
@ -309,8 +193,6 @@ export class SystemAccountRepository {
systemAccountId: record.systemAccountId, systemAccountId: record.systemAccountId,
sourceAdoptionId: record.sourceAdoptionId, sourceAdoptionId: record.sourceAdoptionId,
sourceAccountSequence: record.sourceAccountSequence, sourceAccountSequence: record.sourceAccountSequence,
sourceType: record.sourceType,
levelDepth: record.levelDepth,
distributionRate: record.distributionRate, distributionRate: record.distributionRate,
amount: new ContributionAmount(record.amount), amount: new ContributionAmount(record.amount),
effectiveDate: record.effectiveDate, effectiveDate: record.effectiveDate,
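Because regionCode is nullable, the regionCode-aware side of this file cannot rely on a unique accountType key and emulates upsert with findFirst followed by create or update. A stripped-down sketch of that pattern, assuming the generated Prisma client and the systemAccount model from the diff; the naming fallback is simplified:

import { PrismaClient, Prisma } from '@prisma/client';

const prisma = new PrismaClient();

// Emulated upsert keyed on (accountType, regionCode) where regionCode may be null.
// A null regionCode is matched explicitly with { equals: null }, as in the repository above.
async function addSystemContribution(
  accountType: string,
  regionCode: string | null,
  amount: Prisma.Decimal,
): Promise<void> {
  const existing = await prisma.systemAccount.findFirst({
    where: {
      accountType,
      regionCode: regionCode === null ? { equals: null } : regionCode,
    },
  });

  if (existing) {
    await prisma.systemAccount.update({
      where: { id: existing.id },
      data: { contributionBalance: { increment: amount } },
    });
  } else {
    await prisma.systemAccount.create({
      data: {
        accountType,
        regionCode,
        name: regionCode ? `${regionCode}账户` : accountType, // simplified naming
        contributionBalance: amount,
        contributionNeverExpires: true,
      },
    });
  }
}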
View File
@ -7,16 +7,14 @@ export interface UnallocatedContribution {
unallocType: string; unallocType: string;
wouldBeAccountSequence: string | null; wouldBeAccountSequence: string | null;
levelDepth: number | null; levelDepth: number | null;
bonusTier: number | null;
amount: ContributionAmount; amount: ContributionAmount;
reason: string | null; reason: string | null;
sourceAdoptionId: bigint; sourceAdoptionId: bigint;
sourceAccountSequence: string; sourceAccountSequence: string;
effectiveDate: Date; effectiveDate: Date;
expireDate: Date; expireDate: Date;
status: string; allocatedToHeadquarters: boolean;
allocatedAt: Date | null; allocatedAt: Date | null;
allocatedToAccountSequence: string | null;
createdAt: Date; createdAt: Date;
} }
@ -132,157 +130,20 @@ export class UnallocatedContributionRepository {
}; };
} }
/**
* Find pending (unclaimed) bonus records reserved for an account
* @param accountSequence the account to look up
* @param bonusTier the bonus tier (2 or 3)
*/
async findPendingBonusByAccountSequence(
accountSequence: string,
bonusTier: number,
): Promise<UnallocatedContribution[]> {
const records = await this.client.unallocatedContribution.findMany({
where: {
wouldBeAccountSequence: accountSequence,
unallocType: `BONUS_TIER_${bonusTier}`,
status: 'PENDING',
},
orderBy: { createdAt: 'asc' },
});
return records.map((r) => this.toDomain(r));
}
/**
* Claim bonus records for a user - mark PENDING records as allocated to that user
* @param ids record IDs to claim
* @param accountSequence the account receiving the bonus
*/
async claimBonusRecords(ids: bigint[], accountSequence: string): Promise<void> {
if (ids.length === 0) return;
await this.client.unallocatedContribution.updateMany({
where: {
id: { in: ids },
status: 'PENDING',
},
data: {
status: 'ALLOCATED_TO_USER',
allocatedAt: new Date(),
allocatedToAccountSequence: accountSequence,
},
});
}
/**
* Find all pending bonus records reserved for an account (any tier)
*/
async findAllPendingBonusByAccountSequence(
accountSequence: string,
): Promise<UnallocatedContribution[]> {
const records = await this.client.unallocatedContribution.findMany({
where: {
wouldBeAccountSequence: accountSequence,
unallocType: { startsWith: 'BONUS_TIER_' },
status: 'PENDING',
},
orderBy: { createdAt: 'asc' },
});
return records.map((r) => this.toDomain(r));
}
/**
* Sum pending unallocated level contribution, grouped into level tiers
*/
async getUnallocatedByLevelTier(): Promise<{
tier1: string; // 1-5级未分配
tier2: string; // 6-10级未分配
tier3: string; // 11-15级未分配
}> {
const results = await this.client.unallocatedContribution.groupBy({
by: ['levelDepth'],
where: {
levelDepth: { not: null },
status: 'PENDING',
},
_sum: { amount: true },
});
let tier1 = new ContributionAmount(0);
let tier2 = new ContributionAmount(0);
let tier3 = new ContributionAmount(0);
for (const item of results) {
const depth = item.levelDepth!;
const amount = new ContributionAmount(item._sum.amount || 0);
if (depth >= 1 && depth <= 5) {
tier1 = tier1.add(amount);
} else if (depth >= 6 && depth <= 10) {
tier2 = tier2.add(amount);
} else if (depth >= 11 && depth <= 15) {
tier3 = tier3.add(amount);
}
}
return {
tier1: tier1.value.toString(),
tier2: tier2.value.toString(),
tier3: tier3.value.toString(),
};
}
/**
* Sum pending unallocated bonus contribution, grouped by bonus tier
*/
async getUnallocatedByBonusTier(): Promise<{
tier1: string;
tier2: string;
tier3: string;
}> {
const results = await this.client.unallocatedContribution.groupBy({
by: ['unallocType'],
where: {
unallocType: { startsWith: 'BONUS_TIER_' },
status: 'PENDING',
},
_sum: { amount: true },
});
let tier1 = '0';
let tier2 = '0';
let tier3 = '0';
for (const item of results) {
const amount = (item._sum.amount || 0).toString();
if (item.unallocType === 'BONUS_TIER_1') {
tier1 = amount;
} else if (item.unallocType === 'BONUS_TIER_2') {
tier2 = amount;
} else if (item.unallocType === 'BONUS_TIER_3') {
tier3 = amount;
}
}
return { tier1, tier2, tier3 };
}
private toDomain(record: any): UnallocatedContribution { private toDomain(record: any): UnallocatedContribution {
return { return {
id: record.id, id: record.id,
unallocType: record.unallocType, unallocType: record.unallocType,
wouldBeAccountSequence: record.wouldBeAccountSequence, wouldBeAccountSequence: record.wouldBeAccountSequence,
levelDepth: record.levelDepth, levelDepth: record.levelDepth,
bonusTier: record.bonusTier,
amount: new ContributionAmount(record.amount), amount: new ContributionAmount(record.amount),
reason: record.reason, reason: record.reason,
sourceAdoptionId: record.sourceAdoptionId, sourceAdoptionId: record.sourceAdoptionId,
sourceAccountSequence: record.sourceAccountSequence, sourceAccountSequence: record.sourceAccountSequence,
effectiveDate: record.effectiveDate, effectiveDate: record.effectiveDate,
expireDate: record.expireDate, expireDate: record.expireDate,
status: record.status, allocatedToHeadquarters: record.allocatedToHeadquarters,
allocatedAt: record.allocatedAt, allocatedAt: record.allocatedAt,
allocatedToAccountSequence: record.allocatedToAccountSequence,
createdAt: record.createdAt, createdAt: record.createdAt,
}; };
} }
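The claim method above only flips rows that are still PENDING, which keeps bonus back-fill idempotent under retries. The guard in isolation, again assuming the generated Prisma client and the unallocatedContribution model from the diff:

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Mark PENDING rows as allocated to the user; rows that are no longer PENDING simply don't match,
// so repeating the call (or racing with another claim) cannot allocate the same record twice.
async function claimBonusRecords(ids: bigint[], accountSequence: string): Promise<number> {
  if (ids.length === 0) return 0;

  const result = await prisma.unallocatedContribution.updateMany({
    where: { id: { in: ids }, status: 'PENDING' },
    data: {
      status: 'ALLOCATED_TO_USER',
      allocatedAt: new Date(),
      allocatedToAccountSequence: accountSequence,
    },
  });

  return result.count; // number of rows actually claimed
}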
View File
@ -25,7 +25,6 @@
# CDC & Sync: # CDC & Sync:
# ./deploy-mining.sh sync-reset # Reset CDC consumer offsets to beginning # ./deploy-mining.sh sync-reset # Reset CDC consumer offsets to beginning
# ./deploy-mining.sh sync-status # Show CDC consumer group status # ./deploy-mining.sh sync-status # Show CDC consumer group status
# ./deploy-mining.sh cdc-resnapshot # Force Debezium to re-snapshot (use when Kafka data lost)
# #
# Full Reset (for development/testing): # Full Reset (for development/testing):
# ./deploy-mining.sh full-reset # Complete reset: stop services, drop DBs, recreate, resync # ./deploy-mining.sh full-reset # Complete reset: stop services, drop DBs, recreate, resync
@ -104,13 +103,8 @@ declare -A SERVICE_PORTS=(
) )
# CDC Consumer Groups (all groups that need to be reset during full-reset) # CDC Consumer Groups (all groups that need to be reset during full-reset)
# NOTE: contribution-service uses sequential phase consumption with separate consumer groups
# for each table (user_accounts, referral_relationships, planting_orders)
CDC_CONSUMER_GROUPS=( CDC_CONSUMER_GROUPS=(
"contribution-service-cdc-group" "contribution-service-cdc-group"
"contribution-service-cdc-phase-user_accounts"
"contribution-service-cdc-phase-referral_relationships"
"contribution-service-cdc-phase-planting_orders"
"auth-service-cdc-group" "auth-service-cdc-group"
"mining-admin-service-cdc-group" "mining-admin-service-cdc-group"
) )
@ -125,14 +119,6 @@ OUTBOX_CONNECTORS=(
"mining-wallet-outbox-connector" "mining-wallet-outbox-connector"
) )
# Debezium CDC Postgres Connectors (for 1.0 -> 2.0 data sync)
# These connectors capture changes from 1.0 service databases
CDC_POSTGRES_CONNECTORS=(
"identity-postgres-connector"
"referral-postgres-connector"
"planting-postgres-connector"
)
# Debezium Connect URL (default port 8084 as mapped in docker-compose) # Debezium Connect URL (default port 8084 as mapped in docker-compose)
DEBEZIUM_CONNECT_URL="${DEBEZIUM_CONNECT_URL:-http://localhost:8084}" DEBEZIUM_CONNECT_URL="${DEBEZIUM_CONNECT_URL:-http://localhost:8084}"
@ -722,148 +708,6 @@ sync_reset() {
log_info "Run: ./deploy-mining.sh up contribution-service && ./deploy-mining.sh up auth-service" log_info "Run: ./deploy-mining.sh up contribution-service && ./deploy-mining.sh up auth-service"
} }
# Trigger Debezium CDC connectors to re-snapshot
# This is needed when Kafka topic messages are deleted (due to retention or manual cleanup)
# and the connector needs to re-export all data from the source database
cdc_resnapshot() {
print_section "Triggering CDC Connectors Re-Snapshot"
local connect_url="$DEBEZIUM_CONNECT_URL"
# Check if Debezium Connect is available
if ! curl -s "$connect_url" &>/dev/null; then
log_error "Debezium Connect not available at $connect_url"
return 1
fi
echo -e "${YELLOW}WARNING: This will delete and recreate CDC Postgres connectors.${NC}"
echo -e "${YELLOW}All connectors will re-snapshot their source tables.${NC}"
echo ""
echo "Connectors to be re-created:"
for connector in "${CDC_POSTGRES_CONNECTORS[@]}"; do
echo " - $connector"
done
echo ""
read -p "Continue? (y/n): " confirm
if [ "$confirm" != "y" ]; then
log_warn "Aborted"
return 1
fi
# Stop CDC consumer services first
log_step "Stopping CDC consumer services..."
service_stop "contribution-service"
# Wait for consumer groups to become inactive
log_info "Waiting 10 seconds for consumers to disconnect..."
sleep 10
# Delete consumer groups to ensure fresh consumption
log_step "Deleting consumer groups..."
for group in "${CDC_CONSUMER_GROUPS[@]}"; do
log_info "Deleting consumer group: $group"
if docker ps --format '{{.Names}}' 2>/dev/null | grep -q "^${KAFKA_CONTAINER}$"; then
docker exec "$KAFKA_CONTAINER" kafka-consumer-groups --bootstrap-server localhost:9092 \
--delete --group "$group" 2>/dev/null && log_success "Deleted $group" || log_warn "Could not delete $group"
fi
done
# Clear processed_cdc_events table
log_step "Clearing processed CDC events..."
if run_psql "rwa_contribution" "TRUNCATE TABLE processed_cdc_events;" 2>/dev/null; then
log_success "Truncated processed_cdc_events in rwa_contribution"
else
log_warn "Could not truncate processed_cdc_events (table may not exist)"
fi
# For each CDC Postgres connector, save config, delete, and recreate
log_step "Re-creating CDC Postgres connectors..."
local scripts_dir="$SCRIPT_DIR/scripts/debezium"
for connector in "${CDC_POSTGRES_CONNECTORS[@]}"; do
log_info "Processing connector: $connector"
# Get current config from running connector
local config
config=$(curl -s "$connect_url/connectors/$connector/config" 2>/dev/null)
local config_file=""
local use_file_config=false
# If connector doesn't exist, try to find config file
if [ -z "$config" ] || echo "$config" | grep -q "error_code"; then
log_warn "Connector $connector not found, looking for config file..."
# Map connector name to config file
case "$connector" in
"identity-postgres-connector")
config_file="$scripts_dir/identity-connector.json"
;;
"referral-postgres-connector")
config_file="$scripts_dir/referral-connector.json"
;;
"planting-postgres-connector")
config_file="$scripts_dir/planting-connector.json"
;;
esac
if [ -n "$config_file" ] && [ -f "$config_file" ]; then
log_info "Found config file: $config_file"
use_file_config=true
else
log_error "No config available for $connector, skipping"
continue
fi
else
# Delete existing connector
log_info "Deleting connector: $connector"
curl -s -X DELETE "$connect_url/connectors/$connector" &>/dev/null
sleep 2
fi
# Create connector
log_info "Creating connector: $connector with snapshot.mode=always"
local result
if [ "$use_file_config" = true ]; then
# Use config file, replace snapshot.mode with always
local json_config
json_config=$(cat "$config_file" | envsubst | sed 's/"snapshot.mode": "initial"/"snapshot.mode": "always"/')
result=$(echo "$json_config" | curl -s -X POST "$connect_url/connectors" \
-H "Content-Type: application/json" \
-d @- 2>/dev/null)
else
# Use config from running connector, but change snapshot.mode to always
local modified_config
modified_config=$(echo "$config" | sed 's/"snapshot.mode":"initial"/"snapshot.mode":"always"/' | sed 's/"snapshot.mode": "initial"/"snapshot.mode": "always"/')
result=$(curl -s -X POST "$connect_url/connectors" \
-H "Content-Type: application/json" \
-d "{\"name\":\"$connector\",\"config\":$modified_config}" 2>/dev/null)
fi
if echo "$result" | grep -q '"name"'; then
log_success "Created connector: $connector"
else
log_error "Failed to create connector $connector: $result"
fi
# Wait between connectors
sleep 3
done
# Wait for snapshots to complete
log_step "Waiting 30 seconds for Debezium snapshots to complete..."
sleep 30
# Start services
log_step "Starting CDC consumer services..."
service_start "contribution-service"
log_success "CDC re-snapshot completed!"
log_info "Monitor sync progress with: ./deploy-mining.sh sync-status"
}
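The removed cdc_resnapshot() above recreates each Postgres connector with snapshot.mode=always and then only sleeps for a fixed 30 seconds. A minimal TypeScript sketch of a stricter readiness check against the Kafka Connect REST status endpoint (an illustration only, assuming the connector names and the http://localhost:8084 mapping from the script, and Node 18+ for the global fetch):

// check-connectors.ts: poll Debezium Connect until every connector and task reports RUNNING
const CONNECT_URL = process.env.DEBEZIUM_CONNECT_URL ?? 'http://localhost:8084';
const CONNECTORS = [
  'identity-postgres-connector',
  'referral-postgres-connector',
  'planting-postgres-connector',
];

interface ConnectorStatus {
  connector: { state: string };
  tasks: { id: number; state: string; trace?: string }[];
}

async function allRunning(): Promise<boolean> {
  for (const name of CONNECTORS) {
    const res = await fetch(`${CONNECT_URL}/connectors/${name}/status`);
    if (!res.ok) return false; // connector not registered yet
    const status = (await res.json()) as ConnectorStatus;
    const states = [status.connector.state, ...status.tasks.map((t) => t.state)];
    if (!states.every((s) => s === 'RUNNING')) return false;
  }
  return true;
}

async function main(): Promise<void> {
  for (let waited = 0; waited < 300; waited += 5) { // give up after 5 minutes
    if (await allRunning()) {
      console.log('All CDC connectors are RUNNING');
      return;
    }
    await new Promise((r) => setTimeout(r, 5000));
  }
  throw new Error('Connectors did not reach RUNNING state in time');
}

main().catch((err) => { console.error(err); process.exit(1); });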
sync_status() {
print_section "CDC Sync Status"
@ -1258,47 +1102,9 @@ full_reset() {
service_start "$service"
done
log_step "Step 10/18: Waiting for contribution-service CDC sync to complete..."
log_info "Waiting for contribution-service to complete CDC sync (user_accounts -> referral_relationships -> planting_orders)..."
log_step "Step 10/18: Waiting for services to be ready and sync from 1.0..."
log_info "Waiting 30 seconds for all services to start and sync data from 1.0 CDC..."
sleep 30
# Wait for contribution-service to finish its sequential CDC sync
# Check sync state via the /health/cdc-sync API
local max_wait=600 # wait at most 10 minutes
local wait_count=0
local sync_completed=false
local cdc_sync_url="http://localhost:3020/api/v2/health/cdc-sync"
while [ "$wait_count" -lt "$max_wait" ] && [ "$sync_completed" = false ]; do
# Call the API to check sync status
local sync_status
sync_status=$(curl -s "$cdc_sync_url" 2>/dev/null || echo '{}')
if echo "$sync_status" | grep -q '"allPhasesCompleted":true'; then
sync_completed=true
log_success "CDC sync completed - all phases finished"
else
# Show the current status
local is_running
local sequential_mode
is_running=$(echo "$sync_status" | grep -o '"isRunning":[^,}]*' | cut -d':' -f2)
sequential_mode=$(echo "$sync_status" | grep -o '"sequentialMode":[^,}]*' | cut -d':' -f2)
if [ "$is_running" = "true" ] && [ "$sequential_mode" = "true" ]; then
log_info "CDC sync in progress (sequential mode)... (waited ${wait_count}s)"
elif [ "$is_running" = "true" ]; then
log_info "CDC consumer running... (waited ${wait_count}s)"
else
log_info "Waiting for CDC consumer to start... (waited ${wait_count}s)"
fi
sleep 5
wait_count=$((wait_count + 5))
fi
done
if [ "$sync_completed" = false ]; then
log_warn "CDC sync did not complete within ${max_wait}s, proceeding anyway..."
log_info "You may need to wait longer or check: curl $cdc_sync_url"
fi
log_step "Step 11/18: Registering Debezium outbox connectors..." log_step "Step 11/18: Registering Debezium outbox connectors..."
# Register outbox connectors AFTER services are running and have synced data # Register outbox connectors AFTER services are running and have synced data
@ -1521,7 +1327,6 @@ show_help() {
echo -e "${BOLD}CDC / Sync Management:${NC}" echo -e "${BOLD}CDC / Sync Management:${NC}"
echo " sync-reset Reset CDC consumer to read from beginning" echo " sync-reset Reset CDC consumer to read from beginning"
echo " sync-status Show CDC consumer group status" echo " sync-status Show CDC consumer group status"
echo " cdc-resnapshot Force Debezium CDC connectors to re-snapshot ${YELLOW}(use when Kafka data lost)${NC}"
echo " outbox-register Register all Debezium outbox connectors" echo " outbox-register Register all Debezium outbox connectors"
echo " outbox-status Show outbox connector status" echo " outbox-status Show outbox connector status"
echo " outbox-delete Delete all outbox connectors" echo " outbox-delete Delete all outbox connectors"
@ -1624,10 +1429,6 @@ main() {
sync-status) sync-status)
sync_status sync_status
;; ;;
cdc-resnapshot)
print_header
cdc_resnapshot
;;
# Outbox connector commands
outbox-register)
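For comparison, the Step 10 wait loop that the new branch drops (see the removed block above) can be expressed as a small TypeScript helper. This is a sketch of the same check only, assuming the /api/v2/health/cdc-sync endpoint and the allPhasesCompleted / isRunning / sequentialMode fields that the old script parses; the new flow simply sleeps 30 seconds instead:

// wait-cdc-sync.ts: poll contribution-service until the sequential CDC sync reports completion
const CDC_SYNC_URL = process.env.CDC_SYNC_URL ?? 'http://localhost:3020/api/v2/health/cdc-sync';

interface CdcSyncStatus {
  allPhasesCompleted?: boolean;
  isRunning?: boolean;
  sequentialMode?: boolean;
}

async function waitForCdcSync(maxWaitSeconds = 600, intervalSeconds = 5): Promise<boolean> {
  for (let waited = 0; waited < maxWaitSeconds; waited += intervalSeconds) {
    let status: CdcSyncStatus = {};
    try {
      const res = await fetch(CDC_SYNC_URL);
      if (res.ok) status = (await res.json()) as CdcSyncStatus;
    } catch {
      // service not reachable yet, keep waiting
    }
    if (status.allPhasesCompleted === true) return true;
    const phase = status.isRunning
      ? status.sequentialMode
        ? 'sequential sync in progress'
        : 'consumer running'
      : 'waiting for consumer to start';
    console.log(`CDC sync: ${phase} (waited ${waited}s)`);
    await new Promise((r) => setTimeout(r, intervalSeconds * 1000));
  }
  return false; // callers may proceed anyway, as the old script did after the timeout
}

waitForCdcSync().then((ok) => console.log(ok ? 'CDC sync completed' : 'Timed out waiting for CDC sync'));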

View File

@ -76,10 +76,6 @@ services:
REDIS_DB: 11
# Kafka
KAFKA_BROKERS: kafka:29092
# JWT config (shares the secret with auth-service to verify tokens)
JWT_SECRET: ${JWT_SECRET:-your-jwt-secret-change-in-production}
# 2.0 internal service calls
CONTRIBUTION_SERVICE_URL: http://contribution-service:3020
ports:
- "3021:3021"
healthcheck:
@ -112,8 +108,6 @@ services:
KAFKA_BROKERS: kafka:29092
# 2.0 internal service calls
MINING_SERVICE_URL: http://mining-service:3021
# JWT config (shares the secret with auth-service to verify tokens)
JWT_SECRET: ${JWT_SECRET:-your-jwt-secret-change-in-production}
ports:
- "3022:3022"
healthcheck:

View File

@ -8,14 +8,12 @@
"name": "mining-admin-service",
"version": "1.0.0",
"dependencies": {
"@nestjs/axios": "^3.1.3",
"@nestjs/common": "^10.3.0",
"@nestjs/config": "^3.1.1",
"@nestjs/core": "^10.3.0",
"@nestjs/platform-express": "^10.3.0",
"@nestjs/swagger": "^7.1.17",
"@prisma/client": "^5.7.1",
"axios": "^1.13.2",
"bcrypt": "^5.1.1",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.0",
@ -25,8 +23,7 @@
"kafkajs": "^2.2.4",
"reflect-metadata": "^0.1.14",
"rxjs": "^7.8.1",
"swagger-ui-express": "^5.0.0",
"xlsx": "^0.18.5"
"swagger-ui-express": "^5.0.0"
},
"devDependencies": {
"@nestjs/cli": "^10.2.1",
@ -35,7 +32,6 @@
"@types/bcrypt": "^6.0.0",
"@types/express": "^4.17.21",
"@types/jsonwebtoken": "^9.0.10",
"@types/multer": "^1.4.13",
"@types/node": "^20.10.5",
"eslint": "^8.56.0",
"prettier": "^3.1.1",
@ -631,17 +627,6 @@
"integrity": "sha512-4aErSrCR/On/e5G2hDP0wjooqDdauzEbIq8hIkIe5pXV0rtWJZvdCEKL0ykZxex+IxIwBp0eGeV48hQN07dXtw==",
"license": "MIT"
},
"node_modules/@nestjs/axios": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/@nestjs/axios/-/axios-3.1.3.tgz",
"integrity": "sha512-RZ/63c1tMxGLqyG3iOCVt7A72oy4x1eM6QEhd4KzCYpaVWW0igq0WSREeRoEZhIxRcZfDfIIkvsOMiM7yfVGZQ==",
"license": "MIT",
"peerDependencies": {
"@nestjs/common": "^7.0.0 || ^8.0.0 || ^9.0.0 || ^10.0.0",
"axios": "^1.3.1",
"rxjs": "^6.0.0 || ^7.0.0"
}
},
"node_modules/@nestjs/cli": { "node_modules/@nestjs/cli": {
"version": "10.4.9", "version": "10.4.9",
"resolved": "https://registry.npmjs.org/@nestjs/cli/-/cli-10.4.9.tgz", "resolved": "https://registry.npmjs.org/@nestjs/cli/-/cli-10.4.9.tgz",
@ -1221,16 +1206,6 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/@types/multer": {
"version": "1.4.13",
"resolved": "https://registry.npmjs.org/@types/multer/-/multer-1.4.13.tgz",
"integrity": "sha512-bhhdtPw7JqCiEfC9Jimx5LqX9BDIPJEh2q/fQ4bqbBPtyEZYr3cvF22NwG0DmPZNYA0CAf2CnqDB4KIGGpJcaw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/express": "*"
}
},
"node_modules/@types/node": { "node_modules/@types/node": {
"version": "20.19.28", "version": "20.19.28",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.28.tgz", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.28.tgz",
@ -1519,15 +1494,6 @@
"acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
} }
}, },
"node_modules/adler-32": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/adler-32/-/adler-32-1.3.1.tgz",
"integrity": "sha512-ynZ4w/nUUv5rrsR8UUGoe1VC9hZj6V5hU9Qw1HlMDJGEJw5S7TfTErWTjMys6M7vr0YWcPqs3qAr4ss0nDfP+A==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/agent-base": { "node_modules/agent-base": {
"version": "6.0.2", "version": "6.0.2",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
@ -1768,24 +1734,6 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
"license": "MIT"
},
"node_modules/axios": {
"version": "1.13.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.13.2.tgz",
"integrity": "sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==",
"license": "MIT",
"peer": true,
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.4",
"proxy-from-env": "^1.1.0"
}
},
"node_modules/balanced-match": { "node_modules/balanced-match": {
"version": "1.0.2", "version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
@ -2080,19 +2028,6 @@
], ],
"license": "CC-BY-4.0" "license": "CC-BY-4.0"
}, },
"node_modules/cfb": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/cfb/-/cfb-1.2.2.tgz",
"integrity": "sha512-KfdUZsSOw19/ObEWasvBP/Ac4reZvAGauZhs6S/gqNhXhI7cKwvlH7ulj+dOEYnca4bm4SGo8C1bTAQvnTjgQA==",
"license": "Apache-2.0",
"dependencies": {
"adler-32": "~1.3.0",
"crc-32": "~1.2.0"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/chalk": { "node_modules/chalk": {
"version": "4.1.2", "version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
@ -2250,15 +2185,6 @@
"node": ">=0.10.0" "node": ">=0.10.0"
} }
}, },
"node_modules/codepage": {
"version": "1.15.0",
"resolved": "https://registry.npmjs.org/codepage/-/codepage-1.15.0.tgz",
"integrity": "sha512-3g6NUTPd/YtuuGrhMnOMRjFc+LJw/bnMp3+0r/Wcz3IXUuCosKRJvMphm5+Q+bvTVGcJJuRvVLuYba+WojaFaA==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/color-convert": { "node_modules/color-convert": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
@ -2286,18 +2212,6 @@
"color-support": "bin.js" "color-support": "bin.js"
} }
}, },
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"license": "MIT",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/commander": { "node_modules/commander": {
"version": "4.1.1", "version": "4.1.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz",
@ -2441,18 +2355,6 @@
} }
} }
}, },
"node_modules/crc-32": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz",
"integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==",
"license": "Apache-2.0",
"bin": {
"crc32": "bin/crc32.njs"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/cross-spawn": { "node_modules/cross-spawn": {
"version": "7.0.6", "version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@ -2531,15 +2433,6 @@
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
} }
}, },
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"license": "MIT",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/delegates": { "node_modules/delegates": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
@ -2736,21 +2629,6 @@
"node": ">= 0.4" "node": ">= 0.4"
} }
}, },
"node_modules/es-set-tostringtag": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.6",
"has-tostringtag": "^1.0.2",
"hasown": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/escalade": { "node_modules/escalade": {
"version": "3.2.0", "version": "3.2.0",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
@ -3258,26 +3136,6 @@
"dev": true, "dev": true,
"license": "ISC" "license": "ISC"
}, },
"node_modules/follow-redirects": {
"version": "1.15.11",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
"integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"license": "MIT",
"engines": {
"node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
}
},
"node_modules/foreground-child": { "node_modules/foreground-child": {
"version": "3.3.1", "version": "3.3.1",
"resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz",
@ -3324,22 +3182,6 @@
"webpack": "^5.11.0" "webpack": "^5.11.0"
} }
}, },
"node_modules/form-data": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz",
"integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/forwarded": { "node_modules/forwarded": {
"version": "0.2.0", "version": "0.2.0",
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
@ -3349,15 +3191,6 @@
"node": ">= 0.6" "node": ">= 0.6"
} }
}, },
"node_modules/frac": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/frac/-/frac-1.1.2.tgz",
"integrity": "sha512-w/XBfkibaTl3YDqASwfDUqkna4Z2p9cFSr1aHDt0WoMTECnRfBOv2WArlZILlqgWlmdIlALXGpM2AOhEk5W3IA==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/fresh": { "node_modules/fresh": {
"version": "0.5.2", "version": "0.5.2",
"resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
@ -3660,21 +3493,6 @@
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
} }
}, },
"node_modules/has-tostringtag": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
"license": "MIT",
"dependencies": {
"has-symbols": "^1.0.3"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-unicode": { "node_modules/has-unicode": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
@ -5060,12 +4878,6 @@
"node": ">= 0.10" "node": ">= 0.10"
} }
}, },
"node_modules/proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
"license": "MIT"
},
"node_modules/punycode": { "node_modules/punycode": {
"version": "2.3.1", "version": "2.3.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
@ -5638,18 +5450,6 @@
"node": ">=0.10.0" "node": ">=0.10.0"
} }
}, },
"node_modules/ssf": {
"version": "0.11.2",
"resolved": "https://registry.npmjs.org/ssf/-/ssf-0.11.2.tgz",
"integrity": "sha512-+idbmIXoYET47hH+d7dfm2epdOMUDjqcB4648sTZ+t2JwoyBFL/insLfB/racrDmsKB3diwsDA696pZMieAC5g==",
"license": "Apache-2.0",
"dependencies": {
"frac": "~1.1.2"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/standard-as-callback": { "node_modules/standard-as-callback": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz", "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz",
@ -6452,24 +6252,6 @@
"string-width": "^1.0.2 || 2 || 3 || 4" "string-width": "^1.0.2 || 2 || 3 || 4"
} }
}, },
"node_modules/wmf": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wmf/-/wmf-1.0.2.tgz",
"integrity": "sha512-/p9K7bEh0Dj6WbXg4JG0xvLQmIadrner1bi45VMJTfnbVHsc7yIajZyoSoK60/dtVBs12Fm6WkUI5/3WAVsNMw==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/word": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/word/-/word-0.3.0.tgz",
"integrity": "sha512-OELeY0Q61OXpdUfTp+oweA/vtLVg5VDOXh+3he3PNzLGG/y0oylSOC1xRVj0+l4vQ3tj/bB1HVHv1ocXkQceFA==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/word-wrap": { "node_modules/word-wrap": {
"version": "1.2.5", "version": "1.2.5",
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
@ -6520,27 +6302,6 @@
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
"license": "ISC" "license": "ISC"
}, },
"node_modules/xlsx": {
"version": "0.18.5",
"resolved": "https://registry.npmjs.org/xlsx/-/xlsx-0.18.5.tgz",
"integrity": "sha512-dmg3LCjBPHZnQp5/F/+nnTa+miPJxUXB6vtk42YjBBKayDNagxGEeIdWApkYPOf3Z3pm3k62Knjzp7lMeTEtFQ==",
"license": "Apache-2.0",
"dependencies": {
"adler-32": "~1.3.0",
"cfb": "~1.2.1",
"codepage": "~1.15.0",
"crc-32": "~1.2.1",
"ssf": "~0.11.2",
"wmf": "~1.0.1",
"word": "~0.3.0"
},
"bin": {
"xlsx": "bin/xlsx.njs"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/xtend": { "node_modules/xtend": {
"version": "4.0.2", "version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",

View File

@ -15,14 +15,12 @@
"prisma:migrate": "prisma migrate dev"
},
"dependencies": {
"@nestjs/axios": "^3.1.3",
"@nestjs/common": "^10.3.0",
"@nestjs/config": "^3.1.1",
"@nestjs/core": "^10.3.0",
"@nestjs/platform-express": "^10.3.0",
"@nestjs/swagger": "^7.1.17",
"@prisma/client": "^5.7.1",
"axios": "^1.13.2",
"bcrypt": "^5.1.1",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.0",
@ -32,8 +30,7 @@
"kafkajs": "^2.2.4",
"reflect-metadata": "^0.1.14",
"rxjs": "^7.8.1",
"swagger-ui-express": "^5.0.0",
"xlsx": "^0.18.5"
"swagger-ui-express": "^5.0.0"
},
"devDependencies": {
"@nestjs/cli": "^10.2.1",
@ -42,7 +39,6 @@
"@types/bcrypt": "^6.0.0",
"@types/express": "^4.17.21",
"@types/jsonwebtoken": "^9.0.10",
"@types/multer": "^1.4.13",
"@types/node": "^20.10.5",
"eslint": "^8.56.0",
"prettier": "^3.1.1",

View File

@ -1,8 +1,8 @@
-- ============================================================================
-- mining-admin-service initial migration
-- Merged from: 0001_init, 0002_fix_processed_event_composite_key,
-- 20250120000001_add_region_to_synced_system_contributions,
-- 20250120000002_add_synced_system_contribution_records
-- Merged from: 20260111000000_init, 20260112110000_add_referral_adoption_nickname,
-- 20260112150000_add_unlocked_bonus_tiers, 20260112200000_add_contribution_records_network_progress,
-- 20260113000000_use_prisma_relation_mode, 20260113100000_add_distribution_summary
-- Note: uses Prisma relationMode = "prisma"; no FK constraints are created at the database layer
-- ============================================================================
@ -302,11 +302,10 @@ CREATE TABLE "synced_circulation_pools" (
CONSTRAINT "synced_circulation_pools_pkey" PRIMARY KEY ("id")
);
-- CreateTable: system account contribution (from contribution-service)
-- CreateTable
CREATE TABLE "synced_system_contributions" (
"id" TEXT NOT NULL,
"accountType" TEXT NOT NULL,
"region_code" TEXT,
"name" TEXT NOT NULL,
"contributionBalance" DECIMAL(30,8) NOT NULL DEFAULT 0,
"contributionNeverExpires" BOOLEAN NOT NULL DEFAULT false,
@ -688,12 +687,8 @@ CREATE UNIQUE INDEX "synced_daily_mining_stats_statDate_key" ON "synced_daily_mi
-- CreateIndex
CREATE UNIQUE INDEX "synced_day_klines_klineDate_key" ON "synced_day_klines"("klineDate");
-- CreateIndex: synced_system_contributions
-- Use a composite unique key on accountType + COALESCE(region_code, '__NULL__')
-- Note: in PostgreSQL NULL != NULL, so a plain unique index on region_code cannot prevent duplicate (OPERATION, NULL) rows
CREATE UNIQUE INDEX "synced_system_contributions_accountType_region_code_key" ON "synced_system_contributions"("accountType", COALESCE(region_code, '__NULL__'));
CREATE INDEX "synced_system_contributions_accountType_idx" ON "synced_system_contributions"("accountType");
CREATE INDEX "synced_system_contributions_region_code_idx" ON "synced_system_contributions"("region_code");
-- CreateIndex
CREATE UNIQUE INDEX "synced_system_contributions_accountType_key" ON "synced_system_contributions"("accountType");
-- CreateIndex
CREATE UNIQUE INDEX "cdc_sync_progress_sourceTopic_key" ON "cdc_sync_progress"("sourceTopic");
@ -701,8 +696,11 @@ CREATE UNIQUE INDEX "cdc_sync_progress_sourceTopic_key" ON "cdc_sync_progress"("
-- CreateIndex
CREATE INDEX "cdc_sync_progress_sourceService_idx" ON "cdc_sync_progress"("sourceService");
-- CreateIndex (use the composite unique key instead of a standalone eventId unique constraint)
CREATE UNIQUE INDEX "processed_events_sourceService_eventId_key" ON "processed_events"("sourceService", "eventId");
-- CreateIndex
CREATE UNIQUE INDEX "processed_events_eventId_key" ON "processed_events"("eventId");
-- CreateIndex
CREATE INDEX "processed_events_sourceService_idx" ON "processed_events"("sourceService");
-- CreateIndex
CREATE INDEX "processed_events_processedAt_idx" ON "processed_events"("processedAt");
@ -862,40 +860,3 @@ CREATE UNIQUE INDEX "synced_fee_configs_fee_type_key" ON "synced_fee_configs"("f
-- AddForeignKey (keep the admin-related foreign key)
ALTER TABLE "audit_logs" ADD CONSTRAINT "audit_logs_adminId_fkey" FOREIGN KEY ("adminId") REFERENCES "admin_users"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- ============================================================================
-- Synced system account contribution detail table
-- Stores the per-source contribution detail records synced from contribution-service
-- ============================================================================
-- CreateTable: system account contribution detail (from contribution-service)
CREATE TABLE "synced_system_contribution_records" (
"id" TEXT NOT NULL,
"original_record_id" BIGINT NOT NULL,
"account_type" TEXT NOT NULL,
"region_code" TEXT,
"source_adoption_id" BIGINT NOT NULL,
"source_account_sequence" TEXT NOT NULL,
-- Source type: FIXED_RATE (fixed ratio) / LEVEL_OVERFLOW (level overflow) / LEVEL_NO_ANCESTOR (no upline) / BONUS_TIER_1/2/3 (team bonus not yet unlocked)
"source_type" VARCHAR(30) NOT NULL,
-- Level depth (1-15); only meaningful for LEVEL_OVERFLOW and LEVEL_NO_ANCESTOR
"level_depth" INTEGER,
"distribution_rate" DECIMAL(10,6) NOT NULL,
"amount" DECIMAL(30,10) NOT NULL,
"effective_date" DATE NOT NULL,
"expire_date" DATE,
"is_expired" BOOLEAN NOT NULL DEFAULT false,
"created_at" TIMESTAMP(3) NOT NULL,
"syncedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "synced_system_contribution_records_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "synced_system_contribution_records_original_record_id_key" ON "synced_system_contribution_records"("original_record_id");
CREATE INDEX "synced_system_contribution_records_account_type_region_code_idx" ON "synced_system_contribution_records"("account_type", "region_code");
CREATE INDEX "synced_system_contribution_records_source_adoption_id_idx" ON "synced_system_contribution_records"("source_adoption_id");
CREATE INDEX "synced_system_contribution_records_source_account_sequence_idx" ON "synced_system_contribution_records"("source_account_sequence");
CREATE INDEX "synced_system_contribution_records_source_type_idx" ON "synced_system_contribution_records"("source_type");
CREATE INDEX "synced_system_contribution_records_created_at_idx" ON "synced_system_contribution_records"("created_at" DESC);

View File

@ -0,0 +1,26 @@
-- ============================================================================
-- Fix the idempotency key of the processed_events table
-- Gives 100% exactly-once semantics for outbox events between 2.0 services
-- ============================================================================
--
-- Problem: eventId alone was the unique key, but outbox IDs from different services can collide
-- Fix: use (sourceService, eventId) as a composite unique key
--
-- Key semantics:
-- - sourceService: name of the service that emitted the event (e.g. "auth-service", "contribution-service")
-- - eventId: auto-increment ID of the sender's outbox table (a database auto-increment primary key, not a UUID)
-- - The combination is globally unique and can be used to trace an event back to its source
-- ============================================================================
-- Clear existing rows first (previous data may conflict)
TRUNCATE TABLE "processed_events";
-- Drop the old unique index (eventId only)
DROP INDEX IF EXISTS "processed_events_eventId_key";
-- Drop the old plain index on sourceService
DROP INDEX IF EXISTS "processed_events_sourceService_idx";
-- Create the new composite unique index: (sourceService, eventId)
-- This combination guarantees uniqueness across services
CREATE UNIQUE INDEX "processed_events_sourceService_eventId_key" ON "processed_events"("sourceService", "eventId");

View File

@ -422,62 +422,16 @@ model SyncedCirculationPool {
model SyncedSystemContribution {
id String @id @default(uuid())
accountType String // OPERATION / PROVINCE / CITY / HEADQUARTERS
regionCode String? @map("region_code") // province/city code, e.g. 440000, 440100
accountType String @unique // OPERATION, PROVINCE, CITY, HEADQUARTERS
name String
contributionBalance Decimal @db.Decimal(30, 8) @default(0)
contributionNeverExpires Boolean @default(false)
syncedAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@unique([accountType, regionCode])
@@index([accountType])
@@index([regionCode])
@@map("synced_system_contributions")
}
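For rows with a non-null regionCode, the main-side compound key above can be addressed through Prisma's generated selector. A short sketch (assuming the generated client exposes syncedSystemContribution; rows whose regionCode is NULL still need the raw COALESCE upsert shown earlier, because compound-unique inputs cannot contain null):

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Upsert a province-level system account via the accountType_regionCode compound selector.
async function syncProvinceContribution(regionCode: string, balance: string) {
  return prisma.syncedSystemContribution.upsert({
    where: { accountType_regionCode: { accountType: 'PROVINCE', regionCode } },
    create: {
      accountType: 'PROVINCE',
      regionCode,
      name: `PROVINCE-${regionCode}`, // placeholder name; the real one comes from the CDC payload
      contributionBalance: balance,
    },
    update: { contributionBalance: balance },
  });
}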
// =============================================================================
// CDC sync table - system account contribution detail (from contribution-service)
// =============================================================================
model SyncedSystemContributionRecord {
id String @id @default(uuid())
originalRecordId BigInt @unique @map("original_record_id") // original ID in contribution-service
// System account info (stored redundantly for easier querying)
accountType String @map("account_type") // OPERATION / PROVINCE / CITY / HEADQUARTERS
regionCode String? @map("region_code") // province/city code
// Source info
sourceAdoptionId BigInt @map("source_adoption_id") // source adoption ID
sourceAccountSequence String @map("source_account_sequence") // adopter account sequence
// Source type: FIXED_RATE (fixed ratio) / LEVEL_OVERFLOW (level overflow) / LEVEL_NO_ANCESTOR (no upline) / BONUS_TIER_1/2/3 (team bonus not yet unlocked)
sourceType String @map("source_type") @db.VarChar(30)
// Level depth (1-15); only meaningful for LEVEL_OVERFLOW and LEVEL_NO_ANCESTOR
levelDepth Int? @map("level_depth")
// Distribution parameters
distributionRate Decimal @map("distribution_rate") @db.Decimal(10, 6) // distribution ratio
amount Decimal @map("amount") @db.Decimal(30, 10) // contribution amount
// Validity period
effectiveDate DateTime @map("effective_date") @db.Date // effective date
expireDate DateTime? @map("expire_date") @db.Date // expiry date (usually null for system accounts, i.e. never expires)
isExpired Boolean @default(false) @map("is_expired")
createdAt DateTime @map("created_at") // original record creation time
syncedAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@index([accountType, regionCode])
@@index([sourceAdoptionId])
@@index([sourceAccountSequence])
@@index([sourceType])
@@index([createdAt(sort: Desc)])
@@map("synced_system_contribution_records")
}
// =============================================================================
// CDC sync progress tracking
// =============================================================================

View File

@ -3,14 +3,11 @@ import { ApplicationModule } from '../application/application.module';
import { AuthController } from './controllers/auth.controller';
import { DashboardController } from './controllers/dashboard.controller';
import { ConfigController } from './controllers/config.controller';
import { InitializationController } from './controllers/initialization.controller';
import { AuditController } from './controllers/audit.controller';
import { HealthController } from './controllers/health.controller';
import { UsersController } from './controllers/users.controller';
import { SystemAccountsController } from './controllers/system-accounts.controller';
import { ReportsController } from './controllers/reports.controller';
import { ManualMiningController } from './controllers/manual-mining.controller';
import { PendingContributionsController } from './controllers/pending-contributions.controller';
import { BatchMiningController } from './controllers/batch-mining.controller';
@Module({
imports: [ApplicationModule],
@ -18,14 +15,11 @@ import { BatchMiningController } from './controllers/batch-mining.controller';
AuthController,
DashboardController,
ConfigController,
InitializationController,
AuditController,
HealthController,
UsersController,
SystemAccountsController,
ReportsController,
ManualMiningController,
PendingContributionsController,
BatchMiningController,
],
})
export class ApiModule {}

View File

@ -4,7 +4,7 @@ import { DashboardService } from '../../application/services/dashboard.service';
@ApiTags('Audit')
@ApiBearerAuth()
@Controller('audit')
@Controller('audit-logs')
export class AuditController {
constructor(private readonly dashboardService: DashboardService) {}
@ -13,42 +13,15 @@ export class AuditController {
@ApiQuery({ name: 'adminId', required: false })
@ApiQuery({ name: 'action', required: false })
@ApiQuery({ name: 'resource', required: false })
@ApiQuery({ name: 'keyword', required: false })
@ApiQuery({ name: 'page', required: false, type: Number })
@ApiQuery({ name: 'pageSize', required: false, type: Number })
async getAuditLogs(
@Query('adminId') adminId?: string,
@Query('action') action?: string,
@Query('resource') resource?: string,
@Query('keyword') keyword?: string,
@Query('page') page?: number,
@Query('pageSize') pageSize?: number,
) {
return this.dashboardService.getAuditLogs({ adminId, action, resource, page: page ?? 1, pageSize: pageSize ?? 50 });
const result = await this.dashboardService.getAuditLogs({
adminId,
action,
resource,
page: page ?? 1,
pageSize: pageSize ?? 20,
});
// Convert to the shape expected by the frontend
return {
items: result.data.map((log: any) => ({
id: log.id,
adminId: log.adminId,
adminUsername: log.admin?.username || 'unknown',
action: log.action,
resource: log.resource,
resourceId: log.resourceId,
details: log.newValue ? JSON.stringify(log.newValue) : null,
ipAddress: log.ipAddress || '-',
createdAt: log.createdAt,
})),
total: result.total,
page: result.pagination.page,
pageSize: result.pagination.pageSize,
totalPages: result.pagination.totalPages,
};
}
}

View File

@ -1,364 +0,0 @@
import {
Controller,
Get,
Post,
Body,
Req,
HttpException,
HttpStatus,
UseInterceptors,
UploadedFile,
Logger,
} from '@nestjs/common';
import {
ApiTags,
ApiOperation,
ApiBearerAuth,
ApiBody,
ApiConsumes,
} from '@nestjs/swagger';
import { FileInterceptor } from '@nestjs/platform-express';
import * as XLSX from 'xlsx';
import { BatchMiningService, BatchMiningItem } from '../../application/services/batch-mining.service';
@ApiTags('Batch Mining')
@ApiBearerAuth()
@Controller('batch-mining')
export class BatchMiningController {
private readonly logger = new Logger(BatchMiningController.name);
constructor(private readonly batchMiningService: BatchMiningService) {}
@Get('status')
@ApiOperation({ summary: '获取批量补发状态(是否已执行)' })
async getStatus() {
this.logger.log(`[GET /batch-mining/status] 请求获取批量补发状态`);
try {
const result = await this.batchMiningService.getStatus();
this.logger.log(`[GET /batch-mining/status] 返回: ${JSON.stringify(result)}`);
return result;
} catch (error) {
this.logger.error(`[GET /batch-mining/status] 错误:`, error);
throw error;
}
}
@Post('upload-preview')
@ApiOperation({ summary: '上传 Excel 文件并预览(不执行)' })
@ApiConsumes('multipart/form-data')
@ApiBody({
schema: {
type: 'object',
properties: {
file: {
type: 'string',
format: 'binary',
description: 'Excel 文件 (.xlsx)',
},
},
},
})
@UseInterceptors(FileInterceptor('file'))
async uploadAndPreview(@UploadedFile() file: Express.Multer.File) {
this.logger.log(`[POST /batch-mining/upload-preview] 开始处理上传预览请求`);
if (!file) {
this.logger.error(`[POST /batch-mining/upload-preview] 未收到文件`);
throw new HttpException('请上传文件', HttpStatus.BAD_REQUEST);
}
this.logger.log(`[POST /batch-mining/upload-preview] 收到文件: ${file.originalname}, 大小: ${file.size}, 类型: ${file.mimetype}`);
// 检查文件类型
const validTypes = [
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'application/vnd.ms-excel',
];
if (!validTypes.includes(file.mimetype) && !file.originalname.endsWith('.xlsx')) {
this.logger.error(`[POST /batch-mining/upload-preview] 文件类型不正确: ${file.mimetype}`);
throw new HttpException('请上传 Excel 文件 (.xlsx)', HttpStatus.BAD_REQUEST);
}
try {
// 解析 Excel
this.logger.log(`[POST /batch-mining/upload-preview] 开始解析 Excel...`);
const workbook = XLSX.read(file.buffer, { type: 'buffer' });
this.logger.log(`[POST /batch-mining/upload-preview] Excel Sheet 列表: ${workbook.SheetNames.join(', ')}`);
const sheetName = workbook.SheetNames[0];
const worksheet = workbook.Sheets[sheetName];
// 尝试读取 Sheet2如果存在
const actualSheetName = workbook.SheetNames.includes('Sheet2') ? 'Sheet2' : sheetName;
const actualSheet = workbook.Sheets[actualSheetName];
this.logger.log(`[POST /batch-mining/upload-preview] 使用 Sheet: ${actualSheetName}`);
// 转换为数组
const rows: any[][] = XLSX.utils.sheet_to_json(actualSheet, { header: 1 });
this.logger.log(`[POST /batch-mining/upload-preview] Excel 总行数: ${rows.length}`);
// 解析数据
const items = this.batchMiningService.parseExcelData(rows);
this.logger.log(`[POST /batch-mining/upload-preview] 解析后有效数据: ${items.length}`);
if (items.length === 0) {
this.logger.error(`[POST /batch-mining/upload-preview] Excel 文件中没有有效数据`);
throw new HttpException('Excel 文件中没有有效数据', HttpStatus.BAD_REQUEST);
}
// 调用预览 API
this.logger.log(`[POST /batch-mining/upload-preview] 调用 mining-service 预览 API...`);
const preview = await this.batchMiningService.preview(items);
this.logger.log(`[POST /batch-mining/upload-preview] 预览成功, 总金额: ${preview.grandTotalAmount}`);
return {
...preview,
parsedItems: items,
originalFileName: file.originalname,
};
} catch (error) {
if (error instanceof HttpException) {
throw error;
}
this.logger.error(`[POST /batch-mining/upload-preview] 解析 Excel 文件失败:`, error);
throw new HttpException(
`解析 Excel 文件失败: ${error instanceof Error ? error.message : error}`,
HttpStatus.BAD_REQUEST,
);
}
}
@Post('preview')
@ApiOperation({ summary: '预览批量补发(传入解析后的数据)' })
@ApiBody({
schema: {
type: 'object',
required: ['items'],
properties: {
items: {
type: 'array',
items: {
type: 'object',
properties: {
accountSequence: { type: 'string' },
treeCount: { type: 'number' },
miningStartDate: { type: 'string' },
batch: { type: 'number' },
preMineDays: { type: 'number' },
remark: { type: 'string' },
},
},
},
},
},
})
async preview(@Body() body: { items: BatchMiningItem[] }) {
this.logger.log(`[POST /batch-mining/preview] 请求预览, 数据条数: ${body.items?.length || 0}`);
if (!body.items || body.items.length === 0) {
this.logger.error(`[POST /batch-mining/preview] 数据为空`);
throw new HttpException('数据不能为空', HttpStatus.BAD_REQUEST);
}
try {
const result = await this.batchMiningService.preview(body.items);
this.logger.log(`[POST /batch-mining/preview] 预览成功`);
return result;
} catch (error) {
this.logger.error(`[POST /batch-mining/preview] 错误:`, error);
throw error;
}
}
@Post('upload-execute')
@ApiOperation({ summary: '上传 Excel 文件并执行批量补发(只能执行一次)' })
@ApiConsumes('multipart/form-data')
@ApiBody({
schema: {
type: 'object',
required: ['file', 'reason'],
properties: {
file: {
type: 'string',
format: 'binary',
description: 'Excel 文件 (.xlsx)',
},
reason: {
type: 'string',
description: '补发原因(必填)',
},
},
},
})
@UseInterceptors(FileInterceptor('file'))
async uploadAndExecute(
@UploadedFile() file: Express.Multer.File,
@Body() body: { reason: string },
@Req() req: any,
) {
this.logger.log(`[POST /batch-mining/upload-execute] 开始处理上传执行请求`);
if (!file) {
this.logger.error(`[POST /batch-mining/upload-execute] 未收到文件`);
throw new HttpException('请上传文件', HttpStatus.BAD_REQUEST);
}
this.logger.log(`[POST /batch-mining/upload-execute] 收到文件: ${file.originalname}, 原因: ${body.reason}`);
if (!body.reason || body.reason.trim().length === 0) {
this.logger.error(`[POST /batch-mining/upload-execute] 补发原因为空`);
throw new HttpException('补发原因不能为空', HttpStatus.BAD_REQUEST);
}
// 检查文件类型
const validTypes = [
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'application/vnd.ms-excel',
];
if (!validTypes.includes(file.mimetype) && !file.originalname.endsWith('.xlsx')) {
this.logger.error(`[POST /batch-mining/upload-execute] 文件类型不正确: ${file.mimetype}`);
throw new HttpException('请上传 Excel 文件 (.xlsx)', HttpStatus.BAD_REQUEST);
}
try {
// 解析 Excel
this.logger.log(`[POST /batch-mining/upload-execute] 开始解析 Excel...`);
const workbook = XLSX.read(file.buffer, { type: 'buffer' });
this.logger.log(`[POST /batch-mining/upload-execute] Excel Sheet 列表: ${workbook.SheetNames.join(', ')}`);
// 尝试读取 Sheet2如果存在
const actualSheetName = workbook.SheetNames.includes('Sheet2') ? 'Sheet2' : workbook.SheetNames[0];
const actualSheet = workbook.Sheets[actualSheetName];
this.logger.log(`[POST /batch-mining/upload-execute] 使用 Sheet: ${actualSheetName}`);
// 转换为数组
const rows: any[][] = XLSX.utils.sheet_to_json(actualSheet, { header: 1 });
this.logger.log(`[POST /batch-mining/upload-execute] Excel 总行数: ${rows.length}`);
// 解析数据
const items = this.batchMiningService.parseExcelData(rows);
this.logger.log(`[POST /batch-mining/upload-execute] 解析后有效数据: ${items.length}`);
if (items.length === 0) {
this.logger.error(`[POST /batch-mining/upload-execute] Excel 文件中没有有效数据`);
throw new HttpException('Excel 文件中没有有效数据', HttpStatus.BAD_REQUEST);
}
const admin = req.admin;
this.logger.log(`[POST /batch-mining/upload-execute] 操作管理员: ${admin?.username} (${admin?.id})`);
// 调用执行 API
this.logger.log(`[POST /batch-mining/upload-execute] 调用 mining-service 执行 API...`);
const result = await this.batchMiningService.execute(
{
items,
operatorId: admin.id,
operatorName: admin.username,
reason: body.reason,
},
admin.id,
);
this.logger.log(`[POST /batch-mining/upload-execute] 执行成功: successCount=${result.successCount}, totalAmount=${result.totalAmount}`);
return {
...result,
originalFileName: file.originalname,
};
} catch (error) {
if (error instanceof HttpException) {
throw error;
}
this.logger.error(`[POST /batch-mining/upload-execute] 执行失败:`, error);
throw new HttpException(
`执行失败: ${error instanceof Error ? error.message : error}`,
HttpStatus.BAD_REQUEST,
);
}
}
@Post('execute')
@ApiOperation({ summary: '执行批量补发(传入解析后的数据,只能执行一次)' })
@ApiBody({
schema: {
type: 'object',
required: ['items', 'reason'],
properties: {
items: {
type: 'array',
items: {
type: 'object',
properties: {
accountSequence: { type: 'string' },
treeCount: { type: 'number' },
miningStartDate: { type: 'string' },
batch: { type: 'number' },
preMineDays: { type: 'number' },
remark: { type: 'string' },
},
},
},
reason: { type: 'string', description: '补发原因(必填)' },
},
},
})
async execute(
@Body() body: { items: BatchMiningItem[]; reason: string },
@Req() req: any,
) {
this.logger.log(`[POST /batch-mining/execute] 请求执行批量补发`);
this.logger.log(`[POST /batch-mining/execute] 数据条数: ${body.items?.length || 0}, 原因: ${body.reason}`);
if (!body.items || body.items.length === 0) {
this.logger.error(`[POST /batch-mining/execute] 数据为空`);
throw new HttpException('数据不能为空', HttpStatus.BAD_REQUEST);
}
if (!body.reason || body.reason.trim().length === 0) {
this.logger.error(`[POST /batch-mining/execute] 补发原因为空`);
throw new HttpException('补发原因不能为空', HttpStatus.BAD_REQUEST);
}
const admin = req.admin;
this.logger.log(`[POST /batch-mining/execute] 操作管理员: ${admin?.username} (${admin?.id})`);
try {
const result = await this.batchMiningService.execute(
{
items: body.items,
operatorId: admin.id,
operatorName: admin.username,
reason: body.reason,
},
admin.id,
);
this.logger.log(`[POST /batch-mining/execute] 执行成功`);
return result;
} catch (error) {
this.logger.error(`[POST /batch-mining/execute] 错误:`, error);
throw error;
}
}
@Get('execution')
@ApiOperation({ summary: '获取批量补发执行记录(含明细)' })
async getExecution() {
this.logger.log(`[GET /batch-mining/execution] 请求获取执行记录`);
try {
const execution = await this.batchMiningService.getExecution();
if (!execution) {
this.logger.log(`[GET /batch-mining/execution] 尚未执行过批量补发`);
throw new HttpException('尚未执行过批量补发', HttpStatus.NOT_FOUND);
}
this.logger.log(`[GET /batch-mining/execution] 返回执行记录: id=${execution.id}`);
return execution;
} catch (error) {
if (error instanceof HttpException) {
throw error;
}
this.logger.error(`[GET /batch-mining/execution] 错误:`, error);
throw error;
}
}
}
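The upload endpoints above do their spreadsheet handling through SheetJS. A condensed TypeScript sketch of just that parsing step, assuming the xlsx ^0.18 package from this service's dependencies; the column layout (account sequence, tree count, mining start date) is illustrative, since the real mapping lives in parseExcelData:

import * as XLSX from 'xlsx';

interface ParsedRow {
  accountSequence: string;
  treeCount: number;
  miningStartDate: string;
}

// Parse an uploaded .xlsx buffer the way the removed controller did:
// prefer a sheet named "Sheet2", fall back to the first sheet, and read raw rows.
function parseBatchMiningSheet(buffer: Buffer): ParsedRow[] {
  const workbook = XLSX.read(buffer, { type: 'buffer' });
  const sheetName = workbook.SheetNames.includes('Sheet2') ? 'Sheet2' : workbook.SheetNames[0];
  const rows: any[][] = XLSX.utils.sheet_to_json(workbook.Sheets[sheetName], { header: 1 });

  return rows
    .slice(1) // skip the header row (an assumption about the template)
    .filter((row) => row[0] != null && String(row[0]).trim() !== '')
    .map((row) => ({
      accountSequence: String(row[0]).trim(),
      treeCount: Number(row[1]) || 0,
      miningStartDate: String(row[2] ?? ''),
    }));
}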

View File

@ -1,6 +1,5 @@
import { Controller, Get, Post, Delete, Body, Param, Query, Req, Logger } from '@nestjs/common';
import { Controller, Get, Post, Delete, Body, Param, Query, Req } from '@nestjs/common';
import { ApiTags, ApiOperation, ApiBearerAuth, ApiQuery, ApiParam } from '@nestjs/swagger';
import { ConfigService } from '@nestjs/config';
import { ConfigManagementService } from '../../application/services/config.service';
class SetConfigDto { category: string; key: string; value: string; description?: string; }
@ -9,12 +8,7 @@ class SetConfigDto { category: string; key: string; value: string; description?:
@ApiBearerAuth()
@Controller('configs')
export class ConfigController {
private readonly logger = new Logger(ConfigController.name);
constructor(
private readonly configService: ConfigManagementService,
private readonly appConfigService: ConfigService,
) {}
constructor(private readonly configService: ConfigManagementService) {}
@Get()
@ApiOperation({ summary: '获取配置列表' })
@ -23,155 +17,6 @@ export class ConfigController {
return this.configService.getConfigs(category);
}
@Get('transfer-enabled')
@ApiOperation({ summary: '获取划转开关状态' })
async getTransferEnabled() {
const config = await this.configService.getConfig('system', 'transfer_enabled');
return { enabled: config?.configValue === 'true' };
}
@Post('transfer-enabled')
@ApiOperation({ summary: '设置划转开关状态' })
async setTransferEnabled(@Body() body: { enabled: boolean }, @Req() req: any) {
await this.configService.setConfig(req.admin.id, 'system', 'transfer_enabled', String(body.enabled), '划转开关');
return { success: true };
}
@Get('mining/status')
@ApiOperation({ summary: '获取挖矿状态' })
async getMiningStatus() {
const miningServiceUrl = this.appConfigService.get<string>('MINING_SERVICE_URL', 'http://localhost:3021');
const contributionServiceUrl = this.appConfigService.get<string>('CONTRIBUTION_SERVICE_URL', 'http://localhost:3020');
this.logger.log(`Fetching mining status from ${miningServiceUrl}/api/v2/mining/admin/status`);
try {
// 并行获取 mining-service 状态和 contribution-service 统计数据
const [miningResponse, contributionResponse] = await Promise.all([
fetch(`${miningServiceUrl}/api/v2/mining/admin/status`),
fetch(`${contributionServiceUrl}/api/v2/contribution/stats`).catch(() => null),
]);
if (!miningResponse.ok) {
throw new Error(`Failed to fetch mining status: ${miningResponse.status}`);
}
const miningResult = await miningResponse.json();
this.logger.log(`Mining service response: ${JSON.stringify(miningResult)}`);
const miningData = miningResult.data || miningResult;
// 获取 contribution-service 的全网理论算力
let networkTotalContribution: string | null = null;
let userEffectiveContribution: string | null = null;
let systemAccountsContribution: string | null = null;
if (contributionResponse && contributionResponse.ok) {
const contributionResult = await contributionResponse.json();
const data = contributionResult.data || contributionResult;
// Network-wide theoretical contribution = total adopted trees × contribution per tree
networkTotalContribution = data.networkTotalContribution || null;
// Effective user contribution
userEffectiveContribution = data.totalContribution || null;
// System account contribution
const systemAccounts = data.systemAccounts || [];
const systemTotal = systemAccounts
.filter((a: any) => a.accountType !== 'HEADQUARTERS')
.reduce((sum: number, a: any) => sum + parseFloat(a.totalContribution || '0'), 0);
systemAccountsContribution = systemTotal.toString();
}
// Network-wide theoretical contribution as seen by mining-service
const miningNetworkTotal = miningData.networkTotalContribution || '0';
// Effective user contribution as seen by mining-service
const miningUserTotal = miningData.totalContribution || '0';
// Decide whether contribution data has finished syncing
// Core condition: the network-wide theoretical contribution is synced (mining-service's networkTotalContribution is close to contribution-service's)
// The network-wide theoretical contribution is the mining denominator; it must be synced before mining ratios can be computed correctly
const networkSynced = networkTotalContribution !== null &&
parseFloat(networkTotalContribution) > 0 &&
parseFloat(miningNetworkTotal) > 0 &&
Math.abs(parseFloat(miningNetworkTotal) - parseFloat(networkTotalContribution)) / parseFloat(networkTotalContribution) < 0.001;
const isSynced = networkSynced;
return {
...miningData,
contributionSyncStatus: {
isSynced,
// 全网理论算力(应作为挖矿分母)
networkTotalContribution: networkTotalContribution || '0',
miningNetworkTotal,
// 用户有效算力
userEffectiveContribution: userEffectiveContribution || '0',
miningUserTotal,
// 系统账户算力
systemAccountsContribution: systemAccountsContribution || '0',
// 兼容旧字段
miningTotal: miningUserTotal,
contributionTotal: userEffectiveContribution || '0',
},
};
} catch (error) {
this.logger.error('Failed to get mining status', error);
return {
initialized: false,
isActive: false,
error: `Unable to connect to mining service: ${error.message}`,
contributionSyncStatus: {
isSynced: false,
networkTotalContribution: '0',
miningNetworkTotal: '0',
userEffectiveContribution: '0',
miningUserTotal: '0',
systemAccountsContribution: '0',
miningTotal: '0',
contributionTotal: '0',
},
};
}
}
@Post('mining/activate')
@ApiOperation({ summary: '激活挖矿' })
async activateMining(@Req() req: any) {
const miningServiceUrl = this.appConfigService.get<string>('MINING_SERVICE_URL', 'http://localhost:3021');
try {
const response = await fetch(`${miningServiceUrl}/api/v2/mining/admin/activate`, {
method: 'POST',
});
if (!response.ok) {
throw new Error('Failed to activate mining');
}
const result = await response.json();
this.logger.log(`Mining activated by admin ${req.admin?.id}`);
return result;
} catch (error) {
this.logger.error('Failed to activate mining', error);
return { success: false, message: 'Failed to activate mining' };
}
}
@Post('mining/deactivate')
@ApiOperation({ summary: '停用挖矿' })
async deactivateMining(@Req() req: any) {
const miningServiceUrl = this.appConfigService.get<string>('MINING_SERVICE_URL', 'http://localhost:3021');
try {
const response = await fetch(`${miningServiceUrl}/api/v2/mining/admin/deactivate`, {
method: 'POST',
});
if (!response.ok) {
throw new Error('Failed to deactivate mining');
}
const result = await response.json();
this.logger.log(`Mining deactivated by admin ${req.admin?.id}`);
return result;
} catch (error) {
this.logger.error('Failed to deactivate mining', error);
return { success: false, message: 'Failed to deactivate mining' };
}
}
@Get(':category/:key')
@ApiOperation({ summary: '获取单个配置' })
@ApiParam({ name: 'category' })

View File

@ -16,105 +16,19 @@ export class DashboardController {
@Get()
@ApiOperation({ summary: '获取仪表盘统计数据' })
async getStats() {
return this.dashboardService.getDashboardStats();
const raw = await this.dashboardService.getDashboardStats();
// Compute the 24-hour price change
let priceChange24h = 0;
if (raw.latestPrice) {
const open = parseFloat(raw.latestPrice.open) || 1;
const close = parseFloat(raw.latestPrice.close) || 1;
priceChange24h = (close - open) / open;
}
// Detailed contribution breakdown data
const dc = raw.detailedContribution || {};
// Convert to the shape expected by the frontend
// Prefer data from the remote services, since the CDC sync may be incomplete
const remoteData = raw.remoteData || {};
return {
// 基础统计
totalUsers: raw.users?.total || 0,
adoptedUsers: raw.users?.adopted || 0,
totalTrees: raw.contribution?.totalTrees || 0,
networkEffectiveContribution: raw.contribution?.effectiveContribution || '0',
networkTotalContribution: raw.contribution?.totalContribution || '0',
networkLevelPending: dc.levelContribution?.pending || '0',
networkBonusPending: dc.bonusContribution?.pending || '0',
// 已分配积分股:优先使用远程数据
totalDistributed: remoteData.totalDistributed || raw.mining?.totalMined || '0',
// 已销毁积分股:优先使用远程数据
totalBurned: remoteData.totalBurned || raw.mining?.latestDailyStat?.totalBurned || '0',
// 流通池:优先使用远程数据
circulationPool: remoteData.circulationPool || raw.trading?.circulationPool?.totalShares || '0',
currentPrice: raw.latestPrice?.close || '1',
priceChange24h,
totalOrders: raw.trading?.totalAccounts || 0,
totalTrades: raw.trading?.totalAccounts || 0,
// ========== 详细算力分解 ==========
detailedContribution: {
totalTrees: dc.totalTrees || 0,
// 全网算力(理论值)= 总树数 * 22617
networkTotalTheory: dc.networkTotalTheory || '0',
// 个人算力70%
personalTheory: dc.personalTheory || '0',
personalActual: raw.contribution?.personalContribution || '0',
// 运营账户12%
operationTheory: dc.operationTheory || '0',
operationActual: dc.operationActual || '0',
// 省公司1%
provinceTheory: dc.provinceTheory || '0',
provinceActual: dc.provinceActual || '0',
// 市公司2%
cityTheory: dc.cityTheory || '0',
cityActual: dc.cityActual || '0',
// 层级算力7.5%
level: {
theory: dc.levelTheory || '0',
unlocked: dc.levelContribution?.unlocked || '0',
pending: dc.levelContribution?.pending || '0',
// 分档详情
tier1: dc.levelContribution?.byTier?.tier1 || { unlocked: '0', pending: '0' },
tier2: dc.levelContribution?.byTier?.tier2 || { unlocked: '0', pending: '0' },
tier3: dc.levelContribution?.byTier?.tier3 || { unlocked: '0', pending: '0' },
},
// 团队奖励算力7.5%
bonus: {
theory: dc.bonusTheory || '0',
unlocked: dc.bonusContribution?.unlocked || '0',
pending: dc.bonusContribution?.pending || '0',
// 分档详情
tier1: dc.bonusContribution?.byTier?.tier1 || { unlocked: '0', pending: '0' },
tier2: dc.bonusContribution?.byTier?.tier2 || { unlocked: '0', pending: '0' },
tier3: dc.bonusContribution?.byTier?.tier3 || { unlocked: '0', pending: '0' },
},
},
};
}
@Get('stats')
@ApiOperation({ summary: '获取仪表盘统计数据(别名)' })
async getStatsAlias() {
return this.getStats();
return this.dashboardService.getDashboardStats();
}
@Get('realtime')
@ApiOperation({ summary: '获取实时数据' })
async getRealtimeStats() {
return this.dashboardService.getRealtimeStats();
const raw = await this.dashboardService.getRealtimeStats();
// Convert to the shape expected by the frontend
return {
currentMinuteDistribution: raw.minuteDistribution || '0',
currentMinuteBurn: '0', // no realtime burn data yet
activeOrders: 0, // no realtime order data yet
pendingTrades: 0, // no pending trade data yet
lastPriceUpdateAt: raw.timestamp,
};
}
@Get('reports')

View File

@ -0,0 +1,77 @@
import { Controller, Post, Body, Req } from '@nestjs/common';
import { ApiTags, ApiOperation, ApiBearerAuth } from '@nestjs/swagger';
import { InitializationService } from '../../application/services/initialization.service';
class InitMiningConfigDto {
totalShares: string;
distributionPool: string;
halvingPeriodYears: number;
burnTarget: string;
}
@ApiTags('Initialization')
@ApiBearerAuth()
@Controller('initialization')
export class InitializationController {
constructor(private readonly initService: InitializationService) {}
@Post('mining-config')
@ApiOperation({ summary: '初始化挖矿配置' })
async initMiningConfig(@Body() dto: InitMiningConfigDto, @Req() req: any) {
return this.initService.initializeMiningConfig(req.admin.id, dto);
}
@Post('system-accounts')
@ApiOperation({ summary: '初始化系统账户' })
async initSystemAccounts(@Req() req: any) {
return this.initService.initializeSystemAccounts(req.admin.id);
}
@Post('activate-mining')
@ApiOperation({ summary: '激活挖矿' })
async activateMining(@Req() req: any) {
return this.initService.activateMining(req.admin.id);
}
@Post('sync-users')
@ApiOperation({ summary: '同步所有用户数据从auth-service初始同步' })
async syncUsers(@Req() req: any) {
return this.initService.syncAllUsers(req.admin.id);
}
@Post('sync-contribution-accounts')
@ApiOperation({ summary: '同步所有算力账户从contribution-service初始同步' })
async syncContributionAccounts(@Req() req: any) {
return this.initService.syncAllContributionAccounts(req.admin.id);
}
@Post('sync-mining-accounts')
@ApiOperation({ summary: '同步所有挖矿账户从mining-service初始同步' })
async syncMiningAccounts(@Req() req: any) {
return this.initService.syncAllMiningAccounts(req.admin.id);
}
@Post('sync-trading-accounts')
@ApiOperation({ summary: '同步所有交易账户从trading-service初始同步' })
async syncTradingAccounts(@Req() req: any) {
return this.initService.syncAllTradingAccounts(req.admin.id);
}
@Post('sync-all')
@ApiOperation({ summary: '执行完整的数据同步(用户+算力+挖矿+交易)' })
async syncAll(@Req() req: any) {
const adminId = req.admin.id;
const results = {
users: await this.initService.syncAllUsers(adminId),
contribution: await this.initService.syncAllContributionAccounts(adminId),
mining: await this.initService.syncAllMiningAccounts(adminId),
trading: await this.initService.syncAllTradingAccounts(adminId),
};
return {
success: true,
message: '全部同步完成',
details: results,
};
}
}
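
The controller above exposes the admin bootstrap flow: initialize the mining config, create system accounts, run the initial data syncs, then activate mining. A hedged sketch of one plausible client-side ordering; the base URL, port, token, and config values are placeholders, and the real routes may sit behind a global API prefix:

// Hypothetical admin-side driver for the initialization endpoints above.
const BASE_URL = 'http://localhost:3030/initialization'; // placeholder
const TOKEN = '<admin-jwt>'; // placeholder

async function post(path: string, body?: unknown) {
  const res = await fetch(`${BASE_URL}${path}`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${TOKEN}`,
    },
    body: body ? JSON.stringify(body) : undefined,
  });
  if (!res.ok) throw new Error(`${path} failed: ${res.status}`);
  return res.json();
}

async function bootstrap() {
  await post('/mining-config', {
    totalShares: '210000000', // placeholder values for InitMiningConfigDto
    distributionPool: '105000000',
    halvingPeriodYears: 4,
    burnTarget: '105000000',
  });
  await post('/system-accounts');
  await post('/sync-all'); // users + contribution + mining + trading
  await post('/activate-mining');
}

bootstrap().catch(console.error);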

View File

@ -1,116 +0,0 @@
import {
Controller,
Get,
Post,
Body,
Query,
Param,
HttpException,
HttpStatus,
Req,
} from '@nestjs/common';
import {
ApiTags,
ApiOperation,
ApiBearerAuth,
ApiBody,
ApiQuery,
ApiParam,
} from '@nestjs/swagger';
import { ManualMiningService } from '../../application/services/manual-mining.service';
@ApiTags('Manual Mining')
@ApiBearerAuth()
@Controller('manual-mining')
export class ManualMiningController {
constructor(private readonly manualMiningService: ManualMiningService) {}
@Post('calculate')
@ApiOperation({ summary: '计算手工补发挖矿预估金额' })
@ApiBody({
schema: {
type: 'object',
required: ['accountSequence', 'adoptionDate'],
properties: {
accountSequence: { type: 'string', description: '用户账户序列号' },
adoptionDate: {
type: 'string',
format: 'date',
description: '认种日期 (YYYY-MM-DD)',
},
},
},
})
async calculate(
@Body() body: { accountSequence: string; adoptionDate: string },
) {
if (!body.accountSequence || !body.adoptionDate) {
throw new HttpException('账户序列号和认种日期不能为空', HttpStatus.BAD_REQUEST);
}
return this.manualMiningService.calculate(body);
}
@Post('execute')
@ApiOperation({ summary: '执行手工补发挖矿(仅超级管理员)' })
@ApiBody({
schema: {
type: 'object',
required: ['accountSequence', 'adoptionDate', 'reason'],
properties: {
accountSequence: { type: 'string', description: '用户账户序列号' },
adoptionDate: {
type: 'string',
format: 'date',
description: '认种日期 (YYYY-MM-DD)',
},
reason: { type: 'string', description: '补发原因(必填)' },
},
},
})
async execute(
@Body() body: { accountSequence: string; adoptionDate: string; reason: string },
@Req() req: any,
) {
if (!body.accountSequence || !body.adoptionDate) {
throw new HttpException('账户序列号和认种日期不能为空', HttpStatus.BAD_REQUEST);
}
if (!body.reason || body.reason.trim().length === 0) {
throw new HttpException('补发原因不能为空', HttpStatus.BAD_REQUEST);
}
const admin = req.admin;
return this.manualMiningService.execute(
{
accountSequence: body.accountSequence,
adoptionDate: body.adoptionDate,
operatorId: admin.id,
operatorName: admin.username,
reason: body.reason,
},
admin.id,
);
}
@Get('records')
@ApiOperation({ summary: '获取手工补发记录列表' })
@ApiQuery({ name: 'page', required: false, type: Number })
@ApiQuery({ name: 'pageSize', required: false, type: Number })
async getRecords(
@Query('page') page?: number,
@Query('pageSize') pageSize?: number,
) {
return this.manualMiningService.getRecords(page ?? 1, pageSize ?? 20);
}
@Get('records/:accountSequence')
@ApiOperation({ summary: '查询指定用户的手工补发记录' })
@ApiParam({ name: 'accountSequence', type: String })
async getRecordByAccount(@Param('accountSequence') accountSequence: string) {
const record =
await this.manualMiningService.getRecordByAccountSequence(accountSequence);
if (!record) {
throw new HttpException('该用户没有手工补发记录', HttpStatus.NOT_FOUND);
}
return record;
}
}
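
Both endpoints above validate their bodies by hand (non-empty accountSequence, adoptionDate, and reason) before delegating to ManualMiningService. An alternative sketch of the same constraints expressed as a class-validator DTO; this is illustrative only, not how the controller above is written:

import { IsNotEmpty, IsString, Matches } from 'class-validator';

// Hypothetical DTO mirroring the manual checks in ManualMiningController.
export class ExecuteManualMiningDto {
  @IsString()
  @IsNotEmpty({ message: '账户序列号和认种日期不能为空' })
  accountSequence: string;

  @IsString()
  @Matches(/^\d{4}-\d{2}-\d{2}$/, { message: '认种日期格式应为 YYYY-MM-DD' })
  adoptionDate: string;

  @IsString()
  @IsNotEmpty({ message: '补发原因不能为空' })
  reason: string;
}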

View File

@ -1,77 +0,0 @@
import { Controller, Get, Param, Query } from '@nestjs/common';
import {
ApiTags,
ApiOperation,
ApiBearerAuth,
ApiParam,
ApiQuery,
} from '@nestjs/swagger';
import { PendingContributionsService } from '../../application/services/pending-contributions.service';
@ApiTags('Pending Contributions')
@ApiBearerAuth()
@Controller('pending-contributions')
export class PendingContributionsController {
constructor(
private readonly pendingContributionsService: PendingContributionsService,
) {}
@Get()
@ApiOperation({ summary: '获取待解锁算力列表' })
@ApiQuery({ name: 'page', required: false, type: Number })
@ApiQuery({ name: 'pageSize', required: false, type: Number })
@ApiQuery({
name: 'contributionType',
required: false,
type: String,
description: '算力类型筛选',
})
async getPendingContributions(
@Query('page') page?: number,
@Query('pageSize') pageSize?: number,
@Query('contributionType') contributionType?: string,
) {
return this.pendingContributionsService.getPendingContributions(
page ?? 1,
pageSize ?? 20,
contributionType,
);
}
@Get('summary')
@ApiOperation({ summary: '获取待解锁算力汇总统计' })
async getPendingContributionsSummary() {
return this.pendingContributionsService.getPendingContributionsSummary();
}
@Get('mining-records')
@ApiOperation({ summary: '获取所有待解锁算力的挖矿记录' })
@ApiQuery({ name: 'page', required: false, type: Number })
@ApiQuery({ name: 'pageSize', required: false, type: Number })
async getAllPendingMiningRecords(
@Query('page') page?: number,
@Query('pageSize') pageSize?: number,
) {
return this.pendingContributionsService.getAllPendingMiningRecords(
page ?? 1,
pageSize ?? 20,
);
}
@Get(':id/records')
@ApiOperation({ summary: '获取某条待解锁算力的挖矿记录' })
@ApiParam({ name: 'id', type: String, description: '待解锁算力ID' })
@ApiQuery({ name: 'page', required: false, type: Number })
@ApiQuery({ name: 'pageSize', required: false, type: Number })
async getPendingContributionMiningRecords(
@Param('id') id: string,
@Query('page') page?: number,
@Query('pageSize') pageSize?: number,
) {
return this.pendingContributionsService.getPendingContributionMiningRecords(
id,
page ?? 1,
pageSize ?? 20,
);
}
}

View File

@ -1,59 +0,0 @@
import { Controller, Get, Query } from '@nestjs/common';
import {
ApiTags,
ApiOperation,
ApiBearerAuth,
ApiQuery,
} from '@nestjs/swagger';
import { DashboardService } from '../../application/services/dashboard.service';
@ApiTags('Reports')
@ApiBearerAuth()
@Controller('reports')
export class ReportsController {
constructor(private readonly dashboardService: DashboardService) {}
@Get('daily')
@ApiOperation({ summary: '获取每日报表' })
@ApiQuery({ name: 'page', required: false, type: Number })
@ApiQuery({ name: 'pageSize', required: false, type: Number })
@ApiQuery({ name: 'days', required: false, type: Number })
async getDailyReports(
@Query('page') page?: number,
@Query('pageSize') pageSize?: number,
@Query('days') days?: number,
) {
const result = await this.dashboardService.getReports(
page ?? 1,
pageSize ?? 30,
);
// 转换为前端期望的格式
return {
items: result.data.map((report: any) => ({
id: report.id,
reportDate: report.reportDate,
totalUsers: report.users?.total || 0,
newUsers: report.users?.new || 0,
adoptedUsers: report.adoptions?.total || 0,
newAdoptedUsers: report.adoptions?.new || 0,
totalContribution: report.contribution?.total || '0',
newContribution: report.contribution?.growth || '0',
totalDistributed: report.mining?.distributed || '0',
dailyDistributed: report.mining?.distributed || '0',
totalBurned: report.mining?.burned || '0',
dailyBurned: report.mining?.burned || '0',
openPrice: report.price?.open || '1',
closePrice: report.price?.close || '1',
highPrice: report.price?.high || '1',
lowPrice: report.price?.low || '1',
totalVolume: report.trading?.volume || '0',
dailyVolume: report.trading?.volume || '0',
})),
total: result.total,
page: result.pagination.page,
pageSize: result.pagination.pageSize,
totalPages: result.pagination.totalPages,
};
}
}

View File

@ -1,5 +1,5 @@
- import { Controller, Get, Param, Query } from '@nestjs/common';
+ import { Controller, Get } from '@nestjs/common';
- import { ApiTags, ApiOperation, ApiBearerAuth, ApiParam, ApiQuery } from '@nestjs/swagger';
+ import { ApiTags, ApiOperation, ApiBearerAuth } from '@nestjs/swagger';
  import { SystemAccountsService } from '../../application/services/system-accounts.service';
  @ApiTags('System Accounts')
@ -19,89 +19,4 @@ export class SystemAccountsController {
  async getSystemAccountsSummary() {
    return this.systemAccountsService.getSystemAccountsSummary();
  }
@Get(':accountType/records')
@ApiOperation({ summary: '获取系统账户挖矿记录' })
@ApiParam({ name: 'accountType', type: String, description: '系统账户类型OPERATION/PROVINCE/CITY/HEADQUARTERS' })
@ApiQuery({ name: 'regionCode', required: false, type: String, description: '区域代码(省/市代码)' })
@ApiQuery({ name: 'page', required: false, type: Number })
@ApiQuery({ name: 'pageSize', required: false, type: Number })
async getSystemAccountMiningRecords(
@Param('accountType') accountType: string,
@Query('regionCode') regionCode?: string,
@Query('page') page?: number,
@Query('pageSize') pageSize?: number,
) {
return this.systemAccountsService.getSystemAccountMiningRecords(
accountType,
regionCode || null,
page ?? 1,
pageSize ?? 20,
);
}
@Get(':accountType/transactions')
@ApiOperation({ summary: '获取系统账户交易记录' })
@ApiParam({ name: 'accountType', type: String, description: '系统账户类型OPERATION/PROVINCE/CITY/HEADQUARTERS' })
@ApiQuery({ name: 'regionCode', required: false, type: String, description: '区域代码(省/市代码)' })
@ApiQuery({ name: 'page', required: false, type: Number })
@ApiQuery({ name: 'pageSize', required: false, type: Number })
async getSystemAccountTransactions(
@Param('accountType') accountType: string,
@Query('regionCode') regionCode?: string,
@Query('page') page?: number,
@Query('pageSize') pageSize?: number,
) {
return this.systemAccountsService.getSystemAccountTransactions(
accountType,
regionCode || null,
page ?? 1,
pageSize ?? 20,
);
}
@Get(':accountType/contributions')
@ApiOperation({
summary: '获取系统账户算力来源明细',
description: '显示该账户的每笔算力来自哪个认种订单',
})
@ApiParam({
name: 'accountType',
type: String,
description: '系统账户类型OPERATION/PROVINCE/CITY/HEADQUARTERS',
})
@ApiQuery({ name: 'regionCode', required: false, type: String, description: '区域代码(省/市代码)' })
@ApiQuery({ name: 'page', required: false, type: Number, description: '页码默认1' })
@ApiQuery({ name: 'pageSize', required: false, type: Number, description: '每页数量默认20' })
async getSystemAccountContributionRecords(
@Param('accountType') accountType: string,
@Query('regionCode') regionCode?: string,
@Query('page') page?: number,
@Query('pageSize') pageSize?: number,
) {
return this.systemAccountsService.getSystemAccountContributionRecords(
accountType,
regionCode || null,
page ?? 1,
pageSize ?? 20,
);
}
@Get(':accountType/contribution-stats')
@ApiOperation({
summary: '获取系统账户算力明细统计',
description: '显示算力来源的汇总信息,包括记录数、来源认种订单数、来源用户数等',
})
@ApiParam({
name: 'accountType',
type: String,
description: '系统账户类型OPERATION/PROVINCE/CITY/HEADQUARTERS',
})
@ApiQuery({ name: 'regionCode', required: false, type: String, description: '区域代码(省/市代码)' })
async getSystemAccountContributionStats(
@Param('accountType') accountType: string,
@Query('regionCode') regionCode?: string,
) {
return this.systemAccountsService.getSystemAccountContributionStats(accountType, regionCode || null);
}
}

View File

@ -141,17 +141,4 @@ export class UsersController {
  ) {
    return this.usersService.getWalletLedger(accountSequence, page ?? 1, pageSize ?? 20);
  }
@Get(':accountSequence/batch-mining-records')
@ApiOperation({ summary: '获取用户批量补发记录' })
@ApiParam({ name: 'accountSequence', type: String })
@ApiQuery({ name: 'page', required: false, type: Number })
@ApiQuery({ name: 'pageSize', required: false, type: Number })
async getBatchMiningRecords(
@Param('accountSequence') accountSequence: string,
@Query('page') page?: number,
@Query('pageSize') pageSize?: number,
) {
return this.usersService.getBatchMiningRecords(accountSequence, page ?? 1, pageSize ?? 20);
}
}

View File

@ -12,11 +12,7 @@ import { AdminAuthGuard } from './shared/guards/admin-auth.guard';
    imports: [
      ConfigModule.forRoot({
        isGlobal: true,
-       envFilePath: [
-         `.env.${process.env.NODE_ENV || 'development'}`,
-         '.env',
-         '../.env', // 父目录共享 .env
-       ],
+       envFilePath: [`.env.${process.env.NODE_ENV || 'development'}`, '.env'],
      }),
      InfrastructureModule,
      ApplicationModule,

View File

@ -2,37 +2,28 @@ import { Module, OnModuleInit } from '@nestjs/common';
  import { InfrastructureModule } from '../infrastructure/infrastructure.module';
  import { AuthService } from './services/auth.service';
  import { ConfigManagementService } from './services/config.service';
- import { InitializationService } from './services/initialization.service';
  import { DashboardService } from './services/dashboard.service';
  import { UsersService } from './services/users.service';
  import { SystemAccountsService } from './services/system-accounts.service';
- import { DailyReportService } from './services/daily-report.service';
- import { ManualMiningService } from './services/manual-mining.service';
- import { PendingContributionsService } from './services/pending-contributions.service';
- import { BatchMiningService } from './services/batch-mining.service';
  @Module({
    imports: [InfrastructureModule],
    providers: [
      AuthService,
      ConfigManagementService,
-     InitializationService,
      DashboardService,
      UsersService,
      SystemAccountsService,
-     DailyReportService,
-     ManualMiningService,
-     PendingContributionsService,
-     BatchMiningService,
    ],
    exports: [
      AuthService,
      ConfigManagementService,
-     InitializationService,
      DashboardService,
      UsersService,
      SystemAccountsService,
-     DailyReportService,
-     ManualMiningService,
-     PendingContributionsService,
-     BatchMiningService,
    ],
  })
  export class ApplicationModule implements OnModuleInit {

View File

@ -1,383 +0,0 @@
import { Injectable, Logger, HttpException, HttpStatus } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { PrismaService } from '../../infrastructure/persistence/prisma/prisma.service';
/**
 * One parsed row of the batch mining re-issue Excel import.
 */
export interface BatchMiningItem {
accountSequence: string; // 注册ID (用户账号序列号)
treeCount: number; // 认种量(棵)
miningStartDate: string; // 挖矿开始时间
batch: number; // 批次号
preMineDays: number; // 授权提前挖的天数(该批次比后续批次提前的天数)
totalMiningDays: number; // 总挖矿天数(从挖矿开始日期到今天)
remark?: string; // 备注
}
/**
 * Batch mining re-issue request.
 */
export interface BatchMiningRequest {
items: BatchMiningItem[];
operatorId: string;
operatorName: string;
reason: string;
}
/**
 * Batch mining re-issue service.
 * Proxies the mining-service batch-mining admin API.
 */
@Injectable()
export class BatchMiningService {
private readonly logger = new Logger(BatchMiningService.name);
private readonly miningServiceUrl: string;
constructor(
private readonly prisma: PrismaService,
private readonly configService: ConfigService,
) {
this.miningServiceUrl = this.configService.get<string>(
'MINING_SERVICE_URL',
'http://localhost:3021',
);
}
/**
 * Get the batch re-issue status from mining-service.
 */
async getStatus(): Promise<any> {
const url = `${this.miningServiceUrl}/api/v2/mining/admin/batch-mining/status`;
this.logger.log(`[getStatus] 开始获取批量补发状态, URL: ${url}`);
try {
this.logger.log(`[getStatus] 发送 GET 请求...`);
const response = await fetch(url, {
method: 'GET',
headers: { 'Content-Type': 'application/json' },
});
this.logger.log(`[getStatus] 响应状态码: ${response.status}`);
const result = await response.json();
this.logger.log(`[getStatus] 响应数据: ${JSON.stringify(result)}`);
if (!response.ok) {
this.logger.error(`[getStatus] 请求失败: ${result.message || '未知错误'}`);
throw new HttpException(
result.message || '获取状态失败',
response.status,
);
}
// mining-service 使用 TransformInterceptor 包装响应为 { success, data, timestamp }
const data = result.data || result;
this.logger.log(`[getStatus] 成功获取状态: hasExecuted=${data.hasExecuted}`);
return data;
} catch (error) {
if (error instanceof HttpException) {
throw error;
}
this.logger.error(`[getStatus] 调用 mining-service 失败:`, error);
throw new HttpException(
`调用 mining-service 失败: ${error instanceof Error ? error.message : error}`,
HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
/**
 * Preview a batch re-issue without executing it.
 */
async preview(items: BatchMiningItem[]): Promise<any> {
const url = `${this.miningServiceUrl}/api/v2/mining/admin/batch-mining/preview`;
this.logger.log(`[preview] 开始预览批量补发, URL: ${url}`);
this.logger.log(`[preview] 数据条数: ${items.length}`);
this.logger.log(`[preview] 前3条数据: ${JSON.stringify(items.slice(0, 3))}`);
try {
this.logger.log(`[preview] 发送 POST 请求...`);
const response = await fetch(url, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ items }),
});
this.logger.log(`[preview] 响应状态码: ${response.status}`);
const result = await response.json();
if (!response.ok) {
this.logger.error(`[preview] 请求失败: ${result.message || '未知错误'}`);
throw new HttpException(
result.message || '预览失败',
response.status,
);
}
// mining-service 使用 TransformInterceptor 包装响应为 { success, data, timestamp }
const data = result.data || result;
this.logger.log(`[preview] 响应数据概要: totalBatches=${data.totalBatches}, totalUsers=${data.totalUsers}, grandTotalAmount=${data.grandTotalAmount}`);
this.logger.log(`[preview] 预览成功`);
return data;
} catch (error) {
if (error instanceof HttpException) {
throw error;
}
this.logger.error(`[preview] 调用 mining-service 失败:`, error);
throw new HttpException(
`调用 mining-service 失败: ${error instanceof Error ? error.message : error}`,
HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
/**
 * Execute the batch re-issue.
 */
async execute(
request: BatchMiningRequest,
adminId: string,
): Promise<any> {
const url = `${this.miningServiceUrl}/api/v2/mining/admin/batch-mining/execute`;
this.logger.log(`[execute] 开始执行批量补发, URL: ${url}`);
this.logger.log(`[execute] 操作人: ${request.operatorName} (${request.operatorId})`);
this.logger.log(`[execute] 原因: ${request.reason}`);
this.logger.log(`[execute] 数据条数: ${request.items.length}`);
try {
this.logger.log(`[execute] 发送 POST 请求...`);
const response = await fetch(url, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(request),
});
this.logger.log(`[execute] 响应状态码: ${response.status}`);
const result = await response.json();
this.logger.log(`[execute] 响应数据: ${JSON.stringify(result)}`);
if (!response.ok) {
this.logger.error(`[execute] 请求失败: ${result.message || '未知错误'}`);
throw new HttpException(
result.message || '执行失败',
response.status,
);
}
// mining-service 使用 TransformInterceptor 包装响应为 { success, data, timestamp }
const data = result.data || result;
// 记录审计日志(失败不影响返回结果,因为实际操作已完成)
this.logger.log(`[execute] 记录审计日志...`);
try {
await this.prisma.auditLog.create({
data: {
adminId,
action: 'CREATE',
resource: 'BATCH_MINING',
resourceId: data.batchId,
newValue: {
totalUsers: data.totalUsers,
successCount: data.successCount,
failedCount: data.failedCount,
totalAmount: data.totalAmount,
reason: request.reason,
},
},
});
this.logger.log(`[execute] 审计日志记录成功`);
} catch (auditError) {
// 审计日志失败不应该影响返回结果,因为批量补发已经成功执行
this.logger.error(`[execute] 审计日志记录失败(不影响返回结果):`, auditError);
}
this.logger.log(
`[execute] 批量补发执行成功: admin=${adminId}, total=${data.totalUsers}, success=${data.successCount}, amount=${data.totalAmount}`,
);
return data;
} catch (error) {
if (error instanceof HttpException) {
throw error;
}
this.logger.error(`[execute] 调用 mining-service 失败:`, error);
throw new HttpException(
`调用 mining-service 失败: ${error instanceof Error ? error.message : error}`,
HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
/**
 * Get the batch re-issue execution record.
 */
async getExecution(): Promise<any> {
const url = `${this.miningServiceUrl}/api/v2/mining/admin/batch-mining/execution`;
this.logger.log(`[getExecution] 开始获取执行记录, URL: ${url}`);
try {
this.logger.log(`[getExecution] 发送 GET 请求...`);
const response = await fetch(url, {
method: 'GET',
headers: { 'Content-Type': 'application/json' },
});
this.logger.log(`[getExecution] 响应状态码: ${response.status}`);
if (response.status === 404) {
this.logger.log(`[getExecution] 未找到执行记录 (404)`);
return null;
}
const result = await response.json();
if (!response.ok) {
this.logger.error(`[getExecution] 请求失败: ${result.message || '未知错误'}`);
throw new HttpException(
result.message || '获取记录失败',
response.status,
);
}
// mining-service 使用 TransformInterceptor 包装响应为 { success, data, timestamp }
const data = result.data || result;
this.logger.log(`[getExecution] 响应数据概要: id=${data.id}, totalUsers=${data.totalUsers}`);
this.logger.log(`[getExecution] 成功获取执行记录`);
return data;
} catch (error) {
if (error instanceof HttpException) {
throw error;
}
this.logger.error(`[getExecution] 调用 mining-service 失败:`, error);
throw new HttpException(
`调用 mining-service 失败: ${error instanceof Error ? error.message : error}`,
HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
/**
 * Parse raw Excel rows into BatchMiningItem entries.
 * Expected column layout:
 * 序号 | 注册ID | 认种量(棵) | 挖矿开始时间 | 批次 | 授权提前挖的天数 | 备注
 */
parseExcelData(rows: any[]): BatchMiningItem[] {
this.logger.log(`[parseExcelData] 开始解析 Excel 数据, 总行数: ${rows.length}`);
const items: BatchMiningItem[] = [];
const today = new Date();
today.setHours(0, 0, 0, 0);
// 打印前5行原始数据用于调试
this.logger.log(`[parseExcelData] 前5行原始数据:`);
for (let i = 0; i < Math.min(5, rows.length); i++) {
this.logger.log(`[parseExcelData] 行${i}: ${JSON.stringify(rows[i])}`);
}
for (let i = 0; i < rows.length; i++) {
const row = rows[i];
// 跳过空行
if (!row || !row[0]) {
this.logger.debug(`[parseExcelData] 跳过行 ${i + 1}: 空行`);
continue;
}
// 跳过标题行
const firstCell = String(row[0]).trim();
if (firstCell === '用户ID' || firstCell === '注册ID' || firstCell === '序号') {
this.logger.debug(`[parseExcelData] 跳过行 ${i + 1}: 标题行`);
continue;
}
// Excel 格式:序号 | 注册ID | 认种量(棵)| 挖矿开始时间 | 批次 | 授权提前挖的天数 | 备注
// 索引: 0 1 2 3 4 5 6
// 获取用户ID (第二列索引1)
let accountSequence = String(row[1]).trim();
if (!accountSequence.startsWith('D')) {
accountSequence = 'D' + accountSequence;
}
// 获取认种量 (第三列索引2)
const treeCount = parseInt(row[2], 10);
if (isNaN(treeCount) || treeCount <= 0) {
this.logger.debug(`[parseExcelData] 跳过行 ${i + 1}: 认种量无效 (${row[2]})`);
continue;
}
// 获取挖矿开始时间 (第四列索引3)
const miningStartDateStr = String(row[3] || '').trim();
// 解析挖矿开始时间,计算总挖矿天数
const miningStartDate = this.parseDate(miningStartDateStr);
let totalMiningDays = 0;
if (miningStartDate) {
const diffTime = today.getTime() - miningStartDate.getTime();
totalMiningDays = Math.floor(diffTime / (1000 * 60 * 60 * 24));
}
// 获取批次 (第五列索引4)
const batch = parseInt(row[4], 10);
if (isNaN(batch) || batch <= 0) {
this.logger.warn(`[parseExcelData] 跳过行 ${i + 1}: 批次无效 (${row[4]})`);
continue;
}
// 获取授权提前挖的天数 (第六列索引5)
const preMineDays = parseInt(row[5], 10);
if (isNaN(preMineDays) || preMineDays <= 0) {
this.logger.warn(`[parseExcelData] 跳过行 ${i + 1}: 授权提前挖的天数无效 (${row[5]})`);
continue;
}
// 获取备注 (第七列索引6)
const remark = row[6] ? String(row[6]).trim() : undefined;
items.push({
accountSequence,
treeCount,
miningStartDate: miningStartDateStr,
batch,
preMineDays,
totalMiningDays,
remark,
});
}
this.logger.log(`[parseExcelData] 解析完成, 有效数据: ${items.length}`);
if (items.length > 0) {
this.logger.log(`[parseExcelData] 第一条数据: ${JSON.stringify(items[0])}`);
this.logger.log(`[parseExcelData] 最后一条数据: ${JSON.stringify(items[items.length - 1])}`);
}
return items;
}
/**
 * Parse a date string.
 * Supported formats: 2025.11.8, 2025-11-08, 2025/11/8
 */
private parseDate(dateStr: string): Date | null {
if (!dateStr) return null;
const formats = [
/^(\d{4})\.(\d{1,2})\.(\d{1,2})$/, // 2025.11.8
/^(\d{4})-(\d{1,2})-(\d{1,2})$/, // 2025-11-08
/^(\d{4})\/(\d{1,2})\/(\d{1,2})$/, // 2025/11/8
];
for (const format of formats) {
const match = dateStr.match(format);
if (match) {
const year = parseInt(match[1], 10);
const month = parseInt(match[2], 10) - 1;
const day = parseInt(match[3], 10);
const date = new Date(year, month, day);
date.setHours(0, 0, 0, 0);
return date;
}
}
return null;
}
}
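
parseExcelData above skips header and invalid rows, prefixes the account sequence with 'D' when it is missing, and derives totalMiningDays from the mining start date. An illustrative input/output pair (all values made up; "today" assumed to be 2025-12-08):

// Column layout: 序号 | 注册ID | 认种量(棵) | 挖矿开始时间 | 批次 | 授权提前挖的天数 | 备注
const rows = [
  ['序号', '注册ID', '认种量(棵)', '挖矿开始时间', '批次', '授权提前挖的天数', '备注'], // header row, skipped
  [1, '10000123', 5, '2025.11.8', 1, 30, '首批'],
];

// With "today" fixed at 2025-12-08, the parser would yield roughly:
const expected = [
  {
    accountSequence: 'D10000123', // 'D' prefix added when missing
    treeCount: 5,
    miningStartDate: '2025.11.8',
    batch: 1,
    preMineDays: 30,
    totalMiningDays: 30, // days from 2025-11-08 to "today"
    remark: '首批',
  },
];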

View File

@ -1,264 +0,0 @@
import { Injectable, Logger, OnModuleInit } from '@nestjs/common';
import { PrismaService } from '../../infrastructure/persistence/prisma/prisma.service';
import Decimal from 'decimal.js';
@Injectable()
export class DailyReportService implements OnModuleInit {
private readonly logger = new Logger(DailyReportService.name);
private reportInterval: NodeJS.Timeout | null = null;
constructor(private readonly prisma: PrismaService) {}
async onModuleInit() {
// 启动时先生成一次报表
await this.generateTodayReport();
// 每小时检查并更新当日报表
this.reportInterval = setInterval(
() => this.generateTodayReport(),
60 * 60 * 1000, // 1 hour
);
this.logger.log('Daily report service initialized');
}
/**
 * Generate (or refresh) today's daily report.
 */
async generateTodayReport(): Promise<void> {
const today = new Date();
today.setHours(0, 0, 0, 0);
try {
this.logger.log(`Generating daily report for ${today.toISOString().split('T')[0]}`);
// 收集各项统计数据
const [
userStats,
adoptionStats,
contributionStats,
miningStats,
tradingStats,
priceStats,
] = await Promise.all([
this.getUserStats(today),
this.getAdoptionStats(today),
this.getContributionStats(today),
this.getMiningStats(),
this.getTradingStats(today),
this.getPriceStats(today),
]);
// 更新或创建今日报表
await this.prisma.dailyReport.upsert({
where: { reportDate: today },
create: {
reportDate: today,
...userStats,
...adoptionStats,
...contributionStats,
...miningStats,
...tradingStats,
...priceStats,
},
update: {
...userStats,
...adoptionStats,
...contributionStats,
...miningStats,
...tradingStats,
...priceStats,
},
});
this.logger.log(`Daily report generated successfully for ${today.toISOString().split('T')[0]}`);
} catch (error) {
this.logger.error('Failed to generate daily report', error);
}
}
/**
 * Generate the daily report for a historical date.
 */
async generateHistoricalReport(date: Date): Promise<void> {
const reportDate = new Date(date);
reportDate.setHours(0, 0, 0, 0);
const [
userStats,
adoptionStats,
contributionStats,
miningStats,
tradingStats,
priceStats,
] = await Promise.all([
this.getUserStats(reportDate),
this.getAdoptionStats(reportDate),
this.getContributionStats(reportDate),
this.getMiningStats(),
this.getTradingStats(reportDate),
this.getPriceStats(reportDate),
]);
await this.prisma.dailyReport.upsert({
where: { reportDate },
create: {
reportDate,
...userStats,
...adoptionStats,
...contributionStats,
...miningStats,
...tradingStats,
...priceStats,
},
update: {
...userStats,
...adoptionStats,
...contributionStats,
...miningStats,
...tradingStats,
...priceStats,
},
});
}
/**
 * User statistics as of the given date.
 */
private async getUserStats(date: Date) {
const nextDay = new Date(date);
nextDay.setDate(nextDay.getDate() + 1);
const [totalUsers, newUsers] = await Promise.all([
this.prisma.syncedUser.count({
where: { createdAt: { lt: nextDay } },
}),
this.prisma.syncedUser.count({
where: {
createdAt: { gte: date, lt: nextDay },
},
}),
]);
// 活跃用户暂时用总用户数(需要有活跃度跟踪才能准确计算)
const activeUsers = totalUsers;
return {
totalUsers,
newUsers,
activeUsers,
};
}
/**
 * Adoption statistics as of the given date.
 */
private async getAdoptionStats(date: Date) {
const nextDay = new Date(date);
nextDay.setDate(nextDay.getDate() + 1);
const [totalAdoptions, newAdoptions, treesResult] = await Promise.all([
this.prisma.syncedAdoption.count({
where: { adoptionDate: { lt: nextDay } },
}),
this.prisma.syncedAdoption.count({
where: {
adoptionDate: { gte: date, lt: nextDay },
},
}),
this.prisma.syncedAdoption.aggregate({
where: { adoptionDate: { lt: nextDay } },
_sum: { treeCount: true },
}),
]);
return {
totalAdoptions,
newAdoptions,
totalTrees: treesResult._sum.treeCount || 0,
};
}
/**
 * Contribution statistics.
 */
private async getContributionStats(date: Date) {
// 获取全网算力进度
const networkProgress = await this.prisma.syncedNetworkProgress.findFirst();
// 获取用户算力汇总
const userContribution = await this.prisma.syncedContributionAccount.aggregate({
_sum: {
totalContribution: true,
effectiveContribution: true,
},
});
const totalContribution = new Decimal(
userContribution._sum.totalContribution?.toString() || '0',
);
// 获取昨日报表计算增长
const yesterday = new Date(date);
yesterday.setDate(yesterday.getDate() - 1);
const yesterdayReport = await this.prisma.dailyReport.findUnique({
where: { reportDate: yesterday },
});
const contributionGrowth = yesterdayReport
? totalContribution.minus(new Decimal(yesterdayReport.totalContribution.toString()))
: totalContribution;
return {
totalContribution,
contributionGrowth: contributionGrowth.gt(0) ? contributionGrowth : new Decimal(0),
};
}
/**
 * Mining statistics (latest daily stat).
 */
private async getMiningStats() {
const dailyStat = await this.prisma.syncedDailyMiningStat.findFirst({
orderBy: { statDate: 'desc' },
});
return {
totalDistributed: dailyStat?.totalDistributed || new Decimal(0),
totalBurned: dailyStat?.totalBurned || new Decimal(0),
};
}
/**
 * Trading statistics for the given date.
 */
private async getTradingStats(date: Date) {
const kline = await this.prisma.syncedDayKLine.findUnique({
where: { klineDate: date },
});
return {
tradingVolume: kline?.volume || new Decimal(0),
tradingAmount: kline?.amount || new Decimal(0),
tradeCount: kline?.tradeCount || 0,
};
}
/**
 * Price statistics for the given date.
 */
private async getPriceStats(date: Date) {
const kline = await this.prisma.syncedDayKLine.findUnique({
where: { klineDate: date },
});
const defaultPrice = new Decimal(1);
return {
openPrice: kline?.open || defaultPrice,
closePrice: kline?.close || defaultPrice,
highPrice: kline?.high || defaultPrice,
lowPrice: kline?.low || defaultPrice,
};
}
}
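
generateHistoricalReport above upserts a single day keyed by reportDate, so backfilling a range is just a date loop. A minimal sketch of such a helper (not part of the service above):

// Hypothetical backfill helper: walks day by day from `from` to `to` (inclusive)
// and upserts one report per day via the service's existing method.
async function backfillReports(
  service: { generateHistoricalReport(d: Date): Promise<void> },
  from: Date,
  to: Date,
) {
  const day = new Date(from);
  day.setHours(0, 0, 0, 0);
  while (day <= to) {
    await service.generateHistoricalReport(new Date(day));
    day.setDate(day.getDate() + 1);
  }
}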

View File

@ -1,30 +1,10 @@
  import { Injectable, Logger } from '@nestjs/common';
  import { ConfigService } from '@nestjs/config';
- import { Decimal } from 'decimal.js';
  import { PrismaService } from '../../infrastructure/persistence/prisma/prisma.service';
- // 基准算力常量
- const BASE_CONTRIBUTION_PER_TREE = new Decimal('22617');
- const RATE_PERSONAL = new Decimal('0.70');
- const RATE_OPERATION = new Decimal('0.12');
- const RATE_PROVINCE = new Decimal('0.01');
- const RATE_CITY = new Decimal('0.02');
- const RATE_LEVEL_TOTAL = new Decimal('0.075');
- const RATE_BONUS_TOTAL = new Decimal('0.075');
- // 远程服务数据缓存
- interface RemoteServiceData {
-   totalDistributed: string;
-   totalBurned: string;
-   circulationPool: string;
-   fetchedAt: Date;
- }
  @Injectable()
  export class DashboardService {
    private readonly logger = new Logger(DashboardService.name);
-   private remoteDataCache: RemoteServiceData | null = null;
-   private readonly CACHE_TTL_MS = 30000; // 30秒缓存
    constructor(
      private readonly prisma: PrismaService,
@ -43,8 +23,6 @@ export class DashboardService {
        tradingStats,
        latestReport,
        latestKLine,
-       detailedContributionStats,
-       remoteData,
      ] = await Promise.all([
        this.getUserStats(),
        this.getContributionStats(),
@ -52,42 +30,13 @@ export class DashboardService {
        this.getTradingStats(),
        this.prisma.dailyReport.findFirst({ orderBy: { reportDate: 'desc' } }),
        this.prisma.syncedDayKLine.findFirst({ orderBy: { klineDate: 'desc' } }),
-       this.getDetailedContributionStats(),
-       this.fetchRemoteServiceData(),
      ]);
-     // 合并远程服务数据如果本地数据为空或为0则使用远程数据
-     const totalMined = miningStats.totalMined !== '0'
-       ? miningStats.totalMined
-       : remoteData.totalDistributed;
-     const totalBurned = miningStats.latestDailyStat?.totalBurned || remoteData.totalBurned;
-     const circulationPoolShares = tradingStats.circulationPool?.totalShares !== '0'
-       ? tradingStats.circulationPool?.totalShares
-       : remoteData.circulationPool;
      return {
        users: userStats,
        contribution: contributionStats,
-       mining: {
-         ...miningStats,
-         totalMined, // 使用合并后的已分配数据
-       },
-       trading: {
-         ...tradingStats,
-         circulationPool: {
-           totalShares: circulationPoolShares || '0',
-           totalCash: tradingStats.circulationPool?.totalCash || '0',
-         },
-       },
-       // 直接提供远程数据用于仪表盘显示
-       remoteData: {
-         totalDistributed: remoteData.totalDistributed,
-         totalBurned: remoteData.totalBurned,
-         circulationPool: remoteData.circulationPool,
-       },
-       detailedContribution: detailedContributionStats,
+       mining: miningStats,
+       trading: tradingStats,
        latestReport: latestReport
          ? this.formatDailyReport(latestReport)
          : null,
@ -161,302 +110,39 @@ export class DashboardService {
  /**
   * Contribution statistics.
-  * Effective contribution = personal (70%) + operation (12%) + province (1%) + city (2%)
-  *                        + level (unlocked + pending) + bonus (unlocked + pending)
-  * Effective contribution = theoretical network total
-  * Theoretical network total = total trees * 22617
   */
  private async getContributionStats() {
-   const [accounts, systemContributions, adoptionStats] = await Promise.all([
-     this.prisma.syncedContributionAccount.aggregate({
-       _sum: {
-         totalContribution: true,
-         effectiveContribution: true,
-         personalContribution: true,
-         teamLevelContribution: true,
-         teamBonusContribution: true,
-       },
-       _count: true,
-     }),
-     this.prisma.syncedSystemContribution.aggregate({
-       _sum: { contributionBalance: true },
-       _count: true,
-     }),
-     this.prisma.syncedAdoption.aggregate({
-       where: { status: 'MINING_ENABLED' },
-       _sum: { treeCount: true },
-       _count: true,
-     }),
-   ]);
+   const accounts = await this.prisma.syncedContributionAccount.aggregate({
+     _sum: {
+       totalContribution: true,
+       effectiveContribution: true,
+       personalContribution: true,
+       teamLevelContribution: true,
+       teamBonusContribution: true,
+     },
+     _count: true,
+   });
+   const systemContributions =
+     await this.prisma.syncedSystemContribution.aggregate({
+       _sum: { contributionBalance: true },
+       _count: true,
+     });
-   const totalTrees = adoptionStats._sum.treeCount || 0;
-   // 有效算力 = 理论总算力 = 总树数 * 22617
-   // 因为按照公式有效算力包含所有部分个人70%+运营12%+省1%+市2%+层级7.5%+团队7.5%=100%
-   const effectiveContribution = BASE_CONTRIBUTION_PER_TREE.mul(totalTrees);
-   // 个人算力(已分配到用户账户)
-   const personalContribution = new Decimal(accounts._sum.personalContribution || 0);
-   // 系统账户算力(运营+省+市)
-   const systemContribution = new Decimal(systemContributions._sum.contributionBalance || 0);
    return {
      totalAccounts: accounts._count,
      totalContribution: accounts._sum.totalContribution?.toString() || '0',
-     effectiveContribution: effectiveContribution.toString(),
-     personalContribution: personalContribution.toString(),
+     effectiveContribution:
+       accounts._sum.effectiveContribution?.toString() || '0',
+     personalContribution:
+       accounts._sum.personalContribution?.toString() || '0',
      teamLevelContribution:
        accounts._sum.teamLevelContribution?.toString() || '0',
      teamBonusContribution:
        accounts._sum.teamBonusContribution?.toString() || '0',
      systemAccounts: systemContributions._count,
-     systemContribution: systemContribution.toString(),
-     totalAdoptions: adoptionStats._count,
-     totalTrees,
+     systemContribution:
+       systemContributions._sum.contributionBalance?.toString() || '0',
    };
  }
/**
 * Detailed contribution breakdown.
 * Prefers the contribution-service API (which has accurate pending data);
 * falls back to locally synced data if the API is unavailable.
 */
private async getDetailedContributionStats() {
// 尝试从 contribution-service 获取完整数据
const contributionServiceData = await this.fetchContributionServiceStats();
if (contributionServiceData) {
return contributionServiceData;
}
// 回退:从本地同步数据计算
return this.getDetailedContributionStatsFromLocal();
}
/**
 * Fetch detailed contribution stats from the contribution-service API.
 */
private async fetchContributionServiceStats(): Promise<any | null> {
const contributionServiceUrl = this.configService.get<string>(
'CONTRIBUTION_SERVICE_URL',
'http://localhost:3020',
);
try {
const response = await fetch(`${contributionServiceUrl}/api/v2/contribution/stats`);
if (!response.ok) {
this.logger.warn(`Contribution service returned ${response.status}`);
return null;
}
const result = await response.json();
const data = result.data || result;
// 获取系统账户实际值(本地数据)
const systemAccounts = await this.prisma.syncedSystemContribution.findMany();
let operationActual = new Decimal(0);
let provinceActual = new Decimal(0);
let cityActual = new Decimal(0);
for (const account of systemAccounts) {
const balance = new Decimal(account.contributionBalance || 0);
if (account.accountType === 'OPERATION') operationActual = operationActual.plus(balance);
else if (account.accountType === 'PROVINCE') provinceActual = provinceActual.plus(balance);
else if (account.accountType === 'CITY') cityActual = cityActual.plus(balance);
}
return {
totalTrees: data.totalTrees || 0,
// 理论值
networkTotalTheory: data.networkTotalContribution || '0',
personalTheory: data.personalTotalContribution || '0',
operationTheory: data.operationTotalContribution || '0',
provinceTheory: data.provinceTotalContribution || '0',
cityTheory: data.cityTotalContribution || '0',
levelTheory: data.levelContribution?.total || '0',
bonusTheory: data.bonusContribution?.total || '0',
// 实际值
operationActual: operationActual.toString(),
provinceActual: provinceActual.toString(),
cityActual: cityActual.toString(),
// 层级算力详情(包含正确的 pending 数据)
levelContribution: {
total: data.levelContribution?.total || '0',
unlocked: data.levelContribution?.unlocked || '0',
pending: data.levelContribution?.pending || '0',
byTier: {
tier1: {
unlocked: data.levelContribution?.byTier?.tier1?.unlocked || '0',
pending: data.levelContribution?.byTier?.tier1?.pending || '0',
},
tier2: {
unlocked: data.levelContribution?.byTier?.tier2?.unlocked || '0',
pending: data.levelContribution?.byTier?.tier2?.pending || '0',
},
tier3: {
unlocked: data.levelContribution?.byTier?.tier3?.unlocked || '0',
pending: data.levelContribution?.byTier?.tier3?.pending || '0',
},
},
},
// 团队奖励算力详情(包含正确的 pending 数据)
bonusContribution: {
total: data.bonusContribution?.total || '0',
unlocked: data.bonusContribution?.unlocked || '0',
pending: data.bonusContribution?.pending || '0',
byTier: {
tier1: {
unlocked: data.bonusContribution?.byTier?.tier1?.unlocked || '0',
pending: data.bonusContribution?.byTier?.tier1?.pending || '0',
},
tier2: {
unlocked: data.bonusContribution?.byTier?.tier2?.unlocked || '0',
pending: data.bonusContribution?.byTier?.tier2?.pending || '0',
},
tier3: {
unlocked: data.bonusContribution?.byTier?.tier3?.unlocked || '0',
pending: data.bonusContribution?.byTier?.tier3?.pending || '0',
},
},
},
};
} catch (error) {
this.logger.warn(`Failed to fetch contribution service stats: ${error.message}`);
return null;
}
}
/**
 * Fallback: compute detailed contribution stats from locally synced data.
 */
private async getDetailedContributionStatsFromLocal() {
// 获取总树数
const adoptionStats = await this.prisma.syncedAdoption.aggregate({
where: { status: 'MINING_ENABLED' },
_sum: { treeCount: true },
});
const totalTrees = adoptionStats._sum.treeCount || 0;
// 按层级统计已分配的层级算力
const levelRecords = await this.prisma.syncedContributionRecord.groupBy({
by: ['levelDepth'],
where: {
sourceType: 'TEAM_LEVEL',
levelDepth: { not: null },
},
_sum: { amount: true },
});
// 按档位统计已分配的团队奖励算力
const bonusRecords = await this.prisma.syncedContributionRecord.groupBy({
by: ['bonusTier'],
where: {
sourceType: 'TEAM_BONUS',
bonusTier: { not: null },
},
_sum: { amount: true },
});
// 获取系统账户按类型的算力
const systemAccounts = await this.prisma.syncedSystemContribution.findMany();
// 汇总层级1-5, 6-10, 11-15
let levelTier1 = new Decimal(0);
let levelTier2 = new Decimal(0);
let levelTier3 = new Decimal(0);
for (const record of levelRecords) {
const depth = record.levelDepth!;
const amount = new Decimal(record._sum.amount || 0);
if (depth >= 1 && depth <= 5) levelTier1 = levelTier1.plus(amount);
else if (depth >= 6 && depth <= 10) levelTier2 = levelTier2.plus(amount);
else if (depth >= 11 && depth <= 15) levelTier3 = levelTier3.plus(amount);
}
// 汇总团队奖励档位
let bonusTier1 = new Decimal(0);
let bonusTier2 = new Decimal(0);
let bonusTier3 = new Decimal(0);
for (const record of bonusRecords) {
const tier = record.bonusTier!;
const amount = new Decimal(record._sum.amount || 0);
if (tier === 1) bonusTier1 = amount;
else if (tier === 2) bonusTier2 = amount;
else if (tier === 3) bonusTier3 = amount;
}
const levelUnlocked = levelTier1.plus(levelTier2).plus(levelTier3);
const bonusUnlocked = bonusTier1.plus(bonusTier2).plus(bonusTier3);
// 计算理论值
const networkTotal = BASE_CONTRIBUTION_PER_TREE.mul(totalTrees);
const personalTheory = networkTotal.mul(RATE_PERSONAL);
const operationTheory = networkTotal.mul(RATE_OPERATION);
const provinceTheory = networkTotal.mul(RATE_PROVINCE);
const cityTheory = networkTotal.mul(RATE_CITY);
const levelTheory = networkTotal.mul(RATE_LEVEL_TOTAL);
const bonusTheory = networkTotal.mul(RATE_BONUS_TOTAL);
// 计算未解锁(理论 - 已解锁)- 仅用于总数,各档位无法获取
const levelPending = levelTheory.minus(levelUnlocked).greaterThan(0)
? levelTheory.minus(levelUnlocked)
: new Decimal(0);
const bonusPending = bonusTheory.minus(bonusUnlocked).greaterThan(0)
? bonusTheory.minus(bonusUnlocked)
: new Decimal(0);
// 系统账户按类型汇总
let operationActual = new Decimal(0);
let provinceActual = new Decimal(0);
let cityActual = new Decimal(0);
for (const account of systemAccounts) {
const balance = new Decimal(account.contributionBalance || 0);
if (account.accountType === 'OPERATION') operationActual = operationActual.plus(balance);
else if (account.accountType === 'PROVINCE') provinceActual = provinceActual.plus(balance);
else if (account.accountType === 'CITY') cityActual = cityActual.plus(balance);
}
return {
totalTrees,
// 理论值(基于总树数计算)
networkTotalTheory: networkTotal.toString(),
personalTheory: personalTheory.toString(),
operationTheory: operationTheory.toString(),
provinceTheory: provinceTheory.toString(),
cityTheory: cityTheory.toString(),
levelTheory: levelTheory.toString(),
bonusTheory: bonusTheory.toString(),
// 实际值(从数据库统计)
operationActual: operationActual.toString(),
provinceActual: provinceActual.toString(),
cityActual: cityActual.toString(),
// 层级算力详情(本地无法获取各档位 pending显示为 N/A
levelContribution: {
total: levelTheory.toString(),
unlocked: levelUnlocked.toString(),
pending: levelPending.toString(),
byTier: {
tier1: { unlocked: levelTier1.toString(), pending: 'N/A' },
tier2: { unlocked: levelTier2.toString(), pending: 'N/A' },
tier3: { unlocked: levelTier3.toString(), pending: 'N/A' },
},
},
// 团队奖励算力详情(本地无法获取各档位 pending显示为 N/A
bonusContribution: {
total: bonusTheory.toString(),
unlocked: bonusUnlocked.toString(),
pending: bonusPending.toString(),
byTier: {
tier1: { unlocked: bonusTier1.toString(), pending: 'N/A' },
tier2: { unlocked: bonusTier2.toString(), pending: 'N/A' },
tier3: { unlocked: bonusTier3.toString(), pending: 'N/A' },
},
},
}; };
}
@ -609,79 +295,6 @@ export class DashboardService {
};
}
// ===========================================================================
// 远程服务数据获取(实时数据备选方案)
// ===========================================================================
/**
 * Fetch realtime figures from mining-service and trading-service,
 * since CDC sync may be incomplete.
 */
private async fetchRemoteServiceData(): Promise<RemoteServiceData> {
// 检查缓存
if (
this.remoteDataCache &&
Date.now() - this.remoteDataCache.fetchedAt.getTime() < this.CACHE_TTL_MS
) {
return this.remoteDataCache;
}
const miningServiceUrl = this.configService.get<string>(
'MINING_SERVICE_URL',
'http://localhost:3021',
);
const tradingServiceUrl = this.configService.get<string>(
'TRADING_SERVICE_URL',
'http://localhost:3022',
);
let totalDistributed = '0';
let totalBurned = '0';
let circulationPool = '0';
try {
// 从 mining-service 获取已分配积分股
const miningResponse = await fetch(
`${miningServiceUrl}/api/v2/mining/admin/status`,
);
if (miningResponse.ok) {
const miningResult = await miningResponse.json();
const miningData = miningResult.data || miningResult;
// 直接使用 totalDistributed所有用户 totalMined 的总和)
totalDistributed = miningData.totalDistributed || '0';
}
} catch (error) {
this.logger.warn(`Failed to fetch mining service data: ${error.message}`);
}
try {
// 从 trading-service 获取市场概览(包含销毁和流通池数据)
const marketResponse = await fetch(
`${tradingServiceUrl}/api/v2/asset/market`,
);
if (marketResponse.ok) {
const marketResult = await marketResponse.json();
const marketData = marketResult.data || marketResult;
// blackHoleAmount 是已销毁总量
totalBurned = marketData.blackHoleAmount || '0';
// circulationPool 是流通池余额
circulationPool = marketData.circulationPool || '0';
}
} catch (error) {
this.logger.warn(`Failed to fetch market overview: ${error.message}`);
}
// 更新缓存
this.remoteDataCache = {
totalDistributed,
totalBurned,
circulationPool,
fetchedAt: new Date(),
};
return this.remoteDataCache;
}
  // ===========================================================================
  // 辅助方法
  // ===========================================================================
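
The removed fetchRemoteServiceData above guarded its HTTP calls with a 30-second in-memory cache (CACHE_TTL_MS). A generic sketch of that pattern with illustrative names; the trading-service URL is the one used in the removed code:

// Wrap a remote call in a simple TTL cache, mirroring the removed pattern.
function cachedFetcher<T>(ttlMs: number, load: () => Promise<T>) {
  let cache: { value: T; fetchedAt: number } | null = null;
  return async (): Promise<T> => {
    if (cache && Date.now() - cache.fetchedAt < ttlMs) {
      return cache.value;
    }
    const value = await load();
    cache = { value, fetchedAt: Date.now() };
    return value;
  };
}

// Usage: wrap the remote call once, then call it freely from request handlers.
const getMarketOverview = cachedFetcher(30_000, async () => {
  const res = await fetch('http://localhost:3022/api/v2/asset/market');
  const body = await res.json();
  return body.data ?? body;
});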

View File

@ -0,0 +1,304 @@
import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { PrismaService } from '../../infrastructure/persistence/prisma/prisma.service';
@Injectable()
export class InitializationService {
private readonly logger = new Logger(InitializationService.name);
constructor(
private readonly prisma: PrismaService,
private readonly configService: ConfigService,
) {}
async initializeMiningConfig(
adminId: string,
config: {
totalShares: string;
distributionPool: string;
halvingPeriodYears: number;
burnTarget: string;
},
): Promise<{ success: boolean; message: string }> {
const record = await this.prisma.initializationRecord.create({
data: { type: 'MINING_CONFIG', status: 'PENDING', config, executedBy: adminId },
});
try {
const miningServiceUrl = this.configService.get<string>('MINING_SERVICE_URL', 'http://localhost:3021');
const response = await fetch(`${miningServiceUrl}/api/v1/admin/initialize`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(config),
});
if (!response.ok) {
throw new Error('Failed to initialize mining config');
}
await this.prisma.initializationRecord.update({
where: { id: record.id },
data: { status: 'COMPLETED', executedAt: new Date() },
});
await this.prisma.auditLog.create({
data: { adminId, action: 'INIT', resource: 'MINING', resourceId: record.id, newValue: config },
});
return { success: true, message: 'Mining config initialized successfully' };
} catch (error: any) {
await this.prisma.initializationRecord.update({
where: { id: record.id },
data: { status: 'FAILED', errorMessage: error.message },
});
return { success: false, message: error.message };
}
}
async initializeSystemAccounts(adminId: string): Promise<{ success: boolean; message: string }> {
const accounts = [
{ accountType: 'OPERATION', name: '运营账户', description: '12% 运营收入' },
{ accountType: 'PROVINCE', name: '省公司账户', description: '1% 省公司收入' },
{ accountType: 'CITY', name: '市公司账户', description: '2% 市公司收入' },
];
for (const account of accounts) {
await this.prisma.systemAccount.upsert({
where: { accountType: account.accountType },
create: account,
update: { name: account.name, description: account.description },
});
}
await this.prisma.auditLog.create({
data: { adminId, action: 'INIT', resource: 'SYSTEM_ACCOUNT', newValue: accounts },
});
return { success: true, message: 'System accounts initialized successfully' };
}
async activateMining(adminId: string): Promise<{ success: boolean; message: string }> {
try {
const miningServiceUrl = this.configService.get<string>('MINING_SERVICE_URL', 'http://localhost:3021');
const response = await fetch(`${miningServiceUrl}/api/v1/admin/activate`, { method: 'POST' });
if (!response.ok) {
throw new Error('Failed to activate mining');
}
await this.prisma.auditLog.create({
data: { adminId, action: 'INIT', resource: 'MINING', newValue: { action: 'ACTIVATE' } },
});
return { success: true, message: 'Mining activated successfully' };
} catch (error: any) {
return { success: false, message: error.message };
}
}
async syncAllUsers(adminId: string): Promise<{ success: boolean; message: string; syncedCount?: number }> {
try {
const authServiceUrl = this.configService.get<string>('AUTH_SERVICE_URL', 'http://localhost:3024');
const response = await fetch(`${authServiceUrl}/api/v2/admin/users/sync`);
if (!response.ok) {
throw new Error(`Failed to fetch users: ${response.statusText}`);
}
const responseData = await response.json();
const users = responseData.data?.users || responseData.users || [];
let syncedCount = 0;
for (const user of users) {
try {
await this.prisma.syncedUser.upsert({
where: { accountSequence: user.accountSequence },
create: {
originalUserId: user.id || user.accountSequence,
accountSequence: user.accountSequence,
phone: user.phone,
status: user.status || 'ACTIVE',
kycStatus: user.kycStatus || 'PENDING',
realName: user.realName || null,
isLegacyUser: user.isLegacyUser || false,
createdAt: new Date(user.createdAt),
},
update: {
phone: user.phone,
status: user.status || 'ACTIVE',
kycStatus: user.kycStatus || 'PENDING',
realName: user.realName || null,
},
});
syncedCount++;
} catch (err) {
this.logger.warn(`Failed to sync user ${user.accountSequence}: ${err}`);
}
}
await this.prisma.auditLog.create({
data: { adminId, action: 'SYNC', resource: 'USER', newValue: { syncedCount } },
});
return { success: true, message: `Synced ${syncedCount} users`, syncedCount };
} catch (error: any) {
return { success: false, message: error.message };
}
}
async syncAllContributionAccounts(adminId: string): Promise<{ success: boolean; message: string; syncedCount?: number }> {
try {
const contributionServiceUrl = this.configService.get<string>('CONTRIBUTION_SERVICE_URL', 'http://localhost:3020');
const response = await fetch(`${contributionServiceUrl}/api/v2/admin/accounts/sync`);
if (!response.ok) {
throw new Error(`Failed to fetch accounts: ${response.statusText}`);
}
const responseData = await response.json();
const accounts = responseData.data?.accounts || responseData.accounts || [];
let syncedCount = 0;
for (const account of accounts) {
try {
await this.prisma.syncedContributionAccount.upsert({
where: { accountSequence: account.accountSequence },
create: {
accountSequence: account.accountSequence,
personalContribution: account.personalContribution || 0,
teamLevelContribution: account.teamLevelContribution || 0,
teamBonusContribution: account.teamBonusContribution || 0,
totalContribution: account.totalContribution || 0,
effectiveContribution: account.effectiveContribution || 0,
hasAdopted: account.hasAdopted || false,
directReferralCount: account.directReferralAdoptedCount || 0,
unlockedLevelDepth: account.unlockedLevelDepth || 0,
unlockedBonusTiers: account.unlockedBonusTiers || 0,
},
update: {
personalContribution: account.personalContribution,
teamLevelContribution: account.teamLevelContribution,
teamBonusContribution: account.teamBonusContribution,
totalContribution: account.totalContribution,
effectiveContribution: account.effectiveContribution,
hasAdopted: account.hasAdopted,
directReferralCount: account.directReferralAdoptedCount,
unlockedLevelDepth: account.unlockedLevelDepth,
unlockedBonusTiers: account.unlockedBonusTiers,
},
});
syncedCount++;
} catch (err) {
this.logger.warn(`Failed to sync account ${account.accountSequence}: ${err}`);
}
}
await this.prisma.auditLog.create({
data: { adminId, action: 'SYNC', resource: 'CONTRIBUTION_ACCOUNT', newValue: { syncedCount } },
});
return { success: true, message: `Synced ${syncedCount} accounts`, syncedCount };
} catch (error: any) {
return { success: false, message: error.message };
}
}
async syncAllMiningAccounts(adminId: string): Promise<{ success: boolean; message: string; syncedCount?: number }> {
try {
const miningServiceUrl = this.configService.get<string>('MINING_SERVICE_URL', 'http://localhost:3021');
const response = await fetch(`${miningServiceUrl}/api/v1/admin/accounts/sync`);
if (!response.ok) {
throw new Error(`Failed to fetch accounts: ${response.statusText}`);
}
const responseData = await response.json();
const accounts = responseData.data?.accounts || responseData.accounts || [];
let syncedCount = 0;
for (const account of accounts) {
try {
await this.prisma.syncedMiningAccount.upsert({
where: { accountSequence: account.accountSequence },
create: {
accountSequence: account.accountSequence,
totalMined: account.totalMined || 0,
availableBalance: account.availableBalance || 0,
frozenBalance: account.frozenBalance || 0,
totalContribution: account.totalContribution || 0,
},
update: {
totalMined: account.totalMined,
availableBalance: account.availableBalance,
frozenBalance: account.frozenBalance,
totalContribution: account.totalContribution,
},
});
syncedCount++;
} catch (err) {
this.logger.warn(`Failed to sync mining account ${account.accountSequence}: ${err}`);
}
}
await this.prisma.auditLog.create({
data: { adminId, action: 'SYNC', resource: 'MINING_ACCOUNT', newValue: { syncedCount } },
});
return { success: true, message: `Synced ${syncedCount} mining accounts`, syncedCount };
} catch (error: any) {
return { success: false, message: error.message };
}
}
async syncAllTradingAccounts(adminId: string): Promise<{ success: boolean; message: string; syncedCount?: number }> {
try {
const tradingServiceUrl = this.configService.get<string>('TRADING_SERVICE_URL', 'http://localhost:3022');
const response = await fetch(`${tradingServiceUrl}/api/v1/admin/accounts/sync`);
if (!response.ok) {
throw new Error(`Failed to fetch accounts: ${response.statusText}`);
}
const responseData = await response.json();
const accounts = responseData.data?.accounts || responseData.accounts || [];
let syncedCount = 0;
for (const account of accounts) {
try {
await this.prisma.syncedTradingAccount.upsert({
where: { accountSequence: account.accountSequence },
create: {
accountSequence: account.accountSequence,
shareBalance: account.shareBalance || 0,
cashBalance: account.cashBalance || 0,
frozenShares: account.frozenShares || 0,
frozenCash: account.frozenCash || 0,
totalBought: account.totalBought || 0,
totalSold: account.totalSold || 0,
},
update: {
shareBalance: account.shareBalance,
cashBalance: account.cashBalance,
frozenShares: account.frozenShares,
frozenCash: account.frozenCash,
totalBought: account.totalBought,
totalSold: account.totalSold,
},
});
syncedCount++;
} catch (err) {
this.logger.warn(`Failed to sync trading account ${account.accountSequence}: ${err}`);
}
}
await this.prisma.auditLog.create({
data: { adminId, action: 'SYNC', resource: 'TRADING_ACCOUNT', newValue: { syncedCount } },
});
return { success: true, message: `Synced ${syncedCount} trading accounts`, syncedCount };
} catch (error: any) {
return { success: false, message: error.message };
}
}
}
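
Each syncAll* method above follows the same shape: fetch a list from the remote service, upsert each row, count successes, and write one audit-log entry. A condensed, hypothetical version of that shared pattern (audit logging omitted):

// Illustrative helper only; the service above inlines this logic per entity type.
async function syncList<T>(
  fetchUrl: string,
  pickList: (body: any) => T[],
  upsertOne: (item: T) => Promise<void>,
): Promise<{ success: boolean; message: string; syncedCount?: number }> {
  try {
    const res = await fetch(fetchUrl);
    if (!res.ok) throw new Error(`Failed to fetch: ${res.statusText}`);
    const body = await res.json();
    const items = pickList(body);
    let syncedCount = 0;
    for (const item of items) {
      try {
        await upsertOne(item);
        syncedCount++;
      } catch {
        // per-row failures are skipped, as in the service above
      }
    }
    return { success: true, message: `Synced ${syncedCount} records`, syncedCount };
  } catch (error: any) {
    return { success: false, message: error.message };
  }
}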

View File

@ -1,205 +0,0 @@
import { Injectable, Logger, HttpException, HttpStatus } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { PrismaService } from '../../infrastructure/persistence/prisma/prisma.service';
export interface ManualMiningCalculateRequest {
accountSequence: string;
adoptionDate: string;
}
export interface ManualMiningExecuteRequest {
accountSequence: string;
adoptionDate: string;
operatorId: string;
operatorName: string;
reason: string;
}
/**
 * Manual mining re-issue service.
 * Proxies the mining-service manual-mining admin API.
 */
@Injectable()
export class ManualMiningService {
private readonly logger = new Logger(ManualMiningService.name);
private readonly miningServiceUrl: string;
constructor(
private readonly prisma: PrismaService,
private readonly configService: ConfigService,
) {
this.miningServiceUrl = this.configService.get<string>(
'MINING_SERVICE_URL',
'http://localhost:3021',
);
}
/**
* Calculate the manual mining amount (proxied to mining-service)
*/
async calculate(request: ManualMiningCalculateRequest): Promise<any> {
try {
const response = await fetch(
`${this.miningServiceUrl}/admin/manual-mining/calculate`,
{
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(request),
},
);
const result = await response.json();
if (!response.ok) {
throw new HttpException(
result.message || '计算失败',
response.status,
);
}
return result;
} catch (error) {
if (error instanceof HttpException) {
throw error;
}
this.logger.error('Failed to calculate manual mining', error);
throw new HttpException(
`调用 mining-service 失败: ${error instanceof Error ? error.message : error}`,
HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
/**
* Execute manual mining and write an audit log (proxied to mining-service)
*/
async execute(
request: ManualMiningExecuteRequest,
adminId: string,
): Promise<any> {
try {
const response = await fetch(
`${this.miningServiceUrl}/admin/manual-mining/execute`,
{
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(request),
},
);
const result = await response.json();
if (!response.ok) {
throw new HttpException(
result.message || '执行失败',
response.status,
);
}
// Record audit log
await this.prisma.auditLog.create({
data: {
adminId,
action: 'CREATE',
resource: 'MANUAL_MINING',
resourceId: result.recordId,
newValue: {
accountSequence: request.accountSequence,
adoptionDate: request.adoptionDate,
amount: result.amount,
reason: request.reason,
},
},
});
this.logger.log(
`Manual mining executed by admin ${adminId}: account=${request.accountSequence}, amount=${result.amount}`,
);
return result;
} catch (error) {
if (error instanceof HttpException) {
throw error;
}
this.logger.error('Failed to execute manual mining', error);
throw new HttpException(
`调用 mining-service 失败: ${error instanceof Error ? error.message : error}`,
HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
/**
* Get manual mining records (paginated)
*/
async getRecords(page: number = 1, pageSize: number = 20): Promise<any> {
try {
const response = await fetch(
`${this.miningServiceUrl}/admin/manual-mining/records?page=${page}&pageSize=${pageSize}`,
{
method: 'GET',
headers: { 'Content-Type': 'application/json' },
},
);
const result = await response.json();
if (!response.ok) {
throw new HttpException(
result.message || '获取记录失败',
response.status,
);
}
return result;
} catch (error) {
if (error instanceof HttpException) {
throw error;
}
this.logger.error('Failed to get manual mining records', error);
throw new HttpException(
`调用 mining-service 失败: ${error instanceof Error ? error.message : error}`,
HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
/**
* Get a manual mining record by accountSequence
*/
async getRecordByAccountSequence(accountSequence: string): Promise<any> {
try {
const response = await fetch(
`${this.miningServiceUrl}/admin/manual-mining/records/${accountSequence}`,
{
method: 'GET',
headers: { 'Content-Type': 'application/json' },
},
);
if (response.status === 404) {
return null;
}
const result = await response.json();
if (!response.ok) {
throw new HttpException(
result.message || '获取记录失败',
response.status,
);
}
return result;
} catch (error) {
if (error instanceof HttpException) {
throw error;
}
this.logger.error('Failed to get manual mining record', error);
throw new HttpException(
`调用 mining-service 失败: ${error instanceof Error ? error.message : error}`,
HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
}
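A minimal usage sketch of the calculate-then-execute flow above; the caller context and all field values are hypothetical examples.

// Hypothetical two-step flow (sample values only):
const preview = await manualMiningService.calculate({
  accountSequence: 'A000001',   // sample account
  adoptionDate: '2025-01-01',
});
// After an operator confirms the previewed amount, execute; this also writes the audit log:
const result = await manualMiningService.execute(
  {
    accountSequence: 'A000001',
    adoptionDate: '2025-01-01',
    operatorId: 'op-1',
    operatorName: 'admin',
    reason: 'backfill missed mining',
  },
  'admin-id-123',               // adminId recorded in auditLog
);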

View File

@ -1,138 +0,0 @@
import { Injectable, Logger } from '@nestjs/common';
import { HttpService } from '@nestjs/axios';
import { ConfigService } from '@nestjs/config';
import { firstValueFrom } from 'rxjs';
@Injectable()
export class PendingContributionsService {
private readonly logger = new Logger(PendingContributionsService.name);
constructor(
private readonly httpService: HttpService,
private readonly configService: ConfigService,
) {}
private getMiningServiceUrl(): string {
return this.configService.get<string>(
'MINING_SERVICE_URL',
'http://localhost:3021',
);
}
/**
* List pending contributions (paginated, optionally filtered by contribution type)
*/
async getPendingContributions(
page: number = 1,
pageSize: number = 20,
contributionType?: string,
) {
const miningServiceUrl = this.getMiningServiceUrl();
try {
const params: any = { page, pageSize };
if (contributionType) {
params.contributionType = contributionType;
}
const response = await firstValueFrom(
this.httpService.get(`${miningServiceUrl}/admin/pending-contributions`, {
params,
}),
);
return response.data;
} catch (error) {
this.logger.warn(
`Failed to fetch pending contributions: ${error.message}`,
);
return { contributions: [], total: 0, page, pageSize };
}
}
/**
* Get the pending contributions summary
*/
async getPendingContributionsSummary() {
const miningServiceUrl = this.getMiningServiceUrl();
try {
const response = await firstValueFrom(
this.httpService.get(
`${miningServiceUrl}/admin/pending-contributions/summary`,
),
);
return response.data;
} catch (error) {
this.logger.warn(
`Failed to fetch pending contributions summary: ${error.message}`,
);
return {
byType: [],
total: { totalAmount: '0', count: 0 },
totalMinedToHeadquarters: '0',
};
}
}
/**
* Get mining records for a single pending contribution
*/
async getPendingContributionMiningRecords(
id: string,
page: number = 1,
pageSize: number = 20,
) {
const miningServiceUrl = this.getMiningServiceUrl();
try {
const response = await firstValueFrom(
this.httpService.get(
`${miningServiceUrl}/admin/pending-contributions/${id}/records`,
{
params: { page, pageSize },
},
),
);
return response.data;
} catch (error) {
this.logger.warn(
`Failed to fetch pending contribution mining records: ${error.message}`,
);
return {
pendingContribution: null,
records: [],
total: 0,
page,
pageSize,
};
}
}
/**
* Get all pending mining records (paginated)
*/
async getAllPendingMiningRecords(page: number = 1, pageSize: number = 20) {
const miningServiceUrl = this.getMiningServiceUrl();
try {
const response = await firstValueFrom(
this.httpService.get(
`${miningServiceUrl}/admin/pending-contributions/mining-records`,
{
params: { page, pageSize },
},
),
);
return response.data;
} catch (error) {
this.logger.warn(
`Failed to fetch all pending mining records: ${error.message}`,
);
return { records: [], total: 0, page, pageSize };
}
}
}
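For readers of getPendingContributionsSummary above, a sketch of the response shape implied by its fallback object; the per-type element fields beyond the totals are assumptions.

// Shape implied by the fallback returned when mining-service is unreachable:
interface PendingContributionsSummary {
  byType: Array<{ contributionType: string; totalAmount: string; count: number }>; // element shape assumed
  total: { totalAmount: string; count: number };
  totalMinedToHeadquarters: string;
}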

View File

@ -1,246 +1,84 @@
import { Injectable, Logger } from '@nestjs/common';
import { Injectable } from '@nestjs/common';
import { HttpService } from '@nestjs/axios';
import { ConfigService } from '@nestjs/config';
import { firstValueFrom } from 'rxjs';
import { PrismaService } from '../../infrastructure/persistence/prisma/prisma.service';
interface MiningServiceSystemAccount {
id: string;
accountType: string; // OPERATION / PROVINCE / CITY / HEADQUARTERS
regionCode: string | null; // 省/市代码,如 440000, 440100
name: string;
totalMined: string;
availableBalance: string;
totalContribution: string;
lastSyncedAt: string | null;
}
interface MiningServiceResponse {
accounts: MiningServiceSystemAccount[];
total: number;
}
@Injectable()
export class SystemAccountsService {
private readonly logger = new Logger(SystemAccountsService.name);
constructor(private readonly prisma: PrismaService) {}
constructor(
private readonly prisma: PrismaService,
private readonly httpService: HttpService,
private readonly configService: ConfigService,
) {}
/**
* Fetch system mining accounts from mining-service
*/
private async fetchMiningServiceSystemAccounts(): Promise<Map<string, MiningServiceSystemAccount>> {
const miningServiceUrl = this.configService.get<string>(
'MINING_SERVICE_URL',
'http://localhost:3021',
);
try {
const response = await firstValueFrom(
this.httpService.get<MiningServiceResponse>(
`${miningServiceUrl}/admin/system-accounts`,
),
);
const miningDataMap = new Map<string, MiningServiceSystemAccount>();
for (const account of response.data.accounts) {
// Key is accountType:regionCode, consistent with the contribution tables
const key = account.regionCode
? `${account.accountType}:${account.regionCode}`
: account.accountType;
miningDataMap.set(key, account);
}
return miningDataMap;
} catch (error) {
this.logger.warn(
`Failed to fetch mining service system accounts: ${error.message}`,
);
return new Map();
}
}
/**
*
* Based on CDC-synced synced_system_contributions
*
*/
async getSystemAccounts() {
// Contribution data from the CDC-synced SyncedSystemContribution table (primary data source)
// First load from the local SystemAccount table
const syncedContributions = await this.prisma.syncedSystemContribution.findMany({
const localAccounts = await this.prisma.systemAccount.findMany({
orderBy: [{ accountType: 'asc' }, { regionCode: 'asc' }],
orderBy: { accountType: 'asc' },
});
// Wallet data from the CDC-synced SyncedWalletSystemAccount table
// Then load contribution data from the CDC-synced SyncedSystemContribution table
const syncedWalletAccounts = await this.prisma.syncedWalletSystemAccount.findMany();
const syncedContributions =
await this.prisma.syncedSystemContribution.findMany();
// Mining data from mining-service
// Merge the data
const miningDataMap = await this.fetchMiningServiceSystemAccounts();
const accountsMap = new Map<string, any>();
// Province/city name map
// Add local accounts
const regionNameMap = await this.buildRegionNameMap();
for (const account of localAccounts) {
accountsMap.set(account.accountType, {
// Build the wallet data map
accountType: account.accountType,
const walletMap = new Map<string, any>();
name: account.name,
for (const wallet of syncedWalletAccounts) {
description: account.description,
// Wallet account codes look like "CITY-440100", "PROVINCE-440000", etc.
totalContribution: account.totalContribution.toString(),
if (wallet.code) {
createdAt: account.createdAt,
const regionCode = this.extractRegionCodeFromCode(wallet.code);
source: 'local',
if (regionCode) {
});
const key = `${wallet.accountType}:${regionCode}`;
walletMap.set(key, wallet);
}
}
// Also key by accountType alone (used for OPERATION, HEADQUARTERS, etc.)
walletMap.set(wallet.accountType, wallet);
}
// Build the response - contribution accounts are the primary source
// Update or add synced contribution data
const accounts = syncedContributions.map((contrib) => {
for (const contrib of syncedContributions) {
const key = contrib.regionCode
const existing = accountsMap.get(contrib.accountType);
? `${contrib.accountType}:${contrib.regionCode}`
if (existing) {
: contrib.accountType;
existing.contributionBalance = contrib.contributionBalance.toString();
existing.contributionNeverExpires = contrib.contributionNeverExpires;
const wallet = walletMap.get(key) || walletMap.get(contrib.accountType);
existing.syncedAt = contrib.syncedAt;
const miningData = miningDataMap.get(key) || miningDataMap.get(contrib.accountType);
existing.source = 'synced';
} else {
// Resolve the display name
accountsMap.set(contrib.accountType, {
const displayName = this.getDisplayName(contrib.accountType, contrib.regionCode, regionNameMap);
accountType: contrib.accountType,
name: contrib.name,
return {
contributionBalance: contrib.contributionBalance.toString(),
id: contrib.id,
contributionNeverExpires: contrib.contributionNeverExpires,
accountType: contrib.accountType,
syncedAt: contrib.syncedAt,
regionCode: contrib.regionCode,
source: 'synced',
name: displayName,
});
code: wallet?.code || null,
}
provinceId: wallet?.provinceId || null,
}
cityId: wallet?.cityId || null,
// 钱包余额(如果有钱包账户)
shareBalance: wallet?.shareBalance?.toString() || '0',
usdtBalance: wallet?.usdtBalance?.toString() || '0',
greenPointBalance: wallet?.greenPointBalance?.toString() || '0',
frozenShare: wallet?.frozenShare?.toString() || '0',
frozenUsdt: wallet?.frozenUsdt?.toString() || '0',
totalInflow: wallet?.totalInflow?.toString() || '0',
totalOutflow: wallet?.totalOutflow?.toString() || '0',
blockchainAddress: wallet?.blockchainAddress || null,
isActive: wallet?.isActive ?? true,
// 算力数据
contributionBalance: contrib.contributionBalance?.toString() || '0',
contributionNeverExpires: contrib.contributionNeverExpires || false,
// 挖矿数据
totalMined: miningData?.totalMined || '0',
availableBalance: miningData?.availableBalance || '0',
miningContribution: miningData?.totalContribution || '0',
miningLastSyncedAt: miningData?.lastSyncedAt || null,
syncedAt: contrib.syncedAt,
source: 'contribution',
};
});
return {
accounts,
accounts: Array.from(accountsMap.values()),
total: accounts.length,
total: accountsMap.size,
};
}
/**
* Build a region code -> name map from synced provinces and cities
*/
private async buildRegionNameMap(): Promise<Map<string, string>> {
const [provinces, cities] = await Promise.all([
this.prisma.syncedProvince.findMany({ select: { code: true, name: true } }),
this.prisma.syncedCity.findMany({ select: { code: true, name: true } }),
]);
const map = new Map<string, string>();
for (const province of provinces) {
map.set(province.code, province.name);
}
for (const city of cities) {
map.set(city.code, city.name);
}
return map;
}
/**
* Build the display name for an account type and region code
*/
private getDisplayName(
accountType: string,
regionCode: string | null,
regionNameMap: Map<string, string>,
): string {
// 基础账户类型名称
const baseNames: Record<string, string> = {
OPERATION: '运营账户',
HEADQUARTERS: '总部账户',
PROVINCE: '省公司账户',
CITY: '市公司账户',
};
if (!regionCode) {
return baseNames[accountType] || accountType;
}
// 根据区域代码查找名称
const regionName = regionNameMap.get(regionCode);
if (regionName) {
if (accountType === 'PROVINCE') {
return `${regionName}省公司`;
} else if (accountType === 'CITY') {
return `${regionName}市公司`;
}
}
// 回退:使用区域代码
return `${regionCode}账户`;
}
/**
* Extract the region code from a wallet account code
* e.g. "CITY-440100" -> "440100", "PROVINCE-440000" -> "440000"
* Codes without a region (e.g. "HEADQUARTERS") return null
*/
private extractRegionCodeFromCode(code: string): string | null {
if (!code) return null;
// 匹配 CITY-XXXXXX, PROVINCE-XXXXXX, PROV-XXXXXX 格式
const match = code.match(/^(?:CITY|PROVINCE|PROV)-(\d+)$/);
return match ? match[1] : null;
}
/**
*
*/
async getSystemAccountsSummary() {
const [
const [localAccounts, syncedContributions, miningConfig, circulationPool] =
syncedSystemAccounts,
await Promise.all([
syncedPoolAccounts,
this.prisma.systemAccount.findMany(),
syncedContributions,
this.prisma.syncedSystemContribution.findMany(),
miningConfig,
this.prisma.syncedMiningConfig.findFirst(),
circulationPool,
this.prisma.syncedCirculationPool.findFirst(),
] = await Promise.all([
]);
this.prisma.syncedWalletSystemAccount.findMany(),
this.prisma.syncedWalletPoolAccount.findMany(),
this.prisma.syncedSystemContribution.findMany(),
this.prisma.syncedMiningConfig.findFirst(),
this.prisma.syncedCirculationPool.findFirst(),
]);
// Mining data summary from mining-service
const miningDataMap = await this.fetchMiningServiceSystemAccounts();
// Compute total mined share points
let totalMined = 0;
for (const miningData of miningDataMap.values()) {
totalMined += Number(miningData.totalMined || 0);
}
// Compute total contribution
let totalSystemContribution = 0n;
for (const account of localAccounts) {
totalSystemContribution += BigInt(
account.totalContribution.toString().replace('.', ''),
);
}
let totalSyncedContribution = 0n;
for (const contrib of syncedContributions) {
totalSyncedContribution += BigInt(
@ -250,22 +88,10 @@ export class SystemAccountsService {
return {
systemAccounts: {
count: syncedSystemAccounts.length,
count: localAccounts.length,
totalBalance: syncedSystemAccounts.reduce(
totalContribution: (
(sum, acc) => sum + Number(acc.shareBalance),
Number(totalSystemContribution) / 100000000
0,
).toFixed(8),
totalMined: totalMined.toFixed(8),
},
poolAccounts: {
count: syncedPoolAccounts.length,
pools: syncedPoolAccounts.map((pool) => ({
poolType: pool.poolType,
name: pool.name,
balance: pool.balance.toString(),
targetBurn: pool.targetBurn?.toString(),
remainingBurn: pool.remainingBurn?.toString(),
})),
},
syncedContributions: {
count: syncedContributions.length,
@ -289,243 +115,4 @@ export class SystemAccountsService {
: null,
};
}
/**
* Get mining records for a system account (proxied to mining-service)
* @param accountType OPERATION/PROVINCE/CITY/HEADQUARTERS
* @param regionCode province/city code, e.g. 440000, 440100
* @param page
* @param pageSize
*/
async getSystemAccountMiningRecords(
accountType: string,
regionCode: string | null,
page: number = 1,
pageSize: number = 20,
) {
const miningServiceUrl = this.configService.get<string>(
'MINING_SERVICE_URL',
'http://localhost:3021',
);
try {
const params: Record<string, any> = { page, pageSize };
if (regionCode) {
params.regionCode = regionCode;
}
const response = await firstValueFrom(
this.httpService.get(
`${miningServiceUrl}/admin/system-accounts/${accountType}/records`,
{ params },
),
);
return response.data;
} catch (error) {
this.logger.warn(
`Failed to fetch system account mining records: ${error.message}`,
);
return { records: [], total: 0, page, pageSize, accountType, regionCode };
}
}
/**
* Get transactions for a system account (proxied to mining-service)
* @param accountType OPERATION/PROVINCE/CITY/HEADQUARTERS
* @param regionCode province/city code, e.g. 440000, 440100
* @param page
* @param pageSize
*/
async getSystemAccountTransactions(
accountType: string,
regionCode: string | null,
page: number = 1,
pageSize: number = 20,
) {
const miningServiceUrl = this.configService.get<string>(
'MINING_SERVICE_URL',
'http://localhost:3021',
);
try {
const params: Record<string, any> = { page, pageSize };
if (regionCode) {
params.regionCode = regionCode;
}
const response = await firstValueFrom(
this.httpService.get(
`${miningServiceUrl}/admin/system-accounts/${accountType}/transactions`,
{ params },
),
);
return response.data;
} catch (error) {
this.logger.warn(
`Failed to fetch system account transactions: ${error.message}`,
);
return { transactions: [], total: 0, page, pageSize, accountType, regionCode };
}
}
/**
* Get contribution detail records for a system account
* (from CDC-synced synced_system_contribution_records)
* @param accountType OPERATION/PROVINCE/CITY/HEADQUARTERS
* @param regionCode province/city code, e.g. 440000, 440100
* @param page
* @param pageSize
*/
async getSystemAccountContributionRecords(
accountType: string,
regionCode: string | null,
page: number = 1,
pageSize: number = 20,
) {
// Prisma needs { equals: null } to query NULL values
const whereClause = regionCode
? { accountType, regionCode }
: { accountType, regionCode: { equals: null } };
const [records, total] = await Promise.all([
this.prisma.syncedSystemContributionRecord.findMany({
where: whereClause,
skip: (page - 1) * pageSize,
take: pageSize,
orderBy: { createdAt: 'desc' },
}),
this.prisma.syncedSystemContributionRecord.count({
where: whereClause,
}),
]);
// 获取关联的认种订单和用户信息
const adoptionIds = [...new Set(records.map(r => r.sourceAdoptionId))];
const accountSequences = [...new Set(records.map(r => r.sourceAccountSequence))];
const [adoptions, users] = await Promise.all([
this.prisma.syncedAdoption.findMany({
where: { originalAdoptionId: { in: adoptionIds } },
select: {
originalAdoptionId: true,
accountSequence: true,
treeCount: true,
adoptionDate: true,
status: true,
contributionPerTree: true,
},
}),
this.prisma.syncedUser.findMany({
where: { accountSequence: { in: accountSequences } },
select: {
accountSequence: true,
phone: true,
realName: true,
nickname: true,
},
}),
]);
// 构建映射
const adoptionMap = new Map(adoptions.map(a => [a.originalAdoptionId.toString(), a]));
const userMap = new Map(users.map(u => [u.accountSequence, u]));
return {
records: records.map((record) => {
const adoption = adoptionMap.get(record.sourceAdoptionId.toString());
const user = userMap.get(record.sourceAccountSequence);
return {
originalRecordId: record.originalRecordId.toString(),
accountType: record.accountType,
regionCode: record.regionCode,
sourceAdoptionId: record.sourceAdoptionId.toString(),
sourceAccountSequence: record.sourceAccountSequence,
// 来源类型
sourceType: record.sourceType,
levelDepth: record.levelDepth,
// 认种订单详情
adoptionTreeCount: adoption?.treeCount || 0,
adoptionDate: adoption?.adoptionDate || null,
adoptionStatus: adoption?.status || null,
contributionPerTree: adoption?.contributionPerTree?.toString() || '0',
// 用户信息
sourceUserPhone: user?.phone ? this.maskPhone(user.phone) : null,
sourceUserName: user?.realName || user?.nickname || null,
// 分配信息
distributionRate: record.distributionRate.toString(),
amount: record.amount.toString(),
effectiveDate: record.effectiveDate,
expireDate: record.expireDate,
isExpired: record.isExpired,
createdAt: record.createdAt,
syncedAt: record.syncedAt,
};
}),
total,
page,
pageSize,
totalPages: Math.ceil(total / pageSize),
};
}
/**
* Mask the middle digits of a phone number
*/
private maskPhone(phone: string): string {
if (!phone || phone.length < 7) return phone;
return phone.substring(0, 3) + '****' + phone.substring(phone.length - 4);
}
/**
*
*
*/
async getSystemAccountContributionStats(accountType: string, regionCode: string | null) {
// Load the contribution account
// Use findFirst instead of findUnique because regionCode can be null
const contribution = await this.prisma.syncedSystemContribution.findFirst({
where: {
accountType,
regionCode: regionCode === null ? { equals: null } : regionCode,
},
});
const whereClause = regionCode
? { accountType, regionCode }
: { accountType, regionCode: { equals: null } };
// 获取明细记录统计
const recordStats = await this.prisma.syncedSystemContributionRecord.aggregate({
where: whereClause,
_count: true,
_sum: { amount: true },
});
// 获取来源认种订单数量(去重)
const uniqueAdoptions = await this.prisma.syncedSystemContributionRecord.groupBy({
by: ['sourceAdoptionId'],
where: whereClause,
});
// 获取来源用户数量(去重)
const uniqueUsers = await this.prisma.syncedSystemContributionRecord.groupBy({
by: ['sourceAccountSequence'],
where: whereClause,
});
return {
accountType,
regionCode,
name: contribution?.name || accountType,
totalContribution: contribution?.contributionBalance?.toString() || '0',
recordCount: recordStats._count,
sumFromRecords: recordStats._sum?.amount?.toString() || '0',
uniqueAdoptionCount: uniqueAdoptions.length,
uniqueUserCount: uniqueUsers.length,
};
}
}
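A small sketch of the accountType:regionCode keying convention shared by walletMap and miningDataMap above; the sample region codes mirror the ones used in the comments (440000, 440100).

// Keys are "ACCOUNT_TYPE:regionCode" for regional accounts, or just the account type otherwise.
const keyFor = (accountType: string, regionCode: string | null) =>
  regionCode ? `${accountType}:${regionCode}` : accountType;

keyFor('PROVINCE', '440000');  // 'PROVINCE:440000'
keyFor('CITY', '440100');      // 'CITY:440100'
keyFor('HEADQUARTERS', null);  // 'HEADQUARTERS'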

View File

@ -1,5 +1,4 @@
import { Injectable, NotFoundException, Logger } from '@nestjs/common';
import { Injectable, NotFoundException } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { PrismaService } from '../../infrastructure/persistence/prisma/prisma.service';
import { Prisma } from '@prisma/client';
@ -21,15 +20,7 @@ export interface GetOrdersQuery {
@Injectable()
export class UsersService {
private readonly logger = new Logger(UsersService.name);
constructor(private readonly prisma: PrismaService) {}
private readonly miningServiceUrl: string;
constructor(
private readonly prisma: PrismaService,
private readonly configService: ConfigService,
) {
this.miningServiceUrl = this.configService.get<string>('MINING_SERVICE_URL', 'http://localhost:3021');
}
/**
*
@ -112,38 +103,32 @@ export class UsersService {
*/
private async getAdoptionStatsForUsers(
accountSequences: string[],
): Promise<Map<string, { personalCount: number; personalOrders: number; teamCount: number; teamOrders: number }>> {
): Promise<Map<string, { personalCount: number; teamCount: number }>> {
const result = new Map<
string,
{ personalCount: number; personalOrders: number; teamCount: number; teamOrders: number }
{ personalCount: number; teamCount: number }
>();
if (accountSequences.length === 0) return result;
// Per-user personal adoption tree count and order count (only MINING_ENABLED status)
// Per-user personal adoption tree count
const personalAdoptions = await this.prisma.syncedAdoption.groupBy({
by: ['accountSequence'],
where: {
where: { accountSequence: { in: accountSequences } },
accountSequence: { in: accountSequences },
status: 'MINING_ENABLED',
},
_sum: { treeCount: true },
_count: { id: true },
});
for (const stat of personalAdoptions) {
result.set(stat.accountSequence, {
personalCount: stat._sum.treeCount || 0,
personalOrders: stat._count.id || 0,
teamCount: 0,
teamOrders: 0,
});
}
// Make sure every user has an entry
for (const seq of accountSequences) {
if (!result.has(seq)) {
result.set(seq, { personalCount: 0, personalOrders: 0, teamCount: 0, teamOrders: 0 });
result.set(seq, { personalCount: 0, teamCount: 0 });
}
}
@ -168,15 +153,12 @@ export class UsersService {
const teamAdoptionStats = await this.prisma.syncedAdoption.aggregate({
where: {
accountSequence: { in: teamMembers.map((m) => m.accountSequence) },
status: 'MINING_ENABLED',
},
_sum: { treeCount: true },
_count: { id: true },
});
const stats = result.get(ref.accountSequence);
if (stats) {
stats.teamCount = teamAdoptionStats._sum.treeCount || 0;
stats.teamOrders = teamAdoptionStats._count.id || 0;
}
}
}
@ -230,9 +212,9 @@ export class UsersService {
throw new NotFoundException(`用户 ${accountSequence} 不存在`);
}
// Personal adoption count (from synced_adoptions, only MINING_ENABLED status)
// Personal adoption count (from synced_adoptions)
const personalAdoptionStats = await this.prisma.syncedAdoption.aggregate({
where: { accountSequence, status: 'MINING_ENABLED' },
where: { accountSequence },
_sum: { treeCount: true },
_count: { id: true },
});
@ -244,7 +226,7 @@ export class UsersService {
});
const directReferralCount = directReferrals.length;
// Direct-referral adoption count (only MINING_ENABLED status)
// Direct-referral adoption count
let directReferralAdoptions = 0;
if (directReferrals.length > 0) {
const directAdoptionStats = await this.prisma.syncedAdoption.aggregate({
@ -252,7 +234,6 @@ export class UsersService {
accountSequence: {
in: directReferrals.map((r) => r.accountSequence),
},
status: 'MINING_ENABLED',
},
_sum: { treeCount: true },
});
@ -286,7 +267,6 @@ export class UsersService {
accountSequence: {
in: teamMembers.map((m) => m.accountSequence),
},
status: 'MINING_ENABLED',
},
_sum: { treeCount: true },
});
@ -432,7 +412,8 @@ export class UsersService {
}
/**
* Get user mining records (detailed records come from mining-service)
*/
async getUserMiningRecords(
accountSequence: string,
@ -449,79 +430,33 @@ export class UsersService {
}
const mining = user.miningAccount;
const emptySummary = {
if (!mining) {
accountSequence,
totalMined: '0',
availableBalance: '0',
frozenBalance: '0',
totalContribution: '0',
};
// Fetch mining records from mining-service
try {
const url = `${this.miningServiceUrl}/api/v2/mining/accounts/${accountSequence}/records?page=${page}&pageSize=${pageSize}`;
this.logger.log(`Fetching mining records from ${url}`);
const response = await fetch(url);
if (!response.ok) {
this.logger.warn(`Failed to fetch mining records: ${response.status}`);
return {
summary: mining ? {
accountSequence,
totalMined: mining.totalMined.toString(),
availableBalance: mining.availableBalance.toString(),
frozenBalance: mining.frozenBalance.toString(),
totalContribution: mining.totalContribution.toString(),
} : emptySummary,
records: [],
pagination: { page, pageSize, total: 0, totalPages: 0 },
};
}
const result = await response.json();
const recordsData = result.data || result;
// Format records into the shape the frontend expects
const records = (recordsData.data || []).map((r: any) => ({
id: r.id,
accountSequence,
distributionMinute: r.miningMinute,
contributionRatio: r.contributionRatio,
shareAmount: r.minedAmount,
priceSnapshot: r.secondDistribution,
createdAt: r.createdAt,
}));
return {
summary: mining ? {
summary: {
accountSequence,
totalMined: mining.totalMined.toString(),
totalMined: '0',
availableBalance: mining.availableBalance.toString(),
availableBalance: '0',
frozenBalance: mining.frozenBalance.toString(),
frozenBalance: '0',
totalContribution: mining.totalContribution.toString(),
totalContribution: '0',
} : emptySummary,
records,
pagination: {
page,
pageSize,
total: recordsData.total || 0,
totalPages: Math.ceil((recordsData.total || 0) / pageSize),
},
};
} catch (error) {
this.logger.error('Failed to fetch mining records from mining-service', error);
return {
summary: mining ? {
accountSequence,
totalMined: mining.totalMined.toString(),
availableBalance: mining.availableBalance.toString(),
frozenBalance: mining.frozenBalance.toString(),
totalContribution: mining.totalContribution.toString(),
} : emptySummary,
records: [],
pagination: { page, pageSize, total: 0, totalPages: 0 },
};
}
return {
summary: {
accountSequence,
totalMined: mining.totalMined.toString(),
availableBalance: mining.availableBalance.toString(),
frozenBalance: mining.frozenBalance.toString(),
totalContribution: mining.totalContribution.toString(),
},
// Detailed ledger entries must come from mining-service
records: [],
pagination: { page, pageSize, total: 0, totalPages: 0 },
note: '详细挖矿记录请查看 mining-service',
};
}
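For reference, the field mapping the removed code applies to each record returned by the mining-service records endpoint; given a record r from that response and the accountSequence being queried, it produces:

// Mapping from a mining-service record `r` to the shape the admin frontend expects:
const mapped = {
  id: r.id,
  accountSequence,
  distributionMinute: r.miningMinute,   // mining minute
  contributionRatio: r.contributionRatio,
  shareAmount: r.minedAmount,           // mined amount
  priceSnapshot: r.secondDistribution,  // per-second distribution used as the price snapshot
  createdAt: r.createdAt,
};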
/**
@ -633,14 +568,14 @@ export class UsersService {
}
/**
* Per-user adoption statistics (only MINING_ENABLED adoptions are counted)
*/
private async getUserAdoptionStats(
accountSequence: string,
): Promise<{ personal: number; team: number }> {
// Personal adoptions (only MINING_ENABLED status)
// Personal adoptions
const personalStats = await this.prisma.syncedAdoption.aggregate({
where: { accountSequence, status: 'MINING_ENABLED' },
where: { accountSequence },
_sum: { treeCount: true },
});
@ -652,7 +587,7 @@ export class UsersService {
let teamCount = 0;
if (referral?.originalUserId) {
// Team adoptions = sum of all downline members' adoptions (only MINING_ENABLED status)
// Team adoptions = sum of all downline members' adoptions
const teamMembers = await this.prisma.syncedReferral.findMany({
where: {
ancestorPath: { contains: referral.originalUserId.toString() },
@ -664,7 +599,6 @@ export class UsersService {
const teamStats = await this.prisma.syncedAdoption.aggregate({
where: {
accountSequence: { in: teamMembers.map((m) => m.accountSequence) },
status: 'MINING_ENABLED',
},
_sum: { treeCount: true },
});
@ -904,76 +838,9 @@ export class UsersService {
};
}
/**
* Get batch mining records from mining-service
*/
async getBatchMiningRecords(
accountSequence: string,
page: number,
pageSize: number,
) {
const user = await this.prisma.syncedUser.findUnique({
where: { accountSequence },
});
if (!user) {
throw new NotFoundException(`用户 ${accountSequence} 不存在`);
}
try {
const url = `${this.miningServiceUrl}/api/v2/mining/admin/batch-mining/records/${accountSequence}?page=${page}&pageSize=${pageSize}`;
this.logger.log(`Fetching batch mining records from ${url}`);
const response = await fetch(url);
if (!response.ok) {
if (response.status === 404) {
return {
records: [],
total: 0,
page,
pageSize,
totalPages: 0,
totalAmount: '0',
};
}
this.logger.warn(`Failed to fetch batch mining records: ${response.status}`);
return {
records: [],
total: 0,
page,
pageSize,
totalPages: 0,
totalAmount: '0',
};
}
const result = await response.json();
const data = result.data || result;
return {
records: data.records || [],
total: data.total || 0,
page: data.page || page,
pageSize: data.pageSize || pageSize,
totalPages: Math.ceil((data.total || 0) / pageSize),
totalAmount: data.totalAmount || '0',
};
} catch (error) {
this.logger.error('Failed to fetch batch mining records from mining-service', error);
return {
records: [],
total: 0,
page,
pageSize,
totalPages: 0,
totalAmount: '0',
};
}
}
/**
*
* Wallet summary from SyncedUserWallet and SyncedMiningAccount
* TODO: detailed ledger from mining-service
*/
async getWalletLedger(accountSequence: string, page: number, pageSize: number) {
const user = await this.prisma.syncedUser.findUnique({
@ -985,44 +852,20 @@ export class UsersService {
throw new NotFoundException(`用户 ${accountSequence} 不存在`); throw new NotFoundException(`用户 ${accountSequence} 不存在`);
} }
// Load the user's wallets of each type
const wallets = await this.prisma.syncedUserWallet.findMany({
where: { accountSequence },
});
// Group wallets by type
const walletByType = new Map(wallets.map(w => [w.walletType, w]));
const greenPointsWallet = walletByType.get('GREEN_POINTS');
const contributionWallet = walletByType.get('CONTRIBUTION');
const tokenWallet = walletByType.get('TOKEN_STORAGE');
const mining = user.miningAccount; const mining = user.miningAccount;
// Build the wallet summary in the shape the frontend expects
// usdtAvailable = available balance of the GREEN_POINTS wallet (green points)
// usdtFrozen = frozen balance of the GREEN_POINTS wallet
// pendingUsdt = pending (unclaimed) rewards (mining balance)
// settleableUsdt = settleable rewards
// settledTotalUsdt = settled rewards
// expiredTotalUsdt = expired rewards
const summary = {
usdtAvailable: greenPointsWallet?.balance?.toString() || '0',
usdtFrozen: greenPointsWallet?.frozenBalance?.toString() || '0',
pendingUsdt: mining?.availableBalance?.toString() || '0', // mining available balance used as pending
settleableUsdt: '0', // no data source yet
settledTotalUsdt: greenPointsWallet?.totalInflow?.toString() || '0', // total inflow used as settled
expiredTotalUsdt: '0', // no data source yet
};
// TODO: implement paginated wallet ledger queries
// SyncedUserWallet only provides summary data; ledger detail requires an additional table
return {
summary,
summary: {
availableBalance: mining?.availableBalance?.toString() || '0',
frozenBalance: mining?.frozenBalance?.toString() || '0',
totalMined: mining?.totalMined?.toString() || '0',
},
items: [],
total: 0,
page,
pageSize,
totalPages: 0,
note: '钱包流水数据需要从 mining-service 同步',
};
}
@ -1033,7 +876,7 @@ export class UsersService {
private formatUserListItem(
user: any,
extra?: {
adoptionStats?: { personalCount: number; personalOrders: number; teamCount: number; teamOrders: number };
adoptionStats?: { personalCount: number; teamCount: number };
referrerInfo?: { nickname: string | null; phone: string } | null;
},
) {
@ -1049,9 +892,7 @@ export class UsersService {
// Adoption statistics
adoption: {
personalAdoptionCount: extra?.adoptionStats?.personalCount || 0,
personalAdoptionOrders: extra?.adoptionStats?.personalOrders || 0,
teamAdoptions: extra?.adoptionStats?.teamCount || 0,
teamAdoptionOrders: extra?.adoptionStats?.teamOrders || 0,
},
// Referrer info
referral: user.referral

View File

@ -1,20 +1,12 @@
import { Module, Global } from '@nestjs/common';
import { ConfigModule, ConfigService } from '@nestjs/config';
import { HttpModule } from '@nestjs/axios';
import { PrismaModule } from './persistence/prisma/prisma.module';
import { RedisService } from './redis/redis.service';
import { KafkaModule } from './kafka/kafka.module';
@Global()
@Module({
imports: [
imports: [PrismaModule, KafkaModule],
PrismaModule,
KafkaModule,
HttpModule.register({
timeout: 10000,
maxRedirects: 5,
}),
],
providers: [
{
provide: 'REDIS_OPTIONS',
@ -28,6 +20,6 @@ import { KafkaModule } from './kafka/kafka.module';
},
RedisService,
],
exports: [PrismaModule, RedisService, KafkaModule, HttpModule],
exports: [PrismaModule, RedisService, KafkaModule],
})
export class InfrastructureModule {}

View File

@ -317,17 +317,9 @@ export class CdcConsumerService implements OnModuleInit, OnModuleDestroy {
* Normalize events from the Debezium outbox pipeline and directly published events
*/
private normalizeServiceEvent(data: any): Omit<ServiceEvent, 'sequenceNum' | 'sourceTopic'> {
// Already in camelCase (events published directly by mining-wallet-service)
// If already in camelCase, return as-is
// Note: mining-wallet-service uses eventId instead of id
if (data.eventType && data.aggregateType) {
return {
return data;
id: data.id ?? data.eventId,
eventType: data.eventType,
aggregateType: data.aggregateType,
aggregateId: data.aggregateId,
payload: data.payload,
createdAt: data.createdAt,
};
}
// Debezium outbox format conversion
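To illustrate the normalization above, a sketch of an already-camelCase event as published directly by mining-wallet-service; all field values are hypothetical.

// Direct (camelCase) event - note it carries eventId instead of id:
const direct = {
  eventId: 'evt-1',                    // hypothetical id
  eventType: 'CONTRIBUTION_CREDITED',
  aggregateType: 'WALLET',
  aggregateId: 'A000001',
  payload: { /* ... */ },
  createdAt: '2025-01-01T00:00:00Z',
};
// The removed mapping normalizes it to { id: 'evt-1', eventType, aggregateType, aggregateId, payload, createdAt },
// while Debezium outbox events fall through to the snake_case conversion that follows.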

View File

@ -158,16 +158,6 @@ export class CdcSyncService implements OnModuleInit {
'SystemContributionUpdated',
this.withIdempotency(this.handleSystemContributionUpdated.bind(this)),
);
// SystemAccountSynced event - sync system account contribution (from contribution-service)
this.cdcConsumer.registerServiceHandler(
'SystemAccountSynced',
this.withIdempotency(this.handleSystemAccountSynced.bind(this)),
);
// SystemContributionRecordCreated event - sync system account contribution detail records (from contribution-service)
this.cdcConsumer.registerServiceHandler(
'SystemContributionRecordCreated',
this.withIdempotency(this.handleSystemContributionRecordCreated.bind(this)),
);
// ReferralSynced event - sync referral relationships
this.cdcConsumer.registerServiceHandler(
'ReferralSynced',
@ -363,12 +353,6 @@ export class CdcSyncService implements OnModuleInit {
this.withIdempotency(this.walletHandlers.handleFeeConfigUpdated.bind(this.walletHandlers)),
);
// CONTRIBUTION_CREDITED event - update the user wallet when contribution is credited
this.cdcConsumer.registerServiceHandler(
'CONTRIBUTION_CREDITED',
this.withIdempotency(this.handleContributionCredited.bind(this)),
);
this.logger.log('CDC sync handlers registered with idempotency protection');
}
@ -540,165 +524,20 @@ export class CdcSyncService implements OnModuleInit {
private async handleSystemContributionUpdated(event: ServiceEvent, tx: TransactionClient): Promise<void> {
const { payload } = event;
const accountType = payload.accountType;
await tx.syncedSystemContribution.upsert({
const regionCode = payload.regionCode || null;
where: { accountType: payload.accountType },
// Find all matching records (handles possible duplicates)
// Note: PostgreSQL treats NULL != NULL, so the unique constraint does not apply when regionCode is NULL
const existingRecords = await tx.syncedSystemContribution.findMany({
where: {
accountType,
regionCode: regionCode === null ? { equals: null } : regionCode,
},
orderBy: { syncedAt: 'asc' },
});
if (existingRecords.length > 0) {
await tx.syncedSystemContribution.update({
where: { id: existingRecords[0].id },
data: {
name: payload.name,
contributionBalance: payload.contributionBalance,
contributionNeverExpires: payload.contributionNeverExpires,
},
});
// 删除重复记录
if (existingRecords.length > 1) {
const duplicateIds = existingRecords.slice(1).map(r => r.id);
await tx.syncedSystemContribution.deleteMany({
where: { id: { in: duplicateIds } },
});
this.logger.warn(
`Deleted ${duplicateIds.length} duplicate system contribution records for ${accountType}:${regionCode}`,
);
}
} else {
await tx.syncedSystemContribution.create({
data: {
accountType,
regionCode,
name: payload.name,
contributionBalance: payload.contributionBalance || 0,
contributionNeverExpires: payload.contributionNeverExpires || false,
},
});
}
}
/**
* SystemAccountSynced - sync system account contribution (from contribution-service)
* accountType: OPERATION / PROVINCE / CITY / HEADQUARTERS
* regionCode: province/city code, e.g. 440000, 440100
*
* Note: PostgreSQL treats NULL != NULL, so @@unique([accountType, regionCode])
* does not deduplicate rows when regionCode is NULL.
*/
private async handleSystemAccountSynced(event: ServiceEvent, tx: TransactionClient): Promise<void> {
const { payload } = event;
const accountType = payload.accountType; // OPERATION / PROVINCE / CITY / HEADQUARTERS
const regionCode = payload.regionCode || null;
// 查找所有匹配的记录(处理可能存在的重复记录情况)
const existingRecords = await tx.syncedSystemContribution.findMany({
where: {
accountType,
regionCode: regionCode === null ? { equals: null } : regionCode,
},
orderBy: { syncedAt: 'asc' }, // 保留最早创建的记录
});
if (existingRecords.length > 0) {
// 更新第一条记录
await tx.syncedSystemContribution.update({
where: { id: existingRecords[0].id },
data: {
name: payload.name,
contributionBalance: payload.contributionBalance,
},
});
// 如果存在重复记录,删除多余的(只保留第一条)
if (existingRecords.length > 1) {
const duplicateIds = existingRecords.slice(1).map(r => r.id);
await tx.syncedSystemContribution.deleteMany({
where: { id: { in: duplicateIds } },
});
this.logger.warn(
`Deleted ${duplicateIds.length} duplicate system contribution records for ${accountType}:${regionCode}`,
);
}
} else {
await tx.syncedSystemContribution.create({
data: {
accountType,
regionCode,
name: payload.name,
contributionBalance: payload.contributionBalance || 0,
contributionNeverExpires: true, // 系统账户算力永不过期
},
});
}
}
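The doc comments above note that PostgreSQL treats NULL != NULL, so the @@unique([accountType, regionCode]) constraint cannot deduplicate rows whose regionCode is NULL. A minimal sketch restating the query pattern the handlers use (tx is the Prisma transaction client from the handler; the account values are examples):

// Duplicate rows with regionCode = NULL are possible, so the handlers avoid findUnique:
const rows = await tx.syncedSystemContribution.findMany({
  where: { accountType: 'HEADQUARTERS', regionCode: { equals: null } }, // Prisma needs { equals: null } for NULL
  orderBy: { syncedAt: 'asc' },
});
const primary = rows[0];          // earliest record is kept and updated
const duplicates = rows.slice(1); // any later duplicates are deleted by the handler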
/**
* SystemContributionRecordCreated - sync system account contribution detail records
* (from contribution-service)
*/
private async handleSystemContributionRecordCreated(event: ServiceEvent, tx: TransactionClient): Promise<void> {
const { payload } = event;
// contribution-service uses the systemAccountType field; handle both shapes
const systemAccountType = payload.systemAccountType || payload.accountType;
// Parse systemAccountType, which may be "PROVINCE_440000" or "PROVINCE"
let accountType: string;
let regionCode: string | null = null;
if (systemAccountType?.includes('_')) {
const parts = systemAccountType.split('_');
accountType = parts[0];
regionCode = parts.slice(1).join('_');
} else {
accountType = systemAccountType;
regionCode = payload.regionCode || null;
}
await tx.syncedSystemContributionRecord.upsert({
where: { originalRecordId: BigInt(payload.recordId) },
create: {
originalRecordId: BigInt(payload.recordId),
accountType: payload.accountType,
accountType,
name: payload.name,
regionCode,
contributionBalance: payload.contributionBalance || 0,
sourceAdoptionId: BigInt(payload.sourceAdoptionId),
contributionNeverExpires: payload.contributionNeverExpires || false,
sourceAccountSequence: payload.sourceAccountSequence,
sourceType: payload.sourceType || 'FIXED_RATE', // source type
levelDepth: payload.levelDepth ?? null, // level depth
distributionRate: payload.distributionRate,
amount: payload.amount,
effectiveDate: new Date(payload.effectiveDate),
expireDate: payload.expireDate ? new Date(payload.expireDate) : null,
isExpired: false,
createdAt: new Date(payload.createdAt),
},
update: {
accountType,
name: payload.name,
regionCode,
contributionBalance: payload.contributionBalance,
sourceAdoptionId: BigInt(payload.sourceAdoptionId),
contributionNeverExpires: payload.contributionNeverExpires,
sourceAccountSequence: payload.sourceAccountSequence,
sourceType: payload.sourceType || 'FIXED_RATE',
levelDepth: payload.levelDepth ?? null,
distributionRate: payload.distributionRate,
amount: payload.amount,
effectiveDate: new Date(payload.effectiveDate),
expireDate: payload.expireDate ? new Date(payload.expireDate) : null,
}, },
});
this.logger.debug(
`Synced system contribution record: recordId=${payload.recordId}, account=${accountType}:${regionCode}, amount=${payload.amount}`,
);
}
/**
@ -857,9 +696,6 @@ export class CdcSyncService implements OnModuleInit {
const { payload } = event;
// Keep only one mining config record
await tx.syncedMiningConfig.deleteMany({});
// mining-service publishes secondDistribution; compute minuteDistribution = secondDistribution * 60
const secondDistribution = parseFloat(payload.secondDistribution || '0');
const minuteDistribution = payload.minuteDistribution || (secondDistribution * 60).toString();
await tx.syncedMiningConfig.create({
data: {
totalShares: payload.totalShares,
@ -867,7 +703,7 @@ export class CdcSyncService implements OnModuleInit {
remainingDistribution: payload.remainingDistribution,
halvingPeriodYears: payload.halvingPeriodYears,
currentEra: payload.currentEra || 1,
minuteDistribution: minuteDistribution,
minuteDistribution: payload.minuteDistribution,
isActive: payload.isActive || false,
activatedAt: payload.activatedAt ? new Date(payload.activatedAt) : null,
},
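A worked example of the removed conversion, with an assumed sample value for secondDistribution (the real value comes from the mining-service payload):

// Hypothetical: if payload.secondDistribution = '0.01' and no minuteDistribution is provided,
// the admin side derives minuteDistribution = 0.01 * 60 = '0.6'.
const secondDistribution = parseFloat('0.01');                   // assumed sample value
const minuteDistribution = (secondDistribution * 60).toString(); // '0.6'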
@ -977,60 +813,4 @@ export class CdcSyncService implements OnModuleInit {
this.logger.debug('Synced circulation pool');
}
// ===========================================================================
// Wallet event handlers (mining-wallet-service)
// ===========================================================================
/**
* CONTRIBUTION_CREDITED - published by mining-wallet-service when contribution is credited to a user wallet
* payload: { accountSequence, walletType, amount, balanceAfter, transactionId, ... }
*/
private async handleContributionCredited(event: ServiceEvent, tx: TransactionClient): Promise<void> {
const { payload } = event;
const walletType = payload.walletType || 'CONTRIBUTION';
// 先查找是否已存在
const existing = await tx.syncedUserWallet.findUnique({
where: {
accountSequence_walletType: {
accountSequence: payload.accountSequence,
walletType,
},
},
});
if (existing) {
// 更新余额(使用最新的 balanceAfter
await tx.syncedUserWallet.update({
where: { id: existing.id },
data: {
balance: payload.balanceAfter,
totalInflow: {
increment: parseFloat(payload.amount) || 0,
},
},
});
} else {
// Create a new wallet record
// originalId is a stable ID derived from accountSequence + walletType
const originalId = `wallet-${payload.accountSequence}-${walletType}`;
await tx.syncedUserWallet.create({
data: {
originalId,
accountSequence: payload.accountSequence,
walletType,
balance: payload.balanceAfter || 0,
frozenBalance: 0,
totalInflow: parseFloat(payload.amount) || 0,
totalOutflow: 0,
isActive: true,
},
});
}
this.logger.debug(`Synced user wallet from CONTRIBUTION_CREDITED: ${payload.accountSequence}, balance: ${payload.balanceAfter}`);
}
}
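An example payload for the CONTRIBUTION_CREDITED handler above, following the field list in its doc comment; the concrete values are hypothetical.

// Hypothetical event payload:
const examplePayload = {
  accountSequence: 'A000001',
  walletType: 'CONTRIBUTION',
  amount: '12.5',
  balanceAfter: '112.5',
  transactionId: 'tx-123',
};
// The handler sets the synced wallet's balance to balanceAfter and increments totalInflow by amount,
// creating the row (originalId = 'wallet-A000001-CONTRIBUTION') if it does not exist yet.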

View File

@ -21,8 +21,8 @@ KAFKA_GROUP_ID=mining-service-group
JWT_SECRET=your-jwt-secret-key
# Mining Configuration
TOTAL_SHARES=10002000000
TOTAL_SHARES=100020000000
DISTRIBUTION_POOL=2000000
DISTRIBUTION_POOL=200000000
INITIAL_PRICE=1
HALVING_PERIOD_YEARS=2
BURN_TARGET=10000000000

View File

@ -14,7 +14,7 @@ RUN npm ci
RUN DATABASE_URL="postgresql://user:pass@localhost:5432/db" npx prisma generate
COPY src ./src
RUN npm run build && ls -la dist/ && test -f dist/main.js && echo "Build successful: dist/main.js exists"
RUN npm run build
# Stage 2: production runtime
FROM node:20-alpine AS runner
@ -30,16 +30,14 @@ WORKDIR /app
USER nestjs
COPY --chown=nestjs:nodejs package*.json ./
COPY --chown=nestjs:nodejs tsconfig*.json ./
RUN npm ci --only=production && npm cache clean --force
RUN npm ci --only=production && npm install ts-node typescript @types/node --save-dev && npm cache clean --force
COPY --chown=nestjs:nodejs prisma ./prisma/
RUN DATABASE_URL="postgresql://user:pass@localhost:5432/db" npx prisma generate
COPY --chown=nestjs:nodejs --from=builder /app/dist ./dist
RUN ls -la dist/ && test -f dist/main.js && echo "Copy successful: dist/main.js exists"
RUN printf '#!/bin/sh\nset -e\necho "Running database migrations..."\nnpx prisma migrate deploy\necho "Running database seed..."\nnpx prisma db seed || echo "Seed skipped or already applied"\necho "Starting application..."\nexec node dist/main.js\n' > /app/start.sh && chmod +x /app/start.sh
RUN printf '#!/bin/sh\nset -e\necho "Running database migrations..."\nnpx prisma migrate deploy\necho "Starting application..."\nexec node dist/main.js\n' > /app/start.sh && chmod +x /app/start.sh
ENV NODE_ENV=production
ENV TZ=Asia/Shanghai

View File

@ -16,8 +16,7 @@
"prisma:generate": "prisma generate",
"prisma:migrate": "prisma migrate dev",
"prisma:migrate:prod": "prisma migrate deploy",
"prisma:studio": "prisma studio",
"prisma:studio": "prisma studio"
"prisma:seed": "ts-node prisma/seed.ts"
},
"dependencies": {
"@nestjs/common": "^10.3.0",
@ -38,9 +37,6 @@
"rxjs": "^7.8.1",
"swagger-ui-express": "^5.0.0"
},
"prisma": {
"seed": "ts-node prisma/seed.ts"
},
"devDependencies": {
"@nestjs/cli": "^10.2.1",
"@nestjs/schematics": "^10.0.3",

View File

@ -1,7 +1,6 @@
-- ============================================================================
-- mining-service initial migration
-- Merged from: 0001_init, 0002_minute_to_second, 0003_add_system_accounts_and_pending_mining,
-- Merged from: 20260111000000_init (only one migration, nothing to merge)
-- 20250120000001_add_region_to_system_mining_accounts
-- ============================================================================
-- CreateEnum
@ -22,11 +21,7 @@ CREATE TABLE "mining_configs" (
"halvingPeriodYears" INTEGER NOT NULL DEFAULT 2,
"currentEra" INTEGER NOT NULL DEFAULT 1,
"eraStartDate" TIMESTAMP(3) NOT NULL,
"secondDistribution" DECIMAL(30,18) NOT NULL,
"minuteDistribution" DECIMAL(30,18) NOT NULL,
"network_total_contribution" DECIMAL(30, 8) NOT NULL DEFAULT 0,
"total_tree_count" INTEGER NOT NULL DEFAULT 0,
"contribution_per_tree" DECIMAL(20, 10) NOT NULL DEFAULT 22617,
"network_last_synced_at" TIMESTAMP(3),
"isActive" BOOLEAN NOT NULL DEFAULT false,
"activatedAt" TIMESTAMP(3),
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
@ -43,7 +38,7 @@ CREATE TABLE "mining_eras" (
"endDate" TIMESTAMP(3),
"initialDistribution" DECIMAL(30,8) NOT NULL,
"totalDistributed" DECIMAL(30,8) NOT NULL DEFAULT 0,
"secondDistribution" DECIMAL(30,18) NOT NULL,
"minuteDistribution" DECIMAL(30,18) NOT NULL,
"isActive" BOOLEAN NOT NULL DEFAULT true,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
@ -72,7 +67,7 @@ CREATE TABLE "mining_records" (
"miningMinute" TIMESTAMP(3) NOT NULL,
"contributionRatio" DECIMAL(30,18) NOT NULL,
"totalContribution" DECIMAL(30,8) NOT NULL,
"secondDistribution" DECIMAL(30,18) NOT NULL,
"minuteDistribution" DECIMAL(30,18) NOT NULL,
"minedAmount" DECIMAL(30,18) NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
@ -99,90 +94,6 @@ CREATE TABLE "mining_transactions" (
CONSTRAINT "mining_transactions_pkey" PRIMARY KEY ("id")
);
-- CreateTable: 系统挖矿账户
CREATE TABLE "system_mining_accounts" (
"id" TEXT NOT NULL,
"account_type" TEXT NOT NULL,
"region_code" TEXT,
"name" TEXT NOT NULL,
"totalMined" DECIMAL(30, 8) NOT NULL DEFAULT 0,
"availableBalance" DECIMAL(30, 8) NOT NULL DEFAULT 0,
"totalContribution" DECIMAL(30, 8) NOT NULL DEFAULT 0,
"last_synced_at" TIMESTAMP(3),
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP(3) NOT NULL,
CONSTRAINT "system_mining_accounts_pkey" PRIMARY KEY ("id")
);
-- CreateTable: 系统账户挖矿记录
CREATE TABLE "system_mining_records" (
"id" TEXT NOT NULL,
"system_account_id" TEXT NOT NULL,
"mining_minute" TIMESTAMP(3) NOT NULL,
"contribution_ratio" DECIMAL(30, 18) NOT NULL,
"total_contribution" DECIMAL(30, 8) NOT NULL,
"second_distribution" DECIMAL(30, 18) NOT NULL,
"mined_amount" DECIMAL(30, 18) NOT NULL,
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "system_mining_records_pkey" PRIMARY KEY ("id")
);
-- CreateTable: 系统账户交易流水
CREATE TABLE "system_mining_transactions" (
"id" TEXT NOT NULL,
"system_account_id" TEXT NOT NULL,
"type" TEXT NOT NULL,
"amount" DECIMAL(30, 8) NOT NULL,
"balance_before" DECIMAL(30, 8) NOT NULL,
"balance_after" DECIMAL(30, 8) NOT NULL,
"reference_id" TEXT,
"reference_type" TEXT,
"memo" TEXT,
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "system_mining_transactions_pkey" PRIMARY KEY ("id")
);
-- CreateTable: 待解锁算力挖矿
CREATE TABLE "pending_contribution_mining" (
"id" BIGSERIAL NOT NULL,
"source_adoption_id" BIGINT NOT NULL,
"source_account_sequence" VARCHAR(20) NOT NULL,
"would_be_account_sequence" VARCHAR(20),
"contribution_type" VARCHAR(30) NOT NULL,
"amount" DECIMAL(30, 10) NOT NULL,
"reason" VARCHAR(200),
"effective_date" DATE NOT NULL,
"expire_date" DATE NOT NULL,
"is_expired" BOOLEAN NOT NULL DEFAULT false,
"last_synced_at" TIMESTAMP(3),
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "pending_contribution_mining_pkey" PRIMARY KEY ("id")
);
-- CreateTable: 待解锁算力挖矿记录
CREATE TABLE "pending_mining_records" (
"id" BIGSERIAL NOT NULL,
"pending_contribution_id" BIGINT NOT NULL,
"mining_minute" TIMESTAMP(3) NOT NULL,
"source_adoption_id" BIGINT NOT NULL,
"source_account_sequence" VARCHAR(20) NOT NULL,
"would_be_account_sequence" VARCHAR(20),
"contribution_type" VARCHAR(30) NOT NULL,
"contribution_amount" DECIMAL(30, 10) NOT NULL,
"network_total_contribution" DECIMAL(30, 10) NOT NULL,
"contribution_ratio" DECIMAL(30, 18) NOT NULL,
"second_distribution" DECIMAL(30, 18) NOT NULL,
"mined_amount" DECIMAL(30, 18) NOT NULL,
"allocated_to" VARCHAR(20) NOT NULL DEFAULT 'HEADQUARTERS',
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "pending_mining_records_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "mining_reward_allocations" (
"id" BIGSERIAL NOT NULL,
@ -405,33 +316,6 @@ CREATE INDEX "mining_transactions_counterparty_account_seq_idx" ON "mining_trans
-- CreateIndex
CREATE INDEX "mining_transactions_counterparty_user_id_idx" ON "mining_transactions"("counterparty_user_id");
-- CreateIndex: system_mining_accounts
CREATE UNIQUE INDEX "system_mining_accounts_account_type_region_code_key" ON "system_mining_accounts"("account_type", "region_code");
CREATE INDEX "system_mining_accounts_totalContribution_idx" ON "system_mining_accounts"("totalContribution" DESC);
CREATE INDEX "system_mining_accounts_account_type_idx" ON "system_mining_accounts"("account_type");
CREATE INDEX "system_mining_accounts_region_code_idx" ON "system_mining_accounts"("region_code");
-- CreateIndex: system_mining_records
CREATE UNIQUE INDEX "system_mining_records_system_account_id_mining_minute_key" ON "system_mining_records"("system_account_id", "mining_minute");
CREATE INDEX "system_mining_records_mining_minute_idx" ON "system_mining_records"("mining_minute");
-- CreateIndex: system_mining_transactions
CREATE INDEX "system_mining_transactions_system_account_id_created_at_idx" ON "system_mining_transactions"("system_account_id", "created_at" DESC);
-- CreateIndex: pending_contribution_mining
CREATE UNIQUE INDEX "pending_contribution_mining_source_adoption_id_would_be_acco_key"
ON "pending_contribution_mining"("source_adoption_id", "would_be_account_sequence", "contribution_type");
CREATE INDEX "pending_contribution_mining_would_be_account_sequence_idx" ON "pending_contribution_mining"("would_be_account_sequence");
CREATE INDEX "pending_contribution_mining_contribution_type_idx" ON "pending_contribution_mining"("contribution_type");
CREATE INDEX "pending_contribution_mining_is_expired_idx" ON "pending_contribution_mining"("is_expired");
-- CreateIndex: pending_mining_records
CREATE UNIQUE INDEX "pending_mining_records_pending_contribution_id_mining_minute_key"
ON "pending_mining_records"("pending_contribution_id", "mining_minute");
CREATE INDEX "pending_mining_records_mining_minute_idx" ON "pending_mining_records"("mining_minute");
CREATE INDEX "pending_mining_records_source_account_sequence_idx" ON "pending_mining_records"("source_account_sequence");
CREATE INDEX "pending_mining_records_would_be_account_sequence_idx" ON "pending_mining_records"("would_be_account_sequence");
-- CreateIndex
CREATE INDEX "mining_reward_allocations_mining_date_idx" ON "mining_reward_allocations"("mining_date");
@@ -531,27 +415,8 @@ ALTER TABLE "mining_records" ADD CONSTRAINT "mining_records_accountSequence_fkey
-- AddForeignKey
ALTER TABLE "mining_transactions" ADD CONSTRAINT "mining_transactions_accountSequence_fkey" FOREIGN KEY ("accountSequence") REFERENCES "mining_accounts"("accountSequence") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey: system_mining_records
ALTER TABLE "system_mining_records" ADD CONSTRAINT "system_mining_records_system_account_id_fkey"
FOREIGN KEY ("system_account_id") REFERENCES "system_mining_accounts"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey: system_mining_transactions
ALTER TABLE "system_mining_transactions" ADD CONSTRAINT "system_mining_transactions_system_account_id_fkey"
FOREIGN KEY ("system_account_id") REFERENCES "system_mining_accounts"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey: pending_mining_records
ALTER TABLE "pending_mining_records" ADD CONSTRAINT "pending_mining_records_pending_contribution_id_fkey"
FOREIGN KEY ("pending_contribution_id") REFERENCES "pending_contribution_mining"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "burn_records" ADD CONSTRAINT "burn_records_blackHoleId_fkey" FOREIGN KEY ("blackHoleId") REFERENCES "black_holes"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "pool_transactions" ADD CONSTRAINT "pool_transactions_pool_account_id_fkey" FOREIGN KEY ("pool_account_id") REFERENCES "pool_accounts"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- Initialize system accounts (aggregate accounts with no regionCode)
INSERT INTO "system_mining_accounts" ("id", "account_type", "region_code", "name", "totalMined", "availableBalance", "totalContribution", "updated_at")
VALUES
(gen_random_uuid(), 'OPERATION', NULL, '运营账户', 0, 0, 0, NOW()),
(gen_random_uuid(), 'HEADQUARTERS', NULL, '总部账户', 0, 0, 0, NOW())
ON CONFLICT ("account_type", "region_code") DO NOTHING;
Some files were not shown because too many files have changed in this diff.