// 2026-03-12 15:09:25 +08:00
|
|
|
import { v4 as uuidv4 } from 'uuid'
|
|
|
|
|
import { fetchTransactions, insertTransaction, mergeImportedTransaction } from './transactionService.js'
|
|
|
|
|
import { getDb, saveDbToStore } from '../lib/sqlite.js'
|
|
|
|
|
|
|
|
|
|
// Identifier stored in transactions' sourceType for rows imported from Alipay.
const IMPORT_SOURCE_ALIPAY = 'alipay'

// Only rows whose 交易状态 ("transaction status") equals this value
// ("transaction successful") are imported; other statuses are skipped.
const IMPORT_SUCCESS_STATUS = '交易成功'

// All of these headers must appear on a single row for that row to be
// recognized as the Alipay statement header row.
const ALIPAY_REQUIRED_HEADERS = ['交易时间', '交易分类', '交易对方', '商品说明', '收/支', '金额', '交易状态']

// Maps internal field names to the Chinese column headers used by Alipay
// statement exports; used to locate each column by header text.
const ALIPAY_HEADERS = {
  occurredAt: '交易时间', // transaction time
  tradeCategory: '交易分类', // Alipay's own category
  merchant: '交易对方', // counterparty name
  counterpartyAccount: '对方账号', // counterparty account
  productName: '商品说明', // product / description
  direction: '收/支', // income / expense direction
  amount: '金额', // amount
  paymentMethod: '收/付款方式', // payment method
  status: '交易状态', // transaction status
  orderId: '交易订单号', // Alipay order id
  merchantOrderId: '商家订单号', // merchant order id
  note: '备注', // free-form note
}
|
|
|
|
|
|
|
|
|
|
/**
 * Trigger a browser download of `text` as a file named `filename`.
 * Builds a Blob-backed object URL, clicks a temporary hidden anchor,
 * then removes the anchor and revokes the URL so nothing leaks.
 */
const downloadText = (filename, text, mimeType) => {
  const blob = new Blob([text], { type: mimeType })
  const objectUrl = URL.createObjectURL(blob)

  const anchor = document.createElement('a')
  anchor.href = objectUrl
  anchor.download = filename

  document.body.appendChild(anchor)
  anchor.click()
  document.body.removeChild(anchor)

  URL.revokeObjectURL(objectUrl)
}
|
|
|
|
|
|
|
|
|
|
// Quote a value for CSV output, doubling embedded double quotes (RFC 4180).
// null/undefined become an empty quoted cell.
const escapeCsvCell = (value) => {
  const text = String(value ?? '')
  return `"${text.replace(/"/g, '""')}"`
}
|
|
|
|
|
|
|
|
|
|
// Normalize a raw statement cell: drop a leading BOM, all carriage returns,
// one leading and one trailing double quote, then trim whitespace.
const cleanCell = (value) => {
  let text = String(value ?? '')
  text = text.replace(/^\uFEFF/, '')
  text = text.replace(/\r/g, '')
  text = text.replace(/^"|"$/g, '')
  return text.trim()
}
|
|
|
|
|
|
|
|
|
|
// Reduce a merchant/note string to a comparison key: lowercase, strip
// whitespace plus ASCII and full-width punctuation, cap at 80 characters.
const normalizeMerchantKey = (value) => {
  const lowered = String(value || '').toLowerCase()
  const stripped = lowered.replace(/[\s"'`~!@#$%^&*()_+\-=[\]{};:,.<>/?\\|,。!?;:()【】《》·、]/g, '')
  return stripped.slice(0, 80)
}
|
|
|
|
|
|
|
|
|
|
// Parse an amount string such as "¥1,234.56" into a number by keeping only
// digits, dots, and minus signs. Unparseable input yields 0.
const normalizeAmount = (value) => {
  const digitsOnly = String(value ?? '').replace(/[^\d.-]/g, '')
  const parsed = Number(digitsOnly)
  return Number.isFinite(parsed) ? parsed : 0
}
|
|
|
|
|
|
|
|
|
|
/**
 * Convert an Alipay timestamp (e.g. "2023/01/02 12:34:56") into an ISO-8601
 * string, or '' when the value cannot be parsed.
 *
 * "YYYY-MM-DD HH:MM:SS" is not a format the Date constructor is required to
 * accept (support is implementation-defined; JavaScriptCore historically
 * returned Invalid Date for it), so strings with an explicit time part are
 * parsed manually and interpreted as local time — the same result V8
 * produces for `new Date(normalized)`. Everything else, including date-only
 * and full ISO strings, still goes through the Date constructor.
 *
 * @param {string} value - Raw timestamp text from the statement.
 * @returns {string} ISO-8601 timestamp, or '' on failure.
 */
const normalizeDate = (value) => {
  if (!value) return ''
  const normalized = String(value).trim().replace(/\//g, '-')

  // Explicit local-time parse for "YYYY-MM-DD HH:MM[:SS]" (and the "T" variant).
  const match = normalized.match(/^(\d{4})-(\d{1,2})-(\d{1,2})[ T](\d{1,2}):(\d{1,2})(?::(\d{1,2}))?$/)
  if (match) {
    const [, year, month, day, hour, minute, second] = match
    const parsed = new Date(
      Number(year),
      Number(month) - 1, // Date months are 0-indexed
      Number(day),
      Number(hour),
      Number(minute),
      Number(second || 0),
    )
    return Number.isNaN(parsed.getTime()) ? '' : parsed.toISOString()
  }

  const parsed = new Date(normalized)
  return Number.isNaN(parsed.getTime()) ? '' : parsed.toISOString()
}
|
|
|
|
|
|
|
|
|
|
/**
 * Split one CSV/TSV line into cleaned cells, honoring double-quoted fields
 * and `""` escapes inside them. Delimiters inside quotes are literal text.
 */
const parseDelimitedLine = (line, delimiter) => {
  const cells = []
  let buffer = ''
  let quoted = false

  let position = 0
  while (position < line.length) {
    const char = line[position]

    if (char === '"') {
      if (quoted && line[position + 1] === '"') {
        // Escaped quote inside a quoted field.
        buffer += '"'
        position += 2
        continue
      }
      quoted = !quoted
      position += 1
      continue
    }

    if (char === delimiter && !quoted) {
      cells.push(cleanCell(buffer))
      buffer = ''
      position += 1
      continue
    }

    buffer += char
    position += 1
  }

  // Flush the final cell (lines never end with a trailing delimiter marker).
  cells.push(cleanCell(buffer))
  return cells
}
|
|
|
|
|
|
|
|
|
|
// Index of the first row containing every required Alipay header, or -1
// when no such row exists.
const detectHeaderIndex = (rows) => {
  for (let index = 0; index < rows.length; index += 1) {
    const hasAllHeaders = ALIPAY_REQUIRED_HEADERS.every((header) => rows[index].includes(header))
    if (hasAllHeaders) return index
  }
  return -1
}
|
|
|
|
|
|
|
|
|
|
// Parse an HTML statement export and return its <tr> rows as arrays of
// cleaned cell strings (both <th> and <td> cells are taken).
const extractRowsFromHtml = (content) => {
  const doc = new DOMParser().parseFromString(content, 'text/html')
  const tableRows = Array.from(doc.querySelectorAll('tr'))
  return tableRows.map((row) => {
    const cells = Array.from(row.querySelectorAll('th,td'))
    return cells.map((cell) => cleanCell(cell.textContent))
  })
}
|
|
|
|
|
|
|
|
|
|
// Split a text statement export into rows of cells. The delimiter (tab vs
// comma) is guessed from whichever splits the header line into more cells;
// ties go to tab.
const extractRowsFromText = (content) => {
  const lines = content.split(/\r?\n/).filter((line) => line.trim())
  const headerLine = lines.find((line) => ALIPAY_REQUIRED_HEADERS.every((header) => line.includes(header))) || ''
  const tabCells = headerLine.split('\t').length
  const commaCells = headerLine.split(',').length
  const delimiter = tabCells >= commaCells ? '\t' : ','
  return lines.map((line) => parseDelimitedLine(line, delimiter))
}
|
|
|
|
|
|
|
|
|
|
/**
 * Parse raw Alipay statement content (HTML table export or CSV/TSV text)
 * into row objects keyed by the internal field names of ALIPAY_HEADERS.
 * Rows above the header row and rows without a transaction time are dropped.
 *
 * @param {string} content - Raw file contents.
 * @returns {object[]} Parsed rows (all fields are cleaned strings).
 * @throws {Error} when no row contains all required Alipay headers.
 */
const parseAlipayRows = (content) => {
  const rows = /<table/i.test(content) ? extractRowsFromHtml(content) : extractRowsFromText(content)
  const headerIndex = detectHeaderIndex(rows)
  if (headerIndex < 0) {
    throw new Error('未找到支付宝账单表头,请确认导出文件内容完整')
  }

  const header = rows[headerIndex]
  // Resolve each field's column index once, instead of calling indexOf on the
  // header for every cell of every row. A missing column yields index -1,
  // which reads as undefined and falls back to ''.
  const columnIndex = Object.fromEntries(
    Object.entries(ALIPAY_HEADERS).map(([key, label]) => [key, header.indexOf(label)]),
  )
  const getCell = (row, key) => cleanCell(row[columnIndex[key]] || '')

  return rows
    .slice(headerIndex + 1)
    .filter((row) => getCell(row, 'occurredAt'))
    .map((row) => ({
      occurredAt: getCell(row, 'occurredAt'),
      tradeCategory: getCell(row, 'tradeCategory'),
      merchant: getCell(row, 'merchant'),
      counterpartyAccount: getCell(row, 'counterpartyAccount'),
      productName: getCell(row, 'productName'),
      direction: getCell(row, 'direction'),
      amount: getCell(row, 'amount'),
      paymentMethod: getCell(row, 'paymentMethod'),
      status: getCell(row, 'status'),
      orderId: getCell(row, 'orderId'),
      merchantOrderId: getCell(row, 'merchantOrderId'),
      note: getCell(row, 'note'),
    }))
}
|
|
|
|
|
|
|
|
|
|
// Map an imported row to an internal category. Transfer/income directions
// win outright; expenses are classified by keyword heuristics over the
// combined category + product + merchant text; otherwise fall through to a
// generic bucket.
const resolveCategory = (row, direction) => {
  if (direction === 'neutral') return 'Transfer'
  if (direction === 'income') return 'Income'

  const haystack = `${row.tradeCategory} ${row.productName} ${row.merchant}`.toLowerCase()
  const keywordRules = [
    ['Food', /餐饮|美食|咖啡|奶茶|外卖|饭|面|粉/],
    ['Transport', /交通|出行|打车|公交|地铁|单车|骑行|哈啰|滴滴/],
    ['Groceries', /超市|便利|百货|买菜|生鲜|7-11|罗森|全家/],
    ['Health', /健康|药|医院|诊所|体检/],
    ['Entertainment', /娱乐|电影|游戏|门票|演出|网吧/],
  ]
  for (const [category, pattern] of keywordRules) {
    if (pattern.test(haystack)) return category
  }
  return direction === 'expense' ? 'Expense' : 'Uncategorized'
}
|
|
|
|
|
|
|
|
|
|
/**
 * Infer the funding account type/name from an Alipay payment-method string.
 * Empty input defaults to the cash account with a generic name; otherwise the
 * keyword rules run from most to least specific (credit products, bank cards,
 * wallet balance, other stored-value cards) and keep the original text as the
 * account name.
 */
const resolveFundingSource = (paymentMethod) => {
  const text = String(paymentMethod || '')
  if (!text) return { fundSourceType: 'cash', fundSourceName: '现金账户' }

  const keywordRules = [
    [/信用卡|花呗|白条/, 'credit'],
    [/储蓄卡|银行卡|银行/, 'bank'],
    [/余额|零钱|账户余额/, 'cash'],
    [/卡/, 'stored_value'],
  ]
  for (const [pattern, fundSourceType] of keywordRules) {
    if (pattern.test(text)) {
      return { fundSourceType, fundSourceName: text }
    }
  }
  return { fundSourceType: 'cash', fundSourceName: text }
}
|
|
|
|
|
|
|
|
|
|
// Build a stable fingerprint for an imported payload so the same statement
// line can be recognized on re-import: source|date|amount|merchantKey|noteKey.
const buildSourceRecordHash = (payload) => {
  const fingerprintParts = [
    payload.sourceType,
    payload.date,
    payload.amount.toFixed(2),
    normalizeMerchantKey(payload.merchant),
    normalizeMerchantKey(payload.note),
  ]
  return fingerprintParts.join('|')
}
|
|
|
|
|
|
|
|
|
|
/**
 * Convert a parsed Alipay statement row into a transaction payload ready for
 * insertTransaction / mergeImportedTransaction.
 *
 * @param {object} row - Row produced by parseAlipayRows (string fields).
 * @param {string} importBatchId - Id of the import batch this row belongs to.
 * @returns {object|null} Transaction payload, or null when the row's
 *   transaction time cannot be parsed.
 */
const normalizeImportedRow = (row, importBatchId) => {
  const normalizedDate = normalizeDate(row.occurredAt)
  if (!normalizedDate) return null

  const directionText = row.direction
  // Statement amounts are unsigned; the sign is applied below by direction.
  const rawAmount = Math.abs(normalizeAmount(row.amount))
  // 收入 -> income, 支出 -> expense, anything else -> neutral (transfer-like).
  const direction =
    directionText.includes('收入') ? 'income' : directionText.includes('支出') ? 'expense' : 'neutral'

  const { fundSourceType, fundSourceName } = resolveFundingSource(row.paymentMethod)
  // Fall back to the product description, then a generic import label.
  const merchant = row.merchant || row.productName || '支付宝账单导入'
  const note = [row.productName, row.note].filter(Boolean).join(' | ')
  // Expenses are stored negative; the rawAmount === 0 guard avoids -0.
  const amount =
    direction === 'income' ? rawAmount : direction === 'expense' ? (rawAmount === 0 ? 0 : rawAmount * -1) : rawAmount

  const payload = {
    merchant,
    category: resolveCategory(row, direction),
    note,
    date: normalizedDate,
    amount,
    syncStatus: 'pending', // imported rows still need to be synced
    entryType: direction === 'neutral' ? 'transfer' : direction,
    fundSourceType,
    fundSourceName,
    // Income lands in the cash account; neutral rows go to a stored-value
    // account; expenses go to the merchant.
    fundTargetType: direction === 'income' ? 'cash' : direction === 'neutral' ? 'stored_value' : 'merchant',
    fundTargetName:
      direction === 'income' ? '现金账户' : direction === 'neutral' ? merchant || '储值账户' : merchant,
    impactExpense: direction === 'expense',
    impactIncome: direction === 'income',
    // Provenance fields used for de-duplication on later imports.
    sourceType: IMPORT_SOURCE_ALIPAY,
    sourceOrderId: row.orderId,
    sourceMerchantOrderId: row.merchantOrderId,
    sourceCounterparty: row.merchant,
    sourceAccount: row.counterpartyAccount,
    sourcePaymentMethod: row.paymentMethod,
    importBatchId,
  }

  // The fingerprint depends on fields above, so it is added after construction.
  payload.sourceRecordHash = buildSourceRecordHash(payload)
  return payload
}
|
|
|
|
|
|
|
|
|
|
// Amounts are "equal" when they differ by less than half a cent, absorbing
// floating-point noise from parsing.
const isSameAmount = (left, right) => {
  const delta = Number(left) - Number(right)
  return Math.abs(delta) < 0.005
}
|
|
|
|
|
|
|
|
|
|
// True when two merchant strings normalize to the same key or one key
// contains the other (e.g. "星巴克" vs "星巴克咖啡"). Empty keys never match.
const isMerchantLikelySame = (left, right) => {
  const keyA = normalizeMerchantKey(left)
  const keyB = normalizeMerchantKey(right)
  if (!keyA || !keyB) return false
  if (keyA === keyB) return true
  return keyA.includes(keyB) || keyB.includes(keyA)
}
|
|
|
|
|
|
|
|
|
|
// Find an existing transaction that is exactly the same imported record.
// When the payload carries an external order id and the candidate has the
// same sourceType, the order id decides; otherwise fall back to comparing
// record fingerprints.
const findExactDuplicate = (transactions, payload) =>
  transactions.find((transaction) => {
    const comparableBySource = payload.sourceOrderId && transaction.sourceType === payload.sourceType
    if (comparableBySource) {
      return transaction.sourceOrderId === payload.sourceOrderId
    }
    return Boolean(payload.sourceRecordHash) && transaction.sourceRecordHash === payload.sourceRecordHash
  })
|
|
|
|
|
|
|
|
|
|
// Find an existing transaction close enough to merge with an imported one:
// occurred within 10 minutes, same amount (±0.005), and a similar merchant.
const findMergeCandidate = (transactions, payload) => {
  const TEN_MINUTES_MS = 10 * 60 * 1000
  const importedTime = new Date(payload.date).getTime()

  return transactions.find((transaction) => {
    const timeGap = Math.abs(new Date(transaction.date).getTime() - importedTime)
    if (timeGap > TEN_MINUTES_MS) return false
    if (!isSameAmount(transaction.amount, payload.amount)) return false
    return isMerchantLikelySame(transaction.merchant, payload.merchant)
  })
}
|
|
|
|
|
|
|
|
|
|
/**
 * Insert a new `import_batches` row in the 'running' state.
 * @returns {Promise<string>} The generated batch id.
 */
const createImportBatch = async ({ source, fileName }) => {
  const db = await getDb()
  const id = uuidv4()
  const importedAt = new Date().toISOString()

  await db.run(
    `
    INSERT INTO import_batches (id, source, file_name, imported_at, status)
    VALUES (?, ?, ?, ?, ?)
    `,
    [id, source, fileName || null, importedAt, 'running'],
  )

  return id
}
|
|
|
|
|
|
|
|
|
|
/**
 * Append an audit line to `import_lines` recording what happened to one
 * statement row during an import.
 *
 * @param {string} batchId - Parent import batch id.
 * @param {object} payload - Transaction payload plus bookkeeping fields:
 *   `status` ('inserted' | 'merged' | 'skipped'), optional `transactionId`,
 *   optional `reason`, and `rawData` (the original parsed row).
 */
const recordImportLine = async (batchId, payload) => {
  const db = await getDb()
  await db.run(
    `
    INSERT INTO import_lines (
      id,
      batch_id,
      source_order_id,
      merchant,
      amount,
      occurred_at,
      status,
      transaction_id,
      reason,
      raw_data,
      created_at
    )
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `,
    [
      uuidv4(),
      batchId,
      payload.sourceOrderId || null,
      payload.merchant || null,
      Number(payload.amount || 0),
      payload.date || null,
      payload.status,
      payload.transactionId || null,
      payload.reason || null,
      // Original row kept as JSON for debugging / inspection.
      JSON.stringify(payload.rawData || {}),
      new Date().toISOString(),
    ],
  )
}
|
|
|
|
|
|
|
|
|
|
/**
 * Mark an import batch as completed and persist its final counters plus the
 * whole summary object (serialized as JSON) on the `import_batches` row.
 *
 * @param {string} batchId - Id returned by createImportBatch.
 * @param {object} summary - Counters built during importAlipayStatement.
 */
const finalizeImportBatch = async (batchId, summary) => {
  const db = await getDb()
  await db.run(
    `
    UPDATE import_batches
    SET total_count = ?, inserted_count = ?, merged_count = ?, skipped_count = ?, status = ?, summary_json = ?
    WHERE id = ?
    `,
    [
      summary.totalCount,
      summary.insertedCount,
      summary.mergedCount,
      summary.skippedCount,
      'completed',
      JSON.stringify(summary),
      batchId,
    ],
  )
}
|
|
|
|
|
|
|
|
|
|
/**
 * Import an Alipay statement file (CSV/TSV/HTML export).
 *
 * Parses the file, creates an import batch, and for each successful
 * transaction row either inserts a new transaction, merges it into a
 * close-enough existing one, or skips it (non-success status, unparseable
 * time, duplicate in the file or in the database). Every row gets an
 * `import_lines` audit entry, and the batch is finalized with counters.
 *
 * @param {File} file - Browser File object selected by the user.
 * @returns {Promise<object>} Summary: batchId, counts, and skip reasons.
 * @throws {Error} when the statement header cannot be found (parseAlipayRows).
 */
export const importAlipayStatement = async (file) => {
  const content = await file.text()
  const parsedRows = parseAlipayRows(content)
  const batchId = await createImportBatch({
    source: IMPORT_SOURCE_ALIPAY,
    fileName: file?.name || 'alipay-statement',
  })

  const summary = {
    batchId,
    source: IMPORT_SOURCE_ALIPAY,
    fileName: file?.name || '',
    totalCount: 0,
    insertedCount: 0,
    mergedCount: 0,
    skippedCount: 0,
    skippedReasons: [],
  }

  const existingTransactions = await fetchTransactions()
  // Order ids / record hashes already seen in THIS file, so duplicate lines
  // within a single statement are skipped.
  const inBatchKeys = new Set()

  for (const row of parsedRows) {
    summary.totalCount += 1

    // Skip rows whose status is not "交易成功" (e.g. closed or refunding).
    if (row.status && row.status !== IMPORT_SUCCESS_STATUS) {
      summary.skippedCount += 1
      summary.skippedReasons.push(`${row.occurredAt} ${row.merchant || row.productName}:状态不是交易成功`)
      await recordImportLine(batchId, {
        sourceOrderId: row.orderId,
        merchant: row.merchant || row.productName,
        amount: normalizeAmount(row.amount),
        date: normalizeDate(row.occurredAt),
        status: 'skipped',
        reason: '交易状态不是交易成功',
        rawData: row,
      })
      continue
    }

    const payload = normalizeImportedRow(row, batchId)
    // A null payload means the transaction time could not be parsed.
    if (!payload) {
      summary.skippedCount += 1
      summary.skippedReasons.push(`${row.occurredAt}:无法解析交易时间`)
      await recordImportLine(batchId, {
        sourceOrderId: row.orderId,
        merchant: row.merchant || row.productName,
        amount: normalizeAmount(row.amount),
        date: '',
        status: 'skipped',
        reason: '无法解析交易时间',
        rawData: row,
      })
      continue
    }

    // In-file de-duplication: prefer the external order id, fall back to the
    // record fingerprint.
    const inBatchKey = payload.sourceOrderId || payload.sourceRecordHash
    if (inBatchKey && inBatchKeys.has(inBatchKey)) {
      summary.skippedCount += 1
      summary.skippedReasons.push(`${payload.merchant}:导入文件内重复记录已跳过`)
      await recordImportLine(batchId, {
        ...payload,
        status: 'skipped',
        reason: '导入文件内重复记录',
        rawData: row,
      })
      continue
    }
    if (inBatchKey) {
      inBatchKeys.add(inBatchKey)
    }

    // Already in the database (same order id or same fingerprint): skip.
    const exactDuplicate = findExactDuplicate(existingTransactions, payload)
    if (exactDuplicate) {
      summary.skippedCount += 1
      await recordImportLine(batchId, {
        ...payload,
        transactionId: exactDuplicate.id,
        status: 'skipped',
        reason: '外部订单号或记录指纹已存在',
        rawData: row,
      })
      continue
    }

    // Close-enough existing record (time/amount/merchant): merge into it.
    const mergeCandidate = findMergeCandidate(existingTransactions, payload)
    if (mergeCandidate) {
      const merged = await mergeImportedTransaction(mergeCandidate.id, payload)
      if (merged) {
        summary.mergedCount += 1
        // Keep the in-memory list current so later rows compare against the
        // merged version.
        const index = existingTransactions.findIndex((item) => item.id === merged.id)
        if (index >= 0) existingTransactions[index] = merged
        await recordImportLine(batchId, {
          ...payload,
          transactionId: merged.id,
          status: 'merged',
          reason: '与现有记录合并',
          rawData: row,
        })
        continue
      }
    }

    // No duplicate and no merge target: insert as a new transaction.
    const inserted = await insertTransaction(payload)
    existingTransactions.unshift(inserted)
    summary.insertedCount += 1
    await recordImportLine(batchId, {
      ...payload,
      transactionId: inserted.id,
      status: 'inserted',
      reason: '新记录已导入',
      rawData: row,
    })
  }

  await finalizeImportBatch(batchId, summary)
  await saveDbToStore()
  return summary
}
|
|
|
|
|
|
|
|
|
|
/**
 * Export every transaction as a UTF-8 CSV download (prefixed with a BOM so
 * Excel detects the encoding) and return how many rows were exported.
 */
export const exportTransactionsAsCsv = async () => {
  const transactions = await fetchTransactions()

  const headerRow = [
    '交易时间',
    '金额',
    '方向',
    '商户',
    '分类',
    '备注',
    '记录类型',
    '资金来源',
    '资金去向',
    'AI 状态',
    '来源',
    '外部订单号',
  ]
  const dataRows = transactions.map((transaction) => [
    transaction.date,
    transaction.amount,
    transaction.amount > 0 ? '收入' : transaction.amount < 0 ? '支出' : '零额',
    transaction.merchant,
    transaction.category,
    transaction.note,
    transaction.entryType,
    transaction.fundSourceName || transaction.fundSourceType,
    transaction.fundTargetName || transaction.fundTargetType,
    transaction.aiStatus,
    transaction.sourceType,
    transaction.sourceOrderId,
  ])

  const csvLines = [headerRow, ...dataRows].map((row) => row.map(escapeCsvCell).join(','))
  const content = `\uFEFF${csvLines.join('\n')}`
  const fileName = `echo-ledger-${new Date().toISOString().slice(0, 10)}.csv`
  downloadText(fileName, content, 'text/csv;charset=utf-8;')
  return transactions.length
}
|
|
|
|
|
|
|
|
|
|
/**
 * Export every transaction as a pretty-printed JSON download and return the
 * number of transactions exported.
 */
export const exportTransactionsAsJson = async () => {
  const transactions = await fetchTransactions()

  const exportPayload = {
    exportedAt: new Date().toISOString(),
    count: transactions.length,
    transactions,
  }
  const content = JSON.stringify(exportPayload, null, 2)

  const fileName = `echo-ledger-${new Date().toISOString().slice(0, 10)}.json`
  downloadText(fileName, content, 'application/json;charset=utf-8;')
  return transactions.length
}
|
// 2026-03-13 10:00:21 +08:00
|
|
|
|
|
|
|
|
// Parse a JSON column value; returns null for empty input or invalid JSON
// instead of throwing.
const parseSummaryJson = (value) => {
  let parsed = null
  if (value) {
    try {
      parsed = JSON.parse(value)
    } catch {
      parsed = null
    }
  }
  return parsed
}
|
|
|
|
|
|
|
|
|
|
/**
 * Load recent import batches, newest first.
 *
 * @param {number} [limit=20] - Maximum rows to return; clamped to 1..100.
 * @returns {Promise<object[]>} Camel-cased batch records with parsed summary.
 */
export const fetchImportBatches = async (limit = 20) => {
  const db = await getDb()
  // Clamp so callers cannot request an unbounded (or zero/negative) page.
  const safeLimit = Math.max(1, Math.min(Number(limit) || 20, 100))

  const result = await db.query(
    `
    SELECT
      id,
      source,
      file_name,
      imported_at,
      total_count,
      inserted_count,
      merged_count,
      skipped_count,
      status,
      summary_json
    FROM import_batches
    ORDER BY imported_at DESC
    LIMIT ?
    `,
    [safeLimit],
  )

  const rows = result?.values || []
  return rows.map((row) => ({
    id: row.id,
    source: row.source || '',
    fileName: row.file_name || '',
    importedAt: row.imported_at || '',
    totalCount: Number(row.total_count || 0),
    insertedCount: Number(row.inserted_count || 0),
    mergedCount: Number(row.merged_count || 0),
    skippedCount: Number(row.skipped_count || 0),
    status: row.status || 'completed',
    summary: parseSummaryJson(row.summary_json),
  }))
}
|
|
|
|
|
|
|
|
|
|
/**
 * Load the audit lines of one import batch, optionally filtered by status
 * ('inserted' | 'merged' | 'skipped'); 'all' returns every line.
 *
 * @param {string} batchId - Batch to load; falsy ids return [].
 * @param {string} [status='all'] - Optional status filter.
 * @returns {Promise<object[]>} Camel-cased line records with parsed raw data.
 */
export const fetchImportBatchLines = async (batchId, status = 'all') => {
  if (!batchId) return []

  const db = await getDb()
  const filterByStatus = status !== 'all'
  // The clause is a fixed string; the status value itself is always bound.
  const statusClause = filterByStatus ? 'AND status = ?' : ''
  const params = filterByStatus ? [batchId, status] : [batchId]

  const result = await db.query(
    `
    SELECT
      id,
      batch_id,
      source_order_id,
      merchant,
      amount,
      occurred_at,
      status,
      transaction_id,
      reason,
      raw_data,
      created_at
    FROM import_lines
    WHERE batch_id = ?
    ${statusClause}
    ORDER BY occurred_at DESC, created_at DESC
    `,
    params,
  )

  const rows = result?.values || []
  return rows.map((row) => ({
    id: row.id,
    batchId: row.batch_id,
    sourceOrderId: row.source_order_id || '',
    merchant: row.merchant || '',
    amount: Number(row.amount || 0),
    occurredAt: row.occurred_at || '',
    status: row.status || 'skipped',
    transactionId: row.transaction_id || '',
    reason: row.reason || '',
    rawData: parseSummaryJson(row.raw_data) || {},
    createdAt: row.created_at || '',
  }))
}
|