Commit 227fae2e authored by xianyang

Merge branch 'development'

# Conflicts:
#	core/config/env.py
parents ce181c18 950a59a1
@@ -7,6 +7,8 @@ from app.api.account import schemas, crud
from app.api.account.crud import AccountStatistics, SpecificAccountQuery, HomePageDisplay
from app.api.statement import crud as statement_crud
from libs import functions
from libs.business import GUILD_NAME
from libs.export import Export, TableToFile
from libs.functions import get_date_list
from libs.log_utils import Logger
from libs.result_format import HttpResultResponse, HttpMessage
@@ -71,7 +73,7 @@ def finance_info_excel(data: schemas.FinanceInfo, request: Request,
token=Depends(login_required), db: Session = Depends(get_db)):
"""账户财务详情导出"""
headers = request.get("headers")
statement_list = crud.get_finance_info(data.unique_tag, data.id, data.page, 99999, data.start_time, data.end_time)
statement_list = crud.get_finance_info(data.unique_tag, data.id, data.page, 99999999, data.start_time, data.end_time)
df = ['账户余额', '入账', '出账', '时间']
return statement_crud.data_to_file(db, statement_list, "财务信息", headers, df)
@@ -100,15 +102,20 @@ def finance_info_excel(data: schemas.FinanceDetails, request: Request,
token=Depends(login_required), db: Session = Depends(get_db)):
"""账户财务明细导出"""
headers = request.get("headers")
statement_list = AccountStatistics(data.page, 99999, data.uuid, data.user_id, data.start_time, data.end_time, data.type,
statement_list = AccountStatistics(data.page, 99999999, data.uuid, data.user_id, data.start_time, data.end_time, data.type,
data.gift_type, data.unique_tag).get_finance_details()
if data.unique_tag in ["knapsack_account", "user_account", "guild_account", "pledgeDeduction"]:
field_head = ['uuid', '入账', '出账', '时间']
statement_list = statement_list[0]
if data.unique_tag == 'guild_account':
field_head = ['公会名', '公会id', '入账', '出账', '余额', '时间']
data = crud.delete_guild_specify_field(statement_list)
return TableToFile(db, data, "财务明细", headers, field_head).main_method()
else:
field_head = ['订单号', '出入账方式', '礼物类型', '金额', '时间']
data = crud.delete_specify_field(statement_list, data.unique_tag)
return statement_crud.data_to_file(db, data, "财务明细", headers, field_head)
# return statement_crud.data_to_file(db, data, "财务明细", headers, field_head)
return TableToFile(db, data, "财务明细", headers, field_head).main_method()
@router.get("/finance/fourth/details")
@@ -226,3 +233,10 @@ def outon_account(token=Depends(login_required)):
"""系统账户列表"""
account_list = crud.query_account_data()
return HttpResultResponse(data=account_list)
@router.get("/anchor")
def anchor_account(anchor_id: Optional[int] = None, ):
"""主播账户余额"""
anchor_money = crud.AccountAnchor(anchor_id).anchor_balance()
return HttpResultResponse(data=anchor_money)
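A quick usage sketch for the new anchor-balance endpoint; the base URL and the router prefix are assumptions, not taken from this merge:

# Hypothetical client call; adjust base_url to the actual deployment and router prefix.
import requests

def fetch_anchor_balance(anchor_id=None, base_url="http://localhost:8000"):
    params = {"anchor_id": anchor_id} if anchor_id is not None else {}
    resp = requests.get(f"{base_url}/anchor", params=params, timeout=5)
    resp.raise_for_status()
    return resp.json().get("data")

# fetch_anchor_balance(10086) -> the balance payload wrapped by HttpResultResponse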
@@ -127,8 +127,7 @@ class CalculationMonthlyBill(object):
def kv_search(self):
"""查询筛选的key, value"""
k_list = []
type_name = query_fi_account_type()
for k, v in type_name.items():
for k, v in TYPE_NAME.items():
if v == self.name or self.name in v:
k_list.append(k)
return k_list
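For readers of the TYPE_NAME switch above: kv_search is a reverse lookup from a display name back to the reference-type keys, now served from a dict built once at import instead of a per-call database query. A minimal, self-contained illustration with invented sample data:

# Illustration only; the sample TYPE_NAME contents are made up.
TYPE_NAME = {"gift_send": "送礼", "gift_income": "送礼", "recharge": "充值"}

def kv_search(name):
    # collect every key whose mapped name equals or contains the filter name
    return [k for k, v in TYPE_NAME.items() if v == name or name in v]

# kv_search("送礼") -> ['gift_send', 'gift_income']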
@@ -186,7 +185,7 @@ class CalculationMonthlyBill(object):
v['error_money'] = float('%.3f' % (v['expenditure'] - v['income']))
res_all_data.append(v)
# store the result in Redis
red.set('business_type_sum-' + str(self.date), str(res_all_data), 3600)
red.set('business_type_sum-' + str(self.date), str(res_all_data), 1800)
else:
if assert_list:
res_all_data = self.search_red_data(business_type_sum_data)
@@ -225,6 +224,14 @@ class CalculationMonthlyDetails(object):
num = future2.result()
total = future3.result()
if data:
for i in data:
if i['amount_type'] == 'consumable':
i['amount_type'] = '钻石'
if i['amount_type'] == 'withdrawable':
i['amount_type'] = '珍珠'
if i['amount_type'] == 'backpack':
i['amount_type'] = '背包'
i['reference_type'] = TYPE_NAME.get(param.get('key'), param.get('key'))
return data, num, float(total[0]['amount'])
return [], 0, 0
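The chained if-blocks added above map amount_type codes to display labels in place. A table-driven sketch of the same mapping (not what the commit uses), which keeps the labels in one dict:

# Sketch of an equivalent table-driven rewrite; the label values are copied from the diff above.
AMOUNT_TYPE_LABELS = {"consumable": "钻石", "withdrawable": "珍珠", "backpack": "背包"}

def label_amount_types(rows):
    for row in rows:
        row["amount_type"] = AMOUNT_TYPE_LABELS.get(row["amount_type"], row["amount_type"])
    return rows

# label_amount_types([{"amount_type": "consumable"}]) -> [{"amount_type": "钻石"}]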
@@ -264,8 +271,7 @@ class MonthDataDerive(object):
assert_list = []
if name:
k_list = []
type_name = query_fi_account_type()
for k, v in type_name.items():
for k, v in TYPE_NAME.items():
if v == name or name in v:
k_list.append(k)
if len(k_list) > 1:
@@ -285,9 +291,8 @@ class MonthDataDerive(object):
for res in res_data:
if res["reference_type"] in self.derive_key:
continue
type_name = query_fi_account_type()
if res["reference_type"] in type_name:
name = type_name[res["reference_type"]]
if res["reference_type"] in TYPE_NAME:
name = TYPE_NAME[res["reference_type"]]
else:
name = res["reference_type"]
out = [i['money'] for i in res_data if i['reference_type'] == res["reference_type"] and i['type'] == 0]
@@ -318,7 +323,7 @@ class ReferenceTypeClassification():
def classification_summary(self):
data_sql = f"select uuid,type,sum(amount) as amount,reference_type from {self.date} where reference_type='{self.reference_type}' GROUP BY uuid,type"
data_sql = f"select uuid,type,sum(amount) as amount,reference_type,amount_type from {self.date} where reference_type='{self.reference_type}' GROUP BY uuid,type,amount_type"
guild_sql = f"select uuid from guild"
account_sql = f"select uuid,name from fi_account"
anchor_sql = f"select uuid from v2_user where is_achor in(1,2)"
@@ -381,6 +386,7 @@ class AbnormalDataDetails(object):
self.size = size
def abnormal_task(self):
Logger().logger.info('开始查询异常数据')
out_sql = f"select order_number from {self.date} where reference_type='{self.reference_type}' and type=0"
income_sql = f"select order_number from {self.date} where reference_type='{self.reference_type}' and type=1"
with ThreadPoolExecutor(max_workers=2) as pool:
......
@@ -12,6 +12,7 @@ from core.config.env import env, COS_PATH, COS_RERURN_PATH
from libs.db_link import LinkMysql
from libs.functions import get_now_timestamp, get_now_datetime, get_order, get_ip, time_str_to_timestamp, \
time_int_timestamp, send_json_rpc_request, AES_Decrypt, AES_Encrypt
from libs.log_utils import Logger
from libs.orm import QueryAllData
from libs.token_verify import get_current_user
from models.recharge import Settlement, Fitransferlog, FinanceFixLog, Account_log, Paymentlog
@@ -270,6 +271,7 @@ def query_token(db, h_list):
def transfer_trigger_task(uuid, user_id, balance, type, amount_type, remark='用户转账', money_data=[],
isUser=1, dst_uuid="", reference_number=''):
"""转账验证"""
Logger(20).logger.info("转账验证")
request_data = {
"ip": get_ip(),
"uuid": uuid,
@@ -281,13 +283,17 @@ def transfer_trigger_task(uuid, user_id, balance, type, amount_type, remark='用
"amount_type": amount_type if amount_type else 'consumable',
"notify_url": ""
}
res = send_json_rpc_request(request_data, 'Server.UserExecute.Transfer')
try:
if not res['data']['result']['status']:
res = send_json_rpc_request(request_data, 'Server.UserExecute.Transfer')
if not res:
return '清算系统调用失败'
Logger(20).logger.info("数据验证,是否成功")
if not res['status']:
if 'Insufficient assets' in res['data']['result']['msg']:
return 200,"资产不足,无法转账"
return res['data']['result']['msg']
except Exception as e:
Logger(40).logger.error(f"错误数据格式:{res}")
return f"清算系统异常:{str(e)}"
status = 2
if res['status'] == 0:
......
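The try/except added to transfer_trigger_task guards against the clearing system returning a payload without the expected keys. A hedged sketch of the same guard pulled into a helper; the nested key layout mirrors only what this diff accesses, and the helper name is hypothetical:

def transfer_result_message(res):
    # returns an error message, or None when the transfer check passed
    if not res:
        return '清算系统调用失败'
    try:
        if not res['status']:
            msg = res['data']['result']['msg']
            if 'Insufficient assets' in msg:
                return '资产不足,无法转账'
            return msg
    except (KeyError, TypeError) as exc:
        return f'清算系统异常:{exc}'
    return None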
@@ -20,7 +20,7 @@ class StatementCreate(StatementBase):
class StatementList(BaseModel):
page: int = 1
size: int = 9999999
size: int = 99999999
start_time: Optional[str] = ""
end_time: Optional[str] = ""
order_number: Optional[str] = ""
......
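Several call sites in this merge bump a magic page size from 99999 (or 9999999) to 99999999 so that exports effectively fetch everything in one page. A sketch of centralising that value in one constant; EXPORT_PAGE_SIZE is a hypothetical name:

from pydantic import BaseModel

EXPORT_PAGE_SIZE = 99999999  # hypothetical constant; the value matches the one used across this merge

class StatementList(BaseModel):
    page: int = 1
    size: int = EXPORT_PAGE_SIZE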
@@ -57,7 +57,7 @@ class TestingEnv(Env):
NACOS_URL = YAML_DATA.get('config_url')
NACOSCONFIG = "show=all&dataId=fj-finance-test&group=DEFAULT_GROUP&tenant=cw-test&namespaceId=cw-test"
NACOS_NAME = YAML_DATA.get('name')
NACOS_PWD = YAML_DATA.get('pwd')
NACOS_PWD = YAML_DATA.get('password')
DB_HISTORY = apo.get('history')
DB_3YV2 = apo.get('business')
Redis = apo.get('redis')
......@@ -90,11 +90,11 @@ class ProdEnv(Env):
SECRET_KEY: str = "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7"
ALGORITHM: str = "HS256"
PASSWORD: str = "fj123456"
CLEARING_CENTER_URL: str = 'http://47.103.144.36:5454/'
CLEARING_CENTER_HOST: str = '47.103.144.36'
CLEARING_CENTER_URL: str = 'http://219.152.95.226:5454/'
CLEARING_CENTER_HOST: str = '219.152.95.226'
CLEARING_CENTER_PORT: int = 5454
KEY = "dK8tZ1jM0wA6oE3j"
PHP_URL = "http://47.103.97.109:6750"
PHP_URL = "http://219.152.95.226:6750"
# env = TestingEnv()  # development environment
......
@@ -9,11 +9,24 @@ def query_fi_account_type():
res_data = LinkMysql(env.DB_3YV2).query_mysql(sql)
for i in res_data:
if not fi_type.get(i['keyValue']):
fi_type[i['keyValue']] = i['keyName']
try:
name, type = i['keyName'].split('-')
except Exception as e:
name = i['keyName']
fi_type[i['keyValue']] = name
return fi_type
def query_fi_guild_name():
guild_name = {}
sql = f"SELECT id,guild_name,uuid FROM guild"
res_data = LinkMysql(env.DB_3YV2).query_mysql(sql)
for i in res_data:
guild_name[i['id']] = i['guild_name']
return guild_name
TYPE_NAME = query_fi_account_type()
GUILD_NAME = query_fi_guild_name()
# TYPE_NAME_T = {
......
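TYPE_NAME and GUILD_NAME are now built once at import time, so edits to the fi_account_type or guild tables are not seen until the process restarts. If that trade-off ever matters, a small time-based refresh wrapper is one option; this is a sketch, not part of the commit, and refresh_interval is an assumed default:

import time

class CachedLookup:
    """Reloads a loader function's result at most once per refresh interval."""
    def __init__(self, loader, refresh_interval=1800):
        self.loader = loader
        self.refresh_interval = refresh_interval
        self._value = None
        self._loaded_at = 0.0

    def get(self):
        if self._value is None or time.time() - self._loaded_at > self.refresh_interval:
            self._value = self.loader()
            self._loaded_at = time.time()
        return self._value

# TYPE_NAME_CACHE = CachedLookup(query_fi_account_type)
# TYPE_NAME_CACHE.get()  # refreshed at most every 30 minutes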
import math
import time
import openpyxl
import threading
import pandas as pd
from app.api.statement.guild import query_token
from starlette.responses import StreamingResponse
from app.api.export import crud
from libs.log_utils import Logger


class Export(object):
    def __init__(self, db, data, name, header, field_list):
        self.db = db
        self.data = data
        self.name = name
        self.header = header
        self.field_list = field_list
        self.lock = threading.Lock()
        self.wb = openpyxl.Workbook()  # create a new Excel workbook
        self.sheet = self.wb.active

    def write_data(self, sheet, row, col, data):
        sheet.cell(row=row, column=col, value=data)

    def write_task(self, start_row, end_row, data):
        """Write task executed by each worker thread."""
        for row in range(start_row, end_row):
            with self.lock:
                index = row - start_row  # position of this row inside the thread's data slice
                col = 1
                if index < len(data):
                    for k, v in data[index].items():
                        self.write_data(self.sheet, row, col, v)
                        col += 1

    def data_to_file(self):
        # look up the operator from the request token
        user = query_token(self.db, self.header)
        params = {"source": self.name, "method": "data_to_file", "status": 1}
        if len(self.data) == 0:
            params["status"] = 3
        try:
            bk = pd.DataFrame(self.data)
            if self.data[0].get('create_time'):
                if isinstance(self.data[0]['create_time'], int):
                    bk['create_time'] = bk['create_time'].apply(
                        lambda x: time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(x)))
            bk.columns = self.field_list  # rename the DataFrame columns
            write_data = bk.to_dict(orient='records')
            # create the thread list
            threads = []
            rows_per_thread = math.ceil(len(write_data) / 10)
            # write the header row
            for i in self.field_list:
                self.write_data(self.sheet, 1, self.field_list.index(i) + 1, i)
            # start the threads
            for i in range(10):
                start_row = i * rows_per_thread + 2 + i
                end_row = (i + 1) * rows_per_thread + 2 + i
                thread = threading.Thread(target=self.write_task,
                                          args=(start_row, end_row, write_data[i * rows_per_thread:(i + 1) * rows_per_thread]))
                thread.start()
                threads.append(thread)
            # wait for all threads to finish
            for thread in threads:
                thread.join()
            crud.create_export_data(self.db, params, user)
            # save the Excel file
            self.wb.save(f'static/{self.name}.xlsx')
            self.wb.close()
            file = open(f'static/{self.name}.xlsx', 'rb')
            return StreamingResponse(file,
                                     media_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
        except Exception as e:
            Logger(40).logger.error("导出失败:%s" % str(e))
            params["status"] = 2
            crud.create_export_data(self.db, params, user)
class TableToFile(object):
    def __init__(self, db, data, name, header, field_list):
        self.db = db
        self.data = data
        self.name = name
        self.header = header
        self.field_list = field_list
        self.lock = threading.Lock()
        self.wb = openpyxl.Workbook()  # create a new Excel workbook

    def thread_task(self, bk, writer, sheet_name):
        """Worker run by each thread: write one chunk of rows to its own sheet."""
        self.lock.acquire()
        bk.to_excel(writer, sheet_name=sheet_name, index=False)
        self.lock.release()

    def main_method(self):
        """Entry point."""
        Logger().logger.info('开始导出')
        user = query_token(self.db, self.header)
        params = {"source": self.name, "method": "data_to_file", "status": 1}
        if len(self.data) == 0:
            params["status"] = 3
            crud.create_export_data(self.db, params, user)
            Logger().logger.info(f'导出没有数据')
            return None
        try:
            bk = pd.DataFrame(self.data)
            if self.data[0].get('create_time'):
                if isinstance(self.data[0]['create_time'], int):
                    bk['create_time'] = bk['create_time'].apply(
                        lambda x: time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(x)))
            bk.columns = self.field_list  # rename the DataFrame columns
            write_data = bk.to_dict(orient='records')
            with pd.ExcelWriter(f'static/{self.name}.xlsx') as writer:
                # bk.to_excel(writer, sheet_name='Sheet1', index=False)
                threads = []
                rows_per_thread = math.ceil(len(write_data) / 5)
                for i in range(5):
                    sheet_name = 'sheet' + str(i + 1)
                    threads.append(threading.Thread(target=self.thread_task,
                                                    args=[bk.iloc[i * rows_per_thread: rows_per_thread * (i + 1)], writer, sheet_name]))
                # start the threads
                for y in threads:
                    y.start()
                # wait for all threads to finish
                for z in threads:
                    z.join()
            # open the workbook only after the writer has flushed it to disk
            file = open(f'static/{self.name}.xlsx', 'rb')
            # record the export
            crud.create_export_data(self.db, params, user)
            return StreamingResponse(file,
                                     media_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
        except Exception as e:
            Logger().logger.info(f'导出异常:{str(e)}')
            params["status"] = 2
            crud.create_export_data(self.db, params, user)
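A minimal usage sketch for the new exporter classes, assuming a SQLAlchemy session db and a request headers dict as used elsewhere in the module; the sample rows and field names are invented. As a design note, both classes fan the write out to threads but serialise it behind a single lock, so the threading buys little; TableToFile mainly ends up splitting the rows across five sheets.

# Usage sketch; db, headers and the sample rows are placeholders.
sample_rows = [
    {"order_number": "A001", "type": "income", "amount": 12.5, "create_time": 1700000000},
    {"order_number": "A002", "type": "expense", "amount": 3.0, "create_time": 1700003600},
]
field_head = ['订单号', '出入账方式', '金额', '时间']
# response = TableToFile(db, sample_rows, "财务明细", headers, field_head).main_method()
# response is a StreamingResponse over static/财务明细.xlsx, or None when there is no data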
@@ -79,6 +79,13 @@ def get_last_month():
return datetime.now().strftime("%Y%m"), last_month.strftime("%Y%m"), before_last_month.strftime("%Y%m")
def judge_time_period(start_day):
"""判断时间段有多少天"""
last_month = datetime.strptime(start_day, '%Y-%m-%d') + relativedelta(months=1)
before_last_month = datetime.strptime(start_day, '%Y-%m-%d') + relativedelta(months=2)
return datetime.strptime(start_day, '%Y-%m-%d').strftime("%Y%m"), last_month.strftime("%Y%m"), before_last_month.strftime("%Y%m")
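A quick worked example of the new judge_time_period helper: for a 2023-05-10 start it returns the start month and the two months after it. The same arithmetic as a standalone sketch:

from datetime import datetime
from dateutil.relativedelta import relativedelta

start = datetime.strptime("2023-05-10", "%Y-%m-%d")
print(start.strftime("%Y%m"),
      (start + relativedelta(months=1)).strftime("%Y%m"),
      (start + relativedelta(months=2)).strftime("%Y%m"))
# -> 202305 202306 202307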
def md5(s):
"""md5加密"""
sign_str = hashlib.md5()
@@ -153,11 +160,10 @@ def send_json_rpc_request(params, method):
s.sendall(request_str.encode())
# receive the server response
response_str = s.recv(1024).decode()
response_str = s.recv(10240).decode()
# decode the response string into a JSON object
response = json.loads(response_str)
if "error" in response:
return {}
return response["result"]
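Raising the recv buffer to 10240 bytes makes truncation less likely, but a single recv can still return a partial payload for large responses. A hedged sketch of accumulating until a complete JSON document has arrived (whether the clearing server keeps the connection open afterwards is an assumption):

import json

def recv_json(sock, chunk_size=10240):
    buf = b""
    while True:
        chunk = sock.recv(chunk_size)
        if not chunk:
            break  # peer closed the connection
        buf += chunk
        try:
            return json.loads(buf.decode())  # stop as soon as the payload parses
        except (UnicodeDecodeError, json.JSONDecodeError):
            continue  # payload still incomplete, keep reading
    return json.loads(buf.decode())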
@@ -259,7 +265,9 @@ def AES_Decrypt(data):
except Exception as e:
Logger(40).logger.error(f"php数据解密异常:{str(e)},数据:{plaintext}")
coding_data = str(plaintext, encoding="utf-8")
return list(eval(coding_data))
num = coding_data.index(']')
return list(eval(coding_data[:num + 1]))
return res_data
......
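The slice up to the first ']' in AES_Decrypt trims the padding bytes left after decryption before evaluating the list. A sketch of the same trim using ast.literal_eval instead of eval (a swapped-in technique, not what the commit does), which refuses anything other than literal data:

import ast

def parse_decrypted_list(coding_data):
    end = coding_data.index(']')  # drop padding residue after the closing bracket
    return list(ast.literal_eval(coding_data[:end + 1]))

# parse_decrypted_list("['a', 'b']\x06\x06") -> ['a', 'b']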