Update the copy-rewriting logic and drop the pending-broadcast status

main
fanpt 4 days ago
parent d798fbbcc7
commit 3606d6da1b

@@ -6,7 +6,8 @@ from enum import Enum
from multiprocessing import Queue
from settings import sqlite_file
from queue import Empty
import random
import json
class MessageType(Enum):
    ENTER_LIVE_ROOM = 1
@@ -215,17 +216,72 @@ class LiveChatConfig:
        self.conn.commit()
        cursor.close()

    def flush_message(self):
        cursor = self.conn.cursor()
        # 1. Delete all rows with batch_number = 0 and status = 1 (already broadcast)
        cursor.execute('delete from message where batch_number = 0 and status = 1')
        # 2. Promote the standby batch: move batch_number = 1 rows to batch_number = 0
        cursor.execute('update message set batch_number = 0 where batch_number = 1')
        # 3. Generate a new standby batch of system copy (batch_number = 1); done by the caller via generate_messages(1)
        self.conn.commit()
        cursor.close()
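# A minimal sketch of the rotation above, run against an in-memory table; the real
# `message` schema has more columns than shown here.
import sqlite3

_conn = sqlite3.connect(':memory:')
_conn.execute('create table message (id integer primary key, message text, batch_number integer, status integer)')
# batch_number 0 = copy currently on air, 1 = standby copy; status 1 = already broadcast
_conn.executemany('insert into message (message, batch_number, status) values (?, ?, ?)', [
    ('old line, already broadcast', 0, 1),
    ('old line, not yet broadcast', 0, 0),
    ('standby line', 1, 0),
])

# Same three steps as flush_message()
_conn.execute('delete from message where batch_number = 0 and status = 1')
_conn.execute('update message set batch_number = 0 where batch_number = 1')
_conn.commit()

print(_conn.execute('select message, batch_number, status from message').fetchall())
# -> [('old line, not yet broadcast', 0, 0), ('standby line', 0, 0)]
# A fresh standby batch (batch_number = 1) is then produced by generate_messages(1).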
    def system_messages_random_mix_dict(self, product_id=None, ensure_mixed=True):
        """
        Randomly splice messages across script groups and return a {type: message} dict as JSON.
        """
        cursor = self.conn.cursor()
        if product_id is None:
            cursor.execute(
                """
                select script_type_order_num, script_group_id, type, message
                from system_message
                order by script_type_order_num asc, script_group_id asc, id asc
                """
            )
            rows = cursor.fetchall()
        else:
            cursor.execute(
                """
                select script_type_order_num, script_group_id, type, message
                from system_message
                where product_id = ?
                order by script_type_order_num asc, script_group_id asc, id asc
                """,
                (product_id,)
            )
            rows = cursor.fetchall()
        cursor.close()
        # Group rows by script_type_order_num (one layer per script type)
        by_order = {}
        for order_num, group_id, _type, msg in rows:
            by_order.setdefault(order_num, [])
            by_order[order_num].append({
                "order": int(order_num),
                "group": int(group_id),
                "type": _type,
                "message": msg
            })
        # Randomly pick one candidate per layer
        result_dict = {}
        used_groups = set()
        orders = sorted(by_order.keys())
        for order_num in orders:
            candidates = by_order[order_num]
            if not candidates:
                continue
            if ensure_mixed:
                # Prefer groups not picked yet so the final script mixes groups
                prefer = [c for c in candidates if c["group"] not in used_groups]
                pick_source = prefer if prefer else candidates
            else:
                pick_source = candidates
            choice = random.choice(pick_source)
            result_dict[choice["type"]] = choice["message"]
            used_groups.add(choice["group"])
        return json.dumps(result_dict, ensure_ascii=False, indent=4)
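# A minimal standalone sketch of the same cross-group mixing, on hypothetical in-memory
# rows (script_type_order_num, script_group_id, type, message); the real method reads
# these from the system_message table.
import json
import random

rows = [
    (1, 1, 'greeting', 'greeting text from group 1'),
    (1, 2, 'greeting', 'greeting text from group 2'),
    (2, 1, 'product_intro', 'product intro from group 1'),
    (2, 2, 'product_intro', 'product intro from group 2'),
]

by_order, used_groups, result = {}, set(), {}
for order_num, group_id, _type, msg in rows:
    by_order.setdefault(order_num, []).append((group_id, _type, msg))
for order_num in sorted(by_order):
    candidates = by_order[order_num]
    # Prefer a group that has not been used yet, so layers come from different groups
    prefer = [c for c in candidates if c[0] not in used_groups] or candidates
    group_id, _type, msg = random.choice(prefer)
    result[_type] = msg
    used_groups.add(group_id)

print(json.dumps(result, ensure_ascii=False, indent=4))
# e.g. the greeting may come from group 2 while the product intro comes from group 1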
class PromptQueue:
    def __init__(self, maxsize=0):
        self.queue = Queue(maxsize)

@@ -401,19 +401,21 @@ class DouyinLiveWebReply:
            logger.error(f'Failed to post to human: {response.text}')

    def generate_messages(self, batch_number=0):
        message_count = self.live_chat_config.messages(batch_number)
        if message_count == 0:
            logger.info(f'Generating system copy, batch_number: {batch_number}')
            system_messages = self.live_chat_config.system_messages
            llm_prompt = self.live_chat_config.refine_system_message_prompt.format(
                content=system_messages)
            logger.info(f'llm_prompt: {llm_prompt}')
            reply_messages = self._llm(llm_prompt, False)
            reply_messages = re.findall(r'\{.*?\}', reply_messages, re.DOTALL)[0]
            reply_messages = json.loads(reply_messages)
            for _type, message in reply_messages.items():
                self.live_chat_config.insert_message(message, _type, batch_number)
                logger.info(f'Stored copy: {_type} | {message}')
        # message_count = self.live_chat_config.messages(batch_number)
        # if message_count == 0:
        logger.info(f'Generating system copy, batch_number: {batch_number}')
        system_messages = self.live_chat_config.system_messages_random_mix_dict(product_id=1, ensure_mixed=True)
        logger.info(f'Source system_messages for copy generation: {system_messages}')
        llm_prompt = self.live_chat_config.refine_system_message_prompt.format(content=system_messages)
        logger.info(f'llm_prompt: {llm_prompt}')
        reply_messages = self._llm(llm_prompt, False)
        reply_messages = re.findall(r'\{.*?\}', reply_messages, re.DOTALL)[0]
        reply_messages = json.loads(reply_messages)
        for _type, message in reply_messages.items():
            self.live_chat_config.insert_message(message, _type, batch_number)
            logger.info(f'Stored copy: {_type} | {message}')
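# The regex above assumes the LLM wraps a single flat JSON object in its reply; a
# minimal sketch of that extraction (the sample reply text is made up):
import json
import re

llm_reply = 'Here is the rewritten copy:\n{\n    "greeting": "...",\n    "product_intro": "..."\n}\nHope it helps.'
matches = re.findall(r'\{.*?\}', llm_reply, re.DOTALL)
if not matches:
    raise ValueError('LLM reply contains no JSON object')
print(json.loads(matches[0]))
# The non-greedy \{.*?\} stops at the first '}', so a nested JSON object would be
# truncated; the prompt is expected to keep the reply to a flat {type: message} dict.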
    def text_to_adio_cost_predict(self, text):
@@ -502,8 +504,8 @@ class DouyinLiveWebReply:
                else:
                    if not self.live_chat_config.next_reply_message:
                        logger.info('Copy exhausted, regenerating copy')
                        self.generate_messages(0)
                        self.live_chat_config.flush_message()
                        self.generate_messages(1)
                        continue
            else:
                # Not speaking -> take one line from the queue/DB and broadcast it
@@ -522,7 +524,7 @@ class DouyinLiveWebReply:
                if not message:
                    logger.info('Copy exhausted, regenerating copy')
                    self.generate_messages(0)
                    self.generate_messages(1)
                    # self.generate_messages(1)
                    continue
                reply_message, _id = message
                logger.info(f'Updating copy id {_id} status to 1')
