remove challenge's content

This commit is contained in:
2026-03-26 22:33:23 -06:00
parent cbdd1265bf
commit 504c2bb70b
2 changed files with 5 additions and 13 deletions

View File

@@ -53,12 +53,11 @@ class PromptConsumer(AsyncWebsocketConsumer):
             try:
                 # Build history for LLM
                 history = await self.get_history_for_llm()
-                task_content = await self.get_task_content()
                 # Stream AI response
                 full_response = ""
                 try:
-                    async for chunk in stream_chat(task_content, history):
+                    async for chunk in stream_chat(history):
                         full_response += chunk
                         await self.send(text_data=json.dumps({
                             "type": "stream",
@@ -139,9 +138,3 @@ class PromptConsumer(AsyncWebsocketConsumer):
     def get_history_for_llm(self):
         messages = self.conversation.messages.all()
         return [{"role": m.role, "content": m.content} for m in messages]
-
-    @database_sync_to_async
-    def get_task_content(self):
-        from task.models import Task
-        task = Task.objects.get(id=self.task_id)
-        return task.content

View File

@@ -14,17 +14,16 @@ SYSTEM_PROMPT = """你是一个网页生成助手。根据用户的需求描述
 6. 由于任何外部链接都被屏蔽,使用纯 HTML、CSS 和 JS 实现功能,不要依赖外部库"""

-def build_messages(task_content: str, history: list[dict]) -> list[dict]:
+def build_messages(history: list[dict]) -> list[dict]:
     """Build the message list for the LLM API call."""
-    system = SYSTEM_PROMPT + f"\n\n当前挑战任务要求:\n{task_content}"
-    messages = [{"role": "system", "content": system}]
+    messages = [{"role": "system", "content": SYSTEM_PROMPT}]
     messages.extend(history)
     return messages

-async def stream_chat(task_content: str, history: list[dict]):
+async def stream_chat(history: list[dict]):
     """Stream chat completion from the LLM. Yields content chunks."""
-    messages = build_messages(task_content, history)
+    messages = build_messages(history)
     async with AsyncOpenAI(
         api_key=settings.LLM_API_KEY,
         base_url=settings.LLM_BASE_URL,