优化 (Optimization)
ai/views/oj.py · 122 lines changed
@@ -18,7 +18,6 @@ from problem.models import Problem
 from submission.models import Submission, JudgeStatus
 from account.decorators import login_required
 from ai.models import AIAnalysis
-from textwrap import dedent
 
 
 # 常量定义
@@ -429,63 +428,42 @@ class AIAnalysisAPI(APIView):
 
         client = OpenAI(api_key=api_key, base_url="https://api.deepseek.com")
 
-        system_prompt ="""
-        你是一个风趣的编程老师,学生使用判题狗平台进行编程练习。
-        请根据学生提供的详细数据和每周数据,给出用户的学习建议。
-        请使用 markdown 格式输出,不要在代码块中输出。
-        最后不要忘记写一句祝福语。
-        """
-
-        user_prompt = f"""
-        这段时间内的详细数据: {details}
-        每周或每月的数据: {weekly}
-        """
+        system_prompt = "你是一个风趣的编程老师,学生使用判题狗平台进行编程练习。请根据学生提供的详细数据和每周数据,给出用户的学习建议。请使用 markdown 格式输出,不要在代码块中输出。最后不要忘记写一句祝福语。"
+        user_prompt = f"这段时间内的详细数据: {details} \n每周或每月的数据: {weekly}"
 
         analysis_chunks = []
-        saved = False
-        save_error = None
-        store_error_sent = False
+        saved_instance = None
+        completed = False
 
-        def try_save():
-            nonlocal saved, save_error
-            if saved:
-                return
-            if not analysis_chunks:
-                saved = True
-                return
+        def save_analysis():
+            nonlocal saved_instance
+            if analysis_chunks and not saved_instance:
                 try:
-                    AIAnalysis.objects.create(
+                    saved_instance = AIAnalysis.objects.create(
                         user=user,
+                        provider="deepseek",
+                        model="deepseek-chat",
                         data={"details": details, "weekly": weekly},
-                        system_prompt=dedent(system_prompt).strip(),
-                        user_prompt="这段时间内的详细数据, 每周或每月的数据",
+                        system_prompt=system_prompt,
+                        user_prompt=user_prompt,
                         analysis="".join(analysis_chunks).strip(),
                     )
-                except Exception as exc:
-                    save_error = str(exc)
-                finally:
-                    saved = True
+                except Exception:
+                    pass
 
         def stream_generator():
-            nonlocal store_error_sent
+            nonlocal completed
             try:
                 stream = client.chat.completions.create(
                     model="deepseek-chat",
                     messages=[
-                        {
-                            "role": "system",
-                            "content": system_prompt,
-                        },
-                        {
-                            "role": "user",
-                            "content": user_prompt,
-                        },
+                        {"role": "system", "content": system_prompt},
+                        {"role": "user", "content": user_prompt},
                     ],
                     stream=True,
                 )
             except Exception as exc:
-                payload = json.dumps({"type": "error", "message": str(exc)})
-                yield f"data: {payload}\n\n"
+                yield f"data: {json.dumps({'type': 'error', 'message': str(exc)})}\n\n"
                 yield "event: end\n\n"
                 return
 
@@ -497,54 +475,26 @@ class AIAnalysisAPI(APIView):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
choice = chunk.choices[0]
|
choice = chunk.choices[0]
|
||||||
finish_reason = getattr(choice, "finish_reason", None)
|
if choice.finish_reason:
|
||||||
|
completed = True
|
||||||
delta = getattr(choice, "delta", None)
|
save_analysis()
|
||||||
raw_content = getattr(delta, "content", None)
|
yield f"data: {json.dumps({'type': 'done'})}\n\n"
|
||||||
if raw_content:
|
|
||||||
if isinstance(raw_content, list):
|
|
||||||
text_content = "".join(
|
|
||||||
(
|
|
||||||
item.get("text")
|
|
||||||
if isinstance(item, dict)
|
|
||||||
else getattr(item, "text", None) or ""
|
|
||||||
)
|
|
||||||
for item in raw_content
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
text_content = str(raw_content)
|
|
||||||
|
|
||||||
if text_content:
|
|
||||||
analysis_chunks.append(text_content)
|
|
||||||
payload = json.dumps(
|
|
||||||
{"type": "delta", "content": text_content}
|
|
||||||
)
|
|
||||||
yield f"data: {payload}\n\n"
|
|
||||||
|
|
||||||
if finish_reason:
|
|
||||||
try_save()
|
|
||||||
if save_error and not store_error_sent:
|
|
||||||
error_payload = json.dumps(
|
|
||||||
{"type": "store_error", "message": save_error}
|
|
||||||
)
|
|
||||||
yield f"data: {error_payload}\n\n"
|
|
||||||
store_error_sent = True
|
|
||||||
|
|
||||||
payload = json.dumps({"type": "done"})
|
|
||||||
yield f"data: {payload}\n\n"
|
|
||||||
break
|
break
|
||||||
except Exception as exc:
|
|
||||||
payload = json.dumps({"type": "error", "message": str(exc)})
|
|
||||||
yield f"data: {payload}\n\n"
|
|
||||||
finally:
|
|
||||||
try_save()
|
|
||||||
if save_error and not store_error_sent:
|
|
||||||
error_payload = json.dumps(
|
|
||||||
{"type": "store_error", "message": save_error}
|
|
||||||
)
|
|
||||||
yield f"data: {error_payload}\n\n"
|
|
||||||
store_error_sent = True
|
|
||||||
|
|
||||||
|
content = choice.delta.content
|
||||||
|
if content:
|
||||||
|
analysis_chunks.append(content)
|
||||||
|
yield f"data: {json.dumps({'type': 'delta', 'content': content})}\n\n"
|
||||||
|
|
||||||
|
except Exception as exc:
|
||||||
|
yield f"data: {json.dumps({'type': 'error', 'message': str(exc)})}\n\n"
|
||||||
|
finally:
|
||||||
|
save_analysis()
|
||||||
|
if saved_instance and not completed:
|
||||||
|
try:
|
||||||
|
saved_instance.delete()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
yield "event: end\n\n"
|
yield "event: end\n\n"
|
||||||
|
|
||||||
response = StreamingHttpResponse(
|
response = StreamingHttpResponse(
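
Read together, the two hunks above replace the old try_save/store_error machinery with a simpler pattern: stream delta events to the client, persist the analysis exactly once when the model reports a finish_reason, and clean up if the stream ends early. The condensed sketch below shows that flow in one place; it is not the commit's code verbatim: client stands in for the OpenAI client, and persist and delete stand in for the AIAnalysis.objects.create() and .delete() calls.

    import json

    def run_stream(client, system_prompt, user_prompt, persist, delete):
        """persist(text) returns a saved record; delete(record) removes it again."""
        chunks = []          # accumulated delta fragments
        saved = None         # record created at most once
        completed = False    # True only if the model reported a finish_reason

        def save_once():
            nonlocal saved
            if chunks and saved is None:
                try:
                    saved = persist("".join(chunks).strip())
                except Exception:
                    pass     # storage failures no longer interrupt the stream

        try:
            stream = client.chat.completions.create(
                model="deepseek-chat",
                messages=[
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": user_prompt},
                ],
                stream=True,
            )
            for chunk in stream:
                if not chunk.choices:
                    continue
                choice = chunk.choices[0]
                if choice.finish_reason:          # normal end of generation
                    completed = True
                    save_once()
                    yield f"data: {json.dumps({'type': 'done'})}\n\n"
                    break
                content = choice.delta.content
                if content:
                    chunks.append(content)
                    yield f"data: {json.dumps({'type': 'delta', 'content': content})}\n\n"
        except Exception as exc:
            yield f"data: {json.dumps({'type': 'error', 'message': str(exc)})}\n\n"
        finally:
            save_once()
            if saved is not None and not completed:
                delete(saved)                     # drop a record saved for an aborted stream
        yield "event: end\n\n"

One consequence visible in the finally block: when the stream aborts before finish_reason, save_analysis() may create a row from the partial chunks and then delete it right away, so only completed analyses remain in the database.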
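
On the wire the endpoint now emits Server-Sent-Events-style lines: data: payloads whose type is delta, done or error, followed by a bare event: end terminator. A minimal consumer sketch for testing; the URL and the cookies argument below are placeholders and are not part of this commit.

    import json
    import requests

    def read_analysis(url="http://localhost:8000/api/ai/analysis", cookies=None):
        """Collect delta fragments until the server signals done, error or end."""
        text = []
        with requests.get(url, cookies=cookies, stream=True) as resp:
            for raw in resp.iter_lines(decode_unicode=True):
                if not raw:
                    continue                      # skip blank separator lines
                if raw.startswith("event: end"):
                    break                         # stream finished
                if raw.startswith("data: "):
                    msg = json.loads(raw[len("data: "):])
                    if msg["type"] == "delta":
                        text.append(msg["content"])
                    elif msg["type"] in ("done", "error"):
                        break
        return "".join(text)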