From 638d9fe8f3d9086526fd93f311afba470239851a Mon Sep 17 00:00:00 2001
From: Miyamizu-MitsuhaSang <2510681107@qq.com>
Date: Thu, 16 Oct 2025 17:50:32 +0800
Subject: [PATCH] Add AI assistant feature and basic word comment feature
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/api/ai_assist/__init__.py           |   0
 app/api/ai_assist/ai_schemas.py         |  47 +++++++++++
 app/api/ai_assist/routes.py             | 103 ++++++++++++++++++++++++
 app/api/ai_assist/service.py            |  17 ++++
 app/api/ai_assist/utils/redis_memory.py |  35 ++++++++
 app/api/make_comment.py                 |  28 +++++++
 app/api/word_comment/__init__.py        |   0
 app/api/word_comment/routes.py          |  28 +++++++
 app/article_teacher.py                  |  26 ++++++
 app/models/comments.py                  |   1 +
 app/models/fr.py                        |   6 +-
 app/schemas/comment_schemas.py          |   3 +-
 main.py                                 |   7 +-
 scripts/update_fr.py                    |   4 +-
 14 files changed, 293 insertions(+), 12 deletions(-)
 create mode 100644 app/api/ai_assist/__init__.py
 create mode 100644 app/api/ai_assist/ai_schemas.py
 create mode 100644 app/api/ai_assist/routes.py
 create mode 100644 app/api/ai_assist/service.py
 create mode 100644 app/api/ai_assist/utils/redis_memory.py
 create mode 100644 app/api/make_comment.py
 create mode 100644 app/api/word_comment/__init__.py
 create mode 100644 app/api/word_comment/routes.py
 create mode 100644 app/article_teacher.py

diff --git a/app/api/ai_assist/__init__.py b/app/api/ai_assist/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/api/ai_assist/ai_schemas.py b/app/api/ai_assist/ai_schemas.py
new file mode 100644
index 0000000..bc37a9e
--- /dev/null
+++ b/app/api/ai_assist/ai_schemas.py
@@ -0,0 +1,47 @@
+from typing import Optional, List
+
+from pydantic import BaseModel
+
+
+class Message(BaseModel):
+    role: str
+    content: str
+
+
+class Choice(BaseModel):
+    index: int
+    message: Message
+    finish_reason: Optional[str] = None
+
+
+class Usage(BaseModel):
+    prompt_tokens: int
+    completion_tokens: int
+    total_tokens: int
+
+
+class AIQuestionRequest(BaseModel):
+    word: str
+    question: str
+
+
+class AIAnswerResponse(BaseModel):
+    id: str
+    object: str
+    created: int
+    model: str
+    choices: List[Choice]
+    usage: Optional[Usage] = None
+
+    def get_answer(self) -> str:
+        """Return the text content of the first choice."""
+        if self.choices and self.choices[0].message:
+            return self.choices[0].message.content
+        return ""
+
+
+class AIAnswerOut(BaseModel):
+    word: str
+    answer: str
+    model: str
+    tokens_used: Optional[int] = None
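
The snippet below is not part of the patch; it sketches how AIAnswerResponse is meant to be consumed, assuming the upstream service returns an OpenAI-style chat-completion body. Every field value here is invented for illustration.

    from app.api.ai_assist.ai_schemas import AIAnswerResponse

    # Hypothetical response payload in the OpenAI-compatible shape the schema expects.
    sample = {
        "id": "chatcmpl-123",
        "object": "chat.completion",
        "created": 1760000000,
        "model": "deepseek-r1-671b",
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": "'bonjour' is an everyday French greeting."},
                "finish_reason": "stop",
            }
        ],
        "usage": {"prompt_tokens": 42, "completion_tokens": 18, "total_tokens": 60},
    }

    resp = AIAnswerResponse(**sample)
    print(resp.get_answer())                                 # "'bonjour' is an everyday French greeting."
    print(resp.usage.total_tokens if resp.usage else None)   # 60
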
diff --git a/app/api/ai_assist/routes.py b/app/api/ai_assist/routes.py
new file mode 100644
index 0000000..05904c7
--- /dev/null
+++ b/app/api/ai_assist/routes.py
@@ -0,0 +1,103 @@
+import os
+from typing import Dict, Tuple
+
+import httpx
+from fastapi import APIRouter, HTTPException, Depends
+from starlette.requests import Request
+
+from app.api.ai_assist import service
+from app.api.ai_assist.ai_schemas import AIAnswerResponse, AIAnswerOut, AIQuestionRequest
+from app.api.ai_assist.utils.redis_memory import get_chat_history, save_message, clear_chat_history
+from app.models import User
+from app.utils.security import get_current_user
+
+ai_router = APIRouter()
+
+ZJU_AI_URL = 'https://chat.zju.edu.cn/api/ai/v1/chat/completions'
+AI_API_KEY = os.getenv("AI_ASSIST_KEY")
+MAX_USAGE_PER = 100  # monthly usage cap per user
+
+CHAT_TTL = 7200
+
+
+@ai_router.post("/exp")
+async def dict_exp(
+        request: Request,
+        Q: AIQuestionRequest,
+        user: Tuple[User, Dict] = Depends(get_current_user)
+):
+    """
+    Answer a question about the word the user is currently studying.
+
+    :param Q: request body carrying the target word and a question; the question must not be empty
+    :return: AIAnswerOut with the assistant's reply
+    """
+    if user[0].token_usage > MAX_USAGE_PER and not user[0].is_admin:
+        raise HTTPException(status_code=400, detail="本月API使用量已超")
+
+    redis = request.app.state.redis
+
+    user_id = str(user[0].id)
+    word = Q.word
+    question = Q.question
+
+    await service.get_and_set_last_key(redis, word=word, user_id=user_id)
+
+    history = await get_chat_history(redis, user_id, word)
+
+    prompt = (
+        f"用户正在学习词语「{word}」。"
+        f"请回答与该词相关的问题:{question}\n"
+    )
+
+    messages = [
+        {"role": "system", "content": "你是一位语言词典助手,回答要简洁、自然,适合初学者理解。只回答与词汇有关的问题。"},
+    ]
+    messages.extend(history)
+    messages.append(
+        {"role": "user", "content": prompt}
+    )
+
+    payload = {
+        "model": "deepseek-r1-671b",
+        "messages": messages,
+        "stream": False
+    }
+
+    headers = {
+        "Authorization": f"Bearer {AI_API_KEY}",
+        "Content-Type": "application/json"
+    }
+    try:
+        async with httpx.AsyncClient(timeout=60) as client:
+            resp = await client.post(ZJU_AI_URL, json=payload, headers=headers)
+
+            # Raise with the upstream status code if the call did not return 200
+            if resp.status_code != 200:
+                raise HTTPException(status_code=resp.status_code, detail=resp.text)
+
+            # Validate and parse the response body with the Pydantic model
+            ai_resp = AIAnswerResponse(**resp.json())
+
+            answer = ai_resp.get_answer()
+
+            await save_message(redis, user_id, word, "user", question)
+            await save_message(redis, user_id, word, "assistant", answer)
+
+            return AIAnswerOut(
+                word=word,
+                answer=answer,
+                model=ai_resp.model,
+                tokens_used=ai_resp.usage.total_tokens if ai_resp.usage else None
+            )
+
+    except httpx.HTTPError as e:
+        raise HTTPException(status_code=500, detail=f"AI调用失败: {str(e)}")
+
+
+@ai_router.post("/clear")
+async def clear_history(word: str, request: Request, user: Tuple[User, Dict] = Depends(get_current_user)):
+    redis = request.app.state.redis
+    user_id = str(user[0].id)
+    await clear_chat_history(redis, user_id, word)
+    return {"msg": f"已清除 {word} 的聊天记录"}
diff --git a/app/api/ai_assist/service.py b/app/api/ai_assist/service.py
new file mode 100644
index 0000000..7a6a90a
--- /dev/null
+++ b/app/api/ai_assist/service.py
@@ -0,0 +1,17 @@
+from redis.asyncio import Redis
+
+from app.api.ai_assist.utils.redis_memory import clear_chat_history
+
+CHAT_TTL = 7200
+
+
+async def get_and_set_last_key(redis: Redis, word: str, user_id: str):
+    last_key = f"last_word:{user_id}"
+    last_word = await redis.get(last_key)
+
+    # If the previously queried word differs from the current one, drop the old word's history
+    if last_word and last_word.decode() != word:
+        await clear_chat_history(redis, user_id, last_word.decode())
+
+    # Remember the current word
+    await redis.set(last_key, word, ex=CHAT_TTL)
diff --git a/app/api/ai_assist/utils/redis_memory.py b/app/api/ai_assist/utils/redis_memory.py
new file mode 100644
index 0000000..4097aeb
--- /dev/null
+++ b/app/api/ai_assist/utils/redis_memory.py
@@ -0,0 +1,35 @@
+import json
+from typing import List, Dict
+
+MAX_HISTORY = 6  # keep the last 3 turns (user + assistant) per user
+CHAT_TTL = 7200
+
+
+async def get_chat_history(redis, user_id: str, word: str) -> List[Dict]:
+    """
+    Fetch the stored chat history from Redis.
+    """
+    key = f"chat:{user_id}:{word}"
+    data = await redis.lrange(key, 0, -1)
+    messages = [json.loads(d) for d in data]
+    return messages[-MAX_HISTORY:]  # return only the most recent N messages
+
+
+async def save_message(redis, user_id: str, word: str, role: str, content: str):
+    """
+    Save a single message to Redis.
+    """
+    key = f"chat:{user_id}:{word}"
+    msg = json.dumps({"role": role, "content": content})
+    await redis.rpush(key, msg)
+    # Cap the total length of the list
+    await redis.ltrim(key, -MAX_HISTORY, -1)
+    await redis.expire(key, CHAT_TTL)
+
+
+async def clear_chat_history(redis, user_id: str, word: str):
+    """
+    Delete all chat history a user has for a given word.
+    """
+    key = f"chat:{user_id}:{word}"
+    await redis.delete(key)
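
The history helpers above can be exercised on their own. The sketch below is not part of the patch; it assumes a Redis instance on localhost and redis-py's asyncio client, which is what request.app.state.redis is expected to hold.

    import asyncio

    from redis.asyncio import Redis

    from app.api.ai_assist.utils.redis_memory import (
        MAX_HISTORY,
        clear_chat_history,
        get_chat_history,
        save_message,
    )


    async def demo() -> None:
        redis = Redis.from_url("redis://localhost:6379/0")
        user_id, word = "42", "bonjour"

        # Write more turns than the window keeps; LTRIM caps the list at MAX_HISTORY.
        for i in range(MAX_HISTORY):
            await save_message(redis, user_id, word, "user", f"question {i}")
            await save_message(redis, user_id, word, "assistant", f"answer {i}")

        history = await get_chat_history(redis, user_id, word)
        assert len(history) <= MAX_HISTORY
        assert history[-1]["role"] == "assistant"

        await clear_chat_history(redis, user_id, word)
        await redis.aclose()  # use close() on redis-py < 5


    asyncio.run(demo())
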
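End to end, the new route is mounted in main.py under the /ai_assist prefix, so it is reached at POST /ai_assist/exp. A minimal client-side sketch, assuming get_current_user accepts a bearer token (the token value below is a placeholder):

    import httpx

    headers = {"Authorization": "Bearer <access-token>"}  # placeholder credentials
    payload = {"word": "bonjour", "question": "Quelle est la différence avec « salut » ?"}

    r = httpx.post("http://127.0.0.1:8000/ai_assist/exp", json=payload, headers=headers, timeout=60)
    r.raise_for_status()
    data = r.json()  # AIAnswerOut: {"word": ..., "answer": ..., "model": ..., "tokens_used": ...}
    print(data["answer"])
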
diff --git a/app/api/make_comment.py b/app/api/make_comment.py
new file mode 100644
index 0000000..154c7fe
--- /dev/null
+++ b/app/api/make_comment.py
@@ -0,0 +1,28 @@
+from typing import Tuple
+
+from fastapi import APIRouter, Depends
+
+from app.models import User, CommentFr, CommentJp
+from app.schemas.comment_schemas import CommentUpload
+from app.utils.security import get_current_user
+
+comment_router = APIRouter()
+
+
+@comment_router.post("/make-comment")
+async def new_word_comment(
+        upload: CommentUpload,
+        user: Tuple[User, dict] = Depends(get_current_user)
+) -> None:
+    if upload.lang == "fr":
+        await CommentFr.create(
+            user=user[0],
+            comment_text=upload.comment_content,
+            comment_word=upload.comment_word,
+        )
+    else:
+        await CommentJp.create(
+            user=user[0],
+            comment_text=upload.comment_content,
+            comment_word=upload.comment_word,
+        )
diff --git a/app/api/word_comment/__init__.py b/app/api/word_comment/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/api/word_comment/routes.py b/app/api/word_comment/routes.py
new file mode 100644
index 0000000..a736377
--- /dev/null
+++ b/app/api/word_comment/routes.py
@@ -0,0 +1,28 @@
+from typing import Literal, Tuple
+
+from fastapi import APIRouter, Depends
+
+from app.models import User, CommentFr, CommentJp
+from app.schemas.comment_schemas import CommentUpload
+from app.utils.security import get_current_user
+
+word_comment_router = APIRouter()
+
+@word_comment_router.post("/{lang}")
+async def create_word_comment(
+        lang: Literal["jp", "fr"],
+        upload: CommentUpload,
+        user: Tuple[User, dict] = Depends(get_current_user)
+):
+    if lang == "fr":
+        await CommentFr.create(
+            user=user[0],
+            comment_text=upload.comment_content,
+            comment_word=upload.comment_word,
+        )
+    else:
+        await CommentJp.create(
+            user=user[0],
+            comment_text=upload.comment_content,
+            comment_word=upload.comment_word,
+        )
\ No newline at end of file
diff --git a/app/article_teacher.py b/app/article_teacher.py
new file mode 100644
index 0000000..f5a54a1
--- /dev/null
+++ b/app/article_teacher.py
@@ -0,0 +1,26 @@
+import os
+
+from fastapi import APIRouter
+
+"""
+# 背景
+你是一个人工智能助手,名字叫EduChat,是一个由华东师范大学开发的教育领域大语言模型。
+# 对话主题:作文指导
+## 作文指导主题的要求:
+EduChat你需要扮演一位经验丰富的语文老师,现在需要帮助一位学生审阅作文并给出修改建议。请按照以下步骤进行:
+整体评价:先对作文的整体质量进行简要评价,指出主要优点和需要改进的方向。
+亮点分析:具体指出作文中的亮点(如结构、描写、情感表达等方面的优点)。
+具体修改建议:针对作文中的不足,从以下几个方面提出具体修改建议,并给出修改后的示例:
+语言表达:是否生动、准确?有无冗余或重复?可以如何优化?
+细节描写:是否足够具体?能否加入更多感官描写(视觉、听觉、嗅觉、触觉等)使画面更立体?
+情感表达:情感是否自然?能否更深入或升华?
+结构布局:段落衔接是否自然?开头结尾是否呼应? 
(注意:每个建议点都要结合原文具体句子进行分析,并给出修改后的句子或段落作为示例) +写作技巧提示:提供2-3条实用的写作技巧(如动态描写公式、感官交织法等),帮助学生举一反三。 +修改效果总结:简要说明按照建议修改后,作文会有哪些方面的提升(如文学性、情感层次、场景沉浸感等)。 +请用亲切、鼓励的语气进行点评,保持专业性同时让学生易于接受。 +""" + +article_router = APIRouter() + +ECNU_API_KEY = os.getenv("ECNU_TEACH_AI_KEY") + diff --git a/app/models/comments.py b/app/models/comments.py index 8871969..ba16f1e 100644 --- a/app/models/comments.py +++ b/app/models/comments.py @@ -8,6 +8,7 @@ class CommentFr(Model): comment_word = fields.ForeignKeyField("models.WordlistFr", related_name="comments_fr") created_at = fields.DatetimeField(auto_now_add=True) updated_at = fields.DatetimeField(auto_now=True) + supervised = fields.BooleanField(default=False) class Meta: table = "comments_fr" diff --git a/app/models/fr.py b/app/models/fr.py index 86c84ae..ab52c80 100644 --- a/app/models/fr.py +++ b/app/models/fr.py @@ -1,9 +1,5 @@ -from enum import Enum - -import pandas as pd -from tortoise.models import Model from tortoise import fields -from typing import Tuple, Type, TypeVar +from tortoise.models import Model from app.schemas.admin_schemas import PosEnumFr diff --git a/app/schemas/comment_schemas.py b/app/schemas/comment_schemas.py index 09dfac0..1cae008 100644 --- a/app/schemas/comment_schemas.py +++ b/app/schemas/comment_schemas.py @@ -1,5 +1,4 @@ from typing import List, Tuple -from typing import Literal from pydantic import BaseModel @@ -22,7 +21,7 @@ class CommentSet(BaseModel): class CommentUpload(BaseModel): comment_word: str comment_content: str - lang: Literal["fr", "jp"] + # lang: Literal["fr", "jp"] class Config: from_attributes = True \ No newline at end of file diff --git a/main.py b/main.py index 094156b..9ab69e8 100644 --- a/main.py +++ b/main.py @@ -12,9 +12,10 @@ from app.api.redis_test import redis_test_router from app.api.search import dict_search from app.api.translator import translator_router from app.api.user.routes import users_router +from app.api.word_comment.routes import word_comment_router from app.core.redis import init_redis, close_redis from app.utils.phone_encrypt import PhoneEncrypt -from settings import TORTOISE_ORM +from settings import ONLINE_SETTINGS @asynccontextmanager @@ -42,7 +43,7 @@ app.add_middleware( register_tortoise( app=app, - config=TORTOISE_ORM, + config=ONLINE_SETTINGS, ) app.include_router(users_router, tags=["User API"], prefix="/users") @@ -55,5 +56,7 @@ app.include_router(translator_router, tags=["Translation API"]) app.include_router(ai_router, tags=["AI Assist API"], prefix="/ai_assist") +app.include_router(word_comment_router, tags=["Word Comment API"], prefix="/comment/word") + if __name__ == "__main__": uvicorn.run("main:app", host="127.0.0.1", port=8000, reload=True) diff --git a/scripts/update_fr.py b/scripts/update_fr.py index 1ac6feb..f1dd767 100644 --- a/scripts/update_fr.py +++ b/scripts/update_fr.py @@ -5,11 +5,9 @@ from tkinter.scrolledtext import example import pandas as pd from tortoise import Tortoise, connections from tortoise.exceptions import MultipleObjectsReturned -from fastapi import UploadFile from app.models.fr import DefinitionFr, WordlistFr from settings import TORTOISE_ORM -import app.models.signals xlsx_name = "./DictTable_20250811.xlsx" xlsx_path = Path(xlsx_name) @@ -68,7 +66,7 @@ async def import_def_fr( example = None if pd.isna(row.法语例句1) else str(row.法语例句1).strip() pos = None if pd.isna(row.词性1) else pos_process(str(row.词性1).strip()) eng_exp = None if pd.isna(row.英语释义1) else str(row.英语释义1).strip() - chi_exp = str(row[2]).strip() + chi_exp = str(row[3]).strip() # 
去重:同一个词条不能有重复释义(同 pos + meaning) exists = await DefinitionFr.filter(