AI assistant feature push

Basic word comment feature push
Miyamizu-MitsuhaSang 2025-10-16 17:50:32 +08:00
parent 3b025a2eca
commit 638d9fe8f3
14 changed files with 293 additions and 12 deletions


@@ -0,0 +1,47 @@
from typing import Optional, List

from pydantic import BaseModel


class Message(BaseModel):
    role: str
    content: str


class Choice(BaseModel):
    index: int
    message: Message
    finish_reason: Optional[str] = None


class Usage(BaseModel):
    prompt_tokens: int
    completion_tokens: int
    total_tokens: int


class AIQuestionRequest(BaseModel):
    word: str
    question: str


class AIAnswerResponse(BaseModel):
    id: str
    object: str
    created: int
    model: str
    choices: List[Choice]
    usage: Optional[Usage] = None

    def get_answer(self) -> str:
        """Return the text content of the first answer."""
        if self.choices and self.choices[0].message:
            return self.choices[0].message.content
        return ""


class AIAnswerOut(BaseModel):
    word: str
    answer: str
    model: str
    tokens_used: Optional[int] = None
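
As a quick sanity check of these schemas, the sketch below (not part of this commit) validates an OpenAI-style chat-completion payload and pulls the text out with get_answer(); the sample values are made up, not captured from the ZJU endpoint.

from app.api.ai_assist.ai_schemas import AIAnswerResponse

# Illustrative payload only; every value here is invented for the example.
sample = {
    "id": "chatcmpl-demo",
    "object": "chat.completion",
    "created": 1760000000,
    "model": "deepseek-r1-671b",
    "choices": [
        {
            "index": 0,
            "message": {"role": "assistant", "content": "bonjour is a common greeting."},
            "finish_reason": "stop",
        }
    ],
    "usage": {"prompt_tokens": 40, "completion_tokens": 12, "total_tokens": 52},
}

ai_resp = AIAnswerResponse(**sample)
print(ai_resp.get_answer())  # -> "bonjour is a common greeting."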

app/api/ai_assist/routes.py Normal file

@@ -0,0 +1,103 @@
import os
from typing import Dict, Tuple

import httpx
from fastapi import APIRouter, HTTPException, Depends
from starlette.requests import Request

from app.api.ai_assist import service
from app.api.ai_assist.ai_schemas import AIAnswerResponse, AIAnswerOut, AIQuestionRequest
from app.api.ai_assist.utils.redis_memory import get_chat_history, save_message, clear_chat_history
from app.models import User
from app.utils.security import get_current_user

ai_router = APIRouter()

ZJU_AI_URL = 'https://chat.zju.edu.cn/api/ai/v1/chat/completions'
AI_API_KEY = os.getenv("AI_ASSIST_KEY")
MAX_USAGE_PER = 100
CHAT_TTL = 7200


@ai_router.post("/exp")
async def dict_exp(
        request: Request,
        Q: AIQuestionRequest,
        user: Tuple[User, Dict] = Depends(get_current_user)
):
    """
    Answer a question about the word the user is currently studying.

    :param Q: request body carrying the target word and the question;
        calling with an empty question is not allowed.
    :return: the answer text plus model name and token usage (AIAnswerOut).
    """
    # Monthly usage cap for non-admin users; MAX_USAGE_PER is assumed to be
    # the intended limit for this check.
    if user[0].token_usage > MAX_USAGE_PER and not user[0].is_admin:
        raise HTTPException(status_code=400, detail="本月API使用量已超")
    redis = request.app.state.redis
    user_id = str(user[0].id)
    word = Q.word
    question = Q.question

    await service.get_and_set_last_key(redis, word=word, user_id=user_id)
    history = await get_chat_history(redis, user_id, word)

    prompt = (
        f"用户正在学习词语「{word}」。"
        f"请回答与该词相关的问题:{question}\n"
    )
    messages = [
        {"role": "system", "content": "你是一位语言词典助手,回答要简洁、自然,适合初学者理解。只回答与词汇有关的问题。"},
    ]
    messages.extend(history)
    messages.append(
        {"role": "user", "content": prompt}
    )

    payload = {
        "model": "deepseek-r1-671b",
        "messages": messages,
        "stream": False
    }
    headers = {
        "Authorization": f"Bearer {AI_API_KEY}",
        "Content-Type": "application/json"
    }

    try:
        async with httpx.AsyncClient(timeout=60) as client:
            resp = await client.post(ZJU_AI_URL, json=payload, headers=headers)
        # Raise if the upstream API did not return 200
        if resp.status_code != 200:
            raise HTTPException(status_code=resp.status_code, detail=resp.text)

        # Validate and parse the response with the Pydantic model
        ai_resp = AIAnswerResponse(**resp.json())
        answer = ai_resp.get_answer()

        await save_message(redis, user_id, word, "user", question)
        await save_message(redis, user_id, word, "assistant", answer)

        return AIAnswerOut(
            word=word,
            answer=answer,
            model=ai_resp.model,
            tokens_used=ai_resp.usage.total_tokens if ai_resp.usage else None
        )
    except HTTPException:
        # Propagate deliberate HTTP errors unchanged instead of wrapping them in a 500
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"AI调用失败: {str(e)}")


@ai_router.post("/clear")
async def clear_history(word: str, request: Request, user: Tuple[User, Dict] = Depends(get_current_user)):
    redis = request.app.state.redis
    user_id = user[0].id
    await clear_chat_history(redis, user_id, word)
    return {"msg": f"已清除 {word} 的聊天记录"}
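
For manual testing, a minimal client sketch follows; it assumes the app from main.py is running locally on port 8000 (where ai_router is mounted under /ai_assist) and that get_current_user accepts a bearer token — the token value here is a placeholder, not something defined in this commit.

import asyncio

import httpx


async def ask() -> None:
    # Placeholder credentials; the real scheme depends on app.utils.security.get_current_user
    headers = {"Authorization": "Bearer <access-token>"}
    body = {"word": "bonjour", "question": "Is this word formal or casual?"}
    async with httpx.AsyncClient(base_url="http://127.0.0.1:8000", timeout=90) as client:
        resp = await client.post("/ai_assist/exp", json=body, headers=headers)
        resp.raise_for_status()
        print(resp.json()["answer"])


asyncio.run(ask())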


@@ -0,0 +1,17 @@
from redis import Redis

from app.api.ai_assist.utils.redis_memory import clear_chat_history

CHAT_TTL = 7200


async def get_and_set_last_key(redis: Redis, word: str, user_id: str):
    last_key = f"last_word:{user_id}"
    last_word = await redis.get(last_key)
    # If the previously queried word differs from the current one, drop the old word's history
    if last_word and last_word.decode() != word:
        await clear_chat_history(redis, user_id, last_word.decode())
    # Remember the current word
    await redis.set(last_key, word, ex=CHAT_TTL)


@@ -0,0 +1,35 @@
import json
from typing import List, Dict

MAX_HISTORY = 6  # keep the most recent 3 rounds (user + assistant) per user
CHAT_TTL = 7200


async def get_chat_history(redis, user_id: str, word: str) -> List[Dict]:
    """
    Fetch the stored message history from Redis.
    """
    key = f"chat:{user_id}:{word}"
    data = await redis.lrange(key, 0, -1)
    messages = [json.loads(d) for d in data]
    return messages[-MAX_HISTORY:]  # return only the most recent N messages


async def save_message(redis, user_id: str, word: str, role: str, content: str):
    """
    Save a single message to Redis.
    """
    key = f"chat:{user_id}:{word}"
    msg = json.dumps({"role": role, "content": content})
    await redis.rpush(key, msg)
    # Cap the total list length
    await redis.ltrim(key, -MAX_HISTORY, -1)
    await redis.expire(key, CHAT_TTL)


async def clear_chat_history(redis, user_id: str, word: str):
    """
    Delete all chat history a user has for a given word.
    """
    key = f"chat:{user_id}:{word}"
    await redis.delete(key)
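
A standalone sketch of the save → trim → read cycle these helpers implement; it assumes a local Redis reachable at the default port and uses redis.asyncio for the client, which matches how the helpers are awaited in this commit.

import asyncio

from redis.asyncio import Redis

from app.api.ai_assist.utils.redis_memory import get_chat_history, save_message, clear_chat_history


async def demo() -> None:
    redis = Redis()  # assumes redis://localhost:6379/0
    await save_message(redis, "42", "bonjour", "user", "How do I pronounce it?")
    await save_message(redis, "42", "bonjour", "assistant", "Roughly 'bon-zhoor'.")
    history = await get_chat_history(redis, "42", "bonjour")
    print(history)  # at most MAX_HISTORY messages, oldest first
    await clear_chat_history(redis, "42", "bonjour")
    await redis.aclose()  # redis-py >= 5; use close() on older versions


asyncio.run(demo())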

app/api/make_comment.py Normal file

@@ -0,0 +1,28 @@
from typing import Tuple

from fastapi import APIRouter, Depends

from app.models import User, CommentFr, CommentJp
from app.schemas.comment_schemas import CommentUpload
from app.utils.security import get_current_user

comment_router = APIRouter()


@comment_router.post("/make-comment")
async def new_word_comment(
        upload: CommentUpload,
        user: Tuple[User, dict] = Depends(get_current_user)
) -> None:
    if upload.lang == "fr":
        await CommentFr.create(
            user=user[0],
            comment_text=upload.comment_content,
            comment_word=upload.comment_word,
        )
    else:
        await CommentJp.create(
            user=user[0],
            comment_text=upload.comment_content,
            comment_word=upload.comment_word,
        )


@@ -0,0 +1,28 @@
from typing import Literal, Tuple

from fastapi import APIRouter, Depends

from app.models import User, CommentFr, CommentJp
from app.schemas.comment_schemas import CommentUpload
from app.utils.security import get_current_user

word_comment_router = APIRouter()


@word_comment_router.post("/{lang}")
async def create_word_comment(
        lang: Literal["jp", "fr"],
        upload: CommentUpload,
        user: Tuple[User, dict] = Depends(get_current_user)
):
    if lang == "fr":
        await CommentFr.create(
            user=user[0],
            comment_text=upload.comment_content,
            comment_word=upload.comment_word,
        )
    else:
        await CommentJp.create(
            user=user[0],
            comment_text=upload.comment_content,
            comment_word=upload.comment_word,
        )
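
A request sketch for the new route; it assumes the /comment/word prefix registered in main.py and a placeholder bearer token, since the exact auth scheme lives in app.utils.security.get_current_user and is not shown in this commit.

import httpx

headers = {"Authorization": "Bearer <access-token>"}  # placeholder token
body = {"comment_word": "bonjour", "comment_content": "Everyday greeting; fine in formal and casual settings."}

resp = httpx.post("http://127.0.0.1:8000/comment/word/fr", json=body, headers=headers)
resp.raise_for_status()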

app/article_teacher.py Normal file

@@ -0,0 +1,26 @@
import os

from fastapi import APIRouter

# Draft system prompt for the essay-review assistant (module-level note; not yet wired to an endpoint).
"""
# Background
You are an AI assistant named EduChat, an education-domain large language model developed by East China Normal University.

# Conversation topic: composition guidance
## Requirements for the composition-guidance topic:
EduChat, you are to play the role of an experienced Chinese-language teacher helping a student review an essay and offering revision advice. Please follow these steps:
1. Overall evaluation: briefly assess the overall quality of the essay, noting its main strengths and the directions that need improvement.
2. Highlight analysis: point out the essay's concrete strengths, such as structure, description, and emotional expression.
3. Concrete revision suggestions: for the essay's weaknesses, give specific suggestions from the angles below and provide revised examples:
   - Language: is the wording vivid and accurate? Is there redundancy or repetition, and how could it be tightened?
   - Detail: are the descriptions concrete enough? Could more sensory detail (sight, hearing, smell, touch) make the scene more vivid?
   - Emotion: does the emotion read naturally? Could it be deepened or elevated?
   - Structure: do the paragraphs connect smoothly, and do the opening and ending echo each other? Each point must reference specific sentences from the original and include a revised sentence or paragraph as an example.
4. Writing-technique tips: offer 2-3 practical techniques (e.g. dynamic-description formulas, interweaving the senses) so the student can apply them more widely.
5. Summary of expected improvement: briefly explain how the essay would improve after the revisions (literary quality, emotional depth, immersion, etc.).
Comment in a warm, encouraging tone, staying professional while keeping the feedback easy for the student to accept.
"""

article_router = APIRouter()

ECNU_API_KEY = os.getenv("ECNU_TEACH_AI_KEY")


@@ -8,6 +8,7 @@ class CommentFr(Model):
     comment_word = fields.ForeignKeyField("models.WordlistFr", related_name="comments_fr")
     created_at = fields.DatetimeField(auto_now_add=True)
     updated_at = fields.DatetimeField(auto_now=True)
+    supervised = fields.BooleanField(default=False)
 
     class Meta:
         table = "comments_fr"


@@ -1,9 +1,5 @@
-from enum import Enum
-import pandas as pd
-from tortoise.models import Model
 from tortoise import fields
-from typing import Tuple, Type, TypeVar
+from tortoise.models import Model
 from app.schemas.admin_schemas import PosEnumFr


@@ -1,5 +1,4 @@
 from typing import List, Tuple
-from typing import Literal
 from pydantic import BaseModel
@@ -22,7 +21,7 @@ class CommentSet(BaseModel):
 class CommentUpload(BaseModel):
     comment_word: str
     comment_content: str
-    lang: Literal["fr", "jp"]
+    # lang: Literal["fr", "jp"]
 
     class Config:
         from_attributes = True


@@ -12,9 +12,10 @@ from app.api.redis_test import redis_test_router
 from app.api.search import dict_search
 from app.api.translator import translator_router
 from app.api.user.routes import users_router
+from app.api.word_comment.routes import word_comment_router
 from app.core.redis import init_redis, close_redis
 from app.utils.phone_encrypt import PhoneEncrypt
-from settings import TORTOISE_ORM
+from settings import ONLINE_SETTINGS
 
 
 @asynccontextmanager
@@ -42,7 +43,7 @@ app.add_middleware(
 register_tortoise(
     app=app,
-    config=TORTOISE_ORM,
+    config=ONLINE_SETTINGS,
 )
 
 app.include_router(users_router, tags=["User API"], prefix="/users")
@@ -55,5 +56,7 @@ app.include_router(translator_router, tags=["Translation API"])
 app.include_router(ai_router, tags=["AI Assist API"], prefix="/ai_assist")
+app.include_router(word_comment_router, tags=["Word Comment API"], prefix="/comment/word")
 
 if __name__ == "__main__":
     uvicorn.run("main:app", host="127.0.0.1", port=8000, reload=True)


@@ -5,11 +5,9 @@ from tkinter.scrolledtext import example
 import pandas as pd
 from tortoise import Tortoise, connections
 from tortoise.exceptions import MultipleObjectsReturned
-from fastapi import UploadFile
 
 from app.models.fr import DefinitionFr, WordlistFr
 from settings import TORTOISE_ORM
-import app.models.signals
 
 xlsx_name = "./DictTable_20250811.xlsx"
 xlsx_path = Path(xlsx_name)
@@ -68,7 +66,7 @@ async def import_def_fr(
     example = None if pd.isna(row.法语例句1) else str(row.法语例句1).strip()
     pos = None if pd.isna(row.词性1) else pos_process(str(row.词性1).strip())
     eng_exp = None if pd.isna(row.英语释义1) else str(row.英语释义1).strip()
-    chi_exp = str(row[2]).strip()
+    chi_exp = str(row[3]).strip()
     # Deduplicate: the same entry must not have duplicate definitions (same pos + meaning)
     exists = await DefinitionFr.filter(