前后端基本架构和完整excel表的解析及统计图表的生成以及excel表的导出

This commit is contained in:
2026-03-19 01:51:34 +08:00
parent c23b93bb70
commit 2f630695ff
194 changed files with 23354 additions and 174 deletions

10
.idea/.gitignore generated vendored
View File

@@ -1,10 +0,0 @@
# 默认忽略的文件
/shelf/
/workspace.xml
/.idea/
/venv/
# 基于编辑器的 HTTP 客户端请求
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

View File

@@ -1,12 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Python 3.12 virtualenv at H:\OwnProject\FilesReadSysteam\backend\venv" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="PyDocumentationSettings">
<option name="format" value="PLAIN" />
<option name="myDocStringFormat" value="Plain" />
</component>
</module>

6
.idea/encodings.xml generated
View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Encoding">
<file url="file://$PROJECT_DIR$/backend/requirements.txt" charset="UTF-8" />
</component>
</project>

View File

@@ -1,6 +0,0 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

7
.idea/misc.xml generated
View File

@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Black">
<option name="sdkName" value="Python 3.12" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12 virtualenv at H:\OwnProject\FilesReadSysteam\backend\venv" project-jdk-type="Python SDK" />
</project>

8
.idea/modules.xml generated
View File

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/FilesReadSysteam.iml" filepath="$PROJECT_DIR$/.idea/FilesReadSysteam.iml" />
</modules>
</component>
</project>

6
.idea/vcs.xml generated
View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>

View File

@@ -0,0 +1,14 @@
"""
API route registration module.

Creates the application's top-level APIRouter and mounts every
endpoint sub-router (upload, AI analysis, visualization, analysis
charts) onto it.
"""
from fastapi import APIRouter
from app.api.endpoints import upload, ai_analyze, visualization, analysis_charts
# Create the main router
api_router = APIRouter()
# Register each feature module's sub-router
api_router.include_router(upload.router)
api_router.include_router(ai_analyze.router)
api_router.include_router(visualization.router)
api_router.include_router(analysis_charts.router)

Binary file not shown.

View File

@@ -0,0 +1,144 @@
"""
AI 分析 API 接口
"""
from fastapi import APIRouter, UploadFile, File, HTTPException, Query, Body
from typing import Optional
import logging
from app.services.excel_ai_service import excel_ai_service
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/ai", tags=["AI 分析"])
@router.post("/analyze/excel")
async def analyze_excel(
    file: UploadFile = File(...),
    user_prompt: str = Query("", description="用户自定义提示词"),
    analysis_type: str = Query("general", description="分析类型: general, summary, statistics, insights"),
    parse_all_sheets: bool = Query(False, description="是否分析所有工作表")
):
    """Upload an Excel file and run AI analysis on it.

    Args:
        file: uploaded Excel workbook (.xlsx / .xls)
        user_prompt: optional user-supplied prompt
        analysis_type: one of general / summary / statistics / insights
        parse_all_sheets: analyze every worksheet instead of one

    Returns:
        dict: result containing the parsed Excel data and the AI output
    """
    # --- request validation -------------------------------------------
    if not file.filename:
        raise HTTPException(status_code=400, detail="文件名为空")
    extension = file.filename.split('.')[-1].lower()
    if extension not in ('xlsx', 'xls'):
        raise HTTPException(
            status_code=400,
            detail=f"不支持的文件类型: {extension},仅支持 .xlsx 和 .xls"
        )
    supported_types = ['general', 'summary', 'statistics', 'insights']
    if analysis_type not in supported_types:
        raise HTTPException(
            status_code=400,
            detail=f"不支持的分析类型: {analysis_type},支持的类型: {', '.join(supported_types)}"
        )
    # --- analysis -----------------------------------------------------
    try:
        payload = await file.read()
        logger.info(f"开始分析文件: {file.filename}, 分析类型: {analysis_type}")
        if parse_all_sheets:
            result = await excel_ai_service.batch_analyze_sheets(
                payload,
                file.filename,
                user_prompt=user_prompt,
                analysis_type=analysis_type
            )
        else:
            # Single-sheet path: headers are assumed on the first row.
            result = await excel_ai_service.analyze_excel_file(
                payload,
                file.filename,
                user_prompt=user_prompt,
                analysis_type=analysis_type,
                parse_options={"header_row": 0}
            )
        logger.info(f"文件分析完成: {file.filename}, 成功: {result['success']}")
        return result
    except HTTPException:
        # Re-raise validation errors untouched.
        raise
    except Exception as e:
        logger.error(f"AI 分析过程中出错: {str(e)}")
        raise HTTPException(status_code=500, detail=f"分析失败: {str(e)}")
@router.get("/analysis/types")
async def get_analysis_types():
    """Return the list of analysis types the AI service supports.

    Returns:
        dict: {"types": [...]} as reported by the service
    """
    supported = excel_ai_service.get_supported_analysis_types()
    return {"types": supported}
@router.post("/analyze/text")
async def analyze_text(
    excel_data: dict = Body(..., description="Excel 解析后的数据"),
    user_prompt: str = Body("", description="用户提示词"),
    analysis_type: str = Body("general", description="分析类型")
):
    """Run LLM analysis over already-parsed Excel data.

    Args:
        excel_data: parsed Excel payload
        user_prompt: optional user prompt
        analysis_type: analysis mode passed to the LLM service

    Returns:
        dict: LLM analysis result
    """
    try:
        logger.info(f"开始文本分析, 分析类型: {analysis_type}")
        from app.services.llm_service import llm_service
        # A non-blank custom prompt switches to the template-driven path.
        use_template = bool(user_prompt and user_prompt.strip())
        if use_template:
            result = await llm_service.analyze_with_template(
                excel_data,
                user_prompt
            )
        else:
            result = await llm_service.analyze_excel_data(
                excel_data,
                user_prompt,
                analysis_type
            )
        logger.info(f"文本分析完成, 成功: {result['success']}")
        return result
    except Exception as e:
        logger.error(f"文本分析失败: {str(e)}")
        raise HTTPException(status_code=500, detail=f"分析失败: {str(e)}")

View File

@@ -0,0 +1,105 @@
"""
分析结果图表 API - 根据文本分析结果生成图表
"""
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import Optional
import logging
from app.services.text_analysis_service import text_analysis_service
from app.services.chart_generator_service import chart_generator_service
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/analysis", tags=["分析结果图表"])
class AnalysisChartRequest(BaseModel):
    """Request body for generating charts from an AI analysis result."""
    # Free-form analysis text produced by the LLM; required.
    analysis_text: str
    # Name of the source file; forwarded to the extraction service.
    original_filename: Optional[str] = ""
    # Source type hint (e.g. "text"); forwarded to the extraction service.
    file_type: Optional[str] = "text"
@router.post("/extract-and-chart")
async def extract_and_generate_charts(request: AnalysisChartRequest):
    """Extract structured data from an AI analysis text and render charts.

    Args:
        request: request carrying the analysis text

    Returns:
        dict: chart-generation result
    """
    if not request.analysis_text or not request.analysis_text.strip():
        raise HTTPException(status_code=400, detail="分析文本不能为空")
    try:
        logger.info("开始从分析结果中提取结构化数据...")
        # Step 1: let the LLM pull structured data out of the free text.
        extracted = await text_analysis_service.extract_structured_data(
            analysis_text=request.analysis_text,
            original_filename=request.original_filename or "unknown",
            file_type=request.file_type or "text"
        )
        if not extracted.get("success"):
            raise HTTPException(
                status_code=500,
                detail=f"提取结构化数据失败: {extracted.get('error', '未知错误')}"
            )
        logger.info("结构化数据提取成功,开始生成图表...")
        # Step 2: turn the structured data into chart images.
        charts = chart_generator_service.generate_charts_from_analysis(extracted)
        if not charts.get("success"):
            raise HTTPException(
                status_code=500,
                detail=f"生成图表失败: {charts.get('error', '未知错误')}"
            )
        logger.info("图表生成成功")
        return charts
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"分析结果图表生成失败: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail=f"图表生成失败: {str(e)}"
        )
@router.post("/analyze-text")
async def analyze_text_only(request: AnalysisChartRequest):
    """Extract structured data without rendering charts (debugging aid).

    Args:
        request: request carrying the analysis text

    Returns:
        dict: extracted structured data
    """
    if not request.analysis_text or not request.analysis_text.strip():
        raise HTTPException(status_code=400, detail="分析文本不能为空")
    try:
        return await text_analysis_service.extract_structured_data(
            analysis_text=request.analysis_text,
            original_filename=request.original_filename or "unknown",
            file_type=request.file_type or "text"
        )
    except Exception as e:
        logger.error(f"文本分析失败: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail=f"文本分析失败: {str(e)}"
        )

View File

@@ -0,0 +1,205 @@
"""
文件上传 API 接口
"""
from fastapi import APIRouter, UploadFile, File, HTTPException, Query
from fastapi.responses import StreamingResponse
from typing import Optional
import logging
import pandas as pd
import io
from app.services.file_service import file_service
from app.core.document_parser import XlsxParser
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/upload", tags=["文件上传"])
# 初始化解析器
excel_parser = XlsxParser()
@router.post("/excel")
async def upload_excel(
    file: UploadFile = File(...),
    parse_all_sheets: bool = Query(False, description="是否解析所有工作表"),
    sheet_name: Optional[str] = Query(None, description="指定解析的工作表名称"),
    header_row: int = Query(0, description="表头所在的行索引")
):
    """Upload an Excel file, persist it, and return the parsed content.

    Args:
        file: uploaded Excel workbook
        parse_all_sheets: parse every worksheet when True
        sheet_name: specific worksheet to parse (first sheet when omitted)
        header_row: row index holding the column headers

    Returns:
        dict: parse result as a plain dictionary
    """
    if not file.filename:
        raise HTTPException(status_code=400, detail="文件名为空")
    extension = file.filename.split('.')[-1].lower()
    if extension not in ('xlsx', 'xls'):
        raise HTTPException(
            status_code=400,
            detail=f"不支持的文件类型: {extension},仅支持 .xlsx 和 .xls"
        )
    try:
        payload = await file.read()
        # Persist the upload so it can be previewed/exported later.
        stored_path = file_service.save_uploaded_file(
            payload,
            file.filename,
            subfolder="excel"
        )
        logger.info(f"文件已保存: {stored_path}")
        if parse_all_sheets:
            result = excel_parser.parse_all_sheets(stored_path)
        elif sheet_name:
            result = excel_parser.parse(stored_path, sheet_name=sheet_name, header_row=header_row)
        else:
            result = excel_parser.parse(stored_path, header_row=header_row)
        # Record where the file lives so follow-up endpoints can find it.
        if result.metadata:
            result.metadata.update(
                saved_path=stored_path,
                original_filename=file.filename
            )
        return result.to_dict()
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"解析 Excel 文件时出错: {str(e)}")
        raise HTTPException(status_code=500, detail=f"解析失败: {str(e)}")
@router.get("/excel/preview/{file_path:path}")
async def get_excel_preview(
    file_path: str,
    sheet_name: Optional[str] = Query(None, description="工作表名称"),
    max_rows: int = Query(10, description="最多返回的行数", ge=1, le=100)
):
    """Return the first rows of a stored Excel file.

    Args:
        file_path: path of the stored file
        sheet_name: worksheet name (first sheet when omitted)
        max_rows: maximum number of rows to return

    Returns:
        dict: preview data
    """
    try:
        # An empty/missing sheet name falls back to sheet index 0.
        preview = excel_parser.get_sheet_preview(
            file_path,
            sheet_name=sheet_name or 0,
            max_rows=max_rows
        )
        return preview.to_dict()
    except Exception as e:
        logger.error(f"获取预览数据时出错: {str(e)}")
        raise HTTPException(status_code=500, detail=f"获取预览失败: {str(e)}")
@router.delete("/file")
async def delete_uploaded_file(file_path: str = Query(..., description="要删除的文件路径")):
    """Delete a previously uploaded file.

    Args:
        file_path: path of the file to remove

    Returns:
        dict: {"success": bool, "message": str}
    """
    try:
        removed = bool(file_service.delete_file(file_path))
        if removed:
            return {"success": True, "message": "文件删除成功"}
        return {"success": False, "message": "文件不存在或删除失败"}
    except Exception as e:
        logger.error(f"删除文件时出错: {str(e)}")
        raise HTTPException(status_code=500, detail=f"删除失败: {str(e)}")
@router.get("/excel/export/{file_path:path}")
async def export_excel(
    file_path: str,
    sheet_name: Optional[str] = Query(None, description="工作表名称"),
    columns: Optional[str] = Query(None, description="要导出的列,逗号分隔")
):
    """Export a stored Excel file, optionally restricted to one sheet/columns.

    Args:
        file_path: path of the previously uploaded file
        sheet_name: worksheet to export (first sheet when omitted)
        columns: comma-separated column names to keep (all when omitted)

    Returns:
        StreamingResponse: the generated .xlsx file

    Raises:
        HTTPException: 404 when the file does not exist, 500 on other errors
    """
    from pathlib import Path
    from urllib.parse import quote
    # NOTE(review): file_path comes straight from the URL; consider validating
    # that it resolves inside the upload directory to prevent path traversal.
    try:
        if sheet_name:
            df = pd.read_excel(file_path, sheet_name=sheet_name)
        else:
            df = pd.read_excel(file_path)
        # Keep only the requested columns that actually exist in the sheet;
        # if none of them exist, fall back to exporting all columns.
        selected = []
        if columns:
            requested = [col.strip() for col in columns.split(',')]
            selected = [col for col in requested if col in df.columns]
            if selected:
                df = df[selected]
        # Write the (possibly filtered) frame to an in-memory workbook.
        output = io.BytesIO()
        with pd.ExcelWriter(output, engine='openpyxl') as writer:
            df.to_excel(writer, index=False, sheet_name=sheet_name or 'Sheet1')
        output.seek(0)
        # Build the download name; count the columns actually exported
        # (the original counted the *requested* columns even when some
        # did not exist).
        if columns:
            export_name = f"export_{sheet_name or 'data'}_{len(selected) or 'all'}_cols.xlsx"
        else:
            # Path.name handles both '/' and '\\' separators portably.
            export_name = f"export_{Path(file_path).name}"
        # RFC 5987 percent-encoding keeps non-ASCII (e.g. Chinese) filenames
        # valid: HTTP header values must be latin-1, so a raw non-ASCII
        # filename would make the response fail.
        disposition = f"attachment; filename*=UTF-8''{quote(export_name)}"
        # Stream the buffer directly instead of copying its bytes again.
        return StreamingResponse(
            output,
            media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
            headers={"Content-Disposition": disposition}
        )
    except FileNotFoundError:
        logger.error(f"文件不存在: {file_path}")
        raise HTTPException(status_code=404, detail="文件不存在")
    except Exception as e:
        logger.error(f"导出 Excel 文件时出错: {str(e)}")
        raise HTTPException(status_code=500, detail=f"导出失败: {str(e)}")

View File

@@ -0,0 +1,90 @@
"""
可视化 API 接口 - 生成统计图表
"""
from fastapi import APIRouter, HTTPException, Body
from typing import Dict, Any
import logging
from app.services.visualization_service import visualization_service
from pydantic import BaseModel
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/visualization", tags=["数据可视化"])
class StatisticsRequest(BaseModel):
    """Request body for statistics/visualization generation."""
    # Parsed Excel payload to analyze.
    excel_data: Dict[str, Any]
    # Analysis mode forwarded to the visualization service.
    analysis_type: str = "statistics"
@router.post("/statistics")
async def generate_statistics(request: StatisticsRequest):
    """Produce summary statistics and chart data for parsed Excel content.

    Args:
        request: body carrying excel_data and analysis_type

    Returns:
        dict: statistics and chart data
    """
    if not request.excel_data:
        raise HTTPException(status_code=400, detail="未提供 Excel 数据")
    try:
        outcome = visualization_service.analyze_and_visualize(
            request.excel_data,
            request.analysis_type
        )
        if not outcome.get("success"):
            raise HTTPException(status_code=500, detail=outcome.get("error", "分析失败"))
        logger.info("统计图表生成成功")
        return outcome
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"统计图表生成失败: {str(e)}")
        raise HTTPException(status_code=500, detail=f"图表生成失败: {str(e)}")
@router.get("/chart-types")
async def get_chart_types():
    """List the chart types the visualization service can produce.

    Returns:
        dict: {"chart_types": [{"value", "label", "description"}, ...]}
    """
    catalog = [
        ("histogram", "直方图", "显示数值型列的分布情况"),
        ("bar_chart", "条形图", "显示分类列的频次分布"),
        ("box_plot", "箱线图", "显示数值列的四分位数和异常值"),
        ("correlation_heatmap", "相关性热力图", "显示数值列之间的相关性"),
    ]
    return {
        "chart_types": [
            {"value": value, "label": label, "description": description}
            for value, label, description in catalog
        ]
    }

View File

@@ -0,0 +1,7 @@
"""
文档解析模块 - 支持多种文件格式的解析
"""
from .base import BaseParser
from .xlsx_parser import XlsxParser
__all__ = ['BaseParser', 'XlsxParser']

View File

@@ -0,0 +1,87 @@
"""
解析器基类 - 定义所有解析器的通用接口
"""
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional
from pathlib import Path
class ParseResult:
    """Holds the outcome of a single document-parse operation."""

    def __init__(
        self,
        success: bool,
        data: Optional[Dict[str, Any]] = None,
        error: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None
    ):
        # Empty containers stand in for missing data/metadata so callers
        # never have to None-check these attributes.
        self.success = success
        self.data = data or {}
        self.error = error
        self.metadata = metadata or {}

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the result as a plain dictionary."""
        return dict(
            success=self.success,
            data=self.data,
            error=self.error,
            metadata=self.metadata,
        )
class BaseParser(ABC):
    """Abstract base class defining the interface shared by all parsers."""

    def __init__(self):
        # Subclasses populate these with their own extensions and name.
        self.supported_extensions: List[str] = []
        self.parser_name: str = "base_parser"

    @abstractmethod
    def parse(self, file_path: str, **kwargs) -> ParseResult:
        """
        Parse the file at *file_path*.

        Args:
            file_path: path of the file to parse
            **kwargs: parser-specific options

        Returns:
            ParseResult: outcome of the parse
        """
        pass

    def can_parse(self, file_path: str) -> bool:
        """
        Decide, from the extension alone, whether this parser handles the file.

        Args:
            file_path: path of the candidate file

        Returns:
            bool: True when the extension is one this parser supports
        """
        return Path(file_path).suffix.lower() in self.supported_extensions

    def get_file_info(self, file_path: str) -> Dict[str, Any]:
        """
        Gather basic facts about a file on disk.

        Args:
            file_path: path of the file

        Returns:
            Dict[str, Any]: filename/extension/size/parser, or
            {"error": ...} when the file does not exist
        """
        path = Path(file_path)
        if not path.exists():
            return {"error": "File not found"}
        info = {
            "filename": path.name,
            "extension": path.suffix.lower(),
            "size": path.stat().st_size,
            "parser": self.parser_name,
        }
        return info

View File

@@ -0,0 +1,120 @@
"""
文档解析工具函数
"""
import re
from typing import List, Optional, Dict, Any
def clean_text(text: str) -> str:
    """
    Normalize whitespace and drop non-printable characters from *text*.

    Args:
        text: raw input text (falsy values yield "")

    Returns:
        str: cleaned text
    """
    if not text:
        return ""
    # Trim the ends, then collapse every whitespace run to a single space.
    collapsed = re.sub(r'\s+', ' ', text.strip())
    # Keep printable characters plus the common whitespace controls.
    kept = [ch for ch in collapsed if ch.isprintable() or ch in '\n\r\t']
    return ''.join(kept)
def chunk_text(
text: str,
chunk_size: int = 1000,
overlap: int = 100
) -> List[str]:
"""
将文本分块
Args:
text: 原始文本
chunk_size: 每块的大小(字符数)
overlap: 重叠区域的大小
Returns:
List[str]: 文本块列表
"""
if not text:
return []
chunks = []
start = 0
text_length = len(text)
while start < text_length:
end = start + chunk_size
chunk = text[start:end]
chunks.append(chunk)
start = end - overlap
return chunks
def normalize_string(s: Any) -> str:
    """
    Coerce an arbitrary value to a normalized string.

    Args:
        s: input value of any type

    Returns:
        str: "" for None, cleaned text for strings, str(s) otherwise
    """
    if s is None:
        return ""
    if isinstance(s, str):
        return clean_text(s)
    # Numbers and anything else fall back to their str() form.
    return str(s)
def detect_encoding(file_path: str) -> Optional[str]:
    """
    Best-effort detection of a file's text encoding (simplified).

    Args:
        file_path: path of the file to sample

    Returns:
        Optional[str]: detected encoding, or None when the file cannot be
        read or the optional `chardet` dependency is not installed
        (the import now lives inside the try so a missing package is
        reported as None instead of raising ImportError)
    """
    try:
        import chardet  # optional third-party dependency

        with open(file_path, 'rb') as f:
            sample = f.read(10000)  # first 10000 bytes suffice for detection
        return chardet.detect(sample).get('encoding')
    except Exception:
        # Missing chardet, unreadable file, etc. -> encoding unknown.
        return None
def safe_get(d: Dict[str, Any], key: str, default: Any = None) -> Any:
    """
    Look up *key* in *d*, falling back to *default* on any failure.

    Args:
        d: mapping to read (may in practice be a non-mapping)
        key: lookup key
        default: value returned when the lookup fails

    Returns:
        Any: the stored value or *default*
    """
    try:
        value = d.get(key, default)
    except Exception:
        # d may not support .get at all; treat that as a missed lookup.
        value = default
    return value

View File

@@ -0,0 +1,288 @@
"""
Excel 文件解析器 - 解析 .xlsx 和 .xls 文件
"""
from typing import Any, Dict, List, Optional
from pathlib import Path
import pandas as pd
import logging
from .base import BaseParser, ParseResult
logger = logging.getLogger(__name__)
class XlsxParser(BaseParser):
    """Excel file parser for .xlsx and .xls workbooks, backed by pandas.

    Fix over the original: every `pd.ExcelFile` is now opened via a context
    manager so the underlying file handle is closed promptly. A leaked
    handle keeps the workbook locked on Windows and would block the
    delete-file endpoint from removing uploads.
    """

    def __init__(self):
        super().__init__()
        self.supported_extensions = ['.xlsx', '.xls']
        self.parser_name = "excel_parser"

    def parse(
        self,
        file_path: str,
        sheet_name: Optional[str | int] = 0,
        header_row: int = 0,
        **kwargs
    ) -> ParseResult:
        """
        Parse one worksheet of an Excel file.

        Args:
            file_path: path to the workbook
            sheet_name: sheet name or index; defaults to the first sheet
            header_row: row index holding the column headers (default 0)
            **kwargs: extra arguments forwarded to pandas.read_excel

        Returns:
            ParseResult: parsed rows/columns plus workbook metadata
        """
        path = Path(file_path)
        # Up-front validation: existence, extension, non-empty file.
        if not path.exists():
            return ParseResult(
                success=False,
                error=f"File not found: {file_path}"
            )
        if path.suffix.lower() not in self.supported_extensions:
            return ParseResult(
                success=False,
                error=f"Unsupported file type: {path.suffix}"
            )
        file_size = path.stat().st_size
        if file_size == 0:
            return ParseResult(
                success=False,
                error=f"File is empty: {file_path}"
            )
        try:
            # Open the workbook only to list its sheets; the context manager
            # closes the handle immediately afterwards.
            with pd.ExcelFile(file_path) as xls_file:
                sheet_names = xls_file.sheet_names
            if not sheet_names:
                return ParseResult(
                    success=False,
                    error=f"Excel 文件没有找到任何工作表: {file_path}"
                )
            # Resolve the requested sheet; an invalid index/name silently
            # falls back to the first sheet.
            if sheet_name is not None:
                if isinstance(sheet_name, int) and sheet_name < len(sheet_names):
                    target_sheet = sheet_names[sheet_name]
                elif isinstance(sheet_name, str) and sheet_name in sheet_names:
                    target_sheet = sheet_name
                else:
                    target_sheet = sheet_names[0]
            else:
                target_sheet = sheet_names[0]
            df = pd.read_excel(
                file_path,
                sheet_name=target_sheet,
                header=header_row,
                **kwargs
            )
            if df.empty:
                return ParseResult(
                    success=False,
                    error=f"工作表 '{target_sheet}' 为空,请检查 Excel 文件内容"
                )
            data = self._df_to_dict(df)
            metadata = {
                "filename": path.name,
                "extension": path.suffix.lower(),
                "sheet_count": len(sheet_names),
                "sheet_names": sheet_names,
                "current_sheet": target_sheet,
                "row_count": len(df),
                "column_count": len(df.columns) if not df.empty else 0,
                "columns": df.columns.tolist() if not df.empty else [],
                "file_size": file_size
            }
            return ParseResult(
                success=True,
                data=data,
                metadata=metadata
            )
        except IndexError as e:
            logger.error(f"工作表索引错误: {str(e)}")
            # Sheet index out of range: retry with the first sheet.
            return self._retry_first_sheet(path, file_path, header_row, e, **kwargs)
        except Exception as e:
            logger.error(f"解析 Excel 文件时出错: {str(e)}")
            return ParseResult(
                success=False,
                error=f"Failed to parse Excel file: {str(e)}"
            )

    def _retry_first_sheet(
        self,
        path: Path,
        file_path: str,
        header_row: int,
        original_error: Exception,
        **kwargs
    ) -> ParseResult:
        """Fallback for an out-of-range sheet index: parse the first sheet."""
        try:
            with pd.ExcelFile(file_path) as xls_file:
                sheet_names = xls_file.sheet_names
            if not sheet_names:
                return ParseResult(
                    success=False,
                    error="Excel 文件没有有效的工作表"
                )
            df = pd.read_excel(
                file_path,
                sheet_name=sheet_names[0],
                header=header_row,
                **kwargs
            )
            data = self._df_to_dict(df)
            metadata = {
                "filename": path.name,
                "extension": path.suffix.lower(),
                "sheet_count": len(sheet_names),
                "sheet_names": sheet_names,
                "current_sheet": sheet_names[0],
                "row_count": len(df),
                "column_count": len(df.columns) if not df.empty else 0,
                "columns": df.columns.tolist() if not df.empty else [],
                "file_size": path.stat().st_size
            }
            return ParseResult(
                success=True,
                data=data,
                metadata=metadata
            )
        except Exception as e2:
            logger.error(f"重试解析失败: {str(e2)}")
            # Report the original index error, as before.
            return ParseResult(
                success=False,
                error=f"无法解析 Excel 文件: {str(original_error)}"
            )

    def parse_all_sheets(self, file_path: str, **kwargs) -> ParseResult:
        """
        Parse every worksheet in an Excel file.

        Args:
            file_path: path to the workbook
            **kwargs: extra arguments forwarded to pandas.read_excel

        Returns:
            ParseResult: data keyed by sheet name under "sheets", plus metadata
        """
        path = Path(file_path)
        if not path.exists():
            return ParseResult(
                success=False,
                error=f"File not found: {file_path}"
            )
        if path.suffix.lower() not in self.supported_extensions:
            return ParseResult(
                success=False,
                error=f"Unsupported file type: {path.suffix}"
            )
        file_size = path.stat().st_size
        if file_size == 0:
            return ParseResult(
                success=False,
                error=f"File is empty: {file_path}"
            )
        try:
            # sheet_name=None makes pandas return {sheet_name: DataFrame}.
            all_data = pd.read_excel(file_path, sheet_name=None, **kwargs)
            if not all_data:
                return ParseResult(
                    success=False,
                    error=f"无法读取 Excel 文件或文件为空: {file_path}"
                )
            sheets_data = {name: self._df_to_dict(df) for name, df in all_data.items()}
            all_sheets = list(all_data.keys())
            total_rows = sum(len(df) for df in all_data.values())
            metadata = {
                "filename": path.name,
                "extension": path.suffix.lower(),
                "sheet_count": len(all_sheets),
                "sheet_names": all_sheets,
                "total_rows": total_rows,
                "file_size": file_size
            }
            return ParseResult(
                success=True,
                data={"sheets": sheets_data},
                metadata=metadata
            )
        except Exception as e:
            logger.error(f"Failed to parse Excel file: {str(e)}")
            return ParseResult(
                success=False,
                error=f"Failed to parse Excel file: {str(e)}"
            )

    def _get_sheet_names(self, file_path: str) -> List[str]:
        """Return all worksheet names, or [] when the file cannot be read."""
        try:
            # Context manager guarantees the workbook handle is released.
            with pd.ExcelFile(file_path) as xls:
                return list(xls.sheet_names)
        except Exception as e:
            logger.error(f"获取工作表名称失败: {str(e)}")
            return []

    def _df_to_dict(self, df: pd.DataFrame) -> Dict[str, Any]:
        """
        Convert a DataFrame to a JSON-serializable dict, mapping NaN to None.

        Args:
            df: source pandas DataFrame

        Returns:
            Dict[str, Any]: columns, row records, and counts
        """
        # Replace pandas missing values with None so the payload serializes.
        df = df.replace({pd.NA: None, float('nan'): None})
        rows = df.to_dict(orient='records')
        return {
            "columns": df.columns.tolist(),
            "rows": rows,
            "row_count": len(rows),
            "column_count": len(df.columns) if not df.empty else 0
        }
}

View File

@@ -1,18 +1,61 @@
"""
FastAPI 应用主入口
"""
from fastapi import FastAPI
from config import settings
from fastapi.middleware.cors import CORSMiddleware
from app.config import settings
from app.api import api_router
# 创建 FastAPI 应用实例
app = FastAPI(
title=settings.APP_NAME,
openapi_url=f"{settings.API_V1_STR}/openapi.json"
description="基于大语言模型的文档理解与多源数据融合系统",
version="1.0.0",
openapi_url=f"{settings.API_V1_STR}/openapi.json",
docs_url=f"{settings.API_V1_STR}/docs",
redoc_url=f"{settings.API_V1_STR}/redoc"
)
# 配置 CORS 中间件
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# 注册 API 路由
app.include_router(api_router, prefix=settings.API_V1_STR)
@app.get("/")
async def root():
"""根路径"""
return {
"message": f"Welcome to {settings.APP_NAME}",
"status": "online",
"debug_mode": settings.DEBUG
"version": "1.0.0",
"debug_mode": settings.DEBUG,
"api_docs": f"{settings.API_V1_STR}/docs"
}
@app.get("/health")
async def health_check():
"""健康检查接口"""
return {
"status": "healthy",
"service": settings.APP_NAME
}
if __name__ == "__main__":
import uvicorn
uvicorn.run("main:app", host="127.0.0.1", port=8000, reload=True)
uvicorn.run(
"app.main:app",
host="127.0.0.1",
port=8000,
reload=settings.DEBUG
)

View File

@@ -0,0 +1,349 @@
"""
图表生成服务 - 根据结构化数据生成图表
"""
import io
import base64
import logging
from typing import Dict, Any, List, Optional
from pathlib import Path
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
# 使用字体辅助模块配置中文字体
from app.services.font_helper import configure_matplotlib_fonts
configure_matplotlib_fonts()
logger = logging.getLogger(__name__)
class ChartGeneratorService:
"""图表生成服务类"""
def __init__(self):
self.output_dir = Path(__file__).resolve().parent.parent.parent / "data" / "charts"
self.output_dir.mkdir(parents=True, exist_ok=True)
def generate_charts_from_analysis(
self,
structured_data: Dict[str, Any]
) -> Dict[str, Any]:
"""
根据提取的结构化数据生成图表
Args:
structured_data: 从 AI 分析结果中提取的结构化数据
Returns:
Dict[str, Any]: 包含图表数据的结果
"""
if not structured_data.get("success"):
return {
"success": False,
"error": structured_data.get("error", "数据提取失败")
}
data = structured_data.get("data", {})
charts = {}
statistics = {}
try:
# 1. 数值型数据图表
numeric_data = data.get("numeric_data", [])
if numeric_data:
charts["numeric_charts"] = self._create_numeric_charts(numeric_data)
statistics["numeric_summary"] = self._create_numeric_summary(numeric_data)
# 2. 分类数据图表
categorical_data = data.get("categorical_data", [])
if categorical_data:
charts["categorical_charts"] = self._create_categorical_charts(categorical_data)
# 3. 时间序列图表
time_series_data = data.get("time_series_data", [])
if time_series_data:
charts["time_series_chart"] = self._create_time_series_chart(time_series_data)
# 4. 对比数据图表
comparison_data = data.get("comparison_data", [])
if comparison_data:
charts["comparison_chart"] = self._create_comparison_chart(comparison_data)
# 5. 表格数据可视化
table_data = data.get("table_data")
if table_data:
charts["table_preview"] = self._create_table_preview(table_data)
# 元数据
metadata = data.get("metadata", {})
return {
"success": True,
"charts": charts,
"statistics": statistics,
"metadata": metadata,
"data_source": "ai_analysis"
}
except Exception as e:
logger.error(f"生成图表失败: {str(e)}", exc_info=True)
return {
"success": False,
"error": str(e)
}
def _create_numeric_charts(self, numeric_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
"""创建数值型数据图表"""
charts = []
# 提取数值和标签
names = [item.get("name", f"{i}") for i, item in enumerate(numeric_data)]
values = [item.get("value", 0) for item in numeric_data]
if not values:
return charts
# 1. 柱状图
try:
fig, ax = plt.subplots(figsize=(12, 7))
colors = plt.cm.Set3(np.linspace(0, 1, len(values)))
bars = ax.bar(names, values, color=colors, alpha=0.8, edgecolor='black', linewidth=0.5)
# 添加数值标签
for bar, value in zip(bars, values):
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width() / 2., height,
f'{value:,.0f}',
ha='center', va='bottom', fontsize=9, fontweight='bold')
ax.set_xlabel('项目', fontsize=10, labelpad=10, fontweight='bold')
ax.set_ylabel('数值', fontsize=10, labelpad=10, fontweight='bold')
ax.set_title('数值型数据对比', fontsize=12, fontweight='bold', pad=15)
ax.set_xticklabels(names, rotation=30, ha='right', fontsize=9)
ax.tick_params(axis='both', which='major', labelsize=9)
plt.grid(axis='y', alpha=0.3)
plt.tight_layout(pad=1.5)
img_base64 = self._figure_to_base64(fig)
charts.append({
"type": "bar",
"title": "数值型数据对比",
"image": img_base64,
"data": [{"name": n, "value": v} for n, v in zip(names, values)]
})
except Exception as e:
logger.error(f"创建柱状图失败: {str(e)}")
# 2. 饼图
if len(values) > 0 and len(values) <= 10:
try:
fig, ax = plt.subplots(figsize=(10, 10))
wedges, texts, autotexts = ax.pie(values, labels=names, autopct='%1.1f%%',
startangle=90, colors=plt.cm.Set3.colors[:len(values)])
for autotext in autotexts:
autotext.set_color('white')
autotext.set_fontsize(9)
autotext.set_fontweight('bold')
ax.set_title('数值型数据占比', fontsize=12, fontweight='bold', pad=15)
img_base64 = self._figure_to_base64(fig)
charts.append({
"type": "pie",
"title": "数值型数据占比",
"image": img_base64,
"data": [{"name": n, "value": v} for n, v in zip(names, values)]
})
except Exception as e:
logger.error(f"创建饼图失败: {str(e)}")
return charts
def _create_categorical_charts(self, categorical_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
"""创建分类数据图表"""
charts = []
# 提取数据
names = [item.get("name", f"{i}") for i, item in enumerate(categorical_data)]
counts = [item.get("count", 1) for item in categorical_data]
if not names or not counts:
return charts
# 水平条形图
try:
fig, ax = plt.subplots(figsize=(10, max(6, len(names) * 0.8)))
y_pos = np.arange(len(names))
bars = ax.barh(y_pos, counts, align='center', color='#10b981', alpha=0.8, edgecolor='black', linewidth=0.5)
# 添加数值标签
for bar, count in zip(bars, counts):
width = bar.get_width()
ax.text(width, bar.get_y() + bar.get_height() / 2.,
f'{count}',
ha='left', va='center', fontsize=10, fontweight='bold')
ax.set_yticks(y_pos)
ax.set_yticklabels(names, fontsize=10)
ax.invert_yaxis()
ax.set_xlabel('数量', fontsize=10, labelpad=10, fontweight='bold')
ax.set_title('分类数据分布', fontsize=12, fontweight='bold', pad=15)
ax.tick_params(axis='both', which='major', labelsize=9)
ax.grid(axis='x', alpha=0.3)
plt.tight_layout(pad=1.5)
img_base64 = self._figure_to_base64(fig)
charts.append({
"type": "barh",
"title": "分类数据分布",
"image": img_base64,
"data": [{"name": n, "count": c} for n, c in zip(names, counts)]
})
except Exception as e:
logger.error(f"创建分类图表失败: {str(e)}")
return charts
def _create_time_series_chart(self, time_series_data: List[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
"""创建时间序列图表"""
if not time_series_data:
return None
try:
names = [item.get("name", f"时间{i}") for i, item in enumerate(time_series_data)]
values = [item.get("value", 0) for item in time_series_data]
if len(values) < 2:
return None
fig, ax = plt.subplots(figsize=(14, 7))
# 绘制折线图和柱状图
x_pos = np.arange(len(names))
bars = ax.bar(x_pos, values, width=0.4, label='数值', color='#3b82f6', alpha=0.7)
# 添加折线
line = ax.plot(x_pos, values, 'o-', color='#ef4444', linewidth=2.5, markersize=8, label='趋势')
ax.set_xticks(x_pos)
ax.set_xticklabels(names, rotation=30, ha='right', fontsize=9)
ax.set_ylabel('数值', fontsize=10, labelpad=10, fontweight='bold')
ax.set_title('时间序列数据', fontsize=12, fontweight='bold', pad=15)
ax.legend(loc='best', fontsize=9)
ax.tick_params(axis='both', which='major', labelsize=9)
ax.grid(True, alpha=0.3)
plt.tight_layout(pad=1.5)
img_base64 = self._figure_to_base64(fig)
return {
"type": "time_series",
"title": "时间序列数据",
"image": img_base64,
"data": [{"name": n, "value": v} for n, v in zip(names, values)]
}
except Exception as e:
logger.error(f"创建时间序列图表失败: {str(e)}")
return None
def _create_comparison_chart(self, comparison_data: List[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
"""创建对比图表"""
if not comparison_data:
return None
try:
names = [item.get("name", f"对比{i}") for i, item in enumerate(comparison_data)]
values = [item.get("value", 0) for item in comparison_data]
fig, ax = plt.subplots(figsize=(10, 7))
# 区分正负值
colors = ['#10b981' if v >= 0 else '#ef4444' for v in values]
bars = ax.bar(names, values, color=colors, alpha=0.8, edgecolor='black', linewidth=0.8)
# 添加数值标签
for bar, value in zip(bars, values):
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width() / 2., height,
f'{value:,.1f}',
ha='center', va='bottom' if value >= 0 else 'top',
fontsize=10, fontweight='bold')
# 添加零线
ax.axhline(y=0, color='black', linestyle='-', linewidth=1)
ax.set_ylabel('', fontsize=10, labelpad=10, fontweight='bold')
ax.set_title('对比数据', fontsize=12, fontweight='bold', pad=15)
ax.set_xticklabels(names, rotation=30, ha='right', fontsize=9)
ax.tick_params(axis='both', which='major', labelsize=9)
plt.grid(axis='y', alpha=0.3)
plt.tight_layout(pad=1.5)
img_base64 = self._figure_to_base64(fig)
return {
"type": "comparison",
"title": "对比数据",
"image": img_base64,
"data": [{"name": n, "value": v} for n, v in zip(names, values)]
}
except Exception as e:
logger.error(f"创建对比图表失败: {str(e)}")
return None
def _create_table_preview(self, table_data: Dict[str, Any]) -> Dict[str, Any]:
"""创建表格预览数据"""
if not table_data:
return {}
columns = table_data.get("columns", [])
rows = table_data.get("rows", [])
return {
"columns": columns,
"rows": rows[:50], # 限制显示前50行
"total_rows": len(rows),
"preview_rows": min(50, len(rows))
}
def _create_numeric_summary(self, numeric_data: List[Dict[str, Any]]) -> Dict[str, Any]:
"""创建数值型数据摘要"""
values = [item.get("value", 0) for item in numeric_data if isinstance(item.get("value"), (int, float))]
if not values:
return {}
return {
"count": len(values),
"sum": float(sum(values)),
"mean": float(np.mean(values)),
"median": float(np.median(values)),
"min": float(min(values)),
"max": float(max(values)),
"std": float(np.std(values)) if len(values) > 1 else 0
}
    def _figure_to_base64(self, fig) -> str:
        """Serialize a matplotlib figure to a PNG data URL.

        The figure is always closed afterwards to release its memory
        (matplotlib keeps figures alive until explicitly closed).

        Args:
            fig: matplotlib Figure to render.

        Returns:
            str: "data:image/png;base64,..." string embeddable in <img> tags.
        """
        buf = io.BytesIO()
        fig.savefig(
            buf,
            format='png',
            dpi=120,               # slightly above screen DPI for crisp text
            bbox_inches='tight',   # trim excess whitespace around the axes
            pad_inches=0.3,
            facecolor='white',
            edgecolor='none',
            transparent=False
        )
        plt.close(fig)  # free the figure to avoid accumulating open figures
        buf.seek(0)
        img_base64 = base64.b64encode(buf.read()).decode('utf-8')
        return f"data:image/png;base64,{img_base64}"
# Module-level singleton shared by the rest of the app
chart_generator_service = ChartGeneratorService()

View File

@@ -0,0 +1,253 @@
"""
Excel AI 分析服务 - 集成 Excel 解析和 LLM 分析
"""
import logging
from typing import Dict, Any, Optional, List
from app.core.document_parser import XlsxParser
from app.services.file_service import file_service
from app.services.llm_service import llm_service
logger = logging.getLogger(__name__)
class ExcelAIService:
    """Excel AI analysis service - combines Excel parsing with LLM analysis.

    Orchestrates three collaborators: ``XlsxParser`` to read workbooks,
    ``file_service`` to persist uploads, and ``llm_service`` to run the
    AI analysis over the parsed data.
    """

    def __init__(self):
        self.parser = XlsxParser()
        self.file_service = file_service
        self.llm_service = llm_service

    async def analyze_excel_file(
        self,
        file_content: bytes,
        filename: str,
        user_prompt: str = "",
        analysis_type: str = "general",
        parse_options: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """
        Analyze a single Excel file: save it, parse it, then run LLM analysis.

        Args:
            file_content: raw bytes of the uploaded file
            filename: original file name (its suffix is kept for the saved copy)
            user_prompt: optional custom prompt; when non-blank the template
                analysis path is used instead of the standard typed one
            analysis_type: one of the values from get_supported_analysis_types()
            parse_options: keyword options forwarded to the parser

        Returns:
            Dict[str, Any]: {"success", "excel", "analysis"} on success, or
            {"success": False, "error", ...} describing the failing stage.
        """
        # 1. Persist the upload so the parser can work from a file path
        try:
            saved_path = self.file_service.save_uploaded_file(
                file_content,
                filename,
                subfolder="excel"
            )
            logger.info(f"文件已保存: {saved_path}")
        except Exception as e:
            logger.error(f"文件保存失败: {str(e)}")
            return {
                "success": False,
                "error": f"文件保存失败: {str(e)}",
                "analysis": None
            }
        # 2. Parse the Excel file
        try:
            parse_options = parse_options or {}
            parse_result = self.parser.parse(saved_path, **parse_options)
            if not parse_result.success:
                return {
                    "success": False,
                    "error": parse_result.error,
                    "analysis": None
                }
            excel_data = parse_result.data
            logger.info(f"Excel 解析成功: {parse_result.metadata}")
        except Exception as e:
            logger.error(f"Excel 解析失败: {str(e)}")
            return {
                "success": False,
                "error": f"Excel 解析失败: {str(e)}",
                "analysis": None
            }
        # 3. Run the LLM analysis
        try:
            # A non-blank custom prompt switches to the template path
            if user_prompt and user_prompt.strip():
                llm_result = await self.llm_service.analyze_with_template(
                    excel_data,
                    user_prompt
                )
            else:
                # Otherwise use the standard typed analysis
                llm_result = await self.llm_service.analyze_excel_data(
                    excel_data,
                    user_prompt,
                    analysis_type
                )
            logger.info(f"AI 分析完成: {llm_result['success']}")
            # 4. Combine parse + analysis results
            return {
                "success": True,
                "excel": {
                    "data": excel_data,
                    "metadata": parse_result.metadata,
                    "saved_path": saved_path
                },
                "analysis": llm_result
            }
        except Exception as e:
            logger.error(f"AI 分析失败: {str(e)}")
            # Parsing succeeded, so still return the Excel payload
            return {
                "success": False,
                "error": f"AI 分析失败: {str(e)}",
                "excel": {
                    "data": excel_data,
                    "metadata": parse_result.metadata
                },
                "analysis": None
            }

    async def batch_analyze_sheets(
        self,
        file_content: bytes,
        filename: str,
        user_prompt: str = "",
        analysis_type: str = "general"
    ) -> Dict[str, Any]:
        """
        Analyze every worksheet of an Excel file in one call.

        Args:
            file_content: raw bytes of the uploaded file
            filename: original file name
            user_prompt: optional custom prompt (template path when non-blank)
            analysis_type: analysis type for the standard path

        Returns:
            Dict[str, Any]: overall success flag (True only when every sheet
            succeeded), the parsed sheets, and per-sheet analyses/errors.
        """
        # 1. Persist the upload
        try:
            saved_path = self.file_service.save_uploaded_file(
                file_content,
                filename,
                subfolder="excel"
            )
            logger.info(f"文件已保存: {saved_path}")
        except Exception as e:
            logger.error(f"文件保存失败: {str(e)}")
            return {
                "success": False,
                "error": f"文件保存失败: {str(e)}",
                "analysis": None
            }
        # 2. Parse all worksheets
        try:
            parse_result = self.parser.parse_all_sheets(saved_path)
            if not parse_result.success:
                return {
                    "success": False,
                    "error": parse_result.error,
                    "analysis": None
                }
            sheets_data = parse_result.data.get("sheets", {})
            logger.info(f"Excel 解析成功,共 {len(sheets_data)} 个工作表")
        except Exception as e:
            logger.error(f"Excel 解析失败: {str(e)}")
            return {
                "success": False,
                "error": f"Excel 解析失败: {str(e)}",
                "analysis": None
            }
        # 3. Analyze each worksheet independently; one failure must not
        #    abort the remaining sheets
        sheet_analyses = {}
        errors = {}
        for sheet_name, sheet_data in sheets_data.items():
            try:
                if user_prompt and user_prompt.strip():
                    llm_result = await self.llm_service.analyze_with_template(
                        sheet_data,
                        user_prompt
                    )
                else:
                    llm_result = await self.llm_service.analyze_excel_data(
                        sheet_data,
                        user_prompt,
                        analysis_type
                    )
                sheet_analyses[sheet_name] = llm_result
                if not llm_result["success"]:
                    errors[sheet_name] = llm_result.get("error", "未知错误")
                logger.info(f"工作表 '{sheet_name}' 分析完成")
            except Exception as e:
                logger.error(f"工作表 '{sheet_name}' 分析失败: {str(e)}")
                errors[sheet_name] = str(e)
        # 4. Combine results
        return {
            "success": len(errors) == 0,
            "excel": {
                "sheets": sheets_data,
                "metadata": parse_result.metadata,
                "saved_path": saved_path
            },
            "analysis": {
                "sheets": sheet_analyses,
                "total_sheets": len(sheets_data),
                # A sheet is successful iff it produced no error entry.
                # The previous len(sheet_analyses) - len(errors) undercounted
                # whenever a sheet raised: that sheet appeared in errors but
                # never in sheet_analyses.
                "successful": len(sheets_data) - len(errors),
                "errors": errors
            }
        }

    def get_supported_analysis_types(self) -> List[Dict[str, str]]:
        """Return the selectable analysis types for the frontend.

        Each entry carries the machine value plus a human label/description.
        (Return annotation corrected: this has always returned dicts, not str.)
        """
        return [
            {
                "value": "general",
                "label": "综合分析",
                "description": "提供数据概览、关键发现、质量评估和建议"
            },
            {
                "value": "summary",
                "label": "数据摘要",
                "description": "快速了解数据的结构、范围和主要内容"
            },
            {
                "value": "statistics",
                "label": "统计分析",
                "description": "数值型列的统计信息和分类列的分布"
            },
            {
                "value": "insights",
                "label": "深度洞察",
                "description": "深入挖掘数据,提供异常值和业务建议"
            }
        ]
# Module-level singleton shared by the API layer
excel_ai_service = ExcelAIService()

View File

@@ -0,0 +1,132 @@
"""
文件服务模块 - 处理文件存储和读取
"""
import os
import shutil
from pathlib import Path
from datetime import datetime
from typing import Optional
import uuid
from app.config import settings
class FileService:
    """Handles storage, retrieval and management of uploaded files."""

    def __init__(self):
        self.upload_dir = Path(settings.UPLOAD_DIR)
        self._ensure_upload_dir()

    def _ensure_upload_dir(self):
        """Create the upload directory tree if it does not exist yet."""
        self.upload_dir.mkdir(parents=True, exist_ok=True)

    def save_uploaded_file(
        self,
        file_content: bytes,
        filename: str,
        subfolder: Optional[str] = None
    ) -> str:
        """
        Persist an uploaded file under a collision-free name.

        Args:
            file_content: raw bytes of the upload
            filename: original file name (only its suffix is kept)
            subfolder: optional subdirectory under the upload root

        Returns:
            str: path of the stored file.
        """
        # Random hex name keeps the original suffix but can never clash
        unique_name = f"{uuid.uuid4().hex}{Path(filename).suffix}"
        if subfolder:
            target_dir = self.upload_dir / subfolder
            target_dir.mkdir(parents=True, exist_ok=True)
        else:
            target_dir = self.upload_dir
        destination = target_dir / unique_name
        destination.write_bytes(file_content)
        return str(destination)

    def read_file(self, file_path: str) -> bytes:
        """
        Read a file's full contents.

        Args:
            file_path: path of the file to read

        Returns:
            bytes: the file contents.
        """
        return Path(file_path).read_bytes()

    def delete_file(self, file_path: str) -> bool:
        """
        Delete a file if it exists.

        Args:
            file_path: path of the file to remove

        Returns:
            bool: True when the file existed and was removed, False otherwise
            (including on any filesystem error).
        """
        try:
            target = Path(file_path)
            if not target.exists():
                return False
            target.unlink()
            return True
        except Exception:
            return False

    def get_file_info(self, file_path: str) -> dict:
        """
        Collect basic metadata about a file.

        Args:
            file_path: path of the file to inspect

        Returns:
            dict: name, path, size, created/modified timestamps (ISO 8601)
            and lowercase extension; {} when the file does not exist.
            Note: st_ctime is platform-dependent (metadata-change time on
            POSIX, creation time on Windows).
        """
        target = Path(file_path)
        if not target.exists():
            return {}
        stat = target.stat()
        return {
            "filename": target.name,
            "filepath": str(target),
            "size": stat.st_size,
            "created": datetime.fromtimestamp(stat.st_ctime).isoformat(),
            "modified": datetime.fromtimestamp(stat.st_mtime).isoformat(),
            "extension": target.suffix.lower()
        }

    def get_file_size(self, file_path: str) -> int:
        """
        Return a file's size in bytes.

        Args:
            file_path: path of the file

        Returns:
            int: size in bytes, or 0 when the file does not exist.
        """
        target = Path(file_path)
        if not target.exists():
            return 0
        return target.stat().st_size
# Module-level singleton shared by the other services
file_service = FileService()

View File

@@ -0,0 +1,105 @@
"""
字体辅助模块 - 处理中文字体检测和配置
"""
import matplotlib
import matplotlib.font_manager as fm
import platform
import os
from pathlib import Path
import logging
logger = logging.getLogger(__name__)
def get_chinese_font() -> str:
    """
    Pick a Chinese-capable font installed on this machine.

    Checks a per-platform priority list against matplotlib's font registry,
    then falls back to any registered font whose name suggests CJK support.

    Returns:
        str: name of the first usable font, or 'sans-serif' as a last resort.
    """
    # Every font matplotlib knows about on this system
    installed = {font.name for font in fm.fontManager.ttflist}
    system = platform.system()
    if system == 'Windows':
        candidates = [
            'Microsoft YaHei',   # 微软雅黑
            'SimHei',            # 黑体
            'SimSun',            # 宋体
            'KaiTi',             # 楷体
            'FangSong',          # 仿宋
            'STXihei',           # 华文细黑
            'STKaiti',           # 华文楷体
            'STSong',            # 华文宋体
            'STFangsong',        # 华文仿宋
        ]
    elif system == 'Darwin':
        candidates = [
            'PingFang SC',
            'PingFang TC',
            'Heiti SC',
            'Heiti TC',
            'STHeiti',
            'STSong',
            'STKaiti',
            'Arial Unicode MS',
        ]
    else:  # Linux and everything else
        candidates = [
            'Noto Sans CJK SC',
            'WenQuanYi Micro Hei',
            'AR PL UMing CN',
            'AR PL UKai CN',
            'ZCOOL XiaoWei',
        ]
    # Cross-platform fallbacks appended after the platform-specific picks
    candidates += [
        'SimHei',
        'Microsoft YaHei',
        'Arial Unicode MS',
        'Droid Sans Fallback',
    ]
    for name in candidates:
        if name in installed:
            logger.info(f"找到中文字体: {name}")
            return name
    # Nothing from the list: accept any font whose name hints at CJK support
    for font in fm.fontManager.ttflist:
        if 'CJK' in font.name or 'SC' in font.name or 'TC' in font.name:
            logger.info(f"使用找到的中文字体: {font.name}")
            return font.name
    # Give up and let matplotlib use its default family
    logger.warning("未找到合适的中文字体,使用默认字体")
    return 'sans-serif'
def configure_matplotlib_fonts():
    """
    Apply global matplotlib rcParams so charts render Chinese text correctly.

    Returns:
        str: the font name that was configured.
    """
    chinese_font = get_chinese_font()
    rc = matplotlib.rcParams
    # Font family + correct rendering of the unicode minus sign
    rc['font.sans-serif'] = [chinese_font]
    rc['axes.unicode_minus'] = False
    rc['figure.dpi'] = 100
    rc['savefig.dpi'] = 120
    # Default sizes for the various chart text elements
    rc['font.size'] = 10
    rc['axes.labelsize'] = 10
    rc['axes.titlesize'] = 11
    rc['xtick.labelsize'] = 9
    rc['ytick.labelsize'] = 9
    rc['legend.fontsize'] = 9
    logger.info(f"配置完成,使用字体: {chinese_font}")
    return chinese_font

View File

@@ -0,0 +1,268 @@
"""
LLM 服务模块 - 封装大模型 API 调用
"""
import logging
from typing import Dict, Any, List, Optional
import httpx
from app.config import settings
logger = logging.getLogger(__name__)
class LLMService:
    """Thin wrapper around an OpenAI-compatible chat-completions API.

    Reads endpoint, key and model name from application settings and exposes
    Excel-analysis helpers built on top of the raw chat call.
    """

    def __init__(self):
        self.api_key = settings.LLM_API_KEY
        self.base_url = settings.LLM_BASE_URL
        self.model_name = settings.LLM_MODEL_NAME

    async def chat(
        self,
        messages: List[Dict[str, str]],
        temperature: float = 0.7,
        max_tokens: Optional[int] = None,
        **kwargs
    ) -> Dict[str, Any]:
        """
        Call the chat-completions endpoint.

        Args:
            messages: conversation as [{"role": ..., "content": ...}]
            temperature: sampling temperature (higher = more random)
            max_tokens: optional cap on generated tokens
            **kwargs: extra payload fields forwarded verbatim to the API

        Returns:
            Dict[str, Any]: the raw JSON response from the API.

        Raises:
            httpx.HTTPStatusError: for non-2xx responses.
            Exception: transport/protocol failures are logged and re-raised.
        """
        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json"
        }
        payload = {
            "model": self.model_name,
            "messages": messages,
            "temperature": temperature
        }
        # Explicit None check: the previous truthiness test (`if max_tokens:`)
        # silently dropped any falsy value such as 0 from the payload.
        if max_tokens is not None:
            payload["max_tokens"] = max_tokens
        # Forward any extra fields (top_p, stop, ...) untouched
        payload.update(kwargs)
        try:
            async with httpx.AsyncClient(timeout=60.0) as client:
                response = await client.post(
                    f"{self.base_url}/chat/completions",
                    headers=headers,
                    json=payload
                )
                response.raise_for_status()
                return response.json()
        except httpx.HTTPStatusError as e:
            logger.error(f"LLM API 请求失败: {e.response.status_code} - {e.response.text}")
            raise
        except Exception as e:
            logger.error(f"LLM API 调用异常: {str(e)}")
            raise

    def extract_message_content(self, response: Dict[str, Any]) -> str:
        """
        Pull the assistant message text out of an API response.

        Args:
            response: raw chat-completions response

        Returns:
            str: content of the first choice's message.

        Raises:
            KeyError / IndexError: when the response shape is unexpected
            (logged, then re-raised).
        """
        try:
            return response["choices"][0]["message"]["content"]
        except (KeyError, IndexError) as e:
            logger.error(f"解析 API 响应失败: {str(e)}")
            raise

    async def analyze_excel_data(
        self,
        excel_data: Dict[str, Any],
        user_prompt: str,
        analysis_type: str = "general"
    ) -> Dict[str, Any]:
        """
        Run the standard typed analysis over parsed Excel data.

        Args:
            excel_data: parsed Excel payload (columns/rows/counts)
            user_prompt: extra user requirement appended to the data message
            analysis_type: one of general / summary / statistics / insights

        Returns:
            Dict[str, Any]: {"success": True, "analysis": str, ...} or
            {"success": False, "error": str, "analysis": None}.
        """
        # Build the prompt pair from the analysis type and the data sample
        system_prompt = self._get_system_prompt(analysis_type)
        user_message = self._format_user_message(excel_data, user_prompt)
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_message}
        ]
        try:
            response = await self.chat(
                messages=messages,
                temperature=0.3,  # low temperature for stable, factual output
                max_tokens=2000
            )
            content = self.extract_message_content(response)
            return {
                "success": True,
                "analysis": content,
                "model": self.model_name,
                "analysis_type": analysis_type
            }
        except Exception as e:
            logger.error(f"Excel 数据分析失败: {str(e)}")
            return {
                "success": False,
                "error": str(e),
                "analysis": None
            }

    def _get_system_prompt(self, analysis_type: str) -> str:
        """Return the system prompt for the given type ("general" fallback)."""
        prompts = {
            "general": """你是一个专业的数据分析师。请分析用户提供的 Excel 数据,提供有价值的见解和建议。
请按照以下格式输出:
1. 数据概览
2. 关键发现
3. 数据质量评估
4. 建议
输出语言:中文""",
            "summary": """你是一个专业的数据分析师。请对用户提供的 Excel 数据进行简洁的总结。
输出格式:
- 数据行数和列数
- 主要列的说明
- 数据范围概述
输出语言:中文""",
            "statistics": """你是一个专业的数据分析师。请对用户提供的 Excel 数据进行统计分析。
请分析:
- 数值型列的统计信息(平均值、中位数、最大值、最小值)
- 分类列的分布情况
- 数据相关性
输出语言:中文,使用表格或结构化格式展示""",
            "insights": """你是一个专业的数据分析师。请深入挖掘用户提供的 Excel 数据,提供有价值的洞察。
请分析:
1. 数据中的异常值或特殊模式
2. 数据之间的潜在关联
3. 基于数据的业务建议
4. 数据趋势分析(如适用)
输出语言:中文,提供详细且可操作的建议"""
        }
        return prompts.get(analysis_type, prompts["general"])

    def _format_user_message(self, excel_data: Dict[str, Any], user_prompt: str) -> str:
        """Format the data overview + sample rows + user requirement message."""
        columns = excel_data.get("columns", [])
        rows = excel_data.get("rows", [])
        row_count = excel_data.get("row_count", 0)
        column_count = excel_data.get("column_count", 0)
        # Overview header
        data_info = f"""
Excel 数据概览:
- 行数: {row_count}
- 列数: {column_count}
- 列名: {', '.join(columns)}
数据样例(前 5 行):
"""
        # Only the first 5 rows are sent to keep the prompt small
        for i, row in enumerate(rows[:5], 1):
            row_str = " | ".join([f"{col}: {row.get(col, '')}" for col in columns])
            data_info += f"第 {i} 行: {row_str}\n"
        if row_count > 5:
            data_info += f"\n(还有 {row_count - 5} 行数据...)\n"
        # Append the user's requirement (or a default request)
        if user_prompt and user_prompt.strip():
            data_info += f"\n用户需求:\n{user_prompt}"
        else:
            data_info += "\n用户需求: 请对上述数据进行分析"
        return data_info

    async def analyze_with_template(
        self,
        excel_data: Dict[str, Any],
        template_prompt: str
    ) -> Dict[str, Any]:
        """
        Analyze Excel data with a caller-supplied prompt template.

        Args:
            excel_data: parsed Excel payload
            template_prompt: custom prompt driving the analysis

        Returns:
            Dict[str, Any]: {"success": True, "analysis": str, ...} or
            {"success": False, "error": str, "analysis": None}.
        """
        system_prompt = """你是一个专业的数据分析师。请根据用户提供的自定义提示词分析 Excel 数据。
请严格按照用户的要求进行分析,输出清晰、有条理的结果。
输出语言:中文"""
        user_message = self._format_user_message(excel_data, template_prompt)
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_message}
        ]
        try:
            response = await self.chat(
                messages=messages,
                temperature=0.5,
                max_tokens=3000
            )
            content = self.extract_message_content(response)
            return {
                "success": True,
                "analysis": content,
                "model": self.model_name,
                "is_template": True
            }
        except Exception as e:
            logger.error(f"自定义模板分析失败: {str(e)}")
            return {
                "success": False,
                "error": str(e),
                "analysis": None
            }
# Module-level singleton shared by the other services
llm_service = LLMService()

View File

@@ -0,0 +1,218 @@
"""
文本分析服务 - 从 AI 分析结果中提取结构化数据用于可视化
"""
import logging
from typing import Dict, Any, List, Optional
import re
import json
from app.services.llm_service import llm_service
logger = logging.getLogger(__name__)
class TextAnalysisService:
    """Extracts chart-ready structured data from free-text AI analysis results."""

    def __init__(self):
        self.llm_service = llm_service

    async def extract_structured_data(
        self,
        analysis_text: str,
        original_filename: str = "",
        file_type: str = "text"
    ) -> Dict[str, Any]:
        """
        Ask the LLM to turn an analysis text into structured JSON data.

        Args:
            analysis_text: the AI analysis result to mine for data points
            original_filename: original file name (context for the LLM)
            file_type: source file type (context for the LLM)

        Returns:
            Dict[str, Any]: the parsed JSON on success, or
            {"success": False, "error": ...} on failure.
        """
        # Cap the text length to stay within the LLM token budget
        max_text_length = 8000
        truncated_text = analysis_text[:max_text_length]
        system_prompt = """你是一个专业的数据提取助手。你的任务是从AI分析结果中提取结构化数据用于生成图表。
请按照以下要求提取数据:
1. 数值型数据:
- 提取所有的数值、统计信息、百分比等
- 为每个数值创建一个条目,包含:名称、值、单位(如果有)
- 格式示例:{"name": "销售额", "value": 123456.78, "unit": ""}
2. 分类数据:
- 提取所有的类别、状态、枚举值等
- 为每个类别创建一个条目,包含:名称、值、数量(如果有)
- 格式示例:{"name": "产品类别", "value": "电子产品", "count": 25}
3. 时间序列数据:
- 提取所有的时间相关数据(年月、季度、日期等)
- 格式示例:{"name": "2025年1月", "value": 12345}
4. 对比数据:
- 提取所有的对比、排名、趋势等数据
- 格式示例:{"name": "同比增长", "value": 15.3, "unit": "%"}
5. 表格数据:
- 如果分析结果中包含表格或列表形式的数据,提取出来
- 格式:{"columns": ["列1", "列2"], "rows": [{"列1": "值1", "列2": "值2"}]}
重要规则:
- 只提取明确提到的数据和数值
- 如果某种类型的数据不存在,返回空数组 []
- 确保所有数值都是有效的数字类型
- 保持数据的原始精度
- 返回的 JSON 必须完整且格式正确
- 表格数据最多提取 20 行
请以 JSON 格式返回,不要添加任何 Markdown 标记或解释文字,只返回纯 JSON
{
"success": true,
"data": {
"numeric_data": [
{"name": string, "value": number, "unit": string|null}
],
"categorical_data": [
{"name": string, "value": string, "count": number|null}
],
"time_series_data": [
{"name": string, "value": number}
],
"comparison_data": [
{"name": string, "value": number, "unit": string|null}
],
"table_data": {
"columns": string[],
"rows": object[]
} | null
},
"metadata": {
"total_items": number,
"data_types": string[]
}
}"""
        user_message = f"""请从以下 AI 分析结果中提取结构化数据:
原始文件名:{original_filename}
文件类型:{file_type}
AI 分析结果:
{truncated_text}
请按照系统提示的要求提取数据并返回纯 JSON 格式。"""
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_message}
        ]
        try:
            logger.info(f"开始提取结构化数据,文本长度: {len(truncated_text)}")
            response = await self.llm_service.chat(
                messages=messages,
                temperature=0.1,
                max_tokens=4000
            )
            content = self.llm_service.extract_message_content(response)
            logger.info(f"LLM 返回内容长度: {len(content)}")
            # Parse the (possibly fenced / noisy) LLM output into JSON
            result = self._extract_json_simple(content)
            if not result:
                logger.error("无法从 LLM 响应中提取有效的 JSON")
                return {
                    "success": False,
                    "error": "AI 返回的数据格式不正确或被截断",
                    "raw_content": content[:500]
                }
            logger.info(f"成功提取结构化数据")
            return result
        except Exception as e:
            logger.error(f"提取结构化数据失败: {str(e)}")
            return {
                "success": False,
                "error": str(e)
            }

    def _extract_json_simple(self, content: str) -> Optional[Dict[str, Any]]:
        """
        Extract the first JSON object from an LLM reply.

        Tries three strategies in order:
        1. a fenced ```json code block,
        2. brace matching on the first top-level {...} span,
        3. parsing the whole content directly.

        Args:
            content: raw text returned by the LLM.

        Returns:
            Optional[Dict[str, Any]]: parsed JSON, or None when nothing parses.
        """
        try:
            # Strategy 1: fenced ```json block.  The previous pattern had no
            # capture group, so group(1) raised IndexError and aborted the
            # whole extraction whenever a fence was present; capture the
            # fence body instead.
            code_block_match = re.search(r'```json\s*([\s\S]*?)```', content)
            if code_block_match:
                json_str = code_block_match.group(1)
                logger.info("从代码块中提取 JSON")
                try:
                    return json.loads(json_str)
                except json.JSONDecodeError:
                    # Fence body was not valid JSON - fall through to the
                    # brace scan instead of giving up entirely
                    pass
            # Strategy 2: first balanced { ... } object in the text
            brace_count = 0
            json_start = -1
            for i in range(len(content)):
                if content[i] == '{':
                    if brace_count == 0:
                        json_start = i
                    brace_count += 1
                elif content[i] == '}':
                    brace_count -= 1
                    if brace_count == 0:
                        # Found a complete top-level JSON object
                        json_end = i + 1
                        json_str = content[json_start:json_end]
                        logger.info(f"从大括号中提取 JSON")
                        return json.loads(json_str)
            # Strategy 3: the whole content might already be bare JSON
            logger.info("尝试直接解析整个内容")
            return json.loads(content)
        except json.JSONDecodeError as e:
            logger.error(f"JSON 解析失败: {str(e)}")
            logger.error(f"原始内容(前 500 字符): {content[:500]}...")
            return None
        except Exception as e:
            logger.error(f"提取 JSON 失败: {str(e)}")
            return None

    def detect_data_types(self, data: Dict[str, Any]) -> List[str]:
        """Return the list of non-empty data categories present in *data*."""
        types = []
        d = data.get("data", {})
        if d.get("numeric_data") and len(d["numeric_data"]) > 0:
            types.append("numeric")
        if d.get("categorical_data") and len(d["categorical_data"]) > 0:
            types.append("categorical")
        if d.get("time_series_data") and len(d["time_series_data"]) > 0:
            types.append("time_series")
        if d.get("comparison_data") and len(d["comparison_data"]) > 0:
            types.append("comparison")
        if d.get("table_data") and d["table_data"]:
            types.append("table")
        return types
# Module-level singleton shared by the API layer
text_analysis_service = TextAnalysisService()

View File

@@ -0,0 +1,388 @@
"""
数据可视化服务 - 使用 matplotlib/plotly 生成统计图表
"""
import io
import base64
import logging
from typing import Dict, Any, List, Optional, Union
from pathlib import Path
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
# 使用字体辅助模块配置中文字体
from app.services.font_helper import configure_matplotlib_fonts
configure_matplotlib_fonts()
logger = logging.getLogger(__name__)
class VisualizationService:
    """Data visualization service.

    Turns parsed Excel data (column names + row dicts) into descriptive
    statistics and a set of base64-encoded matplotlib charts.
    """

    def __init__(self):
        # Directory where charts could be persisted: <project>/data/charts
        self.output_dir = Path(__file__).resolve().parent.parent.parent / "data" / "charts"
        self.output_dir.mkdir(parents=True, exist_ok=True)

    def analyze_and_visualize(
        self,
        excel_data: Dict[str, Any],
        analysis_type: str = "statistics"
    ) -> Dict[str, Any]:
        """
        Analyze the data and generate visualization charts.

        Args:
            excel_data: parsed Excel payload with "columns" and "rows"
            analysis_type: analysis type label (not branched on here)

        Returns:
            Dict[str, Any]: statistics, charts and distribution info on
            success, or {"success": False, "error": ...} on failure.
        """
        try:
            columns = excel_data.get("columns", [])
            rows = excel_data.get("rows", [])
            if not columns or not rows:
                return {
                    "success": False,
                    "error": "没有数据可用于分析"
                }
            # Convert to a DataFrame so pandas/numpy tooling applies below
            df = pd.DataFrame(rows, columns=columns)
            # Partition columns by dtype: numeric vs everything else
            numeric_columns = df.select_dtypes(include=[np.number]).columns.tolist()
            categorical_columns = df.select_dtypes(exclude=[np.number]).columns.tolist()
            # Descriptive statistics per column
            statistics = self._generate_statistics(df, numeric_columns, categorical_columns)
            # Charts: histograms, bar charts, box plots, correlation heatmap
            charts = self._generate_charts(df, numeric_columns, categorical_columns)
            # Category distribution (counts + percentages)
            distributions = self._generate_distributions(df, categorical_columns)
            return {
                "success": True,
                "statistics": statistics,
                "charts": charts,
                "distributions": distributions,
                "row_count": len(df),
                "column_count": len(columns)
            }
        except Exception as e:
            logger.error(f"可视化分析失败: {str(e)}", exc_info=True)
            return {
                "success": False,
                "error": str(e)
            }

    def _generate_statistics(
        self,
        df: pd.DataFrame,
        numeric_columns: List[str],
        categorical_columns: List[str]
    ) -> Dict[str, Any]:
        """Compute per-column statistics; a failing column is logged and skipped."""
        statistics = {
            "numeric": {},
            "categorical": {}
        }
        # Numeric columns: central tendency, spread, quartiles, missing count
        for col in numeric_columns:
            try:
                stats = {
                    "count": int(df[col].count()),
                    "mean": float(df[col].mean()),
                    "median": float(df[col].median()),
                    "std": float(df[col].std()) if df[col].count() > 1 else 0,
                    "min": float(df[col].min()),
                    "max": float(df[col].max()),
                    "q25": float(df[col].quantile(0.25)),
                    "q75": float(df[col].quantile(0.75)),
                    "missing": int(df[col].isna().sum())
                }
                statistics["numeric"][col] = stats
            except Exception as e:
                logger.warning(f"列 {col} 统计失败: {str(e)}")
        # Categorical columns: cardinality, mode, missing count, full distribution
        for col in categorical_columns:
            try:
                value_counts = df[col].value_counts()
                stats = {
                    "unique": int(df[col].nunique()),
                    "most_common": str(value_counts.index[0]) if len(value_counts) > 0 else "",
                    "most_common_count": int(value_counts.iloc[0]) if len(value_counts) > 0 else 0,
                    "missing": int(df[col].isna().sum()),
                    "distribution": {str(k): int(v) for k, v in value_counts.items()}
                }
                statistics["categorical"][col] = stats
            except Exception as e:
                logger.warning(f"列 {col} 统计失败: {str(e)}")
        return statistics

    def _generate_charts(
        self,
        df: pd.DataFrame,
        numeric_columns: List[str],
        categorical_columns: List[str]
    ) -> Dict[str, Any]:
        """Render the chart set; helpers returning None are silently omitted."""
        charts = {}
        # 1. Histograms for numeric columns (capped at 5 columns)
        charts["histograms"] = []
        for col in numeric_columns[:5]:
            chart_data = self._create_histogram(df[col], col)
            if chart_data:
                charts["histograms"].append(chart_data)
        # 2. Bar charts for categorical columns (capped at 5 columns)
        charts["bar_charts"] = []
        for col in categorical_columns[:5]:
            chart_data = self._create_bar_chart(df[col], col)
            if chart_data:
                charts["bar_charts"].append(chart_data)
        # 3. One combined box plot over up to 5 numeric columns
        charts["box_plots"] = []
        if len(numeric_columns) > 0:
            chart_data = self._create_box_plot(df[numeric_columns[:5]], numeric_columns[:5])
            if chart_data:
                charts["box_plots"].append(chart_data)
        # 4. Correlation heatmap needs at least two numeric columns
        if len(numeric_columns) >= 2:
            chart_data = self._create_correlation_heatmap(df[numeric_columns], numeric_columns)
            if chart_data:
                charts["correlation"] = chart_data
        return charts

    def _create_histogram(self, series: pd.Series, column_name: str) -> Optional[Dict[str, Any]]:
        """Render a 20-bin histogram for one numeric column; None on failure."""
        try:
            fig, ax = plt.subplots(figsize=(11, 7))
            ax.hist(series.dropna(), bins=20, edgecolor='black', alpha=0.7, color='#3b82f6')
            ax.set_xlabel(column_name, fontsize=10, labelpad=10)
            ax.set_ylabel('频数', fontsize=10, labelpad=10)
            ax.set_title(f'{column_name} 分布', fontsize=12, fontweight='bold', pad=15)
            ax.grid(True, alpha=0.3, axis='y')
            ax.tick_params(axis='both', which='major', labelsize=9)
            # Tighten the layout before rendering
            plt.tight_layout(pad=1.5, w_pad=1.0, h_pad=1.0)
            # Serialize to a data URL
            img_base64 = self._figure_to_base64(fig)
            return {
                "type": "histogram",
                "column": column_name,
                "image": img_base64,
                "stats": {
                    "mean": float(series.mean()),
                    # NOTE(review): guard uses len(series) (includes NaN) while
                    # _generate_statistics uses count(); confirm intended.
                    "std": float(series.std()) if len(series) > 1 else 0,
                    "median": float(series.median())
                }
            }
        except Exception as e:
            logger.error(f"创建直方图失败 ({column_name}): {str(e)}")
            return None

    def _create_bar_chart(self, series: pd.Series, column_name: str) -> Optional[Dict[str, Any]]:
        """Render a Top-10 frequency bar chart for one categorical column."""
        try:
            value_counts = series.value_counts().head(10)  # show only the top 10
            fig, ax = plt.subplots(figsize=(12, 7))
            # Truncate long category labels so the axis stays readable
            labels = [str(x)[:15] + '...' if len(str(x)) > 15 else str(x) for x in value_counts.index]
            x_pos = range(len(value_counts))
            bars = ax.bar(x_pos, value_counts.values, color='#10b981', alpha=0.8, edgecolor='black', linewidth=0.5)
            ax.set_xticks(x_pos)
            ax.set_xticklabels(labels, rotation=30, ha='right', fontsize=8)
            ax.set_xlabel(column_name, fontsize=10, labelpad=10)
            ax.set_ylabel('数量', fontsize=10, labelpad=10)
            ax.set_title(f'{column_name} 分布 (Top 10)', fontsize=12, fontweight='bold', pad=15)
            ax.grid(True, alpha=0.3, axis='y')
            ax.tick_params(axis='both', which='major', labelsize=9)
            # Count labels slightly above each bar
            max_val = value_counts.values.max()
            y_offset = max_val * 0.02 if max_val > 0 else 0.5
            for bar, value in zip(bars, value_counts.values):
                ax.text(bar.get_x() + bar.get_width() / 2., value + y_offset,
                        f'{int(value)}',
                        ha='center', va='bottom', fontsize=8, fontweight='bold')
            # Tighten the layout before rendering
            plt.tight_layout(pad=1.5, w_pad=1.0, h_pad=1.0)
            # Serialize to a data URL
            img_base64 = self._figure_to_base64(fig)
            return {
                "type": "bar_chart",
                "column": column_name,
                "image": img_base64,
                "categories": {str(k): int(v) for k, v in value_counts.items()}
            }
        except Exception as e:
            logger.error(f"创建条形图失败 ({column_name}): {str(e)}")
            return None

    def _create_box_plot(self, df: pd.DataFrame, columns: List[str]) -> Optional[Dict[str, Any]]:
        """Render one box plot comparing the given numeric columns."""
        try:
            fig, ax = plt.subplots(figsize=(14, 7))
            # Drop NaN per column; boxplot cannot handle missing values
            box_data = [df[col].dropna() for col in columns]
            bp = ax.boxplot(box_data, labels=columns, patch_artist=True,
                            notch=True, showcaps=True, showfliers=True)
            # Color each box from a fixed palette
            box_colors = ['#3b82f6', '#10b981', '#f59e0b', '#ef4444', '#8b5cf6']
            for patch, color in zip(bp['boxes'], box_colors[:len(bp['boxes'])]):
                patch.set_facecolor(color)
                patch.set_alpha(0.6)
                patch.set_linewidth(1.5)
            # Uniform line width for the remaining box plot elements
            for element in ['whiskers', 'fliers', 'means', 'medians', 'caps']:
                plt.setp(bp[element], linewidth=1.5)
            ax.set_ylabel('值', fontsize=10, labelpad=10)
            ax.set_title('数值型列分布对比', fontsize=12, fontweight='bold', pad=15)
            ax.grid(True, alpha=0.3, axis='y')
            # Rotate x labels to avoid overlap
            plt.setp(ax.get_xticklabels(), rotation=30, ha='right', fontsize=9)
            ax.tick_params(axis='both', which='major', labelsize=9)
            # Tighten the layout before rendering
            plt.tight_layout(pad=1.5, w_pad=1.5, h_pad=1.0)
            # Serialize to a data URL
            img_base64 = self._figure_to_base64(fig)
            return {
                "type": "box_plot",
                "columns": columns,
                "image": img_base64
            }
        except Exception as e:
            logger.error(f"创建箱线图失败: {str(e)}")
            return None

    def _create_correlation_heatmap(self, df: pd.DataFrame, columns: List[str]) -> Optional[Dict[str, Any]]:
        """Render a Pearson correlation heatmap over the numeric columns."""
        try:
            # Pairwise correlation matrix
            corr = df.corr()
            fig, ax = plt.subplots(figsize=(11, 9))
            im = ax.imshow(corr, cmap='RdBu_r', aspect='auto', vmin=-1, vmax=1)
            # One tick per column on both axes
            n_cols = len(corr)
            ax.set_xticks(np.arange(n_cols))
            ax.set_yticks(np.arange(n_cols))
            # Truncate overly long column names
            x_labels = [str(col)[:10] + '...' if len(str(col)) > 10 else str(col) for col in corr.columns]
            y_labels = [str(col)[:10] + '...' if len(str(col)) > 10 else str(col) for col in corr.columns]
            ax.set_xticklabels(x_labels, rotation=30, ha='right', fontsize=9)
            ax.set_yticklabels(y_labels, fontsize=9)
            # Annotate every cell; pick a text color readable on the cell color
            for i in range(n_cols):
                for j in range(n_cols):
                    value = corr.iloc[i, j]
                    text_color = 'white' if abs(value) > 0.5 else 'black'
                    ax.text(j, i, f'{value:.2f}',
                            ha="center", va="center", color=text_color,
                            fontsize=8, fontweight='bold' if abs(value) > 0.7 else 'normal')
            ax.set_title('数值型列相关性热力图', fontsize=12, fontweight='bold', pad=15)
            ax.tick_params(axis='both', which='major', labelsize=9)
            # Color bar legend for the correlation scale
            cbar = plt.colorbar(im, ax=ax)
            cbar.set_label('相关系数', rotation=270, labelpad=20, fontsize=10)
            cbar.ax.tick_params(labelsize=9)
            # Tighten the layout before rendering
            plt.tight_layout(pad=2.0, w_pad=1.0, h_pad=1.0)
            # Serialize to a data URL
            img_base64 = self._figure_to_base64(fig)
            return {
                "type": "correlation_heatmap",
                "columns": columns,
                "image": img_base64,
                # NOTE(review): corr.to_dict() can contain NaN (e.g. constant
                # columns), which is not valid JSON; confirm downstream
                # serialization handles it.
                "correlation_matrix": corr.to_dict()
            }
        except Exception as e:
            logger.error(f"创建相关性热力图失败: {str(e)}")
            return None

    def _generate_distributions(
        self,
        df: pd.DataFrame,
        categorical_columns: List[str]
    ) -> Dict[str, Any]:
        """Compute category counts and percentages for up to 5 categorical columns."""
        distributions = {}
        for col in categorical_columns[:5]:
            try:
                value_counts = df[col].value_counts()
                total = len(df)
                distributions[col] = {
                    "categories": {str(k): int(v) for k, v in value_counts.items()},
                    "percentages": {str(k): round(v / total * 100, 2) for k, v in value_counts.items()},
                    "unique_count": len(value_counts)
                }
            except Exception as e:
                logger.warning(f"列 {col} 分布生成失败: {str(e)}")
        return distributions

    def _figure_to_base64(self, fig) -> str:
        """Serialize a matplotlib figure to a PNG data URL and close it.

        Args:
            fig: matplotlib Figure to render.

        Returns:
            str: "data:image/png;base64,..." string embeddable in <img> tags.
        """
        buf = io.BytesIO()
        fig.savefig(
            buf,
            format='png',
            dpi=120,               # slightly above screen DPI for crisp text
            bbox_inches='tight',   # trim excess whitespace around the axes
            pad_inches=0.3,
            facecolor='white',
            edgecolor='none',
            transparent=False
        )
        plt.close(fig)  # free the figure to avoid accumulating open figures
        buf.seek(0)
        img_base64 = base64.b64encode(buf.read()).decode('utf-8')
        return f"data:image/png;base64,{img_base64}"
# Module-level singleton shared by the API layer
visualization_service = VisualizationService()

View File

@@ -103,6 +103,21 @@ git config user.email #同上
#如果想看全局的,可以加上 --global例如 git config --global user.name
```
## 启动后端项目
在终端输入以下命令:
```bash
cd backend #确保启动时在后端根目录下
./venv/Scripts/python.exe -m uvicorn app.main:app --host 127.0.0.1 --port 8000
--reload #启动后端项目
```
先启动后端项目,再启动前端项目
记得在你的.gitignore中添加
```
/backend/data/uploads
/backend/data/charts
```
## 预计项目结构:
```bash

View File

@@ -1,6 +1,7 @@
fastapi[all]==0.104.1
fastapi[all]==0.104.1
uvicorn[standard]==0.24.0
pydantic==2.5.0
pydantic-settings==2.1.0
python-multipart==0.0.6
pymongo==4.5.0
redis==5.0.0
@@ -10,6 +11,8 @@ faiss-cpu==1.8.0
python-docx==0.8.11
pandas==2.1.4
openpyxl==3.1.2
matplotlib==3.8.2
numpy==1.26.2
markdown==3.5.1
langchain==0.1.0
langchain-community==0.0.10
@@ -18,5 +21,4 @@ httpx==0.25.2
python-dotenv==1.0.0
loguru==0.7.2
tqdm==4.66.1
numpy==1.26.2
PyYAML==6.0.1

View File

@@ -1 +0,0 @@
print("Hello,World")

View File

@@ -1 +0,0 @@
print("Hello world!")

View File

@@ -1 +0,0 @@
print("hello,world!")

View File

@@ -0,0 +1,71 @@
"""
测试字体配置是否正常工作
"""
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
from app.services.font_helper import configure_matplotlib_fonts
import io
import base64
# Configure fonts before any figure is created (same entry point the services use)
font_name = configure_matplotlib_fonts()
print(f"当前使用字体: {font_name}")
print(f"matplotlib 中文字体设置: {matplotlib.rcParams['font.sans-serif']}")
# Build a test chart with Chinese labels
fig, ax = plt.subplots(figsize=(10, 6))
# Test data
x = ['销售', '库存', '采购', '退货', '其他']
y = [150, 200, 180, 50, 30]
bars = ax.bar(x, y, color='#3b82f6', alpha=0.8)
ax.set_xlabel('类别', fontsize=12, labelpad=10)
ax.set_ylabel('数值', fontsize=12, labelpad=10)
ax.set_title('测试图表 - 中文显示', fontsize=14, fontweight='bold', pad=15)
ax.tick_params(axis='both', which='major', labelsize=10)
# Value label above each bar
for bar, value in zip(bars, y):
    height = bar.get_height()
    ax.text(bar.get_x() + bar.get_width() / 2., height,
            f'{value}',
            ha='center', va='bottom', fontsize=10, fontweight='bold')
plt.grid(axis='y', alpha=0.3)
plt.tight_layout(pad=1.5)
# Serialize to base64 (same path the chart services use)
buf = io.BytesIO()
fig.savefig(buf, format='png', dpi=120, bbox_inches='tight', pad_inches=0.3, facecolor='white')
plt.close(fig)
buf.seek(0)
img_base64 = base64.b64encode(buf.read()).decode('utf-8')
data_url = f"data:image/png;base64,{img_base64}"
print("\n=== 测试完成 ===")
print(f"图表大小: {len(img_base64)} 字符")
print("如果看到字体警告,请检查系统是否有安装中文字体")
# Inspect matplotlib's font registry for Chinese-looking fonts
import matplotlib.font_manager as fm
available_fonts = set([f.name for f in fm.fontManager.ttflist])
print(f"\n=== 可用字体列表(部分)===")
chinese_fonts = [f for f in available_fonts if 'CJK' in f or 'Chinese' in f or 'YaHei' in f or 'SimHei' in f or 'PingFang' in f]
for font in sorted(chinese_fonts)[:10]:
    print(f" - {font}")
if not chinese_fonts:
    print(" 未找到中文字体!")
# Installation advice per platform
print("\n=== 推荐安装的中文字体 ===")
print("Windows: Microsoft YaHei (系统自带)")
print("macOS: PingFang SC (系统自带)")
print("Linux: fonts-noto-cjk 或 fonts-wqy-zenhei")
print("\n=== 生成的 base64 数据前100字符===")
print(data_url[:100] + "...")

Binary file not shown.

Binary file not shown.

113
docs/test/test.md Normal file
View File

@@ -0,0 +1,113 @@
✅ Excel 文件解析功能已完成并测试通过
已完成的工作
后端部分
1. 文件服务层 (backend/app/services/file_service.py)
- 文件保存、读取、删除功能
- 文件信息获取
2. Excel 解析模块 (backend/app/core/document_parser/)
- base.py - 解析器基类
- xlsx_parser.py - Excel 文件解析器
- utils.py - 工具函数
3. API 接口 (backend/app/api/endpoints/upload.py)
- POST /upload/excel - 上传并解析 Excel 文件
- GET /upload/excel/preview/{file_path} - 获取预览
- DELETE /upload/file - 删除文件
4. 主应用配置 (backend/app/main.py)
- CORS 中间件配置
- API 路由注册
前端部分
1. 后端 API 封装 (frontendTest/app-a6ww9j3ja3nl/src/db/backend-api.ts)
- Excel 上传、预览、删除函数
2. Excel 解析页面 (frontendTest/app-a6ww9j3ja3nl/src/pages/ExcelParse.tsx)
- 拖拽上传界面
- 解析选项配置
- 数据表格展示
- 多工作表支持
3. 路由配置 (frontendTest/app-a6ww9j3ja3nl/src/routes.tsx)
- 添加 /excel-parse 路由
4. 环境配置 (.env)
- 后端 API URL 配置
测试结果
后端 API 测试通过:
- ✅ 健康检查http://127.0.0.1:8000/health
- ✅ Excel 上传解析:成功解析 5 行 6 列的测试数据
访问方式
1. 后端 API 文档http://127.0.0.1:8000/api/v1/docs
2. 前端页面:
- 启动前端cd frontendTest/app-a6ww9j3ja3nl && npm run dev
- 访问 Excel 解析页面:/excel-parse
下一步建议
1. 启动前端开发服务器测试完整的前端界面
2. 添加更多文档格式解析支持docx, md, txt
3. 完善错误处理和用户提示
✻ Worked for 4m 18s · 1 background task still running (↓ to manage)
> 如何启动后端呢
● 后端启动步骤如下:
方法一:使用命令行启动(开发模式)
# 1. 进入后端目录
cd H:/OwnProject/FilesReadSysteam/backend
# 2. 激活虚拟环境
# PowerShell:
.\venv\Scripts\Activate.ps1
# 或 CMD:
.\venv\Scripts\Activate.bat
# 3. 启动后端服务器
python -m uvicorn app.main:app --host 127.0.0.1 --port 8000 --reload
方法二:直接使用虚拟环境中的 Python
cd H:/OwnProject/FilesReadSysteam/backend
./venv/Scripts/python.exe -m uvicorn app.main:app --host 127.0.0.1 --port 8000 --reload
方法三:直接运行 main.py开发调试
cd H:/OwnProject/FilesReadSysteam/backend
./venv/Scripts/python.exe app/main.py
启动成功的标志
INFO: Uvicorn running on http://127.0.0.1:8000
INFO: Started server process [xxxxx]
INFO: Waiting for application startup.
INFO: Application startup complete.
验证服务是否正常
# 健康检查
curl http://127.0.0.1:8000/health
# 访问 API 文档
# 浏览器打开: http://127.0.0.1:8000/api/v1/docs
---
当前状态:后端已在后台运行(任务 ID: b22jkg69j可以直接访问 http://127.0.0.1:8000
需要停止的话告诉我即可。

Binary file not shown.

Binary file not shown.

7
frontend/.env Normal file
View File

@@ -0,0 +1,7 @@
VITE_APP_ID=app-a6ww9j3ja3nl
VITE_SUPABASE_URL=https://backend.appmiaoda.com/projects/supabase290100332300644352
VITE_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiJhdXRoZW50aWNhdGVkIiwiZXhwIjoyMDg4NTkyNTA5LCJpc3MiOiJzdXBhYmFzZSIsInJvbGUiOiJhbm9uIiwic3ViIjoiYW5vbiJ9.sdVzWIT_AjxVjEmBaEQcOoFGlHTTT8NH59fYdkaA4WU
VITE_BACKEND_API_URL=http://localhost:8000/api/v1

29
frontend/.gitignore vendored Normal file
View File

@@ -0,0 +1,29 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist-ssr
output
*.local
package-lock.json
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
.sync
history/*.json
.vite_cache

View File

@@ -0,0 +1,28 @@
id: selectItemWithEmptyValue
language: Tsx
files:
- src/**/*.tsx
rule:
kind: jsx_opening_element
all:
- has:
kind: identifier
regex: '^SelectItem$'
- has:
kind: jsx_attribute
all:
- has:
kind: property_identifier
regex: '^value$'
- any:
- has:
kind: string
regex: '^""$'
- has:
kind: jsx_expression
has:
kind: string
regex: '^""$'
message: "检测到 SelectItem 组件使用空字符串 value: $MATCH 这是错误用法, 运行时会报错, 请修改, 如果想实现全选建议使用all代替空字符串"
severity: error

39
frontend/.rules/check.sh Normal file
View File

@@ -0,0 +1,39 @@
#!/bin/bash
# Run the project's ast-grep lint rules, then verify that any use of the
# useAuth hook is paired with an AuthProvider wrapper somewhere in the tree.

# Unconditional rule scans; each prints its own findings to stdout.
ast-grep scan -r .rules/SelectItem.yml
ast-grep scan -r .rules/contrast.yml
ast-grep scan -r .rules/supabase-google-sso.yml
ast-grep scan -r .rules/toast-hook.yml
ast-grep scan -r .rules/slot-nesting.yml
ast-grep scan -r .rules/require-button-interaction.yml
# If no component uses useAuth, the pairing check is irrelevant — pass.
useauth_output=$(ast-grep scan -r .rules/useAuth.yml 2>/dev/null)
if [ -z "$useauth_output" ]; then
exit 0
fi
# useAuth is used; if an AuthProvider match exists, the pairing is satisfied.
authprovider_output=$(ast-grep scan -r .rules/authProvider.yml 2>/dev/null)
if [ -n "$authprovider_output" ]; then
exit 0
fi
# useAuth is used without any AuthProvider: dump both scan outputs and advise.
echo "=== ast-grep scan -r .rules/useAuth.yml output ==="
echo "$useauth_output"
echo ""
echo "=== ast-grep scan -r .rules/authProvider.yml output ==="
echo "$authprovider_output"
echo ""
echo "⚠️ Issue detected:"
echo "The code uses useAuth Hook but does not have AuthProvider component wrapping the components."
echo "Please ensure that components using useAuth are wrapped with AuthProvider to provide proper authentication context."
echo ""
echo "Suggested fixes:"
echo "1. Add AuthProvider wrapper in app.tsx or corresponding root component"
echo "2. Ensure all components using useAuth are within AuthProvider scope"

View File

@@ -0,0 +1,103 @@
id: button-outline-text-foreground-contrast
language: tsx
files:
- src/**/*.tsx
message: "Outline button with text-foreground class causes invisible text. The outline variant has a transparent background, making text-foreground color blend with the background and become unreadable. Use text-primary or another contrasting color instead."
rule:
kind: jsx_element
has:
kind: jsx_opening_element
all:
- has:
field: name
regex: "^Button$"
- has:
kind: jsx_attribute
all:
- has:
kind: property_identifier
regex: "^variant$"
- has:
kind: string
has:
kind: string_fragment
regex: "^outline$"
- has:
kind: jsx_attribute
all:
- has:
kind: property_identifier
regex: "^className$"
- has:
kind: string
has:
kind: string_fragment
regex: "(^|\\s)text-foreground(\\s|$)"
---
id: button-default-text-primary-contrast
language: tsx
files:
- src/**/*.tsx
message: "Default button with text-primary class causes poor contrast. The default variant has a primary-colored background, making text-primary color blend with the background and become hard to read. Remove the text-primary class or specify a different variant like 'outline' or 'ghost'."
rule:
kind: jsx_element
has:
kind: jsx_opening_element
all:
- has:
field: name
regex: "^Button$"
- has:
kind: jsx_attribute
all:
- has:
kind: property_identifier
regex: "^className$"
- has:
kind: string
has:
kind: string_fragment
regex: "(^|\\s)text-primary(\\s|$)"
- not:
has:
kind: jsx_attribute
has:
kind: property_identifier
regex: "^variant$"
---
id: button-outline-white-gray-contrast
language: tsx
files:
- src/**/*.tsx
message: "Outline button with white/gray text color has poor contrast. Remove the text color class and use the default button text color."
rule:
kind: jsx_element
has:
kind: jsx_opening_element
all:
- has:
field: name
regex: "^Button$"
- has:
kind: jsx_attribute
all:
- has:
kind: property_identifier
regex: "^variant$"
- has:
kind: string
has:
kind: string_fragment
regex: "^outline$"
- has:
kind: jsx_attribute
all:
- has:
kind: property_identifier
regex: "^className$"
- has:
kind: string
has:
kind: string_fragment
regex: "(^|\\s)text-(white|gray)(-[0-9]+)?(\\s|$)"

View File

@@ -0,0 +1,56 @@
id: require-button-interaction
language: Tsx
files:
- src/**/*.tsx
- src/**/*.jsx
rule:
kind: jsx_opening_element
all:
# 必须是 <Button> 组件
- has:
kind: identifier
regex: '^Button$'
# 没有 onClick
- not:
has:
kind: jsx_attribute
has:
kind: property_identifier
regex: '^onClick$'
# 没有 asChild
- not:
has:
kind: jsx_attribute
has:
kind: property_identifier
regex: '^asChild$'
# 没有 type="submit" 或 type="reset"
- not:
has:
kind: jsx_attribute
all:
- has:
kind: property_identifier
regex: '^type$'
- any:
- has:
kind: string
regex: '^"(submit|reset)"$'
- has:
kind: jsx_expression
has:
kind: string
regex: '^"(submit|reset)"$'
# 不在 *Trigger 组件内部(如 DialogTrigger、SheetTrigger
- not:
inside:
stopBy: end
kind: jsx_element
has:
kind: jsx_opening_element
has:
kind: identifier
regex: 'Trigger$'
message: '<Button> 必须是可点击的:请添加 onClick、type="submit"、type="reset"、asChild 属性,或将其包裹在 *Trigger 组件中'
severity: error

View File

@@ -0,0 +1,52 @@
---
id: radix-trigger-formcontrol-nesting
language: tsx
files:
- src/**/*.tsx
message: |
❌ 检测到危险的 Slot 嵌套Radix UI Trigger (asChild) 内包裹 FormControl
问题代码:
<PopoverTrigger asChild>
<FormControl> ← 会导致点击事件失效
<Button>...</Button>
</FormControl>
</PopoverTrigger>
正确写法:
<PopoverTrigger asChild>
<Button>...</Button> ← 直接使用 Button
</PopoverTrigger>
原因FormControl 和 Trigger 都使用 Radix UI 的 Slot 机制,双层嵌套会导致:
- ref 传递链断裂
- 点击事件丢失
- 内部组件无法交互
FormControl 只应该用于原生表单控件Input, Textarea, Select不要用于触发器按钮。
severity: error
rule:
kind: jsx_element
all:
# 开始标签需要满足的条件
- has:
kind: jsx_opening_element
all:
# 匹配所有 Radix UI 的 Trigger 组件
- has:
field: name
regex: "^(Popover|Dialog|DropdownMenu|AlertDialog|HoverCard|Menubar|NavigationMenu|ContextMenu|Tooltip)Trigger$"
# 必须有 asChild 属性
- has:
kind: jsx_attribute
has:
kind: property_identifier
regex: "^asChild$"
# 直接子元素包含 FormControl
- has:
kind: jsx_element
has:
kind: jsx_opening_element
has:
field: name
regex: "^FormControl$"

View File

@@ -0,0 +1,20 @@
id: supabase-google-sso
language: Tsx
files:
- src/**/*.tsx
rule:
pattern: |
$AUTH.signInWithOAuth({ provider: 'google', $$$ })
message: |
Replace `signInWithOAuth` with `signInWithSSO` for Google authentication (Supabase).
Refactor to:
```typescript
const { data, error } = await supabase.auth.signInWithSSO({
domain: 'miaoda-gg.com',
options: { redirectTo: window.location.origin },
});
if (data?.url) window.open(data.url, '_self');
```
Ensure `window.open` uses `_self` target.
severity: warning

View File

@@ -0,0 +1,10 @@
#!/bin/bash
# Build the frontend with vite, staying silent on success and printing the
# captured build output only on failure; vite's exit code is propagated.
OUTPUT=$(npx vite build --minify false --logLevel error --outDir /workspace/.dist 2>&1)
EXIT_CODE=$?
if [ $EXIT_CODE -ne 0 ]; then
echo "$OUTPUT"
fi
exit $EXIT_CODE

View File

@@ -0,0 +1,11 @@
id: use-toast-import
message: Use 'import { toast } from "sonner"' instead of "@/hooks/use-toast"
severity: error
language: Tsx
note: |
The new shadcn/ui pattern uses sonner for toast notifications.
Replace: import { toast } from "@/hooks/use-toast"
With: import { toast } from "sonner"
rule:
pattern: import { $$$IMPORTS } from "@/hooks/use-toast"

View File

@@ -1,128 +1,66 @@
> [!TIP]
>
> 注意本文档仅为前端开发过程中团队交流使用非正式的readme文档。
> 以下为官方自带说明,请忽略(正文开始在下面)
## 介绍
# 前端项目说明
项目介绍
这是一个使用 Vue 3 和 Vite 的模板项目,帮助您开始开发。
## 目录结构
## 推荐的 IDE 设置
[VS Code](https://code.visualstudio.com/) + [Vue (官方)](https://marketplace.visualstudio.com/items?itemName=Vue.volar)(并禁用 Vetur
## 推荐的浏览器设置
- 基于 Chromium 的浏览器Chrome、Edge、Brave 等):
- [Vue.js 开发者工具](https://chromewebstore.google.com/detail/vuejs-devtools/nhdogjmejiglipccpnnnanhbledajbpd)
- [在 Chrome DevTools 中开启自定义对象格式化程序](http://bit.ly/object-formatters)
- Firefox
- [Vue.js 开发者工具](https://addons.mozilla.org/en-US/firefox/addon/vue-js-devtools/)
- [在 Firefox DevTools 中开启自定义对象格式化程序](https://fxdx.dev/firefox-devtools-custom-object-formatters/)
## TypeScript 对 `.vue` 导入的类型支持
TypeScript 默认无法处理 `.vue` 导入的类型信息,因此我们用 `vue-tsc` 替换了 `tsc` 命令进行类型检查。在编辑器中,我们需要 [Volar](https://marketplace.visualstudio.com/items?itemName=Vue.volar) 让 TypeScript 语言服务识别 `.vue` 类型。
## 自定义配置
参见 [Vite 配置参考](https://vite.dev/config/)。
## 项目设置
```sh
npm install
```
├── README.md # 说明文档
├── components.json # 组件库配置
├── index.html # 入口文件
├── package.json # 包管理
├── postcss.config.js # postcss 配置
├── public # 静态资源目录
│   ├── favicon.png # 图标
│   └── images # 图片资源
├── src # 源码目录
│   ├── App.tsx # 入口文件
│   ├── components # 组件目录
│   ├── contexts # 上下文目录
│   ├── db # 数据库配置目录
│   ├── hooks # 通用钩子函数目录
│   ├── index.css # 全局样式
│   ├── layout # 布局目录
│   ├── lib # 工具库目录
│   ├── main.tsx # 入口文件
│   ├── routes.tsx # 路由配置
│   ├── pages # 页面目录
│   ├── services # 数据库交互目录
│   ├── types # 类型定义目录
├── tsconfig.app.json # ts 前端配置文件
├── tsconfig.json # ts 配置文件
├── tsconfig.node.json # ts node端配置文件
└── vite.config.ts # vite 配置文件
```
### 编译和热重载用于开发
## 技术栈
```sh
npm run dev
Vite、TypeScript、React、Supabase
## 本地开发
首先进行包安装:
```bash
cd frontend #进入前端目录
npm install #确定目录中有node_modules文件夹后输入命令安装依赖包
```
### 类型检查、编译和生产环境压缩
```sh
npm run build
## 启动项目
启动项目:
```bash
npm run dev #启动项目,需要确保后端已启动,否则前端功能无法使用
```
启动后,在终端中按住 Ctrl 并左键点击项目地址即可在浏览器中打开(一般是 http://localhost:5173)
### 使用 [ESLint](https://eslint.org/) 进行代码检查
```sh
npm run lint
```
记得在你根目录下的.gitignore文件中添加
## vscode设置
首先在插件库中安装`Vue`插件
![alt text](image\image-3.png)
选择官方的即可
## 构建
在安装完`Node.js`
直接在项目根目录下即xxx\FileReadSystem\)下运行
```sh
npm create vue@latest
```
项目名直接用frontend
随后的选择如下:
```
✔ Project name: … frontend
✔ Add TypeScript? … **Yes** (推荐,更好的类型支持)
✔ Add JSX? … No / Yes (根据需要,一般不需要)
✔ Add Vue Router for Single Page Application development? … **Yes** (需要路由)
✔ Add Pinia for state management? … **Yes** (推荐,状态管理)
✔ Add Vitest for Unit Testing? … No (测试,可以暂时不选)
✔ Add an End-to-End Testing Solution? … No (端到端测试,暂时不选)
✔ Add ESLint for code quality? … **Yes** (代码规范)
✔ Add Prettier for code formatting? … **Yes** (代码格式化)
```
实验特性(Experimental features)不选即可
"跳过所有示例代码,创建一个空白的 Vue 项目?"选择 Yes
完成后输入
```sh
cd frontend
#随后输入
npm install #确保frontend目录下有package.json文件
```
安装包完成后输入
```sh
npm run dev
```
以进入调试
若终端出现:
![alt text](image\image-4.png)
说明构建成功ctrl+鼠标左键点击第一个网址即可查看当前页面
在终端输入q即可退出调试
## 关于`.gitignore`
根目录下`.gitignore`文件修改如下:
```sh
/.git/
/.gitignore
/.idea/
/.vscode/
/backend/venv/
/backend/command/
/backend/.env
/backend/.env.local
/backend/.env.*.local
/backend/app/__pycache__/
# 前端忽略
/frontend/*
!/frontend/README.md
!/frontend/image/
# 在你自己构建好后,请删除上面这三行
```bash
/frontend/node_modules/
/frontend/dist/
/frontend/build/
/frontend/.env
/frontend/.vscode/
/frontend/.idea/
/frontend/*.log

37
frontend/TODO.md Normal file
View File

@@ -0,0 +1,37 @@
# Task: 基于大语言模型的文档理解与多源数据融合系统
## Plan
- [x] 数据库初始化与权限配置 (Supabase)
- [x] 创建 `profiles` 表及触发器 (登录同步)
- [x] 创建 `documents` 表 (存储上传的文档信息)
- [x] 创建 `extracted_entities` 表 (存储从文档提取的结构化数据)
- [x] 创建 `templates` 表 (存储表格模板)
- [x] 创建 `fill_tasks` 表 (存储填写任务)
- [x] 配置 RLS 策略 (Row Level Security)
- [x] 创建 Storage 存储桶 `document_storage` (存储文档和模板)
- [x] 基础架构与登录模块
- [x] 配置路由 `@/routes.tsx`
- [x] 创建登录/注册页面
- [x] 实现 `AuthContext``RouteGuard` (登录状态管理)
- [x] 创建系统主布局 `MainLayout` (含侧边栏导航)
- [x] 文档上传与智能提取功能
- [x] 实现文档上传组件 (支持 docx, md, xlsx, txt)
- [x] 部署 Edge Function `process-document` (调用 MiniMax 处理文档提取)
- [x] 实现文档列表与详情页 (显示提取的结构化数据)
- [x] 表格模板与自动填写模块
- [x] 实现模板上传与管理
- [x] 部署 Edge Function `fill-template` (基于提取数据填充表格)
- [x] 实现任务监控与结果下载
- [x] 智能对话交互模块
- [x] 实现智能助手聊天界面 (侧边栏或独立页面)
- [x] 部署 Edge Function `chat-assistant` (解析自然语言指令执行操作)
- [x] 系统优化与美化
- [x] 全面应用科技蓝办公风格 (index.css, tailwind.config.js)
- [x] 响应式适配 (移动端兼容)
- [x] 完善错误处理与加载状态 (Skeleton, Toast)
## Notes
- 所有 Edge Functions 已部署并集成 MiniMax API
- 文档解析使用 mammoth (docx), xlsx (excel), 原生 TextDecoder (txt/md)
- 系统采用科技蓝主题,支持暗色模式
- 所有代码已通过 lint 检查

24
frontend/biome.json Normal file
View File

@@ -0,0 +1,24 @@
{
"$schema": "./node_modules/@biomejs/biome/configuration_schema.json",
"files": {
"includes": ["src/**/*.{js,jsx,ts,tsx}"]
},
"linter": {
"enabled": true,
"rules": {
"recommended": false,
"correctness": {
"noUndeclaredDependencies": "error"
},
"suspicious": {
"noRedeclare": "error"
},
"style": {
"noCommonJs": "error"
}
}
},
"formatter": {
"enabled": false
}
}

21
frontend/components.json Normal file
View File

@@ -0,0 +1,21 @@
{
"$schema": "https://ui.shadcn.com/schema.json",
"style": "new-york",
"rsc": false,
"tsx": true,
"tailwind": {
"config": "tailwind.config.js",
"css": "src/index.css",
"baseColor": "slate",
"cssVariables": true,
"prefix": ""
},
"iconLibrary": "lucide",
"aliases": {
"components": "@/components",
"utils": "@/lib/utils",
"ui": "@/components/ui",
"lib": "@/lib",
"hooks": "@/hooks"
}
}

95
frontend/docs/prd.md Normal file
View File

@@ -0,0 +1,95 @@
# 基于大语言模型的文档理解与多源数据融合系统需求文档
## 1. 应用概述
### 1.1 应用名称
基于大语言模型的文档理解与多源数据融合系统
### 1.2 应用描述
本系统旨在解决企事业单位在日常办公中面临的文本信息处理效率低下问题,通过引入人工智能技术实现文档的智能理解、信息自动提取、结构化存储以及智能表格填写,帮助用户从繁琐的重复性劳动中解放出来,提升整体工作效率。
## 2. 核心功能
### 2.1 文档智能操作交互模块
- 支持用户通过自然语言指令对文档进行操作
- 自动解析用户指令并执行相应的文档编辑、排版、格式调整、内容提取等操作
- 基于自然语言处理与文档结构理解技术实现人机交互
### 2.2 非结构化文档信息提取模块
- 支持用户导入各类非结构化文档(包括但不限于docx、md、xlsx、txt格式)
- 自动识别并提取文档中的关键信息、实体数据或用户指定内容
- 将提取的信息进行数据库存储
- 确保信息提取的准确性和入库的规范性
- 支持桌面端、Web网站或第三方平台部署
### 2.3 表格自定义数据填写模块
- 支持用户提供表格模板(word或excel格式)
- 从用户提供的非结构化数据中自动搜索相关信息
- 将搜索到的信息自动填写到表格中
- 生成具备直接业务应用价值的、格式严谨的汇总表格
## 3. 技术要求
### 3.1 系统架构
- 可基于开源或第三方商业AI平台构建
- 也可采用自研创新算法
- 系统可运行在H5小程序、原生App、Web网站、PC端软件等平台上
### 3.2 性能指标
- 信息提取准确率需高于80%
- 每个文档的响应时间至多为90秒
- 支持异步调用的API接口
### 3.3 数据处理能力
- 能够准确识别多种数据类型并在不同数据类型间稳定运行
- 支持比赛方提供的测试文档样本集(包括5个docx文档、3个md文档、5个xlsx文档、3个txt文档)
## 4. 交互流程
### 4.1 文档上传与处理流程
- 用户上传多个文档文件(支持docx、md、xlsx、txt格式)
- 系统自动识别并提取文档中的关键信息
- 将提取的信息进行数据库存储
### 4.2 表格填写流程
- 用户上传表格模板文件(word或excel格式)
- 系统从已存储的非结构化数据中自动搜索相关信息
- 将相关信息自动填写到表格中
- 完成填写后返回或展示结果表格
### 4.3 智能交互流程
- 用户通过自然语言输入操作指令
- 系统解析指令并识别用户需求
- 执行相应的文档操作并反馈结果
## 5. 参考信息
### 5.1 测试文档样本集
- 5个不小于500KB的docx格式文档
- 3个不小于15KB的md格式文档
- 5个不小于500KB的xlsx格式文档
- 3个不小于15KB的txt文档
### 5.2 评分标准
- 信息填写准确率(平均准确率)
- 响应时间(平均响应时间)
- 准确率差距2%以上时,准确率越高系统越好
- 准确率差距小于2%时,结合响应时间综合评价
## 6. 其他说明
### 6.1 开发工具
- 开发工具及平台不限
- 可借助开源工具
- 数据与功能API需提供技术说明
### 6.2 提交材料
- 项目概要介绍
- 项目简介PPT
- 项目详细方案
- 项目演示视频
- 企业要求提交的材料:
- 训练素材详细的素材介绍与来源说明
- 关键模块的概要设计和创新要点说明文档
- 可运行的Demo实现程序
- 团队自愿提交的其他补充材料

Binary file not shown.

Before

Width:  |  Height:  |  Size: 32 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 25 KiB

12
frontend/index.html Normal file
View File

@@ -0,0 +1,12 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/favicon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

96
frontend/package.json Normal file
View File

@@ -0,0 +1,96 @@
{
"name": "miaoda-react-admin",
"version": "0.0.1",
"type": "module",
"scripts": {
"dev": "vite",
"build": "vite build",
"lint": "tsgo -p tsconfig.check.json; npx biome lint; .rules/check.sh;npx tailwindcss -i ./src/index.css -o /dev/null 2>&1 | grep -E '^(CssSyntaxError|Error):.*' || true;.rules/testBuild.sh"
},
"dependencies": {
"@hookform/resolvers": "^5.2.2",
"@radix-ui/react-accordion": "^1.2.12",
"@radix-ui/react-alert-dialog": "^1.1.15",
"@radix-ui/react-aspect-ratio": "^1.1.7",
"@radix-ui/react-avatar": "^1.1.10",
"@radix-ui/react-checkbox": "^1.3.3",
"@radix-ui/react-collapsible": "^1.1.12",
"@radix-ui/react-context-menu": "^2.2.16",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.16",
"@radix-ui/react-hover-card": "^1.1.15",
"@radix-ui/react-icons": "^1.3.2",
"@radix-ui/react-label": "^2.1.7",
"@radix-ui/react-menubar": "^1.1.16",
"@radix-ui/react-navigation-menu": "^1.2.14",
"@radix-ui/react-popover": "^1.1.15",
"@radix-ui/react-progress": "^1.1.7",
"@radix-ui/react-radio-group": "^1.3.8",
"@radix-ui/react-scroll-area": "^1.2.10",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.7",
"@radix-ui/react-slider": "^1.3.6",
"@radix-ui/react-slot": "^1.2.3",
"@radix-ui/react-switch": "^1.2.6",
"@radix-ui/react-tabs": "^1.1.13",
"@radix-ui/react-toast": "^1.2.15",
"@radix-ui/react-toggle": "^1.1.10",
"@radix-ui/react-toggle-group": "^1.1.11",
"@radix-ui/react-tooltip": "^1.2.8",
"@supabase/supabase-js": "^2.98.0",
"axios": "^1.13.1",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cmdk": "^1.1.1",
"date-fns": "^3.6.0",
"embla-carousel-react": "^8.6.0",
"eventsource-parser": "^3.0.6",
"framer-motion": "^12.35.2",
"input-otp": "^1.4.2",
"ky": "^1.13.0",
"lucide-react": "^0.576.0",
"mammoth": "^1.11.0",
"miaoda-auth-react": "2.0.6",
"miaoda-sc-plugin": "1.0.56",
"motion": "^12.23.25",
"next-themes": "^0.4.6",
"qrcode": "^1.5.4",
"react": "^18.0.0",
"react-day-picker": "^9.13.0",
"react-dom": "^18.0.0",
"react-dropzone": "^14.3.8",
"react-helmet-async": "^2.0.5",
"react-hook-form": "^7.66.0",
"react-markdown": "^10.1.0",
"react-resizable-panels": "^2.1.8",
"react-router": "^7.9.5",
"react-router-dom": "^7.9.5",
"recharts": "2.15.4",
"remark-gfm": "^4.0.1",
"sonner": "^2.0.7",
"streamdown": "^1.4.0",
"tailwind-merge": "^3.3.1",
"tailwindcss-animate": "^1.0.7",
"tailwindcss-intersect": "^2.2.0",
"vaul": "^1.1.2",
"video-react": "^0.16.0",
"xlsx": "^0.18.5",
"zod": "^3.25.76"
},
"devDependencies": {
"@biomejs/biome": "2.4.5",
"@tailwindcss/container-queries": "^0.1.1",
"@types/lodash": "^4.17.24",
"@types/react": "^19.2.2",
"@types/react-dom": "^19.2.2",
"@types/video-react": "^0.15.8",
"@typescript/native-preview": "7.0.0-dev.20260303.1",
"@vitejs/plugin-react": "^5.1.4",
"autoprefixer": "^10.4.27",
"postcss": "^8.5.6",
"tailwindcss": "^3.4.11",
"typescript": "~5.9.3",
"vite": "npm:rolldown-vite@latest",
"vite-plugin-svgr": "^4.5.0"
}
}

7665
frontend/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,6 @@
catalog:
'@react-three/drei': 9.122.0
'@react-three/fiber': 8.18.0
three: 0.180.0
lockfileIncludeTarballUrl: false

View File

@@ -0,0 +1,6 @@
export default {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
};

BIN
frontend/public/favicon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.4 KiB

View File

@@ -0,0 +1,20 @@
<svg width="472" height="158" viewBox="0 0 472 158" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect x="203.103" y="41.7015" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="246.752" y="41.7015" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="258.201" y="98.2308" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="191.654" y="98.2308" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="207.396" y="82.847" width="57.5655" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="152.769" y="15.167" width="166.462" height="130.311" rx="28" stroke="#7592FF" stroke-width="24"/>
<rect x="0.0405273" y="0.522461" width="32.6255" height="77.5957" rx="6.26271" fill="#7592FF"/>
<rect x="0.0405273" y="0.522461" width="32.6255" height="77.5957" rx="6.26271" stroke="#7592FF"/>
<rect x="75.8726" y="3.16797" width="32.6255" height="154.31" rx="6.26271" fill="#7592FF"/>
<rect x="75.8726" y="3.16797" width="32.6255" height="154.31" rx="6.26271" stroke="#7592FF"/>
<rect x="16.7939" y="91.3438" width="32.6255" height="77.5957" rx="6.26271" transform="rotate(-90 16.7939 91.3438)" fill="#7592FF"/>
<rect x="16.7939" y="91.3438" width="32.6255" height="77.5957" rx="6.26271" transform="rotate(-90 16.7939 91.3438)" stroke="#7592FF"/>
<rect x="363.502" y="0.522461" width="32.6255" height="77.5957" rx="6.26271" fill="#7592FF"/>
<rect x="363.502" y="0.522461" width="32.6255" height="77.5957" rx="6.26271" stroke="#7592FF"/>
<rect x="439.334" y="3.16797" width="32.6255" height="154.31" rx="6.26271" fill="#7592FF"/>
<rect x="439.334" y="3.16797" width="32.6255" height="154.31" rx="6.26271" stroke="#7592FF"/>
<rect x="380.255" y="91.3438" width="32.6255" height="77.5957" rx="6.26271" transform="rotate(-90 380.255 91.3438)" fill="#7592FF"/>
<rect x="380.255" y="91.3438" width="32.6255" height="77.5957" rx="6.26271" transform="rotate(-90 380.255 91.3438)" stroke="#7592FF"/>
</svg>

After

Width:  |  Height:  |  Size: 2.1 KiB

View File

@@ -0,0 +1,20 @@
<svg width="472" height="158" viewBox="0 0 472 158" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect x="203.103" y="41.7015" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="246.752" y="41.7015" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="258.201" y="98.2303" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="191.654" y="98.2303" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="207.396" y="82.847" width="57.5655" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="152.769" y="15.167" width="166.462" height="130.311" rx="28" stroke="#465FFF" stroke-width="24"/>
<rect x="0.0405273" y="0.522461" width="32.6255" height="77.5957" rx="6.26271" fill="#465FFF"/>
<rect x="0.0405273" y="0.522461" width="32.6255" height="77.5957" rx="6.26271" stroke="#465FFF"/>
<rect x="75.8726" y="3.16748" width="32.6255" height="154.31" rx="6.26271" fill="#465FFF"/>
<rect x="75.8726" y="3.16748" width="32.6255" height="154.31" rx="6.26271" stroke="#465FFF"/>
<rect x="16.7939" y="91.3442" width="32.6255" height="77.5957" rx="6.26271" transform="rotate(-90 16.7939 91.3442)" fill="#465FFF"/>
<rect x="16.7939" y="91.3442" width="32.6255" height="77.5957" rx="6.26271" transform="rotate(-90 16.7939 91.3442)" stroke="#465FFF"/>
<rect x="363.502" y="0.522461" width="32.6255" height="77.5957" rx="6.26271" fill="#465FFF"/>
<rect x="363.502" y="0.522461" width="32.6255" height="77.5957" rx="6.26271" stroke="#465FFF"/>
<rect x="439.334" y="3.16748" width="32.6255" height="154.31" rx="6.26271" fill="#465FFF"/>
<rect x="439.334" y="3.16748" width="32.6255" height="154.31" rx="6.26271" stroke="#465FFF"/>
<rect x="380.255" y="91.3442" width="32.6255" height="77.5957" rx="6.26271" transform="rotate(-90 380.255 91.3442)" fill="#465FFF"/>
<rect x="380.255" y="91.3442" width="32.6255" height="77.5957" rx="6.26271" transform="rotate(-90 380.255 91.3442)" stroke="#465FFF"/>
</svg>

After

Width:  |  Height:  |  Size: 2.1 KiB

View File

@@ -0,0 +1,24 @@
<svg width="562" height="156" viewBox="0 0 562 156" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect x="0.161133" y="13.4297" width="32.6255" height="71" rx="6.26271" fill="#7592FF"/>
<rect x="0.161133" y="13.4297" width="32.6255" height="71" rx="6.26271" stroke="#7592FF"/>
<rect x="88.2891" y="80.1504" width="32.6255" height="63.5801" rx="6.26271" fill="#7592FF"/>
<rect x="88.2891" y="80.1504" width="32.6255" height="63.5801" rx="6.26271" stroke="#7592FF"/>
<rect x="15.5254" y="33.4668" width="32.6255" height="105.389" rx="6.26271" transform="rotate(-90 15.5254 33.4668)" fill="#7592FF"/>
<rect x="15.5254" y="33.4668" width="32.6255" height="105.389" rx="6.26271" transform="rotate(-90 15.5254 33.4668)" stroke="#7592FF"/>
<rect x="0.161133" y="155.16" width="30" height="107.028" rx="6.26271" transform="rotate(-90 0.161133 155.16)" fill="#7592FF"/>
<rect x="0.161133" y="155.16" width="30" height="107.028" rx="6.26271" transform="rotate(-90 0.161133 155.16)" stroke="#7592FF"/>
<rect x="15.5254" y="96.3398" width="32.6255" height="91.6638" rx="6.26271" transform="rotate(-90 15.5254 96.3398)" fill="#7592FF"/>
<rect x="15.5254" y="96.3398" width="32.6255" height="91.6638" rx="6.26271" transform="rotate(-90 15.5254 96.3398)" stroke="#7592FF"/>
<rect x="162.915" y="12.8496" width="166.462" height="130.311" rx="28" stroke="#7592FF" stroke-width="24"/>
<rect x="213.52" y="42.0287" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="257.168" y="42.0287" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="268.618" y="98.558" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="202.071" y="98.558" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="217.813" y="83.1732" width="57.5655" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="383.377" y="12.8496" width="166.462" height="130.311" rx="28" stroke="#7592FF" stroke-width="24"/>
<rect x="433.982" y="42.0287" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="477.63" y="42.0287" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="489.079" y="98.558" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="422.533" y="98.558" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="438.275" y="83.1732" width="57.5655" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
</svg>

After

Width:  |  Height:  |  Size: 2.8 KiB

View File

@@ -0,0 +1,24 @@
<svg width="562" height="156" viewBox="0 0 562 156" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect x="0.161133" y="13.4292" width="32.6255" height="71" rx="6.26271" fill="#465FFF"/>
<rect x="0.161133" y="13.4292" width="32.6255" height="71" rx="6.26271" stroke="#465FFF"/>
<rect x="88.2891" y="80.1499" width="32.6255" height="63.5801" rx="6.26271" fill="#465FFF"/>
<rect x="88.2891" y="80.1499" width="32.6255" height="63.5801" rx="6.26271" stroke="#465FFF"/>
<rect x="15.5254" y="33.4673" width="32.6255" height="105.389" rx="6.26271" transform="rotate(-90 15.5254 33.4673)" fill="#465FFF"/>
<rect x="15.5254" y="33.4673" width="32.6255" height="105.389" rx="6.26271" transform="rotate(-90 15.5254 33.4673)" stroke="#465FFF"/>
<rect x="0.161133" y="155.16" width="30" height="107.028" rx="6.26271" transform="rotate(-90 0.161133 155.16)" fill="#465FFF"/>
<rect x="0.161133" y="155.16" width="30" height="107.028" rx="6.26271" transform="rotate(-90 0.161133 155.16)" stroke="#465FFF"/>
<rect x="15.5254" y="96.3398" width="32.6255" height="91.6638" rx="6.26271" transform="rotate(-90 15.5254 96.3398)" fill="#465FFF"/>
<rect x="15.5254" y="96.3398" width="32.6255" height="91.6638" rx="6.26271" transform="rotate(-90 15.5254 96.3398)" stroke="#465FFF"/>
<rect x="162.915" y="12.8496" width="166.462" height="130.311" rx="28" stroke="#465FFF" stroke-width="24"/>
<rect x="213.52" y="42.0287" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="257.168" y="42.0287" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="268.618" y="98.558" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="202.071" y="98.558" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="217.813" y="83.1732" width="57.5655" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="383.377" y="12.8496" width="166.462" height="130.311" rx="28" stroke="#465FFF" stroke-width="24"/>
<rect x="433.982" y="42.0287" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="477.63" y="42.0287" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="489.079" y="98.558" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="422.533" y="98.558" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="438.275" y="83.1732" width="57.5655" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
</svg>

After

Width:  |  Height:  |  Size: 2.8 KiB

View File

@@ -0,0 +1,26 @@
<svg width="494" height="156" viewBox="0 0 494 156" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect x="0.515625" y="13.4492" width="32.6255" height="71" rx="6.26271" fill="#7592FF"/>
<rect x="0.515625" y="13.4492" width="32.6255" height="71" rx="6.26271" stroke="#7592FF"/>
<rect x="88.6436" y="80.1699" width="32.6255" height="63.5801" rx="6.26271" fill="#7592FF"/>
<rect x="88.6436" y="80.1699" width="32.6255" height="63.5801" rx="6.26271" stroke="#7592FF"/>
<rect x="15.8799" y="33.4863" width="32.6255" height="105.389" rx="6.26271" transform="rotate(-90 15.8799 33.4863)" fill="#7592FF"/>
<rect x="15.8799" y="33.4863" width="32.6255" height="105.389" rx="6.26271" transform="rotate(-90 15.8799 33.4863)" stroke="#7592FF"/>
<rect x="0.515625" y="155.18" width="30" height="107.028" rx="6.26271" transform="rotate(-90 0.515625 155.18)" fill="#7592FF"/>
<rect x="0.515625" y="155.18" width="30" height="107.028" rx="6.26271" transform="rotate(-90 0.515625 155.18)" stroke="#7592FF"/>
<rect x="15.8799" y="96.3594" width="32.6255" height="91.6638" rx="6.26271" transform="rotate(-90 15.8799 96.3594)" fill="#7592FF"/>
<rect x="15.8799" y="96.3594" width="32.6255" height="91.6638" rx="6.26271" transform="rotate(-90 15.8799 96.3594)" stroke="#7592FF"/>
<rect x="163.27" y="12.8691" width="166.462" height="130.311" rx="28" stroke="#7592FF" stroke-width="24"/>
<rect x="213.874" y="42.0482" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="257.523" y="42.0482" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="268.972" y="98.5775" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="202.425" y="98.5775" width="22.1453" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="218.167" y="83.1927" width="57.5655" height="20.7141" rx="2.63433" fill="#7592FF" stroke="#7592FF" stroke-width="0.752667"/>
<rect x="460.859" y="11.1885" width="32.6255" height="132.562" rx="6.26271" fill="#7592FF"/>
<rect x="460.859" y="11.1885" width="32.6255" height="132.562" rx="6.26271" stroke="#7592FF"/>
<rect x="371.731" y="33.4453" width="32.6255" height="107.028" rx="6.26271" transform="rotate(-90 371.731 33.4453)" fill="#7592FF"/>
<rect x="371.731" y="33.4453" width="32.6255" height="107.028" rx="6.26271" transform="rotate(-90 371.731 33.4453)" stroke="#7592FF"/>
<rect x="371.731" y="155.18" width="30" height="107.028" rx="6.26271" transform="rotate(-90 371.731 155.18)" fill="#7592FF"/>
<rect x="371.731" y="155.18" width="30" height="107.028" rx="6.26271" transform="rotate(-90 371.731 155.18)" stroke="#7592FF"/>
<rect x="388.096" y="93.7812" width="32.6255" height="91.6638" rx="6.26271" transform="rotate(-90 388.096 93.7812)" fill="#7592FF"/>
<rect x="388.096" y="93.7812" width="32.6255" height="91.6638" rx="6.26271" transform="rotate(-90 388.096 93.7812)" stroke="#7592FF"/>
</svg>

After

Width:  |  Height:  |  Size: 3.0 KiB

View File

@@ -0,0 +1,26 @@
<svg width="494" height="156" viewBox="0 0 494 156" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect x="0.515625" y="13.4492" width="32.6255" height="71" rx="6.26271" fill="#465FFF"/>
<rect x="0.515625" y="13.4492" width="32.6255" height="71" rx="6.26271" stroke="#465FFF"/>
<rect x="88.6436" y="80.1699" width="32.6255" height="63.5801" rx="6.26271" fill="#465FFF"/>
<rect x="88.6436" y="80.1699" width="32.6255" height="63.5801" rx="6.26271" stroke="#465FFF"/>
<rect x="15.8799" y="33.4873" width="32.6255" height="105.389" rx="6.26271" transform="rotate(-90 15.8799 33.4873)" fill="#465FFF"/>
<rect x="15.8799" y="33.4873" width="32.6255" height="105.389" rx="6.26271" transform="rotate(-90 15.8799 33.4873)" stroke="#465FFF"/>
<rect x="0.515625" y="155.18" width="30" height="107.028" rx="6.26271" transform="rotate(-90 0.515625 155.18)" fill="#465FFF"/>
<rect x="0.515625" y="155.18" width="30" height="107.028" rx="6.26271" transform="rotate(-90 0.515625 155.18)" stroke="#465FFF"/>
<rect x="15.8799" y="96.3599" width="32.6255" height="91.6638" rx="6.26271" transform="rotate(-90 15.8799 96.3599)" fill="#465FFF"/>
<rect x="15.8799" y="96.3599" width="32.6255" height="91.6638" rx="6.26271" transform="rotate(-90 15.8799 96.3599)" stroke="#465FFF"/>
<rect x="163.27" y="12.8696" width="166.462" height="130.311" rx="28" stroke="#465FFF" stroke-width="24"/>
<rect x="213.874" y="42.0487" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="257.523" y="42.0487" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="268.972" y="98.578" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="202.425" y="98.578" width="22.1453" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="218.167" y="83.1932" width="57.5655" height="20.7141" rx="2.63433" fill="#465FFF" stroke="#465FFF" stroke-width="0.752667"/>
<rect x="460.859" y="11.188" width="32.6255" height="132.562" rx="6.26271" fill="#465FFF"/>
<rect x="460.859" y="11.188" width="32.6255" height="132.562" rx="6.26271" stroke="#465FFF"/>
<rect x="371.731" y="33.4458" width="32.6255" height="107.028" rx="6.26271" transform="rotate(-90 371.731 33.4458)" fill="#465FFF"/>
<rect x="371.731" y="33.4458" width="32.6255" height="107.028" rx="6.26271" transform="rotate(-90 371.731 33.4458)" stroke="#465FFF"/>
<rect x="371.731" y="155.18" width="30" height="107.028" rx="6.26271" transform="rotate(-90 371.731 155.18)" fill="#465FFF"/>
<rect x="371.731" y="155.18" width="30" height="107.028" rx="6.26271" transform="rotate(-90 371.731 155.18)" stroke="#465FFF"/>
<rect x="388.096" y="93.7812" width="32.6255" height="91.6638" rx="6.26271" transform="rotate(-90 388.096 93.7812)" fill="#465FFF"/>
<rect x="388.096" y="93.7812" width="32.6255" height="91.6638" rx="6.26271" transform="rotate(-90 388.096 93.7812)" stroke="#465FFF"/>
</svg>

After

Width:  |  Height:  |  Size: 2.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

View File

@@ -0,0 +1,53 @@
<svg width="231" height="48" viewBox="0 0 231 48" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M0.425781 12.6316C0.425781 5.65535 6.08113 0 13.0574 0H35.7942C42.7704 0 48.4258 5.65535 48.4258 12.6316V35.3684C48.4258 42.3446 42.7704 48 35.7942 48H13.0574C6.08113 48 0.425781 42.3446 0.425781 35.3684V12.6316Z" fill="#465FFF"/>
<g filter="url(#filter0_d_3903_56743)">
<path d="M13.0615 12.6323C13.0615 11.237 14.1926 10.106 15.5878 10.106C16.9831 10.106 18.1142 11.237 18.1142 12.6323V35.3691C18.1142 36.7644 16.9831 37.8954 15.5878 37.8954C14.1926 37.8954 13.0615 36.7644 13.0615 35.3691V12.6323Z" fill="white"/>
</g>
<g filter="url(#filter1_d_3903_56743)">
<path d="M22.5391 22.7353C22.5391 21.3401 23.6701 20.209 25.0654 20.209C26.4606 20.209 27.5917 21.3401 27.5917 22.7353V35.3669C27.5917 36.7621 26.4606 37.8932 25.0654 37.8932C23.6701 37.8932 22.5391 36.7621 22.5391 35.3669V22.7353Z" fill="white" fill-opacity="0.9" shape-rendering="crispEdges"/>
</g>
<g filter="url(#filter2_d_3903_56743)">
<path d="M32.0078 16.4189C32.0078 15.0236 33.1389 13.8926 34.5341 13.8926C35.9294 13.8926 37.0604 15.0236 37.0604 16.4189V35.3663C37.0604 36.7615 35.9294 37.8926 34.5341 37.8926C33.1389 37.8926 32.0078 36.7615 32.0078 35.3663V16.4189Z" fill="white" fill-opacity="0.7" shape-rendering="crispEdges"/>
</g>
<path d="M66.4258 15.1724H74.0585V37.0363H78.6239V15.1724H86.2567V10.9637H66.4258V15.1724Z" fill="white"/>
<path d="M91.3521 37.5C94.0984 37.5 96.4881 36.2516 97.2371 34.4326L97.5581 37.0363H101.375V26.3362C101.375 21.4498 98.4498 18.8818 93.7061 18.8818C88.9267 18.8818 85.788 21.3785 85.788 25.1948H89.4974C89.4974 23.3402 90.9241 22.2701 93.4921 22.2701C95.7035 22.2701 97.1301 23.2332 97.1301 25.6229V26.0152L91.8514 26.4075C87.6784 26.7285 85.3243 28.7616 85.3243 32.0073C85.3243 35.3243 87.607 37.5 91.3521 37.5ZM92.7788 34.2186C90.8171 34.2186 89.747 33.4339 89.747 31.8289C89.747 30.4022 90.7814 29.5106 93.4921 29.2609L97.1658 28.9756V29.9029C97.1658 32.6136 95.4538 34.2186 92.7788 34.2186Z" fill="white"/>
<path d="M107.825 15.8857C109.252 15.8857 110.429 14.7087 110.429 13.2464C110.429 11.784 109.252 10.6427 107.825 10.6427C106.327 10.6427 105.15 11.784 105.15 13.2464C105.15 14.7087 106.327 15.8857 107.825 15.8857ZM105.649 37.0363H110.001V19.4168H105.649V37.0363Z" fill="white"/>
<path d="M118.883 37.0363V10.5H114.568V37.0363H118.883Z" fill="white"/>
<path d="M126.337 37.0363L128.441 31.0086H138.179L140.283 37.0363H145.098L135.682 10.9637H131.009L121.593 37.0363H126.337ZM132.757 18.7391C133.007 18.0258 133.221 17.2411 133.328 16.7417C133.399 17.2768 133.649 18.0614 133.863 18.7391L136.859 27.1565H129.797L132.757 18.7391Z" fill="white"/>
<path d="M154.165 37.5C156.84 37.5 159.122 36.323 160.192 34.29L160.478 37.0363H164.472V10.5H160.157V21.6638C159.051 19.9161 156.875 18.8818 154.414 18.8818C149.1 18.8818 145.89 22.8052 145.89 28.2979C145.89 33.755 149.064 37.5 154.165 37.5ZM155.128 33.5053C152.096 33.5053 150.241 31.2939 150.241 28.1552C150.241 25.0165 152.096 22.7695 155.128 22.7695C158.159 22.7695 160.121 24.9808 160.121 28.1552C160.121 31.3296 158.159 33.5053 155.128 33.5053Z" fill="white"/>
<path d="M173.359 37.0363V27.0495C173.359 24.1962 175.035 22.8408 177.104 22.8408C179.172 22.8408 180.492 24.1605 180.492 26.6215V37.0363H184.843V27.0495C184.843 24.1605 186.448 22.8052 188.553 22.8052C190.621 22.8052 191.977 24.1248 191.977 26.6572V37.0363H196.292V25.5159C196.292 21.4498 193.938 18.8818 189.658 18.8818C186.983 18.8818 184.915 20.2015 184.023 22.2345C183.096 20.2015 181.241 18.8818 178.566 18.8818C176.034 18.8818 174.25 20.0231 173.359 21.4855L173.002 19.4168H169.007V37.0363H173.359Z" fill="white"/>
<path d="M202.74 15.8857C204.167 15.8857 205.344 14.7087 205.344 13.2464C205.344 11.784 204.167 10.6427 202.74 10.6427C201.242 10.6427 200.065 11.784 200.065 13.2464C200.065 14.7087 201.242 15.8857 202.74 15.8857ZM200.564 37.0363H204.916V19.4168H200.564V37.0363Z" fill="white"/>
<path d="M213.763 37.0363V27.5489C213.763 24.6955 215.403 22.8408 218.078 22.8408C220.325 22.8408 221.788 24.2675 221.788 27.2279V37.0363H226.139V26.1935C226.139 21.6281 223.856 18.8818 219.434 18.8818C217.044 18.8818 214.904 19.9161 213.798 21.6995L213.442 19.4168H209.411V37.0363H213.763Z" fill="white"/>
<defs>
<filter id="filter0_d_3903_56743" x="12.0615" y="9.60596" width="7.05273" height="29.7896" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dy="0.5"/>
<feGaussianBlur stdDeviation="0.5"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_3903_56743"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_3903_56743" result="shape"/>
</filter>
<filter id="filter1_d_3903_56743" x="21.5391" y="19.709" width="7.05273" height="19.6843" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dy="0.5"/>
<feGaussianBlur stdDeviation="0.5"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_3903_56743"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_3903_56743" result="shape"/>
</filter>
<filter id="filter2_d_3903_56743" x="31.0078" y="13.3926" width="7.05273" height="26" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dy="0.5"/>
<feGaussianBlur stdDeviation="0.5"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_3903_56743"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_3903_56743" result="shape"/>
</filter>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 6.3 KiB

View File

@@ -0,0 +1,53 @@
<svg width="154" height="32" viewBox="0 0 154 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M0 8.42105C0 3.77023 3.77023 0 8.42105 0H23.5789C28.2298 0 32 3.77023 32 8.42105V23.5789C32 28.2298 28.2298 32 23.5789 32H8.42105C3.77023 32 0 28.2298 0 23.5789V8.42105Z" fill="#465FFF"/>
<g filter="url(#filter0_d_1608_324)">
<path d="M8.42383 8.42152C8.42383 7.49135 9.17787 6.7373 10.108 6.7373C11.0382 6.7373 11.7922 7.49135 11.7922 8.42152V23.5794C11.7922 24.5096 11.0382 25.2636 10.108 25.2636C9.17787 25.2636 8.42383 24.5096 8.42383 23.5794V8.42152Z" fill="white"/>
</g>
<g filter="url(#filter1_d_1608_324)">
<path d="M14.7422 15.1569C14.7422 14.2267 15.4962 13.4727 16.4264 13.4727C17.3566 13.4727 18.1106 14.2267 18.1106 15.1569V23.5779C18.1106 24.5081 17.3566 25.2621 16.4264 25.2621C15.4962 25.2621 14.7422 24.5081 14.7422 23.5779V15.1569Z" fill="white" fill-opacity="0.9" shape-rendering="crispEdges"/>
</g>
<g filter="url(#filter2_d_1608_324)">
<path d="M21.0547 10.9459C21.0547 10.0158 21.8087 9.26172 22.7389 9.26172C23.6691 9.26172 24.4231 10.0158 24.4231 10.9459V23.5775C24.4231 24.5077 23.6691 25.2617 22.7389 25.2617C21.8087 25.2617 21.0547 24.5077 21.0547 23.5775V10.9459Z" fill="white" fill-opacity="0.7" shape-rendering="crispEdges"/>
</g>
<path d="M44 10.1149H49.0885V24.6909H52.1321V10.1149H57.2206V7.30912H44V10.1149Z" fill="white"/>
<path d="M60.6175 25C62.4484 25 64.0416 24.1678 64.5409 22.9551L64.7549 24.6909H67.2992V17.5575C67.2992 14.2999 65.3494 12.5878 62.1869 12.5878C59.0006 12.5878 56.9081 14.2523 56.9081 16.7966H59.3811C59.3811 15.5601 60.3322 14.8468 62.0442 14.8468C63.5184 14.8468 64.4696 15.4888 64.4696 17.0819V17.3435L60.9504 17.605C58.1684 17.819 56.599 19.1744 56.599 21.3382C56.599 23.5495 58.1208 25 60.6175 25ZM61.5686 22.8124C60.2609 22.8124 59.5475 22.2893 59.5475 21.2193C59.5475 20.2682 60.2371 19.6737 62.0442 19.5073L64.4934 19.317V19.9353C64.4934 21.7424 63.352 22.8124 61.5686 22.8124Z" fill="white"/>
<path d="M71.5995 10.5905C72.5506 10.5905 73.3353 9.80581 73.3353 8.83091C73.3353 7.85601 72.5506 7.09511 71.5995 7.09511C70.6008 7.09511 69.8161 7.85601 69.8161 8.83091C69.8161 9.80581 70.6008 10.5905 71.5995 10.5905ZM70.149 24.6909H73.0499V12.9445H70.149V24.6909Z" fill="white"/>
<path d="M78.9718 24.6909V7H76.0946V24.6909H78.9718Z" fill="white"/>
<path d="M83.9408 24.6909L85.3437 20.6724H91.8352L93.2381 24.6909H96.4481L90.1707 7.30912H87.0558L80.7784 24.6909H83.9408ZM88.2209 12.4927C88.3873 12.0172 88.53 11.4941 88.6013 11.1612C88.6489 11.5178 88.8153 12.041 88.958 12.4927L90.9554 18.1044H86.2473L88.2209 12.4927Z" fill="white"/>
<path d="M102.493 25C104.276 25 105.798 24.2153 106.511 22.86L106.701 24.6909H109.364V7H106.487V14.4425C105.75 13.2774 104.3 12.5878 102.659 12.5878C99.1161 12.5878 96.9761 15.2034 96.9761 18.8653C96.9761 22.5033 99.0923 25 102.493 25ZM103.135 22.3369C101.113 22.3369 99.877 20.8626 99.877 18.7701C99.877 16.6777 101.113 15.1797 103.135 15.1797C105.156 15.1797 106.464 16.6539 106.464 18.7701C106.464 20.8864 105.156 22.3369 103.135 22.3369Z" fill="white"/>
<path d="M115.289 24.6909V18.033C115.289 16.1308 116.406 15.2272 117.785 15.2272C119.164 15.2272 120.044 16.107 120.044 17.7477V24.6909H122.945V18.033C122.945 16.107 124.015 15.2034 125.418 15.2034C126.797 15.2034 127.701 16.0832 127.701 17.7715V24.6909H130.578V17.0106C130.578 14.2999 129.008 12.5878 126.155 12.5878C124.372 12.5878 122.993 13.4676 122.398 14.823C121.78 13.4676 120.543 12.5878 118.76 12.5878C117.072 12.5878 115.883 13.3487 115.289 14.3236L115.051 12.9445H112.388V24.6909H115.289Z" fill="white"/>
<path d="M134.876 10.5905C135.827 10.5905 136.612 9.80581 136.612 8.83091C136.612 7.85601 135.827 7.09511 134.876 7.09511C133.877 7.09511 133.093 7.85601 133.093 8.83091C133.093 9.80581 133.877 10.5905 134.876 10.5905ZM133.426 24.6909H136.327V12.9445H133.426V24.6909Z" fill="white"/>
<path d="M142.225 24.6909V18.3659C142.225 16.4637 143.318 15.2272 145.102 15.2272C146.6 15.2272 147.575 16.1783 147.575 18.1519V24.6909H150.476V17.4624C150.476 14.4188 148.954 12.5878 146.005 12.5878C144.412 12.5878 142.985 13.2774 142.248 14.4663L142.011 12.9445H139.324V24.6909H142.225Z" fill="white"/>
<defs>
<filter id="filter0_d_1608_324" x="7.42383" y="6.2373" width="5.36841" height="20.5264" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dy="0.5"/>
<feGaussianBlur stdDeviation="0.5"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_1608_324"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_1608_324" result="shape"/>
</filter>
<filter id="filter1_d_1608_324" x="13.7422" y="12.9727" width="5.36841" height="13.7896" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dy="0.5"/>
<feGaussianBlur stdDeviation="0.5"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_1608_324"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_1608_324" result="shape"/>
</filter>
<filter id="filter2_d_1608_324" x="20.0547" y="8.76172" width="5.36841" height="18" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dy="0.5"/>
<feGaussianBlur stdDeviation="0.5"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_1608_324"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_1608_324" result="shape"/>
</filter>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 6.2 KiB

View File

@@ -0,0 +1,44 @@
<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M0 8.42105C0 3.77023 3.77023 0 8.42105 0H23.5789C28.2298 0 32 3.77023 32 8.42105V23.5789C32 28.2298 28.2298 32 23.5789 32H8.42105C3.77023 32 0 28.2298 0 23.5789V8.42105Z" fill="#465FFF"/>
<g filter="url(#filter0_d_1884_16361)">
<path d="M8.42383 8.42152C8.42383 7.49135 9.17787 6.7373 10.108 6.7373C11.0382 6.7373 11.7922 7.49135 11.7922 8.42152V23.5794C11.7922 24.5096 11.0382 25.2636 10.108 25.2636C9.17787 25.2636 8.42383 24.5096 8.42383 23.5794V8.42152Z" fill="white"/>
</g>
<g filter="url(#filter1_d_1884_16361)">
<path d="M14.7422 15.1569C14.7422 14.2267 15.4962 13.4727 16.4264 13.4727C17.3566 13.4727 18.1106 14.2267 18.1106 15.1569V23.5779C18.1106 24.5081 17.3566 25.2621 16.4264 25.2621C15.4962 25.2621 14.7422 24.5081 14.7422 23.5779V15.1569Z" fill="white" fill-opacity="0.9" shape-rendering="crispEdges"/>
</g>
<g filter="url(#filter2_d_1884_16361)">
<path d="M21.0547 10.9459C21.0547 10.0158 21.8087 9.26172 22.7389 9.26172C23.6691 9.26172 24.4231 10.0158 24.4231 10.9459V23.5775C24.4231 24.5077 23.6691 25.2617 22.7389 25.2617C21.8087 25.2617 21.0547 24.5077 21.0547 23.5775V10.9459Z" fill="white" fill-opacity="0.7" shape-rendering="crispEdges"/>
</g>
<defs>
<filter id="filter0_d_1884_16361" x="7.42383" y="6.2373" width="5.36841" height="20.5264" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dy="0.5"/>
<feGaussianBlur stdDeviation="0.5"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_1884_16361"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_1884_16361" result="shape"/>
</filter>
<filter id="filter1_d_1884_16361" x="13.7422" y="12.9727" width="5.36841" height="13.7891" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dy="0.5"/>
<feGaussianBlur stdDeviation="0.5"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_1884_16361"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_1884_16361" result="shape"/>
</filter>
<filter id="filter2_d_1884_16361" x="20.0547" y="8.76172" width="5.36841" height="18" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dy="0.5"/>
<feGaussianBlur stdDeviation="0.5"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_1884_16361"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_1884_16361" result="shape"/>
</filter>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 3.3 KiB

View File

@@ -0,0 +1,71 @@
<svg width="450" height="254" viewBox="0 0 450 254" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M0.50555 45.1131L450 45.1132L450 44.6073L0.50555 44.6072L0.50555 45.1131Z" fill="url(#paint0_linear_3005_4084)" fill-opacity="0.3"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M205.546 253.529L205.546 -2.13709e-05L205.04 -2.1392e-05L205.04 253.529L205.546 253.529Z" fill="url(#paint1_linear_3005_4084)" fill-opacity="0.3"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M0.505546 97.2164L450 97.2165L450 96.7106L0.505546 96.7106L0.505546 97.2164Z" fill="url(#paint2_linear_3005_4084)" fill-opacity="0.3"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M256.806 253.529L256.806 -1.68895e-05L256.3 -1.69106e-05L256.3 253.529L256.806 253.529Z" fill="url(#paint3_linear_3005_4084)" fill-opacity="0.3"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M0.505837 253.529L0.505859 -3.9296e-05L0 -3.93171e-05L-2.21642e-05 253.529L0.505837 253.529Z" fill="url(#paint4_linear_3005_4084)" fill-opacity="0.3"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M0.505541 149.321L450 149.321L450 148.815L0.505541 148.815L0.505541 149.321Z" fill="url(#paint5_linear_3005_4084)" fill-opacity="0.3"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M308.066 253.529L308.066 -1.24083e-05L307.56 -1.24294e-05L307.56 253.529L308.066 253.529Z" fill="url(#paint6_linear_3005_4084)" fill-opacity="0.3"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M51.7662 253.529L51.7662 -3.48147e-05L51.2603 -3.48358e-05L51.2603 253.529L51.7662 253.529Z" fill="url(#paint7_linear_3005_4084)" fill-opacity="0.3"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M0.505537 201.424L450 201.424L450 200.918L0.505537 200.918L0.505537 201.424Z" fill="url(#paint8_linear_3005_4084)" fill-opacity="0.3"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M359.326 253.529L359.326 -7.92695e-06L358.82 -7.94806e-06L358.82 253.529L359.326 253.529Z" fill="url(#paint9_linear_3005_4084)" fill-opacity="0.3"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M103.026 253.529L103.026 -3.03334e-05L102.52 -3.03545e-05L102.52 253.529L103.026 253.529Z" fill="url(#paint10_linear_3005_4084)" fill-opacity="0.3"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M410.586 253.529L410.586 -3.44569e-06L410.08 -3.4668e-06L410.08 253.529L410.586 253.529Z" fill="url(#paint11_linear_3005_4084)" fill-opacity="0.3"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M154.286 253.529L154.286 -2.58521e-05L153.78 -2.58732e-05L153.78 253.529L154.286 253.529Z" fill="url(#paint12_linear_3005_4084)" fill-opacity="0.3"/>
<rect width="50.7536" height="51.5982" transform="matrix(-1 -8.74228e-08 -8.74228e-08 1 358.821 45.1138)" fill="#B2B2B2" fill-opacity="0.08"/>
<rect width="50.756" height="51.5985" transform="matrix(-1 -8.74228e-08 -8.74228e-08 1 307.559 97.2163)" fill="#B2B2B2" fill-opacity="0.08"/>
<defs>
<linearGradient id="paint0_linear_3005_4084" x1="277.872" y1="-9.94587e-06" x2="194.87" y2="235.867" gradientUnits="userSpaceOnUse">
<stop stop-color="#B2B2B2"/>
<stop offset="1" stop-color="#B2B2B2" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint1_linear_3005_4084" x1="277.872" y1="-9.94587e-06" x2="194.87" y2="235.867" gradientUnits="userSpaceOnUse">
<stop stop-color="#B2B2B2"/>
<stop offset="1" stop-color="#B2B2B2" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint2_linear_3005_4084" x1="277.872" y1="-9.94587e-06" x2="194.87" y2="235.867" gradientUnits="userSpaceOnUse">
<stop stop-color="#B2B2B2"/>
<stop offset="1" stop-color="#B2B2B2" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint3_linear_3005_4084" x1="277.872" y1="-9.94587e-06" x2="194.87" y2="235.867" gradientUnits="userSpaceOnUse">
<stop stop-color="#B2B2B2"/>
<stop offset="1" stop-color="#B2B2B2" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint4_linear_3005_4084" x1="277.872" y1="-9.94587e-06" x2="194.87" y2="235.867" gradientUnits="userSpaceOnUse">
<stop stop-color="#B2B2B2"/>
<stop offset="1" stop-color="#B2B2B2" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint5_linear_3005_4084" x1="277.872" y1="-9.94587e-06" x2="194.87" y2="235.867" gradientUnits="userSpaceOnUse">
<stop stop-color="#B2B2B2"/>
<stop offset="1" stop-color="#B2B2B2" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint6_linear_3005_4084" x1="277.872" y1="-9.94587e-06" x2="194.87" y2="235.867" gradientUnits="userSpaceOnUse">
<stop stop-color="#B2B2B2"/>
<stop offset="1" stop-color="#B2B2B2" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint7_linear_3005_4084" x1="277.872" y1="-9.94587e-06" x2="194.87" y2="235.867" gradientUnits="userSpaceOnUse">
<stop stop-color="#B2B2B2"/>
<stop offset="1" stop-color="#B2B2B2" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint8_linear_3005_4084" x1="277.872" y1="-9.94587e-06" x2="194.87" y2="235.867" gradientUnits="userSpaceOnUse">
<stop stop-color="#B2B2B2"/>
<stop offset="1" stop-color="#B2B2B2" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint9_linear_3005_4084" x1="277.872" y1="-9.94587e-06" x2="194.87" y2="235.867" gradientUnits="userSpaceOnUse">
<stop stop-color="#B2B2B2"/>
<stop offset="1" stop-color="#B2B2B2" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint10_linear_3005_4084" x1="277.872" y1="-9.94587e-06" x2="194.87" y2="235.867" gradientUnits="userSpaceOnUse">
<stop stop-color="#B2B2B2"/>
<stop offset="1" stop-color="#B2B2B2" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint11_linear_3005_4084" x1="277.872" y1="-9.94587e-06" x2="194.87" y2="235.867" gradientUnits="userSpaceOnUse">
<stop stop-color="#B2B2B2"/>
<stop offset="1" stop-color="#B2B2B2" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint12_linear_3005_4084" x1="277.872" y1="-9.94587e-06" x2="194.87" y2="235.867" gradientUnits="userSpaceOnUse">
<stop stop-color="#B2B2B2"/>
<stop offset="1" stop-color="#B2B2B2" stop-opacity="0"/>
</linearGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 5.9 KiB

5
frontend/sgconfig.yml Normal file
View File

@@ -0,0 +1,5 @@
ruleDirs:
- .rules
languageGlobs:
TypeScript: ["*.ts"]
Tsx: ["*.tsx"]

15
frontend/src/App.tsx Normal file
View File

@@ -0,0 +1,15 @@
import { RouterProvider } from 'react-router-dom';
import { AuthProvider } from '@/context/AuthContext';
import { router } from '@/routes';
import { Toaster } from 'sonner';
function App() {
return (
<AuthProvider>
<RouterProvider router={router} />
<Toaster position="top-right" richColors closeButton />
</AuthProvider>
);
}
export default App;

View File

@@ -0,0 +1,22 @@
import { useEffect } from 'react';
import { useLocation } from 'react-router-dom';
import { Observer } from 'tailwindcss-intersect';
// Headless component: restarts the tailwindcss-intersect observer after each
// route change so elements mounted by the new page get observed.
function IntersectObserver() {
  const location = useLocation();

  useEffect(() => {
    // A short delay lets the DOM finish updating before the observer
    // re-scans for intersection targets.
    const timerId = setTimeout(() => Observer.restart(), 100);
    return () => clearTimeout(timerId);
  }, [location]);

  return null;
}

export default IntersectObserver;

View File

@@ -0,0 +1,25 @@
import { HelmetProvider, Helmet } from "react-helmet-async";
import { TooltipProvider } from "@/components/ui/tooltip";
const PageMeta = ({
title,
description,
}: {
title: string;
description: string;
}) => (
<Helmet>
<title>{title}</title>
<meta name="description" content={description} />
</Helmet>
);
// Top-level provider shell: enables react-helmet-async head management and
// tooltip support for the entire component tree.
export function AppWrapper({ children }: { children: React.ReactNode }) {
  return (
    <HelmetProvider>
      <TooltipProvider>{children}</TooltipProvider>
    </HelmetProvider>
  );
}
export default PageMeta;

View File

@@ -0,0 +1,25 @@
import React from 'react';
import { Navigate, useLocation } from 'react-router-dom';
import { useAuth } from '@/context/AuthContext';
export const RouteGuard: React.FC<{ children: React.ReactNode }> = ({ children }) => {
const { user, loading } = useAuth();
const location = useLocation();
if (loading) {
return (
<div className="flex h-screen w-screen items-center justify-center bg-background">
<div className="flex flex-col items-center gap-4">
<div className="h-12 w-12 animate-spin rounded-full border-4 border-primary border-t-transparent" />
<p className="text-sm font-medium text-muted-foreground">...</p>
</div>
</div>
);
}
if (!user) {
return <Navigate to="/login" state={{ from: location }} replace />;
}
return <>{children}</>;
};

View File

@@ -0,0 +1,227 @@
import { cn } from '@/lib/utils'
import { type UseSupabaseUploadReturn } from '@/hooks/use-supabase-upload'
import { Button } from '@/components/ui/button'
import { CheckCircle, File, Loader2, Upload, X } from 'lucide-react'
import { createContext, type PropsWithChildren, useCallback, useContext } from 'react'
/**
 * Format a byte count as a human-readable string using decimal units (k = 1000).
 *
 * @param bytes    number of bytes to format
 * @param decimals maximum decimal places (negative values are treated as 0)
 * @param size     optional fixed unit; when given, the value is converted to
 *                 that unit instead of auto-selecting one
 * @returns a string such as "1.5 KB" or "0 bytes"
 */
export const formatBytes = (
  bytes: number,
  decimals = 2,
  size?: 'bytes' | 'KB' | 'MB' | 'GB' | 'TB' | 'PB' | 'EB' | 'ZB' | 'YB'
) => {
  const k = 1000
  const dm = decimals < 0 ? 0 : decimals
  const sizes = ['bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
  if (bytes === 0 || bytes === undefined) return size !== undefined ? `0 ${size}` : '0 bytes'
  // Clamp the unit index: for 0 < bytes < 1 the log is negative, which would
  // index sizes[-1] (undefined) and render e.g. "500 undefined"; absurdly
  // large values would likewise run past the end of the table.
  const rawIndex = size !== undefined ? sizes.indexOf(size) : Math.floor(Math.log(bytes) / Math.log(k))
  const i = Math.min(Math.max(rawIndex, 0), sizes.length - 1)
  return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + ' ' + sizes[i]
}
// Upload state shared with descendant components: everything the upload hook
// returns except the react-dropzone prop getters, which Dropzone consumes itself.
type DropzoneContextType = Omit<UseSupabaseUploadReturn, 'getRootProps' | 'getInputProps'>
// Context is undefined outside a <Dropzone>; consumers must guard for that.
const DropzoneContext = createContext<DropzoneContextType | undefined>(undefined)
// Props for <Dropzone>: the full upload-hook return plus optional extra classes.
type DropzoneProps = UseSupabaseUploadReturn & {
  className?: string
}
/**
 * Drop-target wrapper: publishes the upload state to descendants via context
 * and styles the bordered drop area according to drag / success / error state.
 */
const Dropzone = ({
  className,
  children,
  getRootProps,
  getInputProps,
  ...restProps
}: PropsWithChildren<DropzoneProps>) => {
  const { isSuccess, isDragActive, isDragReject, errors, files } = restProps

  // Invalid when a rejected drag hovers, a prior upload errored (and we are
  // not in the success state), or any selected file failed validation.
  const hasFileErrors = files.some((file) => file.errors.length !== 0)
  const isInvalid =
    (isDragActive && isDragReject) ||
    (errors.length > 0 && !isSuccess) ||
    hasFileErrors

  const rootClassName = cn(
    'border-2 border-gray-300 rounded-lg p-6 text-center bg-card transition-colors duration-300 text-foreground',
    className,
    isSuccess ? 'border-solid' : 'border-dashed',
    isDragActive && 'border-primary bg-primary/10',
    isInvalid && 'border-destructive bg-destructive/10'
  )

  return (
    <DropzoneContext.Provider value={{ ...restProps }}>
      <div {...getRootProps({ className: rootClassName })}>
        <input {...getInputProps()} />
        {children}
      </div>
    </DropzoneContext.Provider>
  )
}
/**
 * Renders the selected-file list inside a Dropzone: per-file thumbnail or
 * icon, size, validation/upload status, remove buttons, the over-limit
 * warning, and the upload trigger. Collapses to a summary once isSuccess.
 */
const DropzoneContent = ({ className }: { className?: string }) => {
  const {
    files,
    setFiles,
    onUpload,
    loading,
    successes,
    errors,
    maxFileSize,
    maxFiles,
    isSuccess,
  } = useDropzoneContext()
  // More files selected than allowed — hides the upload button below.
  const exceedMaxFiles = files.length > maxFiles
  // Drop a file from the pending list by name (assumes names are unique
  // within one selection — TODO confirm against the upload hook).
  const handleRemoveFile = useCallback(
    (fileName: string) => {
      setFiles(files.filter((file) => file.name !== fileName))
    },
    [files, setFiles]
  )
  // Entire list is replaced by a one-line summary after a successful upload.
  if (isSuccess) {
    return (
      <div className={cn('flex flex-row items-center gap-x-2 justify-center', className)}>
        <CheckCircle size={16} className="text-primary" />
        <p className="text-primary text-sm">
          Successfully uploaded {files.length} file{files.length > 1 ? 's' : ''}
        </p>
      </div>
    )
  }
  return (
    <div className={cn('flex flex-col', className)}>
      {files.map((file, idx) => {
        // Upload-time error for this file; client-side validation errors
        // live on file.errors instead.
        const fileError = errors.find((e) => e.name === file.name)
        const isSuccessfullyUploaded = !!successes.find((e) => e === file.name)
        return (
          <div
            key={`${file.name}-${idx}`}
            className="flex items-center gap-x-4 border-b py-2 first:mt-4 last:mb-4 "
          >
            {/* Image files get an inline thumbnail; everything else a generic icon. */}
            {file.type.startsWith('image/') ? (
              <div className="h-10 w-10 rounded border overflow-hidden shrink-0 bg-muted flex items-center justify-center">
                <img src={file.preview} alt={file.name} className="object-cover" />
              </div>
            ) : (
              <div className="h-10 w-10 rounded border bg-muted flex items-center justify-center">
                <File size={18} />
              </div>
            )}
            <div className="shrink grow flex flex-col items-start truncate">
              <p title={file.name} className="text-sm truncate max-w-full">
                {file.name}
              </p>
              {/* Status line priority: validation errors > uploading > upload
                  failure > uploaded > plain size. */}
              {file.errors.length > 0 ? (
                <p className="text-xs text-destructive">
                  {file.errors
                    .map((e) =>
                      e.message.startsWith('File is larger than')
                        ? `File is larger than ${formatBytes(maxFileSize, 2)} (Size: ${formatBytes(file.size, 2)})`
                        : e.message
                    )
                    .join(', ')}
                </p>
              ) : loading && !isSuccessfullyUploaded ? (
                <p className="text-xs text-muted-foreground">Uploading file...</p>
              ) : !!fileError ? (
                <p className="text-xs text-destructive">Failed to upload: {fileError.message}</p>
              ) : isSuccessfullyUploaded ? (
                <p className="text-xs text-primary">Successfully uploaded file</p>
              ) : (
                <p className="text-xs text-muted-foreground">{formatBytes(file.size, 2)}</p>
              )}
            </div>
            {/* Remove is only offered while the file is neither uploading nor done. */}
            {!loading && !isSuccessfullyUploaded && (
              <Button
                size="icon"
                variant="link"
                className="shrink-0 justify-self-end text-muted-foreground hover:text-foreground"
                onClick={() => handleRemoveFile(file.name)}
              >
                <X />
              </Button>
            )}
          </div>
        )
      })}
      {exceedMaxFiles && (
        <p className="text-sm text-left mt-2 text-destructive">
          You may upload only up to {maxFiles} files, please remove {files.length - maxFiles} file
          {files.length - maxFiles > 1 ? 's' : ''}.
        </p>
      )}
      {/* Upload button is disabled while any file has validation errors or an
          upload is in flight. */}
      {files.length > 0 && !exceedMaxFiles && (
        <div className="mt-2">
          <Button
            variant="outline"
            onClick={onUpload}
            disabled={files.some((file) => file.errors.length !== 0) || loading}
          >
            {loading ? (
              <>
                <Loader2 className="mr-2 h-4 w-4 animate-spin" />
                Uploading...
              </>
            ) : (
              <>Upload files</>
            )}
          </Button>
        </div>
      )}
    </div>
  )
}
/**
 * Placeholder prompt shown inside the Dropzone before/while files are chosen:
 * upload icon, drag-and-drop hint, click-to-select link, and the size limit.
 */
const DropzoneEmptyState = ({ className }: { className?: string }) => {
  const { maxFiles, maxFileSize, inputRef, isSuccess } = useDropzoneContext()
  // Nothing to prompt for once the upload has completed.
  if (isSuccess) {
    return null
  }
  return (
    <div className={cn('flex flex-col items-center gap-y-2', className)}>
      <Upload size={20} className="text-muted-foreground" />
      {/* Pluralizes and shows the file-count limit when more than one is allowed. */}
      <p className="text-sm">
        Upload{!!maxFiles && maxFiles > 1 ? ` ${maxFiles}` : ''} file
        {!maxFiles || maxFiles > 1 ? 's' : ''}
      </p>
      <div className="flex flex-col items-center gap-y-1">
        <p className="text-xs text-muted-foreground">
          Drag and drop or{' '}
          {/* Clicking forwards to the hidden file input owned by the Dropzone root. */}
          <a
            onClick={() => inputRef.current?.click()}
            className="underline cursor-pointer transition hover:text-foreground"
          >
            select {maxFiles === 1 ? `file` : 'files'}
          </a>{' '}
          to upload
        </p>
        {/* Only show a size limit when one is actually configured. */}
        {maxFileSize !== Number.POSITIVE_INFINITY && (
          <p className="text-xs text-muted-foreground">
            Maximum file size: {formatBytes(maxFileSize, 2)}
          </p>
        )}
      </div>
    </div>
  )
}
const useDropzoneContext = () => {
const context = useContext(DropzoneContext)
if (!context) {
throw new Error('useDropzoneContext must be used within a Dropzone')
}
return context
}
export { Dropzone, DropzoneContent, DropzoneEmptyState, useDropzoneContext }

View File

@@ -0,0 +1,124 @@
import React from 'react';
import { Link, useLocation, Outlet, useNavigate } from 'react-router-dom';
import {
LayoutDashboard,
FileText,
TableProperties,
MessageSquareCode,
LogOut,
Menu,
X,
ChevronRight,
User,
Sparkles
} from 'lucide-react';
import { Button } from '@/components/ui/button';
import { useAuth } from '@/context/AuthContext';
import { cn } from '@/lib/utils';
import { Sheet, SheetContent, SheetTrigger } from '@/components/ui/sheet';
// Sidebar navigation entries. `name` is the user-facing label (Chinese UI
// text — do not translate; it is rendered verbatim), `path` the route, and
// `icon` a lucide-react component rendered next to the label.
const navItems = [
  { name: '控制台', path: '/', icon: LayoutDashboard },
  { name: '文档中心', path: '/documents', icon: FileText },
  { name: 'Excel 解析', path: '/excel-parse', icon: Sparkles },
  { name: '智能填表', path: '/form-fill', icon: TableProperties },
  { name: '智能助手', path: '/assistant', icon: MessageSquareCode },
];
/**
 * Application shell: a persistent sidebar on desktop, a sheet drawer on
 * mobile, a user card with sign-out, and an <Outlet /> where routed pages
 * render. Navigation items come from the module-level navItems list.
 */
const MainLayout: React.FC = () => {
  const { user, profile, signOut } = useAuth();
  const location = useLocation();
  const navigate = useNavigate();
  // Terminate the session, then send the user back to the login screen.
  const handleSignOut = async () => {
    await signOut();
    navigate('/login');
  };
  // Shared sidebar markup, rendered inside both the desktop aside and the
  // mobile sheet so the two stay identical.
  const SidebarContent = () => (
    <div className="flex flex-col h-full bg-sidebar py-6 border-r border-sidebar-border">
      <div className="px-6 mb-10 flex items-center gap-2">
        <div className="w-10 h-10 rounded-xl bg-primary flex items-center justify-center text-primary-foreground shadow-lg shadow-primary/20">
          <FileText size={24} />
        </div>
        <div className="flex flex-col">
          {/* NOTE(review): brand title/subtitle spans are empty — confirm the
              text wasn't lost (e.g. stripped during export). */}
          <span className="font-bold text-lg tracking-tight text-sidebar-foreground"></span>
          <span className="text-xs text-muted-foreground"></span>
        </div>
      </div>
      <nav className="flex-1 px-4 space-y-2">
        {navItems.map((item) => {
          {/* Exact-path match drives the active highlight (no prefix matching). */}
          const isActive = location.pathname === item.path;
          return (
            <Link
              key={item.path}
              to={item.path}
              className={cn(
                "flex items-center gap-3 px-4 py-3 rounded-xl transition-all duration-200 group",
                isActive
                  ? "bg-primary text-primary-foreground shadow-md shadow-primary/20"
                  : "text-sidebar-foreground/70 hover:bg-sidebar-accent hover:text-sidebar-accent-foreground"
              )}
            >
              <item.icon size={20} className={cn(isActive ? "text-white" : "group-hover:scale-110 transition-transform")} />
              <span className="font-medium">{item.name}</span>
              {isActive && <ChevronRight size={16} className="ml-auto" />}
            </Link>
          );
        })}
      </nav>
      <div className="px-4 mt-auto">
        {/* Signed-in user card; profile shape is untyped here, hence the casts
            — presumably { email, role }; verify against AuthContext. */}
        <div className="bg-sidebar-accent/50 rounded-2xl p-4 mb-4 border border-sidebar-border/50">
          <div className="flex items-center gap-3">
            <div className="w-10 h-10 rounded-full bg-secondary flex items-center justify-center border-2 border-primary/10">
              <User size={20} className="text-primary" />
            </div>
            <div className="flex flex-col overflow-hidden">
              <span className="font-semibold text-sm truncate">{((profile as any)?.email) || '用户'}</span>
              <span className="text-[10px] uppercase tracking-wider text-muted-foreground">{((profile as any)?.role) || 'User'}</span>
            </div>
          </div>
        </div>
        <Button
          variant="outline"
          className="w-full justify-start gap-3 border-none hover:bg-destructive/10 hover:text-destructive group rounded-xl"
          onClick={handleSignOut}
        >
          <LogOut size={18} className="group-hover:rotate-180 transition-transform duration-300" />
          {/* NOTE(review): label reads "退" — looks truncated; expected "退出"
              (sign out). Confirm against the original source. */}
          <span>退</span>
        </Button>
      </div>
    </div>
  );
  return (
    <div className="flex min-h-screen w-full bg-background overflow-hidden">
      {/* Desktop Sidebar */}
      <aside className="hidden lg:block w-72 shrink-0">
        <SidebarContent />
      </aside>
      {/* Mobile Sidebar */}
      <Sheet>
        <SheetTrigger asChild>
          <Button variant="ghost" size="icon" className="lg:hidden fixed top-4 left-4 z-50 bg-background/50 backdrop-blur shadow-sm">
            <Menu size={24} />
          </Button>
        </SheetTrigger>
        <SheetContent side="left" className="p-0 w-72 border-none">
          <SidebarContent />
        </SheetContent>
      </Sheet>
      <main className="flex-1 overflow-y-auto relative p-6 lg:p-10 pt-20 lg:pt-10">
        <div className="max-w-7xl mx-auto space-y-8 animate-fade-in">
          <Outlet />
        </div>
      </main>
    </div>
  );
};

Some files were not shown because too many files have changed in this diff Show More