import { Injectable, Logger, OnModuleInit } from '@nestjs/common';
import OpenAI from 'openai';
import { Stream } from 'openai/streaming';
import { AI_CONSTANTS, SYSTEM_PROMPT_TEMPLATE, DEFAULT_USER_PROMPT } from './constants/ai.constants';
import { AIConfigException, AIAnalysisException, AIStreamException } from './exceptions/ai.exceptions';
import { DataFormatter, RetryHelper } from './utils/ai.utils';
/**
 * Input payload accepted by {@link AIService.analyzeDataStream}.
 */
export interface AIAnalysisRequest {
/** Optional override for SYSTEM_PROMPT_TEMPLATE. */
systemPrompt?: string;
/** Statistics payload to analyze; run through DataFormatter.formatStatisticsData before prompting. */
data: any;
/** Optional override for DEFAULT_USER_PROMPT; the formatted data is appended to it. */
userPrompt?: string;
}
/**
 * Success/error envelope for AI analysis results.
 * NOTE(review): not referenced anywhere in this file — presumably used by a
 * non-streaming caller; confirm before removing.
 */
export interface AIAnalysisResponse {
/** True when the analysis completed without error. */
success: boolean;
/** Analysis result when success is true. */
data?: any;
/** Human-readable error description when success is false. */
error?: string;
}
@Injectable()
export class AIService implements OnModuleInit {
  private readonly logger = new Logger(AIService.name);
  /** Undefined until initializeOpenAI() succeeds; every caller must check. */
  private openai?: OpenAI;
  /** May be undefined when the environment variable is not set. */
  private readonly apiKey?: string;
  /** May be undefined when the environment variable is not set. */
  private readonly baseURL?: string;
  private readonly model: string;

  constructor() {
    // Config keys contain dots, so bracket access on process.env is required.
    // Missing values are tolerated here; init below degrades gracefully.
    this.apiKey = process.env['loveapp.tradeERP.ai.apiKey'];
    this.baseURL = process.env['loveapp.tradeERP.ai.baseURL'];
    this.model = AI_CONSTANTS.DEFAULT_MODEL;
  }

  async onModuleInit(): Promise<void> {
    await this.initializeOpenAI();
  }

  /** Normalize an unknown thrown value into a loggable message/stack pair. */
  private static describeError(error: unknown): { message: string; stack?: string } {
    return error instanceof Error
      ? { message: error.message, stack: error.stack }
      : { message: String(error) };
  }

  /**
   * Create the OpenAI client. When configuration is incomplete or client
   * construction throws, the service stays disabled (this.openai remains
   * undefined) instead of failing module initialization.
   */
  private async initializeOpenAI(): Promise<void> {
    try {
      if (!this.apiKey || !this.baseURL) {
        this.logger.warn('AI服务配置不完整,将禁用AI功能', {
          hasApiKey: !!this.apiKey,
          hasBaseURL: !!this.baseURL,
        });
        return;
      }
      this.openai = new OpenAI({
        apiKey: this.apiKey,
        baseURL: this.baseURL,
        timeout: AI_CONSTANTS.DEFAULT_TIMEOUT,
        maxRetries: AI_CONSTANTS.DEFAULT_MAX_RETRIES,
      });
      this.logger.log('AI服务初始化成功');
    } catch (error: unknown) {
      const { message, stack } = AIService.describeError(error);
      this.logger.error('AI服务初始化失败,将禁用AI功能', { error: message, stack });
    }
  }

  /**
   * Format raw statistics data for inclusion in the model prompt.
   * @throws AIAnalysisException when the formatter rejects the input.
   */
  private formatData(data: any): any {
    try {
      return DataFormatter.formatStatisticsData(data);
    } catch (error: unknown) {
      this.logger.error('数据格式化失败', AIService.describeError(error).stack);
      throw new AIAnalysisException('数据格式化失败', error);
    }
  }

  /**
   * Run a streaming chat-completion analysis over the supplied data.
   *
   * @param request prompts (optional) and the data to analyze (required)
   * @returns the raw OpenAI chunk stream for the caller to iterate
   * @throws AIConfigException when the client was never initialized
   * @throws AIAnalysisException when data is missing or cannot be formatted
   * @throws AIStreamException when the upstream call fails after retries
   */
  async analyzeDataStream(request: AIAnalysisRequest): Promise<Stream<OpenAI.Chat.Completions.ChatCompletionChunk>> {
    const startTime = Date.now();
    try {
      // Capture into a local so the non-null check carries into the closure.
      const client = this.openai;
      if (!client) {
        throw new AIConfigException('AI服务未初始化或配置不完整,请检查API密钥和基础URL配置');
      }
      if (!request.data) {
        throw new AIAnalysisException(AI_CONSTANTS.ERROR_MESSAGES.EMPTY_DATA);
      }
      const formattedData = this.formatData(request.data);
      const systemPrompt = request.systemPrompt || SYSTEM_PROMPT_TEMPLATE;
      const userPrompt = request.userPrompt || DEFAULT_USER_PROMPT;
      const userMessage = `${userPrompt} \n${JSON.stringify(formattedData, null, 2)}`;
      const stream = await RetryHelper.withRetry(
        async () =>
          client.chat.completions.create({
            model: this.model,
            messages: [
              { role: 'system', content: systemPrompt },
              { role: 'user', content: userMessage },
            ],
            temperature: AI_CONSTANTS.DEFAULT_TEMPERATURE,
            max_tokens: AI_CONSTANTS.DEFAULT_MAX_TOKENS,
            stream: true,
          }),
        AI_CONSTANTS.DEFAULT_MAX_RETRIES,
        1000,
      );
      const duration = Date.now() - startTime;
      this.logger.log(`AI流式分析请求成功`, { duration: `${duration}ms` });
      return stream;
    } catch (error: unknown) {
      const duration = Date.now() - startTime;
      const { message, stack } = AIService.describeError(error);
      this.logger.error('AI流式分析失败', {
        duration: `${duration}ms`,
        error: message,
        stack,
      });
      // Domain exceptions pass through unchanged so callers can tell them apart.
      if (error instanceof AIAnalysisException || error instanceof AIConfigException) {
        throw error;
      }
      throw new AIStreamException(AI_CONSTANTS.ERROR_MESSAGES.STREAM_FAILED, {
        originalError: message,
        duration,
      });
    }
  }

  /** True when the OpenAI client was successfully created. */
  isAvailable(): boolean {
    return !!this.openai;
  }

  /** Diagnostic snapshot of the service configuration state. */
  getStatus(): { available: boolean; model: string; hasApiKey: boolean; hasBaseURL: boolean } {
    return {
      available: this.isAvailable(),
      model: this.model,
      hasApiKey: !!this.apiKey,
      hasBaseURL: !!this.baseURL,
    };
  }
}
import { Controller, Post, Body, Res, Logger, UsePipes } from '@nestjs/common';
import { FastifyReply } from 'fastify';
import { AIService } from './ai.service';
import { AIAnalysisDto } from './dto/ai-analysis.dto';
import { FieldsToJSONPipe } from 'src/common/pipe/formDataToJsonPipe';
import { AI_CONSTANTS } from './constants/ai.constants';
import { AIAnalysisException, AIStreamException, AIConfigException } from './exceptions/ai.exceptions';
@Controller('ai')
export class AIController {
private readonly logger = new Logger(AIController.name);
constructor(private readonly aiService: AIService) {}
@Post('analyze/stream')
@UsePipes(new FieldsToJSONPipe())
async analyzeDataStream(
@Body() analysisDto: AIAnalysisDto,
@Res() res: FastifyReply
): Promise<void> {
const requestId = `req_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
try {
if (!this.aiService.isAvailable()) {
const status = this.aiService.getStatus();
this.logger.warn(`[${requestId}] AI服务不可用`, status);
res.status(503).send({
error: 'AI服务暂时不可用',
message: 'AI服务配置不完整或初始化失败,请检查配置后重试',
details: status,
requestId,
});
return;
}
const origin = res.request.headers.origin;
const headers = {
'Content-Type': 'text/event-stream; charset=utf-8',
'Cache-Control': 'no-cache',
'Connection': 'keep-alive',
'Access-Control-Allow-Origin': origin || '*',
'Access-Control-Allow-Credentials': 'true',
'X-Accel-Buffering': 'no',
};
res.raw.writeHead(200, headers);
this.sendSSEMessage(res, {
type: AI_CONSTANTS.SSE_EVENTS.INIT,
message: '连接已建立,开始分析...',
requestId,
});
const stream = await this.aiService.analyzeDataStream(analysisDto);
let fullContent = '';
let chunkCount = 0;
this.logger.log(`[${requestId}] 开始处理流式响应`);
for await (const chunk of stream) {
const content = chunk.choices[0]?.delta?.content || '';
if (content) {
fullContent += content;
chunkCount++;
this.sendSSEMessage(res, {
type: AI_CONSTANTS.SSE_EVENTS.CONTENT,
content: content,
fullContent: fullContent,
chunkIndex: chunkCount,
});
}
}
this.sendSSEMessage(res, {
type: AI_CONSTANTS.SSE_EVENTS.COMPLETE,
fullContent: fullContent,
totalChunks: chunkCount,
requestId,
});
res.raw.end();
this.logger.log(`[${requestId}] 流式分析完成`, {
totalChunks: chunkCount,
contentLength: fullContent.length
});
} catch (error) {
this.logger.error(`[${requestId}] 流式分析失败`, {
error: error.message,
stack: error.stack,
});
try {
this.sendSSEMessage(res, {
type: AI_CONSTANTS.SSE_EVENTS.ERROR,
error: this.getErrorMessage(error),
requestId,
});
res.raw.end();
} catch (writeError) {
this.logger.error(`[${requestId}] 无法发送错误信息,连接可能已关闭`, {
writeError: writeError.message,
});
}
}
}
private sendSSEMessage(res: FastifyReply, data: any): void {
const sseData = `data: ${JSON.stringify(data)}\n\n`;
res.raw.write(sseData);
}
private getErrorMessage(error: any): string {
if (error instanceof AIAnalysisException) {
return error.message;
}
if (error instanceof AIConfigException) {
return 'AI服务配置错误';
}
if (error instanceof AIStreamException) {
return error.message;
}
return AI_CONSTANTS.ERROR_MESSAGES.ANALYSIS_FAILED;
}
}
| 需求 | 实现方式 |
| --- | --- |
| 图片阻塞问题 | 占位符 + ready 替换 |
| 心跳保活 | 服务端 ping + 前端检测 |
| 断点续传 | 每条消息带 id + 本地记录 lastId |
| 服务端恢复流 | /stream?lastId=xxx |
| 图片缓存 | 临时缓存 + 映射表,不重复生成 |
| 自动重连 | SSE 重连 + 上次 lastId 续传 |