# Node 接入 OpenAI

阅读量 46 · 约 3 分钟
import { Injectable, Logger, OnModuleInit } from '@nestjs/common';
import OpenAI from 'openai';
import { Stream } from 'openai/streaming';
import { AI_CONSTANTS, SYSTEM_PROMPT_TEMPLATE, DEFAULT_USER_PROMPT } from './constants/ai.constants';
import { AIConfigException, AIAnalysisException, AIStreamException } from './exceptions/ai.exceptions';
import { DataFormatter, RetryHelper } from './utils/ai.utils';

/**
 * Input for an AI analysis call (used by both AIService.analyzeDataStream
 * and the /ai/analyze/stream controller endpoint).
 */
export interface AIAnalysisRequest {
    // Optional override for SYSTEM_PROMPT_TEMPLATE.
    systemPrompt?: string;
    // Raw statistics payload; run through DataFormatter.formatStatisticsData
    // before being serialized into the user message.
    data: any;
    // Optional override for DEFAULT_USER_PROMPT.
    userPrompt?: string;
}

/**
 * Generic result envelope for non-streaming AI analysis responses.
 * NOTE(review): not referenced by the visible code — presumably consumed by
 * a non-streaming endpoint elsewhere; verify before removing.
 */
export interface AIAnalysisResponse {
    // True when the analysis completed without error.
    success: boolean;
    // Analysis payload on success.
    data?: any;
    // Human-readable error description on failure.
    error?: string;
}

@Injectable()
export class AIService implements OnModuleInit {
    private readonly logger = new Logger(AIService.name);
    /**
     * OpenAI client. Stays undefined when configuration is incomplete or
     * initialization failed — in that state AI features are disabled and
     * isAvailable() returns false.
     */
    private openai?: OpenAI;
    // process.env lookups are string | undefined; validated in initializeOpenAI().
    private readonly apiKey?: string;
    private readonly baseURL?: string;
    private readonly model: string;

    constructor() {
        // Read configuration from the environment. Missing values are tolerated
        // here and only disable the service during onModuleInit.
        this.apiKey = process.env['loveapp.tradeERP.ai.apiKey'];
        this.baseURL = process.env['loveapp.tradeERP.ai.baseURL'];
        this.model = AI_CONSTANTS.DEFAULT_MODEL;
    }

    async onModuleInit(): Promise<void> {
        await this.initializeOpenAI();
    }

    /**
     * Initialize the OpenAI client.
     *
     * Never throws: on incomplete config or construction failure it logs and
     * leaves `this.openai` undefined so the rest of the app keeps running
     * with AI features disabled.
     */
    private async initializeOpenAI(): Promise<void> {
        try {
            // Both credentials are required; bail out (and disable AI) otherwise.
            if (!this.apiKey || !this.baseURL) {
                this.logger.warn('AI服务配置不完整,将禁用AI功能', {
                    hasApiKey: !!this.apiKey,
                    hasBaseURL: !!this.baseURL
                });
                return;
            }

            this.openai = new OpenAI({
                apiKey: this.apiKey,
                baseURL: this.baseURL,
                timeout: AI_CONSTANTS.DEFAULT_TIMEOUT,
                maxRetries: AI_CONSTANTS.DEFAULT_MAX_RETRIES,
            });

            this.logger.log('AI服务初始化成功');
        } catch (error) {
            // Narrow the unknown catch variable before reading message/stack.
            const err = error instanceof Error ? error : new Error(String(error));
            this.logger.error('AI服务初始化失败,将禁用AI功能', {
                error: err.message,
                stack: err.stack
            });
        }
    }

    /**
     * Format raw statistics data for the model: translate field names to
     * Chinese and convert monetary amounts from fen to yuan.
     *
     * @param data raw statistics payload
     * @returns formatted payload
     * @throws AIAnalysisException when DataFormatter fails
     */
    private formatData(data: any): any {
        try {
            return DataFormatter.formatStatisticsData(data);
        } catch (error) {
            const err = error instanceof Error ? error : new Error(String(error));
            this.logger.error('数据格式化失败', err.stack);
            throw new AIAnalysisException('数据格式化失败', error);
        }
    }

    /**
     * Run a streaming chat-completion analysis over the given data.
     *
     * @param request analysis request (data is required; prompts are optional)
     * @returns the raw OpenAI chunk stream for the caller to iterate
     * @throws AIConfigException when the client was never initialized
     * @throws AIAnalysisException on empty input or formatting failure
     * @throws AIStreamException for any other failure (wraps the original error)
     */
    async analyzeDataStream(request: AIAnalysisRequest): Promise<Stream<OpenAI.Chat.Completions.ChatCompletionChunk>> {
        const startTime = Date.now();

        try {
            // Capture the client into a local so the narrowing survives
            // into the retry closure below (this.openai could not).
            const client = this.openai;
            if (!client) {
                throw new AIConfigException('AI服务未初始化或配置不完整,请检查API密钥和基础URL配置');
            }

            // Reject empty payloads up front.
            if (!request.data) {
                throw new AIAnalysisException(AI_CONSTANTS.ERROR_MESSAGES.EMPTY_DATA);
            }

            const formattedData = this.formatData(request.data);

            // Build prompts, falling back to the shared templates.
            const systemPrompt = request.systemPrompt || SYSTEM_PROMPT_TEMPLATE;
            const userPrompt = request.userPrompt || DEFAULT_USER_PROMPT;
            const userMessage = `${userPrompt} \n${JSON.stringify(formattedData, null, 2)}`;

            // Retry transient failures with a 1s base delay.
            const stream = await RetryHelper.withRetry(
                async () => {
                    return await client.chat.completions.create({
                        model: this.model,
                        messages: [
                            {
                                role: 'system',
                                content: systemPrompt,
                            },
                            {
                                role: 'user',
                                content: userMessage,
                            },
                        ],
                        temperature: AI_CONSTANTS.DEFAULT_TEMPERATURE,
                        max_tokens: AI_CONSTANTS.DEFAULT_MAX_TOKENS,
                        stream: true,
                    });
                },
                AI_CONSTANTS.DEFAULT_MAX_RETRIES,
                1000
            );

            const duration = Date.now() - startTime;
            this.logger.log(`AI流式分析请求成功`, { duration: `${duration}ms` });

            return stream;

        } catch (error) {
            const duration = Date.now() - startTime;
            const err = error instanceof Error ? error : new Error(String(error));
            this.logger.error('AI流式分析失败', {
                duration: `${duration}ms`,
                error: err.message,
                stack: err.stack,
            });

            // Re-throw our own exception types untouched so callers can
            // distinguish configuration/input errors from stream failures.
            if (error instanceof AIAnalysisException || error instanceof AIConfigException) {
                throw error;
            }

            throw new AIStreamException(
                AI_CONSTANTS.ERROR_MESSAGES.STREAM_FAILED,
                {
                    originalError: err.message,
                    duration,
                }
            );
        }
    }

    /**
     * Whether the AI client is initialized and usable.
     */
    isAvailable(): boolean {
        return !!this.openai;
    }

    /**
     * Diagnostic snapshot of the service configuration state
     * (never exposes the actual credential values).
     */
    getStatus(): { available: boolean; model: string; hasApiKey: boolean; hasBaseURL: boolean } {
        return {
            available: this.isAvailable(),
            model: this.model,
            hasApiKey: !!this.apiKey,
            hasBaseURL: !!this.baseURL,
        };
    }
}
import { Controller, Post, Body, Res, Logger, UsePipes } from '@nestjs/common';
import { FastifyReply } from 'fastify';
import { AIService } from './ai.service';
import { AIAnalysisDto } from './dto/ai-analysis.dto';
import { FieldsToJSONPipe } from 'src/common/pipe/formDataToJsonPipe';
import { AI_CONSTANTS } from './constants/ai.constants';
import { AIAnalysisException, AIStreamException, AIConfigException } from './exceptions/ai.exceptions';

@Controller('ai')
export class AIController {
    private readonly logger = new Logger(AIController.name);

    constructor(private readonly aiService: AIService) {}

    /**
     * 分析数据接口(流式输出)
     * @param analysisDto 分析请求数据
     * @param res Fastify响应对象
     */
    @Post('analyze/stream')
    @UsePipes(new FieldsToJSONPipe())
    async analyzeDataStream(
        @Body() analysisDto: AIAnalysisDto,
        @Res() res: FastifyReply
    ): Promise<void> {
        const requestId = `req_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;

        try {
            // 检查AI服务是否可用
            if (!this.aiService.isAvailable()) {
                const status = this.aiService.getStatus();
                this.logger.warn(`[${requestId}] AI服务不可用`, status);
                
                // 立即返回错误,不设置流式响应头
                res.status(503).send({
                    error: 'AI服务暂时不可用',
                    message: 'AI服务配置不完整或初始化失败,请检查配置后重试',
                    details: status,
                    requestId,
                });
                return;
            }

            // 获取Origin
            const origin = res.request.headers.origin;
            
            // 设置SSE响应头
            const headers = {
                'Content-Type': 'text/event-stream; charset=utf-8',
                'Cache-Control': 'no-cache',
                'Connection': 'keep-alive',
                'Access-Control-Allow-Origin': origin || '*',
                'Access-Control-Allow-Credentials': 'true',
                'X-Accel-Buffering': 'no', // 禁用响应缓冲,让流式内容实时返回
            };

            // 立即写入响应头
            res.raw.writeHead(200, headers);

            // 发送初始连接确认消息
            this.sendSSEMessage(res, {
                type: AI_CONSTANTS.SSE_EVENTS.INIT,
                message: '连接已建立,开始分析...',
                requestId,
            });

            // 获取流式响应
            const stream = await this.aiService.analyzeDataStream(analysisDto);
            
            let fullContent = '';
            let chunkCount = 0;
            
            this.logger.log(`[${requestId}] 开始处理流式响应`);
            
            for await (const chunk of stream) {
                const content = chunk.choices[0]?.delta?.content || '';
                if (content) {
                    fullContent += content;
                    chunkCount++;
                    
                    // 发送内容块
                    this.sendSSEMessage(res, {
                        type: AI_CONSTANTS.SSE_EVENTS.CONTENT,
                        content: content,
                        fullContent: fullContent,
                        chunkIndex: chunkCount,
                    });
                }
            }
            
            // 发送完成信号
            this.sendSSEMessage(res, {
                type: AI_CONSTANTS.SSE_EVENTS.COMPLETE,
                fullContent: fullContent,
                totalChunks: chunkCount,
                requestId,
            });
            
            res.raw.end();
            
            this.logger.log(`[${requestId}] 流式分析完成`, { 
                totalChunks: chunkCount,
                contentLength: fullContent.length 
            });
            
        } catch (error) {
            this.logger.error(`[${requestId}] 流式分析失败`, {
                error: error.message,
                stack: error.stack,
            });
            
            try {
                // 发送错误信息
                this.sendSSEMessage(res, {
                    type: AI_CONSTANTS.SSE_EVENTS.ERROR,
                    error: this.getErrorMessage(error),
                    requestId,
                });
                
                res.raw.end();
            } catch (writeError) {
                // 如果无法写入错误信息,说明连接已经关闭
                this.logger.error(`[${requestId}] 无法发送错误信息,连接可能已关闭`, {
                    writeError: writeError.message,
                });
            }
        }
    }

    /**
     * 发送SSE消息
     * @param res Fastify响应对象
     * @param data 要发送的数据
     */
    private sendSSEMessage(res: FastifyReply, data: any): void {
        const sseData = `data: ${JSON.stringify(data)}\n\n`;
        res.raw.write(sseData);
    }

    /**
     * 获取错误消息
     * @param error 错误对象
     * @returns 错误消息字符串
     */
    private getErrorMessage(error: any): string {
        if (error instanceof AIAnalysisException) {
            return error.message;
        }
        
        if (error instanceof AIConfigException) {
            return 'AI服务配置错误';
        }
        
        if (error instanceof AIStreamException) {
            return error.message;
        }
        
        return AI_CONSTANTS.ERROR_MESSAGES.ANALYSIS_FAILED;
    }
}
## 需求与实现方式

| 需求 | 实现方式 |
| --- | --- |
| 图片阻塞问题 | 占位符 + ready 替换 |
| 心跳保活 | 服务端 ping + 前端检测 |
| 断点续传 | 每条消息带 id + 本地记录 lastId |
| 服务端恢复流 | `/stream?lastId=xxx` |
| 图片缓存 | 临时缓存 + 映射表，不重复生成 |
| 自动重连 | SSE 重连 + 上次 lastId 续传 |