Java对象通常直接由垃圾回收器进行管理。当我们通过JNI调用C++底层的方法时,如果使用不当可能导致堆外空间的泄漏。
常见的场景
- 音视频JNI服务
- FFmpeg处理音视频文件等工具
踩过的坑总结
- 当调用JNI的时候如果底层分配了native内存,注意释放时机。
- 对JNI的回调返回的对象进行处理的时候,注意对象的释放时机
- JNI的wrapper层对底层C++功能进行了封装;wrapper对象往往映射着底层的native内存,应尽可能早地主动调用释放方法(如delete()),而不是依赖GC回收。
示例
package com.flybot.video.base.trtc;
import com.flybot.video.base.trtc.wrapper.DecryptionDelegateWrapper;
import com.flybot.video.base.trtc.wrapper.PcmAudioBuffer;
import com.tencent.liteav.*;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import com.flybot.video.base.constants.NoticeEventEnum;
import com.flybot.video.base.constants.RedisKeysConstants;
import com.flybot.video.config.RedisTemplateWrapper;
import com.flybot.video.dto.AudioStreamPushRequest;
import com.flybot.video.dto.CaptureImageRequest;
import com.flybot.video.dto.CaptureVideoRequest;
import com.flybot.video.dto.RoomEnterRequest;
import com.flybot.video.utils.EncodeAndDecryptUtils;
import com.flybot.video.utils.FileUtils;
import com.flybot.video.utils.ImageProcessor;
import com.flybot.video.utils.FFmpegUtils;
import com.flybot.video.utils.MapUtils;
import com.flybot.video.websocket.VideoWebSocketEndpoint;
import org.springframework.context.ApplicationEventPublisher;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Base64;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* TRTC房间实例
* 管理单个房间的TRTC连接和音频流处理
*
* @since 1.0.0
*/
@Slf4j
public class TRTCRoomInstance extends TRTCCloudDelegate {
private final String roomId;
private final String userId;
private final Long sdkAppId;
private final RoomEnterRequest enterRequest;
private final RedisTemplateWrapper redisTemplateWrapper;
private final VideoWebSocketEndpoint webSocketMessageHandler;
private final ApplicationEventPublisher eventPublisher;
private TRTCCloud cloud;
/**
* -- GETTER --
* 获取当前事件ID
*/
// 当前事件ID,用于回调时匹配用户请求
@Getter
private volatile String currentEventId = null;
private final AtomicBoolean isRunning = new AtomicBoolean(false);
private final AtomicBoolean isCapturingAudio = new AtomicBoolean(false);
// 解密委托包装器,用于音视频数据采集
private DecryptionDelegateWrapper decryptionDelegateWrapper;
// 音频处理相关
private PcmAudioBuffer audioBuffer;
private AudioPushThread audioPushThread;
// 音频采集标识位
private volatile String targetAudioUserId = null;
private boolean audioStream = false;
private boolean audioBlock = false;
// Pending screenshot request (cached on demand so video frames are only
// processed when a capture was actually requested, avoiding useless caching).
private static class PendingCaptureRequest {
// The original capture request parameters (project DTO).
final CaptureImageRequest request;
// Event id used to correlate the WebSocket response with the caller.
final String eventId;
// Creation time in epoch millis; used for timeout detection.
final long createTime;
PendingCaptureRequest(CaptureImageRequest request, String eventId) {
this.request = request;
this.eventId = eventId;
this.createTime = System.currentTimeMillis();
}
}
private final ConcurrentHashMap<String, PendingCaptureRequest> pendingCaptureImageRequests = new ConcurrentHashMap<>();
private static final long CAPTURE_IMAGE_TIMEOUT_MS = 2000; // 截图请求超时时间:2秒
// 视频文件写入相关(参考asr_cloud.cc的实现)
// private final ConcurrentHashMap<String, BufferedOutputStream>
// videoFileOutputStreams = new ConcurrentHashMap<>();
// 视频采集缓存相关(用于captureVideo功能)
private volatile boolean isCapturingVideo = false;
private volatile long videoCaptureStartTime = 0;
private volatile int videoCaptureDuration = 0;
private final ConcurrentHashMap<String, List<String>> userEventIdMapping = new ConcurrentHashMap<>();
private final ConcurrentHashMap<String, List<VideoFrameData>> capturedVideoFrames = new ConcurrentHashMap<>();
private final ConcurrentHashMap<String, List<AudioFrameData>> capturedAudioFrames = new ConcurrentHashMap<>();
// Video frame snapshot cached during captureVideo().
// Callers clone the raw frame bytes before constructing (see OnRemoteVideoReceived),
// so this object owns its payload.
@Getter
private static class VideoFrameData {
// Cloned YUV payload.
private final byte[] data;
// Capture time in epoch millis.
private final long timestamp;
private final int width;
private final int height;
// TRTC stream type the frame arrived on.
private final StreamType streamType;
public VideoFrameData(byte[] data, long timestamp, int width, int height, StreamType streamType) {
this.data = data;
this.timestamp = timestamp;
this.width = width;
this.height = height;
this.streamType = streamType;
}
}
// Audio frame snapshot cached during captureVideo()/captureAudio.
@Getter
private static class AudioFrameData {
// Cloned PCM payload.
private final byte[] data;
// Capture time in epoch millis.
private final long timestamp;
// Sample rate in Hz.
private final int sampleRate;
// Number of audio channels.
private final int channels;
public AudioFrameData(byte[] data, long timestamp, int sampleRate, int channels) {
this.data = data;
this.timestamp = timestamp;
this.sampleRate = sampleRate;
this.channels = channels;
}
}
// 图片推送相关
private volatile boolean isImagePushEnabled = false;
private volatile String imageUrl = null;
private volatile byte[] imageYuvData = null;
private volatile int imageWidth = 0;
private volatile int imageHeight = 0;
private volatile boolean isImagePushRunning = false;
// 可复用的PixelFrame对象(避免频繁创建,减少GC压力)
private PixelFrame reusablePixelFrame = null;
// 图片推送定时任务(用于取消任务,避免线程泄漏)
private java.util.concurrent.ScheduledFuture<?> imagePushTask = null;
// PTS时间戳管理(原子变量,确保线程安全)
private final java.util.concurrent.atomic.AtomicLong nextPts = new java.util.concurrent.atomic.AtomicLong(20);
// 音频通道状态管理
private volatile boolean isLocalAudioChannelCreated = false;
// 定时任务执行器
private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(3);
// 实时音频推送线程池(单线程,防止阻塞TRTC回调)
private final ThreadPoolExecutor realtimeAudioExecutor;
// 客户端离线延迟通知相关
private final long clientOfflineDelaySecond; // 客户端离线延迟时间(秒)
private java.util.concurrent.ScheduledFuture<?> clientOfflineDelayTask; // 延迟任务
// BufferedImage对象池(按分辨率复用,减少int数组内存分配)
private final ConcurrentHashMap<String, BufferedImage> bufferedImagePool = new ConcurrentHashMap<>();
// 视频帧处理节流(限制处理频率,避免创建过多BufferedImage)
// private final ConcurrentHashMap<String, Long> lastFrameProcessTime = new ConcurrentHashMap<>();
// private static final long FRAME_PROCESS_INTERVAL_MS = 1000; // 每100ms处理一次,最多10fps
/**
 * Creates a room instance bound to a single TRTC room.
 *
 * @param roomId                   numeric room id (parsed as long when entering the room)
 * @param userId                   local user id
 * @param enterRequest             enter-room parameters (user sig, encryption, optional image URL)
 * @param redisTemplateWrapper     Redis access for the per-room user counter
 * @param webSocketMessageHandler  WebSocket endpoint used for all notifications
 * @param eventPublisher           Spring event publisher
 * @param sdkAppId                 TRTC SDK app id
 * @param clientOfflineDelaySecond delay in SECONDS before sending the client-offline
 *                                 notification (0 = send immediately)
 */
public TRTCRoomInstance(String roomId, String userId, RoomEnterRequest enterRequest,
        RedisTemplateWrapper redisTemplateWrapper, VideoWebSocketEndpoint webSocketMessageHandler,
        ApplicationEventPublisher eventPublisher, Long sdkAppId, int clientOfflineDelaySecond) {
    this.roomId = roomId;
    this.userId = userId;
    this.enterRequest = enterRequest;
    this.redisTemplateWrapper = redisTemplateWrapper;
    this.webSocketMessageHandler = webSocketMessageHandler;
    this.eventPublisher = eventPublisher;
    this.sdkAppId = sdkAppId;
    // Client-offline delay config (0 means notify immediately).
    this.clientOfflineDelaySecond = clientOfflineDelaySecond;
    // Fix: the log label previously said "delayMs" although the value is in seconds.
    log.debug("客户端离线延迟时间配置: roomId={}, delaySecond={}", roomId, clientOfflineDelaySecond);
    // If an image URL was supplied, it will be pushed once the video channel exists.
    if (enterRequest.getImageUrl() != null && !enterRequest.getImageUrl().trim().isEmpty()) {
        this.imageUrl = enterRequest.getImageUrl().trim();
        this.isImagePushEnabled = true;
        log.debug("检测到图片URL,将在视频通道创建后推送: roomId={}, imageUrl={}", roomId, imageUrl);
    }
    // Single-threaded executor for realtime audio push; bounded queue plus
    // DiscardOldestPolicy so a slow WebSocket never blocks the TRTC callback thread.
    this.realtimeAudioExecutor = new ThreadPoolExecutor(
            1,
            1,
            0L,
            TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<>(100),
            r -> {
                Thread t = new Thread(r, "trtc-audio-stream-" + roomId);
                t.setDaemon(true);
                return t;
            },
            new ThreadPoolExecutor.DiscardOldestPolicy());
}
/**
 * Starts the room instance: creates the native TRTC cloud object, enters the
 * room and initializes the PCM audio buffer.
 * <p>
 * Fix: the native (SWIG) parameter objects {@code RoomParams} and
 * {@code EnterRoomParams} were previously only {@code delete()}d on the success
 * path, leaking native memory when entering the room threw. They are now
 * released in a {@code finally} block.
 *
 * @throws RuntimeException if the TRTC room cannot be started
 */
public void start() {
    if (isRunning.get()) {
        log.debug("房间实例已在运行: roomId={}", roomId);
        return;
    }
    log.info("启动TRTC房间实例: roomId={}, userId={}", roomId, userId);
    EnterRoomParams roomParams = null;
    RoomParams roomInfo = null;
    try {
        // Create the native TRTC cloud instance with this object as delegate.
        cloud = TRTCCloud.Create(this);
        log.info("使用用户签名: roomId={}, userId={}", roomId, userId);
        String userSig = enterRequest.getUserSig();
        // Configure enter-room parameters (native wrappers, released in finally).
        roomParams = new EnterRoomParams();
        roomInfo = new RoomParams();
        roomInfo.setSdk_app_id(this.sdkAppId);
        roomInfo.setUser_id(new TrtcString(userId));
        roomInfo.setRoom_id(Long.parseLong(roomId));
        roomInfo.setUser_sig(new TrtcString(userSig));
        // Attach the decryption delegate (covers both audio and video payloads).
        if (enterRequest.getVideoEncryptionType() > 0) {
            decryptionDelegateWrapper = new DecryptionDelegateWrapper(roomId, enterRequest.getSessionKey(),
                    enterRequest.getVideoEncryptionType());
            roomInfo.setDecryption_delegate(decryptionDelegateWrapper);
        }
        roomParams.setRoom(roomInfo);
        roomParams.setScene(TRTCScene.TRTC_SCENE_LIVE);
        roomParams.setRole(TRTCRole.TRTC_ROLE_ANCHOR);
        roomParams.setUse_pixel_frame_input(true);
        roomParams.setUse_pixel_frame_output(true);
        // Enter the room.
        cloud.EnterRoom(roomParams);
        log.info("调用TRTC房间进房: roomId={}, userId={}", roomId, userId);
        // Audio buffer sized for a large one-shot PCM upload:
        // 16kHz * 1 channel * 180s * 2 bytes/sample = 5,760,000 bytes (~5.76MB).
        audioBuffer = new PcmAudioBuffer(180, 16000, 1); // 3 min buffer, 16kHz, mono
        isRunning.set(true);
        log.info("TRTC房间实例启动成功: roomId={}, userId={}", roomId, userId);
    } catch (Exception e) {
        log.error("启动TRTC房间实例失败: roomId={}, userId={}", roomId, userId, e);
        throw new RuntimeException("启动TRTC房间实例失败", e);
    } finally {
        // Always release the native parameter wrappers, even on failure.
        if (roomInfo != null) {
            roomInfo.delete();
        }
        if (roomParams != null) {
            roomParams.delete();
        }
    }
}
/**
 * Stops the room instance.
 * <p>
 * Shutdown order matters: caches are cleared BEFORE exiting the TRTC room so
 * native callbacks cannot repopulate them; then the native cloud instance is
 * destroyed; finally the local executors are shut down.
 * <p>
 * Fix: the {@link InterruptedException} from the 500ms post-exit grace sleep
 * was previously swallowed without restoring the interrupt flag.
 */
public void stop() {
    if (!isRunning.get()) {
        log.warn("房间实例未在运行: roomId={}", roomId);
        // Even when not running, clear caches that an earlier run may have left behind.
        clearAllCaches();
        return;
    }
    log.info("停止TRTC房间实例: roomId={}, userId={}", roomId, userId);
    try {
        isRunning.set(false);
        // Stop pushing the static image (cancels its scheduled task).
        stopImagePush();
        // Stop the audio push thread, waiting up to 2 seconds for it to finish.
        if (audioPushThread != null) {
            audioPushThread.interrupt();
            try {
                audioPushThread.join(2000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                log.warn("等待音频推送线程结束被中断: roomId={}", roomId);
            } finally {
                audioPushThread = null;
            }
        }
        // Clear all caches BEFORE leaving the room, otherwise callbacks keep writing.
        clearAllCaches();
        // Exit the TRTC room and destroy the native instance.
        if (cloud != null) {
            log.debug("退出TRTC房间: roomId={}, userId={}", roomId, userId);
            cloud.ExitRoom();
            try {
                // NOTE(review): fixed 500ms grace period before Destroy — presumably to
                // let the SDK finish the exit; confirm against the TRTC SDK docs.
                Thread.sleep(500);
            } catch (InterruptedException e) {
                // Fix: restore the interrupt status instead of swallowing it.
                Thread.currentThread().interrupt();
                log.info("sessionId:{},退出房间后,delay 500ms异常", roomId, e);
            }
            TRTCCloud.Destroy(cloud);
            cloud = null;
            log.debug("销毁TRTC云实例成功: roomId={}", roomId);
        }
        // Cancel any pending delayed client-offline notification.
        cancelClientOfflineDelayTask();
        // Shut down the realtime audio executor.
        if (realtimeAudioExecutor != null && !realtimeAudioExecutor.isShutdown()) {
            realtimeAudioExecutor.shutdownNow();
        }
        // Shut down the scheduled executor.
        shutdownScheduler();
        log.info("TRTC房间实例已停止: roomId={}, userId={}", roomId, userId);
    } catch (Exception e) {
        log.error("停止TRTC房间实例失败: roomId={}, userId={}", roomId, userId, e);
        // Clear caches even on failure.
        clearAllCaches();
    }
}
/**
 * Clears every cache held by this instance to prevent heap and native memory
 * leaks: captured frame lists, pending screenshot requests, the PCM audio
 * buffer, the native decryption delegate, the BufferedImage pool and the
 * reusable PixelFrame. Also resets all capture state flags.
 * Safe to call multiple times; all failures are logged and swallowed.
 */
private void clearAllCaches() {
log.info("清理房间实例缓存: roomId={}", roomId);
try {
// Count then clear the captured video/audio frames (these can hold a lot of data).
int videoFramesCount = capturedVideoFrames.values().stream()
.mapToInt(List::size).sum();
int audioFramesCount = capturedAudioFrames.values().stream()
.mapToInt(List::size).sum();
capturedVideoFrames.clear();
capturedAudioFrames.clear();
userEventIdMapping.clear();
// Drop all pending screenshot requests.
int pendingCaptureCount = pendingCaptureImageRequests.size();
pendingCaptureImageRequests.clear();
if (pendingCaptureCount > 0) {
log.info("清理待截图请求: roomId={}, count={}", roomId, pendingCaptureCount);
}
log.info("清理缓存完成: roomId={}, 清理视频帧={}, 音频帧={}",
roomId, videoFramesCount, audioFramesCount);
// Empty the PCM audio buffer.
if (audioBuffer != null) {
audioBuffer.clear();
}
// Release the decryption delegate wrapper: stop capture, clear its caches,
// then delete() the native object; the reference is nulled even on failure.
if (decryptionDelegateWrapper != null) {
try {
decryptionDelegateWrapper.stopCapture();
decryptionDelegateWrapper.clearAllCaches();
decryptionDelegateWrapper.delete();
} catch (Exception e) {
log.error("释放 DecryptionDelegateWrapper 失败: roomId={}", roomId, e);
} finally {
decryptionDelegateWrapper = null;
}
}
// Flush and drop pooled BufferedImages (frees their int[] backing arrays).
clearBufferedImagePool();
// Release the reusable native PixelFrame, if one was created.
if (reusablePixelFrame != null) {
try {
reusablePixelFrame.delete();
} catch (Exception e) {
log.error("释放 PixelFrame 失败: roomId={}", roomId, e);
} finally {
reusablePixelFrame = null;
}
}
// Reset all capture state flags.
isCapturingVideo = false;
isCapturingAudio.set(false);
audioBlock = false;
targetAudioUserId = null;
videoCaptureStartTime = 0;
videoCaptureDuration = 0;
log.info("房间实例缓存清理完成: roomId={}", roomId);
} catch (Exception e) {
log.error("清理房间实例缓存失败: roomId={}", roomId, e);
}
}
/**
 * Gets (or lazily creates) a pooled BufferedImage for the given resolution,
 * reusing instances to cut down on large int[] allocations.
 * <p>
 * Fix: the previous get/null-check/put sequence was not atomic on the
 * ConcurrentHashMap, and its size-mismatch branch was unreachable because the
 * pool key already encodes the dimensions ("WxH"). Both are addressed by
 * {@link java.util.concurrent.ConcurrentHashMap#computeIfAbsent}.
 *
 * @param width  image width in pixels
 * @param height image height in pixels
 * @return a pooled TYPE_INT_RGB BufferedImage of exactly width x height
 */
private BufferedImage getBufferedImage(int width, int height) {
    String key = width + "x" + height;
    return bufferedImagePool.computeIfAbsent(key, k -> {
        log.debug("创建BufferedImage对象池: roomId={}, key={}, width={}, height={}", roomId, k, width, height);
        return new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
    });
}
/**
 * Empties the BufferedImage pool, explicitly flushing every image first so
 * its native resources are released immediately rather than piling up behind
 * the finalizer.
 */
private void clearBufferedImagePool() {
    final int clearedCount = bufferedImagePool.size();
    bufferedImagePool.values().forEach(pooledImage -> {
        try {
            pooledImage.flush();
        } catch (Exception e) {
            log.debug("flush BufferedImage 失败: roomId={}", roomId, e);
        }
    });
    bufferedImagePool.clear();
    log.debug("清理BufferedImage对象池: roomId={}, 清理数量={}", roomId, clearedCount);
}
/**
 * Gracefully shuts down the scheduled executor: waits up to 5 seconds for an
 * orderly shutdown, then forces it and waits up to 3 more seconds. Restores
 * the interrupt flag if the wait itself is interrupted.
 */
private void shutdownScheduler() {
    if (scheduler == null || scheduler.isShutdown()) {
        return;
    }
    scheduler.shutdown();
    try {
        boolean terminated = scheduler.awaitTermination(5, TimeUnit.SECONDS);
        if (!terminated) {
            log.warn("线程池未在5秒内关闭,强制关闭: roomId={}", roomId);
            scheduler.shutdownNow();
            // Give the forced shutdown a little extra time to take effect.
            boolean forciblyTerminated = scheduler.awaitTermination(3, TimeUnit.SECONDS);
            if (!forciblyTerminated) {
                log.error("线程池强制关闭失败: roomId={}", roomId);
            }
        }
    } catch (InterruptedException e) {
        scheduler.shutdownNow();
        Thread.currentThread().interrupt();
    }
}
/**
 * Pushes a chunk of Base64-encoded PCM audio into the room's audio buffer and
 * (re)starts the background audio push thread.
 * <p>
 * If the buffer already contains unplayed data it is cleared first and an
 * AUDIO_INTERRUPT notification is sent to the room.
 * <p>
 * Fixes: {@code audioPushThread.eventId} was dereferenced without a null check
 * (NPE if the thread died while data remained buffered), and the "previous
 * eventId" debug log was emitted AFTER {@code setEventId}, so it printed the
 * new id instead of the old one.
 *
 * @param request audio push request carrying the Base64 PCM payload
 * @param eventId event id used to correlate WebSocket notifications
 */
public void pushAudioStream(AudioStreamPushRequest request, String eventId) {
    if (!isRunning.get()) {
        log.warn("房间实例未运行,无法推送音频流: roomId={}", roomId);
        return;
    }
    log.debug("推送音频流: roomId={}, userId={}", roomId, request.getUserId());
    try {
        // Decode the payload and wrap it as buffer audio data.
        PcmAudioBuffer.AudioData audioData = new PcmAudioBuffer.AudioData();
        byte[] decode = Base64.getDecoder().decode(request.getAudioData());
        audioData.audio_frame = decode;
        audioData.audio_channels = FFmpegUtils.DEFAULT_AUDIO_CHANNELS;
        audioData.audio_sample_rate = FFmpegUtils.DEFAULT_AUDIO_SAMPLE_RATE;
        // Timestamp handling mirrors the CloudWrapper implementation.
        audioData.audio_timestamp = 0;
        log.info("推音频流长度:{}, channels:{}, sampleRate:{}, timestamp:{}, totalBytes:{}",
                decode.length, audioData.audio_channels, audioData.audio_sample_rate,
                audioData.audio_timestamp, decode.length);
        try {
            // If the buffer still holds data, clear it and send an interrupt notification.
            int currentUsage = audioBuffer.getCurrentBufferUsage();
            if (currentUsage > 0) {
                log.warn("音频缓冲区已有数据:{}bytes,清空缓冲区并发送打断通知: roomId={}", currentUsage, roomId);
                audioBuffer.clear();
                // Fix: audioPushThread may be null here; fall back to the current eventId.
                String interruptEventId = (audioPushThread != null) ? audioPushThread.eventId : eventId;
                Map<String, Object> interruptData = new HashMap<>();
                interruptData.put("roomId", roomId);
                interruptData.put("userId", request.getUserId());
                webSocketMessageHandler.sendNotificationToRoom(
                        roomId,
                        NoticeEventEnum.AUDIO_INTERRUPT,
                        interruptData,
                        interruptEventId);
            }
            // Verify the buffer has enough free capacity before writing.
            int requiredCapacity = decode.length;
            int totalCapacity = getAudioBufferTotalCapacity();
            int availableCapacity = totalCapacity - audioBuffer.getCurrentBufferUsage();
            log.info("缓冲区状态检查: roomId={}, 需要容量:{}bytes, 当前使用:{}bytes, 总容量:{}bytes, 可用容量:{}bytes",
                    roomId, requiredCapacity, audioBuffer.getCurrentBufferUsage(), totalCapacity,
                    availableCapacity);
            if (availableCapacity < requiredCapacity) {
                log.error("音频缓冲区容量不足: roomId={}, 需要:{}bytes, 可用:{}bytes",
                        roomId, requiredCapacity, availableCapacity);
                return;
            }
            audioBuffer.PutAudioData(audioData);
        } catch (IllegalStateException e) {
            log.error("音频缓冲区已满,无法添加更多数据: roomId={}, error={}", roomId, e.getMessage());
        }
        // Start the push thread if it is not alive; otherwise just retarget its eventId.
        if (audioPushThread == null || !audioPushThread.isAlive()) {
            log.warn("【重要】启动音频推送线程: roomId={}, userId={}", roomId, request.getUserId());
            audioPushThread = new AudioPushThread(request.getUserId(), eventId);
            audioPushThread.start();
        } else {
            // Fix: capture the previous eventId BEFORE overwriting it, so the log is accurate.
            String previousEventId = audioPushThread.eventId;
            audioPushThread.setEventId(eventId);
            log.debug("音频推送线程已在运行,打断上次的推送,roomId={}, userId={}, 上次的eventId={}, 当前的eventId={} ",
                    roomId, request.getUserId(), previousEventId, eventId);
        }
    } catch (Exception e) {
        log.error("推送音频流失败: roomId={}, userId={}", roomId, userId, e);
    }
}
/**
 * Begins forwarding the audio frames of one remote user over WebSocket.
 * No-op when the instance is not running or a capture is already in progress.
 *
 * @param targetUserId remote user whose audio frames should be captured
 */
public void startCaptureAudio(String targetUserId) {
    if (!isRunning.get()) {
        log.warn("房间实例未运行,无法开始获取音频数据: roomId={}", roomId);
        return;
    }
    if (isCapturingAudio.get()) {
        log.warn("已在获取音频数据: roomId={}", roomId);
        return;
    }
    log.info("开始获取指定用户音频数据: roomId={}, targetUserId={}", roomId, targetUserId);
    // Arm the capture flags; the audio callback checks these on every frame.
    this.targetAudioUserId = targetUserId;
    this.audioStream = true;
    isCapturingAudio.set(true);
}
/**
 * Stops forwarding the audio frames of the given remote user.
 * No-op when no capture is currently active.
 *
 * @param targetUserId remote user whose audio capture should end (used for logging)
 */
public void endCaptureAudio(String targetUserId) {
    if (!isCapturingAudio.get()) {
        log.warn("未在获取音频数据: roomId={}", roomId);
        return;
    }
    log.info("结束获取指定用户音频数据: roomId={}, targetUserId={}", roomId, targetUserId);
    // Disarm the capture flags; the audio callback stops matching immediately.
    this.targetAudioUserId = null;
    this.audioStream = false;
    isCapturingAudio.set(false);
}
/**
 * Requests a single screenshot of the given user's video.
 * The request is queued and fulfilled on-demand inside the video-frame
 * callback (OnRemoteVideoReceived), so frames are only converted when a
 * screenshot was actually requested. A scheduled task cleans the request up
 * and notifies the caller with an error if no frame arrives within
 * CAPTURE_IMAGE_TIMEOUT_MS (2 seconds).
 *
 * @param request screenshot request (target user, capture id)
 * @param eventId event id used to correlate the WebSocket response
 */
public void captureImage(CaptureImageRequest request, String eventId) {
String targetUserId = request.getUserId();
if (!isRunning.get()) {
log.warn("房间实例未运行,无法截取图片: roomId={}", roomId);
return;
}
log.info("触发截图请求: roomId={}, targetUserId={}, captureId={}, eventId={}",
roomId, targetUserId, request.getCaptureId(), eventId);
// Queue the request for the video callback; an existing pending request for
// the same user is replaced (avoids duplicate requests).
PendingCaptureRequest pendingRequest = new PendingCaptureRequest(request, eventId);
PendingCaptureRequest oldRequest = pendingCaptureImageRequests.put(targetUserId, pendingRequest);
if (oldRequest != null) {
log.warn("替换旧的截图请求: roomId={}, targetUserId={}, oldEventId={}, newEventId={}",
roomId, targetUserId, oldRequest.eventId, eventId);
}
// Timeout cleanup: after CAPTURE_IMAGE_TIMEOUT_MS, if this exact request is
// still pending, drop it and send a failure notification to the caller.
scheduler.schedule(() -> {
PendingCaptureRequest currentRequest = pendingCaptureImageRequests.get(targetUserId);
if (currentRequest != null && currentRequest.eventId.equals(eventId)) {
// Not processed in time: remove the request and report failure.
pendingCaptureImageRequests.remove(targetUserId);
log.warn("截图请求超时未处理: roomId={}, targetUserId={}, eventId={}",
roomId, targetUserId, eventId);
try {
Map<String, Object> errorData = MapUtils.of(
"roomId", roomId,
"userId", targetUserId,
"captureId", request.getCaptureId(),
"error", "截图超时,未获取到视频帧");
webSocketMessageHandler.sendNotificationToRoom(roomId, NoticeEventEnum.CAPTURE_IMAGE,
errorData, eventId);
} catch (Exception e) {
log.error("发送截图超时通知失败: roomId={}, targetUserId={}", roomId, targetUserId, e);
}
}
}, CAPTURE_IMAGE_TIMEOUT_MS, TimeUnit.MILLISECONDS);
}
/**
 * Captures the given user's audio (and optionally video) for {@code duration}
 * seconds, then muxes the frames into an MP4 via FFmpeg and sends the result
 * over WebSocket as Base64.
 * <p>
 * NOTE: this method BLOCKS the calling thread for the whole capture duration
 * (Thread.sleep below); callers should invoke it from a worker thread.
 * NOTE(review): userEventIdMapping values are plain ArrayLists mutated here
 * while the frame callbacks iterate them — looks racy; verify the threading
 * model of the TRTC callbacks.
 *
 * @param request   capture request (target user, duration in seconds, capture id)
 * @param eventId   event id used to key the frame caches and notifications
 * @param onlyAudio when true, only audio is captured and a CAPTURE_AUDIO
 *                  notification is sent (no video payload)
 */
public void captureVideo(CaptureVideoRequest request, String eventId, boolean onlyAudio) {
String targetUserId = request.getUserId();
int duration = request.getDuration();
String captureId = request.getCaptureId();
if (!isRunning.get()) {
log.warn("房间实例未运行,无法截取音视频: roomId={}, eventId={}", roomId, eventId);
return;
}
log.info("截取指定用户音视频数据: roomId={}, targetUserId={}, duration={}秒, eventId={}, captureId={}",
roomId, targetUserId, duration, eventId, captureId);
// Drop any stale caches from a previous run with the same eventId.
capturedVideoFrames.remove(eventId);
capturedAudioFrames.remove(eventId);
// Register this eventId for the target user so callbacks cache frames for it.
List<String> eventIds = userEventIdMapping.getOrDefault(targetUserId, new ArrayList<>());
eventIds.add(eventId);
userEventIdMapping.put(targetUserId, eventIds);
List<VideoFrameData> videoFrames;
List<AudioFrameData> audioFrames;
try {
// Arm the capture flags read by the frame callbacks.
if (!onlyAudio) {
isCapturingVideo = true;
videoCaptureStartTime = System.currentTimeMillis();
videoCaptureDuration = duration;
}
audioBlock = true;
isCapturingAudio.set(true);
log.info("开始采集视频数据: roomId={}, targetUserId={}, duration={}秒, eventId={}, onlyAudio={}",
roomId, targetUserId, duration, eventId, onlyAudio);
// Block for the requested capture window while callbacks fill the caches.
Thread.sleep(duration * 1000);
if (!onlyAudio) {
// Disarm video capture.
isCapturingVideo = false;
}
audioBlock = false;
isCapturingAudio.set(false);
eventIds.remove(eventId);
userEventIdMapping.put(targetUserId, eventIds);
log.info("采集完成,开始生成MP4文件: roomId={}, targetUserId={}, eventId={}, videoFrames={}, audioFrames={}",
roomId, targetUserId, eventId,
capturedVideoFrames.getOrDefault(eventId, new CopyOnWriteArrayList<>()).size(),
capturedAudioFrames.getOrDefault(eventId, new CopyOnWriteArrayList<>()).size());
// Detach the captured frames from the caches (remove = no leak afterwards).
videoFrames = capturedVideoFrames.remove(eventId);
if (videoFrames == null) {
videoFrames = new CopyOnWriteArrayList<>();
}
audioFrames = capturedAudioFrames.remove(eventId);
if (audioFrames == null) {
audioFrames = new CopyOnWriteArrayList<>();
}
if (videoFrames.isEmpty() && audioFrames.isEmpty()) {
log.warn("未采集到音视频数据: roomId={}, targetUserId={}, eventId={}", roomId, targetUserId, eventId);
return;
}
// Mux the captured frames into an MP4 file via FFmpeg.
String mp4FilePath = generateMP4FromFrames(targetUserId, videoFrames, audioFrames);
if (mp4FilePath != null) {
// Encode the PCM audio frames as Base64 for the notification payload.
String audioBase64Data = encodePcmFramesToBase64(audioFrames);
// Send the capture result over WebSocket (audio-only or audio+video).
try {
Map<String, Object> videoDataMap = MapUtils.of(
"captureId", captureId,
"userId", targetUserId,
"audioData", audioBase64Data,
"audioType", "pcm");
if (onlyAudio) {
webSocketMessageHandler.sendNotificationToRoom(roomId, NoticeEventEnum.CAPTURE_AUDIO,
videoDataMap,
eventId);
} else {
String videoBase64Data = convertFileToBase64(mp4FilePath);
videoDataMap.put("videoData", videoBase64Data);
videoDataMap.put("videoType", "mp4");
webSocketMessageHandler.sendNotificationToRoom(roomId, NoticeEventEnum.CAPTURE_VIDEO,
videoDataMap,
eventId);
}
} catch (Exception e) {
log.error("发送截取音视频WebSocket通知失败: roomId={}, targetUserId={}, eventId={}",
roomId, targetUserId, eventId, e);
}
log.info("成功截取音视频数据并生成MP4: roomId={}, targetUserId={}, duration={}, eventId={}, captureId={}",
roomId, targetUserId, duration, eventId, captureId);
// Delete the temporary MP4 file.
deleteFile(mp4FilePath);
} else {
log.error("生成MP4文件失败: roomId={}, targetUserId={}, eventId={}", roomId, targetUserId, eventId);
}
} catch (InterruptedException e) {
log.error("采集视频数据被中断: roomId={}, targetUserId={}, eventId={}", roomId, targetUserId, eventId, e);
Thread.currentThread().interrupt();
// Clean up the caches on interruption as well.
capturedVideoFrames.remove(eventId);
capturedAudioFrames.remove(eventId);
List<String> remainingEventIds = userEventIdMapping.get(targetUserId);
if (remainingEventIds != null) {
remainingEventIds.remove(eventId);
if (remainingEventIds.isEmpty()) {
userEventIdMapping.remove(targetUserId);
} else {
userEventIdMapping.put(targetUserId, remainingEventIds);
}
}
} catch (Exception e) {
log.error("截取指定用户音视频数据失败: roomId={}, targetUserId={}, duration={}, eventId={}",
roomId, targetUserId, duration, eventId, e);
// Clean up the caches on failure as well.
capturedVideoFrames.remove(eventId);
capturedAudioFrames.remove(eventId);
List<String> remainingEventIds = userEventIdMapping.get(targetUserId);
if (remainingEventIds != null) {
remainingEventIds.remove(eventId);
if (remainingEventIds.isEmpty()) {
userEventIdMapping.remove(targetUserId);
} else {
userEventIdMapping.put(targetUserId, remainingEventIds);
}
}
} finally {
// Always reset the video capture flag.
isCapturingVideo = false;
log.debug("采集任务结束,清理状态: roomId={}, targetUserId={}, eventId={}", roomId, targetUserId, eventId);
// 清空缓存(可选,为下次采集做准备)
// capturedVideoFrames.clear();
// capturedAudioFrames.clear();
}
}
// ========== TRTC回调方法 ==========
/**
 * TRTC error callback: logs the native error and notifies the room over
 * WebSocket. Notification failures are logged and swallowed.
 */
@Override
public void OnError(com.tencent.liteav.Error error) {
    log.error("TRTC错误: roomId={}, error={}", roomId, error);
    try {
        webSocketMessageHandler.sendNotificationToRoom(
                roomId,
                NoticeEventEnum.LINUX_SDK_ERROR,
                MapUtils.of("roomId", roomId),
                getCurrentEventId());
        log.debug("发送linux sdk异常WebSocket通知: roomId={}, userId={}", roomId, userId);
    } catch (Exception e) {
        log.error("发送linux sdk异常WebSocket通知失败: roomId={}, userId={}", roomId, userId, e);
    }
}
@Override
public void OnConnectionStateChanged(ConnectionState old_state, ConnectionState new_state) {
// Connection state transition callback; currently log-only.
log.info("连接状态变化: roomId={}, old={}, new={}", roomId, old_state, new_state);
}
/**
 * Enter-room success callback: creates the local audio channel (16kHz mono,
 * matching the PCM buffer created in start()) and the local high-quality video
 * channel, then notifies the room over WebSocket.
 */
@Override
public void OnEnterRoom() {
log.info("进入房间成功: roomId={}, userId={}", roomId, userId);
// Create the local audio channel: mono, 16kHz.
AudioEncodeParams audioEncodeParams = new AudioEncodeParams();
audioEncodeParams.setChannels(1);
audioEncodeParams.setSample_rate(16000);
cloud.CreateLocalAudioChannel(audioEncodeParams);
cloud.CreateLocalVideoChannel(StreamType.STREAM_TYPE_VIDEO_HIGH);
// NOTE(review): audioEncodeParams looks like a native (SWIG) wrapper but is never
// released here, unlike RoomParams in start() — confirm whether it needs delete().
// Send the enter-room WebSocket notification.
try {
Map<String, Object> notificationData = MapUtils.of("roomId", roomId, "userId", userId);
webSocketMessageHandler.sendNotificationToRoom(roomId, NoticeEventEnum.ENTER_ROOM_SUCCESS,
notificationData, getCurrentEventId());
log.debug("发送进入房间WebSocket通知: roomId={}, userId={}", roomId, userId);
} catch (Exception e) {
log.error("发送进入房间WebSocket通知失败: roomId={}, userId={}", roomId, userId, e);
}
}
/**
 * Exit-room callback: notifies the room over WebSocket. Notification failures
 * are logged and swallowed.
 */
@Override
public void OnExitRoom() {
    log.info("退出房间: roomId={}", roomId);
    try {
        webSocketMessageHandler.sendNotificationToRoom(roomId, NoticeEventEnum.EXIT_ROOM_SUCCESS,
                MapUtils.of("roomId", roomId), getCurrentEventId());
        log.debug("发送退房房间WebSocket通知: roomId={}, userId={}", roomId, userId);
    } catch (Exception e) {
        log.error("发送退房房间WebSocket通知失败: roomId={}, userId={}", roomId, userId, e);
    }
}
/**
 * Remote user entered: increments the per-room user counter in Redis and, if a
 * delayed client-offline notification is pending, cancels it.
 */
@Override
public void OnRemoteUserEnterRoom(UserInfo info) {
String remoteUserId = info.getUser_id().GetValue();
log.info("远程用户进入房间: roomId={}, remoteUserId={}", roomId, remoteUserId);
// NOTE(review): `count > 0` auto-unboxes — would NPE if increment() can
// return null; verify the RedisTemplateWrapper contract.
Long count = redisTemplateWrapper.increment(RedisKeysConstants.TRTC_ROOM_USER_COUNT + roomId, 1);
// A new user arrived: cancel any pending delayed-offline notification.
if (count > 0 && clientOfflineDelayTask != null && !clientOfflineDelayTask.isDone()) {
log.info("新用户进房,取消延迟离线通知: roomId={}, remoteUserId={}, currentCount={}",
roomId, remoteUserId, count);
cancelClientOfflineDelayTask();
}
}
/**
 * Remote user left: drops any pending screenshot request for that user,
 * decrements the Redis user counter, and when at most one user remains either
 * schedules a delayed client-offline notification (if configured) or sends it
 * immediately.
 */
@Override
public void OnRemoteUserExitRoom(UserInfo info, int reason) {
String remoteUserId = info.getUser_id().GetValue();
log.info("远程用户退出房间: roomId={}, remoteUserId={}, reason={}", roomId, remoteUserId, reason);
// Drop the leaving user's pending screenshot request, if any.
PendingCaptureRequest removedRequest = pendingCaptureImageRequests.remove(remoteUserId);
if (removedRequest != null) {
log.info("用户退出房间,清理待截图请求: roomId={}, remoteUserId={}, eventId={}",
roomId, remoteUserId, removedRequest.eventId);
}
// NOTE(review): `count <= 1` auto-unboxes — would NPE if decrement() can
// return null; verify the RedisTemplateWrapper contract.
Long count = redisTemplateWrapper.decrement(RedisKeysConstants.TRTC_ROOM_USER_COUNT + roomId, 1);
if (count <= 1) {
// Delay the offline notification if a positive delay is configured.
if (clientOfflineDelaySecond > 0) {
log.info("房间只剩1人,启动延迟离线通知: roomId={}, delays={}, currentCount={}",
roomId, clientOfflineDelaySecond, count);
scheduleClientOfflineNotification();
} else {
// No delay configured: notify immediately.
sendClientOfflineNotification();
}
}
}
/**
 * Schedules the delayed client-offline notification.
 * After {@code clientOfflineDelaySecond} SECONDS the room user count is
 * re-checked; the notification is only sent if no new user entered in the
 * meantime. Any previously scheduled task is cancelled first.
 * <p>
 * Fix: the log label previously said "delayMs" although the configured value
 * is in seconds (it is passed to the scheduler with TimeUnit.SECONDS).
 */
private void scheduleClientOfflineNotification() {
    // Cancel a previously scheduled task, if any.
    cancelClientOfflineDelayTask();
    log.info("调度客户端离线延迟通知: roomId={}, delaySecond={}", roomId, clientOfflineDelaySecond);
    // Schedule the delayed notification on the shared scheduler.
    clientOfflineDelayTask = scheduler.schedule(() -> {
        try {
            // Re-check the user count (a new user may have entered during the delay).
            Long currentCount = redisTemplateWrapper.get(RedisKeysConstants.TRTC_ROOM_USER_COUNT + roomId,
                    Long.class);
            if (currentCount == null || currentCount <= 1) {
                log.info("延迟时间到,发送客户端离线通知: roomId={}, currentCount={}", roomId, currentCount);
                sendClientOfflineNotification();
            } else {
                log.info("延迟期间有新用户进房,取消发送离线通知: roomId={}, currentCount={}", roomId, currentCount);
            }
        } catch (Exception e) {
            log.error("延迟通知任务执行异常: roomId={}", roomId, e);
        } finally {
            // Clear the reference once the task has run.
            clientOfflineDelayTask = null;
        }
    }, clientOfflineDelaySecond, TimeUnit.SECONDS);
}
/**
 * Cancels the pending client-offline notification task, if one is scheduled
 * and not yet done, and clears its reference.
 */
private void cancelClientOfflineDelayTask() {
    java.util.concurrent.ScheduledFuture<?> pendingTask = clientOfflineDelayTask;
    if (pendingTask == null || pendingTask.isDone()) {
        return;
    }
    // Do not interrupt a task that is already executing (cancel(false)).
    boolean cancelled = pendingTask.cancel(false);
    log.info("取消客户端离线延迟通知任务: roomId={}, cancelled={}", roomId, cancelled);
    clientOfflineDelayTask = null;
}
/**
 * Immediately sends the CLIENT_OFFLINE notification for this room over
 * WebSocket. Best-effort: failures are logged and swallowed.
 */
private void sendClientOfflineNotification() {
    try {
        Map<String, Object> payload = MapUtils.of("roomId", roomId);
        webSocketMessageHandler.sendNotificationToRoom(roomId, NoticeEventEnum.CLIENT_OFFLINE, payload,
                getCurrentEventId());
        log.info("发送客户端离线通知: roomId={}, userId={}", roomId, userId);
    } catch (Exception e) {
        log.error("发送客户端离线通知失败: roomId={}, userId={}", roomId, userId, e);
    }
}
@Override
public void OnRemoteAudioAvailable(String user_id, boolean available) {
// Remote audio availability change callback; currently log-only.
log.debug("远程音频可用性变化: roomId={}, userId={}, available={}", roomId, user_id, available);
}
/**
 * Remote video availability change callback. When a user's video stops, any
 * pending screenshot request for that user is dropped since no more frames
 * will arrive to fulfill it.
 */
@Override
public void OnRemoteVideoAvailable(String user_id, boolean available, StreamType type) {
    log.debug("远程视频可用性变化: roomId={}, userId={}, available={}, type={}", roomId, user_id, available, type);
    if (available) {
        return;
    }
    log.info("远程视频已停止: roomId={}, userId={}", roomId, user_id);
    PendingCaptureRequest staleRequest = pendingCaptureImageRequests.remove(user_id);
    if (staleRequest != null) {
        log.info("视频已停止,清理待截图请求: roomId={}, userId={}, eventId={}",
                roomId, user_id, staleRequest.eventId);
    }
}
@Override
public void OnRemoteVideoReceived(String var1, StreamType var2, VideoFrame var3) {
// Encoded-frame variant of the remote-video callback; currently log-only.
// NOTE(review): unlike the PixelFrame overload, var3 is never delete()d here —
// confirm whether VideoFrame wraps caller-owned native memory (possible leak).
log.debug("收到远程视频帧: roomId={}, StreamType={}", roomId, var2.toString());
}
/**
 * Decoded (YUV) remote video frame callback. Two consumers:
 * 1) pending screenshot requests — the frame is converted to JPEG and sent
 *    over WebSocket, then the request is removed;
 * 2) active video capture — the frame bytes are cloned and appended to the
 *    per-eventId caches used by captureVideo().
 * The native PixelFrame is always released in the finally block.
 */
@Override
public void OnRemoteVideoReceived(String user_id, StreamType type, PixelFrame frame) {
try {
byte[] frameData = frame.data();
if (frameData == null || frameData.length == 0) {
return;
}
int width = (int) frame.getWidth();
int height = (int) frame.getHeight();
// Check whether a screenshot request is pending for this user (on-demand processing).
PendingCaptureRequest captureRequest = pendingCaptureImageRequests.get(user_id);
if (captureRequest != null) {
// A capture request is pending: convert this frame to JPEG and reply.
// (The original comment here claimed "no request, return early", which
// described the opposite branch.)
try {
// Reuse a pooled BufferedImage to cut down on allocations.
BufferedImage reusableImage = getBufferedImage(width, height);
byte[] clonedData = frameData.clone();
// Convert the YUV frame to a Base64-encoded JPEG.
String jpegData = FileUtils.yuv420ToBase64(clonedData, width, height, reusableImage);
if (jpegData != null) {
// Drop the request if it already exceeded the timeout window.
long elapsed = System.currentTimeMillis() - captureRequest.createTime;
if (elapsed > CAPTURE_IMAGE_TIMEOUT_MS) {
log.warn("截图请求已超时,跳过处理: roomId={}, userId={}, elapsed={}ms",
roomId, user_id, elapsed);
pendingCaptureImageRequests.remove(user_id);
} else {
// Send the screenshot over WebSocket.
try {
Map<String, Object> imageDataMap = MapUtils.of(
"roomId", roomId,
"imageData", jpegData,
"dataType", "jpeg",
"captureId", captureRequest.request.getCaptureId(),
"imageSize", jpegData.length());
webSocketMessageHandler.sendNotificationToRoom(roomId, NoticeEventEnum.CAPTURE_IMAGE,
imageDataMap, captureRequest.eventId);
log.info("成功截取图片帧: roomId={}, userId={}, imageSize={}, elapsed={}ms",
roomId, user_id, jpegData.length(), elapsed);
} catch (Exception e) {
log.error("发送截取图片WebSocket通知失败: roomId={}, userId={}", roomId, user_id, e);
}
// Remove the request after processing (avoid duplicate screenshots).
pendingCaptureImageRequests.remove(user_id);
}
} else {
log.warn("视频帧转换为JPEG失败: roomId={}, userId={}", roomId, user_id);
// Remove the request on conversion failure too, to avoid endless retries.
pendingCaptureImageRequests.remove(user_id);
}
} catch (Exception e) {
log.error("处理截图请求异常: roomId={}, userId={}", roomId, user_id, e);
// Remove the request on error too, to avoid endless retries.
pendingCaptureImageRequests.remove(user_id);
}
}
long currentTime = System.currentTimeMillis();
// If a video capture is active for this user, cache a clone of the frame
// under every eventId currently registered for the user.
if (isCapturingVideo && userEventIdMapping.containsKey(user_id)) {
// Clone so later native/SDK reuse of the buffer cannot corrupt the cache.
byte[] clonedData = frameData.clone();
VideoFrameData videoFrame = new VideoFrameData(
clonedData,
currentTime,
(int) frame.getWidth(),
(int) frame.getHeight(),
type);
List<String> eventIds = userEventIdMapping.get(user_id);
for (String eventId : eventIds) {
capturedVideoFrames.computeIfAbsent(eventId, k -> new CopyOnWriteArrayList<>())
.add(videoFrame);
}
log.debug("缓存视频帧: roomId={}, userId={}, dataSize={}, width={}, height={}",
roomId, user_id, clonedData.length, frame.getWidth(), frame.getHeight());
}
} catch (Exception e) {
log.error("处理远程视频帧失败: roomId={}, userId={}", roomId, user_id, e);
} finally {
// Always release the native frame wrapper to avoid off-heap leaks.
if (frame != null) {
frame.delete();
}
}
}
@Override
public void OnRemoteAudioReceived(String user_id, AudioFrame frame) {
    // Native callback: `frame` wraps off-heap memory allocated by the JNI layer
    // and MUST be released via frame.delete() before returning (see finally).
    try {
        // Decide up-front which consumers need this frame so that frame.data()
        // - a JNI call that copies the native buffer into a brand-new Java
        // array on every invocation - is executed at most once per frame.
        boolean pushRealtime = isCapturingAudio.get() && targetAudioUserId != null
                && targetAudioUserId.equals(user_id) && audioStream;
        boolean cacheFrame = isCapturingAudio.get()
                && userEventIdMapping.containsKey(user_id) && audioBlock;
        if (!pushRealtime && !cacheFrame) {
            return;
        }
        byte[] audioData = frame.data();
        if (audioData == null || audioData.length == 0) {
            return;
        }
        if (pushRealtime) {
            // Real-time audio streaming: hand off to the dedicated executor so
            // the native callback thread is never blocked by WebSocket I/O.
            final byte[] audioCopy = audioData.clone();
            final String targetUserSnapshot = targetAudioUserId;
            final String eventIdSnapshot = getCurrentEventId();
            try {
                realtimeAudioExecutor.execute(() -> {
                    try {
                        Map<String, Object> audioDataMap = MapUtils.of(
                                "roomId", roomId,
                                "userId", targetUserSnapshot,
                                "dataType", "pcm",
                                "sampleRate", 16000,
                                "channel", 1,
                                "audioData", java.util.Base64.getEncoder().encodeToString(audioCopy));
                        webSocketMessageHandler.sendNotificationToRoom(roomId,
                                NoticeEventEnum.PUSH_AUDIO_STREAM_REAL,
                                audioDataMap, eventIdSnapshot);
                    } catch (Exception e) {
                        log.error("发送远程音频流WebSocket通知失败: roomId={}, userId={}", roomId,
                                targetUserSnapshot, e);
                    }
                });
            } catch (RejectedExecutionException rex) {
                // Bounded queue is full: drop this frame rather than block the callback.
                log.warn("实时音频推送队列已满,丢弃音频帧: roomId={}, userId={}, pendingTasks={}",
                        roomId, targetUserSnapshot, realtimeAudioExecutor.getQueue().size());
            }
        }
        if (cacheFrame) {
            // captureVideo support: buffer a private copy of the PCM data per eventId.
            long currentTime = System.currentTimeMillis();
            // Clone so later reuse of audioData cannot alter the cached frame.
            byte[] clonedData = audioData.clone();
            AudioFrameData audioFrame = new AudioFrameData(
                    clonedData,
                    currentTime,
                    frame.getSample_rate(),
                    frame.getChannels());
            List<String> eventIds = userEventIdMapping.get(user_id);
            for (String eventId : eventIds) {
                capturedAudioFrames.computeIfAbsent(eventId, k -> new CopyOnWriteArrayList<>())
                        .add(audioFrame);
            }
            log.debug("缓存音频帧: roomId={}, userId={}, dataSize={}, sampleRate={},channels={}",
                    roomId, user_id, clonedData.length, frame.getSample_rate(),
                    frame.getChannels());
        }
    } catch (Exception e) {
        log.error("缓存音频帧失败: roomId={}, userId={}", roomId, user_id, e);
    } finally {
        // Always free the native memory backing the frame, even on error.
        if (frame != null) {
            frame.delete();
        }
    }
}
@Override
public void OnRemoteMixedAudioReceived(AudioFrame frame) {
    // Mixed audio is not consumed here, but the frame still wraps native
    // memory allocated by the JNI layer; release it immediately to avoid an
    // off-heap leak (mirrors the finally blocks of the other frame callbacks).
    if (frame != null) {
        frame.delete();
    }
}
@Override
public void OnLocalAudioChannelCreated() {
    log.info("本地音频通道创建成功: roomId={}", roomId);
    // A waiting AudioPushThread polls this flag in its startup loop.
    isLocalAudioChannelCreated = true;
    if (audioPushThread == null) {
        log.debug("本地音频通道创建,但暂无音频推送线程: roomId={}", roomId);
    } else {
        log.info("通知等待中的音频推送线程: roomId={}", roomId);
    }
}
@Override
public void OnLocalAudioChannelDestroyed() {
    // Native callback: the local audio channel is gone. Clearing the flag
    // makes the audio push loop stop sending frames.
    log.info("本地音频通道销毁: roomId={}", roomId);
    isLocalAudioChannelCreated = false;
}
@Override
public void OnLocalVideoChannelCreated(StreamType type) {
    log.info("本地视频通道创建成功: roomId={}, type={}", roomId, type.toString());
    // Kick off the image push loop once, if it was requested but is not yet running.
    boolean shouldStartPush = isImagePushEnabled && !isImagePushRunning;
    if (shouldStartPush) {
        startImagePush();
    }
}
@Override
public void OnLocalVideoChannelDestroyed(StreamType type) {
    // Log format aligned with OnLocalVideoChannelCreated (", type={}" with a space).
    log.info("本地视频通道销毁: roomId={}, type={}", roomId, type.toString());
}
@Override
public void OnRequestChangeVideoEncodeBitrate(StreamType streamType, int bitrate) {
    // The remote side asked us to adjust the encoder bitrate; only logged here.
    log.info("请求改变视频编码码率: roomId={}, userId={}, streamType={}, bitrate={}", roomId, userId, streamType, bitrate);
}
@Override
public void OnRequestKeyFrame(StreamType streamType) {
    // Key-frame request from the remote side; only logged here.
    log.debug("请求关键帧: roomId={}, userId={}, streamType={}", roomId, userId, streamType);
}
@Override
public void OnSeiMessageReceived(String var1, StreamType var2, int var3, byte[] var4) {
    // Log the payload size rather than the byte[] itself, which would only
    // print an unhelpful identity string such as "[B@1a2b3c". Null-safe.
    log.debug("收到SEI消息: roomId={}, userId={}, streamType={}, payloadType={}, payloadSize={}", roomId, var1,
            var2, var3, var4 == null ? 0 : var4.length);
}
@Override
public void OnReceiveCustomCmdMsg(String var1, int var2, long var3, byte[] var5) {
    // var5 is the message payload (byte[]), not a timestamp; the old label
    // "timestamp" was misleading and the array printed as "[B@...". Log size.
    log.debug("收到自定义消息: roomId={}, userId={}, cmdId={}, seq={}, msgSize={}", roomId, var1, var2, var3,
            var5 == null ? 0 : var5.length);
}
@Override
public void OnMissCustomCmdMsg(String var1, int var2, long var3, long var5) {
    // Reports dropped/missed custom command messages from the native layer.
    // NOTE(review): the labels cmdId/seq/timestamp for var2/var3/var5 are
    // assumed from the SDK callback signature — confirm against the TRTC docs.
    log.debug("缺失自定义消息: roomId={}, userId={}, cmdId={}, seq={}, timestamp={}", roomId, var1, var2, var3, var5);
}
@Override
public void OnNetworkQuality(NetworkQualityInfo var1, NetworkQualityInfo[] var2) {
    // Guard against a null local-quality object to keep the native callback
    // from throwing; previously this dereferenced var1 unconditionally.
    if (var1 == null) {
        return;
    }
    // Log the remote entry count instead of the array reference, which would
    // only print an identity string.
    log.debug("网络质量: roomId={}, userId={}, quality={}, remoteCount={}", roomId, var1.getUser_id(),
            var1.getQuality().swigValue(), var2 == null ? 0 : var2.length);
}
/**
 * Starts pushing a static image as the local video stream (modeled on the
 * C++ SendVideoOnly implementation).
 *
 * <p>Downloads the image, converts it to YUV420p, optionally encrypts it,
 * then schedules a 15fps push task. A single {@code PixelFrame} wrapping
 * native memory is reused for every frame to limit JNI allocations; it is
 * released in {@code stopImagePush()} (or here, if startup fails).
 */
private void startImagePush() {
    if (isImagePushRunning) {
        log.warn("图片推送已在运行: roomId={}", roomId);
        return;
    }
    log.info("开始图片推送: roomId={}, imageUrl={}", roomId, imageUrl);
    try {
        // Download the image and convert it to raw YUV.
        ImageProcessor.ImageData imageData = ImageProcessor.downloadAndConvertToYuv(imageUrl);
        if (imageData == null) {
            log.error("图片下载或转换失败: roomId={}, imageUrl={}", roomId, imageUrl);
            return;
        }
        byte[] yuvData = imageData.getYuvData();
        // Encrypt the frame payload when the room requires it.
        if (enterRequest.getVideoEncryptionType() > 0) {
            yuvData = EncodeAndDecryptUtils.symmetricEncrypt(enterRequest.getSessionKey(),
                    enterRequest.getVideoEncryptionType(), yuvData);
        }
        this.imageYuvData = yuvData;
        this.imageWidth = imageData.getWidth();
        this.imageHeight = imageData.getHeight();
        this.isImagePushRunning = true;
        // Reusable PixelFrame: one native allocation for the whole push session.
        reusablePixelFrame = new PixelFrame();
        reusablePixelFrame.SetData(imageYuvData, imageYuvData.length);
        reusablePixelFrame.setWidth(imageWidth);
        reusablePixelFrame.setHeight(imageHeight);
        reusablePixelFrame.setRotation(VideoRotation.VIDEO_ROTATION_0);
        reusablePixelFrame.setFormat(VideoPixelFormat.VIDEO_PIXEL_FORMAT_YUV420p);
        // Reset the PTS counter for the new session.
        nextPts.set(20);
        // 15fps => 66ms interval (integer division of 1000/15).
        final int frameRate = 15;
        final long frameIntervalMs = 1000 / frameRate;
        // Scheduled task instead of a sleep-loop: no thread is held for the session.
        imagePushTask = scheduler.scheduleAtFixedRate(
                this::imagePushFrame,
                0,
                frameIntervalMs,
                java.util.concurrent.TimeUnit.MILLISECONDS);
        log.info("图片推送启动成功: roomId={}, width={}, height={}, dataSize={}, frameRate={}fps, interval={}ms",
                roomId, imageWidth, imageHeight, imageYuvData.length, frameRate, frameIntervalMs);
    } catch (Exception e) {
        log.error("启动图片推送失败: roomId={}, imageUrl={}", roomId, imageUrl, e);
        // Roll back partially-initialized state so a failed start neither
        // leaks the native PixelFrame nor leaves the "running" flag stuck on.
        isImagePushRunning = false;
        if (reusablePixelFrame != null) {
            try {
                reusablePixelFrame.delete();
            } catch (Exception ignored) {
                // best-effort native cleanup
            } finally {
                reusablePixelFrame = null;
            }
        }
    }
}
/**
 * Pushes a single image frame; invoked periodically by the scheduler.
 * Using a scheduled task instead of a sleep-loop keeps a pool thread from
 * being held for the whole push session.
 */
private void imagePushFrame() {
    // Stop condition 1: push switched off or the room has shut down.
    if (!isImagePushRunning || !isRunning.get()) {
        log.debug("图片推送已停止,取消任务: roomId={}", roomId);
        cancelImagePushTaskQuietly();
        return;
    }
    // Stop condition 2: required state is gone (image data, TRTC handle, native frame).
    if (imageYuvData == null || cloud == null || reusablePixelFrame == null) {
        log.warn("图片数据或TRTC实例无效,停止推送: roomId={}", roomId);
        isImagePushRunning = false;
        cancelImagePushTaskQuietly();
        return;
    }
    try {
        // Advance the PTS by one 15fps frame (1000/15 = 66 in integer math,
        // matching the 66ms scheduling interval).
        long currentPts = nextPts.getAndAdd(1000 / 15);
        pushImageFrameWithPts(currentPts);
    } catch (Exception e) {
        log.error("图片推送任务异常: roomId={}", roomId, e);
        // On error, stop pushing altogether.
        isImagePushRunning = false;
        cancelImagePushTaskQuietly();
    }
}

/** Cancels the scheduled image push task (if any) without interrupting a run in progress. */
private void cancelImagePushTaskQuietly() {
    if (imagePushTask != null) {
        imagePushTask.cancel(false);
        imagePushTask = null;
    }
}
/**
 * Stops the image push: cancels the scheduled task, clears the cached YUV
 * data and releases the native memory held by the reusable PixelFrame.
 */
private void stopImagePush() {
    // Also run cleanup when only the native frame is still alive: the old
    // guard skipped cleanup in that state and could leak off-heap memory.
    if (!isImagePushRunning && imagePushTask == null && reusablePixelFrame == null) {
        log.debug("图片推送未在运行,无需停止: roomId={}", roomId);
        return;
    }
    log.info("停止图片推送: roomId={}", roomId);
    isImagePushRunning = false;
    // Cancel the scheduled task so its pool thread is released promptly.
    if (imagePushTask != null) {
        boolean cancelled = imagePushTask.cancel(false);
        log.debug("取消图片推送定时任务: roomId={}, cancelled={}", roomId, cancelled);
        imagePushTask = null;
    }
    // Drop the cached image data.
    imageYuvData = null;
    imageWidth = 0;
    imageHeight = 0;
    // Release the native memory wrapped by the reusable PixelFrame.
    if (reusablePixelFrame != null) {
        try {
            reusablePixelFrame.delete();
        } catch (Exception e) {
            log.error("停止图片推送时释放 PixelFrame 失败: roomId={}", roomId, e);
        } finally {
            reusablePixelFrame = null;
        }
    }
    // Reset the PTS counter for the next session.
    nextPts.set(20);
    log.info("图片推送已停止: roomId={}", roomId);
}
/**
 * Sends one image frame to TRTC with the given PTS. The PixelFrame is
 * reused across calls; only its timestamp changes per frame.
 */
private void pushImageFrameWithPts(long pts) {
    boolean ready = isImagePushRunning && imageYuvData != null
            && cloud != null && reusablePixelFrame != null;
    if (!ready) {
        return;
    }
    try {
        // All other frame attributes were configured once in startImagePush.
        reusablePixelFrame.setPts(pts);
        cloud.SendVideoFrame(StreamType.STREAM_TYPE_VIDEO_HIGH, reusablePixelFrame);
    } catch (Exception e) {
        log.error("推送图片帧失败: roomId={}, pts={}", roomId, pts, e);
    }
}
/**
 * Audio push thread.
 * Pulls 20ms PCM chunks from the shared audio buffer and sends them to TRTC
 * with precise pacing (modeled on the CloudWrapper.java implementation).
 */
private class AudioPushThread extends Thread {
    // Pacing state in nanoseconds (modeled on CloudWrapper)
    private long audioSendTime = 0; // total audio time already sent, in nanoseconds
    private long startTime = 0; // System.nanoTime() captured when pacing started
    private String userId;
    @Setter
    private String eventId;
    // Reused AudioFrame (wraps native memory); created lazily and released
    // exactly once when the thread exits, avoiding per-frame JNI allocations
    // and Finalizer backlog
    private AudioFrame reusableAudioFrame = null;
    AudioPushThread(String userId, String eventId) {
        // userId is kept only for log/callback identification
        this.userId = userId;
        this.eventId = eventId;
    }
    /**
     * Main loop: waits for the local audio channel, then paces out 20ms PCM
     * chunks until interrupted, stopped, or the buffer stays empty too long.
     */
    @Override
    public void run() {
        log.info("音频推送线程启动: roomId={}", roomId);
        // Wait for the local audio channel to be created
        int waitCount = 0;
        while (!isLocalAudioChannelCreated && isRunning.get() && waitCount < 50) { // wait at most 5 seconds
            try {
                Thread.sleep(100); // poll every 100ms
                waitCount++;
                if (waitCount % 10 == 0) { // log once per second
                    log.info("等待本地音频通道创建: roomId={}, 已等待{}ms", roomId, waitCount * 100);
                }
            } catch (InterruptedException e) {
                log.warn("音频推送线程被中断: roomId={}", roomId);
                Thread.currentThread().interrupt();
                return;
            }
        }
        if (!isLocalAudioChannelCreated) {
            log.error("本地音频通道创建超时,停止音频推送: roomId={}", roomId);
            return;
        }
        int initialBufferUsage = audioBuffer.getCurrentBufferUsage();
        log.info("本地音频通道已创建,开始音频推送: roomId={}, 当前缓冲区数据量:{}ms", roomId, initialBufferUsage);
        // If the buffer is empty, wait a while (up to ~2s) for data to arrive
        if (initialBufferUsage == 0) {
            log.warn("缓冲区为空,等待数据: roomId={}", roomId);
            int dataWaitCount = 0;
            while (audioBuffer.getCurrentBufferUsage() == 0 && isRunning.get() && dataWaitCount < 20) {
                try {
                    Thread.sleep(100);
                    dataWaitCount++;
                } catch (InterruptedException e) {
                    log.warn("等待音频数据被中断: roomId={}", roomId);
                    Thread.currentThread().interrupt();
                    return;
                }
            }
            log.info("等待结束,缓冲区数据量:{}bytes: roomId={}", audioBuffer.getCurrentBufferUsage(), roomId);
        }
        // Initialize pacing (modeled on CloudWrapper)
        startTime = System.nanoTime();
        audioSendTime = 0;
        // Counter of consecutive reads that produced no data
        int noDataCount = 0;
        final int MAX_NO_DATA_COUNT = 10; // bail out after 10 consecutive empty reads (~200ms of audio time)
        while (!Thread.currentThread().isInterrupted() && isRunning.get()) {
            try {
                // Stop if the local audio channel went away
                if (!isLocalAudioChannelCreated) {
                    log.warn("本地音频通道已销毁,停止音频推送: roomId={}", roomId);
                    break;
                }
                // Pacing check (modeled on CloudWrapper's check method)
                if (!checkTimeForAudioSend()) {
                    try {
                        Thread.sleep(10); // 10ms granularity, same as CloudWrapper
                    } catch (InterruptedException e) {
                        log.warn("音频推送线程被中断: roomId={}", roomId);
                        Thread.currentThread().interrupt();
                        break;
                    }
                    continue;
                }
                // Send 20ms of audio (modeled on CloudWrapper's send20ms_audio)
                boolean sentSuccessfully = send20msAudio();
                if (sentSuccessfully) {
                    // Data was sent; reset the empty-read counter
                    noDataCount = 0;
                } else {
                    // No data (or send failed); count it
                    noDataCount++;
                    // Too many consecutive empty reads: the stream has probably ended
                    if (noDataCount >= MAX_NO_DATA_COUNT) {
                        log.info("连续{}次无音频数据,可能音频流推送已结束: roomId={}", MAX_NO_DATA_COUNT, roomId);
                        // Notify listeners that the push has ended
                        sendAudioPushEndCallback(this.userId, this.eventId);
                        break;
                    }
                }
                // Advance pacing by 20ms (20,000,000 ns), even on empty reads
                audioSendTime += 20000000;
            } catch (Exception e) {
                log.error("音频推送线程异常: roomId={}", roomId, e);
                break;
            }
        }
        log.info("音频推送线程结束: roomId={}", roomId);
        // Release the reused AudioFrame's native memory exactly once, on exit
        if (reusableAudioFrame != null) {
            try {
                reusableAudioFrame.delete();
            } catch (Exception e) {
                log.error("释放复用的 AudioFrame 失败: roomId={}", roomId, e);
            } finally {
                reusableAudioFrame = null;
            }
        }
    }
    /**
     * Sends the "audio push ended" WebSocket callback.
     */
    private void sendAudioPushEndCallback(String userId, String eventId) {
        try {
            Map<String, Object> callbackData = MapUtils.of(
                    "userId", userId,
                    "roomId", roomId,
                    "eventId", eventId);
            webSocketMessageHandler.sendNotificationToRoom(roomId, NoticeEventEnum.AUDIO_PUSH_END, callbackData,
                    eventId);
            log.info("发送音频推送结束回调: roomId={}, userId={}", roomId, userId);
        } catch (Exception e) {
            log.error("发送音频推送结束回调失败: roomId={}, userId={}", roomId, userId, e);
        }
    }
    /**
     * Pacing check (modeled on CloudWrapper's check method).
     *
     * @return true when elapsed wall-clock time has caught up with the audio already sent
     */
    private boolean checkTimeForAudioSend() {
        long currentTime = System.nanoTime();
        long timeDiff = currentTime - startTime;
        return timeDiff >= audioSendTime;
    }
    /**
     * Sends 20ms of audio data (modeled on CloudWrapper's send20ms_audio).
     * The AudioFrame object is reused across calls to avoid per-frame native
     * allocations and Finalizer buildup.
     *
     * @return true if a frame was sent, false when there was no data or sending failed
     */
    private synchronized boolean send20msAudio() {
        // Is there at least 20ms of data buffered?
        if (!audioBuffer.CanGetAudioData(20)) {
            return false;
        }
        // Lazily create the reused AudioFrame
        if (reusableAudioFrame == null) {
            reusableAudioFrame = new AudioFrame();
        }
        try {
            // Pull 20ms of PCM data from the buffer
            PcmAudioBuffer.AudioData audioData = audioBuffer.GetAudioData(20);
            if (audioData != null && audioData.audio_frame != null && audioData.audio_frame.length > 0) {
                // Refresh the reused frame's payload and parameters
                reusableAudioFrame.SetData(audioData.audio_frame, audioData.audio_frame.length);
                reusableAudioFrame.setSample_rate(16000);
                reusableAudioFrame.setChannels(1);
                reusableAudioFrame.setCodec(AudioCodecType.AUDIO_CODEC_TYPE_PCM);
                reusableAudioFrame.setBits_per_sample(16);
                // Timestamp from the buffer (modeled on CloudWrapper's audio_timestamp)
                reusableAudioFrame.setPts(audioData.audio_timestamp);
                // Hand the frame to TRTC
                cloud.SendAudioFrame(reusableAudioFrame);
                return true; // sent successfully
            } else {
                log.debug("获取到的音频数据为空: roomId={}", roomId);
                return false;
            }
        } catch (Exception e) {
            log.error("发送20ms音频数据失败: roomId={}", roomId, e);
            return false;
        }
        // NOTE: the reused frame is released once in run() when the thread ends;
        // a non-reused frame would instead need delete() in a finally block here.
    }
}
// ========== Getters ==========
/** @return the room id this instance manages */
public String getRoomId() {
    return roomId;
}
/** @return the local user id used for this room instance */
public String getUserId() {
    return userId;
}
/** @return true while the room instance is running */
public boolean isRunning() {
    return isRunning.get();
}
/** @return true while audio capture is active */
public boolean isCapturingAudio() {
    return isCapturingAudio.get();
}
/**
 * Whether image push was requested for this room.
 *
 * @return true if image push is enabled
 */
public boolean isImagePushEnabled() {
    return isImagePushEnabled;
}
/**
 * Whether the image push task is currently running.
 *
 * @return true if image push is in progress
 */
public boolean isImagePushRunning() {
    return isImagePushRunning;
}
/**
 * URL of the image pushed as the local video stream.
 *
 * @return the configured image URL
 */
public String getImageUrl() {
    return imageUrl;
}
/**
 * Whether the audio push buffer still holds unsent data.
 *
 * @return true when the buffer exists and its current usage is greater than zero
 */
public boolean hasAudioDataInBuffer() {
    return audioBuffer != null && audioBuffer.getCurrentBufferUsage() > 0;
}
/**
 * Total audio buffer capacity in bytes.
 *
 * @return fixed capacity: 180s of 16kHz mono 16-bit PCM = 5,760,000 bytes
 */
public int getAudioBufferTotalCapacity() {
    // 180 seconds * 16 kHz * 1 channel * 2 bytes/sample = 5,760,000 bytes
    return 180 * 16000 * 1 * 2;
}
/**
 * Sets the current event id, used to tag subsequent WebSocket notifications.
 *
 * @param eventId event id to associate with this room's activity
 */
public void setCurrentEventId(String eventId) {
    this.currentEventId = eventId;
    log.debug("设置当前事件ID: roomId={}, eventId={}", roomId, eventId);
}
/**
 * Clears the current event id so later notifications carry no stale id.
 */
public void clearCurrentEventId() {
    this.currentEventId = null;
    log.debug("清除当前事件ID: roomId={}", roomId);
}
// ========== Video capture helpers ==========
/**
 * Builds an MP4 file from captured video and audio frames via FFmpeg.
 *
 * @param targetUserId user whose frames were captured (used for file naming and logs)
 * @param videoFrames  captured YUV video frames; may be null or empty
 * @param audioFrames  captured PCM audio frames; may be null or empty
 * @return path of the generated MP4 file, or null on failure
 */
private String generateMP4FromFrames(String targetUserId, List<VideoFrameData> videoFrames,
        List<AudioFrameData> audioFrames) {
    boolean noVideo = videoFrames == null || videoFrames.isEmpty();
    boolean noAudio = audioFrames == null || audioFrames.isEmpty();
    if (noVideo && noAudio) {
        log.warn("没有音视频帧数据: roomId={}, targetUserId={}", roomId, targetUserId);
        return null;
    }
    // FFmpeg must be available to mux anything
    if (!FFmpegUtils.isFFmpegAvailable()) {
        log.error("FFmpeg不可用,无法生成MP4文件: roomId={}, targetUserId={}", roomId, targetUserId);
        return null;
    }
    try {
        // Local single-threaded accumulation: plain ArrayList avoids the
        // O(n^2) copy-per-add cost of CopyOnWriteArrayList used before.
        List<DecryptionDelegateWrapper.MediaFrame> ffmpegVideoFrames = new ArrayList<>();
        int videoWidth = 0;
        int videoHeight = 0;
        StreamType streamType = StreamType.STREAM_TYPE_VIDEO_HIGH;
        // Guard against a null list: previously this loop NPE'd when only
        // audio frames were supplied.
        if (!noVideo) {
            for (VideoFrameData frame : videoFrames) {
                // Adopt the first frame's dimensions; all frames are expected to match
                if (videoWidth == 0) {
                    videoWidth = frame.getWidth();
                    videoHeight = frame.getHeight();
                    streamType = frame.getStreamType();
                }
                ffmpegVideoFrames.add(new DecryptionDelegateWrapper.MediaFrame(
                        frame.getData(),
                        frame.getTimestamp(),
                        streamType,
                        0 // sequence not used yet
                ));
            }
        }
        // Convert audio frames the same way
        List<DecryptionDelegateWrapper.MediaFrame> ffmpegAudioFrames = new ArrayList<>();
        int audioSampleRate = 16000;
        int audioChannels = 1;
        // Guard against a null list (symmetric with the video loop above)
        if (!noAudio) {
            for (AudioFrameData frame : audioFrames) {
                // Adopt the first frame's audio parameters; all frames are expected to match
                if (audioSampleRate == 16000 && frame.getSampleRate() > 0) {
                    audioSampleRate = frame.getSampleRate();
                    audioChannels = frame.getChannels();
                }
                ffmpegAudioFrames.add(new DecryptionDelegateWrapper.MediaFrame(
                        frame.getData(),
                        frame.getTimestamp(),
                        StreamType.STREAM_TYPE_AUDIO,
                        0 // sequence not used yet
                ));
            }
        }
        // Fall back to a default resolution when no frame provided one
        if (videoWidth == 0 || videoHeight == 0) {
            log.warn("无法确定视频尺寸,使用默认值: roomId={}, targetUserId={}", roomId, targetUserId);
            videoWidth = 640;
            videoHeight = 480;
        }
        // Assemble the FFmpeg encoding configuration
        FFmpegUtils.VideoConfig videoConfig = new FFmpegUtils.VideoConfig();
        videoConfig.setVideoWidth(videoWidth);
        videoConfig.setVideoHeight(videoHeight);
        videoConfig.setVideoFps(15); // default 15fps
        videoConfig.setPixelFormat("yuv420p");
        videoConfig.setAudioSampleRate(audioSampleRate);
        videoConfig.setAudioChannels(audioChannels);
        // Build the output path and make sure the directory exists
        String fileName = String.format("capture_video_%s_%s_%d.mp4",
                roomId, targetUserId, System.currentTimeMillis());
        String filePath = "./temp_video_files/" + fileName;
        File dir = new File("./temp_video_files/");
        if (!dir.exists() && !dir.mkdirs()) {
            log.warn("创建临时目录失败: roomId={}, dir={}", roomId, dir.getAbsolutePath());
        }
        // Mux frames into an MP4 via FFmpeg
        FFmpegUtils ffmpegUtils = new FFmpegUtils();
        boolean success = ffmpegUtils.generateMP4File(ffmpegAudioFrames, ffmpegVideoFrames, filePath, videoConfig);
        if (success) {
            log.info("MP4文件生成成功: roomId={}, targetUserId={}, filePath={}, videoFrames={}, audioFrames={}",
                    roomId, targetUserId, filePath, ffmpegVideoFrames.size(), ffmpegAudioFrames.size());
            return filePath;
        } else {
            log.error("MP4文件生成失败: roomId={}, targetUserId={}", roomId, targetUserId);
            return null;
        }
    } catch (Exception e) {
        log.error("生成MP4文件异常: roomId={}, targetUserId={}", roomId, targetUserId, e);
        return null;
    }
}
/**
 * Concatenates captured PCM audio frames and encodes the result as a
 * Base64 string.
 */
private String encodePcmFramesToBase64(List<AudioFrameData> audioFrames) {
    if (audioFrames == null || audioFrames.isEmpty()) {
        return null;
    }
    try {
        // Append every non-empty frame payload in order
        ByteArrayOutputStream pcmStream = new ByteArrayOutputStream();
        for (AudioFrameData frame : audioFrames) {
            byte[] chunk = frame.getData();
            if (chunk != null && chunk.length > 0) {
                pcmStream.write(chunk);
            }
        }
        return Base64.getEncoder().encodeToString(pcmStream.toByteArray());
    } catch (Exception e) {
        log.error("编码PCM为Base64失败: roomId={}", roomId, e);
        return null;
    }
}
/**
 * Reads a file from disk and returns its contents as a Base64 string.
 */
private String convertFileToBase64(String filePath) {
    File target = new File(filePath);
    if (!target.exists()) {
        log.error("文件不存在: {}", filePath);
        return null;
    }
    try {
        byte[] raw = Files.readAllBytes(target.toPath());
        String encoded = Base64.getEncoder().encodeToString(raw);
        log.debug("文件转换为Base64成功: filePath={}, fileSize={} bytes, base64Length={}",
                filePath, raw.length, encoded.length());
        return encoded;
    } catch (Exception e) {
        log.error("转换文件为Base64失败: filePath={}", filePath, e);
        return null;
    }
}
/**
 * Deletes a temporary file, distinguishing "file absent" (debug) from
 * "delete failed" (warn). The old code logged both cases as a failure.
 *
 * @param filePath path of the file to delete
 */
private void deleteFile(String filePath) {
    try {
        File file = new File(filePath);
        if (!file.exists()) {
            // Nothing to delete; not an error
            log.debug("临时文件不存在,无需删除: {}", filePath);
        } else if (file.delete()) {
            log.debug("临时文件删除成功: {}", filePath);
        } else {
            log.warn("删除文件失败: {}", filePath);
        }
    } catch (Exception e) {
        log.warn("删除文件时发生异常: filePath={}", filePath, e);
    }
}
}