ReplayKit Screen Recording
- ReplayKit is Apple's built-in iOS screen-recording framework; it lets an app record its own in-app screen content.
- An authorization dialog is presented each time recording starts; once recording is running there is no further visible UI.
- While recording the screen you can also capture microphone audio, app (speaker) audio, and camera content.
- You need to import the ReplayKit module.
- If you need pause and resume handling, use the startCapture method and write the sample buffers out yourself, as in the full example below.
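If you only need plain start/stop recording, the simpler startRecording / stopRecording API already handles storage and playback: the system keeps the movie and hands you an RPPreviewViewController for editing and sharing. A minimal sketch, assuming iOS 11+; the class and method names here are illustrative, not part of the main example:

```swift
import UIKit
import ReplayKit

// Hypothetical minimal recorder using the simple start/stop API.
final class SimpleRecorder: NSObject, RPPreviewViewControllerDelegate {
    private let recorder = RPScreenRecorder.shared()

    func begin() {
        recorder.isMicrophoneEnabled = true
        recorder.startRecording { error in          // triggers the system permission dialog
            if let error = error { print("start failed: \(error)") }
        }
    }

    func end(presentingFrom host: UIViewController) {
        recorder.stopRecording { preview, error in
            guard let preview = preview, error == nil else { return }
            preview.previewControllerDelegate = self
            host.present(preview, animated: true)   // system edit/share UI
        }
    }

    func previewControllerDidFinish(_ previewController: RPPreviewViewController) {
        previewController.dismiss(animated: true)
    }
}
```

With startRecording you never see the sample buffers, so you cannot cut a paused interval out of the file. The startCapture-based implementation below receives every CMSampleBuffer and writes the file itself with AVAssetWriter, which is what makes real pause/resume possible: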
```swift
import UIKit
import ReplayKit
import AVFoundation

class ViewController: UIViewController {

    var isCapture: Bool = false
    // Total length of all pauses so far; subtracted from every incoming timestamp.
    var timeOffset: CMTime = .zero
    // End time (pts + duration) of the last audio buffer written before a pause.
    var lastpts: CMTime = .invalid
    // Set by pause(); tells the capture handler to recompute timeOffset on resume.
    var isUpdatepts: Bool = false
    // startSession(atSourceTime:) may only be called once per writer.
    var isSessionStarted: Bool = false
    // Output file: Documents/videoOut.mp4. Any leftover file is removed first,
    // because AVAssetWriter refuses to write over an existing file.
    lazy var outURLString: URL = {
        let fileManager = FileManager.default
        let documentDirectory = fileManager.urls(for: .documentDirectory, in: .userDomainMask).first!
        let videoOutURL = documentDirectory.appendingPathComponent("videoOut.mp4")
        if fileManager.fileExists(atPath: videoOutURL.path) {
            do {
                try fileManager.removeItem(at: videoOutURL)
            } catch {
                print("unable to delete file")
            }
        }
        return videoOutURL
    }()
    fileprivate lazy var assetWriter: AVAssetWriter = try! AVAssetWriter(url: self.outURLString, fileType: .mp4)
    var videoWriterInput: AVAssetWriterInput!
    var audioWriterInput: AVAssetWriterInput!
    override func viewDidLoad() {
        super.viewDidLoad()
        // H.264 video input sized to the view (multiply by UIScreen.main.scale if you
        // want full pixel resolution; ReplayKit delivers pixel-sized buffers).
        videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoScalingModeKey: AVVideoScalingModeResizeAspectFill,
            AVVideoWidthKey: self.view.frame.size.width,
            AVVideoHeightKey: self.view.frame.size.height,
            AVVideoCompressionPropertiesKey: [
                AVVideoExpectedSourceFrameRateKey: 25,
                // AVVideoQualityKey is only valid for JPEG encoding; with H.264 the
                // average bit rate (here ~6 bits per pixel) controls quality instead.
                AVVideoAverageBitRateKey: 6 * self.view.frame.size.width * self.view.frame.size.height,
                AVVideoProfileLevelKey: AVVideoProfileLevelH264Baseline30
            ] as [String: Any],
        ])
        videoWriterInput.expectsMediaDataInRealTime = true
        if assetWriter.canAdd(videoWriterInput) {
            assetWriter.add(videoWriterInput)
            print("video input added")
        } else {
            print("no video input added")
        }
        // Mono AAC audio input for the microphone track.
        let audioSettings = [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVNumberOfChannelsKey: 1,
            AVSampleRateKey: 44100.0,
            AVEncoderBitRateKey: 96000,
        ] as [String: Any]
        audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
        audioWriterInput.expectsMediaDataInRealTime = true
        if assetWriter.canAdd(audioWriterInput) {
            assetWriter.add(audioWriterInput)
            print("audio input added")
        } else {
            print("no audio input added")
        }
        // Moves the writer into the .writing state; the session itself starts
        // with the first captured buffer in start().
        assetWriter.startWriting()
    }
    func start() {
        guard RPScreenRecorder.shared().isAvailable else {
            print("Screen recording is not available on this device")
            return
        }
        self.isCapture = true
        RPScreenRecorder.shared().isMicrophoneEnabled = true
        RPScreenRecorder.shared().startCapture { sample, bufferType, error in
            // While paused, the capture keeps running but every buffer is dropped.
            if self.isCapture == false {
                return
            }
            DispatchQueue.main.async { [weak self] in
                guard let self = self else { return }
                var sampleBuffer = sample
                // First buffer after a resume: grow timeOffset by the length of the pause.
                if self.isUpdatepts {
                    // lastpts was taken from the audio track, so wait for an audio buffer.
                    if bufferType == .video {
                        return
                    }
                    self.isUpdatepts = false
                    var pts = CMSampleBufferGetPresentationTimeStamp(sample)
                    if self.lastpts.flags.contains(.valid) {
                        if self.timeOffset.flags.contains(.valid) {
                            pts = CMTimeSubtract(pts, self.timeOffset)
                        }
                        // Gap between the last buffer before the pause and this one.
                        let offset = CMTimeSubtract(pts, self.lastpts)
                        if self.timeOffset.value == 0 {
                            self.timeOffset = offset
                        } else {
                            self.timeOffset = CMTimeAdd(self.timeOffset, offset)
                        }
                    }
                    self.lastpts = .invalid
                }
                // Shift timestamps back so the paused interval disappears from the file.
                if self.timeOffset.value > 0 {
                    sampleBuffer = self.adjustTime(sampleBuffer: sample, offset: self.timeOffset)
                }
                let ptsBuffer = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
                let dur = CMSampleBufferGetDuration(sampleBuffer)
                if dur.value > 0 && bufferType == .audioMic {
                    self.lastpts = CMTimeAdd(ptsBuffer, dur)
                }
                // startSession(atSourceTime:) must run exactly once, on the first buffer.
                if self.isSessionStarted == false {
                    self.assetWriter.startSession(atSourceTime: ptsBuffer)
                    self.isSessionStarted = true
                }
                // Only screen video and mic audio are written; .audioApp is ignored here.
                if bufferType == .video, self.videoWriterInput.isReadyForMoreMediaData {
                    self.videoWriterInput.append(sampleBuffer)
                }
                if bufferType == .audioMic, self.audioWriterInput.isReadyForMoreMediaData {
                    self.audioWriterInput.append(sampleBuffer)
                }
            }
        } completionHandler: { error in
            if let error = error {
                print("startCapture failed: \(error)")
            }
        }
    }
    func pause() {
        // Stop appending buffers and flag that timeOffset must be recomputed on resume.
        self.isCapture = false
        self.isUpdatepts = true
    }

    func resume() {
        self.isCapture = true
    }
    func stop() {
        guard RPScreenRecorder.shared().isRecording else { return }
        RPScreenRecorder.shared().stopCapture { error in
            self.videoWriterInput.markAsFinished()
            self.audioWriterInput.markAsFinished()
            if error == nil {
                // Finalizes Documents/videoOut.mp4.
                self.assetWriter.finishWriting {
                    print("finished writing \(self.outURLString)")
                }
            }
        }
    }
    // Returns a copy of sampleBuffer with `offset` subtracted from every timestamp,
    // so post-pause buffers continue seamlessly from the pre-pause timeline.
    func adjustTime(sampleBuffer: CMSampleBuffer, offset: CMTime) -> CMSampleBuffer {
        guard offset.value > 0 else { return sampleBuffer }
        var count: CMItemCount = CMSampleBufferGetNumSamples(sampleBuffer)
        let pInfo = UnsafeMutablePointer<CMSampleTimingInfo>.allocate(capacity: count)
        defer { pInfo.deallocate() }
        CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: count, arrayToFill: pInfo, entriesNeededOut: &count)
        for i in 0..<count {
            pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset)
            pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset)
        }
        var out: CMSampleBuffer?
        CMSampleBufferCreateCopyWithNewTiming(allocator: nil, sampleBuffer: sampleBuffer, sampleTimingEntryCount: count, sampleTimingArray: pInfo, sampleBufferOut: &out)
        return out ?? sampleBuffer
    }
}
```
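To use the class, call start() to begin capturing, pause() and resume() around any interval you want removed, and stop() to finalize the file. The offset bookkeeping works like this: if you pause at t = 10 s and resume at t = 14 s, the first post-resume audio buffer pushes timeOffset to roughly 4 s, so a buffer stamped 14.1 s is written at 10.1 s and the movie plays back without a gap.

Once finishWriting completes, you might hand the file to the photo library. A minimal sketch, assuming Photos access is granted (NSPhotoLibraryAddUsageDescription in Info.plist); saveToPhotos(_:) is a hypothetical helper, not part of the class above:

```swift
import Photos

// Hypothetical helper: copy the finished recording into the photo library.
func saveToPhotos(_ fileURL: URL) {
    PHPhotoLibrary.requestAuthorization { status in
        guard status == .authorized else { return }
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: fileURL)
        }) { success, error in
            print(success ? "saved to Photos" : "save failed: \(String(describing: error))")
        }
    }
}
```

You could call saveToPhotos(self.outURLString) from the finishWriting completion block in stop().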