iOS Tech Share | anyLive Open Source Project


anyLive Overview

anyLive is an open source push/pull streaming project from anyRTC. It uses a cross-platform architecture: a single codebase supports Android, iOS, Windows, Mac, Ubuntu, and other platforms. This article focuses on the iOS implementation of anyLive.

Source Code Download

anylive_github

Development Environment

  • Development tool: Xcode 13, running on a real device
  • Languages: Objective-C, Swift
  • Features implemented: push and pull streaming.

Platform Compatibility

System                 Build environment      CPU architecture
Android 4.4 and above  Android Studio, NDK    armeabi-v7a, arm64-v8a
iOS 9.0 and above      Xcode13                arm64
Windows 7 and above    VS2015, VS2017         x86, x86-64

Project Structure

anyLive implements push streaming, pull streaming (playback), screen sharing, beauty filters, and other features.

[Image: anylive_all]

Sample Code

Demo

[Image: anylive_main]

Code Implementation
​
    var menus = [
        [MenuItem(imageName: "icon_push", title: "Live Push Streaming", subTitle: "Built on a WebRTC core, supports RTMP/HLS/HTTP-FLV")],
        [MenuItem(imageName: "icon_pull", title: "Live Pull Streaming (Playback)", subTitle: "Low-power live player with software/hardware decoding switch, portrait/landscape switch, low latency, and more")],
        [MenuItem(imageName: "icon_video", title: "Short Video Playback", subTitle: "Supports instant first frame, seamless quality switching, adaptive bitrate, and more")]
    ]
​
    let identifier = "ARLiveMainCell"
    lazy var identifierArr: [String] = {
        ["Live_JoinVC", "Player_JoinVC", "Video_JoinVC"]
    }()
​
    override func viewDidLoad() {
        super.viewDidLoad()
​
        // Uncomment the following line to preserve selection between presentations
        // self.clearsSelectionOnViewWillAppear = false
​
        // Uncomment the following line to display an Edit button in the navigation bar for this view controller.
        // self.navigationItem.rightBarButtonItem = self.editButtonItem
        let label = UILabel(frame: CGRect.zero)

        label.textColor = UIColor(hexString: "#C4C4CE")
        label.font = UIFont(name: PingFang, size: 12)
        label.textAlignment = .center
        label.text = "Powered by anyRTC"
        view.addSubview(label)
​
        liveEngine = ARLiveEngineKit(delegate: nil)
    }
​
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        navigationController?.setNavigationBarHidden(true, animated: true)
    }
​
    // MARK: - Table view data source
​
    override func numberOfSections(in tableView: UITableView) -> Int {
        return menus.count
    }
​
    override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        // #warning Incomplete implementation, return the number of rows
        return menus[section].count
    }
​
    override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        let cell: ARMainCell = tableView.dequeueReusableCell(withIdentifier: identifier, for: indexPath) as! ARMainCell
​
        // Configure the cell...
        let menuItem = menus[indexPath.section][indexPath.row]
        cell.mainImageView.image = UIImage(named: menuItem.imageName)
        cell.mainLabel.text = menuItem.title
        cell.subLabel.text = menuItem.subTitle
        cell.expectedImageView.isHidden = (indexPath.section != 2)
        return cell
    }
​
    override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
        if indexPath.section != 2 {
            guard let vc = storyboard?.instantiateViewController(withIdentifier: identifierArr[indexPath.section]) else { return }
            navigationController?.pushViewController(vc, animated: true)
        } else {
            ARToast.showText(text: " Please look forward!", duration: 1.0)
        }
    }

Demo (Push Streaming)

[Image: anylive_live]

Code Implementation
    func initializePusher() {
        /// Create the pusher instance
        livePusher = liveEngine!.createArLivePusher()
        livePusher.setDelegate(self)
        
        /// Set the video encoder parameters for push streaming
        let param = ARLiveVideoEncoderParam(resolution!)
        livePusher.setVideoQuality(param)
        
        livePusher.startCamera(true)
        livePusher.startMicrophone()
        
        /// Set the local camera preview view
        livePusher.setupCameraRender(renderView)
        livePusher.setRenderFill(.fill)
        
        /// Start pushing the stream
        livePusher.startPush(pushUrl)
    }
    
    // MARK: - ARLivePushDelegate

extension ArLiveViewController: ARLivePushDelegate {
    func onError(_ code: ARLiveCode, message msg: String?, extraInfo: [AnyHashable: Any]?) {
        /// Pusher error notification, fired when the pusher runs into an error
        Logger.log(message: "onError \(code.rawValue)", level: .error)
    }
    
    func onWarning(_ code: ARLiveCode, message msg: String?, extraInfo: [AnyHashable: Any]?) {
        /// Pusher warning notification
        Logger.log(message: "onWarning \(code.rawValue)", level: .warning)
    }
    
    func onCaptureFirstAudioFrame() {
        /// Called when the first audio frame has been captured
        Logger.log(message: "onCaptureFirstAudioFrame", level: .info)
    }
    
    func onCaptureFirstVideoFrame() {
        /// Called when the first video frame has been captured
        Logger.log(message: "onCaptureFirstVideoFrame", level: .info)
    }
    
    func onMicrophoneVolumeUpdate(_ volume: Int) {
        /// Microphone capture volume callback
        Logger.log(message: "onMicrophoneVolumeUpdate volume = \(volume)", level: .info)
    }
    
    func onPushStatusUpdate(_ status: ARLivePushStatus, message msg: String?, extraInfo: [AnyHashable: Any]?) {
        /// Pusher connection status callback
        Logger.log(message: "onPushStatusUpdate status = \(status.rawValue)", level: .info)
        stateLabel.text = "\(status.description)"
    }
    
    func onStatisticsUpdate(_ statistics: ARLivePusherStatistics) {
        /// Pusher statistics callback
        // Logger.log(message: "onStatisticsUpdate width = \(statistics.width), height = \(statistics.height), fps = \(statistics.fps), videoBitrate = \(statistics.videoBitrate), audioBitrate = \(statistics.audioBitrate)", level: .info)
    }
    
    func onSnapshotComplete(_ image: UIImage) {
        /// Snapshot callback
        Logger.log(message: "onSnapshotComplete", level: .info)
    }
}
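
When the streaming page is dismissed, the pusher should be stopped and the capture devices released. A minimal teardown sketch is shown below; it assumes ARLivePusher exposes stop calls symmetric to the start calls used above (verify the exact names against the ARLive SDK headers).

    func shutdownPusher() {
        /// Stop pushing the stream (assumed counterpart of startPush)
        livePusher.stopPush()
        /// Release the camera and microphone (assumed counterparts of startCamera/startMicrophone)
        livePusher.stopCamera()
        livePusher.stopMicrophone()
    }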
​
Demo (Pull Streaming)

[Image: arlive_pull]

Code Implementation
    func initializePlayer() {
        /// Create the player (pull stream) instance
        livePlayer = liveEngine!.createArLivePlayer()
        livePlayer.setDelegate(self)
        
        /// Set the video render view for the player
        livePlayer.setRenderView(renderView)
        livePlayer.setRenderFill(renderMode)
        
        /// Set the minimum and maximum time (in seconds) for automatic cache adjustment
        livePlayer.setCacheParams(1.0, maxTime: 100)
        
        /// Start playing the audio/video stream
        livePlayer.startPlay(pullUrl)
    }
    
    // MARK: - ARLivePlayDelegate

extension ArPlayerViewController: ARLivePlayDelegate {
    func onError(_ player: ARLivePlayer, code: ARLiveCode, message msg: String?, extraInfo: [AnyHashable: Any]?) {
        /// Player error notification, fired when the player runs into an error
        Logger.log(message: "onError code = \(code.rawValue)", level: .info)
    }
    
    func onWarning(_ player: ARLivePlayer, code: ARLiveCode, message msg: String?, extraInfo: [AnyHashable: Any]?) {
        /// Player warning notification
        Logger.log(message: "onWarning code = \(code.rawValue)", level: .info)
    }
    
    func onVideoPlayStatusUpdate(_ player: ARLivePlayer, status: ARLivePlayStatus, reason: ARLiveStatusChangeReason, extraInfo: [AnyHashable: Any]?) {
        /// Player video status change notification
        Logger.log(message: "onVideoPlayStatusUpdate status = \(status.rawValue), reason = \(reason.rawValue)", level: .info)
        liveStatus = status
        stateLabel.text = "\(status.description)"
    }
    
    func onAudioPlayStatusUpdate(_ player: ARLivePlayer, status: ARLivePlayStatus, reason: ARLiveStatusChangeReason, extraInfo: [AnyHashable: Any]?) {
        /// Player audio status change notification
        Logger.log(message: "onAudioPlayStatusUpdate status = \(status.rawValue) reason = \(reason.rawValue)", level: .info)
    }
    
    func onPlayoutVolumeUpdate(_ player: ARLivePlayer, volume: Int) {
        /// Playback volume callback
        Logger.log(message: "onPlayoutVolumeUpdate volume = \(volume)", level: .info)
    }
    
    func onStatisticsUpdate(_ player: ARLivePlayer, statistics: ARLivePlayerStatistics?) {
        /// Player statistics callback
        if let statistics = statistics {
            Logger.log(message: "onStatisticsUpdate width = \(statistics.width), height = \(statistics.height), fps = \(statistics.fps), videoBitrate = \(statistics.videoBitrate), audioBitrate = \(statistics.audioBitrate)", level: .info)
        }
    }
    
    func onSnapshotComplete(_ player: ARLivePlayer, image: UIImage) {
        /// Snapshot callback: save the image to the photo album and show a thumbnail for 2 seconds
        UIImageWriteToSavedPhotosAlbum(image, self, #selector(saveImage(image:didFinishSavingWithError:contextInfo:)), nil)
        
        NSObject.cancelPreviousPerformRequests(withTarget: self, selector: #selector(removeSnapshot), object: nil)
        snapImageView.image = image
        
        let imageWidth = image.size.width / 2
        let imageHeight = image.size.height / 2
        snapImageView.frame = CGRect(x: ARScreenWidth - imageWidth - 24, y: 150, width: imageWidth, height: imageHeight)
        view.addSubview(snapImageView)
        perform(#selector(removeSnapshot), with: nil, afterDelay: 2)
        
        Logger.log(message: "onSnapshotComplete success, imageWidth = \(image.size.width), imageHeight = \(image.size.height)", level: .info)
    }
    
    func onRenderVideoFrame(_ player: ARLivePlayer, frame videoFrame: ARLiveVideoFrame?) {
        /// Custom video render callback
        Logger.log(message: "onRenderVideoFrame", level: .info)
    }
    
    func onReceiveSeiMessage(_ player: ARLivePlayer, payloadType: Int32, data: Data?) {
        /// Callback for receiving an SEI message
        Logger.log(message: "onReceiveSeiMessage payloadType = \(payloadType)", level: .info)
    }
}
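
Similarly, when leaving the playback page the player should be stopped. A minimal sketch, assuming ARLivePlayer exposes a stopPlay() counterpart to startPlay (check the SDK headers):

    func shutdownPlayer() {
        /// Stop pulling and rendering the stream (assumed counterpart of startPlay)
        livePlayer.stopPlay()
    }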
​
Demo (Screen Sharing)

[Image: arlive_screen]

Code Implementation
    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
        DispatchQueue.main.async {
            switch sampleBufferType {
            case RPSampleBufferType.video:
                // Handle video sample buffer
                ARUploader.sendVideoBuffer(sampleBuffer)
            case RPSampleBufferType.audioApp:
                // Handle audio sample buffer for app audio
                ARUploader.sendAudioAppBuffer(sampleBuffer)
            case RPSampleBufferType.audioMic:
                // Handle audio sample buffer for mic audio
                ARUploader.sendAudioMicBuffer(sampleBuffer)
                break
            @unknown default:
                // Handle other sample buffer types
                fatalError("Unknown type of sample buffer")
            }
        }
    }
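
The processSampleBuffer callback above runs inside a ReplayKit Broadcast Upload Extension, and the ARUploader helper it calls lives in that extension as well. For context, a minimal extension skeleton is sketched below (SampleHandler is the ReplayKit template default; the start/stop wiring is an assumption, not the project's exact code).

    import ReplayKit
    
    class SampleHandler: RPBroadcastSampleHandler {
        override func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) {
            // The user started the broadcast: the pusher (ARUploader) can be set up here.
        }
        
        override func broadcastFinished() {
            // The user stopped the broadcast: stop pushing and release resources here.
        }
        
        // processSampleBuffer(_:with:) forwards each captured buffer to ARUploader,
        // as shown above.
    }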
​
    private static let liverPusher: ARLivePusher = {
        let livePusher = liveEngine.createArLivePusher()
        
        let screenSize = UIScreen.main.currentMode?.size
        let screenWidth = screenSize?.width
        let screenHeight = screenSize?.height
        /// Set the video encoder parameters for push streaming
        let videoParam = ARLiveVideoEncoderParam()
        videoParam.videoResolution = .resolution640x480
        videoParam.videoResolutionMode = .portrait
        videoParam.videoScaleMode = .fit
        livePusher.setVideoQuality(videoParam)
        livePusher.startMicrophone()
        
        /// Enable custom (external) audio/video capture
        livePusher.enableCustomAudioCapture(true)
        livePusher.enableCustomVideoCapture(true)
        /// Start pushing the stream (fill in the push URL)
        livePusher.startPush(<#T##String#>)
        return livePusher
    }()
    
    static func sendAudioAppBuffer(_ sampleBuffer: CMSampleBuffer) {
        ARAudioTube.liverPusher(liverPusher, pushAudioCMSampleBuffer: sampleBuffer, resampleRate: audioSampleRate, type: .app)
    }
    
    static func sendAudioMicBuffer(_ sampleBuffer: CMSampleBuffer) {
        ARAudioTube.liverPusher(liverPusher, pushAudioCMSampleBuffer: sampleBuffer, resampleRate: audioSampleRate, type: .mic)
    }
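
ARUploader.sendVideoBuffer is referenced in processSampleBuffer but not shown in the article. A rough sketch of the video path is given below; extracting the CVPixelBuffer is standard CoreMedia, while the exact ARLivePusher call for feeding custom video frames is SDK-specific and is only indicated in a comment (an assumption to verify against the ARLive headers).

    static func sendVideoBuffer(_ sampleBuffer: CMSampleBuffer) {
        /// Pull the pixel buffer out of the screen-capture sample buffer (standard CoreMedia)
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        /// Wrap the pixel buffer (e.g. in an ARLiveVideoFrame) and hand it to liverPusher's
        /// custom video-capture input; the exact push call depends on the SDK version.
        _ = pixelBuffer // placeholder so the sketch compiles; replace with the actual push call
    }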

Conclusion

Finally, due to limited time, the project still has some bugs and features that need polishing. It is provided for reference only, and you are welcome to fork it. If you find any shortcomings, please point them out via issues. Here is the GitHub download link once more.

GitHub open source download link
