|
@@ -0,0 +1,450 @@
|
|
|
+//
|
|
|
+// LivePhotoMaker.swift
|
|
|
+// MediaManagerKit
|
|
|
+//
|
|
|
+// Created by Max Mg on 2024/7/20.
|
|
|
+//
|
|
|
+
|
|
|
import AVFoundation
import Foundation
import UIKit
//import ExtensionsKit
|
|
|
+
|
|
|
public class VideoRecorder: NSObject {
    public static let shared = VideoRecorder()

    /// (video URL, still-image URL, error message)
    public typealias LivePhotoCompletionHandler = (URL?, URL?, String?) -> Void

    /// Mutable state for a single recording session.
    class RecordConfig {
        /// Target capture length, in seconds.
        var duration: TimeInterval = 1
        /// Frames appended per second of output video.
        var frameRate: Int = 30
        /// The view that is snapshotted for each frame.
        var captureView: UIView!
        /// Invoked once when the session ends (nil error on success).
        var completion: ((Error?) -> Void)?
        /// Presentation time (seconds) of the next frame to append.
        var recordedDuration: TimeInterval = 0
        /// Wall-clock time the session started.
        var startTime: CFTimeInterval = CACurrentMediaTime()
        /// Total frames required: frameRate * duration.
        /// NOTE(review): misspelled name kept for source compatibility.
        var reqiureFrames: Int = 0
        /// Frames appended so far.
        var recordFrames: Int = 0
    }

    lazy var recordConfig = RecordConfig()

    // AVFoundation writing pipeline; rebuilt for every session.
    var assetWriter: AVAssetWriter!
    var assetWriterInput: AVAssetWriterInput!
    var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor!

    var timer: Timer?
    lazy var isRecording = false
    var displayLink: CADisplayLink?
    let queue = DispatchQueue(label: "xxxxxx", attributes: .concurrent)

    /// Destination of the recorded movie in the temp directory.
    /// NOTE(review): misspelled name kept for source compatibility.
    var videoOutuptURL: URL {
        let temp = FileManager.default.temporaryDirectory
        return temp.appendingPathComponent("livePhotoOutput.mov")
    }

    override init() {
        super.init()
    }

    /// Records `view` for `duration` seconds, then converts the resulting
    /// video into a Live Photo pair via `LivePhotoCreater`.
    /// - Parameters:
    ///   - view: View to capture.
    ///   - duration: Capture length in seconds (recorded at 30 fps).
    ///   - outputFinder: Directory the Live Photo assets are written into.
    ///   - completion: (video URL, image URL, error message).
    public func capture(in view: UIView,
                        duration: TimeInterval,
                        outputFinder: URL,
                        completion: LivePhotoCompletionHandler?) {

        captureVideo(in: view, duration: duration, frameRate: 30) { videoURL, error in
            guard let videoURL = videoURL else {
                completion?(nil, nil, error?.localizedDescription)
                return
            }
            LivePhotoCreater.shared.saveLivePhoto(from: videoURL, outputDirectory: outputFinder) { videoURL, imageURL, msg in
                completion?(videoURL, imageURL, msg)
            }
        }
    }

    /// Records a view into a video file in the temp directory.
    /// - Parameters:
    ///   - view: Target view.
    ///   - duration: Length in seconds.
    ///   - frameRate: Frames per second.
    ///   - completion: URL of the recorded video, or an error.
    public func captureVideo(in view: UIView,
                             duration: TimeInterval,
                             frameRate: Int,
                             completion: ((URL?, Error?) -> Void)?) {

        guard !isRecording else {
            // FIX: notify the *current* caller. Previously the stale
            // `recordConfig.completion` belonging to the in-flight session was
            // invoked here, so the rejected caller never received a callback.
            completion?(nil, NSError(domain: "Video is Recording, try again later", code: 401))
            return
        }

        recordConfig.duration = duration
        recordConfig.frameRate = frameRate
        recordConfig.reqiureFrames = Int(Double(frameRate) * duration)
        recordConfig.recordFrames = 0
        recordConfig.captureView = view
        recordConfig.recordedDuration = 0
        recordConfig.completion = { [weak self] error in
            if let error = error {
                completion?(nil, error)
            } else {
                completion?(self?.videoOutuptURL, nil)
            }
        }

        startRecordVideo()
    }
}
|
|
|
+
|
|
|
// MARK: -- Video recording
extension VideoRecorder {
    /// Builds the AVAssetWriter pipeline, primes it with a burst of first
    /// frames, then schedules a timer appending one frame per 1/frameRate s.
    func startRecordVideo() {
        guard !isRecording, let view = recordConfig.captureView else {
            recordConfig.completion?(NSError(domain: "Video is Recording, try again later", code: 401))
            return
        }

        // AVAssetWriter refuses to overwrite an existing file, so remove any
        // stale output from a previous run first.
        if FileManager.default.fileExists(atPath: videoOutuptURL.path) {
            try? FileManager.default.removeItem(atPath: videoOutuptURL.path)
        }

        do {
            assetWriter = try AVAssetWriter(outputURL: videoOutuptURL, fileType: .mov)
        } catch {
            print("Error creating AVAssetWriter: \(error)")
            recordConfig.completion?(error)
            // FIX: bail out on failure. Previously execution fell through and
            // crashed on the force-unwrapped nil `assetWriter` below.
            return
        }

        // Output dimensions in pixels (points * screen scale).
        let size = CGSize(width: view.bounds.width * UIScreen.main.scale, height: view.bounds.height * UIScreen.main.scale)
        let outputSettings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: size.width,
            AVVideoHeightKey: size.height
        ]
        assetWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: outputSettings)
        assetWriterInput.expectsMediaDataInRealTime = true
        assetWriter.add(assetWriterInput)

        let pixelBufferAttributes: [String: Any] = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32ARGB,
            kCVPixelBufferWidthKey as String: size.width,
            kCVPixelBufferHeightKey as String: size.height
        ]
        pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterInput, sourcePixelBufferAttributes: pixelBufferAttributes)

        assetWriter.startWriting()
        assetWriter.startSession(atSourceTime: .zero)
        isRecording = true
        recordConfig.startTime = CACurrentMediaTime()

        // Prime the writer with several copies of the first frame so the
        // movie does not open on an empty segment.
        let videoSize = self.recordConfig.captureView.bounds.size
        if let pixelBuffer = self.createPixelBuffer(from: self.recordConfig.captureView, videoSize: videoSize) {
            for _ in 0..<5 {
                // Busy-wait until the input accepts another sample.
                while true {
                    if self.pixelBufferAdaptor.assetWriterInput.isReadyForMoreMediaData {
                        let presentationTime = CMTime(seconds: Double(recordConfig.recordedDuration), preferredTimescale: 600)
                        self.pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)

                        recordConfig.recordFrames += 1
                        recordConfig.recordedDuration += 1.0 / CGFloat(recordConfig.frameRate)
                        Log("append===\(recordConfig.recordFrames)")
                        break
                    }
                }
            }
        }

        // Subsequent frames are appended on a repeating timer.
        timer = Timer.scheduledTimer(withTimeInterval: 1.0 / CGFloat(recordConfig.frameRate), repeats: true) { [weak self] _ in
            self?.timerHandler()
        }
    }

    /// Stops the frame timer and finalizes the movie file, reporting the
    /// outcome through `recordConfig.completion`.
    func stopRecording() {
        timer?.invalidate()
        timer = nil
        displayLink?.invalidate()
        displayLink = nil

        assetWriterInput.markAsFinished()
        assetWriter.finishWriting { [weak self] in
            guard let self = self else { return }
            self.isRecording = false
            if self.assetWriter.status == .completed {
                self.recordConfig.completion?(nil)
            } else {
                // FIX: qualify with `self.` — the bare `assetWriter.error`
                // inside this escaping closure does not compile before
                // Swift 5.8's implicit-self relaxation.
                self.recordConfig.completion?(self.assetWriter.error)
            }
        }
    }

    /// Timer callback: appends one frame, or stops once the required frame
    /// count has been reached.
    @objc func timerHandler() {
        if recordConfig.recordFrames >= recordConfig.reqiureFrames {
            stopRecording()
            return
        }
        let presentationTime = CMTime(seconds: Double(recordConfig.recordedDuration), preferredTimescale: 600)
        self.appendPixelBuffer(at: presentationTime)
        recordConfig.recordFrames += 1
        recordConfig.recordedDuration += 1.0 / CGFloat(recordConfig.frameRate)
        Log("===\(recordConfig.recordedDuration)")
    }
}
|
|
|
+
|
|
|
extension VideoRecorder {
    /// Snapshots the capture view and appends the frame at `time` if the
    /// writer input is ready; otherwise the frame is silently dropped.
    func appendPixelBuffer(at time: CMTime) {
        let size = self.recordConfig.captureView.bounds.size
        if let pixelBuffer = self.createPixelBuffer(from: self.recordConfig.captureView, videoSize: size),
           self.pixelBufferAdaptor.assetWriterInput.isReadyForMoreMediaData {
            self.pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: time)
        }
    }

    /// Renders `view` into a freshly allocated 32ARGB pixel buffer.
    /// - Parameters:
    ///   - view: View to render via `drawHierarchy(in:afterScreenUpdates:)`.
    ///   - videoSize: Size in points; scaled to pixels by the screen scale.
    /// - Returns: The rendered buffer, or nil if allocation/rendering failed.
    func createPixelBuffer(from view: UIView, videoSize: CGSize) -> CVPixelBuffer? {
        let contextSize = CGSize(width: videoSize.width * UIScreen.main.scale, height: videoSize.height * UIScreen.main.scale)

        let pixelBufferAttributes: [String: Any] = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32ARGB,
            kCVPixelBufferWidthKey as String: contextSize.width,
            kCVPixelBufferHeightKey as String: contextSize.height
        ]

        var pixelBuffer: CVPixelBuffer?
        let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(contextSize.width), Int(contextSize.height), kCVPixelFormatType_32ARGB, pixelBufferAttributes as CFDictionary, &pixelBuffer)

        guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
            print("Error creating pixel buffer")
            return nil
        }

        CVPixelBufferLockBaseAddress(buffer, .init(rawValue: 0))
        guard let context = CGContext(data: CVPixelBufferGetBaseAddress(buffer),
                                      width: Int(contextSize.width),
                                      height: Int(contextSize.height),
                                      bitsPerComponent: 8,
                                      bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
                                      space: CGColorSpaceCreateDeviceRGB(),
                                      bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue) else {
            // FIX: balance the lock on the failure path — previously the
            // buffer was abandoned while still locked.
            CVPixelBufferUnlockBaseAddress(buffer, .init(rawValue: 0))
            return nil
        }

        // Flip vertically: UIKit's origin is top-left, CoreGraphics' is
        // bottom-left.
        context.translateBy(x: contextSize.width / 2, y: contextSize.height / 2)
        context.scaleBy(x: 1, y: -1)
        context.translateBy(x: -contextSize.width / 2, y: -contextSize.height / 2)

        UIGraphicsPushContext(context)
        view.drawHierarchy(in: CGRect(origin: .zero, size: contextSize), afterScreenUpdates: false)
        UIGraphicsPopContext()

        CVPixelBufferUnlockBaseAddress(buffer, .init(rawValue: 0))

        return buffer
    }

    /// Renders `layer` into a screen-sized UIImage.
    /// - Returns: The rendered image, or nil if no context is available.
    func drawViewHierarchyToImage(layer: CALayer) -> UIImage? {
        // Create the drawing context.
        UIGraphicsBeginImageContextWithOptions(UIScreen.main.bounds.size, false, UIScreen.main.scale)
        defer { UIGraphicsEndImageContext() }

        guard let context = UIGraphicsGetCurrentContext() else { return nil }

        // Draw the layer's contents into the context.
        layer.render(in: context)

        // Harvest the rendered image.
        let image = UIGraphicsGetImageFromCurrentImageContext()
        return image
    }
}
|
|
|
+
|
|
|
extension VideoRecorder {
    /// Attempts to render `view`'s layer into an mp4 using
    /// AVAssetExportSession + AVVideoCompositionCoreAnimationTool.
    /// - Parameters:
    ///   - view: View whose layer should be rendered.
    ///   - size: Render size of the output composition.
    ///   - duration: Intended video length in seconds.
    ///   - completion: Output URL on success; NOTE(review): never called on
    ///     the failed/cancelled/guard paths, so callers cannot distinguish
    ///     failure from "still running" — verify before relying on this.
    func generateVideoFromAnimation(view: UIView, size: CGSize, duration: Double, completion: ((URL?) -> Void)?) {
        // Video parameters.
        let framePerSecond = 30
        // NOTE(review): computed but never used.
        let totalFrames = Int(duration * Double(framePerSecond))

        let mainComposition = AVMutableComposition()
        let compositionVideoTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        // NOTE(review): this inserts a time range of the composition's own
        // first track into itself (the track just added above); if no track
        // exists the force-unwrap crashes, and either way there is no source
        // media to copy. Confirm this method produces playable output at all.
        try? compositionVideoTrack?.insertTimeRange(CMTimeRange(start: .zero, duration: CMTime(value: 1, timescale: CMTimeScale(framePerSecond))), of: mainComposition.tracks.first!, at: CMTime.zero)

        let videoComposition = AVMutableVideoComposition()
        videoComposition.frameDuration = CMTime(value: 1, timescale: CMTimeScale(framePerSecond))
        videoComposition.renderSize = size
        // NOTE(review): the same layer is passed as both the video layer and
        // its parent; the documented usage pairs a dedicated video layer with
        // a separate animation (parent) layer — confirm intent.
        let tool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: view.layer, in: view.layer)
        videoComposition.animationTool = tool

        let videoInstruction = AVMutableVideoCompositionInstruction()
        // NOTE(review): the instruction covers one frame duration (1/30 s),
        // not `duration` seconds — presumably a bug; confirm.
        videoInstruction.timeRange = CMTimeRange(start: .zero, duration: CMTime(value: 1, timescale: CMTimeScale(framePerSecond)))
        videoComposition.instructions = [videoInstruction]

        // Export destination path and file name (overwritten if present).
        let outputPath = NSTemporaryDirectory() + "output.mp4"
        let outputURL = URL(fileURLWithPath: outputPath)

        let fileManager = FileManager.default
        if fileManager.fileExists(atPath: outputPath) {
            try? fileManager.removeItem(atPath: outputPath)
        }

        guard let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality) else {
            return
        }
        exporter.outputURL = outputURL
        exporter.outputFileType = .mp4
        exporter.shouldOptimizeForNetworkUse = true
        exporter.videoComposition = videoComposition
        exporter.audioMix = AVAudioMix()

        exporter.exportAsynchronously {
            switch exporter.status {
            case .completed:
                print("视频生成成功: \(outputURL)")
                completion?(outputURL)
            case .failed:
                print("视频生成失败: \(String(describing: exporter.error))")
            case .cancelled:
                print("视频生成取消")
            default:
                break
            }
        }
    }
}
|
|
|
+
|
|
|
+
|
|
|
+//import ImageIO
|
|
|
+//import MobileCoreServices
|
|
|
+import ReplayKit
|
|
|
+
|
|
|
+typealias StartRecordHandler = () -> Void
|
|
|
extension VideoRecorder {

    /*
     Live Photo creation:
     1. Screen-record to produce a video.
     2. Extract the last frame as a still image and tag it with Live Photo metadata.
     3. Convert the video to .mov and tag it with the same metadata.
     4. Merge the matching image + video into a Live Photo and save it to the photo library.
     */
    /// Screen-records, then converts the recording into a Live Photo.
    /// - Parameters:
    ///   - duration: Recording length in seconds (capped at 5 downstream).
    ///   - outputDirectory: Directory the Live Photo assets are written into.
    ///   - prepearHandler: Pre-recording hook (countdown / screen cleanup);
    ///     the caller invokes the closure it receives to actually begin.
    ///   - completion: (video URL, image URL, error message).
    func saveLivePhoto(duration: TimeInterval, outputDirectory: URL,
                       prepearHandler: ((StartRecordHandler?) -> Void)?,
                       completion: ((URL?, URL?, String?) -> Void)?) {

        startRecording(duration: duration, prepearHandler: prepearHandler) { videoURL, erMsg in
            guard let videoURL = videoURL else {
                completion?(nil, nil, erMsg ?? "Record Failure~")
                return
            }
            LivePhotoCreater.shared.saveLivePhoto(from: videoURL, outputDirectory: outputDirectory, completion: completion)
        }
    }

    /// Starts a ReplayKit screen-recording task.
    /// - Parameters:
    ///   - duration: Length in seconds (clamped to at most 5).
    ///   - prepearHandler: Pre-recording callback so the business layer can
    ///     run a countdown / clear the screen before recording begins.
    ///   - completion: (URL of the recorded video, error message).
    func startRecording(duration: TimeInterval,
                        prepearHandler: (((StartRecordHandler)?) -> Void)?,
                        completion: ((URL?, String?) -> Void)?) {
        guard RPScreenRecorder.shared().isAvailable else {
            Log("RPScreenRecorder isAvailable = false")
            completion?(nil, "ScreenRecorder unAvailable".localized)
            return
        }

        let livePhotoQueue = DispatchQueue(label: "queue_recording_video")

        livePhotoQueue.async {
            Log("==点击开始")

            // 1. Record the video.
            // Grab the shared RPScreenRecorder.
            let recorder = RPScreenRecorder.shared()
            recorder.isMicrophoneEnabled = false
            recorder.isCameraEnabled = false

            let duration = min(5, duration)
            // First start: effectively triggers the system permission prompt.
            // NOTE(review): on success a *second* startRecording is issued
            // from `handler` below while this one may already be recording —
            // confirm ReplayKit tolerates the double start.
            recorder.startRecording { error in
                if let error = error {
                    Log(error.localizedDescription)
                    if recorder.isRecording {
                        recorder.stopRecording()
                    }
                    completion?(nil, error.localizedDescription)
                } else {
                    Log("已经获取权限")

                    // Closure the caller invokes to actually begin recording.
                    let handler = {
                        Log("==开始录制")
                        recorder.startRecording { _ in
                        }
                    }
                    prepearHandler?(handler)
                }
            }

            // NOTE(review): the stop is scheduled `duration` after the
            // permission request, not after `handler` runs, so time spent in
            // `prepearHandler` (countdown etc.) eats into the recording.
            DispatchQueue.global().asyncAfter(deadline: .now()+duration) {
                let ts = Date().timeIntervalSince1970
                let tempDirectoryPath = NSTemporaryDirectory()
                let path = (tempDirectoryPath as NSString).appendingPathComponent("\(Int(ts)).mov")

                let documentURL = TSFileManagerTool.documentsDirectory.appendingPathComponent("\(Int(ts)).mov")

                let recorder = RPScreenRecorder.shared()

                // Stop recording, writing the movie to the temp path.
                let pathURL = URL(fileURLWithPath: path)
                recorder.stopRecording(withOutput: pathURL) { (error) in
                    Log("==录制结束")

                    // Keep a copy in Documents; the temp URL is what callers get.
                    TSFileManagerTool.copyFileWithOverwrite(from: pathURL, to: documentURL)

                    if let error = error {
                        Log(error)
                        completion?(nil, error.localizedDescription)
                    }
                    else {
                        completion?(pathURL, nil)
                    }
                }
            }
        }
    }
}
|
|
|
+
|