Question
I have put the code here.

I am streaming from a device using C code and receiving image frames with the method below, but when I record those frames as a video, a 20-second recording plays back as only 2-3 seconds.

Can anyone help me, please?

import UIKit
import AVFoundation
import Photos

var recordingStatus: Bool = false
var captureStatus: Bool = false
var recordStatus: Bool = false
var frameQueue = Queue<UIImage>()
var frameCount: Int32 = 0
var mediaFilePath: String = ""
/**
 * Invoked by the native engine.
 * Delivers each received frame, already decoded into a UIImage.
 * @param frame the decoded image frame
 */
@objc(onFrameDataRecv:)
public dynamic func onFrameDataRecv(frame: UIImage) {
    // Show the latest frame in the preview view
    self.frameView.image = frame

    // Capture a still image if requested
    if self.captureStatus {
        DispatchQueue.main.async {
            self.captureImage(frame: frame)
        }
    }

    // While recording, queue the frame for the asset writer
    if self.recordingStatus {
        self.frameQueue.enqueue(frame)
    }
}
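
Note: Queue is not a standard Swift type; my Queue<UIImage> is a custom class. For reference, it is roughly a minimal thread-safe FIFO queue like the sketch below (the exact implementation may differ):

// Minimal thread-safe FIFO queue sketch; Queue<UIImage> above is a custom
// type not shown in full, so assume something like this
class Queue<T> {
    private var elements: [T] = []
    private let lock = NSLock()

    var count: Int {
        lock.lock(); defer { lock.unlock() }
        return elements.count
    }

    func enqueue(_ element: T) {
        lock.lock(); defer { lock.unlock() }
        elements.append(element)
    }

    func dequeue() -> T? {
        lock.lock(); defer { lock.unlock() }
        return elements.isEmpty ? nil : elements.removeFirst()
    }
}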
func writeImagesAsMovie(videoPath: String, videoSize: CGSize, videoFPS: Int32) {
    // Create AVAssetWriter to write video
    guard let assetWriter = createAssetWriter(path: videoPath, size: videoSize) else {
        print("Error converting images to video: AVAssetWriter not created")
        return
    }

    // If here, AVAssetWriter exists, so create an AVAssetWriterInputPixelBufferAdaptor
    let writerInput = assetWriter.inputs.filter { $0.mediaType == AVMediaType.video }.first!
    let sourceBufferAttributes: [String: Any] = [
        kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB),
        kCVPixelBufferWidthKey as String: videoSize.width,
        kCVPixelBufferHeightKey as String: videoSize.height,
    ]
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: sourceBufferAttributes)

    // Start the writing session
    assetWriter.startWriting()
    assetWriter.startSession(atSourceTime: CMTime.zero)
    if pixelBufferAdaptor.pixelBufferPool == nil {
        print("Error converting images to video: pixelBufferPool nil after starting session")
        return
    }

    // Serial queue for requestMediaDataWhenReady
    let mediaQueue = DispatchQueue(label: "mediaInputQueue")

    // Each frame is stamped 1/videoFPS after the previous one
    let frameDuration = CMTimeMake(value: 1, timescale: videoFPS)

    // Add images to the video
    writerInput.requestMediaDataWhenReady(on: mediaQueue) {
        // Drain queued frames while recording; note this loop busy-waits
        // whenever the queue is empty or the input is not ready
        while self.recordingStatus {
            // Check readiness before dequeuing so no frame is lost
            if self.frameQueue.count <= 0 || !writerInput.isReadyForMoreMediaData {
                continue
            }
            guard let frame = self.frameQueue.dequeue() else {
                continue
            }
            // Frame N is stamped at N * (1/videoFPS); this assumes frames
            // really arrive at videoFPS
            let presentationTime = CMTimeMultiply(frameDuration, multiplier: self.frameCount)
            if !self.appendPixelBufferForImageAtURL(frame, pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: presentationTime) {
                print("Error converting images to video: AVAssetWriterInputPixelBufferAdaptor failed to append pixel buffer")
                return
            }
            self.frameCount += 1
        }

        // Recording stopped: finish the file and save it
        writerInput.markAsFinished()
        assetWriter.finishWriting {
            if let error = assetWriter.error {
                print("Error converting images to video: \(error)")
            } else {
                self.saveVideoToLibrary(videoURL: NSURL(fileURLWithPath: videoPath))
                print("Converted images to movie @ \(videoPath)")
            }
        }
    }
}
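
For completeness, this is roughly how I start and stop a recording (simplified; the size and FPS values here are just examples, the real ones come from the stream):

func startRecording() {
    frameCount = 0
    recordingStatus = true
    mediaFilePath = NSTemporaryDirectory() + "capture.mp4"
    // Example size/FPS values only
    writeImagesAsMovie(videoPath: mediaFilePath,
                       videoSize: CGSize(width: 640, height: 480),
                       videoFPS: 25)
}

func stopRecording() {
    recordingStatus = false  // lets the writer loop exit and finish the file
}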
func createAssetWriter(path: String, size: CGSize) -> AVAssetWriter? {
    // Convert <path> to a file URL
    let pathURL = URL(fileURLWithPath: path)

    // Return a new asset writer, or nil on failure
    do {
        // Create the asset writer
        let newWriter = try AVAssetWriter(outputURL: pathURL, fileType: AVFileType.mp4)

        // Define settings for the video input
        let videoSettings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: size.width,
            AVVideoHeightKey: size.height,
        ]

        // Add the video input to the writer
        let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
        newWriter.add(assetWriterVideoInput)

        print("Created asset writer for \(size.width)x\(size.height) video")
        return newWriter
    } catch {
        print("Error creating asset writer: \(error)")
        return nil
    }
}
func appendPixelBufferForImageAtURL(_ image: UIImage, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
    var appendSucceeded = false
    autoreleasepool {
        if let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool {
            // Grab a pixel buffer from the adaptor's pool
            var pixelBuffer: CVPixelBuffer? = nil
            let status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &pixelBuffer)

            if let pixelBuffer = pixelBuffer, status == kCVReturnSuccess {
                // Render the image into the buffer and append it at the given time
                fillPixelBufferFromImage(image: image, pixelBuffer: pixelBuffer)
                appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
            } else {
                NSLog("error: Failed to allocate pixel buffer from pool")
            }
        }
    }
    return appendSucceeded
}
func fillPixelBufferFromImage(image: UIImage, pixelBuffer: CVPixelBuffer) {
    _ = CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))

    let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
    let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedFirst.rawValue)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    let context = CGContext(
        data: pixelData,
        width: Int(image.size.width),
        height: Int(image.size.height),
        bitsPerComponent: 8,
        bytesPerRow: 4 * Int(image.size.width),
        space: rgbColorSpace,
        bitmapInfo: bitmapInfo.rawValue
    )

    // Draw the image into the pixel buffer's memory
    context?.draw(image.cgImage!, in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))

    CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
}
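
One thing I noticed while writing this up: fillPixelBufferFromImage sizes the CGContext from image.size, while the pixel buffers come from a pool sized to videoSize. If those ever differ, a safer variant (a sketch; fillPixelBufferFromImageSafely is a name I made up) would take the geometry and stride from the buffer itself:

func fillPixelBufferFromImageSafely(image: UIImage, pixelBuffer: CVPixelBuffer) {
    _ = CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
    defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) }

    // Take width, height and stride from the buffer, not from the image
    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)
    let context = CGContext(
        data: CVPixelBufferGetBaseAddress(pixelBuffer),
        width: width,
        height: height,
        bitsPerComponent: 8,
        bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
        space: CGColorSpaceCreateDeviceRGB(),
        bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
    )

    if let cgImage = image.cgImage {
        // Scale the frame to fill the buffer, whatever its native size
        context?.draw(cgImage, in: CGRect(x: 0, y: 0, width: width, height: height))
    }
}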
func resizeImage(image: UIImage, newWidth: CGFloat) -> UIImage? {
    let scale = newWidth / image.size.width
    let newHeight = image.size.height * scale
    UIGraphicsBeginImageContext(CGSize(width: newWidth, height: newHeight))
    image.draw(in: CGRect(x: 0, y: 0, width: newWidth, height: newHeight))
    let newImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return newImage
}
func saveVideoToLibrary(videoURL: NSURL) {
    PHPhotoLibrary.requestAuthorization { status in
        // Return if unauthorized
        guard status == .authorized else {
            print("Error saving video: unauthorized access")
            return
        }

        // If here, save the video to the photo library
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: videoURL as URL)
        }) { success, error in
            if !success {
                print("Error saving video: \(String(describing: error))")
            }
        }
    }
}
onFrameDataRecv: receives each image frame from the native engine.
writeImagesAsMovie: writes the queued image frames out as a video file.

Can anyone check this code and tell me where exactly I am going wrong?
I have added a sample video link: https://drive.google.com/file/d/1pa9DBe_v8K9iWPRPj9BhC1ri49Ou2EWj/view

As you can see in the video, I filmed a timer counting down from 01:50:10 to 01:49:50 so I could compare actual seconds against recorded seconds. I am shooting it as a normal video, but the code above records it fast-forwarded, so I need to slow the recording down to real time. Can I do that? As for which seconds are shown: all of them are there, but they play back too fast. Here is the timer website I used: https://www.timeanddate.com/timer/
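
One idea I am thinking about (not sure if it is correct): instead of stamping every frame at a fixed videoFPS, remember the actual arrival time of each frame and use that as its presentation time. Roughly like this (recordingStartTime and timestampQueue are new names, not in the code above):

var recordingStartTime: CFTimeInterval = 0
var timestampQueue = Queue<CFTimeInterval>()

// In onFrameDataRecv, record when each frame actually arrived:
if self.recordingStatus {
    if self.recordingStartTime == 0 {
        self.recordingStartTime = CACurrentMediaTime()
    }
    self.frameQueue.enqueue(frame)
    self.timestampQueue.enqueue(CACurrentMediaTime() - self.recordingStartTime)
}

// In the writer loop, use that real elapsed time instead of frameCount / videoFPS:
let seconds = self.timestampQueue.dequeue() ?? 0
let presentationTime = CMTimeMakeWithSeconds(seconds, preferredTimescale: 600)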
Source: https://stackoverflow.com/questions/59351709/whats-wrong-in-my-videowriting-code-using-image-frame-i-have-shoot-20-sec-video