Hello everyone,
I've recently been working on a screen recording feature in one of my projects, and it turned out to be an interesting topic that I want to share with my blog's followers.
The code below walks through the implementation in Swift.
import Foundation
import ReplayKit
import AVKit
/// A customised class for Video & Audio recording functionality
class ScreenRecorder {
var assetWriter: AVAssetWriter?
var videoInput: AVAssetWriterInput?
var audioInput: AVAssetWriterInput?
var recorder = RPScreenRecorder.shared()
var fileURL: URL?
var timer: Timer?
// MARK: ====================================
// MARK: ScreenRecorder with Capture Screen event
// MARK: ====================================
func startRecording(withFilepath fileURL: URL, recordingHandler: @escaping (Error?) -> Void) {
do {
assetWriter = try AVAssetWriter(outputURL: fileURL, fileType: .mp4)
//-- Video Input
let videoOutputSettings: [String: Any] = [
AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoWidthKey: UIScreen.main.bounds.size.width,
AVVideoHeightKey: UIScreen.main.bounds.size.height,
AVVideoCompressionPropertiesKey: [AVVideoAverageBitRateKey: 2300000,
AVVideoProfileLevelKey: AVVideoProfileLevelH264High40]
]
//-- Audio Input
var channelLayout = AudioChannelLayout()
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_D
let audioOutputSettings: [String: Any] = [
AVNumberOfChannelsKey: 6,
AVFormatIDKey: kAudioFormatMPEG4AAC_HE,
AVSampleRateKey: 44100,
AVEncoderBitRateKey: 128000,
AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout))
]
audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
audioInput?.expectsMediaDataInRealTime = true
videoInput?.expectsMediaDataInRealTime = true
assetWriter?.add(audioInput!)
assetWriter?.add(videoInput!)
recorder.isMicrophoneEnabled = true
recorder.startCapture(handler: { (sample, bufferType, error) in
// Propagate capture errors once instead of firing the handler for every sample buffer
if let error = error {
recordingHandler(error)
return
}
if CMSampleBufferDataIsReady(sample) {
if self.assetWriter?.status == AVAssetWriter.Status.unknown {
self.assetWriter?.startWriting()
self.assetWriter?.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
}
if self.assetWriter?.status == AVAssetWriter.Status.failed {
print("Error occurred, status = \(String(describing: self.assetWriter?.status.rawValue)), \(String(describing: self.assetWriter?.error?.localizedDescription)) \(String(describing: self.assetWriter?.error))")
// cancelWriting() terminates the session, so no endSession call is needed after it
self.assetWriter?.cancelWriting()
recordingHandler(self.assetWriter?.error)
return
}
//-- Video Data
//-- Video Data
if bufferType == .video, self.videoInput?.isReadyForMoreMediaData == true {
self.videoInput?.append(sample)
}
//-- Audio Data
if bufferType == .audioApp || bufferType == .audioMic, self.audioInput?.isReadyForMoreMediaData == true {
self.audioInput?.append(sample)
}
}
}, completionHandler: { (error) in
recordingHandler(error)
})
} catch {
recordingHandler(error)
}
}
func stopRecording(handler: @escaping (Error?) -> Void) {
recorder.stopCapture { (error) in
handler(error)
// Finish writing only if the writer is still actively writing
guard self.assetWriter?.status == .writing else { return }
self.audioInput?.markAsFinished()
self.videoInput?.markAsFinished()
self.assetWriter?.finishWriting {
// The finished movie file is now available at the asset writer's outputURL
}
}
}
class func createReplaysFolder() {
// path to documents directory
let documentDirectoryPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first
if let documentDirectoryPath = documentDirectoryPath {
// create the custom folder path
let replayDirectoryPath = documentDirectoryPath.appending("/Replays")
let fileManager = FileManager.default
if !fileManager.fileExists(atPath: replayDirectoryPath) {
do {
try fileManager.createDirectory(atPath: replayDirectoryPath,
withIntermediateDirectories: false,
attributes: nil)
} catch {
print("Error creating Replays folder in documents dir: \(error)")
}
}
}
}
class func filePath(_ fileName: String) -> String {
createReplaysFolder()
let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
let documentsDirectory = paths[0] as String
let filePath: String = "\(documentsDirectory)/Replays/\(fileName).mp4"
return filePath
}
}
// MARK: ====================================
// MARK: ScreenRecorder with Recording Event
// MARK: ====================================
extension ScreenRecorder {
//-- Start screen recording event
func startRecording(recordingHandler: @escaping(String?) -> Void) {
guard recorder.isAvailable else {
recordingHandler("Recording is not available at this time.")
return
}
//recorder.delegate = self
recorder.isMicrophoneEnabled = true
recorder.startRecording { (error) in
if error == nil {
#if DEBUG
print("Started Recording Successfully")
#endif
recordingHandler(nil)
} else {
recordingHandler(error?.localizedDescription)
}
}
}
//-- Stop screen recording event
func stopRecording(recordingHandler: @escaping(RPPreviewViewController?, Error?) -> Void) {
recorder.stopRecording { (preview, error) in
recordingHandler(preview, error)
}
}
}
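A quick aside on this recording-event variant: when recording stops, ReplayKit hands back an RPPreviewViewController for trimming and sharing the clip. Below is a minimal sketch of presenting it; RecordingViewController is an assumed host controller (declared in the usage section that follows), not part of ScreenRecorder.swift:
extension RecordingViewController: RPPreviewViewControllerDelegate {
    /// Stops the recording-event variant and presents ReplayKit's built-in preview screen
    func stopAndShowPreview() {
        screenRecorder.stopRecording { [weak self] (preview, error) in
            guard let self = self, error == nil, let preview = preview else { return }
            DispatchQueue.main.async {
                preview.previewControllerDelegate = self
                self.present(preview, animated: true, completion: nil)
            }
        }
    }
    /// Called when the user closes the preview screen
    func previewControllerDidFinish(_ previewController: RPPreviewViewController) {
        previewController.dismiss(animated: true, completion: nil)
    }
}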
To use the recorder, create a ScreenRecorder.swift file and add the ScreenRecorder class and its extension to it. Then call it from whichever view controller needs the recording functionality.
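The hosting view controller keeps one ScreenRecorder instance alive as a property. A minimal sketch, with RecordingViewController as my placeholder name (the same controller assumed in the preview sketch above):
import UIKit
import ReplayKit

class RecordingViewController: UIViewController {
    // One recorder instance shared by the start/stop handlers below
    let screenRecorder = ScreenRecorder()
}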
For example:
/// Video button click event
@IBAction func btnVideoClicked(_ sender: UIButton) {
sender.isSelected = !sender.isSelected
if sender.isSelected {
startScreenRecording(sender)
} else {
stopScreenRecording(sender)
}
}
// MARK: ====================================
// MARK: Start Capturing Part
// MARK: ====================================
/// Start event for Screen Recording
func startScreenRecording(_ sender: UIButton) {
// convertToStringWith(...) is a project-specific Date extension that builds a unique file name
let fileURL = URL(fileURLWithPath: ScreenRecorder.filePath("Recording_\(Date().convertToStringWith(Constants.DateFormates.filedateformat)!)"))
screenRecorder.startRecording(withFilepath: fileURL) { (error) in
if let error = error {
#if DEBUG
print(error)
#endif
// Recording failed to start, so reset the button state
sender.isSelected = !sender.isSelected
}
}
}
// MARK: ====================================
// MARK: Stop Capturing Part
// MARK: ====================================
/// Stop event for Screen Recording
func stopScreenRecording(_ sender: UIButton) {
screenRecorder.stopRecording(handler: { (error) in
if error == nil {
// An alert will be displayed to save or delete the recorded video
self.displayAlertToSaveOrDeleteVideo()
} else {
#if DEBUG
print(error ?? "")
#endif
// Stopping failed, so restore the button's recording state
sender.isSelected = !sender.isSelected
}
})
}
/// A function to display an alert to save or delete the recorded video
func displayAlertToSaveOrDeleteVideo() {
DispatchQueue.main.async(execute: {
let alert = UIAlertController(title: "Recording Finished", message: "Do you want to save or delete recording?", preferredStyle: .alert)
let deleteAction = UIAlertAction(title: "Delete", style: .destructive, handler: { (action: UIAlertAction) in
DispatchQueue.main.async(execute: {
//-- Share annoted image's local path to upload it on server.
if let savedAnnotedFilePath = screenRecorder.assetWriter?.outputURL.path, FileManager.shared.fileExists(atPath: savedAnnotedFilePath) {
do {
try AppInfo.shared.fileManager.removeItem(atPath: savedAnnotedFilePath)
// recording deleted successfully
} catch {
self.view?.makeToast(error.localizedDescription)
}
} else {
// recorded_file_not_found
}
})
})
let editAction = UIAlertAction(title: "Save", style: .default, handler: { (action: UIAlertAction) -> Void in
DispatchQueue.main.async(execute: {
//-- Share annoted image's local path to upload it on server.
if let savedAnnotedFilePath = self.assetWriter?.outputURL.path, AppInfo.shared.fileManager.fileExists(atPath: savedAnnotedFilePath) {
// MOVE OR UPLOAD FILE TO DESNTINATION
} else {
// recorded_file_not_found
}
})
})
alert.addAction(editAction)
alert.addAction(deleteAction)
self.present(alert, animated: true, completion: nil)
})
}
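For the save branch, one option (an assumption of mine, not the only destination; you could equally upload the file to a server) is to copy the recording into the photo library. A minimal sketch, assuming NSPhotoLibraryAddUsageDescription is present in Info.plist:
import Photos

/// Saves the recorded video to the user's photo library (sketch)
func saveRecordingToPhotos(fileURL: URL) {
    PHPhotoLibrary.shared().performChanges({
        // Register the recorded file as a new video asset
        _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: fileURL)
    }) { saved, error in
        DispatchQueue.main.async {
            if saved {
                // Saved successfully; the local file could be removed now
            } else if let error = error {
                print("Saving to Photos failed: \(error.localizedDescription)")
            }
        }
    }
}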
Try this in your project. If you run into any issues, post a comment and I'll try to respond with a solution.
Regards,
Thank you for your great code.
I can run it successfully. However, no microphone audio is captured. Do you know why?
Many thanks
Hi,
You may need to add the function below to the controller where you've implemented the functionality above.
/// A method to start audio session and to set category by activating it...
private func startAudioSession() {
//-- Audio session for Video recording & playing operations...
do {
try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .videoRecording, options: [.defaultToSpeaker])
try AVAudioSession.sharedInstance().setActive(true, options: .notifyOthersOnDeactivation)
} catch {
#if DEBUG
print("Setting category to AVAudioSessionCategoryPlayback failed.")
#endif
}
}
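For example, you could call it just before starting the capture, e.g. inside btnVideoClicked (one reasonable place among others; activating the session once in viewDidLoad also works):
if sender.isSelected {
    // Activate the audio session first so the microphone track is live
    startAudioSession()
    startScreenRecording(sender)
} else {
    stopScreenRecording(sender)
}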
Hi Nilesh,
Where should I call this startAudioSession() function? It's not working for me...