//
// SLZAvCaptureTool.swift
//
// Created by Zheng Li on 2021/11/9.
// Copyright © 2021 Zas. All rights reserved.
// Translated from: https://github.com/wsl2ls/iOS_Tips/blob/master/iOS_Tips/DarkMode/General/AV/SLAvCaptureTool.h

import UIKit
import AVFoundation
import CoreMotion

/// Audio/video capture type
enum SLZAvCaptureType {
    /// Audio and video
    case SLZAvCaptureTypeAv
    /// Video only
    case SLZAvCaptureTypeVideo
    /// Audio only
    case SLZAvCaptureTypeAudio
}
class SLZAvCaptureTool: NSObject, AVCapturePhotoCaptureDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate{
    // MARK: - life cycle
    override init() {
        super.init();
        self.videoSize = UIScreen.main.bounds.size;
    }
    init(with type: SLZAvCaptureType) {
        super.init();
        self.videoSize = UIScreen.main.bounds.size;
        self.avCaptureType = type;
    }
    deinit {
        self.stopRunning();
    }
    // MARK: - public properties
    /// View that hosts the camera preview layer
    var preview: UIView? {
        didSet {
            if let view = preview {
                self.previewLayer.frame = view.bounds;
                view.layer.addSublayer(self.previewLayer);
            }
            else {
                self.previewLayer.removeFromSuperlayer();
            }
        }
    };
    /// Width/height of the exported video; defaults to the device screen size (portrait, home button at the bottom)
    var videoSize: CGSize?;
    /// Whether the camera is currently running
    var isRunning = false;
    /// Camera position; defaults to the rear camera
    private(set) public var devicePosition: AVCaptureDevice.Position? = .back;
    /// Flash mode; off by default, i.e. the flash does not fire when shooting in the dark (on/off/auto)
    var flashMode: AVCaptureDevice.FlashMode?;
    /// Current zoom factor; minimum 1 (default), maximum 6
    var videoZoomFactor: CGFloat = 1.0 {
        didSet {
            if (videoZoomFactor <= self.maxZoomFactor && videoZoomFactor >= self.minZoomFactor) {
                do {
                    try self.videoInput!.device.lockForConfiguration();
                    self.videoInput!.device.videoZoomFactor = videoZoomFactor;
                    self.videoInput?.device.unlockForConfiguration();
                }
                catch {
                    print("failed to lock the device for zoom configuration");
                }
            }
        }
    };
    /// Fill mode of the previewLayer
    var videoGravity: AVLayerVideoGravity? {
        didSet {
            if let gravity = self.videoGravity {
                self.previewLayer.videoGravity = gravity;
            }
        }
    }
    /// Delegate that receives the capture tool's output
    weak var delegate: SLZAvCaptureToolDelegate?;
    // MARK: - private properties
    /// Capture session
    private lazy var session: AVCaptureSession = {
        let session = AVCaptureSession();
        if session.canSetSessionPreset(.hd1280x720) {session.sessionPreset = .hd1280x720};
        if self.avCaptureType != .SLZAvCaptureTypeAudio {
            if session.canAddInput(self.videoInput!) {session.addInput(self.videoInput!)}
            if session.canAddOutput(self.capturePhotoOutput) {session.addOutput(self.capturePhotoOutput)};
            if session.canAddOutput(self.videoDataOutput) {session.addOutput(self.videoDataOutput)};
            let captureVideoConnection = self.videoDataOutput.connection(with: .video)!;
            if self.devicePosition == .front && captureVideoConnection.isVideoMirroringSupported {
                captureVideoConnection.isVideoMirrored = true;
            }
            captureVideoConnection.videoOrientation = .portrait;
        }
        if self.avCaptureType == .SLZAvCaptureTypeAudio || self.avCaptureType == .SLZAvCaptureTypeAv {
            if session.canAddInput(self.audioInput!) {session.addInput(self.audioInput!)}
            if session.canAddOutput(self.audioDataOutput) {session.addOutput(self.audioDataOutput)};
        }
        return session;
    }();
    /// Layer that displays the camera preview content
    private lazy var previewLayer: AVCaptureVideoPreviewLayer = {
        let layer = AVCaptureVideoPreviewLayer.init(session: self.session);
        layer.videoGravity = .resizeAspect;
        return layer;
    }();
    // Audio input stream
    private lazy var audioInput: AVCaptureDeviceInput? = {
        let audioCaptureDevice = AVCaptureDevice.default(for: .audio);
        do {
            let audioInput = try AVCaptureDeviceInput.init(device: audioCaptureDevice!)
            return audioInput;
        } catch let error {
            print(error.localizedDescription)
            return nil
        }
    }();
    // Video input stream
    private lazy var videoInput: AVCaptureDeviceInput? = {
        // Add a video input device; the rear camera by default
        if let videoCaptureDevice = self.getCameraDeviceWithPosition(.back) {
            do {
                let videoInput = try AVCaptureDeviceInput.init(device: videoCaptureDevice)
                return videoInput;
            } catch let error {
                print(error.localizedDescription)
                return nil
            }
        }
        else {
            return nil;
        }
    }();
    // Still photo output
    private lazy var capturePhotoOutput: AVCapturePhotoOutput = {
        return AVCapturePhotoOutput.init();
    }();
    // Video frame data output
    private lazy var videoDataOutput: AVCaptureVideoDataOutput = {
        let _videoDataOutput = AVCaptureVideoDataOutput.init();
        _videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        return _videoDataOutput;
    }();
    // Audio frame data output
    private lazy var audioDataOutput: AVCaptureAudioDataOutput = {
        let _audioDataOutput = AVCaptureAudioDataOutput.init();
        _audioDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        return _audioDataOutput;
    }();
    // Writes the audio/video sample buffers to a file
    private var assetWriter: AVAssetWriter?;
    // Writer input for video
    private lazy var assetWriterVideoInput: AVAssetWriterInput? = {
        return self.getNewAssetWriterVideoInput();
    }();
    // Writer input for audio
    private lazy var assetWriterAudioInput: AVAssetWriterInput? = {
        return self.getNewAssetWriterAudioInput();
    }();
    // Video writer settings
    private lazy var videoCompressionSettings: [String: Any] = {
        return [:];
    }();
    // Audio writer settings
    private lazy var audioCompressionSettings: [String: Any] = {
        return [:];
    }();
    // Whether samples can be written (the writer session has started)
    private var canWrite = false;
    // Output path of the audio/video file
    private var outputFileURL: NSURL? {
        didSet {
            if let fileUrl = outputFileURL {
                if self.avCaptureType == .SLZAvCaptureTypeAudio {
                    do {
                        self.assetWriter = try AVAssetWriter.init(url: fileUrl as URL, fileType: .ac3)
                    } catch let error {
                        print(error.localizedDescription);
                    }
                }
                else {
                    do {
                        self.assetWriter = try AVAssetWriter.init(url: fileUrl as URL, fileType: .mp4)
                    } catch let error {
                        print(error.localizedDescription);
                    }
                }
            }
        }
    };
    // Whether recording is in progress
    private var isRecording = false;
    // Capture type; defaults to SLZAvCaptureTypeAv
    private var avCaptureType: SLZAvCaptureType = .SLZAvCaptureTypeAv;
    // Device orientation while shooting/recording
    private var shootingOrientation: UIDeviceOrientation = .portrait;
    // Motion manager used to monitor the device orientation
    private var motionManager: CMMotionManager? = CMMotionManager.init();
    // Maximum zoom factor
    private var maxZoomFactor: CGFloat {
        get {
            var maxZoomFactorInternal = self.videoInput!.device.activeFormat.videoMaxZoomFactor;
            if #available(iOS 11.0, *) {
                maxZoomFactorInternal = self.videoInput!.device.maxAvailableVideoZoomFactor;
            }
            if (maxZoomFactorInternal > 6) {
                maxZoomFactorInternal = 6.0;
            }
            return maxZoomFactorInternal;
        }
    }
    // Minimum zoom factor
    private var minZoomFactor: CGFloat {
        get {
            var minZoomFactorInternal: CGFloat = 1.0;
            if #available(iOS 11.0, *) {
                minZoomFactorInternal = self.videoInput!.device.minAvailableVideoZoomFactor;
            }
            return minZoomFactorInternal;
        }
    }
}
// MARK: - public methods
extension SLZAvCaptureTool{
    /// Start capturing
    func startRunning() {
        if !self.session.isRunning {
            self.session.startRunning();
        }
        self.startUpdateDeviceDirection();
    }
    /// Stop capturing
    func stopRunning() {
        if self.session.isRunning {
            self.session.stopRunning();
            self.stopUpdateDeviceDirection();
        }
    }
    /// Focus at a point (in previewLayer coordinates); continuous focus is the default mode
    func focusAtPoint(_ focalPoint: CGPoint) {
        // Convert the UI coordinate to a device coordinate in the (0,0)-(1,1) range
        let cameraPoint = self.previewLayer.captureDevicePointConverted(fromLayerPoint: focalPoint);
        let captureDevice = self.videoInput!.device;
        do {
            // Note: always call lockForConfiguration() before changing device properties and unlockForConfiguration() afterwards
            try captureDevice.lockForConfiguration();
            if captureDevice.isFocusModeSupported(.autoFocus) {
                if captureDevice.isFocusPointOfInterestSupported {
                    captureDevice.focusPointOfInterest = cameraPoint;
                }
                captureDevice.focusMode = .autoFocus;
            }
            // Exposure mode
            if captureDevice.isExposureModeSupported(.autoExpose) {
                if captureDevice.isExposurePointOfInterestSupported {
                    captureDevice.exposurePointOfInterest = cameraPoint;
                }
                captureDevice.exposureMode = .autoExpose;
            }
            captureDevice.unlockForConfiguration();
        }
        catch {
            print("focusAtPoint: failed to lock the device for configuration");
        }
    }
    /// Switch between the front and rear camera
    func switchsCamera(devicePosition: AVCaptureDevice.Position) {
        // Already using the requested camera position
        if (self.devicePosition == devicePosition) {
            return;
        }
        do {
            let videoInput = try AVCaptureDeviceInput.init(device: self.getCameraDeviceWithPosition(devicePosition)!);
            // Begin the configuration; the changes are committed at the end
            self.session.beginConfiguration();
            // Remove the existing video input
            self.session.removeInput(self.videoInput!);
            // Add the new video input
            if self.session.canAddInput(videoInput) {
                self.session.addInput(videoInput);
                self.videoInput = videoInput;
                self.devicePosition = devicePosition;
            }
            // The video input changed, so the video output connection must be reconfigured as well
            let captureConnection = self.videoDataOutput.connection(with: .video)!;
            if (captureConnection.isVideoStabilizationSupported) {
                // Video stabilization mode
                captureConnection.preferredVideoStabilizationMode = .auto;
            }
            if (self.devicePosition == .front && captureConnection.isVideoMirroringSupported) {
                captureConnection.isVideoMirrored = true;
            }
            captureConnection.videoOrientation = .portrait;
            // Commit the configuration changes
            self.session.commitConfiguration();
        }
        catch {
            print("switchsCamera: failed to create the video device input");
        }
    }
    /// Take a photo; the result is delivered via the delegate
    func outputPhoto() {
        // Get the photo output connection
        let captureConnection = self.capturePhotoOutput.connection(with: .video)!;
        // The front camera delivers mirrored frames; enable mirroring here to flip the image back
        if (self.devicePosition == .front && captureConnection.isVideoMirroringSupported) {
            captureConnection.isVideoMirrored = true;
        }
        if (self.shootingOrientation == .landscapeRight) {
            captureConnection.videoOrientation = .landscapeLeft;
        } else if (self.shootingOrientation == .landscapeLeft) {
            captureConnection.videoOrientation = .landscapeRight;
        } else if (self.shootingOrientation == .portraitUpsideDown) {
            captureConnection.videoOrientation = .portraitUpsideDown;
        } else {
            captureConnection.videoOrientation = .portrait;
        }
        // Output settings, e.g. AVVideoCodecKey: AVVideoCodecType.jpeg
        let capturePhotoSettings = AVCapturePhotoSettings.init(format: [AVVideoCodecKey: AVVideoCodecType.jpeg]);
        capturePhotoSettings.isHighResolutionPhotoEnabled = true; // high resolution
        if self.flashMode != nil {
            capturePhotoSettings.flashMode = self.flashMode!; // flash; with .auto the ambient brightness decides whether it fires
        }
        self.capturePhotoOutput.capturePhoto(with: capturePhotoSettings, delegate: self);
    }
    /// Start recording video; outputs MP4 by default
    /// @param path output path for the recorded audio/video file
    /// @param avCaptureType type of recording
    func startRecordVideoToOutputFileAtPath(_ path: String, recordType avCaptureType: SLZAvCaptureType) {
        self.avCaptureType = avCaptureType;
        // Remove any existing file at this path
        if FileManager.default.fileExists(atPath: path) {
            do {
                try FileManager.default.removeItem(atPath: path);
            }
            catch {
                print("failed to remove the existing file");
            }
        }
        self.outputFileURL = NSURL.init(fileURLWithPath: path);
        self.stopUpdateDeviceDirection();
        // Get the video output connection
        let captureConnection = self.videoDataOutput.connection(with: .video)!;
        // The front camera delivers mirrored frames; enable mirroring here to flip the image back
        if (self.devicePosition == .front && captureConnection.isVideoMirroringSupported) {
            captureConnection.isVideoMirrored = true;
        }
        // Setting the connection's video orientation at the start of every recording briefly blacks out the camera;
        // setting it while switching cameras has no noticeable impact.
        // captureConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
        // For that reason the orientation is applied as a transform on the writer input instead.
        if (self.shootingOrientation == .landscapeRight) {
            self.assetWriterVideoInput?.transform = CGAffineTransform(rotationAngle: .pi/2);
        } else if (self.shootingOrientation == .landscapeLeft) {
            self.assetWriterVideoInput?.transform = CGAffineTransform(rotationAngle: -.pi/2);
        } else if (self.shootingOrientation == .portraitUpsideDown) {
            self.assetWriterVideoInput?.transform = CGAffineTransform(rotationAngle: .pi);
        } else {
            self.assetWriterVideoInput?.transform = CGAffineTransform(rotationAngle: 0);
        }
        // When recording again, Swift cannot re-trigger the lazy initializers, so recreate the inputs manually
        if self.assetWriterVideoInput == nil {
            self.assetWriterVideoInput = self.getNewAssetWriterVideoInput();
        }
        if self.assetWriterAudioInput == nil {
            self.assetWriterAudioInput = self.getNewAssetWriterAudioInput();
        }
        guard let assetWriterSome = self.assetWriter, let videoWriterInput = self.assetWriterVideoInput, let audioWriterInput = self.assetWriterAudioInput else {return}
        if (assetWriterSome.canAdd(videoWriterInput)) {
            assetWriterSome.add(videoWriterInput);
        } else {
            print("failed to add the video writer input");
        }
        if (assetWriterSome.canAdd(audioWriterInput) && self.avCaptureType == .SLZAvCaptureTypeAv) {
            assetWriterSome.add(audioWriterInput);
        } else {
            print("failed to add the audio writer input");
        }
        self.isRecording = true;
    }
    /// Stop recording video
    func stopRecordVideo() {
        if (self.isRecording) {
            self.isRecording = false;
            if (self.assetWriter != nil && self.canWrite && self.assetWriter!.status != .unknown) {
                self.assetWriter!.finishWriting(completionHandler: { [weak self] in
                    if let delegateNew = self?.delegate, let this = self {
                        if delegateNew.responds(to: #selector(SLZAvCaptureToolDelegate.captureTool(captureTool:didFinishRecordingToOutputFileAtURL:error:))) {
                            delegateNew.captureTool?(captureTool: this, didFinishRecordingToOutputFileAtURL: this.outputFileURL, error: this.assetWriter?.error)
                        }
                    }
                    self?.canWrite = false;
                    self?.assetWriter = nil;
                    self?.assetWriterAudioInput = nil;
                    self?.assetWriterVideoInput = nil;
                })
            }
        }
    }
    /// Start recording audio; the file is written with the AC-3 file type (see outputFileURL)
    /// @param path output path for the recorded audio file
    func startRecordAudioToOutputFileAtPath(_ path: String) {
        self.avCaptureType = .SLZAvCaptureTypeAudio;
        // Remove any existing file at this path
        if FileManager.default.fileExists(atPath: path) {
            do {
                try FileManager.default.removeItem(atPath: path);
            }
            catch {
                print("failed to remove the existing file");
            }
        }
        self.outputFileURL = NSURL.init(fileURLWithPath: path);
        self.stopUpdateDeviceDirection();
        self.session.beginConfiguration();
        self.session.removeOutput(self.videoDataOutput);
        self.session.commitConfiguration();
        // When recording again, Swift cannot re-trigger the lazy initializer, so recreate the input manually
        if self.assetWriterAudioInput == nil {
            self.assetWriterAudioInput = self.getNewAssetWriterAudioInput();
        }
        guard let assetWriterTemp = self.assetWriter, let audioInputWriter = self.assetWriterAudioInput else {return};
        if assetWriterTemp.canAdd(audioInputWriter) {
            assetWriterTemp.add(audioInputWriter);
        }
        else {
            print("failed to add the audio writer input")
        }
        self.isRecording = true;
    }
    /// Stop recording audio
    func stopRecordAudio() {
        if self.isRecording {
            self.isRecording = false;
            guard let assetWriterSome = self.assetWriter else {return}
            if self.canWrite && (assetWriterSome.status != .unknown) {
                assetWriterSome.finishWriting {[weak self] in
                    guard let delegateTemp = self?.delegate, let this = self else {return}
                    if delegateTemp.responds(to: #selector(SLZAvCaptureToolDelegate.captureTool(captureTool:didFinishRecordingToOutputFileAtURL:error:))) {
                        DispatchQueue.main.async {
                            delegateTemp.captureTool?(captureTool: this, didFinishRecordingToOutputFileAtURL: this.outputFileURL, error: this.assetWriter?.error)
                        }
                    }
                    this.canWrite = false;
                    this.assetWriter = nil;
                    this.assetWriterAudioInput = nil;
                    this.assetWriterVideoInput = nil;
                }
            }
        }
    }
}
// MARK: - private methods
extension SLZAvCaptureTool{
    // Get the camera device at the specified position
    private func getCameraDeviceWithPosition(_ position: AVCaptureDevice.Position) -> AVCaptureDevice? {
        if #available(iOS 10.2, *) {
            let dissession = AVCaptureDevice.DiscoverySession.init(deviceTypes: [.builtInDualCamera, .builtInTelephotoCamera, .builtInWideAngleCamera], mediaType: .video, position: position);
            for device in dissession.devices {
                if device.position == position {
                    return device;
                }
            }
        }
        else {
            for device in AVCaptureDevice.devices(for: .video) {
                if device.position == position {
                    return device;
                }
            }
        }
        return nil;
    }
    /// Create a new AVAssetWriterInput for video
    private func getNewAssetWriterVideoInput() -> AVAssetWriterInput {
        // Total pixel count of the output video
        let numPixels = self.videoSize!.width * UIScreen.main.scale * self.videoSize!.height * UIScreen.main.scale;
        // Bits per pixel
        let bitsPerPixel: CGFloat = 12.0;
        let bitsPerSecond = Int(numPixels * bitsPerPixel);
        // Bitrate and frame rate settings
        let compressionProperties: [String: Any] = [
            AVVideoAverageBitRateKey: bitsPerSecond,
            AVVideoExpectedSourceFrameRateKey: 15,
            AVVideoMaxKeyFrameIntervalKey: 15,
            AVVideoProfileLevelKey: AVVideoProfileLevelH264High40];
        let width = self.videoSize!.width * UIScreen.main.scale;
        let height = self.videoSize!.height * UIScreen.main.scale;
        // Video output settings
        self.videoCompressionSettings = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: width,
            AVVideoHeightKey: height,
            AVVideoScalingModeKey: AVVideoScalingModeResizeAspectFill,
            AVVideoCompressionPropertiesKey: compressionProperties
        ];
        let _assetWriterVideoInput = AVAssetWriterInput.init(mediaType: .video, outputSettings: self.videoCompressionSettings);
        // expectsMediaDataInRealTime must be true because the data comes from the capture session in real time
        _assetWriterVideoInput.expectsMediaDataInRealTime = true;
        return _assetWriterVideoInput;
    }
    /// Create a new AVAssetWriterInput for audio
    private func getNewAssetWriterAudioInput() -> AVAssetWriterInput {
        self.audioCompressionSettings = [
            AVEncoderBitRatePerChannelKey: 28000,
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVNumberOfChannelsKey: 1,
            AVSampleRateKey: 22050
        ];
        let _assetWriterAudioInput = AVAssetWriterInput.init(mediaType: .audio, outputSettings: self.audioCompressionSettings);
        _assetWriterAudioInput.expectsMediaDataInRealTime = true;
        return _assetWriterAudioInput;
    }
}
extension SLZAvCaptureTool{
    /// Start monitoring the device orientation
    private func startUpdateDeviceDirection() {
        guard let motionManager = self.motionManager else {return};
        if motionManager.isAccelerometerAvailable {
            // The callback fires continuously. If only a single reading is needed, stop right after receiving it and
            // restart later; for continuous monitoring, stop when leaving the page.
            motionManager.accelerometerUpdateInterval = 1.0;
            motionManager.startAccelerometerUpdates(to: OperationQueue.current ?? OperationQueue.main) {[weak self] accelerometerData, error in
                if let accelerometerDataInternal = accelerometerData {
                    let x = accelerometerDataInternal.acceleration.x;
                    let y = accelerometerDataInternal.acceleration.y;
                    if ((fabs(y) + 0.1) >= fabs(x)) {
                        if (y >= 0.1) {
                            // upside down
                            if (self?.shootingOrientation == .portraitUpsideDown) {
                                return;
                            }
                            self?.shootingOrientation = .portraitUpsideDown;
                        } else {
                            // portrait
                            if (self?.shootingOrientation == .portrait) {
                                return;
                            }
                            self?.shootingOrientation = .portrait;
                        }
                    } else {
                        if (x >= 0.1) {
                            // landscape right
                            if (self?.shootingOrientation == .landscapeRight) {
                                return;
                            }
                            self?.shootingOrientation = .landscapeRight;
                        } else if (x <= -0.1) {
// NSLog(@"Left");
if (self?.shootingOrientation == .landscapeLeft) {
return ;
}
self?.shootingOrientation = .landscapeLeft;
} else {
// NSLog(@"Portrait");
if (self?.shootingOrientation == .portrait) {
return ;
}
self?.shootingOrientation = .portrait;
}
}
}
}
}
}
/// 停止监测方向
private func stopUpdateDeviceDirection(){
if let motionManager = self.motionManager{
if motionManager.isAccelerometerActive{
motionManager.stopAccelerometerUpdates();
self.motionManager = nil;
}
}
}
}
extension SLZAvCaptureTool {
    // AVCapturePhotoCaptureDelegate: a captured photo has finished processing
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        let imageData = photo.fileDataRepresentation();
        let image = imageData.flatMap { UIImage.init(data: $0) };
        if let delegateNew = self.delegate, let imageNew = image {
            if delegateNew.responds(to: #selector(SLZAvCaptureToolDelegate.captureTool(captureTool:didOutputPhoto:error:))) {
                delegateNew.captureTool?(captureTool: self, didOutputPhoto: imageNew, error: error)
            }
        }
    }
    // Video/audio sample buffers delivered by the data outputs
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if !self.isRecording {
            return;
        }
        if output == self.videoDataOutput {
            if let delegateNew = self.delegate {
                if delegateNew.responds(to: #selector(SLZAvCaptureToolDelegate.captureTool(captureTool:didOutputVideoSampleBuffer:fromConnection:))) {
                    delegateNew.captureTool?(captureTool: self, didOutputVideoSampleBuffer: sampleBuffer, fromConnection: connection);
                }
            }
            self.writerVideoSampleBuffer(sampleBuffer, fromConnection: connection);
        }
        else if output == self.audioDataOutput {
            if let delegateNew = self.delegate {
                if delegateNew.responds(to: #selector(SLZAvCaptureToolDelegate.captureTool(captureTool:didOutputAudioSampleBuffer:fromConnection:))) {
                    delegateNew.captureTool?(captureTool: self, didOutputAudioSampleBuffer: sampleBuffer, fromConnection: connection);
                }
            }
            self.writerAudioSampleBuffer(sampleBuffer, fromConnection: connection);
        }
    }
    // Append a video sample buffer to the asset writer, starting the writer session on the first buffer
    private func writerVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer, fromConnection connection: AVCaptureConnection) {
        if !self.canWrite && (self.avCaptureType == .SLZAvCaptureTypeAv || self.avCaptureType == .SLZAvCaptureTypeVideo) {
            self.assetWriter?.startWriting();
            self.assetWriter?.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
            self.canWrite = true;
        }
        guard let videoInputWriter = self.assetWriterVideoInput else {return};
        if videoInputWriter.isReadyForMoreMediaData {
            let isSuccess = videoInputWriter.append(sampleBuffer);
            if !isSuccess {
                print(self.assetWriter?.error?.localizedDescription as Any);
            }
        }
    }
    // Append an audio sample buffer to the asset writer, starting the writer session on the first buffer
    private func writerAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer, fromConnection connection: AVCaptureConnection) {
        if !self.canWrite && self.avCaptureType == .SLZAvCaptureTypeAudio {
            self.assetWriter?.startWriting();
            self.assetWriter?.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
            self.canWrite = true;
        }
        guard let audioInputWriter = self.assetWriterAudioInput else {return};
        if audioInputWriter.isReadyForMoreMediaData {
            let isSuccess = audioInputWriter.append(sampleBuffer);
            if !isSuccess {
                print(self.assetWriter?.error?.localizedDescription as Any);
            }
        }
    }
}
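
// MARK: - Usage sketch
// A minimal, illustrative sketch of how this tool might be driven from a view controller.
// It is not part of the original file: `DemoCaptureViewController` and the temporary output
// path are hypothetical, and the delegate callbacks (declared by `SLZAvCaptureToolDelegate`
// elsewhere in the project) are omitted for brevity.
class DemoCaptureViewController: UIViewController {
    private let captureTool = SLZAvCaptureTool(with: .SLZAvCaptureTypeAv)

    override func viewDidLoad() {
        super.viewDidLoad()
        captureTool.preview = view                  // attaches the preview layer to this view
        captureTool.videoGravity = .resizeAspectFill
        captureTool.startRunning()                  // starts the session and orientation monitoring
    }

    func takePhoto() {
        captureTool.flashMode = .auto
        captureTool.outputPhoto()                   // result arrives via the delegate's photo callback
    }

    func startRecording() {
        let path = NSTemporaryDirectory() + "demo.mp4"   // hypothetical output path
        captureTool.startRecordVideoToOutputFileAtPath(path, recordType: .SLZAvCaptureTypeAv)
    }

    func stopRecording() {
        captureTool.stopRecordVideo()               // the delegate receives the file URL and any error
    }
}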