-
Notifications
You must be signed in to change notification settings - Fork 11
/
nativecodecvideo.cpp
654 lines (506 loc) · 21.5 KB
/
nativecodecvideo.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
#include "nativecodecvideo.h"
#include <opencv2/opencv.hpp>
#include <QDebug>
#include <QString>
#include <QFile>
#include <QStandardPaths>
#include "media/NdkMediaCrypto.h"
#include "media/NdkMediaCodec.h"
#include "media/NdkMediaError.h"
#include "media/NdkMediaFormat.h"
#include "media/NdkMediaMuxer.h"
#include "media/NdkMediaExtractor.h"
#include <OMXAL/OpenMAXAL.h>
#include <OMXAL/OpenMAXAL_Android.h>
using std::string;
/**
 * Constructs a reader for the given video file: sets up the extractor and
 * decoder via prepareDecoder(), then selects the track to read from.
 */
NativeCodecReader::NativeCodecReader(QString filename)
    : QObject(nullptr)
{
    mFilename = filename;
    mTotalTimeBuffer = -1;
    prepareDecoder();
    Q_ASSERT(mExtractor != nullptr);
    const size_t numberOfTracks = AMediaExtractor_getTrackCount(mExtractor);
    qDebug() << "Found " << numberOfTracks << " tracks.";
    qDebug() << "Selecting track " << mTrackIndex;
    const media_status_t selectStatus = AMediaExtractor_selectTrack(mExtractor, mTrackIndex);
    if (selectStatus != AMEDIA_OK) {
        qWarning() << "AMediaExtractor_selectTrack failed.";
    }
}
NativeCodecReader::~NativeCodecReader()
{
    // Tear down codec and extractor first, then drop the cached track format.
    releaseDecoder();
    AMediaFormat_delete(mFormat);
}
/**
 * Decodes and returns the next video frame as a BGR cv::Mat.
 *
 * One call does at most one unit of work on each side of the codec:
 *  - input side: dequeue one codec input buffer, fill it with the next
 *    compressed sample from the extractor, and advance the extractor;
 *  - output side: dequeue one decoded buffer (if ready), convert it from
 *    YUV I420 to BGR, and return it.
 *
 * Returns an empty cv::Mat on EOS, on error, or when no decoded output is
 * available yet — callers are expected to poll via read().
 */
cv::Mat NativeCodecReader::performRead(){
    //qDebug() << "performRead";
    // Lazily determine the frame size from the track format the first time
    // through (mSize is invalid until then).
    if(mSize.empty() || mSize.width == -1 || mSize.height == -1){
        int frameWidth = -1;
        int frameHeight = -1;
        bool ok = AMediaFormat_getInt32(mFormat, AMEDIAFORMAT_KEY_WIDTH, &frameWidth);
        ok = ok && AMediaFormat_getInt32(mFormat, AMEDIAFORMAT_KEY_HEIGHT, &frameHeight);
        if(!ok){
            qWarning() << "Asking format for frame width / height failed.";
            return cv::Mat();
        }
        else{
            qDebug() << "Setting OpenCv Buffer size" << frameWidth << "x" << frameHeight;
            mSize = cv::Size(frameWidth, frameHeight);
        }
    }
    // --- Input side: feed one compressed sample to the codec. ---
    if (mTrackIndex >=0) {
        ssize_t bufidx;
        bufidx = AMediaCodec_dequeueInputBuffer(mCodec, TIMEOUT_USEC);
        //qDebug() << "AMediaCodec_dequeueInputBuffer: " << bufidx;
        //ALOGV("track %d, input buffer %zd", t, bufidx);
        if (bufidx >= 0) {
            size_t bufsize;
            uint8_t *buf = AMediaCodec_getInputBuffer(mCodec, bufidx, &bufsize);
            //qDebug() << "AMediaCodec_getInputBuffer size" << bufsize;
            int sampleSize = AMediaExtractor_readSampleData(mExtractor, buf, bufsize);
            //qDebug() << "AMediaExtractor_readSampleData: " << sampleSize;
            // A negative sample size signals the end of the stream.
            // NOTE(review): returning here leaves the dequeued input buffer
            // un-queued and never signals EOS to the codec — confirm intended.
            if (sampleSize < 0) {
                sampleSize = 0;
                sawInputEOS = true;
                qDebug() << "Extracting EOS";
                return cv::Mat();
            }
            int64_t presentationTimeUs = AMediaExtractor_getSampleTime(mExtractor);
            AMediaCodec_queueInputBuffer(mCodec, bufidx, 0, sampleSize, presentationTimeUs, sawInputEOS ? AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM : 0);
            //qDebug() << "Pushing new input sample: " << bufidx << " @ " << presentationTimeUs;
            bool ok = AMediaExtractor_advance(mExtractor);
            if(!ok){
                //qWarning() << "Unable to advance extractor";
                // This actually happens regularly and is nothig to worry about
            }
        }
        else{
            qWarning() << "AMediaCodec_dequeueInputBuffer returned invalid buffer idx";
        }
    } else {
        // No track selected — make sure the codec still sees an EOS marker.
        qDebug() << "@@@@ no more input samples";
        if (!sawInputEOS) {
            // we ran out of samples without ever signaling EOS to the codec,
            // so do that now
            int bufidx;
            bufidx = AMediaCodec_dequeueInputBuffer(mCodec, TIMEOUT_USEC);
            if (bufidx >= 0) {
                AMediaCodec_queueInputBuffer(mCodec, bufidx, 0, 0, 0, AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM);
                sawInputEOS = true;
                return cv::Mat();
            }
        }
    }
    // --- Output side: check the codec for an available decoded buffer. ---
    AMediaCodecBufferInfo info;
    // NOTE(review): outputFormat is declared but never used in this function.
    AMediaFormat *outputFormat;
    if (!sawOutputEOS) {
        // NOTE(review): dequeueOutputBuffer returns ssize_t; storing it in an
        // int narrows the value (harmless for the small negative info codes,
        // but worth confirming).
        int status;
        status = AMediaCodec_dequeueOutputBuffer(mCodec, &info, 1);
        if (status >= 0) {
            if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
                qDebug() << "EOS on track";
                sawOutputEOS = true;
            }
            //qDebug() << "got decoded buffer for track";
            cv::Mat colImg;
            if (info.size > 0) {
                size_t bufsize;
                uint8_t *buf = AMediaCodec_getOutputBuffer(mCodec, status, &bufsize);
                //qDebug() << "Converting frame data to OpenCV::Mat (" << bufsize << ")";
                //qDebug() << "Allocating OpenCV buffer " << mSize.width << "x" << mSize.height;
                // I420 holds 1.5 bytes per pixel: full-res Y plane plus
                // quarter-res U and V planes stacked below it.
                cv::Mat YUVframe(cv::Size(mSize.width, mSize.height*1.5), CV_8UC1);
                colImg = cv::Mat(mSize, CV_8UC3);
                //qDebug() << "Copying frame data";
                // NOTE(review): copies bufsize (the codec buffer's capacity);
                // assumes YUVframe is at least that large — TODO confirm.
                memcpy(YUVframe.data, buf, bufsize);
                //qDebug() << "Color conversion";
                cv::cvtColor(YUVframe, colImg, CV_YUV2BGR_I420, 3);
                //qDebug() << "Conversion done.";
                // right here we have the raw frame data available!
                //int adler = checksum(buf, info.size, mFormat);
                //sizes.add(adler);
            }
            AMediaCodec_releaseOutputBuffer(mCodec, status, false);
            return colImg;
        } else if (status == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
            qDebug() << "output buffers changed";
        } else if (status == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
            // The codec reports its real output format once decoding starts;
            // replace the cached track format with it.
            if (mFormat != nullptr) {
                AMediaFormat_delete(mFormat);
            }
            mFormat = AMediaCodec_getOutputFormat(mCodec);
            qDebug() << "format changed " << AMediaFormat_toString(mFormat);
            //int colorCode = -1;
            //AMediaFormat_getInt32(mFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT, &colorCode);
            //qDebug() << "Color code was: " << colorCode;
        } else if (status == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
            qWarning() << "no output buffer right now";
        } else {
            qWarning() << "unexpected info code" << status;
        }
    } else {
        qDebug() << "already at EOS";
    }
    return cv::Mat();
}
/**
 * Creates the media extractor and the video decoder for mFilename.
 *
 * Also caches the stream duration (in ms) into mTotalTimeBuffer by parsing
 * the textual dump of the track format — the duration must be captured here
 * because it is not available later on.
 */
void NativeCodecReader::prepareDecoder(){
    mExtractor = AMediaExtractor_new();
    mTotalTimeBuffer = -1;
    if(mExtractor == nullptr){
        qWarning() << "Unable to get a media extractor!";
    }
    //PsshInfo* info = AMediaExtractor_getPsshInfo(mExtractor);
    QString inputPath = mFilename;
    QFile infile(inputPath);
    //if(!infile.open(QIODevice::ReadOnly)){
    // qWarning() << "Cannot open video file " << inputPath;
    //}
    //qDebug() << "AMediaExtractor_setDataSourceFd "<< 0 << " " << infile.size();
    //media_status_t status = AMediaExtractor_setDataSourceFd(mExtractor, infile.handle(), 0, infile.size());
    media_status_t status = AMediaExtractor_setDataSource(mExtractor, inputPath.toStdString().c_str());
    if(status != AMEDIA_OK){
        qWarning() << "AMediaExtractor_setDataSourceFd failed: " << status;
    }
    int numtracks = AMediaExtractor_getTrackCount(mExtractor);
    // This reader only supports single-track (video-only) files.
    if(numtracks != 1){
        qWarning() << "Strange number of tracks";
    }
    mTrackIndex = 0;
    sawInputEOS = false;
    sawOutputEOS = false;
    mFormat = AMediaExtractor_getTrackFormat(mExtractor, mTrackIndex);
    qDebug() << "Media format detected: " << AMediaFormat_toString(mFormat);
    // find out the vuideo duration here (it is not possible later on!)
    qDebug() << "Recalculating totalTime";
    // NOTE(review): parses the human-readable AMediaFormat_toString() dump
    // for "durationUs: int64(...)". This is fragile — the string layout is
    // not a stable API; consider AMediaFormat_getInt64 with the duration key
    // where available.
    const char* formatDescription = AMediaFormat_toString(mFormat);
    QStringList entries = QString::fromLocal8Bit(formatDescription).split(", ");
    qDebug() << formatDescription;
    for(int i=0; i < entries.size(); i++){
        if(entries[i].startsWith("durationUs:")){
            qDebug() << "Found duration";
            // Strip the "durationUs: int64(" prefix and the trailing ")".
            QString crop = entries[i].right(entries[i].length()- QString("durationUs: int64(").length()).chopped(1);
            qDebug() << "Parsing " << entries[i] << " to " << crop << " result " << crop.toLong();
            // Stored in milliseconds (source value is microseconds).
            mTotalTimeBuffer = crop.toLong() /1000;
            break;
        }
    }
    const char *mime;
    if (!AMediaFormat_getString(mFormat, AMEDIAFORMAT_KEY_MIME, &mime)) {
        qWarning() << "Mime type cannot be determined!";
    } else
    if (!strncmp(mime, "video/", 6)) {
        // Create, configure (no output surface, no crypto) and start a
        // decoder matching the track's mime type.
        mCodec = AMediaCodec_createDecoderByType(mime);
        media_status_t err =AMediaCodec_configure(mCodec, mFormat, nullptr /* surface */, nullptr /* crypto */, 0);
        if(err != AMEDIA_OK){
            qWarning() << "Error occurred: " << err;
        }
        err =AMediaCodec_start(mCodec);
        if(err != AMEDIA_OK){
            qWarning() << "Error occurred: " << err;
        }
        sawInputEOS = false;
        sawOutputEOS = false;
    } else {
        qWarning() << "expected audio or video mime type, got "<< mime;
    }
    qDebug() << "Decoder ready!";
}
/**
 * Releases decoder resources. May be called after partial / failed initialization.
 *
 * Safe to call multiple times: both members are null-checked and nulled out
 * after deletion.
 */
void NativeCodecReader::releaseDecoder() {
    // Fixed: this previously logged "releasing encoder objects" (copy-paste
    // from the writer class) even though it tears down the DECODER.
    qDebug() << "releasing decoder objects";
    if (mCodec != nullptr) {
        AMediaCodec_stop(mCodec);
        AMediaCodec_delete(mCodec);
        mCodec = nullptr;
    }
    /*
    if (mInputSurface != null) {
        mInputSurface.release();
        mInputSurface = null;
    }
    */
    if (mExtractor != nullptr) {
        AMediaExtractor_delete(mExtractor);
        mExtractor = nullptr;
    }
}
/// Total number of frames, derived from the cached duration (ms) and the
/// target frame rate.
int64 NativeCodecReader::nFrames(){
    const int64 durationMs = totalTime();
    return (durationMs * dst_fps) / 1000;
}
/// Index of the current frame, derived from the extractor position (ms) and
/// the target frame rate.
int64 NativeCodecReader::currentFrame(){
    const int64 positionMs = currentTime();
    return (positionMs * dst_fps) / 1000;
}
/// Current extractor position in milliseconds (the NDK reports microseconds).
int64 NativeCodecReader::currentTime(){
    Q_ASSERT(mExtractor != nullptr);
    const int64 sampleTimeUs = AMediaExtractor_getSampleTime(mExtractor);
    return sampleTimeUs / 1000;
}
/// Total stream duration in milliseconds, cached by prepareDecoder();
/// -1 when the duration could not be determined.
int64 NativeCodecReader::totalTime(){
    return mTotalTimeBuffer;
}
/**
 * Seeks the extractor to the closest sync sample before the given frame and
 * flushes the decoder.
 *
 * @param mat unused (kept for interface compatibility).
 * @param frameNumber target frame index.
 * @return true when the extractor seek succeeded.
 *
 * Fixed: this function is declared bool but previously had NO return
 * statement (undefined behavior); it also ignored the seek status.
 */
bool NativeCodecReader::seek(cv::Mat& mat, int64 frameNumber){
    Q_UNUSED(mat);
    const int64 pos = frame2Time(frameNumber);
    const media_status_t status =
        AMediaExtractor_seekTo(mExtractor, pos, SeekMode::AMEDIAEXTRACTOR_SEEK_CLOSEST_SYNC);
    // For decoders that do not support adaptive playback (including when not
    // decoding onto a Surface): in order to start decoding data that is not
    // adjacent to previously submitted data (i.e. after a seek) you MUST
    // flush the decoder.
    AMediaCodec_flush(mCodec);
    return status == AMEDIA_OK;
}
/// Decodes the next frame into mat; returns false on EOS/failure (empty Mat).
bool NativeCodecReader::read(cv::Mat& mat){
    mat = performRead();
    return mat.empty() ? false : true;
}
/// Converts a frame index into a presentation timestamp in microseconds.
int64 NativeCodecReader::frame2Time(int64 frameNo){
    const int64 microsPerSecond = 1000000;
    return (frameNo * microsPerSecond) / dst_fps;
}
/**
 * Stores the output filename, frame rate and frame size.
 * Encoder and muxer are created later in prepareEncoder().
 */
NativeCodecWriter::NativeCodecWriter(QString filename, const int fps, const cv::Size& size)
    : QObject(nullptr)
    , mFilename(filename)
    , mFPS(fps)
    , mSize(size)
    , isRunning(false)
{
}
// NOTE(review): intentionally empty — the destructor does NOT release the
// encoder/muxer; callers are expected to call end() (which invokes
// releaseEncoder()) before destruction. Confirm all call sites do so.
NativeCodecWriter::~NativeCodecWriter(){
}
/**
 * Encodes one BGR frame: drains pending encoder output into the muxer, then
 * converts the frame to YUV I420 and queues it on the encoder's input.
 *
 * @param mat BGR input frame (must match the size passed to the constructor).
 * @param timestamp currently unused — presentation times are generated from
 *        the frame counter (see computePresentationTimeNsec()).
 * @return true when the frame was queued, false on empty input or any
 *         encoder error.
 *
 * Fixes vs. the original:
 *  - inBufferIdx is checked before use (dequeue can time out with a
 *    negative index) and the returned buffer pointer is null-checked;
 *  - the memcpy is bounded by both the frame's byte count and the codec
 *    buffer's capacity (the original copied out_size bytes from the frame,
 *    overreading colorImg when the codec buffer is larger than a frame);
 *  - queueInputBuffer is given the actual payload size, not the capacity.
 */
bool NativeCodecWriter::write(const cv::Mat& mat, const long long timestamp){
    Q_UNUSED(timestamp);
    // Feed any pending encoder output into the muxer first, so the input
    // side never stalls on a full output queue.
    drainEncoder(false);
    if(mat.empty()) return false;

    // Get the index of the next available input buffer; a negative value
    // means none became available within TIMEOUT_USEC.
    const ssize_t inBufferIdx = AMediaCodec_dequeueInputBuffer(mEncoder, TIMEOUT_USEC);
    if(inBufferIdx < 0){
        qWarning() << "No encoder input buffer available";
        return false;
    }
    size_t out_size = 0;
    uint8_t* inBuffer = AMediaCodec_getInputBuffer(mEncoder, inBufferIdx, &out_size);
    if(inBuffer == nullptr){
        qWarning() << "AMediaCodec_getInputBuffer returned null";
        return false;
    }

    // Convert to planar YUV 4:2:0 as expected by the encoder configuration.
    cv::Mat colorImg;
    cv::cvtColor(mat, colorImg, CV_BGR2YUV_I420); // COLOR_FormatYUV420SemiPlanar

    // Copy at most one frame, and never more than the buffer can hold.
    const size_t frameBytes = colorImg.total() * colorImg.elemSize();
    const size_t copyBytes = (frameBytes < out_size) ? frameBytes : out_size;
    if(frameBytes > out_size){
        qWarning() << "Encoder input buffer smaller than frame, truncating";
    }
    memcpy(inBuffer, colorImg.data, copyBytes);

    // Presentation time is derived from the frame counter, not the caller's
    // timestamp (kept from the original implementation).
    const int64_t presentationTimeNs = computePresentationTimeNsec();
    const media_status_t status = AMediaCodec_queueInputBuffer(
        mEncoder, inBufferIdx, 0, copyBytes, presentationTimeNs, 0);
    if(status != AMEDIA_OK){
        qWarning() << "Something went wrong while pushing frame to input buffer";
        return false;
    }
    // So long as we fully drain the encoder before supplying additional
    // input, the system guarantees that we can supply another frame without
    // blocking.
    return true;
}
void NativeCodecWriter::end(){
qDebug() << "End of recording called!";
// Send the termination frame
ssize_t inBufferIdx = AMediaCodec_dequeueInputBuffer(mEncoder, TIMEOUT_USEC);
size_t out_size;
uint8_t* inBuffer = AMediaCodec_getInputBuffer(mEncoder, inBufferIdx, &out_size);
int64_t presentationTimeNs = computePresentationTimeNsec();
qDebug() << "Sending EOS";
media_status_t status = AMediaCodec_queueInputBuffer(mEncoder, inBufferIdx, 0, out_size, presentationTimeNs, AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM);
// send end-of-stream to encoder, and drain remaining output
drainEncoder(true);
releaseEncoder();
// To test the result, open the file with MediaExtractor, and get the format. Pass
// that into the MediaCodec decoder configuration, along with a SurfaceTexture surface,
// and examine the output with glReadPixels.
}
void NativeCodecWriter::prepareEncoder(){
AMediaFormat* format = AMediaFormat_new();
AMediaFormat_setInt32(format,AMEDIAFORMAT_KEY_WIDTH,mSize.width);
AMediaFormat_setInt32(format,AMEDIAFORMAT_KEY_HEIGHT,mSize.height);
AMediaFormat_setString(format,AMEDIAFORMAT_KEY_MIME,"video/avc"); // H.264 Advanced Video Coding
AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, 21); // #21 COLOR_FormatYUV420SemiPlanar (NV12)
AMediaFormat_setInt32(format,AMEDIAFORMAT_KEY_BIT_RATE,500000);
AMediaFormat_setFloat(format,AMEDIAFORMAT_KEY_FRAME_RATE,mFPS);
AMediaFormat_setInt32(format,AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,5);
//AMediaFormat_setInt32(format,AMEDIAFORMAT_KEY_STRIDE,mSize.width);
//AMediaFormat_setInt32(format,AMEDIAFORMAT_KEY_M AX_WIDTH,mSize.width);
//AMediaFormat_setInt32(format,AMEDIAFORMAT_KEY_MAX_HEIGHT,mSize.height);
mEncoder = AMediaCodec_createEncoderByType("video/avc");
if(mEncoder == nullptr){
qWarning() << "Unable to create encoder";
}
media_status_t err = AMediaCodec_configure(mEncoder, format, NULL, NULL, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
if(err != AMEDIA_OK){
qWarning() << "Error occurred: " << err;
}
err = AMediaCodec_start(mEncoder);
if(err != AMEDIA_OK){
qWarning() << "Error occurred: " << err;
}
if(err != AMEDIA_OK){
qWarning() << "Error occurred: " << err;
}
QFile outFile(mFilename);
if(!outFile.open(QIODevice::WriteOnly)){
qWarning() << "Cannot open file: " << mFilename;
}
else{
qDebug() << "Writing video to file:" << mFilename;
}
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
//
// We're not actually interested in multiplexing audio. We just want to convert
// the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
mMuxer = AMediaMuxer_new(outFile.handle(), AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
if(mMuxer == nullptr){
qWarning() << "Unable to create Muxer";
}
mTrackIndex = -1;
mMuxerStarted = false;
mFrameCounter = 0;
isRunning = true;
qDebug() << "Encoder ready!";
}
/**
 * Extracts all pending data from the encoder and feeds it to the muxer.
 * <p>
 * If endOfStream is not set, this returns when there is no more data to drain. If it
 * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
 * Calling this with endOfStream set should be done once, right before stopping the muxer.
 * <p>
 * Side effects: on the first INFO_OUTPUT_FORMAT_CHANGED this adds the video
 * track to mMuxer, starts the muxer, and sets mTrackIndex / mMuxerStarted.
 */
void NativeCodecWriter::drainEncoder(bool endOfStream) {
    if (endOfStream) {
        qDebug() << "Draining encoder to EOS";
        // only API >= 26
        // Send an empty frame with the end-of-stream flag set.
        // AMediaCodec_signalEndOfInputStream();
        // Instead, we construct that frame manually.
    }
    while (true) {
        ssize_t encoderStatus = AMediaCodec_dequeueOutputBuffer(mEncoder, &mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
                return;
                //break; // out of while
            }
            // NOTE(review): despite the log text, this RETURNS rather than
            // spinning, so the EOS flag may never be observed on the output
            // side when the encoder is slow — confirm whether a retry loop
            // was intended here.
            if(endOfStream){
                qDebug() << "no output available, spinning to await EOS";
                return;
            }
        } else if (encoderStatus == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
        } else if (encoderStatus == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once;
            // the reported format carries the codec config the muxer needs.
            if (mMuxerStarted) {
                qWarning() << "ERROR: format changed twice";
            }
            AMediaFormat* newFormat = AMediaCodec_getOutputFormat(mEncoder);
            if(newFormat == nullptr){
                qWarning() << "Unable to set new format.";
            }
            qDebug() << "encoder output format changed: " + QString::fromStdString(AMediaFormat_toString(newFormat));
            // now that we have the Magic Goodies, start the muxer
            mTrackIndex = AMediaMuxer_addTrack(mMuxer, newFormat);
            media_status_t err = AMediaMuxer_start(mMuxer);
            if(err != AMEDIA_OK){
                qWarning() << "Error occurred: " << err;
            }
            mMuxerStarted = true;
        } else if (encoderStatus < 0) {
            qWarning() << "unexpected result from encoder.dequeueOutputBuffer: " + QString::number(encoderStatus);
            // let's ignore it
        } else {
            // encoderStatus >= 0: a real output buffer is ready.
            size_t out_size;
            uint8_t* encodedData = AMediaCodec_getOutputBuffer(mEncoder, encoderStatus, &out_size);
            // NOTE(review): out_size is unsigned, so "<= 0" is just "== 0".
            if(out_size <= 0){
                qWarning() << "Encoded data of size 0.";
            }
            if (encodedData == nullptr) {
                qWarning() << "encoderOutputBuffer " + QString::number(encoderStatus) + " was null";
            }
            if ((mBufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                qDebug() << "ignoring BUFFER_FLAG_CODEC_CONFIG";
                mBufferInfo.size = 0;
            }
            if (mBufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    qWarning() << "muxer hasn't started";
                }
                // adjust the ByteBuffer values to match BufferInfo (not needed?)
                //encodedData.position(mBufferInfo.offset);
                //encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                AMediaMuxer_writeSampleData(mMuxer, mTrackIndex, encodedData, &mBufferInfo);
                //qDebug() << "sent " + QString::number(mBufferInfo.size) + " bytes to muxer";
            }
            else{
                qWarning()<< "mBufferInfo empty " << mBufferInfo.size;
            }
            AMediaCodec_releaseOutputBuffer(mEncoder, encoderStatus, false);
            if ((mBufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    qWarning() << "reached end of stream unexpectedly";
                } else {
                    qDebug() << "end of stream reached";
                }
                break; // out of while
            }
        }
    }
}
/**
 * Releases encoder resources. May be called after partial / failed initialization.
 * Stops both components before deleting either, clears the members, and
 * notifies listeners via recordingFinished().
 */
void NativeCodecWriter::releaseEncoder() {
    qDebug() << "releasing encoder objects";
    if (mEncoder) AMediaCodec_stop(mEncoder);
    if (mMuxer)   AMediaMuxer_stop(mMuxer);
    if (mEncoder) {
        AMediaCodec_delete(mEncoder);
        mEncoder = nullptr;
    }
    if (mMuxer) {
        AMediaMuxer_delete(mMuxer);
        mMuxer = nullptr;
    }
    isRunning = false;
    emit recordingFinished();
}
/**
 * Generates the presentation time for the next frame.
 *
 * NOTE(review): despite the "Nsec" name, the returned value is in
 * MICROseconds (1e6 / fps per frame), which matches what the AMediaCodec
 * calls in this file pass as presentation time — confirm before renaming.
 */
long long NativeCodecWriter::computePresentationTimeNsec() {
    ++mFrameCounter;
    const double usPerFrame = 1000000.0 / mFPS;
    return static_cast<long long>(mFrameCounter * usPerFrame);
}