-
Notifications
You must be signed in to change notification settings - Fork 2
/
main.cpp
183 lines (147 loc) · 4.87 KB
/
main.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
/*
* Realsense D400 infrared stream to H.264 with VAAPI encoding
*
* Copyright 2019 (C) Bartosz Meglicki <meglickib@gmail.com>
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
/* This program is example how to use:
* - VAAPI to hardware encode
* - Realsense D400 greyscale infrared stream
* - to H.264 raw video
* - stored to disk as example
*
* See README.md for the details
*
*/
// Hardware Video Encoder
#include "hve.h"
// Realsense API
#include <librealsense2/rs.hpp>

#include <fstream>
#include <iostream>
#include <vector>
using namespace std;
//user supplied input
struct input_args
{
int width;
int height;
int framerate;
int seconds;
};
bool main_loop(const input_args& input, rs2::pipeline& realsense, hve *avctx, ofstream& out_file);
void dump_frame_info(rs2::video_frame &frame);
void init_realsense(rs2::pipeline& pipe, const input_args& input);
int process_user_input(int argc, char* argv[], input_args* input, hve_config *config);
int main(int argc, char* argv[])
{
struct hve *hardware_encoder;
struct hve_config hardware_config = {0};
struct input_args user_input = {0};
ofstream out_file("output.h264", ofstream::binary);
rs2::pipeline realsense;
if(process_user_input(argc, argv, &user_input, &hardware_config) < 0)
return 1;
if(!out_file)
return 2;
init_realsense(realsense, user_input);
if( (hardware_encoder = hve_init(&hardware_config)) == NULL)
return 3;
bool status=main_loop(user_input, realsense, hardware_encoder, out_file);
hve_close(hardware_encoder);
out_file.close();
if(status)
{
cout << "Finished successfully." << endl;
cout << "Test with: " << endl << endl << "ffplay output.h264" << endl;
}
return 0;
}
//true on success, false on failure
bool main_loop(const input_args& input, rs2::pipeline& realsense, hve *he, ofstream& out_file)
{
const int frames = input.seconds * input.framerate;
int f, failed;
hve_frame frame = {0};
uint8_t *color_data = NULL; //data of dummy color plane for NV12
AVPacket *packet;
for(f = 0; f < frames; ++f)
{
rs2::frameset frameset = realsense.wait_for_frames();
rs2::video_frame ir_frame = frameset.get_infrared_frame(1);
if(!color_data)
{ //prepare dummy color plane for NV12 format, half the size of Y
//we can't alloc it in advance, this is the first time we know realsense stride
int size = ir_frame.get_stride_in_bytes()*ir_frame.get_height()/2;
color_data = new uint8_t[size];
memset(color_data, 128, size);
}
//supply realsense frame data as ffmpeg frame data
frame.linesize[0] = frame.linesize[1] = ir_frame.get_stride_in_bytes();
frame.data[0] = (uint8_t*) ir_frame.get_data();
frame.data[1] = color_data;
dump_frame_info(ir_frame);
if(hve_send_frame(he, &frame) != HVE_OK)
{
cerr << "failed to send frame to hardware" << endl;
break;
}
while( (packet=hve_receive_packet(he, &failed)) )
{ //do something with the data - here just dump to raw H.264 file
cout << " encoded in: " << packet->size;
out_file.write((const char*)packet->data, packet->size);
}
if(failed != HVE_OK)
{
cerr << "failed to encode frame" << endl;
break;
}
}
//flush the encoder by sending NULL frame
hve_send_frame(he, NULL);
//drain the encoder from buffered frames
while( (packet=hve_receive_packet(he, &failed)) )
{
cout << endl << "encoded in: " << packet->size;
out_file.write((const char*)packet->data, packet->size);
}
cout << endl;
delete [] color_data;
//all the requested frames processed?
return f==frames;
}
void dump_frame_info(rs2::video_frame &f)
{
cout << endl << f.get_frame_number ()
<< ": width " << f.get_width() << " height " << f.get_height()
<< " stride=" << f.get_stride_in_bytes() << " bytes "
<< f.get_stride_in_bytes() * f.get_height();
}
void init_realsense(rs2::pipeline& pipe, const input_args& input)
{
rs2::config cfg;
// depth stream seems to be required for infrared to work
cfg.enable_stream(RS2_STREAM_DEPTH, input.width, input.height, RS2_FORMAT_Z16, input.framerate);
cfg.enable_stream(RS2_STREAM_INFRARED, 1, input.width, input.height, RS2_FORMAT_Y8, input.framerate);
rs2::pipeline_profile profile = pipe.start(cfg);
}
int process_user_input(int argc, char* argv[], input_args* input, hve_config *config)
{
if(argc < 5)
{
cerr << "Usage: " << argv[0] << " <width> <height> <framerate> <seconds> [device]" << endl;
cerr << endl << "examples: " << endl;
cerr << argv[0] << " 640 360 30 5" << endl;
cerr << argv[0] << " 640 360 30 5 /dev/dri/renderD128" << endl;
return -1;
}
config->width = input->width = atoi(argv[1]);
config->height = input->height = atoi(argv[2]);
config->framerate = input->framerate = atoi(argv[3]);
input->seconds = atoi(argv[4]);
config->device = argv[5]; //NULL as last argv argument, or device path
return 0;
}