#include "stabilizedata.pb.h"
#include <google/protobuf/util/time_util.h>

using google::protobuf::util::TimeUtil;
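// Store a pointer to the caller's ProcessingController, used below for error and progress reporting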
: processingController(&processingController){
    // Clear any previous error state
    processingController->SetError(false, "");
    start = _start; end = _end;
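    // Reset the running average and maximum transform statistics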
    avr_dx = 0; avr_dy = 0; avr_da = 0; max_dx = 0; max_dy = 0; max_da = 0;
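    // Remember the reader's original dimensions; every frame is processed and normalized at this size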
    cv::Size readerDims(video.Reader()->info.width, video.Reader()->info.height);
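    // If no valid interval was requested, process the whole clip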
    if(!process_interval || end <= 1 || end-start == 0){
        // Derive first and last frame numbers from the clip bounds and the reader FPS
        start = (int)(video.Start() * video.Reader()->info.fps.ToFloat()) + 1;
        end = (int)(video.End() * video.Reader()->info.fps.ToFloat()) + 1;
    }
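    // Walk the interval frame by frame, tracking features between consecutive frames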
    for (frame_number = start; frame_number <= end; frame_number++)
        std::shared_ptr<openshot::Frame> f = video.GetFrame(frame_number);
        cv::Mat cvimage = f->GetImageCV();
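        // Match the reader dimensions if needed, then convert to grayscale for feature tracking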
        if(cvimage.size().width != readerDims.width || cvimage.size().height != readerDims.height)
            cv::resize(cvimage, cvimage, cv::Size(readerDims.width, readerDims.height));
        cv::cvtColor(cvimage, cvimage, cv::COLOR_RGB2GRAY);
        if(!TrackFrameFeatures(cvimage, frame_number)){
        // Report progress as a percentage of the processed interval
        processingController->SetProgress(uint(100*(frame_number-start)/(end-start)));
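    // Accumulate the raw frame-to-frame transforms into a global camera trajectory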
    std::vector<CamTrajectory> trajectory = ComputeFramesTrajectory();
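    // Normalize the stored trajectory positions by the frame dimensions so they are resolution independent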
        dataToNormalize.second.x /= readerDims.width;
        dataToNormalize.second.y /= readerDims.height;
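        // Normalize the per-frame transform offsets the same way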
        dataToNormalize.second.dx /= readerDims.width;
        dataToNormalize.second.dy /= readerDims.height;
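// Track the current frame's features and estimate its transform relative to the previous frame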
bool CVStabilization::TrackFrameFeatures(cv::Mat frame, size_t frameNum){
    // Reject frames that are completely black and can't be tracked
    if(cv::countNonZero(frame) < 1){
    // No previous frame yet, so there is nothing to track against
    if(prev_grey.empty()){
    std::vector<cv::Point2f> prev_corner, cur_corner;
    std::vector<cv::Point2f> prev_corner2, cur_corner2;
    std::vector<uchar> status;
    std::vector<float> err;
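    // Detect up to 200 strong corners in the previous frame, then track them into the
    // current frame with pyramidal Lucas-Kanade optical flow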
    cv::goodFeaturesToTrack(prev_grey, prev_corner, 200, 0.01, 30);
    cv::calcOpticalFlowPyrLK(prev_grey, frame, prev_corner, cur_corner, status, err);
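    // Keep only the point pairs that were successfully tracked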
    for(size_t i=0; i < status.size(); i++) {
        if(status[i]) {
            prev_corner2.push_back(prev_corner[i]);
            cur_corner2.push_back(cur_corner[i]);
        }
    }
    if(prev_corner2.empty() || cur_corner2.empty()){
    cv::Mat T = cv::estimateAffinePartial2D(prev_corner2, cur_corner2);
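    // estimateAffinePartial2D returns an empty matrix when it can't find a transform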
    if(T.size().width == 0 || T.size().height == 0){
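    // Decompose the affine matrix into x/y translation and a rotation angle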
    dx = T.at<double>(0,2);
    dy = T.at<double>(1,2);
    da = atan2(T.at<double>(1,0), T.at<double>(0,0));
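    // Guard against implausibly large frame-to-frame motion (likely a tracking failure)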
    if(dx > 200 || dy > 200 || da > 0.1){
    // Track the largest motion components seen so far
    if(fabs(dx) > max_dx)
        max_dx = dx;
    if(fabs(dy) > max_dy)
        max_dy = dy;
    if(fabs(da) > max_da)
        max_da = da;
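    // The current frame becomes the reference frame for the next call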
    frame.copyTo(prev_grey);
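// Integrate the frame-to-frame offsets into an absolute camera trajectory (x, y, angle)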
std::vector<CamTrajectory> CVStabilization::ComputeFramesTrajectory(){
    std::vector<CamTrajectory> trajectory;    // accumulated camera position at every frame
    for(size_t i=0; i < prev_to_cur_transform.size(); i++) {
        x += prev_to_cur_transform[i].dx;
        y += prev_to_cur_transform[i].dy;
        a += prev_to_cur_transform[i].da;
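// Smooth the trajectory with a centered moving average of +/- smoothingWindow frames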
std::map<size_t,CamTrajectory> CVStabilization::SmoothTrajectory(std::vector<CamTrajectory> &trajectory){
    std::map<size_t,CamTrajectory> smoothed_trajectory;
    for(size_t i=0; i < trajectory.size(); i++) {
        // Average over a window of +/- smoothingWindow frames; i is a size_t, so i+j
        // wraps around for negative offsets and is rejected by the bounds check below
        for(int j=-smoothingWindow; j <= smoothingWindow; j++) {
            if(i+j < trajectory.size()) {
                sum_x += trajectory[i+j].x;
                sum_y += trajectory[i+j].y;
                sum_a += trajectory[i+j].a;
        double avg_a = sum_a / count;
        double avg_x = sum_x / count;
        double avg_y = sum_y / count;
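        // Store the smoothed sample keyed by absolute frame number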
        smoothed_trajectory[i + start] = CamTrajectory(avg_x, avg_y, avg_a);
    return smoothed_trajectory;
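// Generate new per-frame transforms so the camera follows the smoothed trajectory instead of the raw one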
std::map<size_t,TransformParam> CVStabilization::GenNewCamPosition(std::map<size_t,CamTrajectory> &smoothed_trajectory){
    std::map<size_t,TransformParam> new_prev_to_cur_transform;
    for(size_t i=0; i < prev_to_cur_transform.size(); i++) {
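        // Re-accumulate the raw camera trajectory up to this frame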
        x += prev_to_cur_transform[i].dx;
        y += prev_to_cur_transform[i].dy;
        a += prev_to_cur_transform[i].da;
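        // Offset between the smoothed and the raw trajectory at this frame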
        double diff_x = smoothed_trajectory[i + start].x - x;
        double diff_y = smoothed_trajectory[i + start].y - y;
        double diff_a = smoothed_trajectory[i + start].a - a;
        double dx = prev_to_cur_transform[i].dx + diff_x;
        double dy = prev_to_cur_transform[i].dy + diff_y;
        double da = prev_to_cur_transform[i].da + diff_a;
        new_prev_to_cur_transform[i + start] = TransformParam(dx, dy, da);
    return new_prev_to_cur_transform;
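// Serialize the computed trajectory and transform data to a protobuf file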
    pb_stabilize::Stabilization stabilizationMessage;
    std::map<size_t,CamTrajectory>::iterator trajData = trajectoryData.begin();
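    // Append one protobuf Frame entry for every stabilized frame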
        AddFrameDataToProto(stabilizationMessage.add_frame(), trajData->second, transData->second, trajData->first);
    *stabilizationMessage.mutable_last_updated() = TimeUtil::SecondsToTimestamp(time(NULL));
    // Write the message to disk, replacing any previous file
    std::fstream output(protobuf_data_path, std::ios::out | std::ios::trunc | std::ios::binary);
    if (!stabilizationMessage.SerializeToOstream(&output)) {
        std::cerr << "Failed to write protobuf message." << std::endl;
    google::protobuf::ShutdownProtobufLibrary();
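// Copy one frame's trajectory and transform values into the protobuf Frame message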
    pbFrameData->set_id(frame_number);
    pbFrameData->set_a(trajData.a);
    pbFrameData->set_x(trajData.x);
    pbFrameData->set_y(trajData.y);
    pbFrameData->set_da(transData.da);
    pbFrameData->set_dx(transData.dx);
    pbFrameData->set_dy(transData.dy);
    catch (const std::exception& e)
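// Read optional settings from the JSON root when the keys are present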
    if (!root["protobuf_data_path"].isNull()){
        protobuf_data_path = (root["protobuf_data_path"].asString());
    }
    if (!root["smoothing-window"].isNull()){
        smoothingWindow = (root["smoothing-window"].asInt());
    }
    pb_stabilize::Stabilization stabilizationMessage;
    // Open and parse the existing stabilization message
    std::fstream input(protobuf_data_path, std::ios::in | std::ios::binary);
    if (!stabilizationMessage.ParseFromIstream(&input)) {
        std::cerr << "Failed to parse protobuf message." << std::endl;
    for (size_t i = 0; i < stabilizationMessage.frame_size(); i++) {
        const pb_stabilize::Frame& pbFrameData = stabilizationMessage.frame(i);
        size_t id = pbFrameData.id();
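        // Camera trajectory sample for this frame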
        float x = pbFrameData.x();
        float y = pbFrameData.y();
        float a = pbFrameData.a();
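        // Frame-to-frame transform offsets for this frame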
        float dx = pbFrameData.dx();
        float dy = pbFrameData.dy();
        float da = pbFrameData.da();
    google::protobuf::ShutdownProtobufLibrary();