37 #include <google/protobuf/util/time_util.h>
41 using google::protobuf::util::TimeUtil;
// NOTE(review): fragment of the CVStabilization constructor — the signature and
// body are elided from this view, and source line numbers are fused into the
// text (extraction artifact). Stores a pointer to the caller-owned
// ProcessingController used for progress/error reporting by the methods below.
45 : processingController(&processingController){
// NOTE(review): fragment of CVStabilization::stabilizeClip — the signature and
// many interior lines are elided, and source line numbers are fused into the
// text (extraction artifact). Code kept byte-identical; comments only.
// Clear any previous error state on the shared ProcessingController.
57 processingController->
SetError(
false,
"");
59 start = _start; end = _end;
// Reset running average / maximum motion statistics before tracking.
61 avr_dx=0; avr_dy=0; avr_da=0; max_dx=0; max_dy=0; max_da=0;
// Reader dimensions: every frame is normalized to this size before tracking.
65 cv::Size readerDims(video.
Reader()->info.width, video.
Reader()->info.height);
// No explicit interval requested (or a degenerate one): derive the frame
// range from the clip's own start/end times and FPS (frames are 1-based).
68 if(!process_interval || end <= 1 || end-start == 0){
70 start = (int)(video.
Start() * video.
Reader()->info.fps.ToFloat()) + 1;
71 end = (int)(video.
End() * video.
Reader()->info.fps.ToFloat()) + 1;
// Main tracking pass over every frame in [start, end].
75 for (frame_number = start; frame_number <= end; frame_number++)
82 std::shared_ptr<openshot::Frame> f = video.
GetFrame(frame_number);
85 cv::Mat cvimage = f->GetImageCV();
// Normalize frame size, then convert to greyscale for feature tracking.
87 if(cvimage.size().width != readerDims.width || cvimage.size().height != readerDims.height)
88 cv::resize(cvimage, cvimage, cv::Size(readerDims.width, readerDims.height));
89 cv::cvtColor(cvimage, cvimage, cv::COLOR_RGB2GRAY);
91 if(!TrackFrameFeatures(cvimage, frame_number)){
// Report progress as a 0-100 percentage of the processed interval.
// NOTE(review): divides by (end-start) — presumably guarded by the range
// fix-up above; confirm callers using process_interval cannot reach here
// with end == start.
96 processingController->
SetProgress(uint(100*(frame_number-start)/(end-start)));
// Second pass: accumulate per-frame deltas into a camera trajectory.
100 std::vector <CamTrajectory> trajectory = ComputeFramesTrajectory();
// Normalize stored trajectory/transform values to [0,1] by frame size
// (the loops that bind `dataToNormalize` are elided in this view).
110 dataToNormalize.second.x/=readerDims.width;
111 dataToNormalize.second.y/=readerDims.height;
115 dataToNormalize.second.dx/=readerDims.width;
116 dataToNormalize.second.dy/=readerDims.height;
// NOTE(review): partial extraction — source line numbers are fused into the
// text and several original lines are elided; code kept byte-identical.
// Tracks feature motion from the previous grey frame into `frame`: detects
// corners, runs pyramidal Lucas-Kanade optical flow, fits a partial affine
// transform, and records its per-frame dx/dy/da motion deltas.
121 bool CVStabilization::TrackFrameFeatures(cv::Mat frame,
size_t frameNum){
// Reject fully black frames — nothing to track.
123 if(cv::countNonZero(frame) < 1){
// First frame seen: no previous frame to compare against yet.
128 if(prev_grey.empty()){
134 std::vector <cv::Point2f> prev_corner, cur_corner;
135 std::vector <cv::Point2f> prev_corner2, cur_corner2;
136 std::vector <uchar> status;
137 std::vector <float> err;
// Detect up to 200 strong corners (quality level 0.01, min distance 30 px).
139 cv::goodFeaturesToTrack(prev_grey, prev_corner, 200, 0.01, 30);
// Track those corners into the current frame (pyramidal Lucas-Kanade).
141 cv::calcOpticalFlowPyrLK(prev_grey, frame, prev_corner, cur_corner, status, err);
// Keep only successfully tracked point pairs — NOTE(review): the filter
// condition (presumably on status[i]) is elided in this view; confirm.
143 for(
size_t i=0; i < status.size(); i++) {
145 prev_corner2.push_back(prev_corner[i]);
146 cur_corner2.push_back(cur_corner[i]);
// No usable correspondences — cannot estimate motion for this frame.
150 if(prev_corner2.empty() || cur_corner2.empty()){
// Estimate a partial 2D affine (translation + rotation + uniform scale).
157 cv::Mat T = cv::estimateAffinePartial2D(prev_corner2, cur_corner2);
// estimateAffinePartial2D returns an empty matrix when estimation fails.
161 if(T.size().width == 0 || T.size().height == 0){
// Decompose the 2x3 affine: translation (dx, dy) and rotation angle (da).
173 dx = T.at<
double>(0,2);
174 dy = T.at<
double>(1,2);
175 da = atan2(T.at<
double>(1,0), T.at<
double>(0,0));
// Discard implausibly large jumps (likely tracking failure or scene cut).
// NOTE(review): thresholds compare signed values, not fabs() — large
// negative motion would pass; confirm whether that is intended.
179 if(dx > 200 || dy > 200 || da > 0.1){
// Track the largest absolute per-frame motion seen so far (assignment
// lines that follow each `if` are elided in this view).
187 if(fabs(dx) > max_dx)
189 if(fabs(dy) > max_dy)
191 if(fabs(da) > max_da)
// Current frame becomes the reference for the next call.
197 frame.copyTo(prev_grey);
// Integrates the per-frame motion deltas (dx, dy, da) into a cumulative
// camera trajectory (x, y, a). NOTE(review): fragment — the declarations of
// the x/y/a accumulators and the push into `trajectory` are elided from this
// view, and source line numbers are fused into the text.
202 std::vector<CamTrajectory> CVStabilization::ComputeFramesTrajectory(){
209 vector <CamTrajectory> trajectory;
// Running sum over every stored frame-to-frame transform.
212 for(
size_t i=0; i < prev_to_cur_transform.size(); i++) {
213 x += prev_to_cur_transform[i].dx;
214 y += prev_to_cur_transform[i].dy;
215 a += prev_to_cur_transform[i].da;
// Smooths the raw camera trajectory with a centred moving average of
// half-width `smoothingWindow`, keyed by absolute frame number (i + start).
// NOTE(review): fragment — the sum/count declarations, the count increment,
// and closing braces are elided; source line numbers fused into the text.
223 std::map<size_t,CamTrajectory> CVStabilization::SmoothTrajectory(std::vector <CamTrajectory> &trajectory){
225 std::map <size_t,CamTrajectory> smoothed_trajectory;
227 for(
size_t i=0; i < trajectory.size(); i++) {
// Average over the window [i-smoothingWindow, i+smoothingWindow].
// NOTE(review): `i+j` mixes size_t and a negative int — when j < 0 and
// i is small, i+j wraps to a huge value and fails the bounds test, which
// effectively clamps the window's lower edge; subtle but appears intended.
233 for(
int j=-smoothingWindow; j <= smoothingWindow; j++) {
234 if(i+j < trajectory.size()) {
235 sum_x += trajectory[i+j].x;
236 sum_y += trajectory[i+j].y;
237 sum_a += trajectory[i+j].a;
// Mean over however many in-bounds samples the window contained.
243 double avg_a = sum_a / count;
244 double avg_x = sum_x / count;
245 double avg_y = sum_y / count;
// Store keyed by absolute frame number.
248 smoothed_trajectory[i + start] =
CamTrajectory(avg_x, avg_y, avg_a);
250 return smoothed_trajectory;
// Re-integrates the raw per-frame deltas and offsets each by the difference
// between the smoothed and raw trajectory at that frame, producing the
// corrective per-frame transforms. NOTE(review): fragment — the x/y/a
// accumulator declarations and closing braces are elided from this view,
// and source line numbers are fused into the text.
254 std::map<size_t,TransformParam> CVStabilization::GenNewCamPosition(std::map <size_t,CamTrajectory> &smoothed_trajectory){
255 std::map <size_t,TransformParam> new_prev_to_cur_transform;
262 for(
size_t i=0; i < prev_to_cur_transform.size(); i++) {
// Cumulative (raw) trajectory up to frame i.
263 x += prev_to_cur_transform[i].dx;
264 y += prev_to_cur_transform[i].dy;
265 a += prev_to_cur_transform[i].da;
// Offset needed to move the raw path onto the smoothed path.
268 double diff_x = smoothed_trajectory[i + start].x - x;
269 double diff_y = smoothed_trajectory[i + start].y - y;
270 double diff_a = smoothed_trajectory[i + start].a - a;
// Corrected per-frame delta.
272 double dx = prev_to_cur_transform[i].dx + diff_x;
273 double dy = prev_to_cur_transform[i].dy + diff_y;
274 double da = prev_to_cur_transform[i].da + diff_a;
// Store keyed by absolute frame number.
277 new_prev_to_cur_transform[i + start] =
TransformParam(dx, dy, da);
279 return new_prev_to_cur_transform;
// Fragment of the protobuf save routine (presumably SaveStabilizedData per the
// member list — the enclosing signature is elided from this view). Serializes
// the trajectory + transform maps to the file at `protobuf_data_path`.
287 pb_stabilize::Stabilization stabilizationMessage;
// Walk the trajectory and transform maps in parallel (the transform-map
// iterator and the loop header are elided in this view).
289 std::map<size_t,CamTrajectory>::iterator trajData =
trajectoryData.begin();
294 AddFrameDataToProto(stabilizationMessage.add_frame(), trajData->second, transData->second, trajData->first);
// Stamp the message with the current wall-clock time.
297 *stabilizationMessage.mutable_last_updated() = TimeUtil::SecondsToTimestamp(time(NULL));
// Overwrite the output file with the binary-serialized message.
300 std::fstream output(protobuf_data_path, ios::out | ios::trunc | ios::binary);
301 if (!stabilizationMessage.SerializeToOstream(&output)) {
302 std::cerr <<
"Failed to write protobuf message." << std::endl;
// NOTE(review): ShutdownProtobufLibrary() tears down protobuf state
// process-wide — safe only if no further protobuf use follows; confirm.
307 google::protobuf::ShutdownProtobufLibrary();
// Fragment of AddFrameDataToProto: copies one frame's smoothed trajectory
// values (a, x, y) and corrective transform (da, dx, dy) into the given
// protobuf Frame message, keyed by frame number. Source line numbers are
// fused into the text (extraction artifact); code kept byte-identical.
316 pbFrameData->set_id(frame_number);
// Smoothed camera trajectory for this frame.
319 pbFrameData->set_a(trajData.
a);
320 pbFrameData->set_x(trajData.
x);
321 pbFrameData->set_y(trajData.
y);
// Corrective transform to apply when stabilizing this frame.
324 pbFrameData->set_da(transData.
da);
325 pbFrameData->set_dx(transData.
dx);
326 pbFrameData->set_dy(transData.
dy);
// Fragment of SetJson's error handling — the try block and the handler body
// (which presumably rethrows as an openshot JSON exception) are elided.
363 catch (
const std::exception& e)
// Fragment of SetJsonValue: reads optional settings from the parsed JSON
// root — the protobuf output path and the smoothing-window half-width.
// Absent keys leave the current member values untouched.
374 if (!root[
"protobuf_data_path"].isNull()){
375 protobuf_data_path = (root[
"protobuf_data_path"].asString());
377 if (!root[
"smoothing-window"].isNull()){
378 smoothingWindow = (root[
"smoothing-window"].asInt());
// Fragment of the protobuf load routine (presumably _LoadStabilizedData per
// the member list — the signature is elided). Parses the message from
// `protobuf_data_path` and rebuilds the per-frame trajectory/transform maps
// (the map insertions are elided from this view).
392 pb_stabilize::Stabilization stabilizationMessage;
394 std::fstream input(protobuf_data_path, ios::in | ios::binary);
395 if (!stabilizationMessage.ParseFromIstream(&input)) {
396 std::cerr <<
"Failed to parse protobuf message." << std::endl;
// NOTE(review): frame_size() returns int; comparing it to size_t i is a
// signed/unsigned mix — harmless for non-negative sizes but worth noting.
405 for (
size_t i = 0; i < stabilizationMessage.frame_size(); i++) {
406 const pb_stabilize::Frame& pbFrameData = stabilizationMessage.frame(i);
// Absolute frame id plus the smoothed trajectory values...
409 size_t id = pbFrameData.id();
412 float x = pbFrameData.x();
413 float y = pbFrameData.y();
414 float a = pbFrameData.a();
// ...and the corrective transform for that frame.
420 float dx = pbFrameData.dx();
421 float dy = pbFrameData.dy();
422 float da = pbFrameData.da();
// NOTE(review): same process-wide protobuf shutdown caveat as the save path.
429 google::protobuf::ShutdownProtobufLibrary();
Header file for CVStabilization class.
TransformParam GetTransformParamData(size_t frameId)
bool _LoadStabilizedData()
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
CVStabilization(std::string processInfoJson, ProcessingController &processingController)
Set default smoothing window value to compute stabilization.
void AddFrameDataToProto(pb_stabilize::Frame *pbFrameData, CamTrajectory &trajData, TransformParam &transData, size_t frame_number)
Add frame stabilization data into protobuf message.
std::map< size_t, TransformParam > transformationData
void stabilizeClip(openshot::Clip &video, size_t _start=0, size_t _end=0, bool process_interval=false)
Process clip and store necessary stabilization data.
std::map< size_t, CamTrajectory > trajectoryData
CamTrajectory GetCamTrajectoryTrackedData(size_t frameId)
bool SaveStabilizedData()
void SetJson(const std::string value)
Load JSON string into this object.
void SetError(bool err, std::string message)
float Start() const
Get start position (in seconds) of clip (trim start of video)
This class represents a clip (used to arrange readers on the timeline)
void Open() override
Open the internal reader.
float End() const
Get end position (in seconds) of clip (trim end of video), which can be affected by the time curve.
std::shared_ptr< openshot::Frame > GetFrame(int64_t frame_number) override
Get an openshot::Frame object for a specific frame number of this clip. The image size and number of ...
void Reader(openshot::ReaderBase *new_reader)
Set the current reader.
Exception for invalid JSON.
This namespace is the default namespace for all code in the openshot library.
const Json::Value stringToJson(const std::string value)