// OpenShot Library | libopenshot 0.5.0
// Timeline.cpp
// (Doxygen listing banner: "Go to the documentation of this file.")
9 // Copyright (c) 2008-2019 OpenShot Studios, LLC
10 //
11 // SPDX-License-Identifier: LGPL-3.0-or-later
12 
13 #include "Timeline.h"
14 
15 #include "CacheBase.h"
16 #include "CacheDisk.h"
17 #include "CacheMemory.h"
18 #include "CrashHandler.h"
19 #include "FrameMapper.h"
20 #include "Exceptions.h"
21 
22 #include <QDir>
23 #include <QFileInfo>
24 #include <unordered_map>
25 #include <cmath>
26 #include <cstdint>
27 
28 using namespace openshot;
29 
// Default Constructor for the timeline (which sets the canvas width and height)
// @param width, height   Canvas size in pixels
// @param fps             Timeline frame rate (as a Fraction)
// @param sample_rate     Audio sample rate (Hz)
// @param channels        Number of audio channels
// @param channel_layout  Audio channel layout enum
Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout) :
	is_open(false), auto_map_clips(true), managed_cache(true), path(""), max_time(0.0)
{
	// Create CrashHandler and Attach (incase of errors)
	// NOTE(review): the statement itself (original line 35) is elided in this listing.

	// Init viewport size (curve based, because it can be animated)
	viewport_scale = Keyframe(100.0);
	viewport_x = Keyframe(0.0);
	viewport_y = Keyframe(0.0);

	// Init background color (all zero = black)
	color.red = Keyframe(0.0);
	color.green = Keyframe(0.0);
	color.blue = Keyframe(0.0);

	// Init FileInfo struct (clear all values)
	// NOTE(review): several initializations (original lines 50-51, 56, 60, 62-63)
	// are elided in this listing — confirm against the upstream source.
	info.width = width;
	info.height = height;
	info.fps = fps;
	info.sample_rate = sample_rate;
	info.channels = channels;
	info.channel_layout = channel_layout;
	info.duration = 60 * 30; // 30 minute default duration
	info.has_audio = true;
	info.has_video = true;
	info.display_ratio = openshot::Fraction(width, height);
	// Virtual codec names: this "reader" is not backed by a media file
	info.acodec = "openshot::timeline";
	info.vcodec = "openshot::timeline";

	// Init max image size
	// NOTE(review): the statement itself (original line 68) is elided in this listing.

	// Init cache (memory-backed; owned by this Timeline since managed_cache == true)
	final_cache = new CacheMemory();
	// NOTE(review): a follow-up cache statement (original line 72) is elided.
}
74 
// Delegating constructor that copies parameters from a provided ReaderInfo
// NOTE(review): the constructor's signature line (original line 76) is elided
// in this listing; only the argument list it forwards to the main constructor
// is visible below. Confirm the full declaration against Timeline.h.
	info.width, info.height, info.fps, info.sample_rate,
	info.channels, info.channel_layout) {}
79 
// Constructor for the timeline (which loads a JSON structure from a file path, and initializes a timeline)
// @param projectPath            Path of the .osp (JSON) project file
// @param convert_absolute_paths When true, rewrite relative "image"/"path"
//                               values in the JSON into absolute paths
// @throws InvalidFile when the project file or the OpenShot install
//         (transitions) folder cannot be found
Timeline::Timeline(const std::string& projectPath, bool convert_absolute_paths) :
	is_open(false), auto_map_clips(true), managed_cache(true), path(projectPath), max_time(0.0) {

	// Create CrashHandler and Attach (incase of errors)
	// NOTE(review): the statement itself (original line 85) is elided in this listing.

	// Init final cache as NULL (will be created after loading json)
	final_cache = NULL;

	// Init viewport size (curve based, because it can be animated)
	viewport_scale = Keyframe(100.0);
	viewport_x = Keyframe(0.0);
	viewport_y = Keyframe(0.0);

	// Init background color (all zero = black)
	color.red = Keyframe(0.0);
	color.green = Keyframe(0.0);
	color.blue = Keyframe(0.0);

	// Check if path exists
	QFileInfo filePath(QString::fromStdString(path));
	if (!filePath.exists()) {
		throw InvalidFile("File could not be opened.", path);
	}

	// Check OpenShot Install Path exists
	// NOTE(review): the declaration of `s` (presumably
	// `Settings *s = Settings::Instance();`, original line 107) is elided
	// in this listing.
	QDir openshotPath(QString::fromStdString(s->PATH_OPENSHOT_INSTALL));
	if (!openshotPath.exists()) {
		throw InvalidFile("PATH_OPENSHOT_INSTALL could not be found.", s->PATH_OPENSHOT_INSTALL);
	}
	QDir openshotTransPath(openshotPath.filePath("transitions"));
	if (!openshotTransPath.exists()) {
		throw InvalidFile("PATH_OPENSHOT_INSTALL/transitions could not be found.", openshotTransPath.path().toStdString());
	}

	// Determine asset path ("<basename truncated to 30 chars>_assets",
	// next to the project file); create the folder if missing
	QString asset_name = filePath.baseName().left(30) + "_assets";
	QDir asset_folder(filePath.dir().filePath(asset_name));
	if (!asset_folder.exists()) {
		// Create directory if needed
		asset_folder.mkpath(".");
	}

	// Load UTF-8 project file into QString
	QFile projectFile(QString::fromStdString(path));
	projectFile.open(QFile::ReadOnly);
	QString projectContents = QString::fromUtf8(projectFile.readAll());

	// Convert all relative paths into absolute paths (if requested)
	if (convert_absolute_paths) {

		// Find all "image" or "path" references in JSON (using regex). Must loop through match results
		// due to our path matching needs, which are not possible with the QString::replace() function.
		QRegularExpression allPathsRegex(QStringLiteral("\"(image|path)\":.*?\"(.*?)\""));
		std::vector<QRegularExpressionMatch> matchedPositions;
		QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(projectContents);
		while (i.hasNext()) {
			QRegularExpressionMatch match = i.next();
			if (match.hasMatch()) {
				// Push all match objects into a vector (so we can reverse them later)
				matchedPositions.push_back(match);
			}
		}

		// Reverse the matches (bottom of file to top, so our replacements don't break our match positions)
		std::vector<QRegularExpressionMatch>::reverse_iterator itr;
		for (itr = matchedPositions.rbegin(); itr != matchedPositions.rend(); itr++) {
			QRegularExpressionMatch match = *itr;
			QString relativeKey = match.captured(1); // image or path
			QString relativePath = match.captured(2); // relative file path
			QString absolutePath = "";

			// Find absolute path of all path, image (including special replacements of @assets and @transitions)
			if (relativePath.startsWith("@assets")) {
				absolutePath = QFileInfo(asset_folder.absoluteFilePath(relativePath.replace("@assets", "."))).canonicalFilePath();
			} else if (relativePath.startsWith("@transitions")) {
				absolutePath = QFileInfo(openshotTransPath.absoluteFilePath(relativePath.replace("@transitions", "."))).canonicalFilePath();
			} else {
				absolutePath = QFileInfo(filePath.absoluteDir().absoluteFilePath(relativePath)).canonicalFilePath();
			}

			// Replace path in JSON content, if an absolute path was successfully found
			// (absolutePath stays empty when the file could not be resolved)
			if (!absolutePath.isEmpty()) {
				projectContents.replace(match.capturedStart(0), match.capturedLength(0), "\"" + relativeKey + "\": \"" + absolutePath + "\"");
			}
		}
		// Clear matches
		matchedPositions.clear();
	}

	// Set JSON of project (populates clips, effects, and timeline settings)
	SetJson(projectContents.toStdString());

	// Calculate valid duration and set has_audio and has_video
	// based on content inside this Timeline's clips.
	float calculated_duration = 0.0;
	for (auto clip : clips)
	{
		float clip_last_frame = clip->Position() + clip->Duration();
		if (clip_last_frame > calculated_duration)
			calculated_duration = clip_last_frame;
		if (clip->Reader() && clip->Reader()->info.has_audio)
			info.has_audio = true;
		if (clip->Reader() && clip->Reader()->info.has_video)
			info.has_video = true;

	}
	info.video_length = calculated_duration * info.fps.ToFloat();
	info.duration = calculated_duration;

	// Init FileInfo settings (virtual codec names; not file-backed)
	info.acodec = "openshot::timeline";
	info.vcodec = "openshot::timeline";
	// NOTE(review): one statement (original line 195) is elided here.
	info.has_video = true;
	info.has_audio = true;

	// Init max image size
	// NOTE(review): the statement itself (original line 200) is elided in this listing.

	// Init cache (memory-backed; owned by this Timeline since managed_cache == true)
	final_cache = new CacheMemory();
	// NOTE(review): a follow-up cache statement (original line 204) is elided.
}
206 
// Destructor body
// NOTE(review): the `Timeline::~Timeline()` signature line (original line 207)
// is elided in this listing; the body below belongs to the destructor.
	if (is_open) {
		// Auto Close if not already
		Close();
	}

	// Remove all clips, effects, and frame mappers
	Clear();

	// Destroy previous cache (if managed by timeline)
	if (managed_cache && final_cache) {
		delete final_cache;
		final_cache = NULL;
	}
}
222 
223 // Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox)
224 void Timeline::AddTrackedObject(std::shared_ptr<openshot::TrackedObjectBase> trackedObject){
225 
226  // Search for the tracked object on the map
227  auto iterator = tracked_objects.find(trackedObject->Id());
228 
229  if (iterator != tracked_objects.end()){
230  // Tracked object's id already present on the map, overwrite it
231  iterator->second = trackedObject;
232  }
233  else{
234  // Tracked object's id not present -> insert it on the map
235  tracked_objects[trackedObject->Id()] = trackedObject;
236  }
237 
238  return;
239 }
240 
241 // Return tracked object pointer by it's id
242 std::shared_ptr<openshot::TrackedObjectBase> Timeline::GetTrackedObject(std::string id) const{
243 
244  // Search for the tracked object on the map
245  auto iterator = tracked_objects.find(id);
246 
247  if (iterator != tracked_objects.end()){
248  // Id found, return the pointer to the tracked object
249  std::shared_ptr<openshot::TrackedObjectBase> trackedObject = iterator->second;
250  return trackedObject;
251  }
252  else {
253  // Id not found, return a null pointer
254  return nullptr;
255  }
256 }
257 
258 // Return the ID's of the tracked objects as a list of strings
259 std::list<std::string> Timeline::GetTrackedObjectsIds() const{
260 
261  // Create a list of strings
262  std::list<std::string> trackedObjects_ids;
263 
264  // Iterate through the tracked_objects map
265  for (auto const& it: tracked_objects){
266  // Add the IDs to the list
267  trackedObjects_ids.push_back(it.first);
268  }
269 
270  return trackedObjects_ids;
271 }
272 
#ifdef USE_OPENCV
// Return the trackedObject's properties as a JSON string
// @param id            Id of the tracked object to query
// @param frame_number  Frame whose bounding box should be reported
// @return JSON text with corner coordinates x1/y1/x2/y2 and rotation;
//         all values are 0 when the id is unknown (or no boxes exist)
std::string Timeline::GetTrackedObjectValues(std::string id, int64_t frame_number) const {

	// Start from all-zero values (this is also the "id not found" answer)
	Json::Value trackedObjectJson;
	trackedObjectJson["x1"] = 0;
	trackedObjectJson["y1"] = 0;
	trackedObjectJson["x2"] = 0;
	trackedObjectJson["y2"] = 0;
	trackedObjectJson["rotation"] = 0;

	// Search for the tracked object on the map
	auto iterator = tracked_objects.find(id);
	if (iterator != tracked_objects.end())
	{
		// Id found, get the object pointer and cast it as a TrackedObjectBBox
		std::shared_ptr<TrackedObjectBBox> trackedObject = std::static_pointer_cast<TrackedObjectBBox>(iterator->second);

		// Helper: convert one center/size box into corner coordinates and
		// write it into the JSON object (previously duplicated in two branches)
		const auto write_box = [&trackedObjectJson](const BBox& box) {
			float x1 = box.cx - (box.width/2);
			float y1 = box.cy - (box.height/2);
			float x2 = box.cx + (box.width/2);
			float y2 = box.cy + (box.height/2);
			float rotation = box.angle;

			trackedObjectJson["x1"] = x1;
			trackedObjectJson["y1"] = y1;
			trackedObjectJson["x2"] = x2;
			trackedObjectJson["y2"] = y2;
			trackedObjectJson["rotation"] = rotation;
		};

		if (trackedObject->ExactlyContains(frame_number)) {
			// Use the box at the exact requested frame
			write_box(trackedObject->GetBox(frame_number));
		} else if (!trackedObject->BoxVec.empty()) {
			// Fall back to the first recorded box. The empty() guard fixes a
			// latent bug: the previous code dereferenced BoxVec.begin() even
			// when BoxVec was empty (undefined behavior); now an empty box
			// map yields the all-zero answer instead.
			write_box(trackedObject->BoxVec.begin()->second);
		}
	}

	return trackedObjectJson.toStyledString();
}
#endif
331 
// Add an openshot::Clip to the timeline
// NOTE(review): the signature line (presumably `void Timeline::AddClip(Clip* clip)`,
// original line 333) is elided in this listing.
{
	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// Assign timeline to clip
	clip->ParentTimeline(this);

	// Clear cache of clip and nested reader (if any)
	if (clip->Reader() && clip->Reader()->GetCache())
		clip->Reader()->GetCache()->Clear();

	// All clips should be converted to the frame rate of this timeline
	if (auto_map_clips) {
		// Apply framemapper (or update existing framemapper)
		apply_mapper_to_clip(clip);
	}

	// Add clip to list
	clips.push_back(clip);

	// Sort clips (also refreshes the cached min/max timeline duration)
	sort_clips();
}
357 
// Add an effect to the timeline
// NOTE(review): the signature line (presumably
// `void Timeline::AddEffect(EffectBase* effect)`, original line 359) is elided
// in this listing.
{
	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// Assign timeline to effect
	effect->ParentTimeline(this);

	// Add effect to list
	effects.push_back(effect);

	// Sort effects (also refreshes the cached min/max timeline duration)
	sort_effects();
}
373 
// Remove an effect from the timeline
// NOTE(review): the signature line (presumably
// `void Timeline::RemoveEffect(EffectBase* effect)`, original line 375) is
// elided in this listing.
{
	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	effects.remove(effect);

	// Delete effect object (if timeline allocated it)
	if (allocated_effects.count(effect)) {
		allocated_effects.erase(effect); // erase before nulling the pointer
		delete effect;
		effect = NULL;
	}

	// Sort effects (also refreshes the cached min/max timeline duration)
	sort_effects();
}
392 
// Remove an openshot::Clip to the timeline
// NOTE(review): the signature line (presumably
// `void Timeline::RemoveClip(Clip* clip)`, original line 394) is elided in
// this listing.
{
	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	clips.remove(clip);

	// Delete clip object (if timeline allocated it)
	if (allocated_clips.count(clip)) {
		allocated_clips.erase(clip); // erase before nulling the pointer
		delete clip;
		clip = NULL;
	}

	// Sort clips (also refreshes the cached min/max timeline duration)
	sort_clips();
}
411 
412 // Look up a clip
413 openshot::Clip* Timeline::GetClip(const std::string& id)
414 {
415  // Find the matching clip (if any)
416  for (const auto& clip : clips) {
417  if (clip->Id() == id) {
418  return clip;
419  }
420  }
421  return nullptr;
422 }
423 
// Look up a timeline effect
// NOTE(review): the signature line (presumably
// `openshot::EffectBase* Timeline::GetEffect(const std::string& id)`,
// original line 425) is elided in this listing.
{
	// Find the matching effect (if any)
	for (const auto& effect : effects) {
		if (effect->Id() == id) {
			return effect;
		}
	}
	// No timeline-level effect carries this id
	return nullptr;
}
435 
// Look up an effect that lives on one of this timeline's clips
// NOTE(review): the signature line (presumably
// `openshot::EffectBase* Timeline::GetClipEffect(const std::string& id)`,
// original line 436) is elided in this listing.
{
	// Search all clips for matching effect ID
	for (const auto& clip : clips) {
		const auto e = clip->GetEffect(id);
		if (e != nullptr) {
			return e;
		}
	}
	// No clip owns an effect with this id
	return nullptr;
}
447 
448 // Return the list of effects on all clips
449 std::list<openshot::EffectBase*> Timeline::ClipEffects() const {
450 
451  // Initialize the list
452  std::list<EffectBase*> timelineEffectsList;
453 
454  // Loop through all clips
455  for (const auto& clip : clips) {
456 
457  // Get the clip's list of effects
458  std::list<EffectBase*> clipEffectsList = clip->Effects();
459 
460  // Append the clip's effects to the list
461  timelineEffectsList.insert(timelineEffectsList.end(), clipEffectsList.begin(), clipEffectsList.end());
462  }
463 
464  return timelineEffectsList;
465 }
466 
// Compute the end time of the latest timeline element
// NOTE(review): the signature line (presumably `double Timeline::GetMaxTime()`,
// original line 468) is elided in this listing.
	// Return cached max_time variable (threadsafe; maintained by calculate_max_duration)
	return max_time;
}
472 
// Compute the highest frame# based on the latest time and FPS
// NOTE(review): the signature line (presumably `int64_t Timeline::GetMaxFrame()`,
// original line 474) is elided in this listing.
	const double fps = info.fps.ToDouble();
	const double t = GetMaxTime();
	// Inclusive start, exclusive end -> ceil at the end boundary
	return static_cast<int64_t>(std::ceil(t * fps));
}
480 
// Compute the first frame# based on the first clip position
// NOTE(review): the signature line (presumably `int64_t Timeline::GetMinFrame()`,
// original line 482) is elided in this listing.
	const double fps = info.fps.ToDouble();
	const double t = GetMinTime();
	// Inclusive start -> floor at the start boundary, then 1-index
	return static_cast<int64_t>(std::floor(t * fps)) + 1;
}
488 
// Compute the start time of the first timeline clip
// NOTE(review): the signature line (presumably `double Timeline::GetMinTime()`,
// original line 490) is elided in this listing.
	// Return cached min_time variable (threadsafe; maintained by calculate_max_duration)
	return min_time;
}
494 
// Apply a FrameMapper to a clip which matches the settings of this timeline
// Wraps (or re-configures) the clip's reader so its frame rate / audio match
// the timeline. @param clip  Clip whose reader should be (re)mapped.
void Timeline::apply_mapper_to_clip(Clip* clip)
{
	// Determine type of reader
	ReaderBase* clip_reader = NULL;
	if (clip->Reader()->Name() == "FrameMapper")
	{
		// Get the existing reader
		clip_reader = (ReaderBase*) clip->Reader();

		// Update the mapping
		FrameMapper* clip_mapped_reader = (FrameMapper*) clip_reader;
		// NOTE(review): the ChangeMapping(...) call on clip_mapped_reader
		// (original line 507) is elided in this listing.

	} else {

		// Create a new FrameMapper to wrap the current reader
		// NOTE(review): the `FrameMapper* mapper = new FrameMapper(...)`
		// declaration (original line 512) is elided in this listing;
		// `mapper` below refers to it.
		allocated_frame_mappers.insert(mapper);
		clip_reader = (ReaderBase*) mapper;
	}

	// Update clip reader
	clip->Reader(clip_reader);
}
520 
// Apply the timeline's framerate and samplerate to all clips
// NOTE(review): the signature line (presumably
// `void Timeline::ApplyMapperToClips()`, original line 522) is elided in this
// listing.
{
	// Clear all cached frames (mapped output will differ)
	ClearAllCache();

	// Loop through all clips
	for (auto clip : clips)
	{
		// Apply framemapper (or update existing framemapper)
		apply_mapper_to_clip(clip);
	}
}
534 
535 // Calculate time of a frame number, based on a framerate
536 double Timeline::calculate_time(int64_t number, Fraction rate)
537 {
538  // Get float version of fps fraction
539  double raw_fps = rate.ToFloat();
540 
541  // Return the time (in seconds) of this frame
542  return double(number - 1) / raw_fps;
543 }
544 
// Apply effects to the source frame (if any)
// Applies every timeline-level effect on the given layer that overlaps the
// requested timeline position.
// @param frame                  Frame to run the effects over
// @param timeline_frame_number  Frame position on the timeline
// @param layer                  Only effects on this layer are considered
// @param options                Per-request flags (top-clip / keyframe ordering)
// @return the (possibly replaced) frame
std::shared_ptr<Frame> Timeline::apply_effects(std::shared_ptr<Frame> frame, int64_t timeline_frame_number, int layer, TimelineInfoStruct* options)
{
	// Debug output
	// NOTE(review): the `ZmqLogger::Instance()->AppendDebugMethod(` call line
	// (original line 549) is elided in this listing; the strings below are
	// its arguments.
	"Timeline::apply_effects",
	"frame->number", frame->number,
	"timeline_frame_number", timeline_frame_number,
	"layer", layer);

	// Find Effects at this position and layer
	for (auto effect : effects)
	{
		// Does clip intersect the current requested time
		// (positions/durations in seconds are rounded to 1-indexed frame numbers)
		const double fpsD = info.fps.ToDouble();
		int64_t effect_start_position = static_cast<int64_t>(std::llround(effect->Position() * fpsD)) + 1;
		int64_t effect_end_position = static_cast<int64_t>(std::llround((effect->Position() + effect->Duration()) * fpsD));

		bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->Layer() == layer);

		// Clip is visible
		if (does_effect_intersect)
		{
			// Determine the frame needed for this clip (based on the position on the timeline)
			int64_t effect_start_frame = static_cast<int64_t>(std::llround(effect->Start() * fpsD)) + 1;
			int64_t effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;

			if (!options->is_top_clip)
				continue; // skip effect, if overlapped/covered by another clip on same layer

			if (options->is_before_clip_keyframes != effect->info.apply_before_clip)
				continue; // skip effect, if this filter does not match

			// Debug output
			// NOTE(review): elided AppendDebugMethod( call line (original
			// line 579); arguments below.
			"Timeline::apply_effects (Process Effect)",
			"effect_frame_number", effect_frame_number,
			"does_effect_intersect", does_effect_intersect);

			// Apply the effect to this frame
			frame = effect->GetFrame(frame, effect_frame_number);
		}

	} // end effect loop

	// Return modified frame
	return frame;
}
593 
// Get or generate a blank frame
// Asks the clip for its frame (composited over background_frame); on reader
// errors it falls through and returns `new_frame` unchanged.
// @param background_frame  Timeline frame the clip composites over
// @param clip              Clip to request the frame from
// @param number            Frame number within the clip
// @param options           Per-request compositing flags
std::shared_ptr<Frame> Timeline::GetOrCreateFrame(std::shared_ptr<Frame> background_frame, Clip* clip, int64_t number, openshot::TimelineInfoStruct* options)
{
	std::shared_ptr<Frame> new_frame;

	// Init some basic properties about this frame
	int samples_in_frame = Frame::GetSamplesPerFrame(number, info.fps, info.sample_rate, info.channels);

	try {
		// Debug output
		// NOTE(review): the `ZmqLogger::Instance()->AppendDebugMethod(` call
		// line (original line 604) is elided in this listing; the strings
		// below are its arguments.
		"Timeline::GetOrCreateFrame (from reader)",
		"number", number,
		"samples_in_frame", samples_in_frame);

		// Attempt to get a frame (but this could fail if a reader has just been closed)
		new_frame = std::shared_ptr<Frame>(clip->GetFrame(background_frame, number, options));

		// Return real frame
		return new_frame;

	} catch (const ReaderClosed & e) {
		// Swallowed: reader was closed mid-request; fall through below
	} catch (const OutOfBoundsFrame & e) {
		// Swallowed: requested frame is outside the reader's bounds; fall through
	}

	// Debug output
	// NOTE(review): elided AppendDebugMethod( call line (original line 622);
	// arguments below.
	"Timeline::GetOrCreateFrame (create blank)",
	"number", number,
	"samples_in_frame", samples_in_frame);

	// Create blank frame
	// NOTE(review): as listed, `new_frame` is still a null shared_ptr here
	// (callers such as add_layer check for null) — confirm whether a blank
	// frame construction was elided from this listing.
	return new_frame;
}
630 
// Process a new layer of video or audio
// Composites one clip's frame (video and mixed audio) onto the timeline frame.
// @param new_frame          Timeline frame being built (modified in place)
// @param source_clip        Clip supplying this layer
// @param clip_frame_number  Frame number within the clip
// @param is_top_clip        True when this clip is the top-most on its layer
// @param max_volume         Combined volume of overlapping clips (mixing input)
void Timeline::add_layer(std::shared_ptr<Frame> new_frame, Clip* source_clip, int64_t clip_frame_number, bool is_top_clip, float max_volume)
{
	// Create timeline options (with details about this current frame request)
	TimelineInfoStruct options{};
	options.is_top_clip = is_top_clip;
	options.is_before_clip_keyframes = true;

	// Get the clip's frame, composited on top of the current timeline frame
	std::shared_ptr<Frame> source_frame;
	source_frame = GetOrCreateFrame(new_frame, source_clip, clip_frame_number, &options);

	// No frame found... so bail
	if (!source_frame)
		return;

	// Debug output
	// NOTE(review): the `ZmqLogger::Instance()->AppendDebugMethod(` call lines
	// are elided throughout this listing (original lines 648, 656, 713, 724);
	// the dangling argument lists below belong to those calls.
	"Timeline::add_layer",
	"new_frame->number", new_frame->number,
	"clip_frame_number", clip_frame_number);

	/* COPY AUDIO - with correct volume */
	if (source_clip->Reader()->info.has_audio) {
		// Debug output (arguments of an elided AppendDebugMethod call)
		"Timeline::add_layer (Copy Audio)",
		"source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio,
		"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
		"info.channels", info.channels,
		"clip_frame_number", clip_frame_number);

		// Only mix when channel counts match and the clip's audio is enabled
		if (source_frame->GetAudioChannelsCount() == info.channels && source_clip->has_audio.GetInt(clip_frame_number) != 0)
		{
			// Ensure timeline frame matches the source samples once per frame
			if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount()){
				new_frame->ResizeAudio(info.channels, source_frame->GetAudioSamplesCount(), info.sample_rate, info.channel_layout);
			}

			for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
			{
				// Get volume from previous frame and this frame
				float previous_volume = source_clip->volume.GetValue(clip_frame_number - 1);
				float volume = source_clip->volume.GetValue(clip_frame_number);
				int channel_filter = source_clip->channel_filter.GetInt(clip_frame_number); // optional channel to filter (if not -1)
				int channel_mapping = source_clip->channel_mapping.GetInt(clip_frame_number); // optional channel to map this channel to (if not -1)

				// Apply volume mixing strategy
				if (source_clip->mixing == VOLUME_MIX_AVERAGE && max_volume > 1.0) {
					// Don't allow this clip to exceed 100% (divide volume equally between all overlapping clips with volume
					previous_volume = previous_volume / max_volume;
					volume = volume / max_volume;
				}
				else if (source_clip->mixing == VOLUME_MIX_REDUCE && max_volume > 1.0) {
					// Reduce clip volume by a bit, hoping it will prevent exceeding 100% (but it is very possible it will)
					previous_volume = previous_volume * 0.77;
					volume = volume * 0.77;
				}

				// If channel filter enabled, check for correct channel (and skip non-matching channels)
				if (channel_filter != -1 && channel_filter != channel)
					continue; // skip to next channel

				// If no volume on this frame or previous frame, do nothing
				if (previous_volume == 0.0 && volume == 0.0)
					continue; // skip to next channel

				// If channel mapping disabled, just use the current channel
				if (channel_mapping == -1)
					channel_mapping = channel;

				// Apply ramp to source frame (if needed)
				if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0))
					source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);

				// Copy audio samples (and set initial volume). Mix samples with existing audio samples. The gains are added together, to
				// be sure to set the gain's correctly, so the sum does not exceed 1.0 (of audio distortion will happen).
				new_frame->AddAudio(false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0);
			}
		}
		else
			// Debug output (arguments of an elided AppendDebugMethod call,
			// which forms the entire else-branch)
			"Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
			"source_clip->Reader()->info.has_audio",
			source_clip->Reader()->info.has_audio,
			"source_frame->GetAudioChannelsCount()",
			source_frame->GetAudioChannelsCount(),
			"info.channels", info.channels,
			"clip_frame_number", clip_frame_number);
	}

	// Debug output (arguments of an elided AppendDebugMethod call)
	"Timeline::add_layer (Transform: Composite Image Layer: Completed)",
	"source_frame->number", source_frame->number,
	"new_frame->GetImage()->width()", new_frame->GetWidth(),
	"new_frame->GetImage()->height()", new_frame->GetHeight());
}
730 
// Update the list of 'opened' clips
// Opens a clip that now intersects the requested time, or closes one that no
// longer does, keeping the open_clips map in sync.
// @param clip                 Clip to (possibly) open or close
// @param does_clip_intersect  Whether the clip overlaps the requested time
void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect)
{
	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// NOTE(review): the `ZmqLogger::Instance()->AppendDebugMethod(` call lines
	// (original lines 737 and 769) are elided in this listing; the dangling
	// argument lists below belong to them.
	"Timeline::update_open_clips (before)",
	"does_clip_intersect", does_clip_intersect,
	"closing_clips.size()", closing_clips.size(),
	"open_clips.size()", open_clips.size());

	// is clip already in list?
	bool clip_found = open_clips.count(clip);

	if (clip_found && !does_clip_intersect)
	{
		// Remove clip from 'opened' list, because it's closed now
		open_clips.erase(clip);

		// Close clip
		clip->Close();
	}
	else if (!clip_found && does_clip_intersect)
	{
		// Add clip to 'opened' list, because it's missing
		open_clips[clip] = clip;

		try {
			// Open the clip
			clip->Open();

		} catch (const InvalidFile & e) {
			// Swallowed: an unreadable file leaves the clip unopened (best effort)
		}
	}

	// Debug output (arguments of the second elided AppendDebugMethod call)
	"Timeline::update_open_clips (after)",
	"does_clip_intersect", does_clip_intersect,
	"clip_found", clip_found,
	"closing_clips.size()", closing_clips.size(),
	"open_clips.size()", open_clips.size());
}
776 
777 // Calculate the max and min duration (in seconds) of the timeline, based on all the clips, and cache the value
778 void Timeline::calculate_max_duration() {
779  double last_clip = 0.0;
780  double last_effect = 0.0;
781  double first_clip = std::numeric_limits<double>::max();
782  double first_effect = std::numeric_limits<double>::max();
783 
784  // Find the last and first clip
785  if (!clips.empty()) {
786  // Find the clip with the maximum end frame
787  const auto max_clip = std::max_element(
788  clips.begin(), clips.end(), CompareClipEndFrames());
789  last_clip = (*max_clip)->Position() + (*max_clip)->Duration();
790 
791  // Find the clip with the minimum start position (ignoring layer)
792  const auto min_clip = std::min_element(
793  clips.begin(), clips.end(), [](const openshot::Clip* lhs, const openshot::Clip* rhs) {
794  return lhs->Position() < rhs->Position();
795  });
796  first_clip = (*min_clip)->Position();
797  }
798 
799  // Find the last and first effect
800  if (!effects.empty()) {
801  // Find the effect with the maximum end frame
802  const auto max_effect = std::max_element(
803  effects.begin(), effects.end(), CompareEffectEndFrames());
804  last_effect = (*max_effect)->Position() + (*max_effect)->Duration();
805 
806  // Find the effect with the minimum start position
807  const auto min_effect = std::min_element(
808  effects.begin(), effects.end(), [](const openshot::EffectBase* lhs, const openshot::EffectBase* rhs) {
809  return lhs->Position() < rhs->Position();
810  });
811  first_effect = (*min_effect)->Position();
812  }
813 
814  // Calculate the max and min time
815  max_time = std::max(last_clip, last_effect);
816  min_time = std::min(first_clip, first_effect);
817 
818  // If no clips or effects exist, set min_time to 0
819  if (clips.empty() && effects.empty()) {
820  min_time = 0.0;
821  max_time = 0.0;
822  }
823 }
824 
// Sort clips by position on the timeline
void Timeline::sort_clips()
{
	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// Debug output
	// NOTE(review): the `ZmqLogger::Instance()->AppendDebugMethod(` call line
	// (original line 832) is elided in this listing; the arguments below
	// belong to it.
	"Timeline::SortClips",
	"clips.size()", clips.size());

	// sort clips
	clips.sort(CompareClips());

	// calculate max timeline duration (refreshes cached max_time / min_time)
	calculate_max_duration();
}
842 
843 // Sort effects by position on the timeline
844 void Timeline::sort_effects()
845 {
846  // Get lock (prevent getting frames while this happens)
847  const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);
848 
849  // sort clips
850  effects.sort(CompareEffects());
851 
852  // calculate max timeline duration
853  calculate_max_duration();
854 }
855 
// Clear all clips from timeline
// NOTE(review): the signature line (presumably `void Timeline::Clear()`,
// original line 857) is elided in this listing.
{
	ZmqLogger::Instance()->AppendDebugMethod("Timeline::Clear");

	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// Close all open clips
	for (auto clip : clips)
	{
		update_open_clips(clip, false);

		// Delete clip object (if timeline allocated it)
		bool allocated = allocated_clips.count(clip);
		if (allocated) {
			delete clip;
		}
	}
	// Clear all clips
	clips.clear();
	allocated_clips.clear();

	// Close all effects
	for (auto effect : effects)
	{
		// Delete effect object (if timeline allocated it)
		bool allocated = allocated_effects.count(effect);
		if (allocated) {
			delete effect;
		}
	}
	// Clear all effects
	effects.clear();
	allocated_effects.clear();

	// Delete all FrameMappers (detach the wrapped reader first, then close
	// and destroy the mapper itself)
	for (auto mapper : allocated_frame_mappers)
	{
		mapper->Reader(NULL);
		mapper->Close();
		delete mapper;
	}
	allocated_frame_mappers.clear();
}
901 
// Close the reader (and any resources it was consuming)
// NOTE(review): the signature line (presumably `void Timeline::Close()`,
// original line 903) is elided in this listing.
{
	ZmqLogger::Instance()->AppendDebugMethod("Timeline::Close");

	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// Close all open clips
	for (auto clip : clips)
	{
		// Open or Close this clip, based on if it's intersecting or not
		update_open_clips(clip, false);
	}

	// Mark timeline as closed
	is_open = false;

	// Clear all cache (deep clear, including nested Readers)
	ClearAllCache(true);
}
923 
// Open the reader (and start consuming resources)
// NOTE(review): the signature line (presumably `void Timeline::Open()`,
// original line 925) is elided in this listing. Only the open flag is set;
// individual clips are opened lazily via update_open_clips().
{
	is_open = true;
}
929 
930 // Compare 2 floating point numbers for equality
931 bool Timeline::isEqual(double a, double b)
932 {
933  return fabs(a - b) < 0.000001;
934 }
935 
936 // Get an openshot::Frame object for a specific frame number of this reader.
937 std::shared_ptr<Frame> Timeline::GetFrame(int64_t requested_frame)
938 {
 // NOTE(review): the logger invocation lines are elided from this listing;
 // each "Debug output" section below only emits diagnostics, no side effects.
939  // Adjust out of bounds frame number
940  if (requested_frame < 1)
941  requested_frame = 1;
942 
943  // Check cache
944  std::shared_ptr<Frame> frame;
945  frame = final_cache->GetFrame(requested_frame);
946  if (frame) {
947  // Debug output
949  "Timeline::GetFrame (Cached frame found)",
950  "requested_frame", requested_frame);
951 
952  // Return cached frame
953  return frame;
954  }
955  else
956  {
957  // Prevent async calls to the following code
958  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
959 
 // Double-checked cache lookup: another thread may have produced this
 // frame while we waited for the mutex.
960  // Check cache 2nd time
961  std::shared_ptr<Frame> frame;
962  frame = final_cache->GetFrame(requested_frame);
963  if (frame) {
964  // Debug output
966  "Timeline::GetFrame (Cached frame found on 2nd check)",
967  "requested_frame", requested_frame);
968 
969  // Return cached frame
970  return frame;
971  } else {
972  // Get a list of clips that intersect with the requested section of timeline
973  // This also opens the readers for intersecting clips, and marks non-intersecting clips as 'needs closing'
974  std::vector<Clip *> nearby_clips;
975  nearby_clips = find_intersecting_clips(requested_frame, 1, true);
976 
977  // Debug output
979  "Timeline::GetFrame (processing frame)",
980  "requested_frame", requested_frame,
981  "omp_get_thread_num()", omp_get_thread_num());
982 
983  // Init some basic properties about this frame
984  int samples_in_frame = Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels);
985 
986  // Create blank frame (which will become the requested frame)
987  std::shared_ptr<Frame> new_frame(std::make_shared<Frame>(requested_frame, preview_width, preview_height, "#000000", samples_in_frame, info.channels));
988  new_frame->AddAudioSilence(samples_in_frame);
989  new_frame->SampleRate(info.sample_rate);
990  new_frame->ChannelsLayout(info.channel_layout);
991 
992  // Debug output
994  "Timeline::GetFrame (Adding solid color)",
995  "requested_frame", requested_frame,
996  "info.width", info.width,
997  "info.height", info.height);
998 
999  // Add Background Color to 1st layer (if animated or not black)
1000  if ((color.red.GetCount() > 1 || color.green.GetCount() > 1 || color.blue.GetCount() > 1) ||
1001  (color.red.GetValue(requested_frame) != 0.0 || color.green.GetValue(requested_frame) != 0.0 ||
1002  color.blue.GetValue(requested_frame) != 0.0))
1003  new_frame->AddColor(preview_width, preview_height, color.GetColorHex(requested_frame));
1004 
1005  // Debug output
1007  "Timeline::GetFrame (Loop through clips)",
1008  "requested_frame", requested_frame,
1009  "clips.size()", clips.size(),
1010  "nearby_clips.size()", nearby_clips.size());
1011 
1012  // Precompute per-clip timing for this requested frame
1013  struct ClipInfo {
1014  Clip* clip;
1015  int64_t start_pos;
1016  int64_t end_pos;
1017  int64_t start_frame;
1018  int64_t frame_number;
1019  bool intersects;
1020  };
1021  std::vector<ClipInfo> clip_infos;
1022  clip_infos.reserve(nearby_clips.size());
1023  const double fpsD = info.fps.ToDouble();
1024 
 // Timeline frame numbers are 1-based; llround() keeps the seconds->frame
 // mapping stable against floating point drift (e.g. 29.97 fps positions).
1025  for (auto clip : nearby_clips) {
1026  int64_t start_pos = static_cast<int64_t>(std::llround(clip->Position() * fpsD)) + 1;
1027  int64_t end_pos = static_cast<int64_t>(std::llround((clip->Position() + clip->Duration()) * fpsD));
1028  bool intersects = (start_pos <= requested_frame && end_pos >= requested_frame);
1029  int64_t start_frame = static_cast<int64_t>(std::llround(clip->Start() * fpsD)) + 1;
1030  int64_t frame_number = requested_frame - start_pos + start_frame;
1031  clip_infos.push_back({clip, start_pos, end_pos, start_frame, frame_number, intersects});
1032  }
1033 
1034  // Determine top clip per layer (linear, no nested loop)
1035  std::unordered_map<int, int64_t> top_start_for_layer;
1036  std::unordered_map<int, Clip*> top_clip_for_layer;
1037  for (const auto& ci : clip_infos) {
1038  if (!ci.intersects) continue;
1039  const int layer = ci.clip->Layer();
1040  auto it = top_start_for_layer.find(layer);
1041  if (it == top_start_for_layer.end() || ci.start_pos > it->second) {
1042  top_start_for_layer[layer] = ci.start_pos; // strictly greater to match prior logic
1043  top_clip_for_layer[layer] = ci.clip;
1044  }
1045  }
1046 
1047  // Compute max_volume across all overlapping clips once
1048  float max_volume_sum = 0.0f;
1049  for (const auto& ci : clip_infos) {
1050  if (!ci.intersects) continue;
1051  if (ci.clip->Reader() && ci.clip->Reader()->info.has_audio &&
1052  ci.clip->has_audio.GetInt(ci.frame_number) != 0) {
1053  max_volume_sum += static_cast<float>(ci.clip->volume.GetValue(ci.frame_number));
1054  }
1055  }
 // max_volume_sum is handed to add_layer() below; presumably used there to
 // normalize overlapping audio volumes -- confirm against add_layer().
1056 
1057  // Compose intersecting clips in a single pass
1058  for (const auto& ci : clip_infos) {
1059  // Debug output
1061  "Timeline::GetFrame (Does clip intersect)",
1062  "requested_frame", requested_frame,
1063  "clip->Position()", ci.clip->Position(),
1064  "clip->Duration()", ci.clip->Duration(),
1065  "does_clip_intersect", ci.intersects);
1066 
1067  // Clip is visible
1068  if (ci.intersects) {
1069  // Is this the top clip on its layer?
1070  bool is_top_clip = false;
1071  const int layer = ci.clip->Layer();
1072  auto top_it = top_clip_for_layer.find(layer);
1073  if (top_it != top_clip_for_layer.end())
1074  is_top_clip = (top_it->second == ci.clip);
1075 
1076  // Determine the frame needed for this clip (based on the position on the timeline)
1077  int64_t clip_frame_number = ci.frame_number;
1078 
1079  // Debug output
1081  "Timeline::GetFrame (Calculate clip's frame #)",
1082  "clip->Position()", ci.clip->Position(),
1083  "clip->Start()", ci.clip->Start(),
1084  "info.fps.ToFloat()", info.fps.ToFloat(),
1085  "clip_frame_number", clip_frame_number);
1086 
1087  // Add clip's frame as layer
1088  add_layer(new_frame, ci.clip, clip_frame_number, is_top_clip, max_volume_sum);
1089 
1090  } else {
1091  // Debug output
1093  "Timeline::GetFrame (clip does not intersect)",
1094  "requested_frame", requested_frame,
1095  "does_clip_intersect", ci.intersects);
1096  }
1097 
1098  } // end clip loop
1099 
1100  // Debug output
1102  "Timeline::GetFrame (Add frame to cache)",
1103  "requested_frame", requested_frame,
1104  "info.width", info.width,
1105  "info.height", info.height);
1106 
1107  // Set frame # on mapped frame
1108  new_frame->SetFrameNumber(requested_frame);
1109 
1110  // Add final frame to cache
1111  final_cache->Add(new_frame);
1112 
1113  // Return frame (or blank frame)
1114  return new_frame;
1115  }
1116  }
1117 }
1118 
1119 
1120 // Find intersecting clips (or non intersecting clips)
 // Returns clips whose frame range touches the requested window when
 // include==true, or the clips outside it when include==false. As a side
 // effect, update_open_clips() opens intersecting clips' readers and marks
 // the rest as needing close.
1121 std::vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include)
1122 {
 // NOTE(review): the logger invocation line is elided from this listing; the
 // "Debug output" section below only emits diagnostics.
1123  // Find matching clips
1124  std::vector<Clip*> matching_clips;
1125 
1126  // Calculate time of frame
1127  const int64_t min_requested_frame = requested_frame;
1128  const int64_t max_requested_frame = requested_frame + (number_of_frames - 1);
1129 
1130  // Find Clips at this time
1131  matching_clips.reserve(clips.size());
1132  const double fpsD = info.fps.ToDouble();
1133  for (auto clip : clips)
1134  {
1135  // Does clip intersect the current requested time
1136  int64_t clip_start_position = static_cast<int64_t>(std::llround(clip->Position() * fpsD)) + 1;
1137  int64_t clip_end_position = static_cast<int64_t>(std::llround((clip->Position() + clip->Duration()) * fpsD)) + 1;
1138 
1139  bool does_clip_intersect =
1140  (clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
1141  (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);
 // NOTE(review): since min_requested_frame <= max_requested_frame, this
 // reduces to (start <= max && end >= min), the usual interval overlap
 // test -- the OR terms are redundant but harmless.
1142 
1143  // Debug output
1145  "Timeline::find_intersecting_clips (Is clip near or intersecting)",
1146  "requested_frame", requested_frame,
1147  "min_requested_frame", min_requested_frame,
1148  "max_requested_frame", max_requested_frame,
1149  "clip->Position()", clip->Position(),
1150  "does_clip_intersect", does_clip_intersect);
1151 
1152  // Open (or schedule for closing) this clip, based on if it's intersecting or not
1153  update_open_clips(clip, does_clip_intersect);
1154 
1155  // Clip is visible
1156  if (does_clip_intersect && include)
1157  // Add the intersecting clip
1158  matching_clips.push_back(clip);
1159 
1160  else if (!does_clip_intersect && !include)
1161  // Add the non-intersecting clip
1162  matching_clips.push_back(clip);
1163 
1164  } // end clip loop
1165 
1166  // return list
1167  return matching_clips;
1168 }
1169 
1170 // Set the cache object used by this reader
1171 void Timeline::SetCache(CacheBase* new_cache) {
1172  // Get lock (prevent getting frames while this happens)
1173  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
1174 
1175  // Destroy previous cache (if managed by timeline)
1176  if (managed_cache && final_cache) {
1177  delete final_cache;
1178  final_cache = NULL;
1179  managed_cache = false;
1180  }
1181 
1182  // Set new cache
1183  final_cache = new_cache;
1184 }
1185 
1186 // Generate JSON string of this object
1187 std::string Timeline::Json() const {
1188 
1189  // Return formatted string
1190  return JsonValue().toStyledString();
1191 }
1192 
1193 // Generate Json::Value for this object
1194 Json::Value Timeline::JsonValue() const {
1195 
1196  // Create root json object
1197  Json::Value root = ReaderBase::JsonValue(); // get parent properties
1198  root["type"] = "Timeline";
1199  root["viewport_scale"] = viewport_scale.JsonValue();
1200  root["viewport_x"] = viewport_x.JsonValue();
1201  root["viewport_y"] = viewport_y.JsonValue();
1202  root["color"] = color.JsonValue();
1203  root["path"] = path;
1204 
1205  // Add array of clips
1206  root["clips"] = Json::Value(Json::arrayValue);
1207 
1208  // Find Clips at this time
1209  for (const auto existing_clip : clips)
1210  {
1211  root["clips"].append(existing_clip->JsonValue());
1212  }
1213 
1214  // Add array of effects
1215  root["effects"] = Json::Value(Json::arrayValue);
1216 
1217  // loop through effects
1218  for (const auto existing_effect: effects)
1219  {
1220  root["effects"].append(existing_effect->JsonValue());
1221  }
1222 
1223  // return JsonValue
1224  return root;
1225 }
1226 
1227 // Load JSON string into this object
1228 void Timeline::SetJson(const std::string value) {
1229 
1230  // Get lock (prevent getting frames while this happens)
1231  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
1232 
1233  // Parse JSON string into JSON objects
1234  try
1235  {
1236  const Json::Value root = openshot::stringToJson(value);
1237  // Set all values that match
1238  SetJsonValue(root);
1239  }
1240  catch (const std::exception& e)
1241  {
1242  // Error parsing JSON (or missing keys)
1243  throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
1244  }
1245 }
1246 
1247 // Load Json::Value into this object
 // NOTE(review): several hyperlinked lines are elided from this listing (e.g.
 // the ReaderBase::SetJsonValue(root) call after "Set parent data", and the
 // calls after the duration assignment and "Update preview settings") --
 // consult the original Timeline.cpp before editing this function.
1248 void Timeline::SetJsonValue(const Json::Value root) {
1249 
1250  // Get lock (prevent getting frames while this happens)
1251  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
1252 
1253  // Close timeline before we do anything (this closes all clips)
1254  bool was_open = is_open;
1255  Close();
1256 
1257  // Set parent data
1259 
1260  // Set data from Json (if key is found)
1261  if (!root["path"].isNull())
1262  path = root["path"].asString();
1263 
1264  if (!root["clips"].isNull()) {
1265  // Clear existing clips
1266  clips.clear();
1267 
1268  // loop through clips
1269  for (const Json::Value existing_clip : root["clips"]) {
1270  // Skip NULL nodes
1271  if (existing_clip.isNull()) {
1272  continue;
1273  }
1274 
1275  // Create Clip
1276  Clip *c = new Clip();
1277 
1278  // Keep track of allocated clip objects
 // (presumably freed by the timeline's destructor -- confirm)
1279  allocated_clips.insert(c);
1280 
1281  // When a clip is attached to an object, it searches for the object
1282  // on it's parent timeline. Setting the parent timeline of the clip here
1283  // allows attaching it to an object when exporting the project (because)
1284  // the exporter script initializes the clip and it's effects
1285  // before setting its parent timeline.
1286  c->ParentTimeline(this);
1287 
1288  // Load Json into Clip
1289  c->SetJsonValue(existing_clip);
1290 
1291  // Add Clip to Timeline
1292  AddClip(c);
1293  }
1294  }
1295 
1296  if (!root["effects"].isNull()) {
1297  // Clear existing effects
1298  effects.clear();
1299 
1300  // loop through effects
1301  for (const Json::Value existing_effect :root["effects"]) {
1302  // Skip NULL nodes
1303  if (existing_effect.isNull()) {
1304  continue;
1305  }
1306 
1307  // Create Effect
1308  EffectBase *e = NULL;
1309 
1310  if (!existing_effect["type"].isNull()) {
1311  // Create instance of effect
1312  if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) {
1313 
1314  // Keep track of allocated effect objects
1315  allocated_effects.insert(e);
1316 
1317  // Load Json into Effect
1318  e->SetJsonValue(existing_effect);
1319 
1320  // Add Effect to Timeline
1321  AddEffect(e);
1322  }
1323  }
1324  }
1325  }
1326 
1327  if (!root["duration"].isNull()) {
1328  // Update duration of timeline
1329  info.duration = root["duration"].asDouble();
 // (an elided line here presumably also updates info.video_length -- confirm)
1331  }
1332 
1333  // Update preview settings
 // (elided call here -- presumably resets preview_width/preview_height from
 // info.width/info.height; confirm against the original source)
1336 
1337  // Resort (and recalculate min/max duration)
1338  sort_clips();
1339  sort_effects();
1340 
1341  // Re-open if needed
1342  if (was_open)
1343  Open();
1344 }
1345 
1346 // Apply a special formatted JSON object, which represents a change to the timeline (insert, update, delete)
1347 void Timeline::ApplyJsonDiff(std::string value) {
1348 
1349  // Get lock (prevent getting frames while this happens)
1350  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
1351 
1352  // Parse JSON string into JSON objects
1353  try
1354  {
1355  const Json::Value root = openshot::stringToJson(value);
1356  // Process the JSON change array, loop through each item
1357  for (const Json::Value change : root) {
1358  std::string change_key = change["key"][(uint)0].asString();
1359 
1360  // Process each type of change
1361  if (change_key == "clips")
1362  // Apply to CLIPS
1363  apply_json_to_clips(change);
1364 
1365  else if (change_key == "effects")
1366  // Apply to EFFECTS
1367  apply_json_to_effects(change);
1368 
1369  else
1370  // Apply to TIMELINE
1371  apply_json_to_timeline(change);
1372 
1373  }
1374  }
1375  catch (const std::exception& e)
1376  {
1377  // Error parsing JSON (or missing keys)
1378  throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
1379  }
1380 }
1381 
1382 // Apply JSON diff to clips
 // Handles one diff entry whose key path starts with "clips": inserts,
 // updates, or deletes a clip (or forwards to a clip-scoped effect), and
 // invalidates the affected cache ranges.
 // NOTE(review): cache invalidation below pads the affected frame range by 8
 // frames on each side, presumably to cover frames blended across clip
 // boundaries -- confirm before changing the padding.
1383 void Timeline::apply_json_to_clips(Json::Value change) {
1384 
1385  // Get key and type of change
1386  std::string change_type = change["type"].asString();
1387  std::string clip_id = "";
1388  Clip *existing_clip = NULL;
1389 
1390  // Find id of clip (if any)
1391  for (auto key_part : change["key"]) {
1392  // Get each change
1393  if (key_part.isObject()) {
1394  // Check for id
1395  if (!key_part["id"].isNull()) {
1396  // Set the id
1397  clip_id = key_part["id"].asString();
1398 
1399  // Find matching clip in timeline (if any)
1400  for (auto c : clips)
1401  {
1402  if (c->Id() == clip_id) {
1403  existing_clip = c;
1404  break; // clip found, exit loop
1405  }
1406  }
1407  break; // id found, exit loop
1408  }
1409  }
1410  }
1411 
1412  // Check for a more specific key (targetting this clip's effects)
1413  // For example: ["clips", {"id:123}, "effects", {"id":432}]
1414  if (existing_clip && change["key"].size() == 4 && change["key"][2] == "effects")
1415  {
1416  // This change is actually targetting a specific effect under a clip (and not the clip)
1417  Json::Value key_part = change["key"][3];
1418 
1419  if (key_part.isObject()) {
1420  // Check for id
1421  if (!key_part["id"].isNull())
1422  {
1423  // Set the id
1424  std::string effect_id = key_part["id"].asString();
1425 
1426  // Find matching effect in timeline (if any)
1427  std::list<EffectBase*> effect_list = existing_clip->Effects();
1428  for (auto e : effect_list)
1429  {
1430  if (e->Id() == effect_id) {
1431  // Apply the change to the effect directly
1432  apply_json_to_effects(change, e);
1433 
1434  // Calculate start and end frames that this impacts, and remove those frames from the cache
1435  int64_t new_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1436  int64_t new_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1437  final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
1438 
1439  return; // effect found, don't update clip
1440  }
1441  }
1442  }
1443  }
1444  }
1445 
1446  // Determine type of change operation
1447  if (change_type == "insert") {
1448 
1449  // Create clip
1450  Clip *clip = new Clip();
1451 
1452  // Keep track of allocated clip objects
1453  allocated_clips.insert(clip);
1454 
1455  // Set properties of clip from JSON
1456  clip->SetJsonValue(change["value"]);
1457 
1458  // Add clip to timeline
1459  AddClip(clip);
1460 
1461  } else if (change_type == "update") {
1462 
1463  // Update existing clip
1464  if (existing_clip) {
1465  // Calculate start and end frames prior to the update
1466  int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1467  int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1468 
1469  // Update clip properties from JSON
1470  existing_clip->SetJsonValue(change["value"]);
1471 
1472  // Calculate new start and end frames after the update
1473  int64_t new_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1474  int64_t new_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1475 
1476  // Remove both the old and new ranges from the timeline cache
1477  final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1478  final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
1479 
1480  // Remove cache on clip's Reader (if found)
1481  if (existing_clip->Reader() && existing_clip->Reader()->GetCache()) {
1482  existing_clip->Reader()->GetCache()->Remove(old_starting_frame - 8, old_ending_frame + 8);
1483  existing_clip->Reader()->GetCache()->Remove(new_starting_frame - 8, new_ending_frame + 8);
1484  }
1485 
1486  // Apply framemapper (or update existing framemapper)
1487  if (auto_map_clips) {
1488  apply_mapper_to_clip(existing_clip);
1489  }
1490  }
1491 
1492  } else if (change_type == "delete") {
1493 
1494  // Remove existing clip
1495  if (existing_clip) {
1496  // Remove clip from timeline
1497  RemoveClip(existing_clip);
 // NOTE(review): existing_clip is still dereferenced below, so RemoveClip()
 // apparently detaches without freeing (ownership tracked via
 // allocated_clips) -- confirm.
1498 
1499  // Calculate start and end frames that this impacts, and remove those frames from the cache
1500  int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1501  int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1502  final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1503  }
1504 
1505  }
1506 
1507  // Re-Sort Clips (since they likely changed)
1508  sort_clips();
1509 }
1510 
1511 // Apply JSON diff to effects
1512 void Timeline::apply_json_to_effects(Json::Value change) {
1513 
1514  // Get key and type of change
1515  std::string change_type = change["type"].asString();
1516  EffectBase *existing_effect = NULL;
1517 
1518  // Find id of an effect (if any)
1519  for (auto key_part : change["key"]) {
1520 
1521  if (key_part.isObject()) {
1522  // Check for id
1523  if (!key_part["id"].isNull())
1524  {
1525  // Set the id
1526  std::string effect_id = key_part["id"].asString();
1527 
1528  // Find matching effect in timeline (if any)
1529  for (auto e : effects)
1530  {
1531  if (e->Id() == effect_id) {
1532  existing_effect = e;
1533  break; // effect found, exit loop
1534  }
1535  }
1536  break; // id found, exit loop
1537  }
1538  }
1539  }
1540 
1541  // Now that we found the effect, apply the change to it
1542  if (existing_effect || change_type == "insert") {
1543  // Apply change to effect
1544  apply_json_to_effects(change, existing_effect);
1545  }
1546 }
1547 
1548 // Apply JSON diff to effects (if you already know which effect needs to be updated)
1549 void Timeline::apply_json_to_effects(Json::Value change, EffectBase* existing_effect) {
1550 
1551  // Get key and type of change
1552  std::string change_type = change["type"].asString();
1553 
1554  // Calculate start and end frames that this impacts, and remove those frames from the cache
1555  if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
1556  int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
1557  int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
1558  final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
1559  }
1560 
1561  // Determine type of change operation
1562  if (change_type == "insert") {
1563 
1564  // Determine type of effect
1565  std::string effect_type = change["value"]["type"].asString();
1566 
1567  // Create Effect
1568  EffectBase *e = NULL;
1569 
1570  // Init the matching effect object
1571  if ( (e = EffectInfo().CreateEffect(effect_type)) ) {
1572 
1573  // Keep track of allocated effect objects
1574  allocated_effects.insert(e);
1575 
1576  // Load Json into Effect
1577  e->SetJsonValue(change["value"]);
1578 
1579  // Add Effect to Timeline
1580  AddEffect(e);
1581  }
1582 
1583  } else if (change_type == "update") {
1584 
1585  // Update existing effect
1586  if (existing_effect) {
1587 
1588  // Calculate start and end frames that this impacts, and remove those frames from the cache
1589  int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
1590  int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
1591  final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1592 
1593  // Update effect properties from JSON
1594  existing_effect->SetJsonValue(change["value"]);
1595  }
1596 
1597  } else if (change_type == "delete") {
1598 
1599  // Remove existing effect
1600  if (existing_effect) {
1601 
1602  // Calculate start and end frames that this impacts, and remove those frames from the cache
1603  int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
1604  int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
1605  final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1606 
1607  // Remove effect from timeline
1608  RemoveEffect(existing_effect);
1609  }
1610 
1611  }
1612 
1613  // Re-Sort Effects (since they likely changed)
1614  sort_effects();
1615 }
1616 
1617 // Apply JSON diff to timeline properties
 // NOTE(review): a few hyperlinked lines are elided from this listing (after
 // the duration/width/height assignments) -- likely derived-field updates;
 // consult the original Timeline.cpp before editing.
1618 void Timeline::apply_json_to_timeline(Json::Value change) {
 // Assume the change invalidates cached frames unless proven otherwise below
1619  bool cache_dirty = true;
1620 
1621  // Get key and type of change
1622  std::string change_type = change["type"].asString();
1623  std::string root_key = change["key"][(uint)0].asString();
1624  std::string sub_key = "";
1625  if (change["key"].size() >= 2)
1626  sub_key = change["key"][(uint)1].asString();
1627 
1628  // Determine type of change operation
1629  if (change_type == "insert" || change_type == "update") {
1630 
1631  // INSERT / UPDATE
1632  // Check for valid property
1633  if (root_key == "color")
1634  // Set color
1635  color.SetJsonValue(change["value"]);
1636  else if (root_key == "viewport_scale")
1637  // Set viewport scale
1638  viewport_scale.SetJsonValue(change["value"]);
1639  else if (root_key == "viewport_x")
1640  // Set viewport x offset
1641  viewport_x.SetJsonValue(change["value"]);
1642  else if (root_key == "viewport_y")
1643  // Set viewport y offset
1644  viewport_y.SetJsonValue(change["value"]);
1645  else if (root_key == "duration") {
1646  // Update duration of timeline
1647  info.duration = change["value"].asDouble();
1649 
1650  // We don't want to clear cache for duration adjustments
1651  cache_dirty = false;
1652  }
1653  else if (root_key == "width") {
1654  // Set width
1655  info.width = change["value"].asInt();
1657  }
1658  else if (root_key == "height") {
1659  // Set height
1660  info.height = change["value"].asInt();
1662  }
1663  else if (root_key == "fps" && sub_key == "" && change["value"].isObject()) {
1664  // Set fps fraction
1665  if (!change["value"]["num"].isNull())
1666  info.fps.num = change["value"]["num"].asInt();
1667  if (!change["value"]["den"].isNull())
1668  info.fps.den = change["value"]["den"].asInt();
1669  }
1670  else if (root_key == "fps" && sub_key == "num")
1671  // Set fps.num
1672  info.fps.num = change["value"].asInt();
1673  else if (root_key == "fps" && sub_key == "den")
1674  // Set fps.den
1675  info.fps.den = change["value"].asInt();
1676  else if (root_key == "display_ratio" && sub_key == "" && change["value"].isObject()) {
1677  // Set display_ratio fraction
1678  if (!change["value"]["num"].isNull())
1679  info.display_ratio.num = change["value"]["num"].asInt();
1680  if (!change["value"]["den"].isNull())
1681  info.display_ratio.den = change["value"]["den"].asInt();
1682  }
1683  else if (root_key == "display_ratio" && sub_key == "num")
1684  // Set display_ratio.num
1685  info.display_ratio.num = change["value"].asInt();
1686  else if (root_key == "display_ratio" && sub_key == "den")
1687  // Set display_ratio.den
1688  info.display_ratio.den = change["value"].asInt();
1689  else if (root_key == "pixel_ratio" && sub_key == "" && change["value"].isObject()) {
1690  // Set pixel_ratio fraction
1691  if (!change["value"]["num"].isNull())
1692  info.pixel_ratio.num = change["value"]["num"].asInt();
1693  if (!change["value"]["den"].isNull())
1694  info.pixel_ratio.den = change["value"]["den"].asInt();
1695  }
1696  else if (root_key == "pixel_ratio" && sub_key == "num")
1697  // Set pixel_ratio.num
1698  info.pixel_ratio.num = change["value"].asInt();
1699  else if (root_key == "pixel_ratio" && sub_key == "den")
1700  // Set pixel_ratio.den
1701  info.pixel_ratio.den = change["value"].asInt();
1702 
1703  else if (root_key == "sample_rate")
1704  // Set sample rate
1705  info.sample_rate = change["value"].asInt();
1706  else if (root_key == "channels")
1707  // Set channels
1708  info.channels = change["value"].asInt();
1709  else if (root_key == "channel_layout")
1710  // Set channel layout
1711  info.channel_layout = (ChannelLayout) change["value"].asInt();
1712  else
1713  // Error parsing JSON (or missing keys)
1714  throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
1715 
1716 
1717  } else if (change["type"].asString() == "delete") {
1718 
1719  // DELETE / RESET
1720  // Reset the following properties (since we can't delete them)
1721  if (root_key == "color") {
1722  color = Color();
1723  color.red = Keyframe(0.0);
1724  color.green = Keyframe(0.0);
1725  color.blue = Keyframe(0.0);
1726  }
1727  else if (root_key == "viewport_scale")
 // NOTE(review): reset uses Keyframe(1.0) while the constructor initializes
 // viewport_scale to Keyframe(100.0) -- possibly inconsistent; confirm.
1728  viewport_scale = Keyframe(1.0);
1729  else if (root_key == "viewport_x")
1730  viewport_x = Keyframe(0.0);
1731  else if (root_key == "viewport_y")
1732  viewport_y = Keyframe(0.0);
1733  else
1734  // Error parsing JSON (or missing keys)
1735  throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
1736 
1737  }
1738 
1739  if (cache_dirty) {
1740  // Clear entire cache
1741  ClearAllCache();
1742  }
1743 }
1744 
1745 // Clear all caches
1746 void Timeline::ClearAllCache(bool deep) {
1747 
1748  // Clear primary cache
1749  if (final_cache) {
1750  final_cache->Clear();
1751  }
1752 
1753  // Loop through all clips
1754  try {
1755  for (const auto clip : clips) {
1756  // Clear cache on clip and reader if present
1757  if (clip->Reader()) {
1758  if (auto rc = clip->Reader()->GetCache())
1759  rc->Clear();
1760 
1761  // Clear nested Reader (if deep clear requested)
1762  if (deep && clip->Reader()->Name() == "FrameMapper") {
1763  FrameMapper *nested_reader = static_cast<FrameMapper *>(clip->Reader());
1764  if (nested_reader->Reader()) {
1765  if (auto nc = nested_reader->Reader()->GetCache())
1766  nc->Clear();
1767  }
1768  }
1769  }
1770 
1771  // Clear clip cache
1772  if (auto cc = clip->GetCache())
1773  cc->Clear();
1774  }
1775  } catch (const ReaderClosed & e) {
1776  // ...
1777  }
1778 }
1779 
1780 // Set Max Image Size (used for performance optimization). Convenience function for setting
1781 // Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT.
1782 void Timeline::SetMaxSize(int width, int height) {
1783  // Maintain aspect ratio regardless of what size is passed in
1784  QSize display_ratio_size = QSize(info.width, info.height);
1785  QSize proposed_size = QSize(std::min(width, info.width), std::min(height, info.height));
1786 
1787  // Scale QSize up to proposed size
1788  display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio);
1789 
1790  // Update preview settings
1791  preview_width = display_ratio_size.width();
1792  preview_height = display_ratio_size.height();
1793 }
openshot::stringToJson
const Json::Value stringToJson(const std::string value)
Definition: Json.cpp:16
openshot::Timeline::RemoveClip
void RemoveClip(openshot::Clip *clip)
Remove an openshot::Clip from the timeline.
Definition: Timeline.cpp:394
openshot::FrameMapper::ChangeMapping
void ChangeMapping(Fraction target_fps, PulldownType pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout)
Change frame rate or audio mapping details.
Definition: FrameMapper.cpp:814
openshot::ReaderInfo::sample_rate
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
Definition: ReaderBase.h:60
openshot::EffectInfo
This class returns a listing of all effects supported by libopenshot.
Definition: EffectInfo.h:28
openshot::Fraction::ToFloat
float ToFloat()
Return this fraction as a float (i.e. 1/2 = 0.5)
Definition: Fraction.cpp:35
openshot::Timeline::GetFrame
std::shared_ptr< openshot::Frame > GetFrame(int64_t requested_frame) override
Definition: Timeline.cpp:937
openshot::EffectBase
This abstract class is the base class, used by all effects in libopenshot.
Definition: EffectBase.h:53
openshot::ReaderBase::JsonValue
virtual Json::Value JsonValue() const =0
Generate Json::Value for this object.
Definition: ReaderBase.cpp:106
openshot::Timeline::~Timeline
virtual ~Timeline()
Definition: Timeline.cpp:207
openshot::CacheBase::Clear
virtual void Clear()=0
Clear the cache of all frames.
openshot::CacheBase::GetFrame
virtual std::shared_ptr< openshot::Frame > GetFrame(int64_t frame_number)=0
Get a frame from the cache.
openshot::Timeline::viewport_x
openshot::Keyframe viewport_x
Curve representing the x coordinate for the viewport.
Definition: Timeline.h:324
openshot::TimelineBase::preview_width
int preview_width
Optional preview width of timeline image. If your preview window is smaller than the timeline,...
Definition: TimelineBase.h:44
openshot::CompareClipEndFrames
Definition: Timeline.h:75
openshot::Timeline::SetMaxSize
void SetMaxSize(int width, int height)
Definition: Timeline.cpp:1782
openshot::BBox::height
float height
bounding box height
Definition: TrackedObjectBBox.h:42
openshot::CrashHandler::Instance
static CrashHandler * Instance()
Definition: CrashHandler.cpp:27
openshot::EffectInfo::CreateEffect
EffectBase * CreateEffect(std::string effect_type)
Create an instance of an effect (factory style)
Definition: EffectInfo.cpp:27
openshot::ReaderBase::SetJsonValue
virtual void SetJsonValue(const Json::Value root)=0
Load Json::Value into this object.
Definition: ReaderBase.cpp:157
openshot
This namespace is the default namespace for all code in the openshot library.
Definition: Compressor.h:28
openshot::TimelineBase::preview_height
int preview_height
Optional preview width of timeline image. If your preview window is smaller than the timeline,...
Definition: TimelineBase.h:45
openshot::CacheBase::Add
virtual void Add(std::shared_ptr< openshot::Frame > frame)=0
Add a Frame to the cache.
openshot::Timeline::ApplyJsonDiff
void ApplyJsonDiff(std::string value)
Apply a special formatted JSON object, which represents a change to the timeline (add,...
Definition: Timeline.cpp:1347
openshot::Clip
This class represents a clip (used to arrange readers on the timeline)
Definition: Clip.h:89
openshot::Fraction
This class represents a fraction.
Definition: Fraction.h:30
openshot::BBox::cy
float cy
y-coordinate of the bounding box center
Definition: TrackedObjectBBox.h:40
openshot::ReaderBase::info
openshot::ReaderInfo info
Information about the current media file.
Definition: ReaderBase.h:88
openshot::Settings
This class is contains settings used by libopenshot (and can be safely toggled at any point)
Definition: Settings.h:26
openshot::Timeline::GetMinFrame
int64_t GetMinFrame()
Look up the start frame number of the first element on the timeline (first frame is 1)
Definition: Timeline.cpp:482
Timeline.h
Header file for Timeline class.
openshot::Clip::ParentTimeline
void ParentTimeline(openshot::TimelineBase *new_timeline) override
Set associated Timeline pointer.
Definition: Clip.cpp:445
openshot::Timeline::ClearAllCache
void ClearAllCache(bool deep=false)
Definition: Timeline.cpp:1746
openshot::Timeline::GetTrackedObjectsIds
std::list< std::string > GetTrackedObjectsIds() const
Return the ID's of the tracked objects as a list of strings.
Definition: Timeline.cpp:259
openshot::CompareEffectEndFrames
Like CompareClipEndFrames, but for effects.
Definition: Timeline.h:81
openshot::Keyframe::SetJsonValue
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: KeyFrame.cpp:372
openshot::Clip::Effects
std::list< openshot::EffectBase * > Effects()
Return the list of effects on the timeline.
Definition: Clip.h:243
openshot::ReaderInfo::duration
float duration
Length of time (in seconds)
Definition: ReaderBase.h:43
CacheDisk.h
Header file for CacheDisk class.
openshot::Clip::channel_mapping
openshot::Keyframe channel_mapping
A number representing an audio channel to output (only works when filtering a channel)
Definition: Clip.h:348
openshot::ReaderInfo::has_video
bool has_video
Determines if this file has a video stream.
Definition: ReaderBase.h:40
openshot::ReaderInfo::width
int width
The width of the video (in pixesl)
Definition: ReaderBase.h:46
openshot::ClipBase::Position
void Position(float value)
Set the Id of this clip object
Definition: ClipBase.cpp:19
openshot::CacheBase
All cache managers in libopenshot are based on this CacheBase class.
Definition: CacheBase.h:34
openshot::Clip::SetJsonValue
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
Definition: Clip.cpp:1014
CacheBase.h
Header file for CacheBase class.
openshot::OutOfBoundsFrame
Exception for frames that are out of bounds.
Definition: Exceptions.h:300
openshot::Fraction::ToDouble
double ToDouble() const
Return this fraction as a double (i.e. 1/2 = 0.5)
Definition: Fraction.cpp:40
openshot::Timeline::apply_effects
std::shared_ptr< openshot::Frame > apply_effects(std::shared_ptr< openshot::Frame > frame, int64_t timeline_frame_number, int layer, TimelineInfoStruct *options)
Apply global/timeline effects to the source frame (if any)
Definition: Timeline.cpp:546
openshot::Keyframe::JsonValue
Json::Value JsonValue() const
Generate Json::Value for this object.
Definition: KeyFrame.cpp:339
openshot::CacheBase::Remove
virtual void Remove(int64_t frame_number)=0
Remove a specific frame.
FrameMapper.h
Header file for the FrameMapper class.
openshot::ReaderBase::clip
openshot::ClipBase * clip
Pointer to the parent clip instance (if any)
Definition: ReaderBase.h:80
openshot::CacheBase::SetMaxBytesFromInfo
void SetMaxBytesFromInfo(int64_t number_of_frames, int width, int height, int sample_rate, int channels)
Set maximum bytes to a different amount based on a ReaderInfo struct.
Definition: CacheBase.cpp:28
openshot::Color
This class represents a color (used on the timeline and clips)
Definition: Color.h:27
openshot::ClipBase::SetJsonValue
virtual void SetJsonValue(const Json::Value root)=0
Load Json::Value into this object.
Definition: ClipBase.cpp:80
openshot::ReaderInfo::video_length
int64_t video_length
The number of frames in the video stream.
Definition: ReaderBase.h:53
openshot::ReaderInfo::height
int height
The height of the video (in pixels)
Definition: ReaderBase.h:45
openshot::VOLUME_MIX_REDUCE
@ VOLUME_MIX_REDUCE
Reduce volume by about %25, and then mix (louder, but could cause pops if the sum exceeds 100%)
Definition: Enums.h:71
openshot::BBox::angle
float angle
bounding box rotation angle [degrees]
Definition: TrackedObjectBBox.h:43
openshot::Fraction::num
int num
Numerator for the fraction.
Definition: Fraction.h:32
openshot::Timeline::GetClip
openshot::Clip * GetClip(const std::string &id)
Look up a single clip by ID.
Definition: Timeline.cpp:413
openshot::Fraction::den
int den
Denominator for the fraction.
Definition: Fraction.h:33
openshot::Keyframe
A Keyframe is a collection of Point instances, which is used to vary a number or property over time.
Definition: KeyFrame.h:53
CrashHandler.h
Header file for CrashHandler class.
openshot::Fraction::Reduce
void Reduce()
Reduce this fraction (i.e. 640/480 = 4/3)
Definition: Fraction.cpp:65
openshot::Color::SetJsonValue
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: Color.cpp:117
openshot::Timeline::Open
void Open() override
Open the reader (and start consuming resources)
Definition: Timeline.cpp:925
openshot::Timeline::SetJson
void SetJson(const std::string value) override
Load JSON string into this object.
Definition: Timeline.cpp:1228
openshot::TimelineInfoStruct::is_before_clip_keyframes
bool is_before_clip_keyframes
Is this before clip keyframes are applied.
Definition: TimelineBase.h:35
openshot::Fraction::Reciprocal
Fraction Reciprocal() const
Return the reciprocal as a Fraction.
Definition: Fraction.cpp:78
openshot::ReaderInfo::has_audio
bool has_audio
Determines if this file has an audio stream.
Definition: ReaderBase.h:41
openshot::Timeline::GetTrackedObjectValues
std::string GetTrackedObjectValues(std::string id, int64_t frame_number) const
Return the trackedObject's properties as a JSON string.
Definition: Timeline.cpp:275
openshot::CacheMemory
This class is a memory-based cache manager for Frame objects.
Definition: CacheMemory.h:29
openshot::FrameMapper::Close
void Close() override
Close the openshot::FrameMapper and internal reader.
Definition: FrameMapper.cpp:721
openshot::InvalidJSON
Exception for invalid JSON.
Definition: Exceptions.h:217
openshot::Timeline::GetMaxTime
double GetMaxTime()
Look up the end time of the latest timeline element.
Definition: Timeline.cpp:468
openshot::BBox::width
float width
bounding box width
Definition: TrackedObjectBBox.h:41
openshot::Timeline
This class represents a timeline.
Definition: Timeline.h:154
openshot::Timeline::ClipEffects
std::list< openshot::EffectBase * > ClipEffects() const
Return the list of effects on all clips.
Definition: Timeline.cpp:449
CacheMemory.h
Header file for CacheMemory class.
openshot::Color::green
openshot::Keyframe green
Curve representing the green value (0 - 255)
Definition: Color.h:31
openshot::ReaderInfo
This struct contains info about a media file, such as height, width, frames per second,...
Definition: ReaderBase.h:38
openshot::CompareEffects
Definition: Timeline.h:65
openshot::ReaderInfo::video_timebase
openshot::Fraction video_timebase
The video timebase determines how long each frame stays on the screen.
Definition: ReaderBase.h:55
openshot::Settings::Instance
static Settings * Instance()
Create or get an instance of this logger singleton (invoke the class with this method)
Definition: Settings.cpp:23
openshot::TimelineInfoStruct
This struct contains info about the current Timeline clip instance.
Definition: TimelineBase.h:32
openshot::Timeline::Clear
void Clear()
Clear all clips, effects, and frame mappers from timeline (and free memory)
Definition: Timeline.cpp:857
openshot::Timeline::RemoveEffect
void RemoveEffect(openshot::EffectBase *effect)
Remove an effect from the timeline.
Definition: Timeline.cpp:375
openshot::ClipBase::Start
void Start(float value)
Set start position (in seconds) of clip (trim start of video)
Definition: ClipBase.cpp:42
path
path
Definition: FFmpegWriter.cpp:1468
openshot::Settings::PATH_OPENSHOT_INSTALL
std::string PATH_OPENSHOT_INSTALL
Definition: Settings.h:111
openshot::FrameMapper
This class creates a mapping between 2 different frame rates, applying a specific pull-down technique...
Definition: FrameMapper.h:193
openshot::Timeline::Close
void Close() override
Close the timeline reader (and any resources it was consuming)
Definition: Timeline.cpp:903
openshot::Frame::GetSamplesPerFrame
int GetSamplesPerFrame(openshot::Fraction fps, int sample_rate, int channels)
Calculate the # of samples per video frame (for the current frame number)
Definition: Frame.cpp:484
openshot::Timeline::AddClip
void AddClip(openshot::Clip *clip)
Add an openshot::Clip to the timeline.
Definition: Timeline.cpp:333
openshot::InvalidFile
Exception for files that can not be found or opened.
Definition: Exceptions.h:187
openshot::ZmqLogger::Instance
static ZmqLogger * Instance()
Create or get an instance of this logger singleton (invoke the class with this method)
Definition: ZmqLogger.cpp:35
openshot::ZmqLogger::AppendDebugMethod
void AppendDebugMethod(std::string method_name, std::string arg1_name="", float arg1_value=-1.0, std::string arg2_name="", float arg2_value=-1.0, std::string arg3_name="", float arg3_value=-1.0, std::string arg4_name="", float arg4_value=-1.0, std::string arg5_name="", float arg5_value=-1.0, std::string arg6_name="", float arg6_value=-1.0)
Append debug information.
Definition: ZmqLogger.cpp:178
openshot::Clip::volume
openshot::Keyframe volume
Curve representing the volume (0 to 1)
Definition: Clip.h:331
openshot::ReaderInfo::vcodec
std::string vcodec
The name of the video codec used to encode / decode the video stream.
Definition: ReaderBase.h:52
openshot::Timeline::AddTrackedObject
void AddTrackedObject(std::shared_ptr< openshot::TrackedObjectBase > trackedObject)
Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox)
Definition: Timeline.cpp:224
openshot::Color::JsonValue
Json::Value JsonValue() const
Generate Json::Value for this object.
Definition: Color.cpp:86
openshot::Keyframe::GetInt
int GetInt(int64_t index) const
Get the rounded INT value at a specific index.
Definition: KeyFrame.cpp:282
openshot::BBox
This struct holds the information of a bounding-box.
Definition: TrackedObjectBBox.h:37
openshot::Timeline::color
openshot::Color color
Background color of timeline canvas.
Definition: Timeline.h:328
openshot::ReaderClosed
Exception when a reader is closed, and a frame is requested.
Definition: Exceptions.h:363
openshot::Timeline::GetEffect
openshot::EffectBase * GetEffect(const std::string &id)
Look up a timeline effect by ID.
Definition: Timeline.cpp:425
openshot::ReaderInfo::channel_layout
openshot::ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
Definition: ReaderBase.h:62
openshot::CompareClips
Definition: Timeline.h:49
openshot::Clip::channel_filter
openshot::Keyframe channel_filter
A number representing an audio channel to filter (clears all other channels)
Definition: Clip.h:347
openshot::ClipBase::Id
void Id(std::string value)
Definition: ClipBase.h:94
openshot::Keyframe::GetCount
int64_t GetCount() const
Get the number of points (i.e. # of points)
Definition: KeyFrame.cpp:424
openshot::Timeline::Timeline
Timeline(int width, int height, openshot::Fraction fps, int sample_rate, int channels, openshot::ChannelLayout channel_layout)
Constructor for the timeline (which configures the default frame properties)
Definition: Timeline.cpp:31
openshot::ReaderInfo::fps
openshot::Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
Definition: ReaderBase.h:48
openshot::ReaderBase
This abstract class is the base class, used by all readers in libopenshot.
Definition: ReaderBase.h:75
openshot::Timeline::Json
std::string Json() const override
Generate JSON string of this object.
Definition: Timeline.cpp:1187
openshot::Timeline::GetMaxFrame
int64_t GetMaxFrame()
Look up the end frame number of the latest element on the timeline.
Definition: Timeline.cpp:474
openshot::ClipBase::GetFrame
virtual std::shared_ptr< openshot::Frame > GetFrame(int64_t frame_number)=0
This method is required for all derived classes of ClipBase, and returns a new openshot::Frame object...
openshot::Timeline::SetJsonValue
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
Definition: Timeline.cpp:1248
openshot::VOLUME_MIX_AVERAGE
@ VOLUME_MIX_AVERAGE
Evenly divide the overlapping clips volume keyframes, so that the sum does not exceed 100%.
Definition: Enums.h:70
openshot::Timeline::ApplyMapperToClips
void ApplyMapperToClips()
Apply the timeline's framerate and samplerate to all clips.
Definition: Timeline.cpp:522
openshot::Timeline::viewport_y
openshot::Keyframe viewport_y
Curve representing the y coordinate for the viewport.
Definition: Timeline.h:325
openshot::Clip::has_audio
openshot::Keyframe has_audio
An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes)
Definition: Clip.h:351
openshot::ChannelLayout
ChannelLayout
This enumeration determines the audio channel layout (such as stereo, mono, 5 point surround,...
Definition: ChannelLayouts.h:28
openshot::ReaderInfo::pixel_ratio
openshot::Fraction pixel_ratio
The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
Definition: ReaderBase.h:50
openshot::BBox::cx
float cx
x-coordinate of the bounding box center
Definition: TrackedObjectBBox.h:39
openshot::Clip::Reader
void Reader(openshot::ReaderBase *new_reader)
Set the current reader.
Definition: Clip.cpp:336
openshot::Timeline::GetClipEffect
openshot::EffectBase * GetClipEffect(const std::string &id)
Look up a clip effect by ID.
Definition: Timeline.cpp:436
openshot::Color::red
openshot::Keyframe red
Curve representing the red value (0 - 255)
Definition: Color.h:30
openshot::FrameMapper::Reader
ReaderBase * Reader()
Get the current reader.
Definition: FrameMapper.cpp:65
openshot::ReaderInfo::acodec
std::string acodec
The name of the audio codec used to encode / decode the video stream.
Definition: ReaderBase.h:58
openshot::PULLDOWN_NONE
@ PULLDOWN_NONE
Do not apply pull-down techniques, just repeat or skip entire frames.
Definition: FrameMapper.h:46
openshot::ReaderInfo::display_ratio
openshot::Fraction display_ratio
The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
Definition: ReaderBase.h:51
openshot::TimelineInfoStruct::is_top_clip
bool is_top_clip
Is clip on top (if overlapping another clip)
Definition: TimelineBase.h:34
openshot::InvalidJSONKey
Exception for missing JSON Change key.
Definition: Exceptions.h:262
openshot::Color::GetColorHex
std::string GetColorHex(int64_t frame_number)
Get the HEX value of a color at a specific frame.
Definition: Color.cpp:47
openshot::ReaderInfo::channels
int channels
The number of audio channels used in the audio stream.
Definition: ReaderBase.h:61
openshot::Timeline::viewport_scale
openshot::Keyframe viewport_scale
Curve representing the scale of the viewport (0 to 100)
Definition: Timeline.h:323
openshot::Timeline::AddEffect
void AddEffect(openshot::EffectBase *effect)
Add an effect to the timeline.
Definition: Timeline.cpp:359
openshot::Timeline::JsonValue
Json::Value JsonValue() const override
Generate Json::Value for this object.
Definition: Timeline.cpp:1194
openshot::ReaderBase::GetCache
virtual openshot::CacheBase * GetCache()=0
Get the cache object used by this reader (note: not all readers use cache)
openshot::Color::blue
openshot::Keyframe blue
Curve representing the red value (0 - 255)
Definition: Color.h:32
openshot::Timeline::GetTrackedObject
std::shared_ptr< openshot::TrackedObjectBase > GetTrackedObject(std::string id) const
Return tracked object pointer by it's id.
Definition: Timeline.cpp:242
Exceptions.h
Header file for all Exception classes.
openshot::Clip::mixing
openshot::VolumeMixType mixing
What strategy should be followed when mixing audio with other clips.
Definition: Clip.h:180
openshot::Timeline::GetMinTime
double GetMinTime()
Look up the position/start time of the first timeline element.
Definition: Timeline.cpp:490
openshot::EffectBase::SetJsonValue
virtual void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: EffectBase.cpp:115
openshot::Keyframe::GetValue
double GetValue(int64_t index) const
Get the value at a specific index.
Definition: KeyFrame.cpp:258
openshot::Timeline::SetCache
void SetCache(openshot::CacheBase *new_cache)
Definition: Timeline.cpp:1171
openshot::ReaderBase::getFrameMutex
std::recursive_mutex getFrameMutex
Mutex for multiple threads.
Definition: ReaderBase.h:79