OpenShot Library | libopenshot  0.5.0
Timeline.cpp
Go to the documentation of this file.
1 
9 // Copyright (c) 2008-2019 OpenShot Studios, LLC
10 //
11 // SPDX-License-Identifier: LGPL-3.0-or-later
12 
13 #include "Timeline.h"
14 
15 #include "CacheBase.h"
16 #include "CacheDisk.h"
17 #include "CacheMemory.h"
18 #include "CrashHandler.h"
19 #include "FrameMapper.h"
20 #include "Exceptions.h"
21 
22 #include <algorithm>
23 #include <QDir>
24 #include <QFileInfo>
25 #include <QRegularExpression>
26 #include <unordered_map>
27 #include <cmath>
28 #include <cstdint>
29 
30 using namespace openshot;
31 
// NOTE(review): Doxygen-scraped listing — original source lines 37, 52-53, 58, 62,
// 64-65 and 70 were dropped during extraction (CrashHandler attach, remaining
// ReaderInfo fields, and the SetMaxSize call). Restore them from upstream Timeline.cpp.
32 // Default Constructor for the timeline (which sets the canvas width and height)
33 Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout) :
34  is_open(false), auto_map_clips(true), managed_cache(true), path(""), max_time(0.0), cache_epoch(0), safe_edit_frames_remaining(0)
35 {
36  // Create CrashHandler and Attach (incase of errors)
// NOTE(review): missing line 37 — presumably CrashHandler::Instance(); confirm against upstream
38 
39  // Init viewport size (curve based, because it can be animated)
40  viewport_scale = Keyframe(100.0);
41  viewport_x = Keyframe(0.0);
42  viewport_y = Keyframe(0.0);
43 
44  // Init background color (all channels at 0.0 == black)
45  color.red = Keyframe(0.0);
46  color.green = Keyframe(0.0);
47  color.blue = Keyframe(0.0);
48 
49  // Init FileInfo struct (clear all values)
50  info.width = width;
51  info.height = height;
// NOTE(review): missing lines 52-53 — likely preview_width/preview_height init; confirm
54  info.fps = fps;
55  info.sample_rate = sample_rate;
56  info.channels = channels;
57  info.channel_layout = channel_layout;
// NOTE(review): missing line 58
59  info.duration = 60 * 30; // 30 minute default duration
60  info.has_audio = true;
61  info.has_video = true;
// NOTE(review): missing line 62 — likely info.video_length init
63  info.display_ratio = openshot::Fraction(width, height);
// NOTE(review): missing lines 64-65 — likely display_ratio.Reduce() / pixel_ratio
66  info.acodec = "openshot::timeline";
67  info.vcodec = "openshot::timeline";
68 
69  // Init max image size
// NOTE(review): missing line 70 — the SetMaxSize(...) call was dropped by the scrape
71 
72  // Init cache (sized for at least CACHE_MIN_FRAMES, scaled by CPU count)
73  final_cache = new CacheMemory();
74  const int cache_frames = std::max(Settings::Instance()->CACHE_MIN_FRAMES, OPEN_MP_NUM_PROCESSORS * 4);
75  final_cache->SetMaxBytesFromInfo(cache_frames, info.width, info.height, info.sample_rate, info.channels);
76 }
77 
78 // Delegating constructor that copies parameters from a provided ReaderInfo
// NOTE(review): missing line 79 — the constructor signature (e.g.
// `Timeline::Timeline(const ReaderInfo info) : Timeline(`) was dropped by the scrape;
// only the delegated argument list below survived. Restore from upstream Timeline.cpp.
80  info.width, info.height, info.fps, info.sample_rate,
81  info.channels, info.channel_layout) {}
82 
// NOTE(review): Doxygen-scraped listing — original source lines 88, 110, 198 and 203
// were dropped during extraction. In particular line 110 declared the `s` pointer
// (presumably `Settings *s = Settings::Instance();`) that is used below — TODO confirm
// against upstream Timeline.cpp before compiling.
83 // Constructor for the timeline (which loads a JSON structure from a file path, and initializes a timeline)
84 Timeline::Timeline(const std::string& projectPath, bool convert_absolute_paths) :
85  is_open(false), auto_map_clips(true), managed_cache(true), path(projectPath), max_time(0.0), cache_epoch(0), safe_edit_frames_remaining(0) {
86 
87  // Create CrashHandler and Attach (incase of errors)
// NOTE(review): missing line 88 — presumably CrashHandler::Instance(); confirm
89 
90  // Init final cache as NULL (will be created after loading json)
91  final_cache = NULL;
92 
93  // Init viewport size (curve based, because it can be animated)
94  viewport_scale = Keyframe(100.0);
95  viewport_x = Keyframe(0.0);
96  viewport_y = Keyframe(0.0);
97 
98  // Init background color
99  color.red = Keyframe(0.0);
100  color.green = Keyframe(0.0);
101  color.blue = Keyframe(0.0);
102 
103  // Check if path exists
104  QFileInfo filePath(QString::fromStdString(path));
105  if (!filePath.exists()) {
106  throw InvalidFile("Timeline project file could not be opened.", path);
107  }
108 
109  // Check OpenShot Install Path exists
// NOTE(review): missing line 110 — declaration of `s` (Settings::Instance()) was dropped
111  QDir openshotPath(QString::fromStdString(s->PATH_OPENSHOT_INSTALL));
112  if (!openshotPath.exists()) {
113  throw InvalidFile("PATH_OPENSHOT_INSTALL could not be found.", s->PATH_OPENSHOT_INSTALL);
114  }
115  QDir openshotTransPath(openshotPath.filePath("transitions"));
116  if (!openshotTransPath.exists()) {
117  throw InvalidFile("PATH_OPENSHOT_INSTALL/transitions could not be found.", openshotTransPath.path().toStdString());
118  }
119 
120  // Determine asset path (project base name, truncated to 30 chars, + "_assets")
121  QString asset_name = filePath.baseName().left(30) + "_assets";
122  QDir asset_folder(filePath.dir().filePath(asset_name));
123  if (!asset_folder.exists()) {
124  // Create directory if needed
125  asset_folder.mkpath(".");
126  }
127 
128  // Load UTF-8 project file into QString
129  QFile projectFile(QString::fromStdString(path));
130  projectFile.open(QFile::ReadOnly);
131  QString projectContents = QString::fromUtf8(projectFile.readAll());
132 
133  // Convert all relative paths into absolute paths (if requested)
134  if (convert_absolute_paths) {
135 
136  // Find all "image" or "path" references in JSON (using regex). Must loop through match results
137  // due to our path matching needs, which are not possible with the QString::replace() function.
138  QRegularExpression allPathsRegex(QStringLiteral("\"(image|path)\":.*?\"(.*?)\""));
139  std::vector<QRegularExpressionMatch> matchedPositions;
140  QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(projectContents);
141  while (i.hasNext()) {
142  QRegularExpressionMatch match = i.next();
143  if (match.hasMatch()) {
144  // Push all match objects into a vector (so we can reverse them later)
145  matchedPositions.push_back(match);
146  }
147  }
148 
149  // Reverse the matches (bottom of file to top, so our replacements don't break our match positions)
150  std::vector<QRegularExpressionMatch>::reverse_iterator itr;
151  for (itr = matchedPositions.rbegin(); itr != matchedPositions.rend(); itr++) {
152  QRegularExpressionMatch match = *itr;
153  QString relativeKey = match.captured(1); // image or path
154  QString relativePath = match.captured(2); // relative file path
155  QString absolutePath = "";
156 
157  // Find absolute path of all path, image (including special replacements of @assets and @transitions)
158  if (relativePath.startsWith("@assets")) {
159  absolutePath = QFileInfo(asset_folder.absoluteFilePath(relativePath.replace("@assets", "."))).canonicalFilePath();
160  } else if (relativePath.startsWith("@transitions")) {
161  absolutePath = QFileInfo(openshotTransPath.absoluteFilePath(relativePath.replace("@transitions", "."))).canonicalFilePath();
162  } else {
163  absolutePath = QFileInfo(filePath.absoluteDir().absoluteFilePath(relativePath)).canonicalFilePath();
164  }
165 
166  // Replace path in JSON content, if an absolute path was successfully found
167  if (!absolutePath.isEmpty()) {
168  projectContents.replace(match.capturedStart(0), match.capturedLength(0), "\"" + relativeKey + "\": \"" + absolutePath + "\"");
169  }
170  }
171  // Clear matches
172  matchedPositions.clear();
173  }
174 
175  // Set JSON of project
176  SetJson(projectContents.toStdString());
177 
178  // Calculate valid duration and set has_audio and has_video
179  // based on content inside this Timeline's clips.
180  float calculated_duration = 0.0;
181  for (auto clip : clips)
182  {
183  float clip_last_frame = clip->Position() + clip->Duration();
184  if (clip_last_frame > calculated_duration)
185  calculated_duration = clip_last_frame;
186  if (clip->Reader() && clip->Reader()->info.has_audio)
187  info.has_audio = true;
188  if (clip->Reader() && clip->Reader()->info.has_video)
189  info.has_video = true;
190 
191  }
192  info.video_length = calculated_duration * info.fps.ToFloat();
193  info.duration = calculated_duration;
194 
195  // Init FileInfo settings
196  info.acodec = "openshot::timeline";
197  info.vcodec = "openshot::timeline";
// NOTE(review): missing line 198
199  info.has_video = true;
200  info.has_audio = true;
201 
202  // Init max image size
// NOTE(review): missing line 203 — the SetMaxSize(...) call was dropped by the scrape
204 
205  // Init cache
206  final_cache = new CacheMemory();
207  const int cache_frames = std::max(Settings::Instance()->CACHE_MIN_FRAMES, OPEN_MP_NUM_PROCESSORS * 4);
208  final_cache->SetMaxBytesFromInfo(cache_frames, info.width, info.height, info.sample_rate, info.channels);
209 }
210 
// NOTE(review): destructor body — the signature line 211 (`Timeline::~Timeline()`)
// was dropped by the Doxygen scrape; restore from upstream Timeline.cpp.
// Closes the timeline if open, releases all clips/effects/mappers, and deletes the
// final cache only when this Timeline owns it (managed_cache).
212  if (is_open) {
213  // Auto Close if not already
214  Close();
215  }
216 
217  // Remove all clips, effects, and frame mappers
218  Clear();
219 
220  // Destroy previous cache (if managed by timeline)
221  if (managed_cache && final_cache) {
222  delete final_cache;
223  final_cache = NULL;
224  }
225 }
226 
227 // Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox)
228 void Timeline::AddTrackedObject(std::shared_ptr<openshot::TrackedObjectBase> trackedObject){
229 
230  // Search for the tracked object on the map
231  auto iterator = tracked_objects.find(trackedObject->Id());
232 
233  if (iterator != tracked_objects.end()){
234  // Tracked object's id already present on the map, overwrite it
235  iterator->second = trackedObject;
236  }
237  else{
238  // Tracked object's id not present -> insert it on the map
239  tracked_objects[trackedObject->Id()] = trackedObject;
240  }
241 
242  return;
243 }
244 
245 // Return tracked object pointer by it's id
246 std::shared_ptr<openshot::TrackedObjectBase> Timeline::GetTrackedObject(std::string id) const{
247 
248  // Search for the tracked object on the map
249  auto iterator = tracked_objects.find(id);
250 
251  if (iterator != tracked_objects.end()){
252  // Id found, return the pointer to the tracked object
253  std::shared_ptr<openshot::TrackedObjectBase> trackedObject = iterator->second;
254  return trackedObject;
255  }
256  else {
257  // Id not found, return a null pointer
258  return nullptr;
259  }
260 }
261 
262 // Return the ID's of the tracked objects as a list of strings
263 std::list<std::string> Timeline::GetTrackedObjectsIds() const{
264 
265  // Create a list of strings
266  std::list<std::string> trackedObjects_ids;
267 
268  // Iterate through the tracked_objects map
269  for (auto const& it: tracked_objects){
270  // Add the IDs to the list
271  trackedObjects_ids.push_back(it.first);
272  }
273 
274  return trackedObjects_ids;
275 }
276 
#ifdef USE_OPENCV
// Return the trackedObject's properties as a JSON string.
// Looks up the tracked object by `id`; when found, serializes the bounding box
// for `frame_number` (or, if that frame has no keyframe, the object's first box)
// as corner coordinates plus rotation. Unknown IDs yield an all-zero box.
std::string Timeline::GetTrackedObjectValues(std::string id, int64_t frame_number) const {

	// Initialize the JSON object
	Json::Value trackedObjectJson;

	// Search for the tracked object on the map
	auto iterator = tracked_objects.find(id);

	if (iterator != tracked_objects.end())
	{
		// Id found, Get the object pointer and cast it as a TrackedObjectBBox
		std::shared_ptr<TrackedObjectBBox> trackedObject = std::static_pointer_cast<TrackedObjectBBox>(iterator->second);

		// Use the box keyframed at the requested frame when one exists;
		// otherwise fall back to the object's first box.
		// NOTE(review): assumes BoxVec is non-empty for any registered object —
		// begin() on an empty container would be undefined; confirm upstream invariant.
		const BBox box = trackedObject->ExactlyContains(frame_number)
			? trackedObject->GetBox(frame_number)
			: trackedObject->BoxVec.begin()->second;

		// Convert center/size representation into corner coordinates
		const float x1 = box.cx - (box.width/2);
		const float y1 = box.cy - (box.height/2);
		const float x2 = box.cx + (box.width/2);
		const float y2 = box.cy + (box.height/2);
		const float rotation = box.angle;

		trackedObjectJson["x1"] = x1;
		trackedObjectJson["y1"] = y1;
		trackedObjectJson["x2"] = x2;
		trackedObjectJson["y2"] = y2;
		trackedObjectJson["rotation"] = rotation;
	}
	else {
		// Id not found, return all 0 values
		trackedObjectJson["x1"] = 0;
		trackedObjectJson["y1"] = 0;
		trackedObjectJson["x2"] = 0;
		trackedObjectJson["y2"] = 0;
		trackedObjectJson["rotation"] = 0;
	}

	return trackedObjectJson.toStyledString();
}
#endif
335 
336 // Add an openshot::Clip to the timeline
// NOTE(review): missing line 337 — the function signature (presumably
// `void Timeline::AddClip(Clip* clip)`) was dropped by the Doxygen scrape.
// Takes the frame mutex, parents the clip to this timeline, clears its reader
// cache, optionally wraps it in a FrameMapper, then appends and re-sorts.
338 {
339  // Get lock (prevent getting frames while this happens)
340  const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);
341 
342  // Assign timeline to clip
343  clip->ParentTimeline(this);
344 
345  // Clear cache of clip and nested reader (if any)
346  if (clip->Reader() && clip->Reader()->GetCache())
347  clip->Reader()->GetCache()->Clear();
348 
349  // All clips should be converted to the frame rate of this timeline
350  if (auto_map_clips) {
351  // Apply framemapper (or update existing framemapper)
352  apply_mapper_to_clip(clip);
353  }
354 
355  // Add clip to list
356  clips.push_back(clip);
357 
358  // Sort clips
359  sort_clips();
360 }
361 
362 // Add an effect to the timeline
// NOTE(review): missing line 363 — the function signature (presumably
// `void Timeline::AddEffect(EffectBase* effect)`) was dropped by the Doxygen scrape.
364 {
365  // Get lock (prevent getting frames while this happens)
366  const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);
367 
368  // Assign timeline to effect
369  effect->ParentTimeline(this);
370 
371  // Add effect to list
372  effects.push_back(effect);
373 
374  // Sort effects
375  sort_effects();
376 }
377 
378 // Remove an effect from the timeline
// NOTE(review): missing line 379 — the function signature (presumably
// `void Timeline::RemoveEffect(EffectBase* effect)`) was dropped by the Doxygen scrape.
// Removes the effect from the list and frees it only if this timeline allocated it.
380 {
381  // Get lock (prevent getting frames while this happens)
382  const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);
383 
384  effects.remove(effect);
385 
386  // Delete effect object (if timeline allocated it)
387  if (allocated_effects.count(effect)) {
388  allocated_effects.erase(effect); // erase before nulling the pointer
389  delete effect;
390  effect = NULL;
391  }
392 
393  // Sort effects
394  sort_effects();
395 }
396 
397 // Remove an openshot::Clip to the timeline
// NOTE(review): missing line 398 — the function signature (presumably
// `void Timeline::RemoveClip(Clip* clip)`) was dropped by the Doxygen scrape.
// Removes the clip from the list and frees it only if this timeline allocated it.
399 {
400  // Get lock (prevent getting frames while this happens)
401  const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);
402 
403  clips.remove(clip);
404 
405  // Delete clip object (if timeline allocated it)
406  if (allocated_clips.count(clip)) {
407  allocated_clips.erase(clip); // erase before nulling the pointer
408  delete clip;
409  clip = NULL;
410  }
411 
412  // Sort clips
413  sort_clips();
414 }
415 
416 // Look up a clip
417 openshot::Clip* Timeline::GetClip(const std::string& id)
418 {
419  // Find the matching clip (if any)
420  for (const auto& clip : clips) {
421  if (clip->Id() == id) {
422  return clip;
423  }
424  }
425  return nullptr;
426 }
427 
428 // Look up a timeline effect
// NOTE(review): missing line 429 — the function signature (presumably
// `openshot::EffectBase* Timeline::GetEffect(const std::string& id)`) was dropped
// by the Doxygen scrape. Returns nullptr when no timeline effect matches the ID.
430 {
431  // Find the matching effect (if any)
432  for (const auto& effect : effects) {
433  if (effect->Id() == id) {
434  return effect;
435  }
436  }
437  return nullptr;
438 }
439 
// NOTE(review): missing line 440 — the function signature (presumably
// `openshot::EffectBase* Timeline::GetClipEffect(const std::string& id)`) was
// dropped by the Doxygen scrape. Searches every clip's effect list for the ID;
// returns nullptr when nothing matches.
441 {
442  // Search all clips for matching effect ID
443  for (const auto& clip : clips) {
444  const auto e = clip->GetEffect(id);
445  if (e != nullptr) {
446  return e;
447  }
448  }
449  return nullptr;
450 }
451 
452 // Return the list of effects on all clips
453 std::list<openshot::EffectBase*> Timeline::ClipEffects() const {
454 
455  // Initialize the list
456  std::list<EffectBase*> timelineEffectsList;
457 
458  // Loop through all clips
459  for (const auto& clip : clips) {
460 
461  // Get the clip's list of effects
462  std::list<EffectBase*> clipEffectsList = clip->Effects();
463 
464  // Append the clip's effects to the list
465  timelineEffectsList.insert(timelineEffectsList.end(), clipEffectsList.begin(), clipEffectsList.end());
466  }
467 
468  return timelineEffectsList;
469 }
470 
471 // Compute the end time of the latest timeline element
// NOTE(review): missing line 472 — the function signature (presumably
// `double Timeline::GetMaxTime() {`) was dropped by the Doxygen scrape.
473  // Return cached max_time variable (threadsafe)
474  return max_time;
475 }
476 
477 // Compute the highest frame# based on the latest time and FPS
// NOTE(review): missing line 478 — the function signature (presumably
// `int64_t Timeline::GetMaxFrame() {`) was dropped by the Doxygen scrape.
479  const double fps = info.fps.ToDouble();
480  const double t = GetMaxTime();
481  // Inclusive start, exclusive end -> ceil at the end boundary
482  return static_cast<int64_t>(std::ceil(t * fps));
483 }
484 
485 // Compute the first frame# based on the first clip position
// NOTE(review): missing line 486 — the function signature (presumably
// `int64_t Timeline::GetMinFrame() {`) was dropped by the Doxygen scrape.
487  const double fps = info.fps.ToDouble();
488  const double t = GetMinTime();
489  // Inclusive start -> floor at the start boundary, then 1-index
490  return static_cast<int64_t>(std::floor(t * fps)) + 1;
491 }
492 
493 // Compute the start time of the first timeline clip
// NOTE(review): missing line 494 — the function signature (presumably
// `double Timeline::GetMinTime() {`) was dropped by the Doxygen scrape.
495  // Return cached min_time variable (threadsafe)
496  return min_time;
497 }
498 
499 // Apply a FrameMapper to a clip which matches the settings of this timeline
// NOTE(review): Doxygen-scraped listing — original lines 511 (the ChangeMapping(...)
// call on clip_mapped_reader) and 516 (the `FrameMapper* mapper = new FrameMapper(...)`
// allocation used below) were dropped; `mapper` is otherwise undeclared. Restore
// from upstream Timeline.cpp before compiling.
500 void Timeline::apply_mapper_to_clip(Clip* clip)
501 {
502  // Determine type of reader
503  ReaderBase* clip_reader = NULL;
504  if (clip->Reader()->Name() == "FrameMapper")
505  {
506  // Get the existing reader
507  clip_reader = (ReaderBase*) clip->Reader();
508 
509  // Update the mapping
510  FrameMapper* clip_mapped_reader = (FrameMapper*) clip_reader;
// NOTE(review): missing line 511 — ChangeMapping call dropped by the scrape
512 
513  } else {
514 
515  // Create a new FrameMapper to wrap the current reader
// NOTE(review): missing line 516 — FrameMapper allocation dropped by the scrape
517  allocated_frame_mappers.insert(mapper);
518  clip_reader = (ReaderBase*) mapper;
519  }
520 
521  // Update clip reader
522  clip->Reader(clip_reader);
523 }
524 
525 // Apply the timeline's framerate and samplerate to all clips
// NOTE(review): missing line 526 — the function signature (presumably
// `void Timeline::ApplyMapperToClips()`) was dropped by the Doxygen scrape.
527 {
528  // Clear all cached frames
529  ClearAllCache();
530 
531  // Loop through all clips
532  for (auto clip : clips)
533  {
534  // Apply framemapper (or update existing framemapper)
535  apply_mapper_to_clip(clip);
536  }
537 }
538 
539 // Calculate time of a frame number, based on a framerate
540 double Timeline::calculate_time(int64_t number, Fraction rate)
541 {
542  // Get float version of fps fraction
543  double raw_fps = rate.ToFloat();
544 
545  // Return the time (in seconds) of this frame
546  return double(number - 1) / raw_fps;
547 }
548 
549 // Apply effects to the source frame (if any)
// NOTE(review): Doxygen-scraped listing — original lines 553 and 583 (the
// `ZmqLogger::Instance()->AppendDebugMethod(` call openings) were dropped; the
// orphaned argument lists below belong to them. Restore from upstream Timeline.cpp.
550 std::shared_ptr<Frame> Timeline::apply_effects(std::shared_ptr<Frame> frame, int64_t timeline_frame_number, int layer, TimelineInfoStruct* options)
551 {
552  // Debug output
554  "Timeline::apply_effects",
555  "frame->number", frame->number,
556  "timeline_frame_number", timeline_frame_number,
557  "layer", layer);
558 
559  // Find Effects at this position and layer
560  for (auto effect : effects)
561  {
562  // Does clip intersect the current requested time
563  const double fpsD = info.fps.ToDouble();
564  int64_t effect_start_position = static_cast<int64_t>(std::llround(effect->Position() * fpsD)) + 1;
565  int64_t effect_end_position = static_cast<int64_t>(std::llround((effect->Position() + effect->Duration()) * fpsD));
566 
567  bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->Layer() == layer);
568 
569  // Clip is visible
570  if (does_effect_intersect)
571  {
572  // Determine the frame needed for this clip (based on the position on the timeline)
573  int64_t effect_start_frame = static_cast<int64_t>(std::llround(effect->Start() * fpsD)) + 1;
574  int64_t effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;
575 
576  if (!options->is_top_clip)
577  continue; // skip effect, if overlapped/covered by another clip on same layer
578 
579  if (options->is_before_clip_keyframes != effect->info.apply_before_clip)
580  continue; // skip effect, if this filter does not match
581 
582  // Debug output
584  "Timeline::apply_effects (Process Effect)",
585  "effect_frame_number", effect_frame_number,
586  "does_effect_intersect", does_effect_intersect);
587 
588  // Apply the effect to this frame
589  frame = effect->ProcessFrame(frame, effect_frame_number);
590  }
591 
592  } // end effect loop
593 
594  // Return modified frame
595  return frame;
596 }
597 
598 // Get or generate a blank frame
// NOTE(review): Doxygen-scraped listing — original lines 608 and 626 (logger call
// openings) were dropped. Note also that after both catch blocks the function falls
// through to `return new_frame;` while new_frame may still be empty — upstream
// creates a blank Frame here; confirm whether that creation line was also dropped.
599 std::shared_ptr<Frame> Timeline::GetOrCreateFrame(std::shared_ptr<Frame> background_frame, Clip* clip, int64_t number, openshot::TimelineInfoStruct* options)
600 {
601  std::shared_ptr<Frame> new_frame;
602 
603  // Init some basic properties about this frame
604  int samples_in_frame = Frame::GetSamplesPerFrame(number, info.fps, info.sample_rate, info.channels);
605 
606  try {
607  // Debug output
609  "Timeline::GetOrCreateFrame (from reader)",
610  "number", number,
611  "samples_in_frame", samples_in_frame);
612 
613  // Attempt to get a frame (but this could fail if a reader has just been closed)
614  new_frame = std::shared_ptr<Frame>(clip->GetFrame(background_frame, number, options));
615 
616  // Return real frame
617  return new_frame;
618 
619  } catch (const ReaderClosed & e) {
620  // ...
621  } catch (const OutOfBoundsFrame & e) {
622  // ...
623  }
624 
625  // Debug output
627  "Timeline::GetOrCreateFrame (create blank)",
628  "number", number,
629  "samples_in_frame", samples_in_frame);
630 
631  // Create blank frame
632  return new_frame;
633 }
634 
635 // Process a new layer of video or audio
// NOTE(review): Doxygen-scraped listing — original lines 653, 661, 718 and 729
// (the `ZmqLogger::Instance()->AppendDebugMethod(` call openings) were dropped;
// the orphaned argument lists below belong to them (including the body of the
// bare `else` branch). Restore from upstream Timeline.cpp before compiling.
636 void Timeline::add_layer(std::shared_ptr<Frame> new_frame, Clip* source_clip, int64_t clip_frame_number, bool is_top_clip, bool force_safe_composite, float max_volume)
637 {
638  // Create timeline options (with details about this current frame request)
639  TimelineInfoStruct options{};
640  options.is_top_clip = is_top_clip;
641  options.is_before_clip_keyframes = true;
642  options.force_safe_composite = force_safe_composite;
643 
644  // Get the clip's frame, composited on top of the current timeline frame
645  std::shared_ptr<Frame> source_frame;
646  source_frame = GetOrCreateFrame(new_frame, source_clip, clip_frame_number, &options);
647 
648  // No frame found... so bail
649  if (!source_frame)
650  return;
651 
652  // Debug output
654  "Timeline::add_layer",
655  "new_frame->number", new_frame->number,
656  "clip_frame_number", clip_frame_number);
657 
658  /* COPY AUDIO - with correct volume */
659  if (source_clip->Reader()->info.has_audio) {
660  // Debug output
662  "Timeline::add_layer (Copy Audio)",
663  "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio,
664  "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
665  "info.channels", info.channels,
666  "clip_frame_number", clip_frame_number);
667 
668  if (source_frame->GetAudioChannelsCount() == info.channels && source_clip->has_audio.GetInt(clip_frame_number) != 0)
669  {
670  // Ensure timeline frame matches the source samples once per frame
671  if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount()){
672  new_frame->ResizeAudio(info.channels, source_frame->GetAudioSamplesCount(), info.sample_rate, info.channel_layout);
673  }
674 
675  for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
676  {
677  // Get volume from previous frame and this frame
678  float previous_volume = source_clip->volume.GetValue(clip_frame_number - 1);
679  float volume = source_clip->volume.GetValue(clip_frame_number);
680  int channel_filter = source_clip->channel_filter.GetInt(clip_frame_number); // optional channel to filter (if not -1)
681  int channel_mapping = source_clip->channel_mapping.GetInt(clip_frame_number); // optional channel to map this channel to (if not -1)
682 
683  // Apply volume mixing strategy
684  if (source_clip->mixing == VOLUME_MIX_AVERAGE && max_volume > 1.0) {
685  // Don't allow this clip to exceed 100% (divide volume equally between all overlapping clips with volume
686  previous_volume = previous_volume / max_volume;
687  volume = volume / max_volume;
688  }
689  else if (source_clip->mixing == VOLUME_MIX_REDUCE && max_volume > 1.0) {
690  // Reduce clip volume by a bit, hoping it will prevent exceeding 100% (but it is very possible it will)
691  previous_volume = previous_volume * 0.77;
692  volume = volume * 0.77;
693  }
694 
695  // If channel filter enabled, check for correct channel (and skip non-matching channels)
696  if (channel_filter != -1 && channel_filter != channel)
697  continue; // skip to next channel
698 
699  // If no volume on this frame or previous frame, do nothing
700  if (previous_volume == 0.0 && volume == 0.0)
701  continue; // skip to next channel
702 
703  // If channel mapping disabled, just use the current channel
704  if (channel_mapping == -1)
705  channel_mapping = channel;
706 
707  // Apply ramp to source frame (if needed)
708  if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0))
709  source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);
710 
711  // Copy audio samples (and set initial volume). Mix samples with existing audio samples. The gains are added together, to
712  // be sure to set the gain's correctly, so the sum does not exceed 1.0 (of audio distortion will happen).
713  new_frame->AddAudio(false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0);
714  }
715  }
716  else
717  // Debug output
719  "Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
720  "source_clip->Reader()->info.has_audio",
721  source_clip->Reader()->info.has_audio,
722  "source_frame->GetAudioChannelsCount()",
723  source_frame->GetAudioChannelsCount(),
724  "info.channels", info.channels,
725  "clip_frame_number", clip_frame_number);
726  }
727 
728  // Debug output
730  "Timeline::add_layer (Transform: Composite Image Layer: Completed)",
731  "source_frame->number", source_frame->number,
732  "new_frame->GetImage()->width()", new_frame->GetWidth(),
733  "new_frame->GetImage()->height()", new_frame->GetHeight());
734 }
735 
736 // Update the list of 'opened' clips
// NOTE(review): Doxygen-scraped listing — original lines 742 and 774 (the
// `ZmqLogger::Instance()->AppendDebugMethod(` call openings) were dropped; the
// orphaned argument lists below belong to them. Restore from upstream Timeline.cpp.
737 void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect)
738 {
739  // Get lock (prevent getting frames while this happens)
740  const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);
741 
743  "Timeline::update_open_clips (before)",
744  "does_clip_intersect", does_clip_intersect,
745  "closing_clips.size()", closing_clips.size(),
746  "open_clips.size()", open_clips.size());
747 
748  // is clip already in list?
749  bool clip_found = open_clips.count(clip);
750 
751  if (clip_found && !does_clip_intersect)
752  {
753  // Remove clip from 'opened' list, because it's closed now
754  open_clips.erase(clip);
755 
756  // Close clip
757  clip->Close();
758  }
759  else if (!clip_found && does_clip_intersect)
760  {
761  // Add clip to 'opened' list, because it's missing
762  open_clips[clip] = clip;
763 
764  try {
765  // Open the clip
766  clip->Open();
767 
768  } catch (const InvalidFile & e) {
769  // ...
770  }
771  }
772 
773  // Debug output
775  "Timeline::update_open_clips (after)",
776  "does_clip_intersect", does_clip_intersect,
777  "clip_found", clip_found,
778  "closing_clips.size()", closing_clips.size(),
779  "open_clips.size()", open_clips.size());
780 }
781 
782 // Calculate the max and min duration (in seconds) of the timeline, based on all the clips, and cache the value
783 void Timeline::calculate_max_duration() {
784  double last_clip = 0.0;
785  double last_effect = 0.0;
786  double first_clip = std::numeric_limits<double>::max();
787  double first_effect = std::numeric_limits<double>::max();
788 
789  // Find the last and first clip
790  if (!clips.empty()) {
791  // Find the clip with the maximum end frame
792  const auto max_clip = std::max_element(
793  clips.begin(), clips.end(), CompareClipEndFrames());
794  last_clip = (*max_clip)->Position() + (*max_clip)->Duration();
795 
796  // Find the clip with the minimum start position (ignoring layer)
797  const auto min_clip = std::min_element(
798  clips.begin(), clips.end(), [](const openshot::Clip* lhs, const openshot::Clip* rhs) {
799  return lhs->Position() < rhs->Position();
800  });
801  first_clip = (*min_clip)->Position();
802  }
803 
804  // Find the last and first effect
805  if (!effects.empty()) {
806  // Find the effect with the maximum end frame
807  const auto max_effect = std::max_element(
808  effects.begin(), effects.end(), CompareEffectEndFrames());
809  last_effect = (*max_effect)->Position() + (*max_effect)->Duration();
810 
811  // Find the effect with the minimum start position
812  const auto min_effect = std::min_element(
813  effects.begin(), effects.end(), [](const openshot::EffectBase* lhs, const openshot::EffectBase* rhs) {
814  return lhs->Position() < rhs->Position();
815  });
816  first_effect = (*min_effect)->Position();
817  }
818 
819  // Calculate the max and min time
820  max_time = std::max(last_clip, last_effect);
821  min_time = std::min(first_clip, first_effect);
822 
823  // If no clips or effects exist, set min_time to 0
824  if (clips.empty() && effects.empty()) {
825  min_time = 0.0;
826  max_time = 0.0;
827  }
828 }
829 
830 // Sort clips by position on the timeline
// NOTE(review): missing line 837 — the `ZmqLogger::Instance()->AppendDebugMethod(`
// call opening was dropped by the Doxygen scrape; the orphaned argument lines
// below belong to it. Restore from upstream Timeline.cpp.
831 void Timeline::sort_clips()
832 {
833  // Get lock (prevent getting frames while this happens)
834  const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);
835 
836  // Debug output
838  "Timeline::SortClips",
839  "clips.size()", clips.size());
840 
841  // sort clips
842  clips.sort(CompareClips());
843 
844  // calculate max timeline duration
845  calculate_max_duration();
846 }
847 
848 // Sort effects by position on the timeline
849 void Timeline::sort_effects()
850 {
851  // Get lock (prevent getting frames while this happens)
852  const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);
853 
854  // sort clips
855  effects.sort(CompareEffects());
856 
857  // calculate max timeline duration
858  calculate_max_duration();
859 }
860 
861 // Clear all clips from timeline
// NOTE(review): missing line 862 — the function signature (presumably
// `void Timeline::Clear()`) was dropped by the Doxygen scrape.
// Closes every clip, deletes only the clips/effects/mappers this timeline
// allocated, and empties all bookkeeping containers.
863 {
864  ZmqLogger::Instance()->AppendDebugMethod("Timeline::Clear");
865 
866  // Get lock (prevent getting frames while this happens)
867  const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);
868 
869  // Close all open clips
870  for (auto clip : clips)
871  {
872  update_open_clips(clip, false);
873 
874  // Delete clip object (if timeline allocated it)
875  bool allocated = allocated_clips.count(clip);
876  if (allocated) {
877  delete clip;
878  }
879  }
880  // Clear all clips
881  clips.clear();
882  allocated_clips.clear();
883 
884  // Close all effects
885  for (auto effect : effects)
886  {
887  // Delete effect object (if timeline allocated it)
888  bool allocated = allocated_effects.count(effect);
889  if (allocated) {
890  delete effect;
891  }
892  }
893  // Clear all effects
894  effects.clear();
895  allocated_effects.clear();
896 
897  // Delete all FrameMappers
898  for (auto mapper : allocated_frame_mappers)
899  {
900  mapper->Reader(NULL);
901  mapper->Close();
902  delete mapper;
903  }
904  allocated_frame_mappers.clear();
905 }
906 
907 // Close the reader (and any resources it was consuming)
// NOTE(review): missing line 908 — the function signature (presumably
// `void Timeline::Close()`) was dropped by the Doxygen scrape.
909 {
910  ZmqLogger::Instance()->AppendDebugMethod("Timeline::Close");
911 
912  // Get lock (prevent getting frames while this happens)
913  const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);
914 
915  // Close all open clips
916  for (auto clip : clips)
917  {
918  // Open or Close this clip, based on if it's intersecting or not
919  update_open_clips(clip, false);
920  }
921 
922  // Mark timeline as closed
923  is_open = false;
924 
925  // Clear all cache (deep clear, including nested Readers)
926  ClearAllCache(true);
927 }
928 
929 // Open the reader (and start consuming resources)
// NOTE(review): missing line 930 — the function signature (presumably
// `void Timeline::Open()`) was dropped by the Doxygen scrape.
931 {
932  is_open = true;
933 }
934 
935 // Compare 2 floating point numbers for equality
936 bool Timeline::isEqual(double a, double b)
937 {
938  return fabs(a - b) < 0.000001;
939 }
940 
941 // Get an openshot::Frame object for a specific frame number of this reader.
942 std::shared_ptr<Frame> Timeline::GetFrame(int64_t requested_frame)
943 {
944  // Adjust out of bounds frame number
945  if (requested_frame < 1)
946  requested_frame = 1;
947  const int64_t max_frame = GetMaxFrame();
948  const bool past_timeline_end = (max_frame > 0 && requested_frame > max_frame);
949 
950  // Check cache
951  std::shared_ptr<Frame> frame;
952  if (!past_timeline_end)
953  frame = final_cache->GetFrame(requested_frame);
954  if (frame) {
955  // Debug output
957  "Timeline::GetFrame (Cached frame found)",
958  "requested_frame", requested_frame);
959 
960  // Return cached frame
961  return frame;
962  }
963  else
964  {
965  // Prevent async calls to the following code
966  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
967 
968  // Check cache 2nd time
969  std::shared_ptr<Frame> frame;
970  if (!past_timeline_end)
971  frame = final_cache->GetFrame(requested_frame);
972  if (frame) {
973  // Debug output
975  "Timeline::GetFrame (Cached frame found on 2nd check)",
976  "requested_frame", requested_frame);
977 
978  // Return cached frame
979  return frame;
980  } else {
981  // Get a list of clips that intersect with the requested section of timeline
982  // This also opens the readers for intersecting clips, and marks non-intersecting clips as 'needs closing'
983  std::vector<Clip *> nearby_clips;
984  nearby_clips = find_intersecting_clips(requested_frame, 1, true);
985 
986  // Debug output
988  "Timeline::GetFrame (processing frame)",
989  "requested_frame", requested_frame,
990  "omp_get_thread_num()", omp_get_thread_num());
991 
992  // Init some basic properties about this frame
993  int samples_in_frame = Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels);
994 
995  // Create blank frame (which will become the requested frame)
996  std::shared_ptr<Frame> new_frame(std::make_shared<Frame>(requested_frame, preview_width, preview_height, "#000000", samples_in_frame, info.channels));
997  new_frame->AddAudioSilence(samples_in_frame);
998  new_frame->SampleRate(info.sample_rate);
999  new_frame->ChannelsLayout(info.channel_layout);
1000 
1001  // Debug output
1003  "Timeline::GetFrame (Adding solid color)",
1004  "requested_frame", requested_frame,
1005  "info.width", info.width,
1006  "info.height", info.height);
1007 
1008  // Add Background Color to 1st layer (if animated or not black)
1009  if ((color.red.GetCount() > 1 || color.green.GetCount() > 1 || color.blue.GetCount() > 1) ||
1010  (color.red.GetValue(requested_frame) != 0.0 || color.green.GetValue(requested_frame) != 0.0 ||
1011  color.blue.GetValue(requested_frame) != 0.0))
1012  new_frame->AddColor(preview_width, preview_height, color.GetColorHex(requested_frame));
1013 
1014  // Debug output
1016  "Timeline::GetFrame (Loop through clips)",
1017  "requested_frame", requested_frame,
1018  "clips.size()", clips.size(),
1019  "nearby_clips.size()", nearby_clips.size());
1020 
1021  // Precompute per-clip timing for this requested frame
1022  struct ClipInfo {
1023  Clip* clip;
1024  int64_t start_pos;
1025  int64_t end_pos;
1026  int64_t start_frame;
1027  int64_t frame_number;
1028  bool intersects;
1029  };
1030  std::vector<ClipInfo> clip_infos;
1031  clip_infos.reserve(nearby_clips.size());
1032  const double fpsD = info.fps.ToDouble();
1033 
1034  for (auto clip : nearby_clips) {
1035  int64_t start_pos = static_cast<int64_t>(std::llround(clip->Position() * fpsD)) + 1;
1036  int64_t end_pos = static_cast<int64_t>(std::llround((clip->Position() + clip->Duration()) * fpsD));
1037  bool intersects = (start_pos <= requested_frame && end_pos >= requested_frame);
1038  int64_t start_frame = static_cast<int64_t>(std::llround(clip->Start() * fpsD)) + 1;
1039  int64_t frame_number = requested_frame - start_pos + start_frame;
1040  clip_infos.push_back({clip, start_pos, end_pos, start_frame, frame_number, intersects});
1041  }
1042 
1043  // Determine top clip per layer (linear, no nested loop)
1044  std::unordered_map<int, int64_t> top_start_for_layer;
1045  std::unordered_map<int, Clip*> top_clip_for_layer;
1046  for (const auto& ci : clip_infos) {
1047  if (!ci.intersects) continue;
1048  const int layer = ci.clip->Layer();
1049  auto it = top_start_for_layer.find(layer);
1050  if (it == top_start_for_layer.end() || ci.start_pos > it->second) {
1051  top_start_for_layer[layer] = ci.start_pos; // strictly greater to match prior logic
1052  top_clip_for_layer[layer] = ci.clip;
1053  }
1054  }
1055 
1056  // Compute max_volume across all overlapping clips once
1057  float max_volume_sum = 0.0f;
1058  for (const auto& ci : clip_infos) {
1059  if (!ci.intersects) continue;
1060  if (ci.clip->Reader() && ci.clip->Reader()->info.has_audio &&
1061  ci.clip->has_audio.GetInt(ci.frame_number) != 0) {
1062  max_volume_sum += static_cast<float>(ci.clip->volume.GetValue(ci.frame_number));
1063  }
1064  }
1065 
1066  // Compose intersecting clips in a single pass
1067  const int safe_remaining = safe_edit_frames_remaining.load(std::memory_order_relaxed);
1068  const bool force_safe_composite = (safe_remaining > 0);
1069  if (force_safe_composite) {
1070  safe_edit_frames_remaining.fetch_sub(1, std::memory_order_relaxed);
1071  }
1072  for (const auto& ci : clip_infos) {
1073  // Debug output
1075  "Timeline::GetFrame (Does clip intersect)",
1076  "requested_frame", requested_frame,
1077  "clip->Position()", ci.clip->Position(),
1078  "clip->Duration()", ci.clip->Duration(),
1079  "does_clip_intersect", ci.intersects);
1080 
1081  // Clip is visible
1082  if (ci.intersects) {
1083  // Is this the top clip on its layer?
1084  bool is_top_clip = false;
1085  const int layer = ci.clip->Layer();
1086  auto top_it = top_clip_for_layer.find(layer);
1087  if (top_it != top_clip_for_layer.end())
1088  is_top_clip = (top_it->second == ci.clip);
1089 
1090  // Determine the frame needed for this clip (based on the position on the timeline)
1091  int64_t clip_frame_number = ci.frame_number;
1092 
1093  // Debug output
1095  "Timeline::GetFrame (Calculate clip's frame #)",
1096  "clip->Position()", ci.clip->Position(),
1097  "clip->Start()", ci.clip->Start(),
1098  "info.fps.ToFloat()", info.fps.ToFloat(),
1099  "clip_frame_number", clip_frame_number);
1100 
1101  // Add clip's frame as layer
1102  add_layer(new_frame, ci.clip, clip_frame_number, is_top_clip, force_safe_composite, max_volume_sum);
1103 
1104  } else {
1105  // Debug output
1107  "Timeline::GetFrame (clip does not intersect)",
1108  "requested_frame", requested_frame,
1109  "does_clip_intersect", ci.intersects);
1110  }
1111 
1112  } // end clip loop
1113 
1114  // Debug output
1116  "Timeline::GetFrame (Add frame to cache)",
1117  "requested_frame", requested_frame,
1118  "info.width", info.width,
1119  "info.height", info.height);
1120 
1121  // Set frame # on mapped frame
1122  new_frame->SetFrameNumber(requested_frame);
1123 
1124  // Add final frame to cache (only for valid timeline range)
1125  if (!past_timeline_end)
1126  final_cache->Add(new_frame);
1127  // Return frame (or blank frame)
1128  return new_frame;
1129  }
1130  }
1131 }
1132 
1133 
1134 // Find intersecting clips (or non intersecting clips)
1135 std::vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include)
1136 {
1137  // Find matching clips
1138  std::vector<Clip*> matching_clips;
1139 
1140  // Calculate time of frame
1141  const int64_t min_requested_frame = requested_frame;
1142  const int64_t max_requested_frame = requested_frame + (number_of_frames - 1);
1143 
1144  // Find Clips at this time
1145  matching_clips.reserve(clips.size());
1146  const double fpsD = info.fps.ToDouble();
1147  for (auto clip : clips)
1148  {
1149  // Does clip intersect the current requested time
1150  int64_t clip_start_position = static_cast<int64_t>(std::llround(clip->Position() * fpsD)) + 1;
1151  int64_t clip_end_position = static_cast<int64_t>(std::llround((clip->Position() + clip->Duration()) * fpsD)) + 1;
1152 
1153  bool does_clip_intersect =
1154  (clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
1155  (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);
1156 
1157  // Debug output
1159  "Timeline::find_intersecting_clips (Is clip near or intersecting)",
1160  "requested_frame", requested_frame,
1161  "min_requested_frame", min_requested_frame,
1162  "max_requested_frame", max_requested_frame,
1163  "clip->Position()", clip->Position(),
1164  "does_clip_intersect", does_clip_intersect);
1165 
1166  // Open (or schedule for closing) this clip, based on if it's intersecting or not
1167  update_open_clips(clip, does_clip_intersect);
1168 
1169  // Clip is visible
1170  if (does_clip_intersect && include)
1171  // Add the intersecting clip
1172  matching_clips.push_back(clip);
1173 
1174  else if (!does_clip_intersect && !include)
1175  // Add the non-intersecting clip
1176  matching_clips.push_back(clip);
1177 
1178  } // end clip loop
1179 
1180  // return list
1181  return matching_clips;
1182 }
1183 
1184 // Set the cache object used by this reader
1185 void Timeline::SetCache(CacheBase* new_cache) {
1186  // Get lock (prevent getting frames while this happens)
1187  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
1188 
1189  // Destroy previous cache (if managed by timeline)
1190  if (managed_cache && final_cache) {
1191  delete final_cache;
1192  final_cache = NULL;
1193  managed_cache = false;
1194  }
1195 
1196  // Set new cache
1197  final_cache = new_cache;
1198 }
1199 
1200 // Generate JSON string of this object
1201 std::string Timeline::Json() const {
1202 
1203  // Return formatted string
1204  return JsonValue().toStyledString();
1205 }
1206 
1207 // Generate Json::Value for this object
1208 Json::Value Timeline::JsonValue() const {
1209 
1210  // Create root json object
1211  Json::Value root = ReaderBase::JsonValue(); // get parent properties
1212  root["type"] = "Timeline";
1213  root["viewport_scale"] = viewport_scale.JsonValue();
1214  root["viewport_x"] = viewport_x.JsonValue();
1215  root["viewport_y"] = viewport_y.JsonValue();
1216  root["color"] = color.JsonValue();
1217  root["path"] = path;
1218 
1219  // Add array of clips
1220  root["clips"] = Json::Value(Json::arrayValue);
1221 
1222  // Find Clips at this time
1223  for (const auto existing_clip : clips)
1224  {
1225  root["clips"].append(existing_clip->JsonValue());
1226  }
1227 
1228  // Add array of effects
1229  root["effects"] = Json::Value(Json::arrayValue);
1230 
1231  // loop through effects
1232  for (const auto existing_effect: effects)
1233  {
1234  root["effects"].append(existing_effect->JsonValue());
1235  }
1236 
1237  // return JsonValue
1238  return root;
1239 }
1240 
1241 // Load JSON string into this object
1242 void Timeline::SetJson(const std::string value) {
1243 
1244  // Get lock (prevent getting frames while this happens)
1245  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
1246 
1247  // Parse JSON string into JSON objects
1248  try
1249  {
1250  const Json::Value root = openshot::stringToJson(value);
1251  // Set all values that match
1252  SetJsonValue(root);
1253  }
1254  catch (const std::exception& e)
1255  {
1256  // Error parsing JSON (or missing keys)
1257  throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
1258  }
1259 }
1260 
1261 // Load Json::Value into this object
1262 void Timeline::SetJsonValue(const Json::Value root) {
1263 
1264  // Get lock (prevent getting frames while this happens)
1265  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
1266 
1267  // Close timeline before we do anything (this closes all clips)
1268  bool was_open = is_open;
1269  Close();
1270 
1271  // Set parent data
1273 
1274  // Set data from Json (if key is found)
1275  if (!root["path"].isNull())
1276  path = root["path"].asString();
1277 
1278  if (!root["clips"].isNull()) {
1279  // Clear existing clips
1280  clips.clear();
1281 
1282  // loop through clips
1283  for (const Json::Value existing_clip : root["clips"]) {
1284  // Skip NULL nodes
1285  if (existing_clip.isNull()) {
1286  continue;
1287  }
1288 
1289  // Create Clip
1290  Clip *c = new Clip();
1291 
1292  // Keep track of allocated clip objects
1293  allocated_clips.insert(c);
1294 
1295  // When a clip is attached to an object, it searches for the object
1296  // on it's parent timeline. Setting the parent timeline of the clip here
1297  // allows attaching it to an object when exporting the project (because)
1298  // the exporter script initializes the clip and it's effects
1299  // before setting its parent timeline.
1300  c->ParentTimeline(this);
1301 
1302  // Load Json into Clip
1303  c->SetJsonValue(existing_clip);
1304 
1305  // Add Clip to Timeline
1306  AddClip(c);
1307  }
1308  }
1309 
1310  if (!root["effects"].isNull()) {
1311  // Clear existing effects
1312  effects.clear();
1313 
1314  // loop through effects
1315  for (const Json::Value existing_effect :root["effects"]) {
1316  // Skip NULL nodes
1317  if (existing_effect.isNull()) {
1318  continue;
1319  }
1320 
1321  // Create Effect
1322  EffectBase *e = NULL;
1323 
1324  if (!existing_effect["type"].isNull()) {
1325  // Create instance of effect
1326  if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) {
1327 
1328  // Keep track of allocated effect objects
1329  allocated_effects.insert(e);
1330 
1331  // Load Json into Effect
1332  e->SetJsonValue(existing_effect);
1333 
1334  // Add Effect to Timeline
1335  AddEffect(e);
1336  }
1337  }
1338  }
1339  }
1340 
1341  if (!root["duration"].isNull()) {
1342  // Update duration of timeline
1343  info.duration = root["duration"].asDouble();
1345  }
1346 
1347  // Update preview settings
1350 
1351  // Resort (and recalculate min/max duration)
1352  sort_clips();
1353  sort_effects();
1354 
1355  // Re-open if needed
1356  if (was_open)
1357  Open();
1358 
1359  // Timeline content changed: notify cache clients to rescan active window.
1360  BumpCacheEpoch();
1361 }
1362 
1363 // Apply a special formatted JSON object, which represents a change to the timeline (insert, update, delete)
1364 void Timeline::ApplyJsonDiff(std::string value) {
1365 
1366  // Get lock (prevent getting frames while this happens)
1367  const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
1368 
1369  // Parse JSON string into JSON objects
1370  try
1371  {
1372  const Json::Value root = openshot::stringToJson(value);
1373  // Process the JSON change array, loop through each item
1374  for (const Json::Value change : root) {
1375  std::string change_key = change["key"][(uint)0].asString();
1376 
1377  // Process each type of change
1378  if (change_key == "clips")
1379  // Apply to CLIPS
1380  apply_json_to_clips(change);
1381 
1382  else if (change_key == "effects")
1383  // Apply to EFFECTS
1384  apply_json_to_effects(change);
1385 
1386  else
1387  // Apply to TIMELINE
1388  apply_json_to_timeline(change);
1389 
1390  }
1391 
1392  // Timeline content changed: notify cache clients to rescan active window.
1393  if (!root.empty()) {
1394  // After edits, force safe composition for a short window.
1395  safe_edit_frames_remaining.store(240, std::memory_order_relaxed);
1396  BumpCacheEpoch();
1397  }
1398  }
1399  catch (const std::exception& e)
1400  {
1401  // Error parsing JSON (or missing keys)
1402  throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
1403  }
1404 }
1405 
// Increment the cache epoch counter. Cache clients watch this value; a bump
// signals that cached timeline content changed and their active window should
// be rescanned. Relaxed ordering suffices: the counter is a change
// notification, not a synchronization point for other data.
void Timeline::BumpCacheEpoch() {
	cache_epoch.fetch_add(1, std::memory_order_relaxed);
}
1409 
1410 // Apply JSON diff to clips
1411 void Timeline::apply_json_to_clips(Json::Value change) {
1412 
1413  // Get key and type of change
1414  std::string change_type = change["type"].asString();
1415  std::string clip_id = "";
1416  Clip *existing_clip = NULL;
1417 
1418  // Find id of clip (if any)
1419  for (auto key_part : change["key"]) {
1420  // Get each change
1421  if (key_part.isObject()) {
1422  // Check for id
1423  if (!key_part["id"].isNull()) {
1424  // Set the id
1425  clip_id = key_part["id"].asString();
1426 
1427  // Find matching clip in timeline (if any)
1428  for (auto c : clips)
1429  {
1430  if (c->Id() == clip_id) {
1431  existing_clip = c;
1432  break; // clip found, exit loop
1433  }
1434  }
1435  break; // id found, exit loop
1436  }
1437  }
1438  }
1439 
1440  // Check for a more specific key (targetting this clip's effects)
1441  // For example: ["clips", {"id:123}, "effects", {"id":432}]
1442  if (existing_clip && change["key"].size() == 4 && change["key"][2] == "effects")
1443  {
1444  // This change is actually targetting a specific effect under a clip (and not the clip)
1445  Json::Value key_part = change["key"][3];
1446 
1447  if (key_part.isObject()) {
1448  // Check for id
1449  if (!key_part["id"].isNull())
1450  {
1451  // Set the id
1452  std::string effect_id = key_part["id"].asString();
1453 
1454  // Find matching effect in timeline (if any)
1455  std::list<EffectBase*> effect_list = existing_clip->Effects();
1456  for (auto e : effect_list)
1457  {
1458  if (e->Id() == effect_id) {
1459  // Apply the change to the effect directly
1460  apply_json_to_effects(change, e);
1461 
1462  // Effect-only diffs must clear the owning clip cache.
1463  if (existing_clip->GetCache()) {
1464  existing_clip->GetCache()->Clear();
1465  }
1466 
1467  // Calculate start and end frames that this impacts, and remove those frames from the cache
1468  int64_t new_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1469  int64_t new_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1470  final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
1471 
1472  return; // effect found, don't update clip
1473  }
1474  }
1475  }
1476  }
1477  }
1478 
1479  // Determine type of change operation
1480  if (change_type == "insert") {
1481 
1482  // Create clip
1483  Clip *clip = new Clip();
1484 
1485  // Keep track of allocated clip objects
1486  allocated_clips.insert(clip);
1487 
1488  // Set properties of clip from JSON
1489  clip->SetJsonValue(change["value"]);
1490 
1491  // Add clip to timeline
1492  AddClip(clip);
1493 
1494  // Calculate start and end frames that this impacts, and remove those frames from the cache
1495  int64_t new_starting_frame = (clip->Position() * info.fps.ToDouble()) + 1;
1496  int64_t new_ending_frame = ((clip->Position() + clip->Duration()) * info.fps.ToDouble()) + 1;
1497  final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
1498 
1499  } else if (change_type == "update") {
1500 
1501  // Update existing clip
1502  if (existing_clip) {
1503  // Calculate start and end frames prior to the update
1504  int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1505  int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1506 
1507  // Update clip properties from JSON
1508  existing_clip->SetJsonValue(change["value"]);
1509 
1510  // Calculate new start and end frames after the update
1511  int64_t new_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1512  int64_t new_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1513 
1514  // Remove both the old and new ranges from the timeline cache
1515  final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1516  final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
1517 
1518  // Apply framemapper (or update existing framemapper)
1519  if (auto_map_clips) {
1520  apply_mapper_to_clip(existing_clip);
1521  }
1522  }
1523 
1524  } else if (change_type == "delete") {
1525 
1526  // Remove existing clip
1527  if (existing_clip) {
1528  // Remove clip from timeline
1529  RemoveClip(existing_clip);
1530 
1531  // Calculate start and end frames that this impacts, and remove those frames from the cache
1532  int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1533  int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1534  final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1535  }
1536 
1537  }
1538 
1539  // Re-Sort Clips (since they likely changed)
1540  sort_clips();
1541 }
1542 
1543 // Apply JSON diff to effects
1544 void Timeline::apply_json_to_effects(Json::Value change) {
1545 
1546  // Get key and type of change
1547  std::string change_type = change["type"].asString();
1548  EffectBase *existing_effect = NULL;
1549 
1550  // Find id of an effect (if any)
1551  for (auto key_part : change["key"]) {
1552 
1553  if (key_part.isObject()) {
1554  // Check for id
1555  if (!key_part["id"].isNull())
1556  {
1557  // Set the id
1558  std::string effect_id = key_part["id"].asString();
1559 
1560  // Find matching effect in timeline (if any)
1561  for (auto e : effects)
1562  {
1563  if (e->Id() == effect_id) {
1564  existing_effect = e;
1565  break; // effect found, exit loop
1566  }
1567  }
1568  break; // id found, exit loop
1569  }
1570  }
1571  }
1572 
1573  // Now that we found the effect, apply the change to it
1574  if (existing_effect || change_type == "insert") {
1575  // Apply change to effect
1576  apply_json_to_effects(change, existing_effect);
1577  }
1578 }
1579 
1580 // Apply JSON diff to effects (if you already know which effect needs to be updated)
1581 void Timeline::apply_json_to_effects(Json::Value change, EffectBase* existing_effect) {
1582 
1583  // Get key and type of change
1584  std::string change_type = change["type"].asString();
1585 
1586  // Calculate start and end frames that this impacts, and remove those frames from the cache
1587  if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
1588  int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
1589  int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
1590  final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
1591  }
1592 
1593  // Determine type of change operation
1594  if (change_type == "insert") {
1595 
1596  // Determine type of effect
1597  std::string effect_type = change["value"]["type"].asString();
1598 
1599  // Create Effect
1600  EffectBase *e = NULL;
1601 
1602  // Init the matching effect object
1603  if ( (e = EffectInfo().CreateEffect(effect_type)) ) {
1604 
1605  // Keep track of allocated effect objects
1606  allocated_effects.insert(e);
1607 
1608  // Load Json into Effect
1609  e->SetJsonValue(change["value"]);
1610 
1611  // Add Effect to Timeline
1612  AddEffect(e);
1613  }
1614 
1615  } else if (change_type == "update") {
1616 
1617  // Update existing effect
1618  if (existing_effect) {
1619 
1620  // Calculate start and end frames that this impacts, and remove those frames from the cache
1621  int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
1622  int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
1623  final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1624 
1625  // Update effect properties from JSON
1626  existing_effect->SetJsonValue(change["value"]);
1627  }
1628 
1629  } else if (change_type == "delete") {
1630 
1631  // Remove existing effect
1632  if (existing_effect) {
1633 
1634  // Calculate start and end frames that this impacts, and remove those frames from the cache
1635  int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
1636  int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
1637  final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1638 
1639  // Remove effect from timeline
1640  RemoveEffect(existing_effect);
1641  }
1642 
1643  }
1644 
1645  // Re-Sort Effects (since they likely changed)
1646  sort_effects();
1647 }
1648 
1649 // Apply JSON diff to timeline properties
1650 void Timeline::apply_json_to_timeline(Json::Value change) {
1651  bool cache_dirty = true;
1652 
1653  // Get key and type of change
1654  std::string change_type = change["type"].asString();
1655  std::string root_key = change["key"][(uint)0].asString();
1656  std::string sub_key = "";
1657  if (change["key"].size() >= 2)
1658  sub_key = change["key"][(uint)1].asString();
1659 
1660  // Determine type of change operation
1661  if (change_type == "insert" || change_type == "update") {
1662 
1663  // INSERT / UPDATE
1664  // Check for valid property
1665  if (root_key == "color")
1666  // Set color
1667  color.SetJsonValue(change["value"]);
1668  else if (root_key == "viewport_scale")
1669  // Set viewport scale
1670  viewport_scale.SetJsonValue(change["value"]);
1671  else if (root_key == "viewport_x")
1672  // Set viewport x offset
1673  viewport_x.SetJsonValue(change["value"]);
1674  else if (root_key == "viewport_y")
1675  // Set viewport y offset
1676  viewport_y.SetJsonValue(change["value"]);
1677  else if (root_key == "duration") {
1678  // Update duration of timeline
1679  info.duration = change["value"].asDouble();
1681 
1682  // We don't want to clear cache for duration adjustments
1683  cache_dirty = false;
1684  }
1685  else if (root_key == "width") {
1686  // Set width
1687  info.width = change["value"].asInt();
1689  }
1690  else if (root_key == "height") {
1691  // Set height
1692  info.height = change["value"].asInt();
1694  }
1695  else if (root_key == "fps" && sub_key == "" && change["value"].isObject()) {
1696  // Set fps fraction
1697  if (!change["value"]["num"].isNull())
1698  info.fps.num = change["value"]["num"].asInt();
1699  if (!change["value"]["den"].isNull())
1700  info.fps.den = change["value"]["den"].asInt();
1701  }
1702  else if (root_key == "fps" && sub_key == "num")
1703  // Set fps.num
1704  info.fps.num = change["value"].asInt();
1705  else if (root_key == "fps" && sub_key == "den")
1706  // Set fps.den
1707  info.fps.den = change["value"].asInt();
1708  else if (root_key == "display_ratio" && sub_key == "" && change["value"].isObject()) {
1709  // Set display_ratio fraction
1710  if (!change["value"]["num"].isNull())
1711  info.display_ratio.num = change["value"]["num"].asInt();
1712  if (!change["value"]["den"].isNull())
1713  info.display_ratio.den = change["value"]["den"].asInt();
1714  }
1715  else if (root_key == "display_ratio" && sub_key == "num")
1716  // Set display_ratio.num
1717  info.display_ratio.num = change["value"].asInt();
1718  else if (root_key == "display_ratio" && sub_key == "den")
1719  // Set display_ratio.den
1720  info.display_ratio.den = change["value"].asInt();
1721  else if (root_key == "pixel_ratio" && sub_key == "" && change["value"].isObject()) {
1722  // Set pixel_ratio fraction
1723  if (!change["value"]["num"].isNull())
1724  info.pixel_ratio.num = change["value"]["num"].asInt();
1725  if (!change["value"]["den"].isNull())
1726  info.pixel_ratio.den = change["value"]["den"].asInt();
1727  }
1728  else if (root_key == "pixel_ratio" && sub_key == "num")
1729  // Set pixel_ratio.num
1730  info.pixel_ratio.num = change["value"].asInt();
1731  else if (root_key == "pixel_ratio" && sub_key == "den")
1732  // Set pixel_ratio.den
1733  info.pixel_ratio.den = change["value"].asInt();
1734 
1735  else if (root_key == "sample_rate")
1736  // Set sample rate
1737  info.sample_rate = change["value"].asInt();
1738  else if (root_key == "channels")
1739  // Set channels
1740  info.channels = change["value"].asInt();
1741  else if (root_key == "channel_layout")
1742  // Set channel layout
1743  info.channel_layout = (ChannelLayout) change["value"].asInt();
1744  else
1745  // Error parsing JSON (or missing keys)
1746  throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
1747 
1748 
1749  } else if (change["type"].asString() == "delete") {
1750 
1751  // DELETE / RESET
1752  // Reset the following properties (since we can't delete them)
1753  if (root_key == "color") {
1754  color = Color();
1755  color.red = Keyframe(0.0);
1756  color.green = Keyframe(0.0);
1757  color.blue = Keyframe(0.0);
1758  }
1759  else if (root_key == "viewport_scale")
1760  viewport_scale = Keyframe(1.0);
1761  else if (root_key == "viewport_x")
1762  viewport_x = Keyframe(0.0);
1763  else if (root_key == "viewport_y")
1764  viewport_y = Keyframe(0.0);
1765  else
1766  // Error parsing JSON (or missing keys)
1767  throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
1768 
1769  }
1770 
1771  if (cache_dirty) {
1772  // Clear entire cache
1773  ClearAllCache();
1774  }
1775 }
1776 
1777 // Clear all caches
1778 void Timeline::ClearAllCache(bool deep) {
1779  // Get lock (prevent getting frames while this happens)
1780  const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);
1781 
1782  // Clear primary cache
1783  if (final_cache) {
1784  final_cache->Clear();
1785  }
1786 
1787  // Loop through all clips
1788  try {
1789  for (const auto clip : clips) {
1790  // Clear cache on clip and reader if present
1791  if (clip->Reader()) {
1792  if (auto rc = clip->Reader()->GetCache())
1793  rc->Clear();
1794 
1795  // Clear nested Reader (if deep clear requested)
1796  if (deep && clip->Reader()->Name() == "FrameMapper") {
1797  FrameMapper *nested_reader = static_cast<FrameMapper *>(clip->Reader());
1798  if (nested_reader->Reader()) {
1799  if (auto nc = nested_reader->Reader()->GetCache())
1800  nc->Clear();
1801  }
1802  }
1803  }
1804 
1805  // Clear clip cache
1806  if (auto cc = clip->GetCache())
1807  cc->Clear();
1808  }
1809  } catch (const ReaderClosed & e) {
1810  // ...
1811  }
1812 
1813  // Cache content changed: notify cache clients to rebuild their window baseline.
1814  BumpCacheEpoch();
1815 }
1816 
1817 // Set Max Image Size (used for performance optimization). Convenience function for setting
1818 // Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT.
1819 void Timeline::SetMaxSize(int width, int height) {
1820  // Maintain aspect ratio regardless of what size is passed in
1821  QSize display_ratio_size = QSize(info.width, info.height);
1822  QSize proposed_size = QSize(std::min(width, info.width), std::min(height, info.height));
1823 
1824  // Scale QSize up to proposed size
1825  display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio);
1826 
1827  // Update preview settings
1828  preview_width = display_ratio_size.width();
1829  preview_height = display_ratio_size.height();
1830 }
openshot::stringToJson
const Json::Value stringToJson(const std::string value)
Definition: Json.cpp:16
openshot::CacheMemory::Clear
void Clear()
Clear the cache of all frames.
Definition: CacheMemory.cpp:224
openshot::Timeline::RemoveClip
void RemoveClip(openshot::Clip *clip)
Remove an openshot::Clip from the timeline.
Definition: Timeline.cpp:398
openshot::FrameMapper::ChangeMapping
void ChangeMapping(Fraction target_fps, PulldownType pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout)
Change frame rate or audio mapping details.
Definition: FrameMapper.cpp:806
openshot::ReaderInfo::sample_rate
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
Definition: ReaderBase.h:60
openshot::EffectInfo
This class returns a listing of all effects supported by libopenshot.
Definition: EffectInfo.h:28
openshot::Fraction::ToFloat
float ToFloat()
Return this fraction as a float (i.e. 1/2 = 0.5)
Definition: Fraction.cpp:35
openshot::Timeline::GetFrame
std::shared_ptr< openshot::Frame > GetFrame(int64_t requested_frame) override
Definition: Timeline.cpp:942
openshot::EffectBase
This abstract class is the base class, used by all effects in libopenshot.
Definition: EffectBase.h:56
openshot::ReaderBase::JsonValue
virtual Json::Value JsonValue() const =0
Generate Json::Value for this object.
Definition: ReaderBase.cpp:106
openshot::Timeline::~Timeline
virtual ~Timeline()
Definition: Timeline.cpp:211
openshot::CacheBase::Clear
virtual void Clear()=0
Clear the cache of all frames.
openshot::CacheBase::GetFrame
virtual std::shared_ptr< openshot::Frame > GetFrame(int64_t frame_number)=0
Get a frame from the cache.
openshot::Timeline::viewport_x
openshot::Keyframe viewport_x
Curve representing the x coordinate for the viewport.
Definition: Timeline.h:331
openshot::TimelineBase::preview_width
int preview_width
Optional preview width of timeline image. If your preview window is smaller than the timeline,...
Definition: TimelineBase.h:45
openshot::CompareClipEndFrames
Definition: Timeline.h:73
openshot::Timeline::SetMaxSize
void SetMaxSize(int width, int height)
Definition: Timeline.cpp:1819
openshot::BBox::height
float height
bounding box height
Definition: TrackedObjectBBox.h:42
openshot::CrashHandler::Instance
static CrashHandler * Instance()
Definition: CrashHandler.cpp:27
openshot::EffectInfo::CreateEffect
EffectBase * CreateEffect(std::string effect_type)
Create an instance of an effect (factory style)
Definition: EffectInfo.cpp:27
openshot::Clip::GetCache
openshot::CacheMemory * GetCache() override
Get the cache object (always return NULL for this reader)
Definition: Clip.h:206
openshot::ReaderBase::SetJsonValue
virtual void SetJsonValue(const Json::Value root)=0
Load Json::Value into this object.
Definition: ReaderBase.cpp:157
openshot
This namespace is the default namespace for all code in the openshot library.
Definition: Compressor.h:28
openshot::TimelineBase::preview_height
int preview_height
Optional preview height of timeline image. If your preview window is smaller than the timeline,...
Definition: TimelineBase.h:46
openshot::CacheBase::Add
virtual void Add(std::shared_ptr< openshot::Frame > frame)=0
Add a Frame to the cache.
openshot::Timeline::ApplyJsonDiff
void ApplyJsonDiff(std::string value)
Apply a special formatted JSON object, which represents a change to the timeline (add,...
Definition: Timeline.cpp:1364
openshot::Clip
This class represents a clip (used to arrange readers on the timeline)
Definition: Clip.h:89
openshot::Fraction
This class represents a fraction.
Definition: Fraction.h:30
openshot::BBox::cy
float cy
y-coordinate of the bounding box center
Definition: TrackedObjectBBox.h:40
openshot::ReaderBase::info
openshot::ReaderInfo info
Information about the current media file.
Definition: ReaderBase.h:88
openshot::Settings
This class is contains settings used by libopenshot (and can be safely toggled at any point)
Definition: Settings.h:26
openshot::Timeline::GetMinFrame
int64_t GetMinFrame()
Look up the start frame number of the first element on the timeline (first frame is 1)
Definition: Timeline.cpp:486
Timeline.h
Header file for Timeline class.
openshot::Clip::ParentTimeline
void ParentTimeline(openshot::TimelineBase *new_timeline) override
Set associated Timeline pointer.
Definition: Clip.cpp:447
openshot::Timeline::ClearAllCache
void ClearAllCache(bool deep=false)
Definition: Timeline.cpp:1778
openshot::Timeline::GetTrackedObjectsIds
std::list< std::string > GetTrackedObjectsIds() const
Return the ID's of the tracked objects as a list of strings.
Definition: Timeline.cpp:263
openshot::CompareEffectEndFrames
Like CompareClipEndFrames, but for effects.
Definition: Timeline.h:79
openshot::Keyframe::SetJsonValue
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: KeyFrame.cpp:372
openshot::Clip::Effects
std::list< openshot::EffectBase * > Effects()
Return the list of effects on the timeline.
Definition: Clip.h:245
openshot::ReaderInfo::duration
float duration
Length of time (in seconds)
Definition: ReaderBase.h:43
CacheDisk.h
Header file for CacheDisk class.
openshot::Clip::channel_mapping
openshot::Keyframe channel_mapping
A number representing an audio channel to output (only works when filtering a channel)
Definition: Clip.h:350
openshot::ReaderInfo::has_video
bool has_video
Determines if this file has a video stream.
Definition: ReaderBase.h:40
openshot::ReaderInfo::width
int width
The width of the video (in pixels)
Definition: ReaderBase.h:46
openshot::ClipBase::Position
void Position(float value)
Set the Id of this clip object
Definition: ClipBase.cpp:19
openshot::CacheBase
All cache managers in libopenshot are based on this CacheBase class.
Definition: CacheBase.h:34
openshot::Clip::SetJsonValue
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
Definition: Clip.cpp:1034
CacheBase.h
Header file for CacheBase class.
openshot::OutOfBoundsFrame
Exception for frames that are out of bounds.
Definition: Exceptions.h:306
openshot::Fraction::ToDouble
double ToDouble() const
Return this fraction as a double (i.e. 1/2 = 0.5)
Definition: Fraction.cpp:40
openshot::Timeline::apply_effects
std::shared_ptr< openshot::Frame > apply_effects(std::shared_ptr< openshot::Frame > frame, int64_t timeline_frame_number, int layer, TimelineInfoStruct *options)
Apply global/timeline effects to the source frame (if any)
Definition: Timeline.cpp:550
openshot::TimelineInfoStruct::force_safe_composite
bool force_safe_composite
If true, avoid mutating cached clip images during composition.
Definition: TimelineBase.h:36
openshot::Keyframe::JsonValue
Json::Value JsonValue() const
Generate Json::Value for this object.
Definition: KeyFrame.cpp:339
openshot::CacheBase::Remove
virtual void Remove(int64_t frame_number)=0
Remove a specific frame.
FrameMapper.h
Header file for the FrameMapper class.
openshot::ReaderBase::clip
openshot::ClipBase * clip
Pointer to the parent clip instance (if any)
Definition: ReaderBase.h:80
openshot::CacheBase::SetMaxBytesFromInfo
void SetMaxBytesFromInfo(int64_t number_of_frames, int width, int height, int sample_rate, int channels)
Set maximum bytes to a different amount based on a ReaderInfo struct.
Definition: CacheBase.cpp:28
openshot::Color
This class represents a color (used on the timeline and clips)
Definition: Color.h:27
openshot::ClipBase::SetJsonValue
virtual void SetJsonValue(const Json::Value root)=0
Load Json::Value into this object.
Definition: ClipBase.cpp:80
openshot::ReaderInfo::video_length
int64_t video_length
The number of frames in the video stream.
Definition: ReaderBase.h:53
openshot::ReaderInfo::height
int height
The height of the video (in pixels)
Definition: ReaderBase.h:45
openshot::VOLUME_MIX_REDUCE
@ VOLUME_MIX_REDUCE
Reduce volume by about 25%, and then mix (louder, but could cause pops if the sum exceeds 100%)
Definition: Enums.h:71
openshot::BBox::angle
float angle
bounding box rotation angle [degrees]
Definition: TrackedObjectBBox.h:43
openshot::Fraction::num
int num
Numerator for the fraction.
Definition: Fraction.h:32
openshot::Timeline::GetClip
openshot::Clip * GetClip(const std::string &id)
Look up a single clip by ID.
Definition: Timeline.cpp:417
openshot::Fraction::den
int den
Denominator for the fraction.
Definition: Fraction.h:33
OPEN_MP_NUM_PROCESSORS
#define OPEN_MP_NUM_PROCESSORS
Definition: OpenMPUtilities.h:23
openshot::Keyframe
A Keyframe is a collection of Point instances, which is used to vary a number or property over time.
Definition: KeyFrame.h:53
CrashHandler.h
Header file for CrashHandler class.
openshot::Fraction::Reduce
void Reduce()
Reduce this fraction (i.e. 640/480 = 4/3)
Definition: Fraction.cpp:65
openshot::Color::SetJsonValue
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: Color.cpp:117
openshot::Timeline::Open
void Open() override
Open the reader (and start consuming resources)
Definition: Timeline.cpp:930
openshot::Timeline::SetJson
void SetJson(const std::string value) override
Load JSON string into this object.
Definition: Timeline.cpp:1242
openshot::TimelineInfoStruct::is_before_clip_keyframes
bool is_before_clip_keyframes
Is this before clip keyframes are applied.
Definition: TimelineBase.h:35
openshot::Fraction::Reciprocal
Fraction Reciprocal() const
Return the reciprocal as a Fraction.
Definition: Fraction.cpp:78
openshot::ReaderInfo::has_audio
bool has_audio
Determines if this file has an audio stream.
Definition: ReaderBase.h:41
openshot::Timeline::GetTrackedObjectValues
std::string GetTrackedObjectValues(std::string id, int64_t frame_number) const
Return the trackedObject's properties as a JSON string.
Definition: Timeline.cpp:279
openshot::CacheMemory
This class is a memory-based cache manager for Frame objects.
Definition: CacheMemory.h:29
openshot::FrameMapper::Close
void Close() override
Close the openshot::FrameMapper and internal reader.
Definition: FrameMapper.cpp:723
openshot::InvalidJSON
Exception for invalid JSON.
Definition: Exceptions.h:223
openshot::Timeline::GetMaxTime
double GetMaxTime()
Look up the end time of the latest timeline element.
Definition: Timeline.cpp:472
openshot::BBox::width
float width
bounding box width
Definition: TrackedObjectBBox.h:41
openshot::Timeline
This class represents a timeline.
Definition: Timeline.h:152
openshot::Timeline::ClipEffects
std::list< openshot::EffectBase * > ClipEffects() const
Return the list of effects on all clips.
Definition: Timeline.cpp:453
CacheMemory.h
Header file for CacheMemory class.
openshot::Color::green
openshot::Keyframe green
Curve representing the green value (0 - 255)
Definition: Color.h:31
openshot::ReaderInfo
This struct contains info about a media file, such as height, width, frames per second,...
Definition: ReaderBase.h:38
openshot::CompareEffects
Definition: Timeline.h:63
openshot::ReaderInfo::video_timebase
openshot::Fraction video_timebase
The video timebase determines how long each frame stays on the screen.
Definition: ReaderBase.h:55
openshot::Settings::Instance
static Settings * Instance()
Create or get an instance of this logger singleton (invoke the class with this method)
Definition: Settings.cpp:23
openshot::TimelineInfoStruct
This struct contains info about the current Timeline clip instance.
Definition: TimelineBase.h:32
openshot::Timeline::Clear
void Clear()
Clear all clips, effects, and frame mappers from timeline (and free memory)
Definition: Timeline.cpp:862
openshot::Timeline::RemoveEffect
void RemoveEffect(openshot::EffectBase *effect)
Remove an effect from the timeline.
Definition: Timeline.cpp:379
openshot::ClipBase::Start
void Start(float value)
Set start position (in seconds) of clip (trim start of video)
Definition: ClipBase.cpp:42
path
path
Definition: FFmpegWriter.cpp:1474
openshot::Settings::PATH_OPENSHOT_INSTALL
std::string PATH_OPENSHOT_INSTALL
Definition: Settings.h:114
openshot::FrameMapper
This class creates a mapping between 2 different frame rates, applying a specific pull-down technique...
Definition: FrameMapper.h:193
openshot::Timeline::Close
void Close() override
Close the timeline reader (and any resources it was consuming)
Definition: Timeline.cpp:908
openshot::Frame::GetSamplesPerFrame
int GetSamplesPerFrame(openshot::Fraction fps, int sample_rate, int channels)
Calculate the # of samples per video frame (for the current frame number)
Definition: Frame.cpp:484
openshot::Timeline::AddClip
void AddClip(openshot::Clip *clip)
Add an openshot::Clip to the timeline.
Definition: Timeline.cpp:337
openshot::InvalidFile
Exception for files that can not be found or opened.
Definition: Exceptions.h:193
openshot::ZmqLogger::Instance
static ZmqLogger * Instance()
Create or get an instance of this logger singleton (invoke the class with this method)
Definition: ZmqLogger.cpp:35
openshot::ZmqLogger::AppendDebugMethod
void AppendDebugMethod(std::string method_name, std::string arg1_name="", float arg1_value=-1.0, std::string arg2_name="", float arg2_value=-1.0, std::string arg3_name="", float arg3_value=-1.0, std::string arg4_name="", float arg4_value=-1.0, std::string arg5_name="", float arg5_value=-1.0, std::string arg6_name="", float arg6_value=-1.0)
Append debug information.
Definition: ZmqLogger.cpp:178
openshot::Clip::volume
openshot::Keyframe volume
Curve representing the volume (0 to 1)
Definition: Clip.h:333
openshot::ReaderInfo::vcodec
std::string vcodec
The name of the video codec used to encode / decode the video stream.
Definition: ReaderBase.h:52
openshot::Timeline::AddTrackedObject
void AddTrackedObject(std::shared_ptr< openshot::TrackedObjectBase > trackedObject)
Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox)
Definition: Timeline.cpp:228
openshot::Color::JsonValue
Json::Value JsonValue() const
Generate Json::Value for this object.
Definition: Color.cpp:86
openshot::Keyframe::GetInt
int GetInt(int64_t index) const
Get the rounded INT value at a specific index.
Definition: KeyFrame.cpp:282
openshot::BBox
This struct holds the information of a bounding-box.
Definition: TrackedObjectBBox.h:37
openshot::Timeline::color
openshot::Color color
Background color of timeline canvas.
Definition: Timeline.h:335
openshot::ReaderClosed
Exception when a reader is closed, and a frame is requested.
Definition: Exceptions.h:369
openshot::Timeline::GetEffect
openshot::EffectBase * GetEffect(const std::string &id)
Look up a timeline effect by ID.
Definition: Timeline.cpp:429
openshot::ReaderInfo::channel_layout
openshot::ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
Definition: ReaderBase.h:62
openshot::CompareClips
Definition: Timeline.h:47
openshot::Clip::channel_filter
openshot::Keyframe channel_filter
A number representing an audio channel to filter (clears all other channels)
Definition: Clip.h:349
openshot::ClipBase::Id
void Id(std::string value)
Definition: ClipBase.h:94
openshot::Keyframe::GetCount
int64_t GetCount() const
Get the number of points (i.e. # of points)
Definition: KeyFrame.cpp:424
openshot::Timeline::Timeline
Timeline(int width, int height, openshot::Fraction fps, int sample_rate, int channels, openshot::ChannelLayout channel_layout)
Constructor for the timeline (which configures the default frame properties)
Definition: Timeline.cpp:33
openshot::ReaderInfo::fps
openshot::Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
Definition: ReaderBase.h:48
openshot::ReaderBase
This abstract class is the base class, used by all readers in libopenshot.
Definition: ReaderBase.h:75
openshot::Timeline::Json
std::string Json() const override
Generate JSON string of this object.
Definition: Timeline.cpp:1201
openshot::Timeline::GetMaxFrame
int64_t GetMaxFrame()
Look up the end frame number of the latest element on the timeline.
Definition: Timeline.cpp:478
openshot::ClipBase::GetFrame
virtual std::shared_ptr< openshot::Frame > GetFrame(int64_t frame_number)=0
This method is required for all derived classes of ClipBase, and returns a new openshot::Frame object...
openshot::Timeline::SetJsonValue
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
Definition: Timeline.cpp:1262
openshot::VOLUME_MIX_AVERAGE
@ VOLUME_MIX_AVERAGE
Evenly divide the overlapping clips volume keyframes, so that the sum does not exceed 100%.
Definition: Enums.h:70
openshot::Timeline::ApplyMapperToClips
void ApplyMapperToClips()
Apply the timeline's framerate and samplerate to all clips.
Definition: Timeline.cpp:526
openshot::Timeline::viewport_y
openshot::Keyframe viewport_y
Curve representing the y coordinate for the viewport.
Definition: Timeline.h:332
openshot::Clip::has_audio
openshot::Keyframe has_audio
An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes)
Definition: Clip.h:353
openshot::ChannelLayout
ChannelLayout
This enumeration determines the audio channel layout (such as stereo, mono, 5 point surround,...
Definition: ChannelLayouts.h:28
openshot::ReaderInfo::pixel_ratio
openshot::Fraction pixel_ratio
The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
Definition: ReaderBase.h:50
openshot::BBox::cx
float cx
x-coordinate of the bounding box center
Definition: TrackedObjectBBox.h:39
openshot::Clip::Reader
void Reader(openshot::ReaderBase *new_reader)
Set the current reader.
Definition: Clip.cpp:338
openshot::Timeline::GetClipEffect
openshot::EffectBase * GetClipEffect(const std::string &id)
Look up a clip effect by ID.
Definition: Timeline.cpp:440
openshot::Color::red
openshot::Keyframe red
Curve representing the red value (0 - 255)
Definition: Color.h:30
openshot::FrameMapper::Reader
ReaderBase * Reader()
Get the current reader.
Definition: FrameMapper.cpp:67
openshot::ReaderInfo::acodec
std::string acodec
The name of the audio codec used to encode / decode the audio stream.
Definition: ReaderBase.h:58
openshot::PULLDOWN_NONE
@ PULLDOWN_NONE
Do not apply pull-down techniques, just repeat or skip entire frames.
Definition: FrameMapper.h:46
openshot::ReaderInfo::display_ratio
openshot::Fraction display_ratio
The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
Definition: ReaderBase.h:51
openshot::TimelineInfoStruct::is_top_clip
bool is_top_clip
Is clip on top (if overlapping another clip)
Definition: TimelineBase.h:34
openshot::InvalidJSONKey
Exception for missing JSON Change key.
Definition: Exceptions.h:268
openshot::Color::GetColorHex
std::string GetColorHex(int64_t frame_number)
Get the HEX value of a color at a specific frame.
Definition: Color.cpp:47
openshot::ReaderInfo::channels
int channels
The number of audio channels used in the audio stream.
Definition: ReaderBase.h:61
openshot::Timeline::viewport_scale
openshot::Keyframe viewport_scale
Curve representing the scale of the viewport (0 to 100)
Definition: Timeline.h:330
openshot::Timeline::AddEffect
void AddEffect(openshot::EffectBase *effect)
Add an effect to the timeline.
Definition: Timeline.cpp:363
openshot::Timeline::JsonValue
Json::Value JsonValue() const override
Generate Json::Value for this object.
Definition: Timeline.cpp:1208
openshot::ReaderBase::GetCache
virtual openshot::CacheBase * GetCache()=0
Get the cache object used by this reader (note: not all readers use cache)
openshot::Color::blue
openshot::Keyframe blue
Curve representing the blue value (0 - 255)
Definition: Color.h:32
openshot::Timeline::GetTrackedObject
std::shared_ptr< openshot::TrackedObjectBase > GetTrackedObject(std::string id) const
Return tracked object pointer by its ID.
Definition: Timeline.cpp:246
Exceptions.h
Header file for all Exception classes.
openshot::Clip::mixing
openshot::VolumeMixType mixing
What strategy should be followed when mixing audio with other clips.
Definition: Clip.h:182
openshot::Timeline::GetMinTime
double GetMinTime()
Look up the position/start time of the first timeline element.
Definition: Timeline.cpp:494
openshot::EffectBase::SetJsonValue
virtual void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: EffectBase.cpp:139
openshot::Keyframe::GetValue
double GetValue(int64_t index) const
Get the value at a specific index.
Definition: KeyFrame.cpp:258
openshot::Timeline::SetCache
void SetCache(openshot::CacheBase *new_cache)
Definition: Timeline.cpp:1185
openshot::ReaderBase::getFrameMutex
std::recursive_mutex getFrameMutex
Mutex for multiple threads.
Definition: ReaderBase.h:79