    is_open(false), auto_map_clips(true), managed_cache(true), path(""),

    info.width, info.height, info.fps, info.sample_rate,
    info.channels, info.channel_layout) {}

    is_open(false), auto_map_clips(true), managed_cache(true), path(projectPath),
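
// Project-file constructor: verify the .osp file exists, locate the OpenShot
// install's "transitions" folder and the project's "*_assets" folder, then
// read the project JSON into memory.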
 
    QFileInfo filePath(QString::fromStdString(path));
    if (!filePath.exists()) {
        throw InvalidFile("File could not be opened.", path);

    if (!openshotPath.exists()) {

    QDir openshotTransPath(openshotPath.filePath("transitions"));
    if (!openshotTransPath.exists()) {
        throw InvalidFile("PATH_OPENSHOT_INSTALL/transitions could not be found.", openshotTransPath.path().toStdString());

    QString asset_name = filePath.baseName().left(30) + "_assets";
    QDir asset_folder(filePath.dir().filePath(asset_name));
    if (!asset_folder.exists()) {
        asset_folder.mkpath(".");

    QFile projectFile(QString::fromStdString(path));
    projectFile.open(QFile::ReadOnly);
    QString projectContents = QString::fromUtf8(projectFile.readAll());
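
// Convert relative "@assets" and "@transitions" references in the project JSON
// to absolute paths before parsing, replacing matches back-to-front so the
// captured offsets of earlier matches stay valid.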
 
    if (convert_absolute_paths) {

        QRegularExpression allPathsRegex(QStringLiteral("\"(image|path)\":.*?\"(.*?)\""));
        std::vector<QRegularExpressionMatch> matchedPositions;
        QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(projectContents);
        while (i.hasNext()) {
            QRegularExpressionMatch match = i.next();
            if (match.hasMatch()) {
                matchedPositions.push_back(match);

        std::vector<QRegularExpressionMatch>::reverse_iterator itr;
        for (itr = matchedPositions.rbegin(); itr != matchedPositions.rend(); itr++) {
            QRegularExpressionMatch match = *itr;
            QString relativeKey = match.captured(1);
            QString relativePath = match.captured(2);
            QString absolutePath = "";

            if (relativePath.startsWith("@assets")) {
                absolutePath = QFileInfo(asset_folder.absoluteFilePath(relativePath.replace("@assets", "."))).canonicalFilePath();
            } else if (relativePath.startsWith("@transitions")) {
                absolutePath = QFileInfo(openshotTransPath.absoluteFilePath(relativePath.replace("@transitions", "."))).canonicalFilePath();

                absolutePath = QFileInfo(filePath.absoluteDir().absoluteFilePath(relativePath)).canonicalFilePath();

            if (!absolutePath.isEmpty()) {
                projectContents.replace(match.capturedStart(0), match.capturedLength(0), "\"" + relativeKey + "\": \"" + absolutePath + "\"");

        matchedPositions.clear();

    SetJson(projectContents.toStdString());
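
// Derive the timeline's duration from the loaded clips: track the latest clip
// end frame and note whether any clip provides audio or video.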
 
    float calculated_duration = 0.0;
    for (auto clip : clips)

        if (clip_last_frame > calculated_duration)
            calculated_duration = clip_last_frame;
        if (clip->Reader() && clip->Reader()->info.has_audio)

        if (clip->Reader() && clip->Reader()->info.has_video)
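
// Tear-down: the frame cache is only deleted while the timeline still owns it
// (managed_cache); caller-supplied caches are left alone.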
 
    if (managed_cache && final_cache) {
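
// Register a tracked object keyed by its Id, updating the existing entry when
// that Id is already present.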
 
    auto iterator = tracked_objects.find(trackedObject->Id());
    if (iterator != tracked_objects.end()) {
        iterator->second = trackedObject;

        tracked_objects[trackedObject->Id()] = trackedObject;
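
// Look up a tracked object by id and return its shared pointer when found.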
 
    auto iterator = tracked_objects.find(id);
    if (iterator != tracked_objects.end()) {
        std::shared_ptr<openshot::TrackedObjectBase> trackedObject = iterator->second;
        return trackedObject;
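
// Collect the ids of every tracked object registered on this timeline.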
 
    std::list<std::string> trackedObjects_ids;
    for (auto const& it : tracked_objects) {
        trackedObjects_ids.push_back(it.first);

    return trackedObjects_ids;
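
// Build a JSON description of a tracked object's bounding box (corner
// coordinates and rotation) at a given frame, falling back to the first stored
// box, or to zeros when the id is unknown.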
 
    Json::Value trackedObjectJson;

    auto iterator = tracked_objects.find(id);
    if (iterator != tracked_objects.end())

        std::shared_ptr<TrackedObjectBBox> trackedObject = std::static_pointer_cast<TrackedObjectBBox>(iterator->second);

        if (trackedObject->ExactlyContains(frame_number)) {
            BBox box = trackedObject->GetBox(frame_number);
            float x1 = box.cx - (box.width/2);
            float x2 = box.cx + (box.width/2);
            float rotation = box.angle;

            trackedObjectJson["x1"] = x1;
            trackedObjectJson["y1"] = y1;
            trackedObjectJson["x2"] = x2;
            trackedObjectJson["y2"] = y2;
            trackedObjectJson["rotation"] = rotation;

            BBox box = trackedObject->BoxVec.begin()->second;
            float x1 = box.cx - (box.width/2);
            float x2 = box.cx + (box.width/2);
            float rotation = box.angle;

            trackedObjectJson["x1"] = x1;
            trackedObjectJson["y1"] = y1;
            trackedObjectJson["x2"] = x2;
            trackedObjectJson["y2"] = y2;
            trackedObjectJson["rotation"] = rotation;

        trackedObjectJson["x1"] = 0;
        trackedObjectJson["y1"] = 0;
        trackedObjectJson["x2"] = 0;
        trackedObjectJson["y2"] = 0;
        trackedObjectJson["rotation"] = 0;

    return trackedObjectJson.toStyledString();
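
// Add a clip: take the frame lock, parent the clip to this timeline, clear its
// reader's cache, optionally remap it to the timeline's frame rate, then store it.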
 
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    clip->ParentTimeline(this);

    if (clip->Reader() && clip->Reader()->GetCache())
        clip->Reader()->GetCache()->Clear();

    if (auto_map_clips) {
        apply_mapper_to_clip(clip);

    clips.push_back(clip);
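
// Add and remove effects and clips; entries are also erased from the
// allocated_* bookkeeping sets so timeline-owned objects are not freed twice.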
 
    effect->ParentTimeline(this);

    effects.push_back(effect);

    effects.remove(effect);

    bool allocated = allocated_effects.count(effect);

        allocated_effects.erase(effect);

    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    bool allocated = allocated_clips.count(clip);

        allocated_clips.erase(clip);
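
// Lookup helpers: find a clip or an effect by id (directly on the timeline or
// on one of its clips) and gather every clip-level effect into a single list.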
 
    for (const auto& clip : clips) {

    for (const auto& effect : effects) {
        if (effect->Id() == id) {

    for (const auto& clip : clips) {
        const auto e = clip->GetEffect(id);

    std::list<EffectBase*> timelineEffectsList;

    for (const auto& clip : clips) {

        std::list<EffectBase*> clipEffectsList = clip->Effects();

        timelineEffectsList.insert(timelineEffectsList.end(), clipEffectsList.begin(), clipEffectsList.end());

    return timelineEffectsList;
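
// Convert the cached min/max times (in seconds) into frame numbers using the
// timeline frame rate.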
 
    return std::round(max_time * fps);

    return std::round(min_time * fps) + 1;
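
// Wrap the clip's reader in a FrameMapper (or reuse an existing one) so it
// matches the timeline's output format; mappers allocated here are tracked so
// they can be freed later.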
 
void Timeline::apply_mapper_to_clip(Clip* clip)

    if (clip->Reader()->Name() == "FrameMapper")

        allocated_frame_mappers.insert(mapper);

    clip->Reader(clip_reader);

    for (auto clip : clips)

        apply_mapper_to_clip(clip);
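
// Convert a 1-based frame number into a time (in seconds) for the given frame rate.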
 
double Timeline::calculate_time(int64_t number, Fraction rate)

    double raw_fps = rate.ToFloat();

    return double(number - 1) / raw_fps;
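
// Run every timeline-level effect whose frame range and layer intersect the
// requested frame, passing the frame through each effect's GetFrame().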
 
        "Timeline::apply_effects",
        "frame->number", frame->number,
        "timeline_frame_number", timeline_frame_number,

    for (auto effect : effects)

        long effect_start_position = round(effect->Position() * info.fps.ToDouble()) + 1;
        long effect_end_position = round((effect->Position() + (effect->Duration())) * info.fps.ToDouble());

        bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->Layer() == layer);

        if (does_effect_intersect)

            long effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;

                "Timeline::apply_effects (Process Effect)",
                "effect_frame_number", effect_frame_number,
                "does_effect_intersect", does_effect_intersect);

            frame = effect->GetFrame(frame, effect_frame_number);
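
// GetOrCreateFrame: ask the clip to render its frame on top of the supplied
// background frame; otherwise fall back to creating a blank frame.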
 
std::shared_ptr<Frame> Timeline::GetOrCreateFrame(std::shared_ptr<Frame> background_frame, Clip* clip, int64_t number, openshot::TimelineInfoStruct* options)

    std::shared_ptr<Frame> new_frame;

            "Timeline::GetOrCreateFrame (from reader)",
            "samples_in_frame", samples_in_frame);

        new_frame = std::shared_ptr<Frame>(clip->GetFrame(background_frame, number, options));

        "Timeline::GetOrCreateFrame (create blank)",
        "samples_in_frame", samples_in_frame);
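
// add_layer: composite one clip's frame onto the timeline frame and mix its
// audio, ramping per-channel volume and scaling by max_volume (or a fixed 0.77
// factor) when several overlapping clips contribute to the mix.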
 
void Timeline::add_layer(std::shared_ptr<Frame> new_frame, Clip* source_clip, int64_t clip_frame_number, bool is_top_clip, float max_volume)

    std::shared_ptr<Frame> source_frame;
    source_frame = GetOrCreateFrame(new_frame, source_clip, clip_frame_number, options);

        "Timeline::add_layer",
        "new_frame->number", new_frame->number,
        "clip_frame_number", clip_frame_number);

    if (source_clip->Reader()->info.has_audio) {

            "Timeline::add_layer (Copy Audio)",
            "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio,
            "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
            "clip_frame_number", clip_frame_number);

            for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)

                float previous_volume = source_clip->volume.GetValue(clip_frame_number - 1);

                    previous_volume = previous_volume / max_volume;
                    volume = volume / max_volume;

                    previous_volume = previous_volume * 0.77;
                    volume = volume * 0.77;

                if (channel_filter != -1 && channel_filter != channel)

                if (previous_volume == 0.0 && volume == 0.0)

                if (channel_mapping == -1)
                    channel_mapping = channel;

                if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0))
                    source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);

                if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount()) {

                new_frame->AddAudio(false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0);

                "Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
                "source_clip->Reader()->info.has_audio",
                    source_clip->Reader()->info.has_audio,
                "source_frame->GetAudioChannelsCount()",
                    source_frame->GetAudioChannelsCount(),
                "clip_frame_number", clip_frame_number);

        "Timeline::add_layer (Transform: Composite Image Layer: Completed)",
        "source_frame->number", source_frame->number,
        "new_frame->GetImage()->width()", new_frame->GetWidth(),
        "new_frame->GetImage()->height()", new_frame->GetHeight());
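
// update_open_clips: keep the open_clips set in sync with whether each clip
// currently intersects the frame being rendered.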
 
void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect)

    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

        "Timeline::update_open_clips (before)",
        "does_clip_intersect", does_clip_intersect,
        "closing_clips.size()", closing_clips.size(),
        "open_clips.size()", open_clips.size());

    bool clip_found = open_clips.count(clip);

    if (clip_found && !does_clip_intersect)

        open_clips.erase(clip);

    else if (!clip_found && does_clip_intersect)

        "Timeline::update_open_clips (after)",
        "does_clip_intersect", does_clip_intersect,
        "clip_found", clip_found,
        "closing_clips.size()", closing_clips.size(),
        "open_clips.size()", open_clips.size());
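
// Scan clips and effects for the earliest start and the latest end (position +
// duration) and cache them as min_time / max_time.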
 
void Timeline::calculate_max_duration() {
    double last_clip = 0.0;
    double last_effect = 0.0;
    double first_clip = std::numeric_limits<double>::max();
    double first_effect = std::numeric_limits<double>::max();

    if (!clips.empty()) {

        const auto max_clip = std::max_element(

        last_clip = (*max_clip)->Position() + (*max_clip)->Duration();

        const auto min_clip = std::min_element(

                return lhs->Position() < rhs->Position();

        first_clip = (*min_clip)->Position();

    if (!effects.empty()) {

        const auto max_effect = std::max_element(

        last_effect = (*max_effect)->Position() + (*max_effect)->Duration();

        const auto min_effect = std::min_element(

                return lhs->Position() < rhs->Position();

        first_effect = (*min_effect)->Position();

    max_time = std::max(last_clip, last_effect);
    min_time = std::min(first_clip, first_effect);

    if (clips.empty() && effects.empty()) {
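
// Re-sort clips and effects under the frame lock, then refresh the cached
// min/max duration.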
 
void Timeline::sort_clips()

    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

        "Timeline::SortClips",
        "clips.size()", clips.size());

    calculate_max_duration();

void Timeline::sort_effects()

    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    calculate_max_duration();
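
// Shutdown/clean-up: mark every clip as closed, free any clips, effects, and
// frame mappers the timeline allocated itself, and reset the bookkeeping sets.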
 
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    for (auto clip : clips)

        update_open_clips(clip, false);

        bool allocated = allocated_clips.count(clip);

    allocated_clips.clear();

    for (auto effect : effects)

        bool allocated = allocated_effects.count(effect);

    allocated_effects.clear();

    for (auto mapper : allocated_frame_mappers)

    allocated_frame_mappers.clear();

    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    for (auto clip : clips)

        update_open_clips(clip, false);
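
// Tolerant floating-point comparison used by the volume checks above.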
 
bool Timeline::isEqual(double a, double b)

    return fabs(a - b) < 0.000001;
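
// GetFrame: return a cached frame if available; otherwise take the frame lock,
// re-check the cache, find the clips intersecting the requested frame, start
// from a blank frame (audio silence plus the background color), composite each
// intersecting clip via add_layer(), and add the result to the cache.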
 
    if (requested_frame < 1)

    std::shared_ptr<Frame> frame;
    frame = final_cache->GetFrame(requested_frame);

            "Timeline::GetFrame (Cached frame found)",
            "requested_frame", requested_frame);

        const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

        std::shared_ptr<Frame> frame;
        frame = final_cache->GetFrame(requested_frame);

                    "Timeline::GetFrame (Cached frame found on 2nd check)",
                    "requested_frame", requested_frame);

            std::vector<Clip *> nearby_clips;
            nearby_clips = find_intersecting_clips(requested_frame, 1, true);

                    "Timeline::GetFrame (processing frame)",
                    "requested_frame", requested_frame,
                    "omp_get_thread_num()", omp_get_thread_num());

            new_frame->AddAudioSilence(samples_in_frame);

                    "Timeline::GetFrame (Adding solid color)",
                    "requested_frame", requested_frame,

                    "Timeline::GetFrame (Loop through clips)",
                    "requested_frame", requested_frame,
                    "clips.size()", clips.size(),
                    "nearby_clips.size()", nearby_clips.size());
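
// For each intersecting clip, work out whether it is the top-most clip on its
// layer at this frame and sum the volume of every overlapping audio clip so
// add_layer() can scale the mix.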
 
            for (auto clip : nearby_clips) {

                bool does_clip_intersect = (clip_start_position <= requested_frame && clip_end_position >= requested_frame);

                        "Timeline::GetFrame (Does clip intersect)",
                        "requested_frame", requested_frame,
                        "clip->Duration()", clip->Duration(),
                        "does_clip_intersect", does_clip_intersect);

                if (does_clip_intersect) {

                    bool is_top_clip = true;
                    float max_volume = 0.0;
                    for (auto nearby_clip : nearby_clips) {
                        long nearby_clip_start_position = round(nearby_clip->Position() * info.fps.ToDouble()) + 1;
                        long nearby_clip_end_position = round((nearby_clip->Position() + nearby_clip->Duration()) * info.fps.ToDouble()) + 1;
                        long nearby_clip_start_frame = (nearby_clip->Start() * info.fps.ToDouble()) + 1;
                        long nearby_clip_frame_number = requested_frame - nearby_clip_start_position + nearby_clip_start_frame;

                        if (clip->Id() != nearby_clip->Id() && clip->Layer() == nearby_clip->Layer() &&
                            nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame &&
                            nearby_clip_start_position > clip_start_position && is_top_clip == true) {
                            is_top_clip = false;

                        if (nearby_clip->Reader() && nearby_clip->Reader()->info.has_audio &&
                            nearby_clip->has_audio.GetInt(nearby_clip_frame_number) != 0 &&
                            nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame) {
                            max_volume += nearby_clip->volume.GetValue(nearby_clip_frame_number);

                    long clip_frame_number = requested_frame - clip_start_position + clip_start_frame;

                            "Timeline::GetFrame (Calculate clip's frame #)",
                            "clip_frame_number", clip_frame_number);

                    add_layer(new_frame, clip, clip_frame_number, is_top_clip, max_volume);

                            "Timeline::GetFrame (clip does not intersect)",
                            "requested_frame", requested_frame,
                            "does_clip_intersect", does_clip_intersect);

                    "Timeline::GetFrame (Add frame to cache)",
                    "requested_frame", requested_frame,

            new_frame->SetFrameNumber(requested_frame);

            final_cache->Add(new_frame);
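
// find_intersecting_clips: return the clips whose frame range overlaps the
// requested range (or, when include is false, the ones that do not), updating
// each clip's open/closed state along the way.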
 
std::vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include)

    std::vector<Clip*> matching_clips;

    float min_requested_frame = requested_frame;
    float max_requested_frame = requested_frame + (number_of_frames - 1);

    for (auto clip : clips)

        bool does_clip_intersect =
                (clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
                (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);

            "Timeline::find_intersecting_clips (Is clip near or intersecting)",
            "requested_frame", requested_frame,
            "min_requested_frame", min_requested_frame,
            "max_requested_frame", max_requested_frame,
            "does_clip_intersect", does_clip_intersect);

        update_open_clips(clip, does_clip_intersect);

        if (does_clip_intersect && include)
            matching_clips.push_back(clip);
        else if (!does_clip_intersect && !include)
            matching_clips.push_back(clip);

    return matching_clips;
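
// Replace the frame cache with a caller-supplied one; from here on the
// timeline no longer manages (or deletes) the cache.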
 
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    if (managed_cache && final_cache) {

        managed_cache = false;

    final_cache = new_cache;
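
// Serialise the timeline to JSON ("type", "path", plus every clip and effect);
// invalid input JSON raises InvalidJSON.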
 
    root["type"] = "Timeline";

    root["path"] = path;

    root["clips"] = Json::Value(Json::arrayValue);

    for (const auto existing_clip : clips)
        root["clips"].append(existing_clip->JsonValue());

    root["effects"] = Json::Value(Json::arrayValue);

    for (const auto existing_effect : effects)
        root["effects"].append(existing_effect->JsonValue());

    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    catch (const std::exception& e)

        throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
 
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    bool was_open = is_open;

    if (!root["path"].isNull())
        path = root["path"].asString();

    if (!root["clips"].isNull()) {

        for (const Json::Value existing_clip : root["clips"]) {

            if (existing_clip.isNull()) {

            allocated_clips.insert(c);

    if (!root["effects"].isNull()) {

        for (const Json::Value existing_effect : root["effects"]) {

            if (existing_effect.isNull()) {

            if (!existing_effect["type"].isNull()) {

                if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) {

                    allocated_effects.insert(e);

    if (!root["duration"].isNull()) {
 
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

        for (const Json::Value change : root) {
            std::string change_key = change["key"][(uint)0].asString();

            if (change_key == "clips")
                apply_json_to_clips(change);
            else if (change_key == "effects")
                apply_json_to_effects(change);

                apply_json_to_timeline(change);

    catch (const std::exception& e)

        throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
 
void Timeline::apply_json_to_clips(Json::Value change) {

    std::string change_type = change["type"].asString();
    std::string clip_id = "";
    Clip *existing_clip = NULL;

    for (auto key_part : change["key"]) {

        if (key_part.isObject()) {

            if (!key_part["id"].isNull()) {

                clip_id = key_part["id"].asString();

                for (auto c : clips)

                    if (c->Id() == clip_id) {

    if (existing_clip && change["key"].size() == 4 && change["key"][2] == "effects")

        Json::Value key_part = change["key"][3];

        if (key_part.isObject()) {

            if (!key_part["id"].isNull())

                std::string effect_id = key_part["id"].asString();

                std::list<EffectBase*> effect_list = existing_clip->Effects();
                for (auto e : effect_list)

                    if (e->Id() == effect_id) {

                        apply_json_to_effects(change, e);

                        int64_t new_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
                        final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
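
// Handle the change itself: insert a new clip (tracking it as allocated),
// update or delete the existing one, and purge the old and new frame ranges
// from the timeline and reader caches.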
 
    if (change_type == "insert") {

        allocated_clips.insert(clip);

    } else if (change_type == "update") {

        if (existing_clip) {

            int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

            if (existing_clip->Reader() && existing_clip->Reader()->GetCache())
                existing_clip->Reader()->GetCache()->Remove(old_starting_frame - 8, old_ending_frame + 8);

            if (auto_map_clips) {
                apply_mapper_to_clip(existing_clip);

    } else if (change_type == "delete") {

        if (existing_clip) {

            int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

    if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
        int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
        int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
        final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
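
// apply_json_to_effects (timeline-level): resolve the effect id from the
// change key, then delegate to the per-effect overload below.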
 
void Timeline::apply_json_to_effects(Json::Value change) {

    std::string change_type = change["type"].asString();

    for (auto key_part : change["key"]) {

        if (key_part.isObject()) {

            if (!key_part["id"].isNull())

                std::string effect_id = key_part["id"].asString();

                for (auto e : effects)

                    if (e->Id() == effect_id) {

                        existing_effect = e;

    if (existing_effect || change_type == "insert") {

        apply_json_to_effects(change, existing_effect);
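
// apply_json_to_effects (per-effect): invalidate the cached frame range touched
// by the change, then create, update, or delete the effect accordingly.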
 
void Timeline::apply_json_to_effects(Json::Value change, EffectBase* existing_effect) {

    std::string change_type = change["type"].asString();

    if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
        int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
        int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
        final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);

    if (change_type == "insert") {

        std::string effect_type = change["value"]["type"].asString();

            allocated_effects.insert(e);

    } else if (change_type == "update") {

        if (existing_effect) {

            int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

    } else if (change_type == "delete") {

        if (existing_effect) {

            int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
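
// apply_json_to_timeline: map a change on a timeline property (color, viewport,
// duration, width/height, fps, display/pixel ratio, sample rate, channels,
// channel layout) onto the matching member; unknown keys raise InvalidJSONKey,
// and most changes leave cache_dirty set so the cache is flushed afterwards.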
 
void Timeline::apply_json_to_timeline(Json::Value change) {
    bool cache_dirty = true;

    std::string change_type = change["type"].asString();
    std::string root_key = change["key"][(uint)0].asString();
    std::string sub_key = "";
    if (change["key"].size() >= 2)
        sub_key = change["key"][(uint)1].asString();

    if (change_type == "insert" || change_type == "update") {

        if (root_key == "color")

        else if (root_key == "viewport_scale")

        else if (root_key == "viewport_x")

        else if (root_key == "viewport_y")

        else if (root_key == "duration") {

            cache_dirty = false;

        else if (root_key == "width") {

        else if (root_key == "height") {

        else if (root_key == "fps" && sub_key == "" && change["value"].isObject()) {

            if (!change["value"]["num"].isNull())
                info.fps.num = change["value"]["num"].asInt();
            if (!change["value"]["den"].isNull())
                info.fps.den = change["value"]["den"].asInt();

        else if (root_key == "fps" && sub_key == "num")

        else if (root_key == "fps" && sub_key == "den")

        else if (root_key == "display_ratio" && sub_key == "" && change["value"].isObject()) {

            if (!change["value"]["num"].isNull())

            if (!change["value"]["den"].isNull())

        else if (root_key == "display_ratio" && sub_key == "num")

        else if (root_key == "display_ratio" && sub_key == "den")

        else if (root_key == "pixel_ratio" && sub_key == "" && change["value"].isObject()) {

            if (!change["value"]["num"].isNull())

            if (!change["value"]["den"].isNull())

        else if (root_key == "pixel_ratio" && sub_key == "num")

        else if (root_key == "pixel_ratio" && sub_key == "den")

        else if (root_key == "sample_rate")

        else if (root_key == "channels")

        else if (root_key == "channel_layout")

            throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
 
    } else if (change["type"].asString() == "delete") {

        if (root_key == "color") {

        else if (root_key == "viewport_scale")

        else if (root_key == "viewport_x")

        else if (root_key == "viewport_y")

            throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
 
    final_cache->Clear();

    for (const auto clip : clips) {

        clip->Reader()->GetCache()->Clear();

        if (deep && clip->Reader()->Name() == "FrameMapper") {

        clip->GetCache()->Clear();
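
// Scale the maximum preview size to the timeline's display aspect ratio.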
 
    display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio);