#include "../include/Timeline.h"

using namespace openshot;

// Default Constructor for the timeline (which sets the canvas width, height, and FPS)
Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout) :
		is_open(false), auto_map_clips(true)
{
	// Init viewport and background color keyframes (assumed defaults: full scale, no offset, black)
	viewport_scale = Keyframe(100.0);
	viewport_x = Keyframe(0.0);
	viewport_y = Keyframe(0.0);
	color.red = Keyframe(0.0);
	color.green = Keyframe(0.0);
	color.blue = Keyframe(0.0);

	// Copy the canvas and audio settings into the reader info struct
	info.width = width;
	info.height = height;
	info.fps = fps;
	info.sample_rate = sample_rate;
	info.channels = channels;
	info.channel_layout = channel_layout;
}
// Add an openshot::Clip to the timeline
void Timeline::AddClip(Clip* clip)
{
	// Convert the clip to this timeline's frame rate and audio settings (if enabled)
	if (auto_map_clips)
		apply_mapper_to_clip(clip);

	// Add clip to the list
	clips.push_back(clip);
}

// Add an effect to the timeline
void Timeline::AddEffect(EffectBase* effect)
{
	// Add effect to the list
	effects.push_back(effect);
}

// Remove an effect from the timeline
void Timeline::RemoveEffect(EffectBase* effect)
{
	effects.remove(effect);
}
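// Example usage (hypothetical; the file name and settings are illustrative only):
//   Timeline t(1280, 720, Fraction(24, 1), 44100, 2, LAYOUT_STEREO);
//   Clip c("video.mp4");
//   t.AddClip(&c);   // the clip is wrapped in a FrameMapper (see below)
//   t.Open();
//   tr1::shared_ptr<Frame> f = t.GetFrame(1);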
// Apply a FrameMapper to a clip, so it matches the frame rate and audio settings of this timeline
void Timeline::apply_mapper_to_clip(Clip* clip)
{
	FrameMapper* clip_reader = NULL;
	if (clip->Reader()->Name() == "FrameMapper")
	{
		// The clip is already wrapped: just update the existing mapper's settings
		clip_reader = (FrameMapper*) clip->Reader();
		clip_reader->ChangeMapping(info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout);
	}
	else
	{
		// (assumed) Wrap the clip's reader in a new FrameMapper with the same settings
		clip_reader = new FrameMapper(clip->Reader(), info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout);
	}

	// Update the clip to use the mapped reader
	clip->Reader(clip_reader);
}
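// Note: because of the FrameMapper wrapper, every clip presents frames at the
// timeline's own frame rate and audio settings, regardless of the source file;
// with PULLDOWN_NONE, whole frames are simply repeated or skipped to hit the
// target rate (e.g. a 30 fps source on a 24 fps timeline drops 6 frames/second).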
// Apply the timeline's framerate and samplerate to all clips
void Timeline::ApplyMapperToClips()
{
	// Loop through all clips
	list<Clip*>::iterator clip_itr;
	for (clip_itr = clips.begin(); clip_itr != clips.end(); ++clip_itr)
	{
		// Get clip object from the iterator
		Clip *clip = (*clip_itr);

		// Apply a framemapper (or update the existing one)
		apply_mapper_to_clip(clip);
	}
}
// Calculate the time (in seconds) of a frame number, based on a framerate
float Timeline::calculate_time(long int number, Fraction rate)
{
	// Get a float version of the fps fraction
	float raw_fps = rate.ToFloat();

	// Return the time (in seconds) of this frame
	return float(number - 1) / raw_fps;
}
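// Example: at 24 fps (rate = 24/1), frame 1 maps to (1 - 1) / 24 = 0.0 seconds,
// and frame 25 maps to (25 - 1) / 24 = 1.0 seconds (frame numbers are 1-based).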
// Apply any effects which intersect the given frame (on the given layer)
tr1::shared_ptr<Frame> Timeline::apply_effects(tr1::shared_ptr<Frame> frame, long int timeline_frame_number, int layer)
{
	// Calculate the time (in seconds) of this timeline frame
	float requested_time = calculate_time(timeline_frame_number, info.fps);

	// Debug output
	AppendDebugMethod("Timeline::apply_effects", "requested_time", requested_time, "frame->number", frame->number, "timeline_frame_number", timeline_frame_number, "layer", layer, "", -1, "", -1);
	// Find any effects that intersect this frame's time and layer
	list<EffectBase*>::iterator effect_itr;
	for (effect_itr = effects.begin(); effect_itr != effects.end(); ++effect_itr)
	{
		// Get effect object from the iterator
		EffectBase *effect = (*effect_itr);

		// Does the effect intersect the requested time (on the requested layer)?
		float effect_duration = effect->End() - effect->Start();
		bool does_effect_intersect = (effect->Position() <= requested_time && effect->Position() + effect_duration >= requested_time && effect->Layer() == layer);
		// Debug output
		AppendDebugMethod("Timeline::apply_effects (Does effect intersect)", "effect->Position()", effect->Position(), "requested_time", requested_time, "does_effect_intersect", does_effect_intersect, "timeline_frame_number", timeline_frame_number, "layer", layer, "effect_duration", effect_duration);
		// The effect is active on this frame
		if (does_effect_intersect)
		{
			// Determine the effect's own frame number (based on its position on the timeline)
			float time_diff = (requested_time - effect->Position()) + effect->Start();
			int effect_frame_number = round(time_diff * info.fps.ToFloat()) + 1;
			// Debug output
			AppendDebugMethod("Timeline::apply_effects (Process Effect)", "time_diff", time_diff, "effect_frame_number", effect_frame_number, "effect_duration", effect_duration, "does_effect_intersect", does_effect_intersect, "", -1, "", -1);

			// Apply the effect, replacing the frame with the modified one
			frame = effect->GetFrame(frame, effect_frame_number);
		}

	} // end effect loop

	// Return the (possibly modified) frame
	return frame;
}
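// Worked example of the mapping above: an effect with Position() = 2.0s and
// Start() = 0.5s on a 30 fps timeline, asked for timeline frame 91:
//   requested_time      = (91 - 1) / 30       = 3.0s
//   time_diff           = (3.0 - 2.0) + 0.5   = 1.5s
//   effect_frame_number = round(1.5 * 30) + 1 = 46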
// Get or generate a frame for the given clip
tr1::shared_ptr<Frame> Timeline::GetOrCreateFrame(Clip* clip, long int number)
{
	tr1::shared_ptr<Frame> new_frame;

	// Attempt to get a frame from the clip's reader
	new_frame = tr1::shared_ptr<Frame>(clip->GetFrame(number));

	// Return the frame
	return new_frame;
}
// Composite a source clip's frame onto the timeline's frame (at the correct layer)
void Timeline::add_layer(tr1::shared_ptr<Frame> new_frame, Clip* source_clip, long int clip_frame_number, long int timeline_frame_number, bool is_top_clip)
{
	// Get the clip's frame (or create a blank one)
	tr1::shared_ptr<Frame> source_frame = GetOrCreateFrame(source_clip, clip_frame_number);
	// Debug output
	AppendDebugMethod("Timeline::add_layer", "new_frame->number", new_frame->number, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number, "", -1, "", -1, "", -1);
	/* GENERATE WAVEFORM IMAGE (if the clip is set to display one) */
	if (source_clip->Waveform())
	{
		// Debug output
		AppendDebugMethod("Timeline::add_layer (Generate Waveform Image)", "source_frame->number", source_frame->number, "source_clip->Waveform()", source_clip->Waveform(), "clip_frame_number", clip_frame_number, "", -1, "", -1, "", -1);

		// Get the color of the waveform (from the clip's wave_color keyframes)
		int red = source_clip->wave_color.red.GetInt(clip_frame_number);
		int green = source_clip->wave_color.green.GetInt(clip_frame_number);
		int blue = source_clip->wave_color.blue.GetInt(clip_frame_number);
		int alpha = source_clip->wave_color.alpha.GetInt(clip_frame_number);

		// Generate a waveform image the size of the timeline, and attach it to the frame
		tr1::shared_ptr<QImage> source_image = source_frame->GetWaveform(info.width, info.height, red, green, blue, alpha);
		source_frame->AddImage(tr1::shared_ptr<QImage>(source_image));
	}

	/* Apply effects to the source frame (if any intersect this layer and time) */
	source_frame = apply_effects(source_frame, timeline_frame_number, source_clip->Layer());
	// Declare an image to hold the clip's image (once retrieved below)
	tr1::shared_ptr<QImage> source_image;

	/* COPY AUDIO - only if the clip has audio */
	if (source_clip->Reader()->info.has_audio) {
		// Debug output
		AppendDebugMethod("Timeline::add_layer (Copy Audio)", "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio, "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(), "info.channels", info.channels, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number, "", -1);
		// Only copy audio if the channel counts match
		if (source_frame->GetAudioChannelsCount() == info.channels)
			// Loop through each audio channel
			for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
			{
				float initial_volume = 1.0f;
				float previous_volume = source_clip->volume.GetValue(clip_frame_number - 1); // previous frame's volume (0 to 1)
				float volume = source_clip->volume.GetValue(clip_frame_number); // this frame's volume (0 to 1)

				// If the volume is steady, just apply it as the initial volume of the copied samples
				if (isEqual(previous_volume, volume))
					initial_volume = volume;

				// If the volume changed between frames, ramp the gain across this frame's
				// samples instead, which avoids an audible click at the frame boundary
				if (!isEqual(previous_volume, volume))
					source_frame->ApplyGainRamp(channel, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);

				// (assumed) If the sample counts differ, resize the frame's audio so they match before mixing
				if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount())
					new_frame->ResizeAudio(info.channels, source_frame->GetAudioSamplesCount(), info.sample_rate, info.channel_layout);

				// Copy the samples into the timeline frame (mixing, not replacing)
				new_frame->AddAudio(false, channel, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), initial_volume);
			}
		else
			// Debug output
			AppendDebugMethod("Timeline::add_layer (No Audio Copied - Wrong # of Channels)", "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio, "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(), "info.channels", info.channels, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number, "", -1);
	}
	// Skip out if this is only an audio frame (no image to composite)
	if (!source_clip->Waveform() && !source_clip->Reader()->info.has_video)
		return;

	// Debug output
	AppendDebugMethod("Timeline::add_layer (Get Source Image)", "source_frame->number", source_frame->number, "source_clip->Waveform()", source_clip->Waveform(), "clip_frame_number", clip_frame_number, "", -1, "", -1, "", -1);

	// Get the clip's frame image
	source_image = source_frame->GetImage();
	// Size of the source image
	int source_width = source_image->width();
	int source_height = source_image->height();

	/* ALPHA & OPACITY */
	float alpha = source_clip->alpha.GetValue(clip_frame_number);

	// Get the source image's pixels (4 bytes per pixel; alpha is the 4th byte)
	unsigned char *pixels = (unsigned char *) source_image->bits();

	// Loop through the pixels, scaling each pixel's alpha channel by the clip's alpha keyframe
	for (int pixel = 0, byte_index = 0; pixel < source_image->width() * source_image->height(); pixel++, byte_index += 4)
	{
		// Get the current alpha value of the pixel
		int A = pixels[byte_index + 3];

		// Apply the clip's alpha (opacity) to the pixel
		pixels[byte_index + 3] *= alpha;
	}
	// Debug output
	AppendDebugMethod("Timeline::add_layer (Set Alpha & Opacity)", "alpha", alpha, "source_frame->number", source_frame->number, "clip_frame_number", clip_frame_number, "", -1, "", -1, "", -1);
	/* RESIZE SOURCE IMAGE - based on the clip's scale setting */
	switch (source_clip->scale)
	{
	case (SCALE_FIT):
		// Scale until either the width or the height fits the canvas (keeping the aspect ratio)
		source_image = tr1::shared_ptr<QImage>(new QImage(source_image->scaled(info.width, info.height, Qt::KeepAspectRatio, Qt::SmoothTransformation)));
		source_width = source_image->width();
		source_height = source_image->height();

		// Debug output
		AppendDebugMethod("Timeline::add_layer (Scale: SCALE_FIT)", "source_frame->number", source_frame->number, "source_width", source_width, "source_height", source_height, "", -1, "", -1, "", -1);
		break;
	case (SCALE_STRETCH):
		// Distort the image to fill the canvas exactly (ignoring the aspect ratio)
		source_image = tr1::shared_ptr<QImage>(new QImage(source_image->scaled(info.width, info.height, Qt::IgnoreAspectRatio, Qt::SmoothTransformation)));
		source_width = source_image->width();
		source_height = source_image->height();

		// Debug output
		AppendDebugMethod("Timeline::add_layer (Scale: SCALE_STRETCH)", "source_frame->number", source_frame->number, "source_width", source_width, "source_height", source_height, "", -1, "", -1, "", -1);
		break;
	case (SCALE_CROP):
	{
		// Scale until both the width and the height fill the canvas (cropping the overlap):
		// one candidate size matches the canvas width, the other matches the canvas height
		QSize width_size(info.width, round(info.width / (float(source_width) / float(source_height))));
		QSize height_size(round(info.height / (float(source_height) / float(source_width))), info.height);

		// (assumed condition) Use whichever candidate covers the whole canvas
		if (width_size.width() >= info.width && width_size.height() >= info.height)
			source_image = tr1::shared_ptr<QImage>(new QImage(source_image->scaled(width_size.width(), width_size.height(), Qt::KeepAspectRatio, Qt::SmoothTransformation)));
		else
			source_image = tr1::shared_ptr<QImage>(new QImage(source_image->scaled(height_size.width(), height_size.height(), Qt::KeepAspectRatio, Qt::SmoothTransformation)));
		source_width = source_image->width();
		source_height = source_image->height();

		// Debug output
		AppendDebugMethod("Timeline::add_layer (Scale: SCALE_CROP)", "source_frame->number", source_frame->number, "source_width", source_width, "source_height", source_height, "", -1, "", -1, "", -1);
		break;
	}
	} // end scale switch
	/* GRAVITY LOCATION - initialize x & y, based on where the clip snaps to its parent */
	float x = 0.0; // left edge
	float y = 0.0; // top edge

	// Apply the clip's scale keyframes (1.0 = 100%)
	float sx = source_clip->scale_x.GetValue(clip_frame_number);
	float sy = source_clip->scale_y.GetValue(clip_frame_number);
	float scaled_source_width = source_width * sx;
	float scaled_source_height = source_height * sy;

	// (assumed case labels; each case centers or aligns the axes it needs)
	switch (source_clip->gravity)
	{
	case (GRAVITY_TOP):
		x = (info.width - scaled_source_width) / 2.0; // center horizontally
		break;
	case (GRAVITY_LEFT):
		y = (info.height - scaled_source_height) / 2.0; // center vertically
		break;
	case (GRAVITY_CENTER):
		x = (info.width - scaled_source_width) / 2.0; // center horizontally
		y = (info.height - scaled_source_height) / 2.0; // center vertically
		break;
	case (GRAVITY_RIGHT):
		x = info.width - scaled_source_width; // align right
		y = (info.height - scaled_source_height) / 2.0; // center vertically
		break;
	case (GRAVITY_BOTTOM):
		x = (info.width - scaled_source_width) / 2.0; // center horizontally
		y = info.height - scaled_source_height; // align bottom
		break;
	}
	// Debug output
	AppendDebugMethod("Timeline::add_layer (Gravity)", "source_frame->number", source_frame->number, "source_clip->gravity", source_clip->gravity, "info.width", info.width, "source_width", source_width, "info.height", info.height, "source_height", source_height);
	/* TRANSFORM SOURCE IMAGE (translate, rotate, scale) */
	// Rotation and animation state, from the clip's keyframe curves
	float r = source_clip->rotation.GetValue(clip_frame_number); // rotation in degrees
	bool is_x_animated = source_clip->location_x.Points.size() > 1; // more than one point = animated
	bool is_y_animated = source_clip->location_y.Points.size() > 1;

	bool transformed = false;
	QTransform transform;

	if ((!isEqual(x, 0) || !isEqual(y, 0)) && (isEqual(r, 0) && isEqual(sx, 1) && isEqual(sy, 1) && !is_x_animated && !is_y_animated))
	{
		// SIMPLE CASE: no rotation, no scaling, and no animation; a translation is enough
		// Debug output
		AppendDebugMethod("Timeline::add_layer (Transform: SIMPLE)", "source_frame->number", source_frame->number, "x", x, "y", y, "r", r, "sx", sx, "sy", sy);

		// Translate only (does not need the full transform pipeline)
		transform.translate(x, y);
		transformed = true;
	}
	else if (!isEqual(r, 0) || !isEqual(x, 0) || !isEqual(y, 0) || !isEqual(sx, 1) || !isEqual(sy, 1))
	{
		// COMPLEX CASE: rotation, translation, and/or scaling
		// Debug output
		AppendDebugMethod("Timeline::add_layer (Transform: COMPLEX)", "source_frame->number", source_frame->number, "x", x, "y", y, "r", r, "sx", sx, "sy", sy);

		// ROTATE CLIP (if needed), around the center of the image
		if (!isEqual(r, 0)) {
			float origin_x = x + (source_width / 2.0);
			float origin_y = y + (source_height / 2.0);
			transform.translate(origin_x, origin_y);
			transform.rotate(r);
			transform.translate(-origin_x, -origin_y);
		}

		// TRANSLATE CLIP (if needed)
		if (!isEqual(x, 0) || !isEqual(y, 0)) {
			transform.translate(x, y);
		}

		// SCALE CLIP (if needed; note the guard compares against 0, so any non-zero scale is applied)
		if (!isEqual(sx, 0) || !isEqual(sy, 0)) {
			transform.scale(sx, sy);
		}
		// Debug output
		AppendDebugMethod("Timeline::add_layer (Transform: COMPLEX: Completed ScaleRotateTranslateDistortion)", "source_frame->number", source_frame->number, "x", x, "y", y, "r", r, "sx", sx, "sy", sy);

		transformed = true;
	}
	// Debug output
	AppendDebugMethod("Timeline::add_layer (Transform: Composite Image Layer: Prepare)", "source_frame->number", source_frame->number, "offset_x", offset_x, "offset_y", offset_y, "new_frame->GetImage()->width()", new_frame->GetImage()->width(), "transformed", transformed, "", -1);

	/* COMPOSITE THE SOURCE IMAGE (LAYER) ONTO THE TIMELINE'S FRAME */
	tr1::shared_ptr<QImage> new_image = new_frame->GetImage();

	// Paint onto the timeline frame's image, with smooth rendering hints
	QPainter painter(new_image.get());
	painter.setRenderHints(QPainter::Antialiasing | QPainter::SmoothPixmapTransform | QPainter::TextAntialiasing, true);

	// Apply the transform (translate / rotate / scale), if one was built above
	if (transformed)
		painter.setTransform(transform);

	// Composite the source image over the frame's existing image
	painter.setCompositionMode(QPainter::CompositionMode_SourceOver);
	painter.drawImage(0, 0, *source_image);
	// Debug output
	AppendDebugMethod("Timeline::add_layer (Transform: Composite Image Layer: Completed)", "source_frame->number", source_frame->number, "offset_x", offset_x, "offset_y", offset_y, "new_frame->GetImage()->width()", new_frame->GetImage()->width(), "transformed", transformed, "", -1);
}
// Update the list of 'opened' clips
void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect)
{
	// Debug output
	AppendDebugMethod("Timeline::update_open_clips (before)", "does_clip_intersect", does_clip_intersect, "closing_clips.size()", closing_clips.size(), "open_clips.size()", open_clips.size(), "", -1, "", -1, "", -1);
	// Is the clip already in the open_clips map?
	bool clip_found = open_clips.count(clip);

	if (clip_found && !does_clip_intersect)
	{
		// Remove the clip from the 'opened' list, and close it
		open_clips.erase(clip);
		clip->Close();
	}
	else if (!clip_found && does_clip_intersect)
	{
		// Add the clip to the 'opened' list, and open it
		open_clips[clip] = clip;
		clip->Open();

		// Enable debug output on the clip's reader
		clip->Reader()->debug = true;
	}
	// Debug output
	AppendDebugMethod("Timeline::update_open_clips (after)", "does_clip_intersect", does_clip_intersect, "clip_found", clip_found, "closing_clips.size()", closing_clips.size(), "open_clips.size()", open_clips.size(), "", -1, "", -1);
}
// Sort clips by order on the timeline
void Timeline::sort_clips()
{
	// Debug output
	AppendDebugMethod("Timeline::SortClips", "clips.size()", clips.size(), "", -1, "", -1, "", -1, "", -1, "", -1);

	// Sort the clips by position (assumed comparator from Timeline.h)
	clips.sort(CompareClips());
}
// Sort effects by order on the timeline
void Timeline::sort_effects()
{
	// Sort the effects by position (assumed comparator from Timeline.h)
	effects.sort(CompareEffects());
}

// Close the timeline reader (and any resources it was consuming)
void Timeline::Close()
{
	// Loop through all clips, closing each one
	list<Clip*>::iterator clip_itr;
	for (clip_itr = clips.begin(); clip_itr != clips.end(); ++clip_itr)
	{
		// Get clip object from the iterator
		Clip *clip = (*clip_itr);

		// Force the clip closed (it no longer intersects anything)
		update_open_clips(clip, false);
	}

	// Mark the timeline as closed
	is_open = false;
}
// Compare two floating-point numbers for near-equality
bool Timeline::isEqual(double a, double b)
{
	return fabs(a - b) < 0.000001;
}
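// Example: isEqual(1.0, 1.0000004) returns true (the difference is below the
// 1e-6 tolerance), while isEqual(1.0, 1.001) returns false. This keeps tiny
// floating-point drift in keyframe values from being treated as a real transform.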
// Get an openshot::Frame object for a specific frame number of this timeline
tr1::shared_ptr<Frame> Timeline::GetFrame(long int requested_frame)
{
	// Check for an open reader (or throw an exception)
	if (!is_open)
		throw ReaderClosed("The Timeline is closed. Call Open() before calling this method.", "");

	// Adjust out-of-bounds frame numbers
	if (requested_frame < 1)
		requested_frame = 1;

	// Check the cache, and return the frame if it is already rendered
	tr1::shared_ptr<Frame> frame = final_cache.GetFrame(requested_frame);
	if (frame) {
		// Debug output
		AppendDebugMethod("Timeline::GetFrame (Cached frame found)", "requested_frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1);

		// Return the cached frame
		return frame;
	}
	else
	{
		// Create a scoped lock, allowing only a single thread to render this frame range at a time
		const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);

		// Check the cache a second time (a previous holder of the lock may have just rendered this frame)
		frame = final_cache.GetFrame(requested_frame);
		if (frame) {
			// Debug output
			AppendDebugMethod("Timeline::GetFrame (Cached frame found on 2nd look)", "requested_frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1);

			// Return the cached frame
			return frame;
		}
		// Minimum number of frames to render in one batch (for performance reasons)
		int minimum_frames = OPEN_MP_NUM_PROCESSORS;

		// Find all clips that intersect this block of frames
		vector<Clip*> nearby_clips = find_intersecting_clips(requested_frame, minimum_frames, true);

		// Allow nested OpenMP parallelism (clips render their own frames in parallel too)
		omp_set_nested(true);

		// Debug output
		AppendDebugMethod("Timeline::GetFrame", "requested_frame", requested_frame, "minimum_frames", minimum_frames, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS, "", -1, "", -1, "", -1);
		// Loop through the batch of frames, requesting each intersecting clip's frame
		// in order (so each clip's internal cache is warmed sequentially)
		for (long int frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
		{
			// Calculate the time (in seconds) of this frame
			float requested_time = calculate_time(frame_number, info.fps);

			// Loop through the nearby clips
			for (int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
			{
				// Does this clip intersect the requested time?
				Clip *clip = nearby_clips[clip_index];
				bool does_clip_intersect = (clip->Position() <= requested_time && clip->Position() + clip->Duration() >= requested_time);
				if (does_clip_intersect)
				{
					// Determine the clip's frame number (based on its position on the timeline)
					float time_diff = (requested_time - clip->Position()) + clip->Start();
					int clip_frame_number = round(time_diff * info.fps.ToFloat()) + 1;

					// (assumed) Request the frame, so the clip caches it before the parallel loop below
					clip->GetFrame(clip_frame_number);
				}
			}
		}
		#pragma omp parallel
		{
			// Render the batch of frames in order
			#pragma omp for ordered firstprivate(nearby_clips, requested_frame, minimum_frames)
			for (long int frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
			{
				// Debug output
				AppendDebugMethod("Timeline::GetFrame (processing frame)", "frame_number", frame_number, "omp_get_thread_num()", omp_get_thread_num(), "", -1, "", -1, "", -1, "", -1);

				// (assumed) Calculate the # of audio samples this frame should hold
				int samples_in_frame = Frame::GetSamplesPerFrame(info.fps, info.sample_rate, info.channels);

				// Create a blank frame (with the correct size and sample count)
				tr1::shared_ptr<Frame> new_frame(tr1::shared_ptr<Frame>(new Frame(frame_number, info.width, info.height, "#000000", samples_in_frame, info.channels)));
				new_frame->SampleRate(info.sample_rate);
				new_frame->ChannelsLayout(info.channel_layout);
				// Debug output
				AppendDebugMethod("Timeline::GetFrame (Adding solid color)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height, "", -1, "", -1, "", -1);

				// Add the timeline's background color, but only when it is animated or non-black
				if ((color.red.Points.size() > 1 || color.green.Points.size() > 1 || color.blue.Points.size() > 1) ||
						(color.red.GetValue(frame_number) != 0.0 || color.green.GetValue(frame_number) != 0.0 || color.blue.GetValue(frame_number) != 0.0))
					new_frame->AddColor(info.width, info.height, color.GetColorHex(frame_number));
				// Calculate the time (in seconds) of this frame
				float requested_time = calculate_time(frame_number, info.fps);

				// Debug output
				AppendDebugMethod("Timeline::GetFrame (Loop through clips)", "frame_number", frame_number, "requested_time", requested_time, "clips.size()", clips.size(), "nearby_clips.size()", nearby_clips.size(), "", -1, "", -1);
				// Loop through the nearby clips
				for (int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
				{
					// Get clip object from the vector
					Clip *clip = nearby_clips[clip_index];

					// Does the clip intersect the current requested time?
					bool does_clip_intersect = (clip->Position() <= requested_time && clip->Position() + clip->Duration() >= requested_time);

					// Debug output
					AppendDebugMethod("Timeline::GetFrame (Does clip intersect)", "frame_number", frame_number, "requested_time", requested_time, "clip->Position()", clip->Position(), "clip->Duration()", clip->Duration(), "does_clip_intersect", does_clip_intersect, "", -1);
					if (does_clip_intersect)
					{
						// Determine if this clip is the "top" clip on its layer (only matters
						// when multiple clips overlap on the same layer)
						bool is_top_clip = true;
						for (int top_clip_index = 0; top_clip_index < nearby_clips.size(); top_clip_index++)
						{
							Clip *nearby_clip = nearby_clips[top_clip_index];
							if (clip->Id() != nearby_clip->Id() && clip->Layer() == nearby_clip->Layer() &&
									nearby_clip->Position() <= requested_time && nearby_clip->Position() + nearby_clip->Duration() >= requested_time &&
									nearby_clip->Position() > clip->Position()) {
								// (assumed) A later clip on the same layer also covers this time
								is_top_clip = false;
								break;
							}
						}

						// Determine the clip's frame number (based on its position on the timeline)
						float time_diff = (requested_time - clip->Position()) + clip->Start();
						int clip_frame_number = round(time_diff * info.fps.ToFloat()) + 1;
						// Debug output
						AppendDebugMethod("Timeline::GetFrame (Calculate clip's frame #)", "time_diff", time_diff, "requested_time", requested_time, "clip->Position()", clip->Position(), "clip->Start()", clip->Start(), "info.fps.ToFloat()", info.fps.ToFloat(), "clip_frame_number", clip_frame_number);

						// Composite the clip's frame onto the timeline frame
						add_layer(new_frame, clip, clip_frame_number, frame_number, is_top_clip);
					}
					else
						// Debug output
						AppendDebugMethod("Timeline::GetFrame (clip does not intersect)", "frame_number", frame_number, "requested_time", requested_time, "does_clip_intersect", does_clip_intersect, "", -1, "", -1, "", -1);

				} // end clip loop
				// Debug output
				AppendDebugMethod("Timeline::GetFrame (Add frame to cache)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height, "", -1, "", -1, "", -1);

				// Add the finished frame to the cache
				final_cache.Add(frame_number, new_frame);

			} // end frame loop
		} // end omp parallel
		// Debug output
		AppendDebugMethod("Timeline::GetFrame (end parallel region)", "requested_frame", requested_frame, "omp_get_thread_num()", omp_get_thread_num(), "", -1, "", -1, "", -1, "", -1);

		// Return the requested frame (now rendered and cached)
		return final_cache.GetFrame(requested_frame);
	}
}
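// Note on GetFrame's locking strategy above: the cache is checked once without
// the lock (the fast path), and again after acquiring the critical section, so a
// thread that was blocked while another thread rendered the same frame returns
// the cached result instead of rendering it twice (double-checked locking).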
// Find intersecting (or non-intersecting) clips within a window of frames
vector<Clip*> Timeline::find_intersecting_clips(long int requested_frame, int number_of_frames, bool include)
{
	// List of matching clips
	vector<Clip*> matching_clips;

	// Calculate the time (in seconds) spanned by this window of frames
	float min_requested_time = calculate_time(requested_frame, info.fps);
	float max_requested_time = calculate_time(requested_frame + (number_of_frames - 1), info.fps);
	// Loop through all clips
	list<Clip*>::iterator clip_itr;
	for (clip_itr = clips.begin(); clip_itr != clips.end(); ++clip_itr)
	{
		// Get clip object from the iterator
		Clip *clip = (*clip_itr);

		// Does the clip intersect the window (either spanning its start, or starting inside it)?
		float clip_duration = clip->End() - clip->Start();
		bool does_clip_intersect = (clip->Position() <= min_requested_time && clip->Position() + clip_duration >= min_requested_time) ||
				(clip->Position() > min_requested_time && clip->Position() <= max_requested_time);
		// Debug output
		AppendDebugMethod("Timeline::find_intersecting_clips (Is clip near or intersecting)", "requested_frame", requested_frame, "min_requested_time", min_requested_time, "max_requested_time", max_requested_time, "clip->Position()", clip->Position(), "clip_duration", clip_duration, "does_clip_intersect", does_clip_intersect);

		// Open (or close) this clip, based on whether it intersects
		#pragma omp critical (reader_lock)
		update_open_clips(clip, does_clip_intersect);

		// Collect the clips that match the requested mode
		if (does_clip_intersect && include)
			// Clip is near our frame window (and we want the intersecting clips)
			matching_clips.push_back(clip);
		else if (!does_clip_intersect && !include)
			// Clip is not near our frame window (and we want the non-intersecting clips)
			matching_clips.push_back(clip);

	} // end clip loop

	// Return the list of matching clips
	return matching_clips;
}
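// Note: the 'include' flag selects which set is returned: true returns the clips
// that intersect the frame window (the ones GetFrame must render), while false
// returns the clips that do NOT intersect; in both cases update_open_clips()
// has already opened or closed each clip accordingly.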
// Generate Json::JsonValue for this object
Json::Value Timeline::JsonValue()
{
	// (assumed) Start from the base reader's JSON properties
	Json::Value root = ReaderBase::JsonValue();
	root["type"] = "Timeline";

	// Add an array of clips
	root["clips"] = Json::Value(Json::arrayValue);

	// Loop through all clips
	list<Clip*>::iterator clip_itr;
	for (clip_itr = clips.begin(); clip_itr != clips.end(); ++clip_itr)
	{
		// Append each clip's JSON to the array
		Clip *existing_clip = (*clip_itr);
		root["clips"].append(existing_clip->JsonValue());
	}

	// Add an array of effects
	root["effects"] = Json::Value(Json::arrayValue);

	// Loop through all effects
	list<EffectBase*>::iterator effect_itr;
	for (effect_itr = effects.begin(); effect_itr != effects.end(); ++effect_itr)
	{
		// Append each effect's JSON to the array
		EffectBase *existing_effect = (*effect_itr);
		root["effects"].append(existing_effect->JsonValue());
	}

	// Return the JSON object
	return root;
}
// Load a JSON string into this object
void Timeline::SetJson(string value) throw(InvalidJSON)
{
	// Parse the JSON string into a JSON object
	Json::Value root;
	Json::Reader reader;
	bool success = reader.parse(value, root);
	if (!success)
		// Raise an exception
		throw InvalidJSON("JSON could not be parsed (or is invalid)", "");

	try
	{
		// Set all values that match
		SetJsonValue(root);
	}
	catch (exception e)
	{
		// Error parsing JSON (or missing keys)
		throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", "");
	}
}
// Load a Json::JsonValue into this object
void Timeline::SetJsonValue(Json::Value root)
{
	// (assumed) Set the parent (reader) data first
	ReaderBase::SetJsonValue(root);

	if (!root["clips"].isNull()) {
		// Clear any existing clips
		clips.clear();

		// Loop through each clip in the array
		for (int x = 0; x < root["clips"].size(); x++) {
			// Get each clip's JSON
			Json::Value existing_clip = root["clips"][x];

			// Create a new clip, load the JSON into it, and add it to the timeline
			Clip *c = new Clip();
			c->SetJsonValue(existing_clip);
			AddClip(c);
		}
	}

	if (!root["effects"].isNull()) {
		// Clear any existing effects
		effects.clear();

		// Loop through each effect in the array
		for (int x = 0; x < root["effects"].size(); x++) {
			// Get each effect's JSON
			Json::Value existing_effect = root["effects"][x];

			// Create a new effect instance, based on the 'type' property
			EffectBase *e = NULL;
			if (!existing_effect["type"].isNull())
			{
				if (existing_effect["type"].asString() == "ChromaKey")
					e = new ChromaKey();
				else if (existing_effect["type"].asString() == "Deinterlace")
					e = new Deinterlace();
				else if (existing_effect["type"].asString() == "Mask")
					e = new Mask();
				else if (existing_effect["type"].asString() == "Negate")
					e = new Negate();
			}

			// Load the JSON into the effect, and add it to the timeline
			if (e) {
				e->SetJsonValue(existing_effect);
				AddEffect(e);
			}
		}
	}
}
// Apply a special formatted JSON object, which represents a change to the timeline
// (insert, update, or delete), keeping it in sync with another application
void Timeline::ApplyJsonDiff(string value) throw(InvalidJSON, InvalidJSONKey)
{
	// Parse the JSON string into a JSON object
	Json::Value root;
	Json::Reader reader;
	bool success = reader.parse(value, root);
	if (!success || !root.isArray())
		// Raise an exception
		throw InvalidJSON("JSON could not be parsed (or is invalid).", "");

	try
	{
		// Process each change in the array
		for (int x = 0; x < root.size(); x++) {
			// Get each change
			Json::Value change = root[x];
			string root_key = change["key"][(uint)0].asString();

			// Dispatch on the root key
			if (root_key == "clips")
				// Apply to CLIPS
				apply_json_to_clips(change);
			else if (root_key == "effects")
				// Apply to EFFECTS
				apply_json_to_effects(change);
			else
				// Apply to the TIMELINE itself
				apply_json_to_timeline(change);
		}
	}
	catch (exception e)
	{
		// Error parsing JSON (or missing keys)
		throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", "");
	}

	// Adjust the cache size (the timeline info may have changed)
	final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 4, info.width, info.height, info.sample_rate, info.channels);
}
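// Example (hypothetical) input for ApplyJsonDiff: a JSON array of changes, each
// with a "type" (insert, update, or delete), a "key" path, and a "value":
//   [ { "type": "update", "key": ["width"],      "value": 1920 },
//     { "type": "update", "key": ["fps", "num"], "value": 30 } ]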
// Apply a JSON diff change to the timeline's clips
void Timeline::apply_json_to_clips(Json::Value change) throw(InvalidJSONKey) {

	// Get the type of change
	string change_type = change["type"].asString();
	string clip_id = "";
	Clip *existing_clip = NULL;

	// Find the matching clip on this timeline (if any)
	for (int x = 0; x < change["key"].size(); x++) {
		// Get each part of the key path
		Json::Value key_part = change["key"][x];

		if (key_part.isObject()) {
			// Check for an id
			if (!key_part["id"].isNull()) {
				// Set the id
				clip_id = key_part["id"].asString();

				// Loop through the clips, looking for the matching id
				list<Clip*>::iterator clip_itr;
				for (clip_itr = clips.begin(); clip_itr != clips.end(); ++clip_itr)
				{
					// Get clip object from the iterator
					Clip *c = (*clip_itr);
					if (c->Id() == clip_id) {
						// Found the matching clip
						existing_clip = c;
						break;
					}
				}
			}
		}
	}

	// Check for a more specific key, one that targets an effect inside this clip,
	// e.g. ["clips", {"id": "CLIP-1"}, "effects", {"id": "EFFECT-1"}]
	if (existing_clip && change["key"].size() == 4 && change["key"][2] == "effects")
	{
		// Get the effect part of the key
		Json::Value key_part = change["key"][3];

		if (key_part.isObject()) {
			// Check for an id
			if (!key_part["id"].isNull())
			{
				// Set the id
				string effect_id = key_part["id"].asString();

				// Find the matching effect on this clip (if any)
				EffectBase *existing_effect = NULL;
				list<EffectBase*>::iterator effect_itr;
				for (effect_itr = existing_clip->Effects().begin(); effect_itr != existing_clip->Effects().end(); ++effect_itr)
				{
					// Get effect object from the iterator
					EffectBase *e = (*effect_itr);
					if (e->Id() == effect_id) {
						// Found the matching effect
						existing_effect = e;
						break;
					}
				}

				// Apply the change to the effect, and return
				apply_json_to_effects(change, existing_effect);
				return;
			}
		}
	}
	// Determine the type of change
	if (change_type == "insert") {
		// Create a new clip from the JSON, and add it to the timeline
		Clip *clip = new Clip();
		clip->SetJsonValue(change["value"]);
		AddClip(clip);

	} else if (change_type == "update") {
		// Update the existing clip
		if (existing_clip)
			existing_clip->SetJsonValue(change["value"]);

	} else if (change_type == "delete") {
		// Remove the existing clip
		if (existing_clip)
			RemoveClip(existing_clip);
	}
}
// Apply a JSON diff change to the timeline's effects
void Timeline::apply_json_to_effects(Json::Value change) throw(InvalidJSONKey) {

	// Get the type of change
	string change_type = change["type"].asString();

	// Find the matching effect on this timeline (if any)
	EffectBase *existing_effect = NULL;
	for (int x = 0; x < change["key"].size(); x++) {
		// Get each part of the key path
		Json::Value key_part = change["key"][x];

		if (key_part.isObject()) {
			// Check for an id
			if (!key_part["id"].isNull())
			{
				// Set the id
				string effect_id = key_part["id"].asString();

				// Loop through the effects, looking for the matching id
				list<EffectBase*>::iterator effect_itr;
				for (effect_itr = effects.begin(); effect_itr != effects.end(); ++effect_itr)
				{
					// Get effect object from the iterator
					EffectBase *e = (*effect_itr);
					if (e->Id() == effect_id) {
						// Found the matching effect
						existing_effect = e;
						break;
					}
				}
			}
		}
	}

	// Apply the change (only if an effect was found, or this is an insert)
	if (existing_effect || change_type == "insert")
		// Apply the change to the effect
		apply_json_to_effects(change, existing_effect);
}
// Apply a JSON diff change to a specific effect (or insert a new one)
void Timeline::apply_json_to_effects(Json::Value change, EffectBase* existing_effect) throw(InvalidJSONKey) {

	// Get the type of change
	string change_type = change["type"].asString();

	// Determine the type of change
	if (change_type == "insert") {
		// Determine the type of effect to create
		string effect_type = change["value"]["type"].asString();

		// Create a new effect instance, based on the 'type' property
		EffectBase *e = NULL;
		if (effect_type == "Blur")
			e = new Blur();
		else if (effect_type == "Brightness")
			e = new Brightness();
		else if (effect_type == "ChromaKey")
			e = new ChromaKey();
		else if (effect_type == "Deinterlace")
			e = new Deinterlace();
		else if (effect_type == "Mask")
			e = new Mask();
		else if (effect_type == "Negate")
			e = new Negate();
		else if (effect_type == "Saturation")
			e = new Saturation();

		// Load the JSON into the effect, and add it to the timeline
		if (e) {
			e->SetJsonValue(change["value"]);
			AddEffect(e);
		}

	} else if (change_type == "update") {
		// Update the existing effect
		if (existing_effect)
			existing_effect->SetJsonValue(change["value"]);

	} else if (change_type == "delete") {
		// Remove the existing effect
		if (existing_effect)
			RemoveEffect(existing_effect);
	}
}
// Apply a JSON diff change to the timeline's own properties
void Timeline::apply_json_to_timeline(Json::Value change) throw(InvalidJSONKey) {

	// Get the type of change, and the root & sub keys
	string change_type = change["type"].asString();
	string root_key = change["key"][(uint)0].asString();
	string sub_key = "";
	if (change["key"].size() >= 2)
		sub_key = change["key"][(uint)1].asString();
	// Insert and update are treated the same for timeline properties
	if (change_type == "insert" || change_type == "update") {

		// Set the timeline property that matches the root key
		if (root_key == "color")
			// Set the background color of the timeline canvas
			color.SetJsonValue(change["value"]);
		else if (root_key == "viewport_scale")
			// Set the scale of the viewport
			viewport_scale.SetJsonValue(change["value"]);
		else if (root_key == "viewport_x")
			// Set the x coordinate of the viewport
			viewport_x.SetJsonValue(change["value"]);
		else if (root_key == "viewport_y")
			// Set the y coordinate of the viewport
			viewport_y.SetJsonValue(change["value"]);
		else if (root_key == "duration") {
			// Ignore duration changes (duration is derived from the clips)
		}
		else if (root_key == "width")
			// Set the width of the timeline canvas
			info.width = change["value"].asInt();
		else if (root_key == "height")
			// Set the height of the timeline canvas
			info.height = change["value"].asInt();
		else if (root_key == "fps" && sub_key == "" && change["value"].isObject()) {
			// Set both parts of the fps fraction
			if (!change["value"]["num"].isNull())
				info.fps.num = change["value"]["num"].asInt();
			if (!change["value"]["den"].isNull())
				info.fps.den = change["value"]["den"].asInt();
		}
		else if (root_key == "fps" && sub_key == "num")
			// Set the fps numerator
			info.fps.num = change["value"].asInt();
		else if (root_key == "fps" && sub_key == "den")
			// Set the fps denominator
			info.fps.den = change["value"].asInt();
		else if (root_key == "sample_rate")
			// Set the audio sample rate
			info.sample_rate = change["value"].asInt();
		else if (root_key == "channels")
			// Set the number of audio channels
			info.channels = change["value"].asInt();
		else if (root_key == "channel_layout")
			// Set the audio channel layout
			info.channel_layout = (ChannelLayout) change["value"].asInt();
		else
			// Error: unknown key
			throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
	} else if (change["type"].asString() == "delete") {

		// Reset the matching timeline property back to its default
		// (assumed defaults: black background, 100% viewport scale, zero viewport offset)
		if (root_key == "color") {
			color.red = Keyframe(0.0);
			color.green = Keyframe(0.0);
			color.blue = Keyframe(0.0);
		}
		else if (root_key == "viewport_scale")
			viewport_scale = Keyframe(100.0);
		else if (root_key == "viewport_x")
			viewport_x = Keyframe(0.0);
		else if (root_key == "viewport_y")
			viewport_y = Keyframe(0.0);
		else
			// Error: unknown key
			throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
	}
}