53 changes: 50 additions & 3 deletions src/Timeline.cpp
@@ -472,6 +472,19 @@ int64_t Timeline::GetMaxFrame() {
return std::round(max_time * fps);
}

// Compute the start time of the first timeline element (clip or effect)
double Timeline::GetMinTime() {
// Return cached min_time variable (threadsafe)
return min_time;
}

// Compute the first frame# based on the position of the first timeline element
int64_t Timeline::GetMinFrame() {
double fps = info.fps.ToDouble();
auto min_time = GetMinTime();
return std::round(min_time * fps);
}
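
For context, here is a standalone sketch of the seconds-to-frame conversion that GetMinFrame() and GetMaxFrame() perform: seconds are multiplied by the timeline frame rate and rounded to the nearest whole frame. The helper name time_to_frame and the NTSC rate are illustrative only, not part of the library.

#include <cmath>
#include <cstdint>
#include <iostream>

// Illustrative helper: convert a timeline position in seconds to a frame number.
int64_t time_to_frame(double seconds, double fps) {
    return static_cast<int64_t>(std::round(seconds * fps));
}

int main() {
    // 50 s at an even 30 fps lands exactly on frame 1500, matching the
    // "50 * 30" expectations in the tests below.
    std::cout << time_to_frame(50.0, 30.0) << "\n";             // 1500

    // At NTSC 30000/1001 fps the product is fractional, so the rounding
    // step decides which frame the timeline reports.
    std::cout << time_to_frame(50.0, 30000.0 / 1001.0) << "\n"; // 1499
    return 0;
}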

// Apply a FrameMapper to a clip which matches the settings of this timeline
void Timeline::apply_mapper_to_clip(Clip* clip)
{
@@ -755,22 +768,52 @@ void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect)
"open_clips.size()", open_clips.size());
}

// Calculate the max and min time (in seconds) of the timeline, based on all the clips and effects, and cache the values
void Timeline::calculate_max_duration() {
double last_clip = 0.0;
double last_effect = 0.0;
double first_clip = std::numeric_limits<double>::max();
double first_effect = std::numeric_limits<double>::max();

// Find the last and first clip
if (!clips.empty()) {
// Find the clip with the maximum end frame
const auto max_clip = std::max_element(
clips.begin(), clips.end(), CompareClipEndFrames());
last_clip = (*max_clip)->Position() + (*max_clip)->Duration();

// Find the clip with the minimum start position (ignoring layer)
const auto min_clip = std::min_element(
clips.begin(), clips.end(), [](const openshot::Clip* lhs, const openshot::Clip* rhs) {
return lhs->Position() < rhs->Position();
});
first_clip = (*min_clip)->Position();
}

// Find the last and first effect
if (!effects.empty()) {
// Find the effect with the maximum end frame
const auto max_effect = std::max_element(
effects.begin(), effects.end(), CompareEffectEndFrames());
last_effect = (*max_effect)->Position() + (*max_effect)->Duration();

// Find the effect with the minimum start position
const auto min_effect = std::min_element(
effects.begin(), effects.end(), [](const openshot::EffectBase* lhs, const openshot::EffectBase* rhs) {
return lhs->Position() < rhs->Position();
});
first_effect = (*min_effect)->Position();
}

// Calculate the max and min time
max_time = std::max(last_clip, last_effect);
min_time = std::min(first_clip, first_effect);

// If no clips or effects exist, set min_time to 0
if (clips.empty() && effects.empty()) {
min_time = 0.0;
max_time = 0.0;
}
}
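
The new min/max bookkeeping boils down to two linear scans with custom comparators: the earliest left edge by Position(), the furthest right edge by Position() + Duration(). A self-contained sketch of the same pattern follows; the Item struct here is a hypothetical stand-in for clips/effects, not an OpenShot type.

#include <algorithm>
#include <iostream>
#include <vector>

// Hypothetical stand-in for a clip or effect: a start position and a duration.
struct Item {
    double position;  // start time on the timeline (seconds)
    double duration;  // length on the timeline (seconds)
};

int main() {
    std::vector<Item> items = { {50.0, 45.0}, {0.0, 55.0}, {20.0, 15.0} };

    double min_time = 0.0;
    double max_time = 0.0;
    if (!items.empty()) {
        // Earliest left edge: smallest position, ignoring layer.
        const auto first = std::min_element(
            items.begin(), items.end(),
            [](const Item& lhs, const Item& rhs) { return lhs.position < rhs.position; });

        // Furthest right edge: largest position + duration.
        const auto last = std::max_element(
            items.begin(), items.end(),
            [](const Item& lhs, const Item& rhs) {
                return lhs.position + lhs.duration < rhs.position + rhs.duration;
            });

        min_time = first->position;                   // 0.0
        max_time = last->position + last->duration;   // 95.0
    }
    std::cout << min_time << " " << max_time << std::endl;
    return 0;
}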

// Sort clips by position on the timeline
@@ -1260,6 +1303,10 @@ void Timeline::SetJsonValue(const Json::Value root) {
preview_width = info.width;
preview_height = info.height;

// Resort (and recalculate min/max duration)
sort_clips();
sort_effects();

// Re-open if needed
if (was_open)
Open();
8 changes: 7 additions & 1 deletion src/Timeline.h
@@ -160,7 +160,8 @@ namespace openshot {
bool managed_cache; ///< Does this timeline instance manage the cache object
std::string path; ///< Optional path of loaded UTF-8 OpenShot JSON project file
int max_concurrent_frames; ///< Max concurrent frames to process at one time
double max_time; ///> The max duration (in seconds) of the timeline, based on the furthest clip (right edge)
double min_time; ///> The min start time (in seconds) of the timeline, based on the position of the first clip (left edge)

std::map<std::string, std::shared_ptr<openshot::TrackedObjectBase>> tracked_objects; ///< map of TrackedObjectBBoxes and their IDs

@@ -286,6 +287,11 @@ namespace openshot {
/// Look up the end frame number of the latest element on the timeline
int64_t GetMaxFrame();

/// Look up the position/start time of the first timeline element
double GetMinTime();
/// Look up the start frame number of the first element on the timeline
int64_t GetMinFrame();

/// Close the timeline reader (and any resources it was consuming)
void Close() override;
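
For reference, a minimal usage sketch of the new public getters, mirroring the calls exercised in the tests below. The header names and the media path are assumptions, not part of this diff.

#include "Timeline.h"
#include "Clip.h"
#include <iostream>

using namespace openshot;

int main() {
    Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

    Clip clip("some-video.mp4");  // placeholder media path
    clip.Position(50);            // the clip starts 50 seconds into the timeline
    clip.End(45);                 // and plays the first 45 seconds of its source
    t.AddClip(&clip);

    // Timeline content now spans [GetMinTime(), GetMaxTime()] = [50, 95] seconds.
    std::cout << "first second: " << t.GetMinTime()   // 50.0
              << ", first frame: " << t.GetMinFrame() // 1500 at 30 fps
              << std::endl;
    return 0;
}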

61 changes: 61 additions & 0 deletions tests/Timeline.cpp
@@ -661,6 +661,67 @@ TEST_CASE( "GetMaxFrame and GetMaxTime", "[libopenshot][timeline]" )
CHECK(t.GetMaxTime() == Approx(20.0).margin(0.001));
}

TEST_CASE( "GetMinFrame and GetMinTime", "[libopenshot][timeline]" )
{
// Create a timeline
Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

std::stringstream path1;
path1 << TEST_MEDIA_PATH << "interlaced.png";
Clip clip1(path1.str());
clip1.Id("C1");
clip1.Layer(1);
clip1.Position(50); // Start at 50 seconds
clip1.End(45); // Ends at 95 seconds
t.AddClip(&clip1);

CHECK(t.GetMinTime() == Approx(50.0).margin(0.001));
CHECK(t.GetMinFrame() == 50 * 30);

Clip clip2(path1.str());
clip2.Id("C2");
clip2.Layer(2);
clip2.Position(0); // Start at 0 seconds
clip2.End(55); // Ends at 55 seconds
t.AddClip(&clip2);

CHECK(t.GetMinTime() == Approx(0.0).margin(0.001));
CHECK(t.GetMinFrame() == 0);

clip1.Position(80); // Move clip1 to start at 80 seconds
clip2.Position(100); // Move clip2 to start at 100 seconds
CHECK(t.GetMinTime() == Approx(80.0).margin(0.001));
CHECK(t.GetMinFrame() == 80 * 30);

clip2.Position(20); // Adjust clip2 to start at 20 seconds
CHECK(t.GetMinTime() == Approx(20.0).margin(0.001));
CHECK(t.GetMinFrame() == 20 * 30);

clip2.End(35); // Adjust clip2 to end at 35 seconds
CHECK(t.GetMinTime() == Approx(20.0).margin(0.001));
CHECK(t.GetMinFrame() == 20 * 30);

t.RemoveClip(&clip1);
CHECK(t.GetMinTime() == Approx(20.0).margin(0.001));
CHECK(t.GetMinFrame() == 20 * 30);

// Update Clip's basic properties with JSON Diff
std::stringstream json_change1;
json_change1 << "[{\"type\":\"update\",\"key\":[\"clips\",{\"id\":\"C2\"}],\"value\":{\"id\":\"C2\",\"layer\":4000000,\"position\":5.0,\"start\":0,\"end\":10},\"partial\":false}]";
t.ApplyJsonDiff(json_change1.str());

CHECK(t.GetMinTime() == Approx(5.0).margin(0.001));
CHECK(t.GetMinFrame() == 5 * 30);

// Insert NEW Clip with JSON Diff
std::stringstream json_change2;
json_change2 << "[{\"type\":\"insert\",\"key\":[\"clips\"],\"value\":{\"id\":\"C3\",\"layer\":4000000,\"position\":10.0,\"start\":0,\"end\":10,\"reader\":{\"acodec\":\"\",\"audio_bit_rate\":0,\"audio_stream_index\":-1,\"audio_timebase\":{\"den\":1,\"num\":1},\"channel_layout\":4,\"channels\":0,\"display_ratio\":{\"den\":1,\"num\":1},\"duration\":3600.0,\"file_size\":\"160000\",\"fps\":{\"den\":1,\"num\":30},\"has_audio\":false,\"has_single_image\":true,\"has_video\":true,\"height\":200,\"interlaced_frame\":false,\"metadata\":{},\"path\":\"" << path1.str() << "\",\"pixel_format\":-1,\"pixel_ratio\":{\"den\":1,\"num\":1},\"sample_rate\":0,\"top_field_first\":true,\"type\":\"QtImageReader\",\"vcodec\":\"\",\"video_bit_rate\":0,\"video_length\":\"108000\",\"video_stream_index\":-1,\"video_timebase\":{\"den\":30,\"num\":1},\"width\":200}},\"partial\":false}]";
t.ApplyJsonDiff(json_change2.str());

CHECK(t.GetMinTime() == Approx(5.0).margin(0.001));
CHECK(t.GetMinFrame() == 5 * 30);
}
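
A possible companion test, not part of this diff, for the empty-timeline fallback added in calculate_max_duration(). It assumes RemoveClip() triggers the same recalculation that AddClip() and ApplyJsonDiff() do.

TEST_CASE( "GetMinTime and GetMaxTime on an emptied timeline", "[libopenshot][timeline]" )
{
    Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

    std::stringstream path1;
    path1 << TEST_MEDIA_PATH << "interlaced.png";
    Clip clip1(path1.str());
    clip1.Position(50);
    clip1.End(45);
    t.AddClip(&clip1);
    t.RemoveClip(&clip1);

    // With no clips or effects left, both extremes fall back to 0.0
    CHECK(t.GetMinTime() == Approx(0.0).margin(0.001));
    CHECK(t.GetMinFrame() == 0);
    CHECK(t.GetMaxTime() == Approx(0.0).margin(0.001));
    CHECK(t.GetMaxFrame() == 0);
}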

TEST_CASE( "Multi-threaded Timeline GetFrame", "[libopenshot][timeline]" )
{
Timeline *t = new Timeline(1280, 720, Fraction(24, 1), 48000, 2, LAYOUT_STEREO);