28 #include "../include/Clip.h"
30 using namespace openshot;
// Initialize default member values shared by all Clip constructors.
// NOTE(review): this is a fragmentary capture — original source lines between
// the embedded numeric markers are missing from this view.
33 void Clip::init_settings()
// Default waveform color: fully opaque blue (R=0, G=123, B=255, A=255)
63 wave_color =
Color((
unsigned char)0, (
unsigned char)123, (
unsigned char)255, (
unsigned char)255);
// By default the Clip does not own its reader (will not delete it)
88 manage_reader =
false;
// Fragment of the Clip(string path) constructor: pick a reader based on the
// file extension of the given path. (Signature and surrounding lines are
// missing from this capture.)
122 string ext = get_file_extension(path);
// Normalize the extension to lowercase for case-insensitive comparison
123 transform(ext.begin(), ext.end(), ext.begin(), ::tolower);
// Known audio/video container extensions — presumably routed to FFmpegReader;
// the branch bodies are not visible here (TODO confirm against full source)
126 if (ext==
"avi" || ext==
"mov" || ext==
"mkv" || ext==
"mpg" || ext==
"mpeg" || ext==
"mp3" || ext==
"mp4" || ext==
"mts" ||
127 ext==
"ogg" || ext==
"wav" || ext==
"wmv" || ext==
"webm" || ext==
"vob")
// This constructor created the reader itself, so the Clip owns (and must
// later delete) it
158 manage_reader =
true;
// Only touch the reader if we own one and it was successfully created
166 if (manage_reader && reader) {
// Fragments of the no-reader guard clauses from several Clip methods
// (original lines 192, 209, 221, 237 — the enclosing method signatures are
// not visible in this capture). Each method refuses to run until a reader
// has been attached via Reader(*reader).
192 throw ReaderClosed(
"No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.",
"");
209 throw ReaderClosed(
"No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.",
"");
221 throw ReaderClosed(
"No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.",
"");
237 throw ReaderClosed(
"No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.",
"");
// Fragment of Clip::GetFrame(requested_frame): maps the requested frame
// through the 'time' keyframe curve, copies the source frame's image and
// audio into a fresh Frame, then applies time-mapping and effects.
// (Method signature and several interior lines are missing from this view.)
252 requested_frame = adjust_frame_number_minimum(requested_frame);
// Only remap the frame number when a time curve is actually defined
// (more than one keyframe point)
255 long int new_frame_number = requested_frame;
256 if (time.Values.size() > 1)
257 new_frame_number = time.GetLong(requested_frame);
258 tr1::shared_ptr<Frame> original_frame = GetOrCreateFrame(new_frame_number);
// Build a new frame with the same audio geometry as the source frame;
// work on a copy so the reader's cached frame is never mutated
264 tr1::shared_ptr<Frame> frame(
new Frame(new_frame_number, 1, 1,
"#000000", original_frame->GetAudioSamplesCount(), original_frame->GetAudioChannelsCount()));
265 frame->SampleRate(original_frame->SampleRate());
266 frame->ChannelsLayout(original_frame->ChannelsLayout());
// Deep-copy the image so later effects cannot modify the reader's copy
269 frame->AddImage(tr1::shared_ptr<QImage>(
new QImage(*original_frame->GetImage())));
// Copy every audio channel from the source frame
272 if (reader->info.has_audio)
273 for (
int channel = 0; channel < original_frame->GetAudioChannelsCount(); channel++)
274 frame->AddAudio(
true, channel, 0, original_frame->GetAudioSamples(channel), original_frame->GetAudioSamplesCount(), 1.0);
// Apply the time-mapping curve (speed / direction changes)
277 tr1::shared_ptr<Frame> new_frame = get_time_mapped_frame(frame, requested_frame);
// Run all attached effects over the frame
280 apply_effects(new_frame);
// Guard clause (presumably the no-reader branch of this method)
287 throw ReaderClosed(
"No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.",
"");
291 string Clip::get_file_extension(
string path)
294 return path.substr(path.find_last_of(
".") + 1);
298 void Clip::reverse_buffer(juce::AudioSampleBuffer* buffer)
300 int number_of_samples = buffer->getNumSamples();
301 int channels = buffer->getNumChannels();
304 AudioSampleBuffer *reversed =
new juce::AudioSampleBuffer(channels, number_of_samples);
307 for (
int channel = 0; channel < channels; channel++)
310 for (
int s = number_of_samples - 1; s >= 0; s--, n++)
311 reversed->getWritePointer(channel)[n] = buffer->getWritePointer(channel)[s];
317 for (
int channel = 0; channel < channels; channel++)
319 buffer->addFrom(channel, 0, reversed->getReadPointer(channel), number_of_samples, 1.0f);
// Adjust the audio and image of a frame based on the 'time' keyframe curve,
// handling slow-down (frame repetition + resampling), speed-up (combining
// several frames' audio), and reversed playback. Returns a new time-mapped
// Frame; requires an initialized reader.
// NOTE(review): fragmentary capture — many original lines (braces, else
// branches, returns) are missing from this view; code below is kept
// byte-identical to the captured fragments.
326 tr1::shared_ptr<Frame> Clip::get_time_mapped_frame(tr1::shared_ptr<Frame> frame,
long int frame_number)
throw(
ReaderClosed)
// Guard: no reader attached yet
331 throw ReaderClosed(
"No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.",
"");
// Only do the (expensive) time-mapping when a time curve is defined
334 if (time.Values.size() > 1)
336 tr1::shared_ptr<Frame> new_frame;
// Scratch audio buffer, allocated by whichever branch needs it
339 juce::AudioSampleBuffer *samples = NULL;
// Frame number the time curve maps the requested frame onto
344 int new_frame_number = round(time.GetValue(frame_number));
// Build an empty output frame sized for that mapped frame number
347 int samples_in_frame =
Frame::GetSamplesPerFrame(new_frame_number, reader->info.fps, reader->info.sample_rate, frame->GetAudioChannelsCount());
348 new_frame = tr1::shared_ptr<Frame>(
new Frame(new_frame_number, 1, 1,
"#000000", samples_in_frame, frame->GetAudioChannelsCount()));
// Image comes straight from the mapped source frame
351 new_frame->AddImage(GetOrCreateFrame(new_frame_number)->GetImage());
// How many source frames the curve advances at this point (speed)
355 int delta = int(round(time.GetDelta(frame_number)));
358 int sample_rate = reader->info.sample_rate;
359 int channels = reader->info.channels;
360 int number_of_samples = GetOrCreateFrame(new_frame_number)->GetAudioSamplesCount();
363 if (reader->info.has_audio) {
// --- Slow motion: the same source frame is repeated, so stretch its
// audio with the resampler and hand out consecutive slices of it ---
365 if (time.GetRepeatFraction(frame_number).den > 1) {
368 AudioSampleBuffer *resampled_buffer = NULL;
369 int resampled_buffer_size = 0;
// Copy the mapped frame's audio into a scratch buffer
372 samples =
new juce::AudioSampleBuffer(channels, number_of_samples);
376 for (
int channel = 0; channel < channels; channel++)
378 samples->addFrom(channel, 0, GetOrCreateFrame(new_frame_number)->GetAudioSamples(channel),
379 number_of_samples, 1.0f);
// Playing backwards: reverse before stretching
382 if (!time.IsIncreasing(frame_number))
383 reverse_buffer(samples);
// Stretch the audio by the repeat denominator (1/den speed)
386 resampler->SetBuffer(samples, 1.0 / time.GetRepeatFraction(frame_number).den);
389 resampled_buffer = resampler->GetResampledBuffer();
392 resampled_buffer_size = resampled_buffer->getNumSamples();
// Which slice of the stretched audio belongs to this repeat
395 int start = (number_of_samples * (time.GetRepeatFraction(frame_number).num - 1));
398 for (
int channel = 0; channel < channels; channel++)
400 new_frame->AddAudio(
true, channel, 0, resampled_buffer->getReadPointer(channel, start),
401 number_of_samples, 1.0f);
// Buffer is owned by the resampler; just drop our alias
404 resampled_buffer = NULL;
// --- Fast motion (forward): several source frames collapse into one
// output frame, so concatenate their audio and compress it ---
407 else if (abs(delta) > 1 && abs(delta) < 100) {
// First pass: total sample count across the skipped frames
411 int total_delta_samples = 0;
412 for (
int delta_frame = new_frame_number - (delta - 1);
413 delta_frame <= new_frame_number; delta_frame++)
415 reader->info.sample_rate,
416 reader->info.channels);
// Allocate one buffer big enough for all of them
419 samples =
new juce::AudioSampleBuffer(channels, total_delta_samples);
// Second pass: append each skipped frame's audio in order
423 for (
int delta_frame = new_frame_number - (delta - 1);
424 delta_frame <= new_frame_number; delta_frame++) {
426 int number_of_delta_samples = GetOrCreateFrame(delta_frame)->GetAudioSamplesCount();
427 AudioSampleBuffer *delta_samples =
new juce::AudioSampleBuffer(channels,
428 number_of_delta_samples);
429 delta_samples->clear();
431 for (
int channel = 0; channel < channels; channel++)
432 delta_samples->addFrom(channel, 0, GetOrCreateFrame(delta_frame)->GetAudioSamples(channel),
433 number_of_delta_samples, 1.0f);
// Reverse each piece when playing backwards
436 if (!time.IsIncreasing(frame_number))
437 reverse_buffer(delta_samples);
440 for (
int channel = 0; channel < channels; channel++)
442 samples->addFrom(channel, start, delta_samples->getReadPointer(channel),
443 number_of_delta_samples, 1.0f);
446 delete delta_samples;
447 delta_samples = NULL;
// Advance the write position for the next frame's audio
450 start += number_of_delta_samples;
// --- Fast motion (backward): same concatenation, iterating frames in
// descending order ---
455 int total_delta_samples = 0;
456 for (
int delta_frame = new_frame_number - (delta + 1);
457 delta_frame >= new_frame_number; delta_frame--)
459 reader->info.sample_rate,
460 reader->info.channels);
463 samples =
new juce::AudioSampleBuffer(channels, total_delta_samples);
467 for (
int delta_frame = new_frame_number - (delta + 1);
468 delta_frame >= new_frame_number; delta_frame--) {
470 int number_of_delta_samples = GetOrCreateFrame(delta_frame)->GetAudioSamplesCount();
471 AudioSampleBuffer *delta_samples =
new juce::AudioSampleBuffer(channels,
472 number_of_delta_samples);
473 delta_samples->clear();
475 for (
int channel = 0; channel < channels; channel++)
476 delta_samples->addFrom(channel, 0, GetOrCreateFrame(delta_frame)->GetAudioSamples(channel),
477 number_of_delta_samples, 1.0f);
480 if (!time.IsIncreasing(frame_number))
481 reverse_buffer(delta_samples);
484 for (
int channel = 0; channel < channels; channel++)
486 samples->addFrom(channel, start, delta_samples->getReadPointer(channel),
487 number_of_delta_samples, 1.0f);
490 delete delta_samples;
491 delta_samples = NULL;
494 start += number_of_delta_samples;
// Compress the concatenated audio down to one frame's worth of samples
499 resampler->SetBuffer(samples,
float(start) /
float(number_of_samples));
502 AudioSampleBuffer *buffer = resampler->GetResampledBuffer();
503 int resampled_buffer_size = buffer->getNumSamples();
506 for (
int channel = 0; channel < channels; channel++)
508 new_frame->AddAudio(
true, channel, 0, buffer->getReadPointer(channel), number_of_samples, 1.0f);
// --- Normal speed (delta == 1 or -1): copy the audio through, only
// reversing it when the curve is decreasing ---
515 samples =
new juce::AudioSampleBuffer(channels, number_of_samples);
519 for (
int channel = 0; channel < channels; channel++)
521 samples->addFrom(channel, 0, frame->GetAudioSamples(channel), number_of_samples, 1.0f);
524 if (!time.IsIncreasing(frame_number))
525 reverse_buffer(samples);
528 for (
int channel = 0; channel < channels; channel++)
529 new_frame->AddAudio(
true, channel, 0, samples->getReadPointer(channel), number_of_samples, 1.0f);
547 long int Clip::adjust_frame_number_minimum(
long int frame_number)
550 if (frame_number < 1)
// Fetch a frame from the attached reader (fragment — the surrounding
// try/catch or fallback frame-creation lines are missing from this capture).
558 tr1::shared_ptr<Frame> Clip::GetOrCreateFrame(
long int number)
560 tr1::shared_ptr<Frame> new_frame;
// Delegate to the reader; presumably wrapped in error handling that creates
// a blank frame on failure — TODO confirm against full source
567 new_frame = reader->
GetFrame(number);
598 Point requested_point(requested_frame, requested_frame);
602 root[
"id"] =
add_property_json(
"ID", 0.0,
"string",
Id(),
false, 0, -1, -1,
CONSTANT, -1,
true);
603 root[
"position"] =
add_property_json(
"Position",
Position(),
"float",
"",
false, 0, 0, 1000 * 60 * 30,
CONSTANT, -1,
false);
604 root[
"layer"] =
add_property_json(
"Layer",
Layer(),
"int",
"",
false, 0, 0, 1000,
CONSTANT, -1,
false);
605 root[
"start"] =
add_property_json(
"Start",
Start(),
"float",
"",
false, 0, 0, 1000 * 60 * 30,
CONSTANT, -1,
false);
606 root[
"end"] =
add_property_json(
"End",
End(),
"float",
"",
false, 0, 0, 1000 * 60 * 30,
CONSTANT, -1,
false);
607 root[
"duration"] =
add_property_json(
"Duration",
Duration(),
"float",
"",
false, 0, 0, 1000 * 60 * 30,
CONSTANT, -1,
true);
608 root[
"gravity"] =
add_property_json(
"Gravity",
gravity,
"int",
"",
false, 0, -1, -1,
CONSTANT, -1,
false);
609 root[
"scale"] =
add_property_json(
"Scale",
scale,
"int",
"",
false, 0, -1, -1,
CONSTANT, -1,
false);
610 root[
"anchor"] =
add_property_json(
"Anchor",
anchor,
"int",
"",
false, 0, -1, -1,
CONSTANT, -1,
false);
611 root[
"waveform"] =
add_property_json(
"Waveform", waveform,
"bool",
"",
false, 0, -1, -1,
CONSTANT, -1,
false);
639 root[
"location_x"] =
add_property_json(
"Location X",
location_x.
GetValue(requested_frame),
"float",
"",
location_x.
Contains(requested_point),
location_x.
GetCount(), -10000, 10000,
location_x.
GetClosestPoint(requested_point).
interpolation,
location_x.
GetClosestPoint(requested_point).
co.
X,
false);
640 root[
"location_y"] =
add_property_json(
"Location Y",
location_y.
GetValue(requested_frame),
"float",
"",
location_y.
Contains(requested_point),
location_y.
GetCount(), -10000, 10000,
location_y.
GetClosestPoint(requested_point).
interpolation,
location_y.
GetClosestPoint(requested_point).
co.
X,
false);
641 root[
"scale_x"] =
add_property_json(
"Scale X",
scale_x.
GetValue(requested_frame),
"float",
"",
scale_x.
Contains(requested_point),
scale_x.
GetCount(), 0.0, 100.0,
scale_x.
GetClosestPoint(requested_point).
interpolation,
scale_x.
GetClosestPoint(requested_point).
co.
X,
false);
642 root[
"scale_y"] =
add_property_json(
"Scale Y",
scale_y.
GetValue(requested_frame),
"float",
"",
scale_y.
Contains(requested_point),
scale_y.
GetCount(), 0.0, 100.0,
scale_y.
GetClosestPoint(requested_point).
interpolation,
scale_y.
GetClosestPoint(requested_point).
co.
X,
false);
643 root[
"alpha"] =
add_property_json(
"Alpha",
alpha.
GetValue(requested_frame),
"float",
"",
alpha.
Contains(requested_point),
alpha.
GetCount(), 0.0, 1.0,
alpha.
GetClosestPoint(requested_point).
interpolation,
alpha.
GetClosestPoint(requested_point).
co.
X,
false);
644 root[
"rotation"] =
add_property_json(
"Rotation",
rotation.
GetValue(requested_frame),
"float",
"",
rotation.
Contains(requested_point),
rotation.
GetCount(), -10000, 10000,
rotation.
GetClosestPoint(requested_point).
interpolation,
rotation.
GetClosestPoint(requested_point).
co.
X,
false);
645 root[
"volume"] =
add_property_json(
"Volume",
volume.
GetValue(requested_frame),
"float",
"",
volume.
Contains(requested_point),
volume.
GetCount(), 0.0, 1.0,
volume.
GetClosestPoint(requested_point).
interpolation,
volume.
GetClosestPoint(requested_point).
co.
X,
false);
646 root[
"time"] =
add_property_json(
"Time",
time.
GetValue(requested_frame),
"float",
"",
time.
Contains(requested_point),
time.
GetCount(), 0.0, 1000 * 60 * 30,
time.
GetClosestPoint(requested_point).
interpolation,
time.
GetClosestPoint(requested_point).
co.
X,
false);
648 root[
"wave_color"] =
add_property_json(
"Wave Color", 0.0,
"color",
"",
wave_color.
red.
Contains(requested_point),
wave_color.
red.
GetCount(), -10000, 10000,
wave_color.
red.
GetClosestPoint(requested_point).
interpolation,
wave_color.
red.
GetClosestPoint(requested_point).
co.
X,
false);
649 root[
"wave_color"][
"red"] =
add_property_json(
"Red",
wave_color.
red.
GetValue(requested_frame),
"float",
"",
wave_color.
red.
Contains(requested_point),
wave_color.
red.
GetCount(), -10000, 10000,
wave_color.
red.
GetClosestPoint(requested_point).
interpolation,
wave_color.
red.
GetClosestPoint(requested_point).
co.
X,
false);
650 root[
"wave_color"][
"blue"] =
add_property_json(
"Blue",
wave_color.
blue.
GetValue(requested_frame),
"float",
"",
wave_color.
blue.
Contains(requested_point),
wave_color.
blue.
GetCount(), -10000, 10000,
wave_color.
blue.
GetClosestPoint(requested_point).
interpolation,
wave_color.
blue.
GetClosestPoint(requested_point).
co.
X,
false);
651 root[
"wave_color"][
"green"] =
add_property_json(
"Green",
wave_color.
green.
GetValue(requested_frame),
"float",
"",
wave_color.
green.
Contains(requested_point),
wave_color.
green.
GetCount(), -10000, 10000,
wave_color.
green.
GetClosestPoint(requested_point).
interpolation,
wave_color.
green.
GetClosestPoint(requested_point).
co.
X,
false);
655 return root.toStyledString();
664 root[
"scale"] =
scale;
666 root[
"waveform"] = waveform;
692 root[
"effects"] = Json::Value(Json::arrayValue);
695 list<EffectBase*>::iterator effect_itr;
696 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
700 root[
"effects"].append(existing_effect->
JsonValue());
716 bool success = reader.parse( value, root );
719 throw InvalidJSON(
"JSON could not be parsed (or is invalid)",
"");
729 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)",
"");
740 if (!root[
"gravity"].isNull())
742 if (!root[
"scale"].isNull())
744 if (!root[
"anchor"].isNull())
746 if (!root[
"waveform"].isNull())
747 waveform = root[
"waveform"].asBool();
748 if (!root[
"scale_x"].isNull())
750 if (!root[
"scale_y"].isNull())
752 if (!root[
"location_x"].isNull())
754 if (!root[
"location_y"].isNull())
756 if (!root[
"alpha"].isNull())
758 if (!root[
"rotation"].isNull())
760 if (!root[
"time"].isNull())
762 if (!root[
"volume"].isNull())
764 if (!root[
"wave_color"].isNull())
766 if (!root[
"crop_width"].isNull())
768 if (!root[
"crop_height"].isNull())
770 if (!root[
"crop_x"].isNull())
772 if (!root[
"crop_y"].isNull())
774 if (!root[
"shear_x"].isNull())
776 if (!root[
"shear_y"].isNull())
778 if (!root[
"perspective_c1_x"].isNull())
780 if (!root[
"perspective_c1_y"].isNull())
782 if (!root[
"perspective_c2_x"].isNull())
784 if (!root[
"perspective_c2_y"].isNull())
786 if (!root[
"perspective_c3_x"].isNull())
788 if (!root[
"perspective_c3_y"].isNull())
790 if (!root[
"perspective_c4_x"].isNull())
792 if (!root[
"perspective_c4_y"].isNull())
794 if (!root[
"effects"].isNull()) {
800 for (
int x = 0; x < root[
"effects"].size(); x++) {
802 Json::Value existing_effect = root[
"effects"][x];
807 if (!existing_effect[
"type"].isNull())
809 if (existing_effect[
"type"].asString() ==
"Blur")
812 else if (existing_effect[
"type"].asString() ==
"Brightness")
815 else if (existing_effect[
"type"].asString() ==
"ChromaKey")
818 else if (existing_effect[
"type"].asString() ==
"Deinterlace")
821 else if (existing_effect[
"type"].asString() ==
"Mask")
824 else if (existing_effect[
"type"].asString() ==
"Negate")
827 else if (existing_effect[
"type"].asString() ==
"Saturation")
837 if (!root[
"reader"].isNull())
839 if (!root[
"reader"][
"type"].isNull())
842 bool already_open =
false;
846 already_open = reader->
IsOpen();
855 string type = root[
"reader"][
"type"].asString();
857 if (type ==
"FFmpegReader") {
860 reader =
new FFmpegReader(root[
"reader"][
"path"].asString());
863 }
else if (type ==
"QtImageReader") {
866 reader =
new QtImageReader(root[
"reader"][
"path"].asString());
869 #ifdef USE_IMAGEMAGICK
870 }
else if (type ==
"ImageReader") {
873 reader =
new ImageReader(root[
"reader"][
"path"].asString());
876 }
else if (type ==
"TextReader") {
883 }
else if (type ==
"ChunkReader") {
886 reader =
new ChunkReader(root[
"reader"][
"path"].asString(), (
ChunkVersion) root[
"reader"][
"chunk_version"].asInt());
889 }
else if (type ==
"DummyReader") {
898 manage_reader =
true;
// Fragments of the effect-list management methods: sort_effects() (body not
// captured), AddEffect() appending to the effects list, and RemoveEffect()
// erasing from it.
909 void Clip::sort_effects()
// AddEffect(): take ownership reference of the effect by appending it
919 effects.push_back(effect);
// RemoveEffect(): drop the effect from the list (std::list::remove)
928 effects.remove(effect);
// Run every attached effect over the frame, in list order, feeding each
// effect's output into the next. (Fragment — loop braces and the final
// return are missing from this capture.)
932 tr1::shared_ptr<Frame> Clip::apply_effects(tr1::shared_ptr<Frame> frame)
935 list<EffectBase*>::iterator effect_itr;
936 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
// Each effect returns a (possibly new) frame that replaces the current one
942 frame = effect->
GetFrame(frame, frame->number);
This class reads a special chunk-formatted file, which can be easily shared in a distributed environm...
Keyframe perspective_c3_x
Curves representing X for coordinate 3.
void Close()
Close the internal reader.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
Keyframe scale_y
Curve representing the vertical scaling in percent (0 to 100)
Keyframe perspective_c4_x
Curves representing X for coordinate 4.
This abstract class is the base class, used by all effects in libopenshot.
Align clip to the right of its parent (middle aligned)
Keyframe perspective_c1_x
Curves representing X for coordinate 1.
Keyframe green
Curve representing the green value (0 - 255)
Keyframe perspective_c2_x
Curves representing X for coordinate 2.
Keyframe crop_x
Curve representing X offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%)
This class adjusts the blur of an image, and can be animated with openshot::Keyframe curves over time...
string previous_properties
This string contains the previous JSON properties.
string PropertiesJSON(long int requested_frame)
float End()
Override End() method.
Keyframe perspective_c3_y
Curves representing Y for coordinate 3.
tr1::shared_ptr< Frame > GetFrame(long int requested_frame)
Get an openshot::Frame object for a specific frame number of this timeline.
Align clip to the bottom right of its parent.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
GravityType gravity
The gravity of a clip determines where it snaps to its parent.
int width
The width of the video (in pixels)
Keyframe volume
Curve representing the volume (0 to 1)
This class represents a single frame of video (i.e. image & audio data)
This class is used as a simple, dummy reader, which always returns a blank frame. ...
float ToFloat()
Return this fraction as a float (i.e. 1/2 = 0.5)
This class uses the ImageMagick++ libraries, to remove (i.e. key out) a color (i.e. greenscreen)
InterpolationType interpolation
This is the interpolation mode.
Keyframe red
Curve representing the red value (0 - 255)
float duration
Length of time (in seconds)
bool Contains(Point p)
Does this keyframe contain a specific point.
Scale the clip until both height and width fill the canvas (cropping the overlap) ...
Keyframe time
Curve representing the frames over time to play (used for speed and direction of video) ...
A Point is the basic building block of a key-frame curve.
ScaleType
This enumeration determines how clips are scaled to fit their parent container.
void AddEffect(EffectBase *effect)
Add an effect to the clip.
virtual void Close()=0
Close the reader (and any resources it was consuming)
This abstract class is the base class, used by all readers in libopenshot.
int Layer()
Get layer of clip on timeline (lower number is covered by higher numbers)
Exception when a reader is closed, and a frame is requested.
virtual tr1::shared_ptr< Frame > GetFrame(tr1::shared_ptr< Frame > frame, long int frame_number)=0
This method is required for all derived classes of EffectBase, and returns a modified openshot::Frame...
Color wave_color
Curve representing the color of the audio wave form.
Align clip to the top right of its parent.
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
Align clip to the bottom left of its parent.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Keyframe crop_width
Curve representing width in percent (0.0=0%, 1.0=100%)
This class uses the ImageMagick++ libraries, to open image files, and return openshot::Frame objects ...
Keyframe location_x
Curve representing the relative X position in percent based on the gravity (-100 to 100) ...
float GetValue(long int index)
Get the value at a specific index.
Keyframe location_y
Curve representing the relative Y position in percent based on the gravity (-100 to 100) ...
This class uses the FFmpeg libraries, to open video files and audio files, and return openshot::Frame...
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
Keyframe perspective_c1_y
Curves representing Y for coordinate 1.
Keyframe blue
Curve representing the blue value (0 - 255)
Keyframe crop_y
Curve representing Y offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%)
Keyframe shear_x
Curve representing X shear angle in degrees (-45.0=left, 45.0=right)
ScaleType scale
The scale determines how a clip should be resized to fit its parent.
int height
The height of the video (in pixels)
Align clip to the bottom center of its parent.
Align clip to the top left of its parent.
Json::Value add_property_choice_json(string name, int value, int selected_value)
Generate JSON choice for a property (dropdown properties)
Json::Value add_property_json(string name, float value, string type, string memo, bool contains_point, int number_of_points, float min_value, float max_value, InterpolationType intepolation, int closest_point_x, bool readonly)
Generate JSON for a property.
Exception for files that can not be found or opened.
string Id()
Get basic properties.
This class uses the ImageMagick++ libraries, to negate image (i.e. negative)
float Position()
Get position on timeline (in seconds)
void SetJson(string value)
Load JSON string into this object.
Align clip to the left of its parent (middle aligned)
This class adjusts the saturation of color on a frame's image.
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
float X
The X value of the coordinate (usually representing the frame #)
ChunkVersion
This enumeration allows the user to choose which version of the chunk they would like (low...
Keyframe rotation
Curve representing the rotation (0 to 360)
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
Scale the clip until both height and width fill the canvas (distort to fit)
This class uses the ImageMagick++ libraries, to apply alpha (or transparency) masks to any frame...
vector< Point > Points
Vector of all Points.
Point GetClosestPoint(Point p)
Get current point (or closest point) from the X coordinate (i.e. the frame number) ...
ReaderInfo info
Information about the current media file.
Keyframe shear_y
Curve representing Y shear angle in degrees (-45.0=down, 45.0=up)
Clip()
Default Constructor.
Anchor the clip to the viewport (which can be moved / animated around the canvas) ...
Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
AnchorType
This enumeration determines what parent a clip should be aligned to.
float end
The position in seconds to end playing (used to trim the ending of a clip)
This class adjusts the brightness and contrast of an image, and can be animated with openshot::Keyfra...
Exception for frames that are out of bounds.
void Open()
Open the internal reader.
This class represents a color (used on the timeline and clips)
Align clip to the center of its parent (middle aligned)
GravityType crop_gravity
Cropping needs to have a gravity to determine what side we are cropping.
void RemoveEffect(EffectBase *effect)
Remove an effect from the clip.
long int GetCount()
Get the number of points (i.e. # of points)
virtual tr1::shared_ptr< Frame > GetFrame(long int number)=0
Coordinate co
This is the primary coordinate.
AnchorType anchor
The anchor determines what parent a clip should snap to.
This class uses the ImageMagick++ libraries, to de-interlace the image, which removes the EVEN or ODD...
Exception for invalid JSON.
Keyframe alpha
Curve representing the alpha (1 to 0)
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Keyframe scale_x
Curve representing the horizontal scaling in percent (0 to 100)
Keyframe perspective_c2_y
Curves representing Y for coordinate 2.
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
This class uses the ImageMagick++ libraries, to create frames with "Text", and return openshot::Frame...
This class uses the Qt library, to open image files, and return openshot::Frame objects containing th...
Align clip to the top center of its parent.
int channels
The number of audio channels used in the audio stream.
A Keyframe is a collection of Point instances, which is used to vary a number or property over time...
Scale the clip until either height or width fills the canvas (with no cropping)
Keyframe perspective_c4_y
Curves representing Y for coordinate 4.
int GetSamplesPerFrame(Fraction fps, int sample_rate, int channels)
Calculate the # of samples per video frame (for the current frame number)
Json::Value JsonValue()
Generate Json::JsonValue for this object.
float Duration()
Get the length of this clip (in seconds)
GravityType
This enumeration determines how clips are aligned to their parent container.
Anchor the clip to the canvas.
Constant curves jump from their previous position to a new one (with no interpolation).
string Json()
Get and Set JSON methods.
float Start()
Get start position (in seconds) of clip (trim start of video)
virtual void Open()=0
Open the reader (and start consuming resources, such as images or video files)
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
Exception when too many seek attempts happen.
ReaderBase * Reader()
Get the current reader.
virtual bool IsOpen()=0
Determine if the reader is currently open or closed.
This class is used to resample audio data for many sequential frames.
Keyframe crop_height
Curve representing height in percent (0.0=0%, 1.0=100%)