OpenShot Library | libopenshot  0.1.1
Clip.cpp
Go to the documentation of this file.
1 /**
2  * @file
3  * @brief Source file for Clip class
4  * @author Jonathan Thomas <jonathan@openshot.org>
5  *
6  * @section LICENSE
7  *
8  * Copyright (c) 2008-2014 OpenShot Studios, LLC
9  * <http://www.openshotstudios.com/>. This file is part of
10  * OpenShot Library (libopenshot), an open-source project dedicated to
11  * delivering high quality video editing and animation solutions to the
12  * world. For more information visit <http://www.openshot.org/>.
13  *
14  * OpenShot Library (libopenshot) is free software: you can redistribute it
15  * and/or modify it under the terms of the GNU Lesser General Public License
16  * as published by the Free Software Foundation, either version 3 of the
17  * License, or (at your option) any later version.
18  *
19  * OpenShot Library (libopenshot) is distributed in the hope that it will be
20  * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
21  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22  * GNU Lesser General Public License for more details.
23  *
24  * You should have received a copy of the GNU Lesser General Public License
25  * along with OpenShot Library. If not, see <http://www.gnu.org/licenses/>.
26  */
27 
28 #include "../include/Clip.h"
29 
30 using namespace openshot;
31 
32 // Init default settings for a clip
33 void Clip::init_settings()
34 {
35  // Init clip settings
36  Position(0.0);
37  Layer(0);
38  Start(0.0);
39  End(0.0);
41  scale = SCALE_FIT;
43  waveform = false;
45 
46  // Init scale curves
47  scale_x = Keyframe(1.0);
48  scale_y = Keyframe(1.0);
49 
50  // Init location curves
51  location_x = Keyframe(0.0);
52  location_y = Keyframe(0.0);
53 
54  // Init alpha & rotation
55  alpha = Keyframe(1.0);
56  rotation = Keyframe(0.0);
57 
58  // Init time & volume
59  time = Keyframe(0.0);
60  volume = Keyframe(1.0);
61 
62  // Init audio waveform color
63  wave_color = Color((unsigned char)0, (unsigned char)123, (unsigned char)255, (unsigned char)255);
64 
65  // Init crop settings
67  crop_width = Keyframe(-1.0);
68  crop_height = Keyframe(-1.0);
69  crop_x = Keyframe(0.0);
70  crop_y = Keyframe(0.0);
71 
72  // Init shear and perspective curves
73  shear_x = Keyframe(0.0);
74  shear_y = Keyframe(0.0);
75  perspective_c1_x = Keyframe(-1.0);
76  perspective_c1_y = Keyframe(-1.0);
77  perspective_c2_x = Keyframe(-1.0);
78  perspective_c2_y = Keyframe(-1.0);
79  perspective_c3_x = Keyframe(-1.0);
80  perspective_c3_y = Keyframe(-1.0);
81  perspective_c4_x = Keyframe(-1.0);
82  perspective_c4_y = Keyframe(-1.0);
83 
84  // Default pointers
85  reader = NULL;
86  resampler = NULL;
87  audio_cache = NULL;
88  manage_reader = false;
89 }
90 
91 // Default Constructor for a clip
93 {
94  // Init all default settings
95  init_settings();
96 }
97 
98 // Constructor with reader
99 Clip::Clip(ReaderBase* new_reader)
100 {
101  // Init all default settings
102  init_settings();
103 
104  // Set the reader
105  reader = new_reader;
106 
107  // Open and Close the reader (to set the duration of the clip)
108  Open();
109  Close();
110 
111  // Update duration
112  End(reader->info.duration);
113 }
114 
115 // Constructor with filepath
116 Clip::Clip(string path)
117 {
118  // Init all default settings
119  init_settings();
120 
121  // Get file extension (and convert to lower case)
122  string ext = get_file_extension(path);
123  transform(ext.begin(), ext.end(), ext.begin(), ::tolower);
124 
125  // Determine if common video formats
126  if (ext=="avi" || ext=="mov" || ext=="mkv" || ext=="mpg" || ext=="mpeg" || ext=="mp3" || ext=="mp4" || ext=="mts" ||
127  ext=="ogg" || ext=="wav" || ext=="wmv" || ext=="webm" || ext=="vob")
128  {
129  try
130  {
131  // Open common video format
132  reader = new FFmpegReader(path);
133 
134  } catch(...) { }
135  }
136 
137  // If no video found, try each reader
138  if (!reader)
139  {
140  try
141  {
142  // Try an image reader
143  reader = new QtImageReader(path);
144 
145  } catch(...) {
146  try
147  {
148  // Try a video reader
149  reader = new FFmpegReader(path);
150 
151  } catch(...) { }
152  }
153  }
154 
155  // Update duration
156  if (reader) {
157  End(reader->info.duration);
158  manage_reader = true;
159  }
160 }
161 
162 // Destructor
164 {
165  // Delete the reader if clip created it
166  if (manage_reader && reader) {
167  delete reader;
168  reader = NULL;
169  }
170 
171  // Close the resampler
172  if (resampler) {
173  delete resampler;
174  resampler = NULL;
175  }
176 }
177 
178 /// Set the current reader
179 void Clip::Reader(ReaderBase* new_reader)
180 {
181  // set reader pointer
182  reader = new_reader;
183 }
184 
185 /// Get the current reader
187 {
188  if (reader)
189  return reader;
190  else
191  // Throw error if reader not initialized
192  throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.", "");
193 }
194 
195 // Open the internal reader
197 {
198  if (reader)
199  {
200  // Open the reader
201  reader->Open();
202 
203  // Set some clip properties from the file reader
204  if (end == 0.0)
205  End(reader->info.duration);
206  }
207  else
208  // Throw error if reader not initialized
209  throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.", "");
210 }
211 
212 // Close the internal reader
214 {
215  if (reader) {
216  // Close the reader
217  reader->Close();
218  }
219  else
220  // Throw error if reader not initialized
221  throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.", "");
222 }
223 
224 // Get end position of clip (trim end of video), which can be affected by the time curve.
225 float Clip::End() throw(ReaderClosed)
226 {
227  // if a time curve is present, use it's length
228  if (time.Points.size() > 1)
229  {
230  // Determine the FPS fo this clip
231  float fps = 24.0;
232  if (reader)
233  // file reader
234  fps = reader->info.fps.ToFloat();
235  else
236  // Throw error if reader not initialized
237  throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.", "");
238 
239  return float(time.GetLength()) / fps;
240  }
241  else
242  // just use the duration (as detected by the reader)
243  return end;
244 }
245 
// Get an openshot::Frame object for a specific frame number of this reader.
// Builds a private copy of the source frame (image + audio), applies the time
// curve remapping and any attached effects, and returns the processed frame.
// @throws ReaderClosed when no reader has been attached yet
tr1::shared_ptr<Frame> Clip::GetFrame(long int requested_frame) throw(ReaderClosed)
{
	if (reader)
	{
		// Adjust out of bounds frame number (clamped to >= 1)
		requested_frame = adjust_frame_number_minimum(requested_frame);

		// Is a time map detected? If so, translate the requested frame
		// through the time curve to find the source frame to fetch.
		long int new_frame_number = requested_frame;
		if (time.Values.size() > 1)
			new_frame_number = time.GetLong(requested_frame);


		// Now that we have re-mapped what frame number is needed, go and get the frame pointer
		tr1::shared_ptr<Frame> original_frame = GetOrCreateFrame(new_frame_number);

		// Create a new frame (1x1 placeholder image; the real image is copied in below)
		// so effects can mutate it without touching the reader's cached frame.
		tr1::shared_ptr<Frame> frame(new Frame(new_frame_number, 1, 1, "#000000", original_frame->GetAudioSamplesCount(), original_frame->GetAudioChannelsCount()));
		frame->SampleRate(original_frame->SampleRate());
		frame->ChannelsLayout(original_frame->ChannelsLayout());

		// Deep-copy the image so this clip owns its pixels
		// (comment in original said "odd field" — presumably stale; the whole image is copied)
		frame->AddImage(tr1::shared_ptr<QImage>(new QImage(*original_frame->GetImage())));

		// Loop through each channel, add audio (full-volume copy of the source samples)
		if (reader->info.has_audio)
			for (int channel = 0; channel < original_frame->GetAudioChannelsCount(); channel++)
				frame->AddAudio(true, channel, 0, original_frame->GetAudioSamples(channel), original_frame->GetAudioSamplesCount(), 1.0);

		// Get time mapped frame number (used to increase speed, change direction, etc...)
		// When no time curve is set this returns 'frame' unchanged.
		tr1::shared_ptr<Frame> new_frame = get_time_mapped_frame(frame, requested_frame);

		// Apply effects to the frame (if any)
		apply_effects(new_frame);

		// Return processed 'frame'
		return new_frame;
	}
	else
		// Throw error if reader not initialized
		throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.", "");
}
289 
290 // Get file extension
291 string Clip::get_file_extension(string path)
292 {
293  // return last part of path
294  return path.substr(path.find_last_of(".") + 1);
295 }
296 
297 // Reverse an audio buffer
298 void Clip::reverse_buffer(juce::AudioSampleBuffer* buffer)
299 {
300  int number_of_samples = buffer->getNumSamples();
301  int channels = buffer->getNumChannels();
302 
303  // Reverse array (create new buffer to hold the reversed version)
304  AudioSampleBuffer *reversed = new juce::AudioSampleBuffer(channels, number_of_samples);
305  reversed->clear();
306 
307  for (int channel = 0; channel < channels; channel++)
308  {
309  int n=0;
310  for (int s = number_of_samples - 1; s >= 0; s--, n++)
311  reversed->getWritePointer(channel)[n] = buffer->getWritePointer(channel)[s];
312  }
313 
314  // Copy the samples back to the original array
315  buffer->clear();
316  // Loop through channels, and get audio samples
317  for (int channel = 0; channel < channels; channel++)
318  // Get the audio samples for this channel
319  buffer->addFrom(channel, 0, reversed->getReadPointer(channel), number_of_samples, 1.0f);
320 
321  delete reversed;
322  reversed = NULL;
323 }
324 
// Adjust the audio and image of a time mapped frame.
// When a time curve is present, this remaps 'frame_number' through the curve,
// fetches the remapped source frame's image, and rebuilds the audio in one of
// three ways: resample-slower (repeat fraction), concatenate-and-resample-faster
// (|delta| > 1), or a straight (possibly reversed) copy. Without a time curve
// the input frame is returned untouched.
// @throws ReaderClosed when no reader has been attached yet
tr1::shared_ptr<Frame> Clip::get_time_mapped_frame(tr1::shared_ptr<Frame> frame, long int frame_number) throw(ReaderClosed)
{
	// Check for valid reader
	if (!reader)
		// Throw error if reader not initialized
		throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.", "");

	// Check for a valid time map curve
	if (time.Values.size() > 1)
	{
		tr1::shared_ptr<Frame> new_frame;

		// create buffer and resampler (resampler is lazily created and reused
		// across calls; deleted in ~Clip)
		juce::AudioSampleBuffer *samples = NULL;
		if (!resampler)
			resampler = new AudioResampler();

		// Get new frame number (time curve Y value, rounded to nearest frame)
		int new_frame_number = round(time.GetValue(frame_number));

		// Create a new frame (1x1 placeholder image, sized for this frame's audio)
		int samples_in_frame = Frame::GetSamplesPerFrame(new_frame_number, reader->info.fps, reader->info.sample_rate, frame->GetAudioChannelsCount());
		new_frame = tr1::shared_ptr<Frame>(new Frame(new_frame_number, 1, 1, "#000000", samples_in_frame, frame->GetAudioChannelsCount()));

		// Copy the image from the (remapped) source frame
		new_frame->AddImage(GetOrCreateFrame(new_frame_number)->GetImage());


		// Get delta (difference in previous Y value) — how many source frames
		// the curve advances per output frame; >1 means speed-up
		int delta = int(round(time.GetDelta(frame_number)));

		// Init audio vars
		// NOTE(review): sample_rate is never used below (GetSamplesPerFrame reads
		// reader->info.sample_rate directly) — dead local, kept byte-identical here
		int sample_rate = reader->info.sample_rate;
		int channels = reader->info.channels;
		int number_of_samples = GetOrCreateFrame(new_frame_number)->GetAudioSamplesCount();

		// Only resample audio if needed
		if (reader->info.has_audio) {
			// Determine if we are speeding up or slowing down
			if (time.GetRepeatFraction(frame_number).den > 1) {
				// SLOWING DOWN AUDIO: the same source frame is repeated den times,
				// so stretch its samples den-x and hand out the num-th slice
				// Resample data, and return new buffer pointer
				AudioSampleBuffer *resampled_buffer = NULL;
				int resampled_buffer_size = 0;

				// SLOW DOWN audio (split audio)
				samples = new juce::AudioSampleBuffer(channels, number_of_samples);
				samples->clear();

				// Loop through channels, and get audio samples
				for (int channel = 0; channel < channels; channel++)
					// Get the audio samples for this channel
					samples->addFrom(channel, 0, GetOrCreateFrame(new_frame_number)->GetAudioSamples(channel),
							number_of_samples, 1.0f);

				// Reverse the samples (if needed)
				if (!time.IsIncreasing(frame_number))
					reverse_buffer(samples);

				// Resample audio to be X times slower (where X is the denominator of the repeat fraction)
				resampler->SetBuffer(samples, 1.0 / time.GetRepeatFraction(frame_number).den);

				// Resample the data (since it's the 1st slice)
				resampled_buffer = resampler->GetResampledBuffer();

				// Get the length of the resampled buffer (if one exists)
				// NOTE(review): resampled_buffer_size is computed but never used — verify
				resampled_buffer_size = resampled_buffer->getNumSamples();

				// Just take the samples we need for the requested frame
				// (the num-th slice of the stretched audio)
				int start = (number_of_samples * (time.GetRepeatFraction(frame_number).num - 1));
				if (start > 0)
					start -= 1;
				for (int channel = 0; channel < channels; channel++)
					// Add new (slower) samples, to the frame object
					new_frame->AddAudio(true, channel, 0, resampled_buffer->getReadPointer(channel, start),
							number_of_samples, 1.0f);

				// Clean up
				// NOTE(review): pointer is only NULL'ed, not deleted — presumably the
				// resampler owns the buffer it returned; confirm against AudioResampler
				resampled_buffer = NULL;

			}
			else if (abs(delta) > 1 && abs(delta) < 100) {
				// SPEEDING UP: gather |delta| source frames of audio and compress
				// them into one output frame's worth of samples
				int start = 0;
				if (delta > 0) {
					// SPEED UP (multiple frames of audio), as long as it's not more than X frames
					// First pass: total sample count across the delta window
					int total_delta_samples = 0;
					for (int delta_frame = new_frame_number - (delta - 1);
						 delta_frame <= new_frame_number; delta_frame++)
						total_delta_samples += Frame::GetSamplesPerFrame(delta_frame, reader->info.fps,
																		 reader->info.sample_rate,
																		 reader->info.channels);

					// Allocate a new sample buffer for these delta frames
					samples = new juce::AudioSampleBuffer(channels, total_delta_samples);
					samples->clear();

					// Second pass: concatenate each frame's (possibly reversed) audio
					for (int delta_frame = new_frame_number - (delta - 1);
						 delta_frame <= new_frame_number; delta_frame++) {
						// buffer to hold delta samples
						int number_of_delta_samples = GetOrCreateFrame(delta_frame)->GetAudioSamplesCount();
						AudioSampleBuffer *delta_samples = new juce::AudioSampleBuffer(channels,
																					   number_of_delta_samples);
						delta_samples->clear();

						for (int channel = 0; channel < channels; channel++)
							delta_samples->addFrom(channel, 0, GetOrCreateFrame(delta_frame)->GetAudioSamples(channel),
												   number_of_delta_samples, 1.0f);

						// Reverse the samples (if needed)
						if (!time.IsIncreasing(frame_number))
							reverse_buffer(delta_samples);

						// Copy the samples into the concatenation buffer at 'start'
						for (int channel = 0; channel < channels; channel++)
							// Get the audio samples for this channel
							samples->addFrom(channel, start, delta_samples->getReadPointer(channel),
											 number_of_delta_samples, 1.0f);

						// Clean up
						delete delta_samples;
						delta_samples = NULL;

						// Increment start position
						start += number_of_delta_samples;
					}
				}
				else {
					// SPEED UP in REVERSE: same as above but walking frames backwards
					int total_delta_samples = 0;
					for (int delta_frame = new_frame_number - (delta + 1);
						 delta_frame >= new_frame_number; delta_frame--)
						total_delta_samples += Frame::GetSamplesPerFrame(delta_frame, reader->info.fps,
																		 reader->info.sample_rate,
																		 reader->info.channels);

					// Allocate a new sample buffer for these delta frames
					samples = new juce::AudioSampleBuffer(channels, total_delta_samples);
					samples->clear();

					// Loop through each frame in this delta (descending)
					for (int delta_frame = new_frame_number - (delta + 1);
						 delta_frame >= new_frame_number; delta_frame--) {
						// buffer to hold delta samples
						int number_of_delta_samples = GetOrCreateFrame(delta_frame)->GetAudioSamplesCount();
						AudioSampleBuffer *delta_samples = new juce::AudioSampleBuffer(channels,
																					   number_of_delta_samples);
						delta_samples->clear();

						for (int channel = 0; channel < channels; channel++)
							delta_samples->addFrom(channel, 0, GetOrCreateFrame(delta_frame)->GetAudioSamples(channel),
												   number_of_delta_samples, 1.0f);

						// Reverse the samples (if needed)
						if (!time.IsIncreasing(frame_number))
							reverse_buffer(delta_samples);

						// Copy the samples into the concatenation buffer at 'start'
						for (int channel = 0; channel < channels; channel++)
							// Get the audio samples for this channel
							samples->addFrom(channel, start, delta_samples->getReadPointer(channel),
											 number_of_delta_samples, 1.0f);

						// Clean up
						delete delta_samples;
						delta_samples = NULL;

						// Increment start position
						start += number_of_delta_samples;
					}
				}

				// Resample audio to be X times faster (where X is the delta of the repeat fraction)
				resampler->SetBuffer(samples, float(start) / float(number_of_samples));

				// Resample data, and return new buffer pointer
				AudioSampleBuffer *buffer = resampler->GetResampledBuffer();
				// NOTE(review): resampled_buffer_size is computed but never used — verify
				int resampled_buffer_size = buffer->getNumSamples();

				// Add the newly resized audio samples to the current frame
				for (int channel = 0; channel < channels; channel++)
					// Add new (faster) samples, to the frame object
					new_frame->AddAudio(true, channel, 0, buffer->getReadPointer(channel), number_of_samples, 1.0f);

				// Clean up (buffer presumably owned by the resampler — see note above)
				buffer = NULL;
			}
			else {
				// NORMAL SPEED: use the samples on this frame (but maybe reverse them if needed)
				samples = new juce::AudioSampleBuffer(channels, number_of_samples);
				samples->clear();

				// Loop through channels, and get audio samples
				for (int channel = 0; channel < channels; channel++)
					// Get the audio samples for this channel
					samples->addFrom(channel, 0, frame->GetAudioSamples(channel), number_of_samples, 1.0f);

				// reverse the samples
				if (!time.IsIncreasing(frame_number))
					reverse_buffer(samples);

				// Add reversed samples to the frame object
				for (int channel = 0; channel < channels; channel++)
					new_frame->AddAudio(true, channel, 0, samples->getReadPointer(channel), number_of_samples, 1.0f);


			}

			// Free the working buffer allocated by whichever branch ran
			delete samples;
			samples = NULL;
		}

		// Return new time mapped frame
		return new_frame;

	} else
		// Use original frame
		return frame;
}
545 
546 // Adjust frame number minimum value
547 long int Clip::adjust_frame_number_minimum(long int frame_number)
548 {
549  // Never return a frame number 0 or below
550  if (frame_number < 1)
551  return 1;
552  else
553  return frame_number;
554 
555 }
556 
557 // Get or generate a blank frame
558 tr1::shared_ptr<Frame> Clip::GetOrCreateFrame(long int number)
559 {
560  tr1::shared_ptr<Frame> new_frame;
561 
562  // Init some basic properties about this frame
563  int samples_in_frame = Frame::GetSamplesPerFrame(number, reader->info.fps, reader->info.sample_rate, reader->info.channels);
564 
565  try {
566  // Attempt to get a frame (but this could fail if a reader has just been closed)
567  new_frame = reader->GetFrame(number);
568 
569  // Return real frame
570  return new_frame;
571 
572  } catch (const ReaderClosed & e) {
573  // ...
574  } catch (const TooManySeeks & e) {
575  // ...
576  } catch (const OutOfBoundsFrame & e) {
577  // ...
578  }
579 
580  // Create blank frame
581  new_frame = tr1::shared_ptr<Frame>(new Frame(number, reader->info.width, reader->info.height, "#000000", samples_in_frame, reader->info.channels));
582  new_frame->SampleRate(reader->info.sample_rate);
583  new_frame->ChannelsLayout(reader->info.channel_layout);
584  return new_frame;
585 }
586 
// Generate JSON string of this object
// (serializes the full property tree built by JsonValue() into styled,
// human-readable JSON text)
string Clip::Json() {

	// Return formatted string
	return JsonValue().toStyledString();
}
593 
// Get all properties for a specific frame, serialized as a JSON string.
// Each property entry carries its value at 'requested_frame' plus UI metadata
// (type, read-only flag, min/max, interpolation of the nearest keyframe point).
string Clip::PropertiesJSON(long int requested_frame) {

	// Requested Point (used to query keyframe curves at this frame)
	Point requested_point(requested_frame, requested_frame);

	// Generate JSON properties list
	Json::Value root;
	// Scalar clip properties (ID and Duration are read-only)
	root["id"] = add_property_json("ID", 0.0, "string", Id(), false, 0, -1, -1, CONSTANT, -1, true);
	root["position"] = add_property_json("Position", Position(), "float", "", false, 0, 0, 1000 * 60 * 30, CONSTANT, -1, false);
	root["layer"] = add_property_json("Layer", Layer(), "int", "", false, 0, 0, 1000, CONSTANT, -1, false);
	root["start"] = add_property_json("Start", Start(), "float", "", false, 0, 0, 1000 * 60 * 30, CONSTANT, -1, false);
	root["end"] = add_property_json("End", End(), "float", "", false, 0, 0, 1000 * 60 * 30, CONSTANT, -1, false);
	root["duration"] = add_property_json("Duration", Duration(), "float", "", false, 0, 0, 1000 * 60 * 30, CONSTANT, -1, true);
	root["gravity"] = add_property_json("Gravity", gravity, "int", "", false, 0, -1, -1, CONSTANT, -1, false);
	root["scale"] = add_property_json("Scale", scale, "int", "", false, 0, -1, -1, CONSTANT, -1, false);
	root["anchor"] = add_property_json("Anchor", anchor, "int", "", false, 0, -1, -1, CONSTANT, -1, false);
	root["waveform"] = add_property_json("Waveform", waveform, "bool", "", false, 0, -1, -1, CONSTANT, -1, false);

	// Add gravity choices (dropdown style)
	root["gravity"]["choices"].append(add_property_choice_json("Top Left", GRAVITY_TOP_LEFT, gravity));
	root["gravity"]["choices"].append(add_property_choice_json("Top Center", GRAVITY_TOP, gravity));
	root["gravity"]["choices"].append(add_property_choice_json("Top Right", GRAVITY_TOP_RIGHT, gravity));
	root["gravity"]["choices"].append(add_property_choice_json("Left", GRAVITY_LEFT, gravity));
	root["gravity"]["choices"].append(add_property_choice_json("Center", GRAVITY_CENTER, gravity));
	root["gravity"]["choices"].append(add_property_choice_json("Right", GRAVITY_RIGHT, gravity));
	root["gravity"]["choices"].append(add_property_choice_json("Bottom Left", GRAVITY_BOTTOM_LEFT, gravity));
	root["gravity"]["choices"].append(add_property_choice_json("Bottom Center", GRAVITY_BOTTOM, gravity));
	root["gravity"]["choices"].append(add_property_choice_json("Bottom Right", GRAVITY_BOTTOM_RIGHT, gravity));

	// Add scale choices (dropdown style)
	root["scale"]["choices"].append(add_property_choice_json("Crop", SCALE_CROP, scale));
	root["scale"]["choices"].append(add_property_choice_json("Best Fit", SCALE_FIT, scale));
	root["scale"]["choices"].append(add_property_choice_json("Stretch", SCALE_STRETCH, scale));
	root["scale"]["choices"].append(add_property_choice_json("None", SCALE_NONE, scale));

	// Add anchor choices (dropdown style)
	root["anchor"]["choices"].append(add_property_choice_json("Canvas", ANCHOR_CANVAS, anchor));
	root["anchor"]["choices"].append(add_property_choice_json("Viewport", ANCHOR_VIEWPORT, anchor));

	// Add waveform choices (dropdown style)
	root["waveform"]["choices"].append(add_property_choice_json("Yes", true, waveform));
	root["waveform"]["choices"].append(add_property_choice_json("No", false, waveform));

	// Keyframes (animated properties: value at this frame + nearest-point metadata)
	root["location_x"] = add_property_json("Location X", location_x.GetValue(requested_frame), "float", "", location_x.Contains(requested_point), location_x.GetCount(), -10000, 10000, location_x.GetClosestPoint(requested_point).interpolation, location_x.GetClosestPoint(requested_point).co.X, false);
	root["location_y"] = add_property_json("Location Y", location_y.GetValue(requested_frame), "float", "", location_y.Contains(requested_point), location_y.GetCount(), -10000, 10000, location_y.GetClosestPoint(requested_point).interpolation, location_y.GetClosestPoint(requested_point).co.X, false);
	root["scale_x"] = add_property_json("Scale X", scale_x.GetValue(requested_frame), "float", "", scale_x.Contains(requested_point), scale_x.GetCount(), 0.0, 100.0, scale_x.GetClosestPoint(requested_point).interpolation, scale_x.GetClosestPoint(requested_point).co.X, false);
	root["scale_y"] = add_property_json("Scale Y", scale_y.GetValue(requested_frame), "float", "", scale_y.Contains(requested_point), scale_y.GetCount(), 0.0, 100.0, scale_y.GetClosestPoint(requested_point).interpolation, scale_y.GetClosestPoint(requested_point).co.X, false);
	root["alpha"] = add_property_json("Alpha", alpha.GetValue(requested_frame), "float", "", alpha.Contains(requested_point), alpha.GetCount(), 0.0, 1.0, alpha.GetClosestPoint(requested_point).interpolation, alpha.GetClosestPoint(requested_point).co.X, false);
	root["rotation"] = add_property_json("Rotation", rotation.GetValue(requested_frame), "float", "", rotation.Contains(requested_point), rotation.GetCount(), -10000, 10000, rotation.GetClosestPoint(requested_point).interpolation, rotation.GetClosestPoint(requested_point).co.X, false);
	root["volume"] = add_property_json("Volume", volume.GetValue(requested_frame), "float", "", volume.Contains(requested_point), volume.GetCount(), 0.0, 1.0, volume.GetClosestPoint(requested_point).interpolation, volume.GetClosestPoint(requested_point).co.X, false);
	root["time"] = add_property_json("Time", time.GetValue(requested_frame), "float", "", time.Contains(requested_point), time.GetCount(), 0.0, 1000 * 60 * 30, time.GetClosestPoint(requested_point).interpolation, time.GetClosestPoint(requested_point).co.X, false);

	// Waveform color: parent entry carries the red curve's metadata,
	// children carry per-channel values (order here is red, blue, green)
	root["wave_color"] = add_property_json("Wave Color", 0.0, "color", "", wave_color.red.Contains(requested_point), wave_color.red.GetCount(), -10000, 10000, wave_color.red.GetClosestPoint(requested_point).interpolation, wave_color.red.GetClosestPoint(requested_point).co.X, false);
	root["wave_color"]["red"] = add_property_json("Red", wave_color.red.GetValue(requested_frame), "float", "", wave_color.red.Contains(requested_point), wave_color.red.GetCount(), -10000, 10000, wave_color.red.GetClosestPoint(requested_point).interpolation, wave_color.red.GetClosestPoint(requested_point).co.X, false);
	root["wave_color"]["blue"] = add_property_json("Blue", wave_color.blue.GetValue(requested_frame), "float", "", wave_color.blue.Contains(requested_point), wave_color.blue.GetCount(), -10000, 10000, wave_color.blue.GetClosestPoint(requested_point).interpolation, wave_color.blue.GetClosestPoint(requested_point).co.X, false);
	root["wave_color"]["green"] = add_property_json("Green", wave_color.green.GetValue(requested_frame), "float", "", wave_color.green.Contains(requested_point), wave_color.green.GetCount(), -10000, 10000, wave_color.green.GetClosestPoint(requested_point).interpolation, wave_color.green.GetClosestPoint(requested_point).co.X, false);


	// Return formatted string
	return root.toStyledString();
}
657 
// Generate Json::JsonValue for this object
// (full serialization: parent ClipBase fields, all keyframe curves, attached
// effects, and — when present — the reader; inverse of SetJsonValue)
Json::Value Clip::JsonValue() {

	// Create root json object
	Json::Value root = ClipBase::JsonValue(); // get parent properties
	// Scalar settings (enums serialized as their integer values)
	root["gravity"] = gravity;
	root["scale"] = scale;
	root["anchor"] = anchor;
	root["waveform"] = waveform;
	// Keyframe curves (each serializes its own point list)
	root["scale_x"] = scale_x.JsonValue();
	root["scale_y"] = scale_y.JsonValue();
	root["location_x"] = location_x.JsonValue();
	root["location_y"] = location_y.JsonValue();
	root["alpha"] = alpha.JsonValue();
	root["rotation"] = rotation.JsonValue();
	root["time"] = time.JsonValue();
	root["volume"] = volume.JsonValue();
	root["wave_color"] = wave_color.JsonValue();
	root["crop_width"] = crop_width.JsonValue();
	root["crop_height"] = crop_height.JsonValue();
	root["crop_x"] = crop_x.JsonValue();
	root["crop_y"] = crop_y.JsonValue();
	root["shear_x"] = shear_x.JsonValue();
	root["shear_y"] = shear_y.JsonValue();
	root["perspective_c1_x"] = perspective_c1_x.JsonValue();
	root["perspective_c1_y"] = perspective_c1_y.JsonValue();
	root["perspective_c2_x"] = perspective_c2_x.JsonValue();
	root["perspective_c2_y"] = perspective_c2_y.JsonValue();
	root["perspective_c3_x"] = perspective_c3_x.JsonValue();
	root["perspective_c3_y"] = perspective_c3_y.JsonValue();
	root["perspective_c4_x"] = perspective_c4_x.JsonValue();
	root["perspective_c4_y"] = perspective_c4_y.JsonValue();

	// Add array of effects
	root["effects"] = Json::Value(Json::arrayValue);

	// loop through effects
	list<EffectBase*>::iterator effect_itr;
	for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
	{
		// Get clip object from the iterator
		EffectBase *existing_effect = (*effect_itr);
		root["effects"].append(existing_effect->JsonValue());
	}

	// Serialize the reader only when one is attached
	if (reader)
		root["reader"] = reader->JsonValue();

	// return JsonValue
	return root;
}
709 
710 // Load JSON string into this object
711 void Clip::SetJson(string value) throw(InvalidJSON) {
712 
713  // Parse JSON string into JSON objects
714  Json::Value root;
715  Json::Reader reader;
716  bool success = reader.parse( value, root );
717  if (!success)
718  // Raise exception
719  throw InvalidJSON("JSON could not be parsed (or is invalid)", "");
720 
721  try
722  {
723  // Set all values that match
724  SetJsonValue(root);
725  }
726  catch (exception e)
727  {
728  // Error parsing JSON (or missing keys)
729  throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", "");
730  }
731 }
732 
733 // Load Json::JsonValue into this object
734 void Clip::SetJsonValue(Json::Value root) {
735 
736  // Set parent data
738 
739  // Set data from Json (if key is found)
740  if (!root["gravity"].isNull())
741  gravity = (GravityType) root["gravity"].asInt();
742  if (!root["scale"].isNull())
743  scale = (ScaleType) root["scale"].asInt();
744  if (!root["anchor"].isNull())
745  anchor = (AnchorType) root["anchor"].asInt();
746  if (!root["waveform"].isNull())
747  waveform = root["waveform"].asBool();
748  if (!root["scale_x"].isNull())
749  scale_x.SetJsonValue(root["scale_x"]);
750  if (!root["scale_y"].isNull())
751  scale_y.SetJsonValue(root["scale_y"]);
752  if (!root["location_x"].isNull())
753  location_x.SetJsonValue(root["location_x"]);
754  if (!root["location_y"].isNull())
755  location_y.SetJsonValue(root["location_y"]);
756  if (!root["alpha"].isNull())
757  alpha.SetJsonValue(root["alpha"]);
758  if (!root["rotation"].isNull())
759  rotation.SetJsonValue(root["rotation"]);
760  if (!root["time"].isNull())
761  time.SetJsonValue(root["time"]);
762  if (!root["volume"].isNull())
763  volume.SetJsonValue(root["volume"]);
764  if (!root["wave_color"].isNull())
765  wave_color.SetJsonValue(root["wave_color"]);
766  if (!root["crop_width"].isNull())
767  crop_width.SetJsonValue(root["crop_width"]);
768  if (!root["crop_height"].isNull())
769  crop_height.SetJsonValue(root["crop_height"]);
770  if (!root["crop_x"].isNull())
771  crop_x.SetJsonValue(root["crop_x"]);
772  if (!root["crop_y"].isNull())
773  crop_y.SetJsonValue(root["crop_y"]);
774  if (!root["shear_x"].isNull())
775  shear_x.SetJsonValue(root["shear_x"]);
776  if (!root["shear_y"].isNull())
777  shear_y.SetJsonValue(root["shear_y"]);
778  if (!root["perspective_c1_x"].isNull())
779  perspective_c1_x.SetJsonValue(root["perspective_c1_x"]);
780  if (!root["perspective_c1_y"].isNull())
781  perspective_c1_y.SetJsonValue(root["perspective_c1_y"]);
782  if (!root["perspective_c2_x"].isNull())
783  perspective_c2_x.SetJsonValue(root["perspective_c2_x"]);
784  if (!root["perspective_c2_y"].isNull())
785  perspective_c2_y.SetJsonValue(root["perspective_c2_y"]);
786  if (!root["perspective_c3_x"].isNull())
787  perspective_c3_x.SetJsonValue(root["perspective_c3_x"]);
788  if (!root["perspective_c3_y"].isNull())
789  perspective_c3_y.SetJsonValue(root["perspective_c3_y"]);
790  if (!root["perspective_c4_x"].isNull())
791  perspective_c4_x.SetJsonValue(root["perspective_c4_x"]);
792  if (!root["perspective_c4_y"].isNull())
793  perspective_c4_y.SetJsonValue(root["perspective_c4_y"]);
794  if (!root["effects"].isNull()) {
795 
796  // Clear existing effects
797  effects.clear();
798 
799  // loop through effects
800  for (int x = 0; x < root["effects"].size(); x++) {
801  // Get each effect
802  Json::Value existing_effect = root["effects"][x];
803 
804  // Create Effect
805  EffectBase *e = NULL;
806 
807  if (!existing_effect["type"].isNull())
808  // Init the matching effect object
809  if (existing_effect["type"].asString() == "Blur")
810  e = new Blur();
811 
812  else if (existing_effect["type"].asString() == "Brightness")
813  e = new Brightness();
814 
815  else if (existing_effect["type"].asString() == "ChromaKey")
816  e = new ChromaKey();
817 
818  else if (existing_effect["type"].asString() == "Deinterlace")
819  e = new Deinterlace();
820 
821  else if (existing_effect["type"].asString() == "Mask")
822  e = new Mask();
823 
824  else if (existing_effect["type"].asString() == "Negate")
825  e = new Negate();
826 
827  else if (existing_effect["type"].asString() == "Saturation")
828  e = new Saturation();
829 
830  // Load Json into Effect
831  e->SetJsonValue(existing_effect);
832 
833  // Add Effect to Timeline
834  AddEffect(e);
835  }
836  }
837  if (!root["reader"].isNull()) // does Json contain a reader?
838  {
839  if (!root["reader"]["type"].isNull()) // does the reader Json contain a 'type'?
840  {
841  // Close previous reader (if any)
842  bool already_open = false;
843  if (reader)
844  {
845  // Track if reader was open
846  already_open = reader->IsOpen();
847 
848  // Close and delete existing reader (if any)
849  reader->Close();
850  delete reader;
851  reader = NULL;
852  }
853 
854  // Create new reader (and load properties)
855  string type = root["reader"]["type"].asString();
856 
857  if (type == "FFmpegReader") {
858 
859  // Create new reader
860  reader = new FFmpegReader(root["reader"]["path"].asString());
861  reader->SetJsonValue(root["reader"]);
862 
863  } else if (type == "QtImageReader") {
864 
865  // Create new reader
866  reader = new QtImageReader(root["reader"]["path"].asString());
867  reader->SetJsonValue(root["reader"]);
868 
869 #ifdef USE_IMAGEMAGICK
870  } else if (type == "ImageReader") {
871 
872  // Create new reader
873  reader = new ImageReader(root["reader"]["path"].asString());
874  reader->SetJsonValue(root["reader"]);
875 
876  } else if (type == "TextReader") {
877 
878  // Create new reader
879  reader = new TextReader();
880  reader->SetJsonValue(root["reader"]);
881 #endif
882 
883  } else if (type == "ChunkReader") {
884 
885  // Create new reader
886  reader = new ChunkReader(root["reader"]["path"].asString(), (ChunkVersion) root["reader"]["chunk_version"].asInt());
887  reader->SetJsonValue(root["reader"]);
888 
889  } else if (type == "DummyReader") {
890 
891  // Create new reader
892  reader = new DummyReader();
893  reader->SetJsonValue(root["reader"]);
894  }
895 
896  // mark as managed reader
897  if (reader)
898  manage_reader = true;
899 
900  // Re-Open reader (if needed)
901  if (already_open)
902  reader->Open();
903 
904  }
905  }
906 }
907 
908 // Sort effects by order
909 void Clip::sort_effects()
910 {
911  // sort clips
912  effects.sort(CompareClipEffects());
913 }
914 
915 // Add an effect to the clip
917 {
918  // Add effect to list
919  effects.push_back(effect);
920 
921  // Sort effects
922  sort_effects();
923 }
924 
925 // Remove an effect from the clip
927 {
928  effects.remove(effect);
929 }
930 
931 // Apply effects to the source frame (if any)
932 tr1::shared_ptr<Frame> Clip::apply_effects(tr1::shared_ptr<Frame> frame)
933 {
934  // Find Effects at this position and layer
935  list<EffectBase*>::iterator effect_itr;
936  for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
937  {
938  // Get clip object from the iterator
939  EffectBase *effect = (*effect_itr);
940 
941  // Apply the effect to this frame
942  frame = effect->GetFrame(frame, frame->number);
943 
944  } // end effect loop
945 
946  // Return modified frame
947  return frame;
948 }
This class reads a special chunk-formatted file, which can be easily shared in a distributed environm...
Definition: ChunkReader.h:104
Keyframe perspective_c3_x
Curves representing X for coordinate 3.
Definition: Clip.h:247
void Close()
Close the internal reader.
Definition: Clip.cpp:213
Json::Value JsonValue()
Generate Json::JsonValue for this object.
Definition: Color.cpp:92
Keyframe scale_y
Curve representing the vertical scaling in percent (0 to 100)
Definition: Clip.h:218
Keyframe perspective_c4_x
Curves representing X for coordinate 4.
Definition: Clip.h:249
This abstract class is the base class, used by all effects in libopenshot.
Definition: EffectBase.h:66
Align clip to the right of its parent (middle aligned)
Definition: Enums.h:42
Keyframe perspective_c1_x
Curves representing X for coordinate 1.
Definition: Clip.h:243
Keyframe green
Curve representing the green value (0 - 255)
Definition: Color.h:46
Keyframe perspective_c2_x
Curves representing X for coordinate 2.
Definition: Clip.h:245
Keyframe crop_x
Curve representing X offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%)
Definition: Clip.h:237
This class adjusts the blur of an image, and can be animated with openshot::Keyframe curves over time...
Definition: Blur.h:62
string previous_properties
This string contains the previous JSON properties.
Definition: ClipBase.h:59
string PropertiesJSON(long int requested_frame)
Definition: Clip.cpp:595
float End()
Override End() method.
Definition: Clip.cpp:225
Keyframe perspective_c3_y
Curves representing Y for coordinate 3.
Definition: Clip.h:248
tr1::shared_ptr< Frame > GetFrame(long int requested_frame)
Get an openshot::Frame object for a specific frame number of this timeline.
Definition: Clip.cpp:247
Align clip to the bottom right of its parent.
Definition: Enums.h:45
Json::Value JsonValue()
Generate Json::JsonValue for this object.
Definition: KeyFrame.cpp:319
ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
Definition: ReaderBase.h:83
GravityType gravity
The gravity of a clip determines where it snaps to its parent.
Definition: Clip.h:150
int width
The width of the video (in pixels)
Definition: ReaderBase.h:67
Keyframe volume
Curve representing the volume (0 to 1)
Definition: Clip.h:228
Do not scale the clip.
Definition: Enums.h:54
This class represents a single frame of video (i.e. image & audio data)
Definition: Frame.h:114
This class is used as a simple, dummy reader, which always returns a blank frame. ...
Definition: DummyReader.h:53
float ToFloat()
Return this fraction as a float (i.e. 1/2 = 0.5)
Definition: Fraction.cpp:41
This class uses the ImageMagick++ libraries, to remove (i.e. key out) a color (i.e. greenscreen)
Definition: ChromaKey.h:54
InterpolationType interpolation
This is the interpolation mode.
Definition: Point.h:86
Keyframe red
Curve representing the red value (0 - 255)
Definition: Color.h:45
float duration
Length of time (in seconds)
Definition: ReaderBase.h:64
bool Contains(Point p)
Does this keyframe contain a specific point.
Definition: KeyFrame.cpp:175
Scale the clip until both height and width fill the canvas (cropping the overlap) ...
Definition: Enums.h:51
Keyframe time
Curve representing the frames over time to play (used for speed and direction of video) ...
Definition: Clip.h:227
A Point is the basic building block of a key-frame curve.
Definition: Point.h:81
ScaleType
This enumeration determines how clips are scaled to fit their parent container.
Definition: Enums.h:49
void AddEffect(EffectBase *effect)
Add an effect to the clip.
Definition: Clip.cpp:916
virtual void Close()=0
Close the reader (and any resources it was consuming)
This abstract class is the base class, used by all readers in libopenshot.
Definition: ReaderBase.h:95
int Layer()
Get layer of clip on timeline (lower number is covered by higher numbers)
Definition: ClipBase.h:78
Exception when a reader is closed, and a frame is requested.
Definition: Exceptions.h:234
~Clip()
Destructor.
Definition: Clip.cpp:163
virtual tr1::shared_ptr< Frame > GetFrame(tr1::shared_ptr< Frame > frame, long int frame_number)=0
This method is required for all derived classes of EffectBase, and returns a modified openshot::Frame...
Color wave_color
Curve representing the color of the audio wave form.
Definition: Clip.h:231
Align clip to the top right of its parent.
Definition: Enums.h:39
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
Definition: EffectBase.cpp:69
Align clip to the bottom left of its parent.
Definition: Enums.h:43
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Definition: Clip.cpp:734
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Definition: KeyFrame.cpp:360
Keyframe crop_width
Curve representing width in percent (0.0=0%, 1.0=100%)
Definition: Clip.h:235
This class uses the ImageMagick++ libraries, to open image files, and return openshot::Frame objects ...
Definition: ImageReader.h:67
Keyframe location_x
Curve representing the relative X position in percent based on the gravity (-100 to 100) ...
Definition: Clip.h:219
float GetValue(long int index)
Get the value at a specific index.
Definition: KeyFrame.cpp:224
Keyframe location_y
Curve representing the relative Y position in percent based on the gravity (-100 to 100) ...
Definition: Clip.h:220
This class uses the FFmpeg libraries, to open video files and audio files, and return openshot::Frame...
Definition: FFmpegReader.h:92
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
Definition: ClipBase.cpp:49
Keyframe perspective_c1_y
Curves representing Y for coordinate 1.
Definition: Clip.h:244
Keyframe blue
Curve representing the blue value (0 - 255)
Definition: Color.h:47
Keyframe crop_y
Curve representing Y offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%)
Definition: Clip.h:238
Keyframe shear_x
Curve representing X shear angle in degrees (-45.0=left, 45.0=right)
Definition: Clip.h:241
ScaleType scale
The scale determines how a clip should be resized to fit its parent.
Definition: Clip.h:151
int height
The height of the video (in pixels)
Definition: ReaderBase.h:66
Align clip to the bottom center of its parent.
Definition: Enums.h:44
Align clip to the top left of its parent.
Definition: Enums.h:37
Json::Value add_property_choice_json(string name, int value, int selected_value)
Generate JSON choice for a property (dropdown properties)
Definition: ClipBase.cpp:86
Json::Value add_property_json(string name, float value, string type, string memo, bool contains_point, int number_of_points, float min_value, float max_value, InterpolationType intepolation, int closest_point_x, bool readonly)
Generate JSON for a property.
Definition: ClipBase.cpp:65
Exception for files that can not be found or opened.
Definition: Exceptions.h:132
string Id()
Get basic properties.
Definition: ClipBase.h:76
This class uses the ImageMagick++ libraries, to negate image (i.e. negative)
Definition: Negate.h:53
float Position()
Get position on timeline (in seconds)
Definition: ClipBase.h:77
void SetJson(string value)
Load JSON string into this object.
Definition: Clip.cpp:711
Align clip to the left of its parent (middle aligned)
Definition: Enums.h:40
This class adjusts the saturation of color on a frame's image.
Definition: Saturation.h:59
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
Definition: ReaderBase.cpp:153
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
Definition: EffectBase.cpp:103
float X
The X value of the coordinate (usually representing the frame #)
Definition: Coordinate.h:61
ChunkVersion
This enumeration allows the user to choose which version of the chunk they would like (low...
Definition: ChunkReader.h:75
Keyframe rotation
Curve representing the rotation (0 to 360)
Definition: Clip.h:224
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
Definition: ReaderBase.cpp:202
Scale the clip until both height and width fill the canvas (distort to fit)
Definition: Enums.h:53
This class uses the ImageMagick++ libraries, to apply alpha (or transparency) masks to any frame...
Definition: Mask.h:63
vector< Point > Points
Vector of all Points.
Definition: KeyFrame.h:92
Point GetClosestPoint(Point p)
Get current point (or closest point) from the X coordinate (i.e. the frame number) ...
Definition: KeyFrame.cpp:193
ReaderInfo info
Information about the current media file.
Definition: ReaderBase.h:120
Keyframe shear_y
Curve representing Y shear angle in degrees (-45.0=down, 45.0=up)
Definition: Clip.h:242
Clip()
Default Constructor.
Definition: Clip.cpp:92
Anchor the clip to the viewport (which can be moved / animated around the canvas) ...
Definition: Enums.h:61
Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
Definition: ReaderBase.h:69
AnchorType
This enumeration determines what parent a clip should be aligned to.
Definition: Enums.h:58
float end
The position in seconds to end playing (used to trim the ending of a clip)
Definition: ClipBase.h:58
This class adjusts the brightness and contrast of an image, and can be animated with openshot::Keyfra...
Definition: Brightness.h:59
Exception for frames that are out of bounds.
Definition: Exceptions.h:202
void Open()
Open the internal reader.
Definition: Clip.cpp:196
This class represents a color (used on the timeline and clips)
Definition: Color.h:42
Align clip to the center of its parent (middle aligned)
Definition: Enums.h:41
GravityType crop_gravity
Cropping needs to have a gravity to determine what side we are cropping.
Definition: Clip.h:234
void RemoveEffect(EffectBase *effect)
Remove an effect from the clip.
Definition: Clip.cpp:926
long int GetCount()
Get the number of points (i.e. # of points)
Definition: KeyFrame.cpp:453
virtual tr1::shared_ptr< Frame > GetFrame(long int number)=0
Coordinate co
This is the primary coordinate.
Definition: Point.h:83
AnchorType anchor
The anchor determines what parent a clip should snap to.
Definition: Clip.h:152
This class uses the ImageMagick++ libraries, to de-interlace the image, which removes the EVEN or ODD...
Definition: Deinterlace.h:56
Exception for invalid JSON.
Definition: Exceptions.h:152
Keyframe alpha
Curve representing the alpha (1 to 0)
Definition: Clip.h:223
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Definition: Color.cpp:129
Keyframe scale_x
Curve representing the horizontal scaling in percent (0 to 100)
Definition: Clip.h:217
Keyframe perspective_c2_y
Curves representing Y for coordinate 2.
Definition: Clip.h:246
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
Definition: ClipBase.cpp:33
This class uses the ImageMagick++ libraries, to create frames with "Text", and return openshot::Frame...
Definition: TextReader.h:81
This class uses the Qt library, to open image files, and return openshot::Frame objects containing th...
Definition: QtImageReader.h:69
Align clip to the top center of its parent.
Definition: Enums.h:38
int channels
The number of audio channels used in the audio stream.
Definition: ReaderBase.h:82
A Keyframe is a collection of Point instances, which is used to vary a number or property over time...
Definition: KeyFrame.h:64
Scale the clip until either height or width fills the canvas (with no cropping)
Definition: Enums.h:52
Keyframe perspective_c4_y
Curves representing Y for coordinate 4.
Definition: Clip.h:250
int GetSamplesPerFrame(Fraction fps, int sample_rate, int channels)
Calculate the # of samples per video frame (for the current frame number)
Definition: Frame.cpp:482
Json::Value JsonValue()
Generate Json::JsonValue for this object.
Definition: Clip.cpp:659
float Duration()
Get the length of this clip (in seconds)
Definition: ClipBase.h:81
long int GetLength()
Definition: KeyFrame.cpp:443
GravityType
This enumeration determines how clips are aligned to their parent container.
Definition: Enums.h:35
Anchor the clip to the canvas.
Definition: Enums.h:60
Constant curves jump from their previous position to a new one (with no interpolation).
Definition: Point.h:48
string Json()
Get and Set JSON methods.
Definition: Clip.cpp:588
float Start()
Get start position (in seconds) of clip (trim start of video)
Definition: ClipBase.h:79
virtual void Open()=0
Open the reader (and start consuming resources, such as images or video files)
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
Definition: ReaderBase.h:81
Exception when too many seek attempts happen.
Definition: Exceptions.h:254
ReaderBase * Reader()
Get the current reader.
Definition: Clip.cpp:186
virtual bool IsOpen()=0
A thread safe version of GetFrame.
This class is used to resample audio data for many sequential frames.
Keyframe crop_height
Curve representing height in percent (0.0=0%, 1.0=100%)
Definition: Clip.h:236