
Commit 13b5454

peter-targett authored and rameshm committed
Add effect and marker initialization to Clip (#1808)
* Add effect and marker initialization to Clip

  Extend Clip to allow effects and markers to be set.

Signed-off-by: Peter Targett <[email protected]>
1 parent 383cfda commit 13b5454
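
For context, here is a minimal Python sketch of what this change enables: effects and markers can now be supplied directly to the Clip constructor instead of being appended after construction. It assumes an OpenTimelineIO build that includes this commit; the effect, marker, and file names are illustrative only.

import opentimelineio as otio

# A time warp effect and a marker to attach at construction time.
warp = otio.schema.LinearTimeWarp(name="slow_down", time_scalar=0.5)
note = otio.schema.Marker(name="note", color=otio.schema.MarkerColor.GREEN)

# Previously effects/markers had to be appended after the clip was created;
# with this change they can be passed straight to the constructor.
clip = otio.schema.Clip(
    name="example_clip",
    media_reference=otio.schema.ExternalReference(target_url="/var/tmp/test.mov"),
    effects=[warp],
    markers=[note],
)

print(clip.effects[0].time_scalar)  # 0.5
print(clip.markers[0].color)        # GREEN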

6 files changed: +73 −5 lines changed

src/opentimelineio/clip.cpp (+3 −1)

@@ -13,8 +13,10 @@ Clip::Clip(
     MediaReference* media_reference,
     std::optional<TimeRange> const& source_range,
     AnyDictionary const& metadata,
+    std::vector<Effect*> const& effects,
+    std::vector<Marker*> const& markers,
     std::string const& active_media_reference_key)
-    : Parent{ name, source_range, metadata }
+    : Parent{ name, source_range, metadata, effects, markers }
     , _active_media_reference_key(active_media_reference_key)
 {
     set_media_reference(media_reference);

src/opentimelineio/clip.h (+2)

@@ -27,6 +27,8 @@ class Clip : public Item
         MediaReference* media_reference = nullptr,
         std::optional<TimeRange> const& source_range = std::nullopt,
         AnyDictionary const& metadata = AnyDictionary(),
+        std::vector<Effect*> const& effects = std::vector<Effect*>(),
+        std::vector<Marker*> const& markers = std::vector<Marker*>(),
         std::string const& active_media_reference_key = default_media_key);
 
     void set_media_reference(MediaReference* media_reference);

src/py-opentimelineio/opentimelineio-bindings/otio_serializableObjects.cpp (+8 −1)

@@ -425,13 +425,20 @@ Contains a :class:`.MediaReference` and a trim on that media reference.
 )docstring")
         .def(py::init([](std::string name, MediaReference* media_reference,
                          std::optional<TimeRange> source_range, py::object metadata,
+                         std::optional<std::vector<Effect*>> effects,
+                         std::optional<std::vector<Marker*>> markers,
                          const std::string& active_media_reference) {
-                 return new Clip(name, media_reference, source_range, py_to_any_dictionary(metadata), active_media_reference);
+                 return new Clip(name, media_reference, source_range, py_to_any_dictionary(metadata),
+                                 vector_or_default<Effect>(effects),
+                                 vector_or_default<Marker>(markers),
+                                 active_media_reference);
              }),
              py::arg_v("name"_a = std::string()),
              "media_reference"_a = nullptr,
              "source_range"_a = std::nullopt,
              py::arg_v("metadata"_a = py::none()),
+             "effects"_a = py::none(),
+             "markers"_a = py::none(),
              "active_media_reference"_a = std::string(Clip::default_media_key))
         .def_property_readonly_static("DEFAULT_MEDIA_KEY",[](py::object /* self */) {
             return Clip::default_media_key;
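
A usage note on the binding above: the new effects and markers keyword arguments default to py::none(), and vector_or_default turns an omitted argument into an empty vector, so existing Python call sites keep working unchanged. A small sketch of the expected behaviour (assuming a build that includes this commit):

import opentimelineio as otio

# Omitting the new arguments still constructs a clip with empty
# effect and marker lists, matching the previous behaviour.
clip = otio.schema.Clip(name="no_extras")
assert len(clip.effects) == 0
assert len(clip.markers) == 0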

src/py-opentimelineio/opentimelineio/schema/clip.py (+9 −3)

@@ -7,11 +7,13 @@
 
 @add_method(_otio.Clip)
 def __str__(self):
-    return 'Clip("{}", {}, {}, {})'.format(
+    return 'Clip("{}", {}, {}, {}, {}, {})'.format(
         self.name,
         self.media_reference,
         self.source_range,
-        self.metadata
+        self.metadata,
+        self.effects,
+        self.markers
     )
 
 
@@ -22,12 +24,16 @@ def __repr__(self):
         'name={}, '
         'media_reference={}, '
         'source_range={}, '
-        'metadata={}'
+        'metadata={}, '
+        'effects={}, '
+        'markers={}'
         ')'.format(
             repr(self.name),
             repr(self.media_reference),
             repr(self.source_range),
             repr(self.metadata),
+            repr(self.effects),
+            repr(self.markers)
         )
     )
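
With the updated templates above, str() and repr() on a clip now report its effects and markers after the metadata field. A rough illustration (assuming a build that includes this commit; the exact output comes from the format strings in this file):

import opentimelineio as otio

clip = otio.schema.Clip(
    name="shot_01",
    markers=[otio.schema.Marker(name="note")],
)

# Both representations now end with the effects and markers lists.
print(str(clip))
print(repr(clip))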

tests/test_clip.cpp (+33)

@@ -9,6 +9,9 @@
 #include <opentimelineio/missingReference.h>
 #include <opentimelineio/serializableCollection.h>
 #include <opentimelineio/timeline.h>
+#include <opentimelineio/freezeFrame.h>
+#include <opentimelineio/linearTimeWarp.h>
+#include <opentimelineio/marker.h>
 
 #include <iostream>
 
@@ -150,6 +153,22 @@ main(int argc, char** argv)
     tests.add_test("test_clip_media_representation", [] {
         using namespace otio;
 
+        static constexpr auto time_scalar = 1.5;
+
+        SerializableObject::Retainer<LinearTimeWarp> ltw(new LinearTimeWarp(
+            LinearTimeWarp::Schema::name,
+            LinearTimeWarp::Schema::name,
+            time_scalar));
+        std::vector<Effect*> effects = { ltw };
+
+        static constexpr auto red = Marker::Color::red;
+
+        SerializableObject::Retainer<Marker> m(new Marker(
+            LinearTimeWarp::Schema::name,
+            TimeRange(),
+            red));
+        std::vector<Marker*> markers = { m };
+
         static constexpr auto high_quality = "high_quality";
         static constexpr auto proxy_quality = "proxy_quality";
 
@@ -161,6 +180,8 @@ main(int argc, char** argv)
             media,
             std::nullopt,
             AnyDictionary(),
+            effects,
+            markers,
             high_quality));
 
         assertEqual(clip->active_media_reference_key().c_str(), high_quality);
@@ -225,6 +246,18 @@ main(int argc, char** argv)
         // should work
         clip->set_media_references({ { "cloud", ref4 } }, "cloud");
         assertEqual(clip->media_reference(), ref4.value);
+
+        // basic test for an effect
+        assertEqual(clip->effects().size(), effects.size());
+        auto effect = dynamic_cast<OTIO_NS::LinearTimeWarp*>(
+            clip->effects().front().value);
+        assertEqual(effect->time_scalar(), time_scalar);
+
+        // basic test for a marker
+        assertEqual(clip->markers().size(), markers.size());
+        auto marker = dynamic_cast<OTIO_NS::Marker*>(
+            clip->markers().front().value);
+        assertEqual(marker->color().c_str(), red);
     });
 
     tests.run(argc, argv);

tests/test_clip.py (+18)

@@ -21,17 +21,35 @@ def test_cons(self):
             target_url="/var/tmp/test.mov"
         )
 
+        ltw = otio.schema.LinearTimeWarp(
+            name="linear_time_warp",
+            time_scalar=1.5)
+        effects = []
+        effects.append(ltw)
+
+        red = otio.schema.MarkerColor.RED
+        m = otio.schema.Marker(
+            name="red_marker", color=red)
+        markers = []
+        markers.append(m)
+
         cl = otio.schema.Clip(
             name=name,
             media_reference=mr,
             source_range=tr,
+            effects=effects,
+            markers=markers,
             # transition_in
             # transition_out
         )
         self.assertEqual(cl.name, name)
         self.assertEqual(cl.source_range, tr)
         self.assertIsOTIOEquivalentTo(cl.media_reference, mr)
 
+        self.assertTrue(isinstance(cl.effects[0], otio.schema.LinearTimeWarp))
+
+        self.assertEqual(cl.markers[0].color, red)
+
         encoded = otio.adapters.otio_json.write_to_string(cl)
         decoded = otio.adapters.otio_json.read_from_string(encoded)
         self.assertIsOTIOEquivalentTo(cl, decoded)
