This repository was archived by the owner on Aug 30, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathvideo_presentation_service.dart
More file actions
107 lines (96 loc) · 5.18 KB
/
video_presentation_service.dart
File metadata and controls
107 lines (96 loc) · 5.18 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
import 'dart:async';
import '../dolbyio_comms_sdk_flutter_platform_interface.dart';
import '../dolbyio_comms_sdk_native_events.dart';
import '../mapper/mapper.dart';
import 'models/enums.dart';
import 'models/video_presentation.dart';
/// The VideoPresentationService allows sharing videos during a conference. To present a video, a conference participant needs to provide the URL of the video file. We recommend sharing files in the MPEG-4 Part 14 or MP4 video formats.
///
/// **The video presentation workflow:**
///
/// 1. The presenter calls the [start] method to start the video presentation. This method automatically starts playing the shared video file.
///
/// 2. All participants receive the [VideoPresentationEventNames.videoPresentationStarted] event.
///
/// 3. The presenter can call the [pause] method to pause the shared video. In such a situation, all conference participants receive the [VideoPresentationEventNames.videoPresentationPaused] event.
///
/// 4. The presenter can call the [play] method to resume the paused video. In this situation, all conference participants receive the [VideoPresentationEventNames.videoPresentationPlayed] event.
///
/// 5. The presenter can call the [seek] method to navigate to a specific section of the shared video. This method applies the provided timestamp. After calling the seek method, all conference participants receive the [VideoPresentationEventNames.videoPresentationSought] event and watch the video from the specified timestamp.
///
/// 6. The presenter calls the [stop] method to stop the video presentation.
///
/// {@category Services}
class VideoPresentationService {
  /// @internal
  final _methodChannel = DolbyioCommsSdkFlutterPlatform.createMethodChannel(
      "video_presentation_service");

  /// @internal
  late final _nativeEventsReceiver = DolbyioCommsSdkNativeEventsReceiver<
      VideoPresentationEventNames>.forModuleNamed("video_presentation_service");

  /// Returns information about the current video presentation, or `null` when
  /// no video is currently being presented.
  Future<VideoPresentation?> currentVideo() async {
    final result = await _methodChannel
        .invokeMethod<Map<Object?, Object?>>("currentVideo");
    return result != null ? VideoPresentationMapper.fromMap(result) : null;
  }

  /// Starts a video presentation. The [url] parameter refers to a video file
  /// that the local participant would like to share.
  Future<void> start(String url) async {
    await _methodChannel.invokeMethod("start", {"url": url});
  }

  /// Stops a video presentation.
  Future<void> stop() async {
    await _methodChannel.invokeMethod("stop");
  }

  /// Resumes the paused video.
  Future<void> play() async {
    await _methodChannel.invokeMethod("play");
  }

  /// Pauses a video presentation at a certain [timestamp], in milliseconds.
  Future<void> pause(num timestamp) async {
    await _methodChannel.invokeMethod<void>("pause", {"timestamp": timestamp});
  }

  /// Allows a presenter to navigate to a specific section of the shared video file.
  /// The [timestamp] parameter refers to the timestamp at which the video should start, in milliseconds.
  Future<void> seek(num timestamp) async {
    await _methodChannel.invokeMethod<void>("seek", {"timestamp": timestamp});
  }

  /// Provides the current state of a video presentation.
  ///
  /// Throws a [StateError] if the native layer does not report a state.
  Future<VideoPresentationState> state() async {
    final result = await _methodChannel.invokeMethod<String>("state");
    // The original code returned `Future.value(null)` here for a non-nullable
    // type, which surfaced as an opaque TypeError at runtime. Fail with an
    // explicit, diagnosable error instead.
    final decoded =
        result != null ? VideoPresentationState.decode(result) : null;
    if (decoded == null) {
      throw StateError(
          "Unable to retrieve the video presentation state from the native layer.");
    }
    return decoded;
  }

  /// Returns a [Stream] of the [VideoPresentationEventNames.videoPresentationStarted], [VideoPresentationEventNames.videoPresentationPaused], [VideoPresentationEventNames.videoPresentationPlayed], and [VideoPresentationEventNames.videoPresentationSought] events. By subscribing to the returned stream you will be notified about status changes of video presentations.
  Stream<Event<VideoPresentationEventNames, VideoPresentation>>
      onVideoPresentationChange() {
    const events = [
      VideoPresentationEventNames.videoPresentationStarted,
      VideoPresentationEventNames.videoPresentationPaused,
      VideoPresentationEventNames.videoPresentationPlayed,
      VideoPresentationEventNames.videoPresentationSought,
    ];
    return _nativeEventsReceiver.addListener(events).map((map) {
      // Native events arrive as {"key": <event name>, "body": <payload map>}.
      final event = map as Map<Object?, Object?>;
      final key = VideoPresentationEventNames.valueOf(event["key"] as String);
      final data = event["body"] as Map<Object?, Object?>;
      return Event(key, VideoPresentationMapper.fromMap(data));
    });
  }

  /// Returns a [Stream] of the [VideoPresentationEventNames.videoPresentationStopped] events. By subscribing to the returned stream you will be notified each time a video presentation ends.
  Stream<Event<VideoPresentationEventNames, void>>
      onVideoPresentationStopped() {
    return _nativeEventsReceiver.addListener(
        [VideoPresentationEventNames.videoPresentationStopped]).map((map) {
      final event = map as Map<Object?, Object?>;
      final key = VideoPresentationEventNames.valueOf(event["key"] as String);
      // The stopped event carries no payload.
      return Event(key, null);
    });
  }
}