/**
* @file html-media-source.js
*/
import window from 'global/window';
import document from 'global/document';
import videojs from 'video.js';
import VirtualSourceBuffer from './virtual-source-buffer';
import {durationOfVideo} from './add-text-track-data';
import {
isAudioCodec,
isVideoCodec,
parseContentType,
translateLegacyCodecs
} from './codec-utils';
/**
 * Our MediaSource implementation in HTML, which mimics the native
 * MediaSource wherever possible.
*
* @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
* @class HtmlMediaSource
* @extends videojs.EventTarget
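 *
 * @example
 * // a minimal usage sketch (illustrative, not part of this module);
 * // the polyfill is normally reached through the videojs.MediaSource
 * // export of videojs-contrib-media-sources
 * let mediaSource = new videojs.MediaSource();
 * let video = document.querySelector('video');
 *
 * video.src = videojs.URL.createObjectURL(mediaSource);
 * mediaSource.addEventListener('sourceopen', function() {
 *   let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
 * });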
*/
export default class HtmlMediaSource extends videojs.EventTarget {
constructor() {
super();
let property;
this.nativeMediaSource_ = new window.MediaSource();
// delegate to the native MediaSource's methods by default
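    // (e.g. endOfStream() and removeSourceBuffer() pass straight through to
    // the native object)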
for (property in this.nativeMediaSource_) {
if (!(property in HtmlMediaSource.prototype) &&
typeof this.nativeMediaSource_[property] === 'function') {
this[property] = this.nativeMediaSource_[property].bind(this.nativeMediaSource_);
}
}
// emulate `duration` and `seekable` until seeking can be
// handled uniformly for live streams
// see https://github.com/w3c/media-source/issues/5
this.duration_ = NaN;
Object.defineProperty(this, 'duration', {
get() {
if (this.duration_ === Infinity) {
return this.duration_;
}
return this.nativeMediaSource_.duration;
},
      set(duration) {
        this.duration_ = duration;
        if (duration !== Infinity) {
          this.nativeMediaSource_.duration = duration;
        }
      }
});
Object.defineProperty(this, 'seekable', {
get() {
if (this.duration_ === Infinity) {
return videojs.createTimeRanges([[0, this.nativeMediaSource_.duration]]);
}
return this.nativeMediaSource_.seekable;
}
});
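    // For example (illustrative): during live playback `duration` is set to
    // Infinity while the native duration tracks the highest appended end
    // time, so `seekable` reports [0, nativeDuration] rather than deferring
    // to the native implementation.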
Object.defineProperty(this, 'readyState', {
get() {
return this.nativeMediaSource_.readyState;
}
});
Object.defineProperty(this, 'activeSourceBuffers', {
get() {
return this.activeSourceBuffers_;
}
});
// the list of virtual and native SourceBuffers created by this
// MediaSource
this.sourceBuffers = [];
this.activeSourceBuffers_ = [];
/**
     * update the list of active source buffers based upon information
     * from HLS and video.js
*
* @private
*/
this.updateActiveSourceBuffers_ = () => {
// Retain the reference but empty the array
this.activeSourceBuffers_.length = 0;
// If there is only one source buffer, then it will always be active and audio will
// be disabled based on the codec of the source buffer
if (this.sourceBuffers.length === 1) {
let sourceBuffer = this.sourceBuffers[0];
sourceBuffer.appendAudioInitSegment_ = true;
sourceBuffer.audioDisabled_ = !sourceBuffer.audioCodec_;
this.activeSourceBuffers_.push(sourceBuffer);
return;
}
      // There are two source buffers: a combined (possibly video-only) source
      // buffer and an audio-only source buffer.
// By default, the audio in the combined virtual source buffer is enabled
// and the audio-only source buffer (if it exists) is disabled.
let disableCombined = false;
let disableAudioOnly = true;
// TODO: maybe we can store the sourcebuffers on the track objects?
// safari may do something like this
for (let i = 0; i < this.player_.audioTracks().length; i++) {
let track = this.player_.audioTracks()[i];
if (track.enabled && track.kind !== 'main') {
// The enabled track is an alternate audio track so disable the audio in
// the combined source buffer and enable the audio-only source buffer.
disableCombined = true;
disableAudioOnly = false;
break;
}
}
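      // e.g. (illustrative) when a viewer selects an alternate-language audio
      // track, the flags above mute the combined buffer's audio and the
      // audio-only buffer below becomes the active audio source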
this.sourceBuffers.forEach((sourceBuffer) => {
        /* eslint-disable */
// TODO once codecs are required, we can switch to using the codecs to determine
// what stream is the video stream, rather than relying on videoTracks
        /* eslint-enable */
sourceBuffer.appendAudioInitSegment_ = true;
if (sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
// combined
sourceBuffer.audioDisabled_ = disableCombined;
} else if (sourceBuffer.videoCodec_ && !sourceBuffer.audioCodec_) {
        // If the "combined" source buffer is video only, then we do not want
        // to disable the audio-only source buffer (this is mostly for demuxed
        // audio and video HLS)
sourceBuffer.audioDisabled_ = true;
disableAudioOnly = false;
} else if (!sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
// audio only
sourceBuffer.audioDisabled_ = disableAudioOnly;
if (disableAudioOnly) {
return;
}
}
this.activeSourceBuffers_.push(sourceBuffer);
});
};
this.onPlayerMediachange_ = () => {
this.sourceBuffers.forEach((sourceBuffer) => {
sourceBuffer.appendAudioInitSegment_ = true;
});
};
this.onHlsReset_ = () => {
this.sourceBuffers.forEach((sourceBuffer) => {
if (sourceBuffer.transmuxer_) {
sourceBuffer.transmuxer_.postMessage({action: 'resetCaptions'});
}
});
};
this.onHlsSegmentTimeMapping_ = (event) => {
      this.sourceBuffers.forEach((buffer) => {
        buffer.timeMapping_ = event.mapping;
      });
};
// Re-emit MediaSource events on the polyfill
[
'sourceopen',
'sourceclose',
'sourceended'
].forEach(function(eventName) {
this.nativeMediaSource_.addEventListener(eventName, this.trigger.bind(this));
}, this);
// capture the associated player when the MediaSource is
// successfully attached
this.on('sourceopen', (event) => {
// Get the player this MediaSource is attached to
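      // (this.url_ is the object URL assigned to this MediaSource when it was
      // attached to a video element via the library's createObjectURL wrapper)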
let video = document.querySelector('[src="' + this.url_ + '"]');
if (!video) {
return;
}
this.player_ = videojs(video.parentNode);
      // hls-reset is fired by videojs.Hls onto the tech after the main
      // SegmentLoader resets its state and flushes the buffer
this.player_.tech_.on('hls-reset', this.onHlsReset_);
      // hls-segment-time-mapping is fired by videojs.Hls onto the tech after
      // the main SegmentLoader inspects an MPEG-TS segment and has an accurate
      // stream-to-display time mapping
this.player_.tech_.on('hls-segment-time-mapping', this.onHlsSegmentTimeMapping_);
if (this.player_.audioTracks && this.player_.audioTracks()) {
this.player_.audioTracks().on('change', this.updateActiveSourceBuffers_);
this.player_.audioTracks().on('addtrack', this.updateActiveSourceBuffers_);
this.player_.audioTracks().on('removetrack', this.updateActiveSourceBuffers_);
}
this.player_.on('mediachange', this.onPlayerMediachange_);
});
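    // when the stream ends, extend the last cue on each source buffer's
    // metadata track to the final duration; cues created from ID3 timed
    // metadata have no natural end time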
this.on('sourceended', (event) => {
let duration = durationOfVideo(this.duration);
for (let i = 0; i < this.sourceBuffers.length; i++) {
let sourcebuffer = this.sourceBuffers[i];
let cues = sourcebuffer.metadataTrack_ && sourcebuffer.metadataTrack_.cues;
if (cues && cues.length) {
cues[cues.length - 1].endTime = duration;
}
}
});
// explicitly terminate any WebWorkers that were created
// by SourceHandlers
this.on('sourceclose', function(event) {
this.sourceBuffers.forEach(function(sourceBuffer) {
if (sourceBuffer.transmuxer_) {
sourceBuffer.transmuxer_.terminate();
}
});
this.sourceBuffers.length = 0;
if (!this.player_) {
return;
}
if (this.player_.audioTracks && this.player_.audioTracks()) {
this.player_.audioTracks().off('change', this.updateActiveSourceBuffers_);
this.player_.audioTracks().off('addtrack', this.updateActiveSourceBuffers_);
this.player_.audioTracks().off('removetrack', this.updateActiveSourceBuffers_);
}
// We can only change this if the player hasn't been disposed of yet
// because `off` eventually tries to use the el_ property. If it has
// been disposed of, then don't worry about it because there are no
// event handlers left to unbind anyway
if (this.player_.el_) {
this.player_.off('mediachange', this.onPlayerMediachange_);
this.player_.tech_.off('hls-reset', this.onHlsReset_);
this.player_.tech_.off('hls-segment-time-mapping', this.onHlsSegmentTimeMapping_);
}
});
}
/**
   * Add a range that can now be seeked to.
*
* @param {Double} start where to start the addition
* @param {Double} end where to end the addition
* @private
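   * @example
   * // illustrative only: addSeekableRange_ assumes an infinite (live)
   * // duration and grows the native duration to cover the new range
   * mediaSource.duration = Infinity;
   * mediaSource.addSeekableRange_(0, 42.5);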
*/
addSeekableRange_(start, end) {
let error;
if (this.duration !== Infinity) {
error = new Error('MediaSource.addSeekableRange() can only be invoked ' +
'when the duration is Infinity');
error.name = 'InvalidStateError';
error.code = 11;
throw error;
}
if (end > this.nativeMediaSource_.duration ||
isNaN(this.nativeMediaSource_.duration)) {
this.nativeMediaSource_.duration = end;
}
}
/**
* Add a source buffer to the media source.
*
* @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
* @param {String} type the content-type of the content
* @return {Object} the created source buffer
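   * @example
   * // illustrative only: an MPEG-2 TS content type yields a
   * // VirtualSourceBuffer that transmuxes to fragmented MP4; other
   * // types are passed to the native addSourceBuffer
   * let virtualBuffer =
   *   mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.4d400d, mp4a.40.2"');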
*/
addSourceBuffer(type) {
let buffer;
let parsedType = parseContentType(type);
// Create a VirtualSourceBuffer to transmux MPEG-2 transport
// stream segments into fragmented MP4s
if ((/^(video|audio)\/mp2t$/i).test(parsedType.type)) {
let codecs = [];
if (parsedType.parameters && parsedType.parameters.codecs) {
codecs = parsedType.parameters.codecs.split(',');
codecs = translateLegacyCodecs(codecs);
codecs = codecs.filter((codec) => {
return (isAudioCodec(codec) || isVideoCodec(codec));
});
}
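      // e.g. (illustrative) codecs="avc1.64001f,mp4a.40.5" is split into
      // individual codec strings; entries that are neither audio nor video
      // codecs are filtered out before transmuxing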
if (codecs.length === 0) {
codecs = ['avc1.4d400d', 'mp4a.40.2'];
}
buffer = new VirtualSourceBuffer(this, codecs);
if (this.sourceBuffers.length !== 0) {
// If another VirtualSourceBuffer already exists, then we are creating a
// SourceBuffer for an alternate audio track and therefore we know that
// the source has both an audio and video track.
// That means we should trigger the manual creation of the real
// SourceBuffers instead of waiting for the transmuxer to return data
this.sourceBuffers[0].createRealSourceBuffers_();
buffer.createRealSourceBuffers_();
// Automatically disable the audio on the first source buffer if
// a second source buffer is ever created
this.sourceBuffers[0].audioDisabled_ = true;
}
} else {
// delegate to the native implementation
buffer = this.nativeMediaSource_.addSourceBuffer(type);
}
this.sourceBuffers.push(buffer);
return buffer;
}
}