/**
 * @file virtual-source-buffer.js
 */
'use strict';

Object.defineProperty(exports, '__esModule', {
  value: true
});

var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();

var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }

function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }

function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }

var _videoJs = require('video.js');

var _videoJs2 = _interopRequireDefault(_videoJs);

var _createTextTracksIfNecessary = require('./create-text-tracks-if-necessary');

var _createTextTracksIfNecessary2 = _interopRequireDefault(_createTextTracksIfNecessary);

var _removeCuesFromTrack = require('./remove-cues-from-track');

var _removeCuesFromTrack2 = _interopRequireDefault(_removeCuesFromTrack);

var _addTextTrackData = require('./add-text-track-data');

var _webwackify = require('webwackify');

var _webwackify2 = _interopRequireDefault(_webwackify);

var _transmuxerWorker = require('./transmuxer-worker');

var _transmuxerWorker2 = _interopRequireDefault(_transmuxerWorker);

var _codecUtils = require('./codec-utils');

var resolveTransmuxWorker = function resolveTransmuxWorker() {
  var result = undefined;

  try {
    result = require.resolve('./transmuxer-worker');
  } catch (e) {
    // no result
  }

  return result;
};

// We create a wrapper around the SourceBuffer so that we can manage the
// state of the `updating` property manually. We have to do this because
// Firefox changes `updating` to false long before triggering `updateend`
// events and that was causing strange problems in videojs-contrib-hls
var makeWrappedSourceBuffer = function makeWrappedSourceBuffer(mediaSource, mimeType) {
  var sourceBuffer = mediaSource.addSourceBuffer(mimeType);
  var wrapper = Object.create(null);

  wrapper.updating = false;
  wrapper.realBuffer_ = sourceBuffer;

  var _loop = function (key) {
    if (typeof sourceBuffer[key] === 'function') {
      wrapper[key] = function () {
        return sourceBuffer[key].apply(sourceBuffer, arguments);
      };
    } else if (typeof wrapper[key] === 'undefined') {
      Object.defineProperty(wrapper, key, {
        get: function get() {
          return sourceBuffer[key];
        },
        set: function set(v) {
          return sourceBuffer[key] = v;
        }
      });
    }
  };

  for (var key in sourceBuffer) {
    _loop(key);
  }

  return wrapper;
};
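
// Example (illustrative only; `nativeMediaSource` and `bytes` are
// hypothetical): the wrapper forwards method calls to the real buffer and
// proxies its properties, but `updating` is an own data property that we
// toggle manually, independent of the native flag:
//
//   var wrapped = makeWrappedSourceBuffer(nativeMediaSource, 'video/mp4;codecs="avc1.4d400d"');
//   wrapped.appendBuffer(bytes); // forwarded to wrapped.realBuffer_
//   wrapped.updating;            // false -- our flag, not the native one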

/**
 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
 * front of current time.
 *
 * @param {Array} buffer
 *        The current buffer of gop information
 * @param {Player} player
 *        The player instance
 * @param {Double} mapping
 *        Offset to map display time to stream presentation time
 * @return {Array}
 *         List of gops considered safe to append over
 */
var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, player, mapping) {
  if (!player || !buffer.length) {
    return [];
  }

  // pts value for current time + 3 seconds to give a bit more wiggle room
  var currentTimePts = Math.ceil((player.currentTime() - mapping + 3) * 90000);

  var i = undefined;

  for (i = 0; i < buffer.length; i++) {
    if (buffer[i].pts > currentTimePts) {
      break;
    }
  }

  return buffer.slice(i);
};
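
// Worked example (illustrative only; the player argument is a hypothetical
// stub): with a zero time mapping and the player at 10s, currentTimePts is
// Math.ceil((10 + 3) * 90000) = 1170000, so only gops strictly ahead of
// that value survive:
//
//   gopsSafeToAlignWith(
//     [{ pts: 900000 }, { pts: 1260000 }],
//     { currentTime: function() { return 10; } },
//     0
//   );
//   // => [{ pts: 1260000 }]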

exports.gopsSafeToAlignWith = gopsSafeToAlignWith;
/**
 * Appends gop information (timing and byteLength) received from the transmuxer for the
 * gops appended in the last call to appendBuffer
 *
 * @param {Array} buffer
 *        The current buffer of gop information
 * @param {Array} gops
 *        List of new gop information
 * @param {boolean} replace
 *        If true, replace the buffer with the new gop information. If false, insert the
 *        new gop information into the buffer at the correct position in time.
 * @return {Array}
 *         Updated list of gop information
 */
var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
  if (!gops.length) {
    return buffer;
  }

  if (replace) {
    // If we are in safe append mode, then completely overwrite the gop buffer
    // with the most recently appended data. This will make sure that when appending
    // future segments, we only try to align with gops that are both ahead of current
    // time and in the last segment appended.
    return gops.slice();
  }

  var start = gops[0].pts;

  var i = 0;

  for (i; i < buffer.length; i++) {
    if (buffer[i].pts >= start) {
      break;
    }
  }

  return buffer.slice(0, i).concat(gops);
};
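
// Worked example (illustrative only): in merge mode, gops in the old buffer
// at or after the first new pts are dropped before the new list is appended;
// in replace mode the old buffer is discarded entirely:
//
//   updateGopBuffer([{ pts: 100 }, { pts: 200 }], [{ pts: 150 }], false);
//   // => [{ pts: 100 }, { pts: 150 }]
//   updateGopBuffer([{ pts: 100 }], [{ pts: 150 }], true);
//   // => [{ pts: 150 }]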

exports.updateGopBuffer = updateGopBuffer;
/**
 * Removes gop information in buffer that overlaps with provided start and end
 *
 * @param {Array} buffer
 *        The current buffer of gop information
 * @param {Double} start
 *        position to start the remove at
 * @param {Double} end
 *        position to end the remove at
 * @param {Double} mapping
 *        Offset to map display time to stream presentation time
 */
var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
  var startPts = Math.ceil((start - mapping) * 90000);
  var endPts = Math.ceil((end - mapping) * 90000);
  var updatedBuffer = buffer.slice();

  var i = buffer.length;

  while (i--) {
    if (buffer[i].pts <= endPts) {
      break;
    }
  }

  if (i === -1) {
    // no removal because end of remove range is before start of buffer
    return updatedBuffer;
  }

  var j = i + 1;

  while (j--) {
    if (buffer[j].pts <= startPts) {
      break;
    }
  }

  // clamp remove range start to 0 index
  j = Math.max(j, 0);

  updatedBuffer.splice(j, i - j + 1);

  return updatedBuffer;
};
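
// Worked example (illustrative only): with a zero mapping, removing the
// display-time range [2, 3] maps to pts range [180000, 270000]. Gops inside
// the range are removed, as is the last gop starting at or before the range
// start, since it may overlap into it:
//
//   removeGopBuffer([{ pts: 90000 }, { pts: 225000 }, { pts: 360000 }], 2, 3, 0);
//   // => [{ pts: 360000 }]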

exports.removeGopBuffer = removeGopBuffer;
/**
 * VirtualSourceBuffers exist so that we can transmux non-native formats
 * into a native format while keeping the same API as a native source buffer.
 * Each VirtualSourceBuffer creates a transmuxer that runs in its own thread
 * (a web worker) and muxes the data into a native format. The
 * VirtualSourceBuffer then sends all of that data to the native source buffer
 * so that it is indistinguishable from a natively supported format.
 *
 * @param {HtmlMediaSource} mediaSource the parent mediaSource
 * @param {Array} codecs array of codecs that we will be dealing with
 * @class VirtualSourceBuffer
 * @extends video.js.EventTarget
 */

var VirtualSourceBuffer = (function (_videojs$EventTarget) {
  _inherits(VirtualSourceBuffer, _videojs$EventTarget);

  function VirtualSourceBuffer(mediaSource, codecs) {
    var _this = this;

    _classCallCheck(this, VirtualSourceBuffer);

    _get(Object.getPrototypeOf(VirtualSourceBuffer.prototype), 'constructor', this).call(this, _videoJs2['default'].EventTarget);
    this.timestampOffset_ = 0;
    this.pendingBuffers_ = [];
    this.bufferUpdating_ = false;

    this.mediaSource_ = mediaSource;
    this.codecs_ = codecs;
    this.audioCodec_ = null;
    this.videoCodec_ = null;
    this.audioDisabled_ = false;
    this.appendAudioInitSegment_ = true;
    this.gopBuffer_ = [];
    this.timeMapping_ = 0;
    this.safeAppend_ = _videoJs2['default'].browser.IE_VERSION >= 11;

    var options = {
      remux: false,
      alignGopsAtEnd: this.safeAppend_
    };

    this.codecs_.forEach(function (codec) {
      if ((0, _codecUtils.isAudioCodec)(codec)) {
        _this.audioCodec_ = codec;
      } else if ((0, _codecUtils.isVideoCodec)(codec)) {
        _this.videoCodec_ = codec;
      }
    });

    // append muxed segments to their respective native buffers as
    // soon as they are available
    this.transmuxer_ = (0, _webwackify2['default'])(_transmuxerWorker2['default'], resolveTransmuxWorker());
    this.transmuxer_.postMessage({ action: 'init', options: options });

    this.transmuxer_.onmessage = function (event) {
      if (event.data.action === 'data') {
        return _this.data_(event);
      }

      if (event.data.action === 'done') {
        return _this.done_(event);
      }

      if (event.data.action === 'gopInfo') {
        return _this.appendGopInfo_(event);
      }
    };

    // this timestampOffset is a property with the side-effect of resetting
    // baseMediaDecodeTime in the transmuxer on the setter
    Object.defineProperty(this, 'timestampOffset', {
      get: function get() {
        return this.timestampOffset_;
      },
      set: function set(val) {
        if (typeof val === 'number' && val >= 0) {
          this.timestampOffset_ = val;
          this.appendAudioInitSegment_ = true;

          // reset gop buffer on timestampoffset as this signals a change in timeline
          this.gopBuffer_.length = 0;
          this.timeMapping_ = 0;

          // We have to tell the transmuxer to set the baseMediaDecodeTime to
          // the desired timestampOffset for the next segment
          this.transmuxer_.postMessage({
            action: 'setTimestampOffset',
            timestampOffset: val
          });
        }
      }
    });
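
    // Example (illustrative only; `virtualBuffer` is hypothetical): setting
    // a new offset clears gop tracking and notifies the transmuxer:
    //
    //   virtualBuffer.timestampOffset = 30;
    //   // gopBuffer_ is emptied and the worker receives
    //   // { action: 'setTimestampOffset', timestampOffset: 30 }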

    // setting the append window affects both source buffers
    Object.defineProperty(this, 'appendWindowStart', {
      get: function get() {
        return (this.videoBuffer_ || this.audioBuffer_).appendWindowStart;
      },
      set: function set(start) {
        if (this.videoBuffer_) {
          this.videoBuffer_.appendWindowStart = start;
        }
        if (this.audioBuffer_) {
          this.audioBuffer_.appendWindowStart = start;
        }
      }
    });

    // this buffer is "updating" if either of its native buffers are
    Object.defineProperty(this, 'updating', {
      get: function get() {
        return !!(this.bufferUpdating_ || !this.audioDisabled_ && this.audioBuffer_ && this.audioBuffer_.updating || this.videoBuffer_ && this.videoBuffer_.updating);
      }
    });

    // the buffered property is the intersection of the buffered
    // ranges of the native source buffers
    Object.defineProperty(this, 'buffered', {
      get: function get() {
        var start = null;
        var end = null;
        var arity = 0;
        var extents = [];
        var ranges = [];

        // neither buffer has been created yet
        if (!this.videoBuffer_ && !this.audioBuffer_) {
          return _videoJs2['default'].createTimeRange();
        }

        // only one buffer is configured
        if (!this.videoBuffer_) {
          return this.audioBuffer_.buffered;
        }
        if (!this.audioBuffer_) {
          return this.videoBuffer_.buffered;
        }

        // both buffers are configured
        if (this.audioDisabled_) {
          return this.videoBuffer_.buffered;
        }

        // both buffers are empty
        if (this.videoBuffer_.buffered.length === 0 && this.audioBuffer_.buffered.length === 0) {
          return _videoJs2['default'].createTimeRange();
        }

        // Handle the case where we have both buffers and create an
        // intersection of the two
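        // (Illustrative example: video buffered [0, 10] and audio buffered
        // [2, 12] sweep to the single intersection range [2, 10].)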
        var videoBuffered = this.videoBuffer_.buffered;
        var audioBuffered = this.audioBuffer_.buffered;
        var count = videoBuffered.length;

        // A) Gather up all start and end times
        while (count--) {
          extents.push({ time: videoBuffered.start(count), type: 'start' });
          extents.push({ time: videoBuffered.end(count), type: 'end' });
        }
        count = audioBuffered.length;
        while (count--) {
          extents.push({ time: audioBuffered.start(count), type: 'start' });
          extents.push({ time: audioBuffered.end(count), type: 'end' });
        }
        // B) Sort them by time
        extents.sort(function (a, b) {
          return a.time - b.time;
        });

        // C) Go along one by one incrementing arity for start and decrementing
        //    arity for ends
        for (count = 0; count < extents.length; count++) {
          if (extents[count].type === 'start') {
            arity++;

            // D) If arity is ever incremented to 2 we are entering an
            //    overlapping range
            if (arity === 2) {
              start = extents[count].time;
            }
          } else if (extents[count].type === 'end') {
            arity--;

            // E) If arity is ever decremented to 1 we are leaving an
            //    overlapping range
            if (arity === 1) {
              end = extents[count].time;
            }
          }

          // F) Record overlapping ranges
          if (start !== null && end !== null) {
            ranges.push([start, end]);
            start = null;
            end = null;
          }
        }

        return _videoJs2['default'].createTimeRanges(ranges);
      }
    });
  }

  /**
   * When we get a data event from the transmuxer
   * we call this function and handle the data that
   * was sent to us
   *
   * @private
   * @param {Event} event the data event from the transmuxer
   */

  _createClass(VirtualSourceBuffer, [{
    key: 'data_',
    value: function data_(event) {
      var segment = event.data.segment;

      // Cast ArrayBuffer to TypedArray
      segment.data = new Uint8Array(segment.data, event.data.byteOffset, event.data.byteLength);

      segment.initSegment = new Uint8Array(segment.initSegment.data, segment.initSegment.byteOffset, segment.initSegment.byteLength);

      (0, _createTextTracksIfNecessary2['default'])(this, this.mediaSource_, segment);

      // Add the segments to the pendingBuffers array
      this.pendingBuffers_.push(segment);
      return;
    }

    /**
     * When we get a done event from the transmuxer
     * we call this function and we process all
     * of the pending data that we have been saving in the
     * data_ function
     *
     * @private
     * @param {Event} event the done event from the transmuxer
     */
  }, {
    key: 'done_',
    value: function done_(event) {
      // Don't process and append data if the mediaSource is closed
      if (this.mediaSource_.readyState === 'closed') {
        this.pendingBuffers_.length = 0;
        return;
      }

      // All buffers should have been flushed from the muxer, so start
      // processing anything we have received
      this.processPendingSegments_();
      return;
    }

    /**
     * Create our internal native audio/video source buffers and add
     * event handlers to them with the following conditions:
     * 1. they do not already exist on the mediaSource
     * 2. this VSB has a codec for them
     *
     * @private
     */
  }, {
    key: 'createRealSourceBuffers_',
    value: function createRealSourceBuffers_() {
      var _this2 = this;

      var types = ['audio', 'video'];

      types.forEach(function (type) {
        // Don't create a SourceBuffer of this type if we don't have a
        // codec for it
        if (!_this2[type + 'Codec_']) {
          return;
        }

        // Do nothing if a SourceBuffer of this type already exists
        if (_this2[type + 'Buffer_']) {
          return;
        }

        var buffer = null;

        // If the mediasource already has a SourceBuffer for the codec,
        // use that
        if (_this2.mediaSource_[type + 'Buffer_']) {
          buffer = _this2.mediaSource_[type + 'Buffer_'];
          // In multiple audio track cases, the audio source buffer is disabled
          // on the main VirtualSourceBuffer by the HTMLMediaSource much earlier
          // than createRealSourceBuffers_ is called to create the second
          // VirtualSourceBuffer because that happens as a side-effect of
          // videojs-contrib-hls starting the audioSegmentLoader. As a result,
          // the audioBuffer is essentially "ownerless" and no one will toggle
          // the `updating` state back to false once the `updateend` event is received
          //
          // Setting `updating` to false manually will work around this
          // situation and allow work to continue
          buffer.updating = false;
        } else {
          var codecProperty = type + 'Codec_';
          var mimeType = type + '/mp4;codecs="' + _this2[codecProperty] + '"';

          buffer = makeWrappedSourceBuffer(_this2.mediaSource_.nativeMediaSource_, mimeType);

          _this2.mediaSource_[type + 'Buffer_'] = buffer;
        }

        _this2[type + 'Buffer_'] = buffer;

        // Wire up the events to the SourceBuffer
        ['update', 'updatestart', 'updateend'].forEach(function (event) {
          buffer.addEventListener(event, function () {
            // if audio is disabled
            if (type === 'audio' && _this2.audioDisabled_) {
              return;
            }

            if (event === 'updateend') {
              _this2[type + 'Buffer_'].updating = false;
            }

            var shouldTrigger = types.every(function (t) {
              // skip checking audio's updating status if audio
              // is not enabled
              if (t === 'audio' && _this2.audioDisabled_) {
                return true;
              }
              // if the other type is updating we don't trigger
              if (type !== t && _this2[t + 'Buffer_'] && _this2[t + 'Buffer_'].updating) {
                return false;
              }
              return true;
            });

            if (shouldTrigger) {
              return _this2.trigger(event);
            }
          });
        });
      });
    }

    /**
     * Emulate the native mediasource function, but our function will
     * send all of the proposed segments to the transmuxer so that we
     * can transmux them before we append them to our internal
     * native source buffers in the correct format.
     *
     * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
     * @param {Uint8Array} segment the segment to append to the buffer
     */
  }, {
    key: 'appendBuffer',
    value: function appendBuffer(segment) {
      // Start the internal "updating" state
      this.bufferUpdating_ = true;

      if (this.audioBuffer_ && this.audioBuffer_.buffered.length) {
        var audioBuffered = this.audioBuffer_.buffered;

        this.transmuxer_.postMessage({
          action: 'setAudioAppendStart',
          appendStart: audioBuffered.end(audioBuffered.length - 1)
        });
      }

      if (this.videoBuffer_) {
        this.transmuxer_.postMessage({
          action: 'alignGopsWith',
          gopsToAlignWith: gopsSafeToAlignWith(this.gopBuffer_, this.mediaSource_.player_, this.timeMapping_)
        });
      }

      this.transmuxer_.postMessage({
        action: 'push',
        // Send the typed-array of data as an ArrayBuffer so that
        // it can be sent as a "Transferable" and avoid the costly
        // memory copy
        data: segment.buffer,

        // To recreate the original typed-array, we need information
        // about what portion of the ArrayBuffer it was a view into
        byteOffset: segment.byteOffset,
        byteLength: segment.byteLength
      }, [segment.buffer]);
      this.transmuxer_.postMessage({ action: 'flush' });
    }
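
    // Example (illustrative only; `virtualBuffer` and `segmentBytes` are
    // hypothetical): because the ArrayBuffer is posted as a Transferable,
    // the caller's view is detached after the call:
    //
    //   var segment = new Uint8Array(segmentBytes);
    //   virtualBuffer.appendBuffer(segment);
    //   segment.byteLength; // 0 -- the buffer now belongs to the worker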

    /**
     * Appends gop information (timing and byteLength) received from the transmuxer for the
     * gops appended in the last call to appendBuffer
     *
     * @param {Event} event
     *        The gopInfo event from the transmuxer
     * @param {Array} event.data.gopInfo
     *        List of gop info to append
     */
  }, {
    key: 'appendGopInfo_',
    value: function appendGopInfo_(event) {
      this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, event.data.gopInfo, this.safeAppend_);
    }

    /**
     * Emulate the native mediasource function and remove parts
     * of the buffer from any of our internal buffers that exist
     *
     * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
     * @param {Double} start position to start the remove at
     * @param {Double} end position to end the remove at
     */
  }, {
    key: 'remove',
    value: function remove(start, end) {
      if (this.videoBuffer_) {
        this.videoBuffer_.updating = true;
        this.videoBuffer_.remove(start, end);
        this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
      }
      if (!this.audioDisabled_ && this.audioBuffer_) {
        this.audioBuffer_.updating = true;
        this.audioBuffer_.remove(start, end);
      }

      // Remove Metadata Cues (id3)
      (0, _removeCuesFromTrack2['default'])(start, end, this.metadataTrack_);

      // Remove Any Captions
      if (this.inbandTextTracks_) {
        for (var track in this.inbandTextTracks_) {
          (0, _removeCuesFromTrack2['default'])(start, end, this.inbandTextTracks_[track]);
        }
      }
    }

    /**
     * Process any segments that the muxer has output
     * Concatenate segments together based on type and append them into
     * their respective sourceBuffers
     *
     * @private
     */
  }, {
    key: 'processPendingSegments_',
    value: function processPendingSegments_() {
      var sortedSegments = {
        video: {
          segments: [],
          bytes: 0
        },
        audio: {
          segments: [],
          bytes: 0
        },
        captions: [],
        metadata: []
      };

      // Sort segments into separate video/audio arrays and
      // keep track of their total byte lengths
      sortedSegments = this.pendingBuffers_.reduce(function (segmentObj, segment) {
        var type = segment.type;
        var data = segment.data;
        var initSegment = segment.initSegment;

        segmentObj[type].segments.push(data);
        segmentObj[type].bytes += data.byteLength;

        segmentObj[type].initSegment = initSegment;

        // Gather any captions into a single array
        if (segment.captions) {
          segmentObj.captions = segmentObj.captions.concat(segment.captions);
        }

        if (segment.info) {
          segmentObj[type].info = segment.info;
        }

        // Gather any metadata into a single array
        if (segment.metadata) {
          segmentObj.metadata = segmentObj.metadata.concat(segment.metadata);
        }

        return segmentObj;
      }, sortedSegments);

      // Create the real source buffers if they don't exist by now since we
      // finally are sure what tracks are contained in the source
      if (!this.videoBuffer_ && !this.audioBuffer_) {
        // Remove any codecs that may have been specified by default but
        // are no longer applicable now
        if (sortedSegments.video.bytes === 0) {
          this.videoCodec_ = null;
        }
        if (sortedSegments.audio.bytes === 0) {
          this.audioCodec_ = null;
        }

        this.createRealSourceBuffers_();
      }

      if (sortedSegments.audio.info) {
        this.mediaSource_.trigger({ type: 'audioinfo', info: sortedSegments.audio.info });
      }
      if (sortedSegments.video.info) {
        this.mediaSource_.trigger({ type: 'videoinfo', info: sortedSegments.video.info });
      }

      if (this.appendAudioInitSegment_) {
        if (!this.audioDisabled_ && this.audioBuffer_) {
          sortedSegments.audio.segments.unshift(sortedSegments.audio.initSegment);
          sortedSegments.audio.bytes += sortedSegments.audio.initSegment.byteLength;
        }
        this.appendAudioInitSegment_ = false;
      }

      var triggerUpdateend = false;

      // Merge multiple video and audio segments into one and append
      if (this.videoBuffer_ && sortedSegments.video.bytes) {
        sortedSegments.video.segments.unshift(sortedSegments.video.initSegment);
        sortedSegments.video.bytes += sortedSegments.video.initSegment.byteLength;
        this.concatAndAppendSegments_(sortedSegments.video, this.videoBuffer_);
        // TODO: are video tracks the only ones with text tracks?
        (0, _addTextTrackData.addTextTrackData)(this, sortedSegments.captions, sortedSegments.metadata);
      } else if (this.videoBuffer_ && (this.audioDisabled_ || !this.audioBuffer_)) {
        // The transmuxer did not return any bytes of video, meaning it was all trimmed
        // for gop alignment. Since we have a video buffer and audio is disabled, updateend
        // will never be triggered by this source buffer, which will cause contrib-hls
        // to be stuck forever waiting for updateend. If audio is not disabled, updateend
        // will be triggered by the audio buffer, which will be sent upwards since the video
        // buffer will not be in an updating state.
        triggerUpdateend = true;
      }

      if (!this.audioDisabled_ && this.audioBuffer_) {
        this.concatAndAppendSegments_(sortedSegments.audio, this.audioBuffer_);
      }

      this.pendingBuffers_.length = 0;

      if (triggerUpdateend) {
        this.trigger('updateend');
      }

      // We are no longer in the internal "updating" state
      this.bufferUpdating_ = false;
    }

    /**
     * Combine all segments into a single Uint8Array and then append them
     * to the destination buffer
     *
     * @param {Object} segmentObj
     * @param {SourceBuffer} destinationBuffer native source buffer to append data to
     * @private
     */
  }, {
    key: 'concatAndAppendSegments_',
    value: function concatAndAppendSegments_(segmentObj, destinationBuffer) {
      var offset = 0;
      var tempBuffer = undefined;

      if (segmentObj.bytes) {
        tempBuffer = new Uint8Array(segmentObj.bytes);

        // Combine the individual segments into one large typed-array
        segmentObj.segments.forEach(function (segment) {
          tempBuffer.set(segment, offset);
          offset += segment.byteLength;
        });

        try {
          destinationBuffer.updating = true;
          destinationBuffer.appendBuffer(tempBuffer);
        } catch (error) {
          if (this.mediaSource_.player_) {
            this.mediaSource_.player_.error({
              code: -3,
              type: 'APPEND_BUFFER_ERR',
              message: error.message,
              originalError: error
            });
          }
        }
      }
    }
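
    // Example (illustrative only; `destinationBuffer` is hypothetical): two
    // 2-byte segments are packed into a single 4-byte append:
    //
    //   this.concatAndAppendSegments_({
    //     bytes: 4,
    //     segments: [new Uint8Array([0, 1]), new Uint8Array([2, 3])]
    //   }, destinationBuffer);
    //   // destinationBuffer.appendBuffer receives Uint8Array [0, 1, 2, 3]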

    /**
     * Emulate the native mediasource function. Abort any sourceBuffer
     * actions and throw out any un-appended data.
     *
     * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
     */
  }, {
    key: 'abort',
    value: function abort() {
      if (this.videoBuffer_) {
        this.videoBuffer_.abort();
      }
      if (!this.audioDisabled_ && this.audioBuffer_) {
        this.audioBuffer_.abort();
      }
      if (this.transmuxer_) {
        this.transmuxer_.postMessage({ action: 'reset' });
      }
      this.pendingBuffers_.length = 0;
      this.bufferUpdating_ = false;
    }
  }]);

  return VirtualSourceBuffer;
})(_videoJs2['default'].EventTarget);

exports['default'] = VirtualSourceBuffer;
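
// Example (illustrative only; `htmlMediaSource` and `tsSegmentBytes` are
// hypothetical): a VirtualSourceBuffer is normally created for you by its
// parent HtmlMediaSource, but direct construction looks like:
//
//   var vsb = new VirtualSourceBuffer(htmlMediaSource, ['avc1.4d400d', 'mp4a.40.2']);
//   vsb.appendBuffer(tsSegmentBytes); // transmuxed off-thread, then appended natively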