28831 lines
1.0 MiB
28831 lines
1.0 MiB
typeof window !== "undefined" &&
|
||
(function webpackUniversalModuleDefinition(root, factory) {
|
||
if(typeof exports === 'object' && typeof module === 'object')
|
||
module.exports = factory();
|
||
else if(typeof define === 'function' && define.amd)
|
||
define([], factory);
|
||
else if(typeof exports === 'object')
|
||
exports["Hls"] = factory();
|
||
else
|
||
root["Hls"] = factory();
|
||
})(this, function() {
|
||
return /******/ (function(modules) { // webpackBootstrap
|
||
/******/ // The module cache
|
||
/******/ var installedModules = {};
|
||
/******/
|
||
/******/ // The require function
|
||
/******/ function __webpack_require__(moduleId) {
|
||
/******/
|
||
/******/ // Check if module is in cache
|
||
/******/ if(installedModules[moduleId]) {
|
||
/******/ return installedModules[moduleId].exports;
|
||
/******/ }
|
||
/******/ // Create a new module (and put it into the cache)
|
||
/******/ var module = installedModules[moduleId] = {
|
||
/******/ i: moduleId,
|
||
/******/ l: false,
|
||
/******/ exports: {}
|
||
/******/ };
|
||
/******/
|
||
/******/ // Execute the module function
|
||
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
|
||
/******/
|
||
/******/ // Flag the module as loaded
|
||
/******/ module.l = true;
|
||
/******/
|
||
/******/ // Return the exports of the module
|
||
/******/ return module.exports;
|
||
/******/ }
|
||
/******/
|
||
/******/
|
||
/******/ // expose the modules object (__webpack_modules__)
|
||
/******/ __webpack_require__.m = modules;
|
||
/******/
|
||
/******/ // expose the module cache
|
||
/******/ __webpack_require__.c = installedModules;
|
||
/******/
|
||
/******/ // define getter function for harmony exports
|
||
/******/ __webpack_require__.d = function(exports, name, getter) {
|
||
/******/ if(!__webpack_require__.o(exports, name)) {
|
||
/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter });
|
||
/******/ }
|
||
/******/ };
|
||
/******/
|
||
/******/ // define __esModule on exports
|
||
/******/ __webpack_require__.r = function(exports) {
|
||
/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
|
||
/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
|
||
/******/ }
|
||
/******/ Object.defineProperty(exports, '__esModule', { value: true });
|
||
/******/ };
|
||
/******/
|
||
/******/ // create a fake namespace object
|
||
/******/ // mode & 1: value is a module id, require it
|
||
/******/ // mode & 2: merge all properties of value into the ns
|
||
/******/ // mode & 4: return value when already ns object
|
||
/******/ // mode & 8|1: behave like require
|
||
/******/ __webpack_require__.t = function(value, mode) {
|
||
/******/ if(mode & 1) value = __webpack_require__(value);
|
||
/******/ if(mode & 8) return value;
|
||
/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
|
||
/******/ var ns = Object.create(null);
|
||
/******/ __webpack_require__.r(ns);
|
||
/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value });
|
||
/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
|
||
/******/ return ns;
|
||
/******/ };
|
||
/******/
|
||
/******/ // getDefaultExport function for compatibility with non-harmony modules
|
||
/******/ __webpack_require__.n = function(module) {
|
||
/******/ var getter = module && module.__esModule ?
|
||
/******/ function getDefault() { return module['default']; } :
|
||
/******/ function getModuleExports() { return module; };
|
||
/******/ __webpack_require__.d(getter, 'a', getter);
|
||
/******/ return getter;
|
||
/******/ };
|
||
/******/
|
||
/******/ // Object.prototype.hasOwnProperty.call
|
||
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
|
||
/******/
|
||
/******/ // __webpack_public_path__
|
||
/******/ __webpack_require__.p = "/dist/";
|
||
/******/
|
||
/******/
|
||
/******/ // Load entry module and return exports
|
||
/******/ return __webpack_require__(__webpack_require__.s = "./src/hls.ts");
|
||
/******/ })
|
||
/************************************************************************/
|
||
/******/ ({
|
||
|
||
/***/ "./node_modules/eventemitter3/index.js":
|
||
/*!*********************************************!*\
|
||
!*** ./node_modules/eventemitter3/index.js ***!
|
||
\*********************************************/
|
||
/*! no static exports found */
|
||
/***/ (function(module, exports, __webpack_require__) {
|
||
|
||
"use strict";
|
||
|
||
|
||
var has = Object.prototype.hasOwnProperty
|
||
, prefix = '~';
|
||
|
||
/**
|
||
* Constructor to create a storage for our `EE` objects.
|
||
* An `Events` instance is a plain object whose properties are event names.
|
||
*
|
||
* @constructor
|
||
* @private
|
||
*/
|
||
function Events() {}
|
||
|
||
//
|
||
// We try to not inherit from `Object.prototype`. In some engines creating an
|
||
// instance in this way is faster than calling `Object.create(null)` directly.
|
||
// If `Object.create(null)` is not supported we prefix the event names with a
|
||
// character to make sure that the built-in object properties are not
|
||
// overridden or used as an attack vector.
|
||
//
|
||
if (Object.create) {
|
||
Events.prototype = Object.create(null);
|
||
|
||
//
|
||
// This hack is needed because the `__proto__` property is still inherited in
|
||
// some old browsers like Android 4, iPhone 5.1, Opera 11 and Safari 5.
|
||
//
|
||
if (!new Events().__proto__) prefix = false;
|
||
}
|
||
|
||
/**
|
||
* Representation of a single event listener.
|
||
*
|
||
* @param {Function} fn The listener function.
|
||
* @param {*} context The context to invoke the listener with.
|
||
* @param {Boolean} [once=false] Specify if the listener is a one-time listener.
|
||
* @constructor
|
||
* @private
|
||
*/
|
||
function EE(fn, context, once) {
|
||
this.fn = fn;
|
||
this.context = context;
|
||
this.once = once || false;
|
||
}
|
||
|
||
/**
|
||
* Add a listener for a given event.
|
||
*
|
||
* @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
|
||
* @param {(String|Symbol)} event The event name.
|
||
* @param {Function} fn The listener function.
|
||
* @param {*} context The context to invoke the listener with.
|
||
* @param {Boolean} once Specify if the listener is a one-time listener.
|
||
* @returns {EventEmitter}
|
||
* @private
|
||
*/
|
||
function addListener(emitter, event, fn, context, once) {
|
||
if (typeof fn !== 'function') {
|
||
throw new TypeError('The listener must be a function');
|
||
}
|
||
|
||
var listener = new EE(fn, context || emitter, once)
|
||
, evt = prefix ? prefix + event : event;
|
||
|
||
if (!emitter._events[evt]) emitter._events[evt] = listener, emitter._eventsCount++;
|
||
else if (!emitter._events[evt].fn) emitter._events[evt].push(listener);
|
||
else emitter._events[evt] = [emitter._events[evt], listener];
|
||
|
||
return emitter;
|
||
}
|
||
|
||
/**
|
||
* Clear event by name.
|
||
*
|
||
* @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
|
||
* @param {(String|Symbol)} evt The Event name.
|
||
* @private
|
||
*/
|
||
function clearEvent(emitter, evt) {
|
||
if (--emitter._eventsCount === 0) emitter._events = new Events();
|
||
else delete emitter._events[evt];
|
||
}
|
||
|
||
/**
|
||
* Minimal `EventEmitter` interface that is molded against the Node.js
|
||
* `EventEmitter` interface.
|
||
*
|
||
* @constructor
|
||
* @public
|
||
*/
|
||
function EventEmitter() {
|
||
this._events = new Events();
|
||
this._eventsCount = 0;
|
||
}
|
||
|
||
/**
|
||
* Return an array listing the events for which the emitter has registered
|
||
* listeners.
|
||
*
|
||
* @returns {Array}
|
||
* @public
|
||
*/
|
||
EventEmitter.prototype.eventNames = function eventNames() {
|
||
var names = []
|
||
, events
|
||
, name;
|
||
|
||
if (this._eventsCount === 0) return names;
|
||
|
||
for (name in (events = this._events)) {
|
||
if (has.call(events, name)) names.push(prefix ? name.slice(1) : name);
|
||
}
|
||
|
||
if (Object.getOwnPropertySymbols) {
|
||
return names.concat(Object.getOwnPropertySymbols(events));
|
||
}
|
||
|
||
return names;
|
||
};
|
||
|
||
/**
|
||
* Return the listeners registered for a given event.
|
||
*
|
||
* @param {(String|Symbol)} event The event name.
|
||
* @returns {Array} The registered listeners.
|
||
* @public
|
||
*/
|
||
EventEmitter.prototype.listeners = function listeners(event) {
|
||
var evt = prefix ? prefix + event : event
|
||
, handlers = this._events[evt];
|
||
|
||
if (!handlers) return [];
|
||
if (handlers.fn) return [handlers.fn];
|
||
|
||
for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) {
|
||
ee[i] = handlers[i].fn;
|
||
}
|
||
|
||
return ee;
|
||
};
|
||
|
||
/**
|
||
* Return the number of listeners listening to a given event.
|
||
*
|
||
* @param {(String|Symbol)} event The event name.
|
||
* @returns {Number} The number of listeners.
|
||
* @public
|
||
*/
|
||
EventEmitter.prototype.listenerCount = function listenerCount(event) {
|
||
var evt = prefix ? prefix + event : event
|
||
, listeners = this._events[evt];
|
||
|
||
if (!listeners) return 0;
|
||
if (listeners.fn) return 1;
|
||
return listeners.length;
|
||
};
|
||
|
||
/**
|
||
* Calls each of the listeners registered for a given event.
|
||
*
|
||
* @param {(String|Symbol)} event The event name.
|
||
* @returns {Boolean} `true` if the event had listeners, else `false`.
|
||
* @public
|
||
*/
|
||
EventEmitter.prototype.emit = function emit(event, a1, a2, a3, a4, a5) {
|
||
var evt = prefix ? prefix + event : event;
|
||
|
||
if (!this._events[evt]) return false;
|
||
|
||
var listeners = this._events[evt]
|
||
, len = arguments.length
|
||
, args
|
||
, i;
|
||
|
||
if (listeners.fn) {
|
||
if (listeners.once) this.removeListener(event, listeners.fn, undefined, true);
|
||
|
||
switch (len) {
|
||
case 1: return listeners.fn.call(listeners.context), true;
|
||
case 2: return listeners.fn.call(listeners.context, a1), true;
|
||
case 3: return listeners.fn.call(listeners.context, a1, a2), true;
|
||
case 4: return listeners.fn.call(listeners.context, a1, a2, a3), true;
|
||
case 5: return listeners.fn.call(listeners.context, a1, a2, a3, a4), true;
|
||
case 6: return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true;
|
||
}
|
||
|
||
for (i = 1, args = new Array(len -1); i < len; i++) {
|
||
args[i - 1] = arguments[i];
|
||
}
|
||
|
||
listeners.fn.apply(listeners.context, args);
|
||
} else {
|
||
var length = listeners.length
|
||
, j;
|
||
|
||
for (i = 0; i < length; i++) {
|
||
if (listeners[i].once) this.removeListener(event, listeners[i].fn, undefined, true);
|
||
|
||
switch (len) {
|
||
case 1: listeners[i].fn.call(listeners[i].context); break;
|
||
case 2: listeners[i].fn.call(listeners[i].context, a1); break;
|
||
case 3: listeners[i].fn.call(listeners[i].context, a1, a2); break;
|
||
case 4: listeners[i].fn.call(listeners[i].context, a1, a2, a3); break;
|
||
default:
|
||
if (!args) for (j = 1, args = new Array(len -1); j < len; j++) {
|
||
args[j - 1] = arguments[j];
|
||
}
|
||
|
||
listeners[i].fn.apply(listeners[i].context, args);
|
||
}
|
||
}
|
||
}
|
||
|
||
return true;
|
||
};
|
||
|
||
/**
|
||
* Add a listener for a given event.
|
||
*
|
||
* @param {(String|Symbol)} event The event name.
|
||
* @param {Function} fn The listener function.
|
||
* @param {*} [context=this] The context to invoke the listener with.
|
||
* @returns {EventEmitter} `this`.
|
||
* @public
|
||
*/
|
||
EventEmitter.prototype.on = function on(event, fn, context) {
|
||
return addListener(this, event, fn, context, false);
|
||
};
|
||
|
||
/**
|
||
* Add a one-time listener for a given event.
|
||
*
|
||
* @param {(String|Symbol)} event The event name.
|
||
* @param {Function} fn The listener function.
|
||
* @param {*} [context=this] The context to invoke the listener with.
|
||
* @returns {EventEmitter} `this`.
|
||
* @public
|
||
*/
|
||
EventEmitter.prototype.once = function once(event, fn, context) {
|
||
return addListener(this, event, fn, context, true);
|
||
};
|
||
|
||
/**
|
||
* Remove the listeners of a given event.
|
||
*
|
||
* @param {(String|Symbol)} event The event name.
|
||
* @param {Function} fn Only remove the listeners that match this function.
|
||
* @param {*} context Only remove the listeners that have this context.
|
||
* @param {Boolean} once Only remove one-time listeners.
|
||
* @returns {EventEmitter} `this`.
|
||
* @public
|
||
*/
|
||
EventEmitter.prototype.removeListener = function removeListener(event, fn, context, once) {
|
||
var evt = prefix ? prefix + event : event;
|
||
|
||
if (!this._events[evt]) return this;
|
||
if (!fn) {
|
||
clearEvent(this, evt);
|
||
return this;
|
||
}
|
||
|
||
var listeners = this._events[evt];
|
||
|
||
if (listeners.fn) {
|
||
if (
|
||
listeners.fn === fn &&
|
||
(!once || listeners.once) &&
|
||
(!context || listeners.context === context)
|
||
) {
|
||
clearEvent(this, evt);
|
||
}
|
||
} else {
|
||
for (var i = 0, events = [], length = listeners.length; i < length; i++) {
|
||
if (
|
||
listeners[i].fn !== fn ||
|
||
(once && !listeners[i].once) ||
|
||
(context && listeners[i].context !== context)
|
||
) {
|
||
events.push(listeners[i]);
|
||
}
|
||
}
|
||
|
||
//
|
||
// Reset the array, or remove it completely if we have no more listeners.
|
||
//
|
||
if (events.length) this._events[evt] = events.length === 1 ? events[0] : events;
|
||
else clearEvent(this, evt);
|
||
}
|
||
|
||
return this;
|
||
};
|
||
|
||
/**
|
||
* Remove all listeners, or those of the specified event.
|
||
*
|
||
* @param {(String|Symbol)} [event] The event name.
|
||
* @returns {EventEmitter} `this`.
|
||
* @public
|
||
*/
|
||
EventEmitter.prototype.removeAllListeners = function removeAllListeners(event) {
|
||
var evt;
|
||
|
||
if (event) {
|
||
evt = prefix ? prefix + event : event;
|
||
if (this._events[evt]) clearEvent(this, evt);
|
||
} else {
|
||
this._events = new Events();
|
||
this._eventsCount = 0;
|
||
}
|
||
|
||
return this;
|
||
};
|
||
|
||
//
|
||
// Alias methods names because people roll like that.
|
||
//
|
||
EventEmitter.prototype.off = EventEmitter.prototype.removeListener;
|
||
EventEmitter.prototype.addListener = EventEmitter.prototype.on;
|
||
|
||
//
|
||
// Expose the prefix.
|
||
//
|
||
EventEmitter.prefixed = prefix;
|
||
|
||
//
|
||
// Allow `EventEmitter` to be imported as module namespace.
|
||
//
|
||
EventEmitter.EventEmitter = EventEmitter;
|
||
|
||
//
|
||
// Expose the module.
|
||
//
|
||
if (true) {
|
||
module.exports = EventEmitter;
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./node_modules/url-toolkit/src/url-toolkit.js":
|
||
/*!*****************************************************!*\
|
||
!*** ./node_modules/url-toolkit/src/url-toolkit.js ***!
|
||
\*****************************************************/
|
||
/*! no static exports found */
|
||
/***/ (function(module, exports, __webpack_require__) {
|
||
|
||
// see https://tools.ietf.org/html/rfc1808
|
||
|
||
(function (root) {
|
||
var URL_REGEX =
|
||
/^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/?#]*\/)*[^;?#]*)?(;[^?#]*)?(\?[^#]*)?(#[^]*)?$/;
|
||
var FIRST_SEGMENT_REGEX = /^([^\/?#]*)([^]*)$/;
|
||
var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
|
||
var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;
|
||
|
||
var URLToolkit = {
|
||
// If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
|
||
// E.g
|
||
// With opts.alwaysNormalize = false (default, spec compliant)
|
||
// http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
|
||
// With opts.alwaysNormalize = true (not spec compliant)
|
||
// http://a.com/b/cd + /e/f/../g => http://a.com/e/g
|
||
buildAbsoluteURL: function (baseURL, relativeURL, opts) {
|
||
opts = opts || {};
|
||
// remove any remaining space and CRLF
|
||
baseURL = baseURL.trim();
|
||
relativeURL = relativeURL.trim();
|
||
if (!relativeURL) {
|
||
// 2a) If the embedded URL is entirely empty, it inherits the
|
||
// entire base URL (i.e., is set equal to the base URL)
|
||
// and we are done.
|
||
if (!opts.alwaysNormalize) {
|
||
return baseURL;
|
||
}
|
||
var basePartsForNormalise = URLToolkit.parseURL(baseURL);
|
||
if (!basePartsForNormalise) {
|
||
throw new Error('Error trying to parse base URL.');
|
||
}
|
||
basePartsForNormalise.path = URLToolkit.normalizePath(
|
||
basePartsForNormalise.path
|
||
);
|
||
return URLToolkit.buildURLFromParts(basePartsForNormalise);
|
||
}
|
||
var relativeParts = URLToolkit.parseURL(relativeURL);
|
||
if (!relativeParts) {
|
||
throw new Error('Error trying to parse relative URL.');
|
||
}
|
||
if (relativeParts.scheme) {
|
||
// 2b) If the embedded URL starts with a scheme name, it is
|
||
// interpreted as an absolute URL and we are done.
|
||
if (!opts.alwaysNormalize) {
|
||
return relativeURL;
|
||
}
|
||
relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
|
||
return URLToolkit.buildURLFromParts(relativeParts);
|
||
}
|
||
var baseParts = URLToolkit.parseURL(baseURL);
|
||
if (!baseParts) {
|
||
throw new Error('Error trying to parse base URL.');
|
||
}
|
||
if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
|
||
// If netLoc missing and path doesn't start with '/', assume everthing before the first '/' is the netLoc
|
||
// This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
|
||
var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
|
||
baseParts.netLoc = pathParts[1];
|
||
baseParts.path = pathParts[2];
|
||
}
|
||
if (baseParts.netLoc && !baseParts.path) {
|
||
baseParts.path = '/';
|
||
}
|
||
var builtParts = {
|
||
// 2c) Otherwise, the embedded URL inherits the scheme of
|
||
// the base URL.
|
||
scheme: baseParts.scheme,
|
||
netLoc: relativeParts.netLoc,
|
||
path: null,
|
||
params: relativeParts.params,
|
||
query: relativeParts.query,
|
||
fragment: relativeParts.fragment,
|
||
};
|
||
if (!relativeParts.netLoc) {
|
||
// 3) If the embedded URL's <net_loc> is non-empty, we skip to
|
||
// Step 7. Otherwise, the embedded URL inherits the <net_loc>
|
||
// (if any) of the base URL.
|
||
builtParts.netLoc = baseParts.netLoc;
|
||
// 4) If the embedded URL path is preceded by a slash "/", the
|
||
// path is not relative and we skip to Step 7.
|
||
if (relativeParts.path[0] !== '/') {
|
||
if (!relativeParts.path) {
|
||
// 5) If the embedded URL path is empty (and not preceded by a
|
||
// slash), then the embedded URL inherits the base URL path
|
||
builtParts.path = baseParts.path;
|
||
// 5a) if the embedded URL's <params> is non-empty, we skip to
|
||
// step 7; otherwise, it inherits the <params> of the base
|
||
// URL (if any) and
|
||
if (!relativeParts.params) {
|
||
builtParts.params = baseParts.params;
|
||
// 5b) if the embedded URL's <query> is non-empty, we skip to
|
||
// step 7; otherwise, it inherits the <query> of the base
|
||
// URL (if any) and we skip to step 7.
|
||
if (!relativeParts.query) {
|
||
builtParts.query = baseParts.query;
|
||
}
|
||
}
|
||
} else {
|
||
// 6) The last segment of the base URL's path (anything
|
||
// following the rightmost slash "/", or the entire path if no
|
||
// slash is present) is removed and the embedded URL's path is
|
||
// appended in its place.
|
||
var baseURLPath = baseParts.path;
|
||
var newPath =
|
||
baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) +
|
||
relativeParts.path;
|
||
builtParts.path = URLToolkit.normalizePath(newPath);
|
||
}
|
||
}
|
||
}
|
||
if (builtParts.path === null) {
|
||
builtParts.path = opts.alwaysNormalize
|
||
? URLToolkit.normalizePath(relativeParts.path)
|
||
: relativeParts.path;
|
||
}
|
||
return URLToolkit.buildURLFromParts(builtParts);
|
||
},
|
||
parseURL: function (url) {
|
||
var parts = URL_REGEX.exec(url);
|
||
if (!parts) {
|
||
return null;
|
||
}
|
||
return {
|
||
scheme: parts[1] || '',
|
||
netLoc: parts[2] || '',
|
||
path: parts[3] || '',
|
||
params: parts[4] || '',
|
||
query: parts[5] || '',
|
||
fragment: parts[6] || '',
|
||
};
|
||
},
|
||
normalizePath: function (path) {
|
||
// The following operations are
|
||
// then applied, in order, to the new path:
|
||
// 6a) All occurrences of "./", where "." is a complete path
|
||
// segment, are removed.
|
||
// 6b) If the path ends with "." as a complete path segment,
|
||
// that "." is removed.
|
||
path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, '');
|
||
// 6c) All occurrences of "<segment>/../", where <segment> is a
|
||
// complete path segment not equal to "..", are removed.
|
||
// Removal of these path segments is performed iteratively,
|
||
// removing the leftmost matching pattern on each iteration,
|
||
// until no matching pattern remains.
|
||
// 6d) If the path ends with "<segment>/..", where <segment> is a
|
||
// complete path segment not equal to "..", that
|
||
// "<segment>/.." is removed.
|
||
while (
|
||
path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length
|
||
) {}
|
||
return path.split('').reverse().join('');
|
||
},
|
||
buildURLFromParts: function (parts) {
|
||
return (
|
||
parts.scheme +
|
||
parts.netLoc +
|
||
parts.path +
|
||
parts.params +
|
||
parts.query +
|
||
parts.fragment
|
||
);
|
||
},
|
||
};
|
||
|
||
if (true)
|
||
module.exports = URLToolkit;
|
||
else {}
|
||
})(this);
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./node_modules/webworkify-webpack/index.js":
|
||
/*!**************************************************!*\
|
||
!*** ./node_modules/webworkify-webpack/index.js ***!
|
||
\**************************************************/
|
||
/*! no static exports found */
|
||
/***/ (function(module, exports, __webpack_require__) {
|
||
|
||
function webpackBootstrapFunc (modules) {
|
||
/******/ // The module cache
|
||
/******/ var installedModules = {};
|
||
|
||
/******/ // The require function
|
||
/******/ function __webpack_require__(moduleId) {
|
||
|
||
/******/ // Check if module is in cache
|
||
/******/ if(installedModules[moduleId])
|
||
/******/ return installedModules[moduleId].exports;
|
||
|
||
/******/ // Create a new module (and put it into the cache)
|
||
/******/ var module = installedModules[moduleId] = {
|
||
/******/ i: moduleId,
|
||
/******/ l: false,
|
||
/******/ exports: {}
|
||
/******/ };
|
||
|
||
/******/ // Execute the module function
|
||
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
|
||
|
||
/******/ // Flag the module as loaded
|
||
/******/ module.l = true;
|
||
|
||
/******/ // Return the exports of the module
|
||
/******/ return module.exports;
|
||
/******/ }
|
||
|
||
/******/ // expose the modules object (__webpack_modules__)
|
||
/******/ __webpack_require__.m = modules;
|
||
|
||
/******/ // expose the module cache
|
||
/******/ __webpack_require__.c = installedModules;
|
||
|
||
/******/ // identity function for calling harmony imports with the correct context
|
||
/******/ __webpack_require__.i = function(value) { return value; };
|
||
|
||
/******/ // define getter function for harmony exports
|
||
/******/ __webpack_require__.d = function(exports, name, getter) {
|
||
/******/ if(!__webpack_require__.o(exports, name)) {
|
||
/******/ Object.defineProperty(exports, name, {
|
||
/******/ configurable: false,
|
||
/******/ enumerable: true,
|
||
/******/ get: getter
|
||
/******/ });
|
||
/******/ }
|
||
/******/ };
|
||
|
||
/******/ // define __esModule on exports
|
||
/******/ __webpack_require__.r = function(exports) {
|
||
/******/ Object.defineProperty(exports, '__esModule', { value: true });
|
||
/******/ };
|
||
|
||
/******/ // getDefaultExport function for compatibility with non-harmony modules
|
||
/******/ __webpack_require__.n = function(module) {
|
||
/******/ var getter = module && module.__esModule ?
|
||
/******/ function getDefault() { return module['default']; } :
|
||
/******/ function getModuleExports() { return module; };
|
||
/******/ __webpack_require__.d(getter, 'a', getter);
|
||
/******/ return getter;
|
||
/******/ };
|
||
|
||
/******/ // Object.prototype.hasOwnProperty.call
|
||
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
|
||
|
||
/******/ // __webpack_public_path__
|
||
/******/ __webpack_require__.p = "/";
|
||
|
||
/******/ // on error function for async loading
|
||
/******/ __webpack_require__.oe = function(err) { console.error(err); throw err; };
|
||
|
||
var f = __webpack_require__(__webpack_require__.s = ENTRY_MODULE)
|
||
return f.default || f // try to call default if defined to also support babel esmodule exports
|
||
}
|
||
|
||
var moduleNameReqExp = '[\\.|\\-|\\+|\\w|\/|@]+'
|
||
var dependencyRegExp = '\\(\\s*(\/\\*.*?\\*\/)?\\s*.*?(' + moduleNameReqExp + ').*?\\)' // additional chars when output.pathinfo is true
|
||
|
||
// http://stackoverflow.com/a/2593661/130442
|
||
function quoteRegExp (str) {
|
||
return (str + '').replace(/[.?*+^$[\]\\(){}|-]/g, '\\$&')
|
||
}
|
||
|
||
function isNumeric(n) {
|
||
return !isNaN(1 * n); // 1 * n converts integers, integers as string ("123"), 1e3 and "1e3" to integers and strings to NaN
|
||
}
|
||
|
||
function getModuleDependencies (sources, module, queueName) {
|
||
var retval = {}
|
||
retval[queueName] = []
|
||
|
||
var fnString = module.toString()
|
||
var wrapperSignature = fnString.match(/^function\s?\w*\(\w+,\s*\w+,\s*(\w+)\)/)
|
||
if (!wrapperSignature) return retval
|
||
var webpackRequireName = wrapperSignature[1]
|
||
|
||
// main bundle deps
|
||
var re = new RegExp('(\\\\n|\\W)' + quoteRegExp(webpackRequireName) + dependencyRegExp, 'g')
|
||
var match
|
||
while ((match = re.exec(fnString))) {
|
||
if (match[3] === 'dll-reference') continue
|
||
retval[queueName].push(match[3])
|
||
}
|
||
|
||
// dll deps
|
||
re = new RegExp('\\(' + quoteRegExp(webpackRequireName) + '\\("(dll-reference\\s(' + moduleNameReqExp + '))"\\)\\)' + dependencyRegExp, 'g')
|
||
while ((match = re.exec(fnString))) {
|
||
if (!sources[match[2]]) {
|
||
retval[queueName].push(match[1])
|
||
sources[match[2]] = __webpack_require__(match[1]).m
|
||
}
|
||
retval[match[2]] = retval[match[2]] || []
|
||
retval[match[2]].push(match[4])
|
||
}
|
||
|
||
// convert 1e3 back to 1000 - this can be important after uglify-js converted 1000 to 1e3
|
||
var keys = Object.keys(retval);
|
||
for (var i = 0; i < keys.length; i++) {
|
||
for (var j = 0; j < retval[keys[i]].length; j++) {
|
||
if (isNumeric(retval[keys[i]][j])) {
|
||
retval[keys[i]][j] = 1 * retval[keys[i]][j];
|
||
}
|
||
}
|
||
}
|
||
|
||
return retval
|
||
}
|
||
|
||
function hasValuesInQueues (queues) {
|
||
var keys = Object.keys(queues)
|
||
return keys.reduce(function (hasValues, key) {
|
||
return hasValues || queues[key].length > 0
|
||
}, false)
|
||
}
|
||
|
||
function getRequiredModules (sources, moduleId) {
|
||
var modulesQueue = {
|
||
main: [moduleId]
|
||
}
|
||
var requiredModules = {
|
||
main: []
|
||
}
|
||
var seenModules = {
|
||
main: {}
|
||
}
|
||
|
||
while (hasValuesInQueues(modulesQueue)) {
|
||
var queues = Object.keys(modulesQueue)
|
||
for (var i = 0; i < queues.length; i++) {
|
||
var queueName = queues[i]
|
||
var queue = modulesQueue[queueName]
|
||
var moduleToCheck = queue.pop()
|
||
seenModules[queueName] = seenModules[queueName] || {}
|
||
if (seenModules[queueName][moduleToCheck] || !sources[queueName][moduleToCheck]) continue
|
||
seenModules[queueName][moduleToCheck] = true
|
||
requiredModules[queueName] = requiredModules[queueName] || []
|
||
requiredModules[queueName].push(moduleToCheck)
|
||
var newModules = getModuleDependencies(sources, sources[queueName][moduleToCheck], queueName)
|
||
var newModulesKeys = Object.keys(newModules)
|
||
for (var j = 0; j < newModulesKeys.length; j++) {
|
||
modulesQueue[newModulesKeys[j]] = modulesQueue[newModulesKeys[j]] || []
|
||
modulesQueue[newModulesKeys[j]] = modulesQueue[newModulesKeys[j]].concat(newModules[newModulesKeys[j]])
|
||
}
|
||
}
|
||
}
|
||
|
||
return requiredModules
|
||
}
|
||
|
||
module.exports = function (moduleId, options) {
|
||
options = options || {}
|
||
var sources = {
|
||
main: __webpack_require__.m
|
||
}
|
||
|
||
var requiredModules = options.all ? { main: Object.keys(sources.main) } : getRequiredModules(sources, moduleId)
|
||
|
||
var src = ''
|
||
|
||
Object.keys(requiredModules).filter(function (m) { return m !== 'main' }).forEach(function (module) {
|
||
var entryModule = 0
|
||
while (requiredModules[module][entryModule]) {
|
||
entryModule++
|
||
}
|
||
requiredModules[module].push(entryModule)
|
||
sources[module][entryModule] = '(function(module, exports, __webpack_require__) { module.exports = __webpack_require__; })'
|
||
src = src + 'var ' + module + ' = (' + webpackBootstrapFunc.toString().replace('ENTRY_MODULE', JSON.stringify(entryModule)) + ')({' + requiredModules[module].map(function (id) { return '' + JSON.stringify(id) + ': ' + sources[module][id].toString() }).join(',') + '});\n'
|
||
})
|
||
|
||
src = src + 'new ((' + webpackBootstrapFunc.toString().replace('ENTRY_MODULE', JSON.stringify(moduleId)) + ')({' + requiredModules.main.map(function (id) { return '' + JSON.stringify(id) + ': ' + sources.main[id].toString() }).join(',') + '}))(self);'
|
||
|
||
var blob = new window.Blob([src], { type: 'text/javascript' })
|
||
if (options.bare) { return blob }
|
||
|
||
var URL = window.URL || window.webkitURL || window.mozURL || window.msURL
|
||
|
||
var workerUrl = URL.createObjectURL(blob)
|
||
var worker = new window.Worker(workerUrl)
|
||
worker.objectURL = workerUrl
|
||
|
||
return worker
|
||
}
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/config.ts":
|
||
/*!***********************!*\
|
||
!*** ./src/config.ts ***!
|
||
\***********************/
|
||
/*! exports provided: hlsDefaultConfig, mergeConfig, enableStreamingMode */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "hlsDefaultConfig", function() { return hlsDefaultConfig; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "mergeConfig", function() { return mergeConfig; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "enableStreamingMode", function() { return enableStreamingMode; });
|
||
/* harmony import */ var _controller_abr_controller__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./controller/abr-controller */ "./src/controller/abr-controller.ts");
|
||
/* harmony import */ var _controller_audio_stream_controller__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./controller/audio-stream-controller */ "./src/controller/audio-stream-controller.ts");
|
||
/* harmony import */ var _controller_audio_track_controller__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./controller/audio-track-controller */ "./src/controller/audio-track-controller.ts");
|
||
/* harmony import */ var _controller_subtitle_stream_controller__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./controller/subtitle-stream-controller */ "./src/controller/subtitle-stream-controller.ts");
|
||
/* harmony import */ var _controller_subtitle_track_controller__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./controller/subtitle-track-controller */ "./src/controller/subtitle-track-controller.ts");
|
||
/* harmony import */ var _controller_buffer_controller__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./controller/buffer-controller */ "./src/controller/buffer-controller.ts");
|
||
/* harmony import */ var _controller_timeline_controller__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./controller/timeline-controller */ "./src/controller/timeline-controller.ts");
|
||
/* harmony import */ var _controller_cap_level_controller__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./controller/cap-level-controller */ "./src/controller/cap-level-controller.ts");
|
||
/* harmony import */ var _controller_fps_controller__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./controller/fps-controller */ "./src/controller/fps-controller.ts");
|
||
/* harmony import */ var _controller_eme_controller__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./controller/eme-controller */ "./src/controller/eme-controller.ts");
|
||
/* harmony import */ var _controller_cmcd_controller__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ./controller/cmcd-controller */ "./src/controller/cmcd-controller.ts");
|
||
/* harmony import */ var _utils_xhr_loader__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ./utils/xhr-loader */ "./src/utils/xhr-loader.ts");
|
||
/* harmony import */ var _utils_fetch_loader__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ./utils/fetch-loader */ "./src/utils/fetch-loader.ts");
|
||
/* harmony import */ var _utils_cues__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(/*! ./utils/cues */ "./src/utils/cues.ts");
|
||
/* harmony import */ var _utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(/*! ./utils/mediakeys-helper */ "./src/utils/mediakeys-helper.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_15__ = __webpack_require__(/*! ./utils/logger */ "./src/utils/logger.ts");
|
||
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
|
||
|
||
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
|
||
|
||
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
|
||
|
||
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
// If possible, keep hlsDefaultConfig shallow
|
||
// It is cloned whenever a new Hls instance is created, by keeping the config
|
||
// shallow the properties are cloned, and we don't end up manipulating the default
|
||
var hlsDefaultConfig = _objectSpread(_objectSpread({
|
||
autoStartLoad: true,
|
||
// used by stream-controller
|
||
startPosition: -1,
|
||
// used by stream-controller
|
||
defaultAudioCodec: undefined,
|
||
// used by stream-controller
|
||
debug: false,
|
||
// used by logger
|
||
capLevelOnFPSDrop: false,
|
||
// used by fps-controller
|
||
capLevelToPlayerSize: false,
|
||
// used by cap-level-controller
|
||
ignoreDevicePixelRatio: false,
|
||
// used by cap-level-controller
|
||
initialLiveManifestSize: 1,
|
||
// used by stream-controller
|
||
maxBufferLength: 30,
|
||
// used by stream-controller
|
||
backBufferLength: Infinity,
|
||
// used by buffer-controller
|
||
maxBufferSize: 60 * 1000 * 1000,
|
||
// used by stream-controller
|
||
maxBufferHole: 0.1,
|
||
// used by stream-controller
|
||
overlayKey: undefined,
|
||
overlayIv: undefined,
|
||
highBufferWatchdogPeriod: 2,
|
||
// used by stream-controller
|
||
nudgeOffset: 0.1,
|
||
// used by stream-controller
|
||
nudgeMaxRetry: 3,
|
||
// used by stream-controller
|
||
maxFragLookUpTolerance: 0.25,
|
||
// used by stream-controller
|
||
liveSyncDurationCount: 3,
|
||
// used by latency-controller
|
||
liveMaxLatencyDurationCount: Infinity,
|
||
// used by latency-controller
|
||
liveSyncDuration: undefined,
|
||
// used by latency-controller
|
||
liveMaxLatencyDuration: undefined,
|
||
// used by latency-controller
|
||
maxLiveSyncPlaybackRate: 1,
|
||
// used by latency-controller
|
||
liveDurationInfinity: false,
|
||
// used by buffer-controller
|
||
liveBackBufferLength: null,
|
||
// used by buffer-controller
|
||
maxMaxBufferLength: 600,
|
||
// used by stream-controller
|
||
enableWorker: true,
|
||
// used by demuxer
|
||
enableSoftwareAES: true,
|
||
// used by decrypter
|
||
manifestLoadingTimeOut: 10000,
|
||
// used by playlist-loader
|
||
manifestLoadingMaxRetry: 1,
|
||
// used by playlist-loader
|
||
manifestLoadingRetryDelay: 1000,
|
||
// used by playlist-loader
|
||
manifestLoadingMaxRetryTimeout: 64000,
|
||
// used by playlist-loader
|
||
startLevel: undefined,
|
||
// used by level-controller
|
||
levelLoadingTimeOut: 10000,
|
||
// used by playlist-loader
|
||
levelLoadingMaxRetry: 4,
|
||
// used by playlist-loader
|
||
levelLoadingRetryDelay: 1000,
|
||
// used by playlist-loader
|
||
levelLoadingMaxRetryTimeout: 64000,
|
||
// used by playlist-loader
|
||
fragLoadingTimeOut: 20000,
|
||
// used by fragment-loader
|
||
fragLoadingMaxRetry: 6,
|
||
// used by fragment-loader
|
||
fragLoadingRetryDelay: 1000,
|
||
// used by fragment-loader
|
||
fragLoadingMaxRetryTimeout: 64000,
|
||
// used by fragment-loader
|
||
startFragPrefetch: false,
|
||
// used by stream-controller
|
||
fpsDroppedMonitoringPeriod: 5000,
|
||
// used by fps-controller
|
||
fpsDroppedMonitoringThreshold: 0.2,
|
||
// used by fps-controller
|
||
appendErrorMaxRetry: 3,
|
||
// used by buffer-controller
|
||
loader: _utils_xhr_loader__WEBPACK_IMPORTED_MODULE_11__["default"],
|
||
// loader: FetchLoader,
|
||
fLoader: undefined,
|
||
// used by fragment-loader
|
||
pLoader: undefined,
|
||
// used by playlist-loader
|
||
xhrSetup: undefined,
|
||
// used by xhr-loader
|
||
licenseXhrSetup: undefined,
|
||
// used by eme-controller
|
||
licenseResponseCallback: undefined,
|
||
// used by eme-controller
|
||
abrController: _controller_abr_controller__WEBPACK_IMPORTED_MODULE_0__["default"],
|
||
bufferController: _controller_buffer_controller__WEBPACK_IMPORTED_MODULE_5__["default"],
|
||
capLevelController: _controller_cap_level_controller__WEBPACK_IMPORTED_MODULE_7__["default"],
|
||
fpsController: _controller_fps_controller__WEBPACK_IMPORTED_MODULE_8__["default"],
|
||
stretchShortVideoTrack: false,
|
||
// used by mp4-remuxer
|
||
maxAudioFramesDrift: 1,
|
||
// used by mp4-remuxer
|
||
forceKeyFrameOnDiscontinuity: true,
|
||
// used by ts-demuxer
|
||
abrEwmaFastLive: 3,
|
||
// used by abr-controller
|
||
abrEwmaSlowLive: 9,
|
||
// used by abr-controller
|
||
abrEwmaFastVoD: 3,
|
||
// used by abr-controller
|
||
abrEwmaSlowVoD: 9,
|
||
// used by abr-controller
|
||
abrEwmaDefaultEstimate: 5e5,
|
||
// 500 kbps // used by abr-controller
|
||
abrBandWidthFactor: 0.95,
|
||
// used by abr-controller
|
||
abrBandWidthUpFactor: 0.7,
|
||
// used by abr-controller
|
||
abrMaxWithRealBitrate: false,
|
||
// used by abr-controller
|
||
maxStarvationDelay: 4,
|
||
// used by abr-controller
|
||
maxLoadingDelay: 4,
|
||
// used by abr-controller
|
||
minAutoBitrate: 0,
|
||
// used by hls
|
||
emeEnabled: false,
|
||
// used by eme-controller
|
||
widevineLicenseUrl: undefined,
|
||
// used by eme-controller
|
||
drmSystemOptions: {},
|
||
// used by eme-controller
|
||
requestMediaKeySystemAccessFunc: _utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_14__["requestMediaKeySystemAccess"],
|
||
// used by eme-controller
|
||
testBandwidth: true,
|
||
progressive: false,
|
||
lowLatencyMode: true,
|
||
cmcd: undefined
|
||
}, timelineConfig()), {}, {
|
||
subtitleStreamController: true ? _controller_subtitle_stream_controller__WEBPACK_IMPORTED_MODULE_3__["SubtitleStreamController"] : undefined,
|
||
subtitleTrackController: true ? _controller_subtitle_track_controller__WEBPACK_IMPORTED_MODULE_4__["default"] : undefined,
|
||
timelineController: true ? _controller_timeline_controller__WEBPACK_IMPORTED_MODULE_6__["TimelineController"] : undefined,
|
||
audioStreamController: true ? _controller_audio_stream_controller__WEBPACK_IMPORTED_MODULE_1__["default"] : undefined,
|
||
audioTrackController: true ? _controller_audio_track_controller__WEBPACK_IMPORTED_MODULE_2__["default"] : undefined,
|
||
emeController: true ? _controller_eme_controller__WEBPACK_IMPORTED_MODULE_9__["default"] : undefined,
|
||
cmcdController: true ? _controller_cmcd_controller__WEBPACK_IMPORTED_MODULE_10__["default"] : undefined
|
||
});
|
||
|
||
function timelineConfig() {
|
||
return {
|
||
cueHandler: _utils_cues__WEBPACK_IMPORTED_MODULE_13__["default"],
|
||
// used by timeline-controller
|
||
enableCEA708Captions: true,
|
||
// used by timeline-controller
|
||
enableWebVTT: true,
|
||
// used by timeline-controller
|
||
enableIMSC1: true,
|
||
// used by timeline-controller
|
||
captionsTextTrack1Label: 'English',
|
||
// used by timeline-controller
|
||
captionsTextTrack1LanguageCode: 'en',
|
||
// used by timeline-controller
|
||
captionsTextTrack2Label: 'Spanish',
|
||
// used by timeline-controller
|
||
captionsTextTrack2LanguageCode: 'es',
|
||
// used by timeline-controller
|
||
captionsTextTrack3Label: 'Unknown CC',
|
||
// used by timeline-controller
|
||
captionsTextTrack3LanguageCode: '',
|
||
// used by timeline-controller
|
||
captionsTextTrack4Label: 'Unknown CC',
|
||
// used by timeline-controller
|
||
captionsTextTrack4LanguageCode: '',
|
||
// used by timeline-controller
|
||
renderTextTracksNatively: true
|
||
};
|
||
}
|
||
|
||
function mergeConfig(defaultConfig, userConfig) {
|
||
if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
|
||
throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
|
||
}
|
||
|
||
if (userConfig.liveMaxLatencyDurationCount !== undefined && (userConfig.liveSyncDurationCount === undefined || userConfig.liveMaxLatencyDurationCount <= userConfig.liveSyncDurationCount)) {
|
||
throw new Error('Illegal hls.js config: "liveMaxLatencyDurationCount" must be greater than "liveSyncDurationCount"');
|
||
}
|
||
|
||
if (userConfig.liveMaxLatencyDuration !== undefined && (userConfig.liveSyncDuration === undefined || userConfig.liveMaxLatencyDuration <= userConfig.liveSyncDuration)) {
|
||
throw new Error('Illegal hls.js config: "liveMaxLatencyDuration" must be greater than "liveSyncDuration"');
|
||
}
|
||
|
||
return _extends({}, defaultConfig, userConfig);
|
||
}
|
||
function enableStreamingMode(config) {
|
||
var currentLoader = config.loader;
|
||
|
||
if (currentLoader !== _utils_fetch_loader__WEBPACK_IMPORTED_MODULE_12__["default"] && currentLoader !== _utils_xhr_loader__WEBPACK_IMPORTED_MODULE_11__["default"]) {
|
||
// If a developer has configured their own loader, respect that choice
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_15__["logger"].log('[config]: Custom loader detected, cannot enable progressive streaming');
|
||
config.progressive = false;
|
||
} else {
|
||
var canStreamProgressively = Object(_utils_fetch_loader__WEBPACK_IMPORTED_MODULE_12__["fetchSupported"])();
|
||
|
||
if (canStreamProgressively) {
|
||
config.loader = _utils_fetch_loader__WEBPACK_IMPORTED_MODULE_12__["default"];
|
||
config.progressive = true;
|
||
config.enableSoftwareAES = true;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_15__["logger"].log('[config]: Progressive streaming enabled, using FetchLoader');
|
||
}
|
||
}
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/controller/abr-controller.ts":
|
||
/*!******************************************!*\
|
||
!*** ./src/controller/abr-controller.ts ***!
|
||
\******************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _utils_ewma_bandwidth_estimator__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/ewma-bandwidth-estimator */ "./src/utils/ewma-bandwidth-estimator.ts");
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
|
||
|
||
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
var AbrController = /*#__PURE__*/function () {
|
||
function AbrController(hls) {
|
||
this.hls = void 0;
|
||
this.lastLoadedFragLevel = 0;
|
||
this._nextAutoLevel = -1;
|
||
this.timer = void 0;
|
||
this.onCheck = this._abandonRulesCheck.bind(this);
|
||
this.fragCurrent = null;
|
||
this.partCurrent = null;
|
||
this.bitrateTestDelay = 0;
|
||
this.bwEstimator = void 0;
|
||
this.hls = hls;
|
||
var config = hls.config;
|
||
this.bwEstimator = new _utils_ewma_bandwidth_estimator__WEBPACK_IMPORTED_MODULE_1__["default"](config.abrEwmaSlowVoD, config.abrEwmaFastVoD, config.abrEwmaDefaultEstimate);
|
||
this.registerListeners();
|
||
}
|
||
|
||
var _proto = AbrController.prototype;
|
||
|
||
_proto.registerListeners = function registerListeners() {
|
||
var hls = this.hls;
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_LOADING, this.onFragLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_LOADED, this.onFragLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, this.onError, this);
|
||
};
|
||
|
||
_proto.unregisterListeners = function unregisterListeners() {
|
||
var hls = this.hls;
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_LOADING, this.onFragLoading, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_LOADED, this.onFragLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, this.onError, this);
|
||
};
|
||
|
||
_proto.destroy = function destroy() {
|
||
this.unregisterListeners();
|
||
this.clearTimer(); // @ts-ignore
|
||
|
||
this.hls = this.onCheck = null;
|
||
this.fragCurrent = this.partCurrent = null;
|
||
};
|
||
|
||
_proto.onFragLoading = function onFragLoading(event, data) {
|
||
var frag = data.frag;
|
||
|
||
if (frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_5__["PlaylistLevelType"].MAIN) {
|
||
if (!this.timer) {
|
||
var _data$part;
|
||
|
||
this.fragCurrent = frag;
|
||
this.partCurrent = (_data$part = data.part) != null ? _data$part : null;
|
||
this.timer = self.setInterval(this.onCheck, 100);
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.onLevelLoaded = function onLevelLoaded(event, data) {
|
||
var config = this.hls.config;
|
||
|
||
if (data.details.live) {
|
||
this.bwEstimator.update(config.abrEwmaSlowLive, config.abrEwmaFastLive);
|
||
} else {
|
||
this.bwEstimator.update(config.abrEwmaSlowVoD, config.abrEwmaFastVoD);
|
||
}
|
||
}
|
||
/*
|
||
This method monitors the download rate of the current fragment, and will downswitch if that fragment will not load
|
||
quickly enough to prevent underbuffering
|
||
*/
|
||
;
|
||
|
||
_proto._abandonRulesCheck = function _abandonRulesCheck() {
|
||
var frag = this.fragCurrent,
|
||
part = this.partCurrent,
|
||
hls = this.hls;
|
||
var autoLevelEnabled = hls.autoLevelEnabled,
|
||
config = hls.config,
|
||
media = hls.media;
|
||
|
||
if (!frag || !media) {
|
||
return;
|
||
}
|
||
|
||
var stats = part ? part.stats : frag.stats;
|
||
var duration = part ? part.duration : frag.duration; // If loading has been aborted and not in lowLatencyMode, stop timer and return
|
||
|
||
if (stats.aborted) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn('frag loader destroy or aborted, disarm abandonRules');
|
||
this.clearTimer(); // reset forced auto level value so that next level will be selected
|
||
|
||
this._nextAutoLevel = -1;
|
||
return;
|
||
} // This check only runs if we're in ABR mode and actually playing
|
||
|
||
|
||
if (!autoLevelEnabled || media.paused || !media.playbackRate || !media.readyState) {
|
||
return;
|
||
}
|
||
|
||
var requestDelay = performance.now() - stats.loading.start;
|
||
var playbackRate = Math.abs(media.playbackRate); // In order to work with a stable bandwidth, only begin monitoring bandwidth after half of the fragment has been loaded
|
||
|
||
if (requestDelay <= 500 * duration / playbackRate) {
|
||
return;
|
||
}
|
||
|
||
var levels = hls.levels,
|
||
minAutoLevel = hls.minAutoLevel;
|
||
var level = levels[frag.level];
|
||
var expectedLen = stats.total || Math.max(stats.loaded, Math.round(duration * level.maxBitrate / 8));
|
||
var loadRate = Math.max(1, stats.bwEstimate ? stats.bwEstimate / 8 : stats.loaded * 1000 / requestDelay); // fragLoadDelay is an estimate of the time (in seconds) it will take to buffer the entire fragment
|
||
|
||
var fragLoadedDelay = (expectedLen - stats.loaded) / loadRate;
|
||
var pos = media.currentTime; // bufferStarvationDelay is an estimate of the amount time (in seconds) it will take to exhaust the buffer
|
||
|
||
var bufferStarvationDelay = (_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(media, pos, config.maxBufferHole).end - pos) / playbackRate; // Attempt an emergency downswitch only if less than 2 fragment lengths are buffered, and the time to finish loading
|
||
// the current fragment is greater than the amount of buffer we have left
|
||
|
||
if (bufferStarvationDelay >= 2 * duration / playbackRate || fragLoadedDelay <= bufferStarvationDelay) {
|
||
return;
|
||
}
|
||
|
||
var fragLevelNextLoadedDelay = Number.POSITIVE_INFINITY;
|
||
var nextLoadLevel; // Iterate through lower level and try to find the largest one that avoids rebuffering
|
||
|
||
for (nextLoadLevel = frag.level - 1; nextLoadLevel > minAutoLevel; nextLoadLevel--) {
|
||
// compute time to load next fragment at lower level
|
||
// 0.8 : consider only 80% of current bw to be conservative
|
||
// 8 = bits per byte (bps/Bps)
|
||
var levelNextBitrate = levels[nextLoadLevel].maxBitrate;
|
||
fragLevelNextLoadedDelay = duration * levelNextBitrate / (8 * 0.8 * loadRate);
|
||
|
||
if (fragLevelNextLoadedDelay < bufferStarvationDelay) {
|
||
break;
|
||
}
|
||
} // Only emergency switch down if it takes less time to load a new fragment at lowest level instead of continuing
|
||
// to load the current one
|
||
|
||
|
||
if (fragLevelNextLoadedDelay >= fragLoadedDelay) {
|
||
return;
|
||
}
|
||
|
||
var bwEstimate = this.bwEstimator.getEstimate();
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn("Fragment " + frag.sn + (part ? ' part ' + part.index : '') + " of level " + frag.level + " is loading too slowly and will cause an underbuffer; aborting and switching to level " + nextLoadLevel + "\n Current BW estimate: " + (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(bwEstimate) ? (bwEstimate / 1024).toFixed(3) : 'Unknown') + " Kb/s\n Estimated load time for current fragment: " + fragLoadedDelay.toFixed(3) + " s\n Estimated load time for the next fragment: " + fragLevelNextLoadedDelay.toFixed(3) + " s\n Time to underbuffer: " + bufferStarvationDelay.toFixed(3) + " s");
|
||
hls.nextLoadLevel = nextLoadLevel;
|
||
this.bwEstimator.sample(requestDelay, stats.loaded);
|
||
this.clearTimer();
|
||
|
||
if (frag.loader) {
|
||
this.fragCurrent = this.partCurrent = null;
|
||
frag.loader.abort();
|
||
}
|
||
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_LOAD_EMERGENCY_ABORTED, {
|
||
frag: frag,
|
||
part: part,
|
||
stats: stats
|
||
});
|
||
};
|
||
|
||
_proto.onFragLoaded = function onFragLoaded(event, _ref) {
|
||
var frag = _ref.frag,
|
||
part = _ref.part;
|
||
|
||
if (frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_5__["PlaylistLevelType"].MAIN && Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(frag.sn)) {
|
||
var stats = part ? part.stats : frag.stats;
|
||
var duration = part ? part.duration : frag.duration; // stop monitoring bw once frag loaded
|
||
|
||
this.clearTimer(); // store level id after successful fragment load
|
||
|
||
this.lastLoadedFragLevel = frag.level; // reset forced auto level value so that next level will be selected
|
||
|
||
this._nextAutoLevel = -1; // compute level average bitrate
|
||
|
||
if (this.hls.config.abrMaxWithRealBitrate) {
|
||
var level = this.hls.levels[frag.level];
|
||
var loadedBytes = (level.loaded ? level.loaded.bytes : 0) + stats.loaded;
|
||
var loadedDuration = (level.loaded ? level.loaded.duration : 0) + duration;
|
||
level.loaded = {
|
||
bytes: loadedBytes,
|
||
duration: loadedDuration
|
||
};
|
||
level.realBitrate = Math.round(8 * loadedBytes / loadedDuration);
|
||
}
|
||
|
||
if (frag.bitrateTest) {
|
||
var fragBufferedData = {
|
||
stats: stats,
|
||
frag: frag,
|
||
part: part,
|
||
id: frag.type
|
||
};
|
||
this.onFragBuffered(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_BUFFERED, fragBufferedData);
|
||
frag.bitrateTest = false;
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.onFragBuffered = function onFragBuffered(event, data) {
|
||
var frag = data.frag,
|
||
part = data.part;
|
||
var stats = part ? part.stats : frag.stats;
|
||
|
||
if (stats.aborted) {
|
||
return;
|
||
} // Only count non-alt-audio frags which were actually buffered in our BW calculations
|
||
|
||
|
||
if (frag.type !== _types_loader__WEBPACK_IMPORTED_MODULE_5__["PlaylistLevelType"].MAIN || frag.sn === 'initSegment') {
|
||
return;
|
||
} // Use the difference between parsing and request instead of buffering and request to compute fragLoadingProcessing;
|
||
// rationale is that buffer appending only happens once media is attached. This can happen when config.startFragPrefetch
|
||
// is used. If we used buffering in that case, our BW estimate sample will be very large.
|
||
|
||
|
||
var processingMs = stats.parsing.end - stats.loading.start;
|
||
this.bwEstimator.sample(processingMs, stats.loaded);
|
||
stats.bwEstimate = this.bwEstimator.getEstimate();
|
||
|
||
if (frag.bitrateTest) {
|
||
this.bitrateTestDelay = processingMs / 1000;
|
||
} else {
|
||
this.bitrateTestDelay = 0;
|
||
}
|
||
};

_proto.onError = function onError(event, data) {
// stop timer in case of frag loading error
switch (data.details) {
case _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorDetails"].FRAG_LOAD_ERROR:
case _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorDetails"].FRAG_LOAD_TIMEOUT:
this.clearTimer();
break;

default:
break;
}
};

_proto.clearTimer = function clearTimer() {
self.clearInterval(this.timer);
this.timer = undefined;
} // return next auto level
;

_proto.getNextABRAutoLevel = function getNextABRAutoLevel() {
var fragCurrent = this.fragCurrent,
partCurrent = this.partCurrent,
hls = this.hls;
var maxAutoLevel = hls.maxAutoLevel,
config = hls.config,
minAutoLevel = hls.minAutoLevel,
media = hls.media;
var currentFragDuration = partCurrent ? partCurrent.duration : fragCurrent ? fragCurrent.duration : 0;
var pos = media ? media.currentTime : 0; // playbackRate is the absolute value of the playback rate; if media.playbackRate is 0, we use 1 to load as
// if we're playing back at the normal rate.

var playbackRate = media && media.playbackRate !== 0 ? Math.abs(media.playbackRate) : 1.0;
var avgbw = this.bwEstimator ? this.bwEstimator.getEstimate() : config.abrEwmaDefaultEstimate; // bufferStarvationDelay is the wall-clock time left until the playback buffer is exhausted.
var bufferStarvationDelay = (_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(media, pos, config.maxBufferHole).end - pos) / playbackRate; // First, look to see if we can find a level matching with our avg bandwidth AND that could also guarantee no rebuffering at all
var bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, config.abrBandWidthFactor, config.abrBandWidthUpFactor);
if (bestLevel >= 0) {
return bestLevel;
}
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].trace((bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty') + ", finding optimal quality level"); // not possible to get rid of rebuffering ... let's try to find level that will guarantee less than maxStarvationDelay of rebuffering
// if no matching level found, logic will return 0

var maxStarvationDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxStarvationDelay) : config.maxStarvationDelay;
var bwFactor = config.abrBandWidthFactor;
var bwUpFactor = config.abrBandWidthUpFactor;

if (!bufferStarvationDelay) {
// in case buffer is empty, let's check if previous fragment was loaded to perform a bitrate test
var bitrateTestDelay = this.bitrateTestDelay;

if (bitrateTestDelay) {
// if that is the case, then we need to adjust our max starvation delay using the maxLoadingDelay config value
// max video loading delay used in automatic start level selection:
// in that mode the ABR controller will ensure that video loading time (ie the time to fetch the first fragment at lowest quality level +
// the time to fetch the fragment at the appropriate quality level) is less than ```maxLoadingDelay```
// cap maxLoadingDelay and ensure it is not bigger than the 'bitrate test' frag duration
var maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].trace("bitrate test took " + Math.round(1000 * bitrateTestDelay) + "ms, set first fragment max fetchDuration to " + Math.round(1000 * maxStarvationDelay) + " ms"); // don't use conservative factor on bitrate test
bwFactor = bwUpFactor = 1;
}
}

bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay + maxStarvationDelay, bwFactor, bwUpFactor);
return Math.max(bestLevel, 0);
};
_proto.findBestLevel = function findBestLevel(currentBw, minAutoLevel, maxAutoLevel, maxFetchDuration, bwFactor, bwUpFactor) {
var _level$details;

var fragCurrent = this.fragCurrent,
partCurrent = this.partCurrent,
currentLevel = this.lastLoadedFragLevel;
var levels = this.hls.levels;
var level = levels[currentLevel];
var live = !!(level !== null && level !== void 0 && (_level$details = level.details) !== null && _level$details !== void 0 && _level$details.live);
var currentCodecSet = level === null || level === void 0 ? void 0 : level.codecSet;
var currentFragDuration = partCurrent ? partCurrent.duration : fragCurrent ? fragCurrent.duration : 0;

for (var i = maxAutoLevel; i >= minAutoLevel; i--) {
var levelInfo = levels[i];

if (!levelInfo || currentCodecSet && levelInfo.codecSet !== currentCodecSet) {
continue;
}

var levelDetails = levelInfo.details;
var avgDuration = (partCurrent ? levelDetails === null || levelDetails === void 0 ? void 0 : levelDetails.partTarget : levelDetails === null || levelDetails === void 0 ? void 0 : levelDetails.averagetargetduration) || currentFragDuration;
var adjustedbw = void 0; // follow algorithm captured from stagefright :
// https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp
// Pick the highest bandwidth stream below or equal to estimated bandwidth.
// consider only 80% of the available bandwidth, but if we are switching up,
// be even more conservative (70%) to avoid overestimating and immediately
// switching back.

if (i <= currentLevel) {
adjustedbw = bwFactor * currentBw;
} else {
adjustedbw = bwUpFactor * currentBw;
}

var bitrate = levels[i].maxBitrate;
var fetchDuration = bitrate * avgDuration / adjustedbw;
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].trace("level/adjustedbw/bitrate/avgDuration/maxFetchDuration/fetchDuration: " + i + "/" + Math.round(adjustedbw) + "/" + bitrate + "/" + avgDuration + "/" + maxFetchDuration + "/" + fetchDuration); // if adjusted bw is greater than level bitrate AND
if (adjustedbw > bitrate && ( // fragment fetchDuration unknown OR live stream OR fragment fetchDuration less than max allowed fetch duration, then this level matches
// we don't account for max Fetch Duration for live streams, this is to avoid switching down when near the edge of live sliding window ...
// special case to support startLevel = -1 (bitrateTest) on live streams : in that case we should not exit loop so that findBestLevel will return -1
!fetchDuration || live && !this.bitrateTestDelay || fetchDuration < maxFetchDuration)) {
// as we are looping from highest to lowest, this will return the best achievable quality level
return i;
}
} // not enough time budget even with quality level 0 ... rebuffering might happen


return -1;
};
_createClass(AbrController, [{
key: "nextAutoLevel",
get: function get() {
var forcedAutoLevel = this._nextAutoLevel;
var bwEstimator = this.bwEstimator; // in case next auto level has been forced, and bw not available or not reliable, return forced value

if (forcedAutoLevel !== -1 && (!bwEstimator || !bwEstimator.canEstimate())) {
return forcedAutoLevel;
} // compute next level using ABR logic


var nextABRAutoLevel = this.getNextABRAutoLevel(); // if forced auto level has been defined, use it to cap ABR computed quality level

if (forcedAutoLevel !== -1) {
nextABRAutoLevel = Math.min(forcedAutoLevel, nextABRAutoLevel);
}

return nextABRAutoLevel;
},
set: function set(nextLevel) {
this._nextAutoLevel = nextLevel;
}
}]);

return AbrController;
}();

/* harmony default export */ __webpack_exports__["default"] = (AbrController);

/***/ }),
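/*
 * A minimal standalone sketch of the level-selection rule that AbrController.findBestLevel
 * (above) implements, assuming plain numeric inputs instead of hls.js internals. It is for
 * illustration only and is not executed by this bundle; the 0.8 factor merely stands in for
 * the configurable abrBandWidthFactor / abrBandWidthUpFactor safety margins used above.
 *
 *   function pickLevel(levels, bwEstimate, timeBudgetSeconds, avgFragDuration) {
 *     // walk from the highest to the lowest quality, as findBestLevel does
 *     for (var i = levels.length - 1; i >= 0; i--) {
 *       var adjustedbw = 0.8 * bwEstimate; // only trust a fraction of the estimate
 *       var fetchDuration = (levels[i].maxBitrate * avgFragDuration) / adjustedbw;
 *       if (adjustedbw > levels[i].maxBitrate && fetchDuration < timeBudgetSeconds) {
 *         return i; // highest level that can be fetched before the buffer runs dry
 *       }
 *     }
 *     return -1; // rebuffering is likely even at the lowest level
 *   }
 */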
/***/ "./src/controller/audio-stream-controller.ts":
|
||
/*!***************************************************!*\
|
||
!*** ./src/controller/audio-stream-controller.ts ***!
|
||
\***************************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./base-stream-controller */ "./src/controller/base-stream-controller.ts");
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
|
||
/* harmony import */ var _fragment_tracker__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./fragment-tracker */ "./src/controller/fragment-tracker.ts");
|
||
/* harmony import */ var _types_level__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../types/level */ "./src/types/level.ts");
|
||
/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
|
||
/* harmony import */ var _loader_fragment__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../loader/fragment */ "./src/loader/fragment.ts");
|
||
/* harmony import */ var _demux_chunk_cache__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../demux/chunk-cache */ "./src/demux/chunk-cache.ts");
|
||
/* harmony import */ var _demux_transmuxer_interface__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ../demux/transmuxer-interface */ "./src/demux/transmuxer-interface.ts");
|
||
/* harmony import */ var _types_transmuxer__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ../types/transmuxer */ "./src/types/transmuxer.ts");
|
||
/* harmony import */ var _fragment_finders__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ./fragment-finders */ "./src/controller/fragment-finders.ts");
|
||
/* harmony import */ var _utils_discontinuities__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ../utils/discontinuities */ "./src/utils/discontinuities.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
|
||
|
||
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
|
||
|
||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
var TICK_INTERVAL = 100; // how often to tick in ms

var AudioStreamController = /*#__PURE__*/function (_BaseStreamController) {
_inheritsLoose(AudioStreamController, _BaseStreamController);

function AudioStreamController(hls, fragmentTracker) {
var _this;

_this = _BaseStreamController.call(this, hls, fragmentTracker, '[audio-stream-controller]') || this;
_this.videoBuffer = null;
_this.videoTrackCC = -1;
_this.waitingVideoCC = -1;
_this.audioSwitch = false;
_this.trackId = -1;
_this.waitingData = null;
_this.mainDetails = null;
_this.bufferFlushed = false;

_this._registerListeners();

return _this;
}

var _proto = AudioStreamController.prototype;

_proto.onHandlerDestroying = function onHandlerDestroying() {
this._unregisterListeners();

this.mainDetails = null;
};
|
||
|
||
_proto._registerListeners = function _registerListeners() {
|
||
var hls = this.hls;
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, this.onError, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_RESET, this.onBufferReset, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_CREATED, this.onBufferCreated, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_FLUSHED, this.onBufferFlushed, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].INIT_PTS_FOUND, this.onInitPtsFound, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
|
||
};
|
||
|
||
_proto._unregisterListeners = function _unregisterListeners() {
|
||
var hls = this.hls;
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, this.onError, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_RESET, this.onBufferReset, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_CREATED, this.onBufferCreated, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_FLUSHED, this.onBufferFlushed, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].INIT_PTS_FOUND, this.onInitPtsFound, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
|
||
} // INIT_PTS_FOUND is triggered when the video track parsed in the stream-controller has a new PTS value
|
||
;
|
||
|
||
_proto.onInitPtsFound = function onInitPtsFound(event, _ref) {
|
||
var frag = _ref.frag,
|
||
id = _ref.id,
|
||
initPTS = _ref.initPTS;
|
||
|
||
// Always update the new INIT PTS
|
||
// Can change due to a level switch
|
||
if (id === 'main') {
|
||
var cc = frag.cc;
|
||
this.initPTS[frag.cc] = initPTS;
|
||
this.log("InitPTS for cc: " + cc + " found from main: " + initPTS);
|
||
this.videoTrackCC = cc; // If we are waiting, tick immediately to unblock audio fragment transmuxing
|
||
|
||
if (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_INIT_PTS) {
|
||
this.tick();
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.startLoad = function startLoad(startPosition) {
|
||
if (!this.levels) {
|
||
this.startPosition = startPosition;
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].STOPPED;
|
||
return;
|
||
}
|
||
|
||
var lastCurrentTime = this.lastCurrentTime;
|
||
this.stopLoad();
|
||
this.setInterval(TICK_INTERVAL);
|
||
this.fragLoadError = 0;
|
||
|
||
if (lastCurrentTime > 0 && startPosition === -1) {
|
||
this.log("Override startPosition with lastCurrentTime @" + lastCurrentTime.toFixed(3));
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
} else {
|
||
this.loadedmetadata = false;
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_TRACK;
|
||
}
|
||
|
||
this.nextLoadPosition = this.startPosition = this.lastCurrentTime = startPosition;
|
||
this.tick();
|
||
};
|
||
|
||
_proto.doTick = function doTick() {
|
||
switch (this.state) {
|
||
case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE:
|
||
this.doTickIdle();
|
||
break;
|
||
|
||
case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_TRACK:
|
||
{
|
||
var _levels$trackId;
|
||
|
||
var levels = this.levels,
|
||
trackId = this.trackId;
|
||
var details = levels === null || levels === void 0 ? void 0 : (_levels$trackId = levels[trackId]) === null || _levels$trackId === void 0 ? void 0 : _levels$trackId.details;
|
||
|
||
if (details) {
|
||
if (this.waitForCdnTuneIn(details)) {
|
||
break;
|
||
}
|
||
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_INIT_PTS;
|
||
}
|
||
|
||
break;
|
||
}
|
||
|
||
case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].FRAG_LOADING_WAITING_RETRY:
|
||
{
|
||
var _this$media;
|
||
|
||
var now = performance.now();
|
||
var retryDate = this.retryDate; // if current time is greater than retryDate, or if media is seeking, switch back to IDLE state to retry loading
|
||
|
||
if (!retryDate || now >= retryDate || (_this$media = this.media) !== null && _this$media !== void 0 && _this$media.seeking) {
|
||
this.log('RetryDate reached, switch back to IDLE state');
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
}
|
||
|
||
break;
|
||
}
|
||
|
||
case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_INIT_PTS:
|
||
{
|
||
// Ensure we don't get stuck in the WAITING_INIT_PTS state if the waiting frag CC doesn't match any initPTS
|
||
var waitingData = this.waitingData;
|
||
|
||
if (waitingData) {
|
||
var frag = waitingData.frag,
|
||
part = waitingData.part,
|
||
cache = waitingData.cache,
|
||
complete = waitingData.complete;
|
||
|
||
if (this.initPTS[frag.cc] !== undefined) {
|
||
this.waitingData = null;
|
||
this.waitingVideoCC = -1;
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].FRAG_LOADING;
|
||
var payload = cache.flush();
|
||
var data = {
|
||
frag: frag,
|
||
part: part,
|
||
payload: payload,
|
||
networkDetails: null
|
||
};
|
||
|
||
this._handleFragmentLoadProgress(data);
|
||
|
||
if (complete) {
|
||
_BaseStreamController.prototype._handleFragmentLoadComplete.call(this, data);
|
||
}
|
||
} else if (this.videoTrackCC !== this.waitingVideoCC) {
|
||
// Drop waiting fragment if videoTrackCC has changed since waitingFragment was set and initPTS was not found
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_14__["logger"].log("Waiting fragment cc (" + frag.cc + ") cancelled because video is at cc " + this.videoTrackCC);
|
||
this.clearWaitingFragment();
|
||
} else {
|
||
// Drop waiting fragment if an earlier fragment is needed
|
||
var pos = this.getLoadPosition();
|
||
var bufferInfo = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(this.mediaBuffer, pos, this.config.maxBufferHole);
|
||
var waitingFragmentAtPosition = Object(_fragment_finders__WEBPACK_IMPORTED_MODULE_11__["fragmentWithinToleranceTest"])(bufferInfo.end, this.config.maxFragLookUpTolerance, frag);
|
||
|
||
if (waitingFragmentAtPosition < 0) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_14__["logger"].log("Waiting fragment cc (" + frag.cc + ") @ " + frag.start + " cancelled because another fragment at " + bufferInfo.end + " is needed");
|
||
this.clearWaitingFragment();
|
||
}
|
||
}
|
||
} else {
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
}
|
||
}
|
||
}
|
||
|
||
this.onTickEnd();
|
||
};
|
||
|
||
_proto.clearWaitingFragment = function clearWaitingFragment() {
|
||
var waitingData = this.waitingData;
|
||
|
||
if (waitingData) {
|
||
this.fragmentTracker.removeFragment(waitingData.frag);
|
||
this.waitingData = null;
|
||
this.waitingVideoCC = -1;
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
}
|
||
};
|
||
|
||
_proto.onTickEnd = function onTickEnd() {
|
||
var media = this.media;
|
||
|
||
if (!media || !media.readyState) {
|
||
// Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
|
||
return;
|
||
}
|
||
|
||
var mediaBuffer = this.mediaBuffer ? this.mediaBuffer : media;
|
||
var buffered = mediaBuffer.buffered;
|
||
|
||
if (!this.loadedmetadata && buffered.length) {
|
||
this.loadedmetadata = true;
|
||
}
|
||
|
||
this.lastCurrentTime = media.currentTime;
|
||
};
|
||
|
||
_proto.doTickIdle = function doTickIdle() {
|
||
var _frag$decryptdata, _frag$decryptdata2;
|
||
|
||
var hls = this.hls,
|
||
levels = this.levels,
|
||
media = this.media,
|
||
trackId = this.trackId;
|
||
var config = hls.config;
|
||
|
||
if (!levels || !levels[trackId]) {
|
||
return;
|
||
} // if video not attached AND
|
||
// start fragment already requested OR start frag prefetch not enabled
|
||
// exit loop
|
||
// => if media not attached but start frag prefetch is enabled and start frag not requested yet, we will not exit loop
|
||
|
||
|
||
if (!media && (this.startFragRequested || !config.startFragPrefetch)) {
|
||
return;
|
||
}
|
||
|
||
var levelInfo = levels[trackId];
|
||
var trackDetails = levelInfo.details;
|
||
|
||
if (!trackDetails || trackDetails.live && this.levelLastLoaded !== trackId || this.waitForCdnTuneIn(trackDetails)) {
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_TRACK;
|
||
return;
|
||
}
|
||
|
||
if (this.bufferFlushed) {
|
||
this.bufferFlushed = false;
|
||
this.afterBufferFlushed(this.mediaBuffer ? this.mediaBuffer : this.media, _loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO);
|
||
}
|
||
|
||
var bufferInfo = this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : this.media, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO);
|
||
|
||
if (bufferInfo === null) {
|
||
return;
|
||
}
|
||
|
||
var bufferLen = bufferInfo.len;
|
||
var maxBufLen = this.getMaxBufferLength();
|
||
var audioSwitch = this.audioSwitch; // if buffer length is less than maxBufLen try to load a new fragment
|
||
|
||
if (bufferLen >= maxBufLen && !audioSwitch) {
|
||
return;
|
||
}
|
||
|
||
if (!audioSwitch && this._streamEnded(bufferInfo, trackDetails)) {
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_EOS, {
|
||
type: 'audio'
|
||
});
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].ENDED;
|
||
return;
|
||
}
|
||
|
||
var fragments = trackDetails.fragments;
|
||
var start = fragments[0].start;
|
||
var targetBufferTime = bufferInfo.end;
|
||
|
||
if (audioSwitch) {
|
||
var pos = this.getLoadPosition();
|
||
targetBufferTime = pos; // if currentTime (pos) is less than alt audio playlist start time, it means that alt audio is ahead of currentTime
|
||
|
||
if (trackDetails.PTSKnown && pos < start) {
|
||
// if everything is buffered from pos to start or if audio buffer upfront, let's seek to start
|
||
if (bufferInfo.end > start || bufferInfo.nextStart) {
|
||
this.log('Alt audio track ahead of main track, seek to start of alt audio track');
|
||
media.currentTime = start + 0.05;
|
||
}
|
||
}
|
||
}
|
||
|
||
var frag = this.getNextFragment(targetBufferTime, trackDetails);
|
||
|
||
if (!frag) {
|
||
this.bufferFlushed = true;
|
||
return;
|
||
}
|
||
|
||
if (((_frag$decryptdata = frag.decryptdata) === null || _frag$decryptdata === void 0 ? void 0 : _frag$decryptdata.keyFormat) === 'identity' && !((_frag$decryptdata2 = frag.decryptdata) !== null && _frag$decryptdata2 !== void 0 && _frag$decryptdata2.key)) {
|
||
this.loadKey(frag, trackDetails);
|
||
} else {
|
||
this.loadFragment(frag, trackDetails, targetBufferTime);
|
||
}
|
||
};
|
||
|
||
_proto.getMaxBufferLength = function getMaxBufferLength() {
|
||
var maxConfigBuffer = _BaseStreamController.prototype.getMaxBufferLength.call(this);
|
||
|
||
var mainBufferInfo = this.getFwdBufferInfo(this.videoBuffer ? this.videoBuffer : this.media, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN);
|
||
|
||
if (mainBufferInfo === null) {
|
||
return maxConfigBuffer;
|
||
}
|
||
|
||
return Math.max(maxConfigBuffer, mainBufferInfo.len);
|
||
};
|
||
|
||
_proto.onMediaDetaching = function onMediaDetaching() {
|
||
this.videoBuffer = null;
|
||
|
||
_BaseStreamController.prototype.onMediaDetaching.call(this);
|
||
};
|
||
|
||
_proto.onAudioTracksUpdated = function onAudioTracksUpdated(event, _ref2) {
|
||
var audioTracks = _ref2.audioTracks;
|
||
this.resetTransmuxer();
|
||
this.levels = audioTracks.map(function (mediaPlaylist) {
|
||
return new _types_level__WEBPACK_IMPORTED_MODULE_5__["Level"](mediaPlaylist);
|
||
});
|
||
};
|
||
|
||
_proto.onAudioTrackSwitching = function onAudioTrackSwitching(event, data) {
|
||
// if any URL found on new audio track, it is an alternate audio track
|
||
var altAudio = !!data.url;
|
||
this.trackId = data.id;
|
||
var fragCurrent = this.fragCurrent;
|
||
|
||
if (fragCurrent !== null && fragCurrent !== void 0 && fragCurrent.loader) {
|
||
fragCurrent.loader.abort();
|
||
}
|
||
|
||
this.fragCurrent = null;
|
||
this.clearWaitingFragment(); // destroy useless transmuxer when switching audio to main
|
||
|
||
if (!altAudio) {
|
||
this.resetTransmuxer();
|
||
} else {
|
||
// switching to audio track, start timer if not already started
|
||
this.setInterval(TICK_INTERVAL);
|
||
} // should we switch tracks ?
|
||
|
||
|
||
if (altAudio) {
|
||
this.audioSwitch = true; // the main audio track is handled by the stream-controller; only act here when switching to an alt audio track
|
||
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
} else {
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].STOPPED;
|
||
}
|
||
|
||
this.tick();
|
||
};
|
||
|
||
_proto.onManifestLoading = function onManifestLoading() {
|
||
this.mainDetails = null;
|
||
this.fragmentTracker.removeAllFragments();
|
||
this.startPosition = this.lastCurrentTime = 0;
|
||
this.bufferFlushed = false;
|
||
};
|
||
|
||
_proto.onLevelLoaded = function onLevelLoaded(event, data) {
|
||
this.mainDetails = data.details;
|
||
};
|
||
|
||
_proto.onAudioTrackLoaded = function onAudioTrackLoaded(event, data) {
|
||
var _track$details;
|
||
|
||
var levels = this.levels;
|
||
var newDetails = data.details,
|
||
trackId = data.id;
|
||
|
||
if (!levels) {
|
||
this.warn("Audio tracks were reset while loading level " + trackId);
|
||
return;
|
||
}
|
||
|
||
this.log("Track " + trackId + " loaded [" + newDetails.startSN + "," + newDetails.endSN + "],duration:" + newDetails.totalduration);
|
||
var track = levels[trackId];
|
||
var sliding = 0;
|
||
|
||
if (newDetails.live || (_track$details = track.details) !== null && _track$details !== void 0 && _track$details.live) {
|
||
var mainDetails = this.mainDetails;
|
||
|
||
if (!newDetails.fragments[0]) {
|
||
newDetails.deltaUpdateFailed = true;
|
||
}
|
||
|
||
if (newDetails.deltaUpdateFailed || !mainDetails) {
|
||
return;
|
||
}
|
||
|
||
if (!track.details && newDetails.hasProgramDateTime && mainDetails.hasProgramDateTime) {
|
||
// Make sure our audio rendition is aligned with the "main" rendition, using
|
||
// pdt as our reference times.
|
||
Object(_utils_discontinuities__WEBPACK_IMPORTED_MODULE_12__["alignMediaPlaylistByPDT"])(newDetails, mainDetails);
|
||
sliding = newDetails.fragments[0].start;
|
||
} else {
|
||
sliding = this.alignPlaylists(newDetails, track.details);
|
||
}
|
||
}
|
||
|
||
track.details = newDetails;
|
||
this.levelLastLoaded = trackId; // compute start position if we are aligned with the main playlist
|
||
|
||
if (!this.startFragRequested && (this.mainDetails || !newDetails.live)) {
|
||
this.setStartPosition(track.details, sliding);
|
||
} // only switch back to IDLE state if we were waiting for track to start downloading a new fragment
|
||
|
||
|
||
if (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_TRACK && !this.waitForCdnTuneIn(newDetails)) {
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
} // trigger handler right now
|
||
|
||
|
||
this.tick();
|
||
};
|
||
|
||
_proto._handleFragmentLoadProgress = function _handleFragmentLoadProgress(data) {
|
||
var _frag$initSegment;
|
||
|
||
var frag = data.frag,
|
||
part = data.part,
|
||
payload = data.payload;
|
||
var config = this.config,
|
||
trackId = this.trackId,
|
||
levels = this.levels;
|
||
|
||
if (!levels) {
|
||
this.warn("Audio tracks were reset while fragment load was in progress. Fragment " + frag.sn + " of level " + frag.level + " will not be buffered");
|
||
return;
|
||
}
|
||
|
||
var track = levels[trackId];
|
||
console.assert(track, 'Audio track is defined on fragment load progress');
|
||
var details = track.details;
|
||
console.assert(details, 'Audio track details are defined on fragment load progress');
|
||
var audioCodec = config.defaultAudioCodec || track.audioCodec || 'mp4a.40.2';
|
||
var transmuxer = this.transmuxer;
|
||
|
||
if (!transmuxer) {
|
||
transmuxer = this.transmuxer = new _demux_transmuxer_interface__WEBPACK_IMPORTED_MODULE_9__["default"](this.hls, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO, this._handleTransmuxComplete.bind(this), this._handleTransmuxerFlush.bind(this));
|
||
} // Check if we have video initPTS
|
||
// If not we need to wait for it
|
||
|
||
|
||
var initPTS = this.initPTS[frag.cc];
|
||
var initSegmentData = (_frag$initSegment = frag.initSegment) === null || _frag$initSegment === void 0 ? void 0 : _frag$initSegment.data;
|
||
|
||
if (initPTS !== undefined) {
|
||
// this.log(`Transmuxing ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
|
||
// time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
|
||
var accurateTimeOffset = false; // details.PTSKnown || !details.live;
|
||
|
||
var partIndex = part ? part.index : -1;
|
||
var partial = partIndex !== -1;
|
||
var chunkMeta = new _types_transmuxer__WEBPACK_IMPORTED_MODULE_10__["ChunkMetadata"](frag.level, frag.sn, frag.stats.chunkCount, payload.byteLength, partIndex, partial);
|
||
transmuxer.push(payload, initSegmentData, audioCodec, '', frag, part, details.totalduration, accurateTimeOffset, chunkMeta, initPTS);
|
||
} else {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_14__["logger"].log("Unknown video PTS for cc " + frag.cc + ", waiting for video PTS before demuxing audio frag " + frag.sn + " of [" + details.startSN + " ," + details.endSN + "],track " + trackId);
|
||
|
||
var _this$waitingData = this.waitingData = this.waitingData || {
|
||
frag: frag,
|
||
part: part,
|
||
cache: new _demux_chunk_cache__WEBPACK_IMPORTED_MODULE_8__["default"](),
|
||
complete: false
|
||
},
|
||
cache = _this$waitingData.cache;
|
||
|
||
cache.push(new Uint8Array(payload));
|
||
this.waitingVideoCC = this.videoTrackCC;
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_INIT_PTS;
|
||
}
|
||
};
|
||
|
||
_proto._handleFragmentLoadComplete = function _handleFragmentLoadComplete(fragLoadedData) {
|
||
if (this.waitingData) {
|
||
this.waitingData.complete = true;
|
||
return;
|
||
}
|
||
|
||
_BaseStreamController.prototype._handleFragmentLoadComplete.call(this, fragLoadedData);
|
||
};
|
||
|
||
_proto.onBufferReset = function onBufferReset()
|
||
/* event: Events.BUFFER_RESET */
|
||
{
|
||
// reset reference to sourcebuffers
|
||
this.mediaBuffer = this.videoBuffer = null;
|
||
this.loadedmetadata = false;
|
||
};
|
||
|
||
_proto.onBufferCreated = function onBufferCreated(event, data) {
|
||
var audioTrack = data.tracks.audio;
|
||
|
||
if (audioTrack) {
|
||
this.mediaBuffer = audioTrack.buffer;
|
||
}
|
||
|
||
if (data.tracks.video) {
|
||
this.videoBuffer = data.tracks.video.buffer;
|
||
}
|
||
};
|
||
|
||
_proto.onFragBuffered = function onFragBuffered(event, data) {
|
||
var frag = data.frag,
|
||
part = data.part;
|
||
|
||
if (frag.type !== _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO) {
|
||
return;
|
||
}
|
||
|
||
if (this.fragContextChanged(frag)) {
|
||
// If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
|
||
// Avoid setting state back to IDLE or concluding the audio switch; otherwise, the switched-to track will not buffer
|
||
this.warn("Fragment " + frag.sn + (part ? ' p: ' + part.index : '') + " of level " + frag.level + " finished buffering, but was aborted. state: " + this.state + ", audioSwitch: " + this.audioSwitch);
|
||
return;
|
||
}
|
||
|
||
if (frag.sn !== 'initSegment') {
|
||
this.fragPrevious = frag;
|
||
|
||
if (this.audioSwitch) {
|
||
this.audioSwitch = false;
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACK_SWITCHED, {
|
||
id: this.trackId
|
||
});
|
||
}
|
||
}
|
||
|
||
this.fragBufferedComplete(frag, part);
|
||
};
|
||
|
||
_proto.onError = function onError(event, data) {
|
||
switch (data.details) {
|
||
case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].FRAG_LOAD_ERROR:
|
||
case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].FRAG_LOAD_TIMEOUT:
|
||
case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].KEY_LOAD_ERROR:
|
||
case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].KEY_LOAD_TIMEOUT:
|
||
// TODO: Skip fragments that do not belong to this.fragCurrent audio-group id
|
||
this.onFragmentOrKeyLoadError(_types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO, data);
|
||
break;
|
||
|
||
case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].AUDIO_TRACK_LOAD_ERROR:
|
||
case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].AUDIO_TRACK_LOAD_TIMEOUT:
|
||
// when in ERROR state, don't switch back to IDLE state in case a non-fatal error is received
|
||
if (this.state !== _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].ERROR && this.state !== _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].STOPPED) {
|
||
// if fatal error, stop processing, otherwise move to IDLE to retry loading
|
||
this.state = data.fatal ? _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].ERROR : _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
this.warn(data.details + " while loading frag, switching to " + this.state + " state");
|
||
}
|
||
|
||
break;
|
||
|
||
case _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].BUFFER_FULL_ERROR:
|
||
// if in appending state
|
||
if (data.parent === 'audio' && (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSING || this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSED)) {
|
||
var flushBuffer = true;
|
||
var bufferedInfo = this.getFwdBufferInfo(this.mediaBuffer, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO); // 0.5 : tolerance needed as some browsers stall playback before reaching buffered end
|
||
// reduce max buf len if current position is buffered
|
||
|
||
if (bufferedInfo && bufferedInfo.len > 0.5) {
|
||
flushBuffer = !this.reduceMaxBufferLength(bufferedInfo.len);
|
||
}
|
||
|
||
if (flushBuffer) {
|
||
// current position is not buffered, but browser is still complaining about buffer full error
|
||
// this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
|
||
// in that case flush the whole audio buffer to recover
|
||
this.warn('Buffer full error also media.currentTime is not buffered, flush audio buffer');
|
||
this.fragCurrent = null;
|
||
|
||
_BaseStreamController.prototype.flushMainBuffer.call(this, 0, Number.POSITIVE_INFINITY, 'audio');
|
||
}
|
||
|
||
this.resetLoadingState();
|
||
}
|
||
|
||
break;
|
||
|
||
default:
|
||
break;
|
||
}
|
||
};
|
||
|
||
_proto.onBufferFlushed = function onBufferFlushed(event, _ref3) {
|
||
var type = _ref3.type;
|
||
|
||
if (type === _loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO) {
|
||
this.bufferFlushed = true;
|
||
}
|
||
};
|
||
|
||
_proto._handleTransmuxComplete = function _handleTransmuxComplete(transmuxResult) {
|
||
var _id3$samples;
|
||
|
||
var id = 'audio';
|
||
var hls = this.hls;
|
||
var remuxResult = transmuxResult.remuxResult,
|
||
chunkMeta = transmuxResult.chunkMeta;
|
||
var context = this.getCurrentContext(chunkMeta);
|
||
|
||
if (!context) {
|
||
this.warn("The loading context changed while buffering fragment " + chunkMeta.sn + " of level " + chunkMeta.level + ". This chunk will not be buffered.");
|
||
this.resetLiveStartWhenNotLoaded(chunkMeta.level);
|
||
return;
|
||
}
|
||
|
||
var frag = context.frag,
|
||
part = context.part;
|
||
var audio = remuxResult.audio,
|
||
text = remuxResult.text,
|
||
id3 = remuxResult.id3,
|
||
initSegment = remuxResult.initSegment; // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
|
||
// If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
|
||
|
||
if (this.fragContextChanged(frag)) {
|
||
return;
|
||
}
|
||
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSING;
|
||
|
||
if (this.audioSwitch && audio) {
|
||
this.completeAudioSwitch();
|
||
}
|
||
|
||
if (initSegment !== null && initSegment !== void 0 && initSegment.tracks) {
|
||
this._bufferInitSegment(initSegment.tracks, frag, chunkMeta);
|
||
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_PARSING_INIT_SEGMENT, {
|
||
frag: frag,
|
||
id: id,
|
||
tracks: initSegment.tracks
|
||
}); // Only flush audio from old audio tracks when PTS is known on new audio track
|
||
}
|
||
|
||
if (audio) {
|
||
var startPTS = audio.startPTS,
|
||
endPTS = audio.endPTS,
|
||
startDTS = audio.startDTS,
|
||
endDTS = audio.endDTS;
|
||
|
||
if (part) {
|
||
part.elementaryStreams[_loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO] = {
|
||
startPTS: startPTS,
|
||
endPTS: endPTS,
|
||
startDTS: startDTS,
|
||
endDTS: endDTS
|
||
};
|
||
}
|
||
|
||
frag.setElementaryStreamInfo(_loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO, startPTS, endPTS, startDTS, endDTS);
|
||
this.bufferFragmentData(audio, frag, part, chunkMeta);
|
||
}
|
||
|
||
if (id3 !== null && id3 !== void 0 && (_id3$samples = id3.samples) !== null && _id3$samples !== void 0 && _id3$samples.length) {
|
||
var emittedID3 = _extends({
|
||
frag: frag,
|
||
id: id
|
||
}, id3);
|
||
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_PARSING_METADATA, emittedID3);
|
||
}
|
||
|
||
if (text) {
|
||
var emittedText = _extends({
|
||
frag: frag,
|
||
id: id
|
||
}, text);
|
||
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].FRAG_PARSING_USERDATA, emittedText);
|
||
}
|
||
};
|
||
|
||
_proto._bufferInitSegment = function _bufferInitSegment(tracks, frag, chunkMeta) {
|
||
if (this.state !== _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSING) {
|
||
return;
|
||
} // delete any video track found on audio transmuxer
|
||
|
||
|
||
if (tracks.video) {
|
||
delete tracks.video;
|
||
} // include levelCodec in audio and video tracks
|
||
|
||
|
||
var track = tracks.audio;
|
||
|
||
if (!track) {
|
||
return;
|
||
}
|
||
|
||
track.levelCodec = track.codec;
|
||
track.id = 'audio';
|
||
this.log("Init audio buffer, container:" + track.container + ", codecs[parsed]=[" + track.codec + "]");
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_CODECS, tracks);
|
||
var initSegment = track.initSegment;
|
||
|
||
if (initSegment !== null && initSegment !== void 0 && initSegment.byteLength) {
|
||
var segment = {
|
||
type: 'audio',
|
||
frag: frag,
|
||
part: null,
|
||
chunkMeta: chunkMeta,
|
||
parent: frag.type,
|
||
data: initSegment
|
||
};
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].BUFFER_APPENDING, segment);
|
||
} // trigger handler right now
|
||
|
||
|
||
this.tick();
|
||
};
|
||
|
||
_proto.loadFragment = function loadFragment(frag, trackDetails, targetBufferTime) {
|
||
// only load if fragment is not loaded or if in audio switch
|
||
var fragState = this.fragmentTracker.getState(frag);
|
||
this.fragCurrent = frag; // we force a frag loading in audio switch as fragment tracker might not have evicted previous frags in case of quick audio switch
|
||
|
||
if (this.audioSwitch || fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_4__["FragmentState"].NOT_LOADED || fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_4__["FragmentState"].PARTIAL) {
|
||
if (frag.sn === 'initSegment') {
|
||
this._loadInitSegment(frag);
|
||
} else if (trackDetails.live && !Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(this.initPTS[frag.cc])) {
|
||
this.log("Waiting for video PTS in continuity counter " + frag.cc + " of live stream before loading audio fragment " + frag.sn + " of level " + this.trackId);
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_INIT_PTS;
|
||
} else {
|
||
this.startFragRequested = true;
|
||
|
||
_BaseStreamController.prototype.loadFragment.call(this, frag, trackDetails, targetBufferTime);
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.completeAudioSwitch = function completeAudioSwitch() {
|
||
var hls = this.hls,
|
||
media = this.media,
|
||
trackId = this.trackId;
|
||
|
||
if (media) {
|
||
this.log('Switching audio track : flushing all audio');
|
||
|
||
_BaseStreamController.prototype.flushMainBuffer.call(this, 0, Number.POSITIVE_INFINITY, 'audio');
|
||
}
|
||
|
||
this.audioSwitch = false;
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].AUDIO_TRACK_SWITCHED, {
|
||
id: trackId
|
||
});
|
||
};
|
||
|
||
return AudioStreamController;
|
||
}(_base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["default"]);
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (AudioStreamController);
/***/ }),
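/*
 * A minimal sketch of the "wait for initPTS" handshake used by the audio stream controller
 * above, assuming simplified types (for illustration only; not part of the bundle). Audio
 * fragment bytes are parked until the main track reports an initPTS for the same continuity
 * counter (cc), then flushed to the transmuxer, mirroring the WAITING_INIT_PTS flow:
 *
 *   var initPTS = {};       // cc -> PTS reported by the main stream controller
 *   var waiting = null;     // { cc, chunks: [] } while audio transmuxing is blocked
 *
 *   function onAudioChunk(cc, bytes, transmux) {
 *     if (initPTS[cc] === undefined) {
 *       waiting = waiting || { cc: cc, chunks: [] };
 *       waiting.chunks.push(bytes);   // park the data, as ChunkCache.push() does above
 *       return;
 *     }
 *     transmux(bytes, initPTS[cc]);
 *   }
 *
 *   function onInitPtsFound(cc, pts, transmux) {
 *     initPTS[cc] = pts;
 *     if (waiting && waiting.cc === cc) {
 *       waiting.chunks.forEach(function (b) { transmux(b, pts); });
 *       waiting = null;
 *     }
 *   }
 */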
/***/ "./src/controller/audio-track-controller.ts":
|
||
/*!**************************************************!*\
|
||
!*** ./src/controller/audio-track-controller.ts ***!
|
||
\**************************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _base_playlist_controller__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./base-playlist-controller */ "./src/controller/base-playlist-controller.ts");
|
||
/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
|
||
|
||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
var AudioTrackController = /*#__PURE__*/function (_BasePlaylistControll) {
|
||
_inheritsLoose(AudioTrackController, _BasePlaylistControll);
|
||
|
||
function AudioTrackController(hls) {
|
||
var _this;
|
||
|
||
_this = _BasePlaylistControll.call(this, hls, '[audio-track-controller]') || this;
|
||
_this.tracks = [];
|
||
_this.groupId = null;
|
||
_this.tracksInGroup = [];
|
||
_this.trackId = -1;
|
||
_this.trackName = '';
|
||
_this.selectDefaultTrack = true;
|
||
|
||
_this.registerListeners();
|
||
|
||
return _this;
|
||
}
|
||
|
||
var _proto = AudioTrackController.prototype;
|
||
|
||
_proto.registerListeners = function registerListeners() {
|
||
var hls = this.hls;
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_SWITCHING, this.onLevelSwitching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, this.onError, this);
|
||
};
|
||
|
||
_proto.unregisterListeners = function unregisterListeners() {
|
||
var hls = this.hls;
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_SWITCHING, this.onLevelSwitching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, this.onError, this);
|
||
};
|
||
|
||
_proto.destroy = function destroy() {
|
||
this.unregisterListeners();
|
||
this.tracks.length = 0;
|
||
this.tracksInGroup.length = 0;
|
||
|
||
_BasePlaylistControll.prototype.destroy.call(this);
|
||
};
|
||
|
||
_proto.onManifestLoading = function onManifestLoading() {
|
||
this.tracks = [];
|
||
this.groupId = null;
|
||
this.tracksInGroup = [];
|
||
this.trackId = -1;
|
||
this.trackName = '';
|
||
this.selectDefaultTrack = true;
|
||
};
|
||
|
||
_proto.onManifestParsed = function onManifestParsed(event, data) {
|
||
this.tracks = data.audioTracks || [];
|
||
};
|
||
|
||
_proto.onAudioTrackLoaded = function onAudioTrackLoaded(event, data) {
|
||
var id = data.id,
|
||
details = data.details;
|
||
var currentTrack = this.tracksInGroup[id];
|
||
|
||
if (!currentTrack) {
|
||
this.warn("Invalid audio track id " + id);
|
||
return;
|
||
}
|
||
|
||
var curDetails = currentTrack.details;
|
||
currentTrack.details = data.details;
|
||
this.log("audioTrack " + id + " loaded [" + details.startSN + "-" + details.endSN + "]");
|
||
|
||
if (id === this.trackId) {
|
||
this.retryCount = 0;
|
||
this.playlistLoaded(id, data, curDetails);
|
||
}
|
||
};
|
||
|
||
_proto.onLevelLoading = function onLevelLoading(event, data) {
|
||
this.switchLevel(data.level);
|
||
};
|
||
|
||
_proto.onLevelSwitching = function onLevelSwitching(event, data) {
|
||
this.switchLevel(data.level);
|
||
};
|
||
|
||
_proto.switchLevel = function switchLevel(levelIndex) {
|
||
var levelInfo = this.hls.levels[levelIndex];
|
||
|
||
if (!(levelInfo !== null && levelInfo !== void 0 && levelInfo.audioGroupIds)) {
|
||
return;
|
||
}
|
||
|
||
var audioGroupId = levelInfo.audioGroupIds[levelInfo.urlId];
|
||
|
||
if (this.groupId !== audioGroupId) {
|
||
this.groupId = audioGroupId;
|
||
var audioTracks = this.tracks.filter(function (track) {
|
||
return !audioGroupId || track.groupId === audioGroupId;
|
||
}); // Disable selectDefaultTrack if there are no default tracks
|
||
|
||
if (this.selectDefaultTrack && !audioTracks.some(function (track) {
|
||
return track.default;
|
||
})) {
|
||
this.selectDefaultTrack = false;
|
||
}
|
||
|
||
this.tracksInGroup = audioTracks;
|
||
var audioTracksUpdated = {
|
||
audioTracks: audioTracks
|
||
};
|
||
this.log("Updating audio tracks, " + audioTracks.length + " track(s) found in \"" + audioGroupId + "\" group-id");
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].AUDIO_TRACKS_UPDATED, audioTracksUpdated);
|
||
this.selectInitialTrack();
|
||
}
|
||
};
|
||
|
||
_proto.onError = function onError(event, data) {
|
||
_BasePlaylistControll.prototype.onError.call(this, event, data);
|
||
|
||
if (data.fatal || !data.context) {
|
||
return;
|
||
}
|
||
|
||
if (data.context.type === _types_loader__WEBPACK_IMPORTED_MODULE_3__["PlaylistContextType"].AUDIO_TRACK && data.context.id === this.trackId && data.context.groupId === this.groupId) {
|
||
this.retryLoadingOrFail(data);
|
||
}
|
||
};
|
||
|
||
_proto.setAudioTrack = function setAudioTrack(newId) {
|
||
var tracks = this.tracksInGroup; // check if level idx is valid
|
||
|
||
if (newId < 0 || newId >= tracks.length) {
|
||
this.warn('Invalid id passed to audio-track controller');
|
||
return;
|
||
} // stopping live reloading timer if any
|
||
|
||
|
||
this.clearTimer();
|
||
var lastTrack = tracks[this.trackId];
|
||
this.log("Now switching to audio-track index " + newId);
|
||
var track = tracks[newId];
|
||
var id = track.id,
|
||
_track$groupId = track.groupId,
|
||
groupId = _track$groupId === void 0 ? '' : _track$groupId,
|
||
name = track.name,
|
||
type = track.type,
|
||
url = track.url;
|
||
this.trackId = newId;
|
||
this.trackName = name;
|
||
this.selectDefaultTrack = false;
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].AUDIO_TRACK_SWITCHING, {
|
||
id: id,
|
||
groupId: groupId,
|
||
name: name,
|
||
type: type,
|
||
url: url
|
||
}); // Do not reload track unless live
|
||
|
||
if (track.details && !track.details.live) {
|
||
return;
|
||
}
|
||
|
||
var hlsUrlParameters = this.switchParams(track.url, lastTrack === null || lastTrack === void 0 ? void 0 : lastTrack.details);
|
||
this.loadPlaylist(hlsUrlParameters);
|
||
};
|
||
|
||
_proto.selectInitialTrack = function selectInitialTrack() {
|
||
var audioTracks = this.tracksInGroup;
|
||
console.assert(audioTracks.length, 'Initial audio track should be selected when tracks are known');
|
||
var currentAudioTrackName = this.trackName;
|
||
var trackId = this.findTrackId(currentAudioTrackName) || this.findTrackId();
|
||
|
||
if (trackId !== -1) {
|
||
this.setAudioTrack(trackId);
|
||
} else {
|
||
this.warn("No track found for running audio group-ID: " + this.groupId);
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].AUDIO_TRACK_LOAD_ERROR,
|
||
fatal: true
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto.findTrackId = function findTrackId(name) {
|
||
var audioTracks = this.tracksInGroup;
|
||
|
||
for (var i = 0; i < audioTracks.length; i++) {
|
||
var track = audioTracks[i];
|
||
|
||
if (!this.selectDefaultTrack || track.default) {
|
||
if (!name || name === track.name) {
|
||
return track.id;
|
||
}
|
||
}
|
||
}
|
||
|
||
return -1;
|
||
};
|
||
|
||
_proto.loadPlaylist = function loadPlaylist(hlsUrlParameters) {
|
||
var audioTrack = this.tracksInGroup[this.trackId];
|
||
|
||
if (this.shouldLoadTrack(audioTrack)) {
|
||
var id = audioTrack.id;
|
||
var groupId = audioTrack.groupId;
|
||
var url = audioTrack.url;
|
||
|
||
if (hlsUrlParameters) {
|
||
try {
|
||
url = hlsUrlParameters.addDirectives(url);
|
||
} catch (error) {
|
||
this.warn("Could not construct new URL with HLS Delivery Directives: " + error);
|
||
}
|
||
} // track not retrieved yet, or live playlist we need to (re)load it
|
||
|
||
|
||
this.log("loading audio-track playlist for id: " + id);
|
||
this.clearTimer();
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].AUDIO_TRACK_LOADING, {
|
||
url: url,
|
||
id: id,
|
||
groupId: groupId,
|
||
deliveryDirectives: hlsUrlParameters || null
|
||
});
|
||
}
|
||
};
|
||
|
||
_createClass(AudioTrackController, [{
|
||
key: "audioTracks",
|
||
get: function get() {
|
||
return this.tracksInGroup;
|
||
}
|
||
}, {
|
||
key: "audioTrack",
|
||
get: function get() {
|
||
return this.trackId;
|
||
},
|
||
set: function set(newId) {
|
||
// If audio track is selected from API then don't choose from the manifest default track
|
||
this.selectDefaultTrack = false;
|
||
this.setAudioTrack(newId);
|
||
}
|
||
}]);
|
||
|
||
return AudioTrackController;
|
||
}(_base_playlist_controller__WEBPACK_IMPORTED_MODULE_2__["default"]);
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (AudioTrackController);
/***/ }),
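/*
 * A minimal sketch of the track-selection order used by AudioTrackController.findTrackId
 * above (illustration only, simplified types; not part of the bundle): when default-track
 * selection is active, only DEFAULT tracks are considered, and within that set a track
 * whose name matches the previously selected one wins:
 *
 *   function findTrackId(tracks, preferredName, selectDefault) {
 *     for (var i = 0; i < tracks.length; i++) {
 *       var track = tracks[i];
 *       if (selectDefault && !track.default) continue; // skip non-DEFAULT tracks
 *       if (!preferredName || preferredName === track.name) return track.id;
 *     }
 *     return -1; // caller raises a fatal AUDIO_TRACK_LOAD_ERROR when nothing matches
 *   }
 */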
/***/ "./src/controller/base-playlist-controller.ts":
|
||
/*!****************************************************!*\
|
||
!*** ./src/controller/base-playlist-controller.ts ***!
|
||
\****************************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return BasePlaylistController; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _types_level__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../types/level */ "./src/types/level.ts");
|
||
/* harmony import */ var _level_helper__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./level-helper */ "./src/controller/level-helper.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
var BasePlaylistController = /*#__PURE__*/function () {
|
||
function BasePlaylistController(hls, logPrefix) {
|
||
this.hls = void 0;
|
||
this.timer = -1;
|
||
this.canLoad = false;
|
||
this.retryCount = 0;
|
||
this.log = void 0;
|
||
this.warn = void 0;
|
||
this.log = _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].log.bind(_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"], logPrefix + ":");
|
||
this.warn = _utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn.bind(_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"], logPrefix + ":");
|
||
this.hls = hls;
|
||
}
|
||
|
||
var _proto = BasePlaylistController.prototype;
|
||
|
||
_proto.destroy = function destroy() {
this.clearTimer(); // @ts-ignore

this.hls = this.log = this.warn = null;
};

_proto.onError = function onError(event, data) {
if (data.fatal && data.type === _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorTypes"].NETWORK_ERROR) {
this.clearTimer();
}
};

_proto.clearTimer = function clearTimer() {
clearTimeout(this.timer);
this.timer = -1;
};

_proto.startLoad = function startLoad() {
this.canLoad = true;
this.retryCount = 0;
this.loadPlaylist();
};

_proto.stopLoad = function stopLoad() {
this.canLoad = false;
this.clearTimer();
};
|
||
|
||
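/*
  Builds LL-HLS delivery directives for a rendition switch by matching the
  target playlist URI against the previous playlist's rendition reports
  (LAST-MSN / LAST-PART). In low-latency mode the reported part index is
  advanced by one when the playlist age suggests the report is already a part
  behind. Returns undefined when no matching report is found.
*/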
_proto.switchParams = function switchParams(playlistUri, previous) {
|
||
var renditionReports = previous === null || previous === void 0 ? void 0 : previous.renditionReports;
|
||
|
||
if (renditionReports) {
|
||
for (var i = 0; i < renditionReports.length; i++) {
|
||
var attr = renditionReports[i];
|
||
var uri = '' + attr.URI;
|
||
|
||
if (uri === playlistUri.substr(-uri.length)) {
|
||
var msn = parseInt(attr['LAST-MSN']);
|
||
var part = parseInt(attr['LAST-PART']);
|
||
|
||
if (previous && this.hls.config.lowLatencyMode) {
|
||
var currentGoal = Math.min(previous.age - previous.partTarget, previous.targetduration);
|
||
|
||
if (part !== undefined && currentGoal > previous.partTarget) {
|
||
part += 1;
|
||
}
|
||
}
|
||
|
||
if (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(msn)) {
|
||
return new _types_level__WEBPACK_IMPORTED_MODULE_1__["HlsUrlParameters"](msn, Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(part) ? part : undefined, _types_level__WEBPACK_IMPORTED_MODULE_1__["HlsSkip"].No);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.loadPlaylist = function loadPlaylist(hlsUrlParameters) {};
|
||
|
||
_proto.shouldLoadTrack = function shouldLoadTrack(track) {
|
||
return this.canLoad && track && !!track.url && (!track.details || track.details.live);
|
||
};
|
||
|
||
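/*
  Common handling for a loaded media playlist:
  - stamps details.advancedDateTime and merges live details into the previous
    ones so fragment start times stay aligned across refreshes;
  - when the playlist supports blocking reload, computes the next msn/part
    delivery directives, including the CDN tune-in adjustment derived from the
    Age header, and reloads immediately where appropriate;
  - otherwise arms a timer to reload the live playlist after
    computeReloadInterval(); non-live playlists just clear the timer.
*/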
_proto.playlistLoaded = function playlistLoaded(index, data, previousDetails) {
|
||
var _this = this;
|
||
|
||
var details = data.details,
|
||
stats = data.stats; // Set last updated date-time
|
||
|
||
var elapsed = stats.loading.end ? Math.max(0, self.performance.now() - stats.loading.end) : 0;
|
||
details.advancedDateTime = Date.now() - elapsed; // if current playlist is a live playlist, arm a timer to reload it
|
||
|
||
if (details.live || previousDetails !== null && previousDetails !== void 0 && previousDetails.live) {
|
||
details.reloaded(previousDetails);
|
||
|
||
if (previousDetails) {
|
||
this.log("live playlist " + index + " " + (details.advanced ? 'REFRESHED ' + details.lastPartSn + '-' + details.lastPartIndex : 'MISSED'));
|
||
} // Merge live playlists to adjust fragment starts and fill in delta playlist skipped segments
|
||
|
||
|
||
if (previousDetails && details.fragments.length > 0) {
|
||
Object(_level_helper__WEBPACK_IMPORTED_MODULE_2__["mergeDetails"])(previousDetails, details);
|
||
}
|
||
|
||
if (!this.canLoad || !details.live) {
|
||
return;
|
||
}
|
||
|
||
var deliveryDirectives;
|
||
var msn = undefined;
|
||
var part = undefined;
|
||
|
||
if (details.canBlockReload && details.endSN && details.advanced) {
|
||
// Load level with LL-HLS delivery directives
|
||
var lowLatencyMode = this.hls.config.lowLatencyMode;
|
||
var lastPartSn = details.lastPartSn;
|
||
var endSn = details.endSN;
|
||
var lastPartIndex = details.lastPartIndex;
|
||
var hasParts = lastPartIndex !== -1;
|
||
var lastPart = lastPartSn === endSn; // When low latency mode is disabled, we'll skip part requests once the last part index is found
|
||
|
||
var nextSnStartIndex = lowLatencyMode ? 0 : lastPartIndex;
|
||
|
||
if (hasParts) {
|
||
msn = lastPart ? endSn + 1 : lastPartSn;
|
||
part = lastPart ? nextSnStartIndex : lastPartIndex + 1;
|
||
} else {
|
||
msn = endSn + 1;
|
||
} // Low-Latency CDN Tune-in: "age" header and time since load indicate we're behind by more than one part
|
||
// Update directives to obtain the Playlist that has the estimated additional duration of media
|
||
|
||
|
||
var lastAdvanced = details.age;
|
||
var cdnAge = lastAdvanced + details.ageHeader;
|
||
var currentGoal = Math.min(cdnAge - details.partTarget, details.targetduration * 1.5);
|
||
|
||
if (currentGoal > 0) {
|
||
if (previousDetails && currentGoal > previousDetails.tuneInGoal) {
|
||
// If we attempted to get the next or latest playlist update, but currentGoal increased,
|
||
// then we either can't catch up, or the "age" header cannot be trusted.
|
||
this.warn("CDN Tune-in goal increased from: " + previousDetails.tuneInGoal + " to: " + currentGoal + " with playlist age: " + details.age);
|
||
currentGoal = 0;
|
||
} else {
|
||
var segments = Math.floor(currentGoal / details.targetduration);
|
||
msn += segments;
|
||
|
||
if (part !== undefined) {
|
||
var parts = Math.round(currentGoal % details.targetduration / details.partTarget);
|
||
part += parts;
|
||
}
|
||
|
||
this.log("CDN Tune-in age: " + details.ageHeader + "s last advanced " + lastAdvanced.toFixed(2) + "s goal: " + currentGoal + " skip sn " + segments + " to part " + part);
|
||
}
|
||
|
||
details.tuneInGoal = currentGoal;
|
||
}
|
||
|
||
deliveryDirectives = this.getDeliveryDirectives(details, data.deliveryDirectives, msn, part);
|
||
|
||
if (lowLatencyMode || !lastPart) {
|
||
this.loadPlaylist(deliveryDirectives);
|
||
return;
|
||
}
|
||
} else {
|
||
deliveryDirectives = this.getDeliveryDirectives(details, data.deliveryDirectives, msn, part);
|
||
}
|
||
|
||
var reloadInterval = Object(_level_helper__WEBPACK_IMPORTED_MODULE_2__["computeReloadInterval"])(details, stats);
|
||
|
||
if (msn !== undefined && details.canBlockReload) {
|
||
reloadInterval -= details.partTarget || 1;
|
||
}
|
||
|
||
this.log("reload live playlist " + index + " in " + Math.round(reloadInterval) + " ms");
|
||
this.timer = self.setTimeout(function () {
|
||
return _this.loadPlaylist(deliveryDirectives);
|
||
}, reloadInterval);
|
||
} else {
|
||
this.clearTimer();
|
||
}
|
||
};
|
||
|
||
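/*
  Wraps msn/part into an HlsUrlParameters object together with the computed
  skip directive. If the previous request asked for a delta playlist and that
  delta update failed, the previous msn/part are reused and skipping is
  disabled so that a full playlist is requested instead.
*/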
_proto.getDeliveryDirectives = function getDeliveryDirectives(details, previousDeliveryDirectives, msn, part) {
|
||
var skip = Object(_types_level__WEBPACK_IMPORTED_MODULE_1__["getSkipValue"])(details, msn);
|
||
|
||
if (previousDeliveryDirectives !== null && previousDeliveryDirectives !== void 0 && previousDeliveryDirectives.skip && details.deltaUpdateFailed) {
|
||
msn = previousDeliveryDirectives.msn;
|
||
part = previousDeliveryDirectives.part;
|
||
skip = _types_level__WEBPACK_IMPORTED_MODULE_1__["HlsSkip"].No;
|
||
}
|
||
|
||
return new _types_level__WEBPACK_IMPORTED_MODULE_1__["HlsUrlParameters"](msn, part, skip);
|
||
};
|
||
|
||
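/*
  Retry policy shared by the playlist controllers. While retryCount is below
  config.levelLoadingMaxRetry the load is rescheduled and true is returned;
  otherwise the error is promoted to fatal. LL-HLS request timeouts retry
  immediately; everything else uses exponential backoff capped by
  levelLoadingMaxRetryTimeout, e.g. with a 1000 ms retry delay:
  min(2^n * 1000, cap) -> 2 s, 4 s, 8 s, ... for retry attempt n.
*/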
_proto.retryLoadingOrFail = function retryLoadingOrFail(errorEvent) {
|
||
var _this2 = this;
|
||
|
||
var config = this.hls.config;
|
||
var retry = this.retryCount < config.levelLoadingMaxRetry;
|
||
|
||
if (retry) {
|
||
var _errorEvent$context;
|
||
|
||
this.retryCount++;
|
||
|
||
if (errorEvent.details.indexOf('LoadTimeOut') > -1 && (_errorEvent$context = errorEvent.context) !== null && _errorEvent$context !== void 0 && _errorEvent$context.deliveryDirectives) {
|
||
// The LL-HLS request already timed out so retry immediately
|
||
this.warn("retry playlist loading #" + this.retryCount + " after \"" + errorEvent.details + "\"");
|
||
this.loadPlaylist();
|
||
} else {
|
||
// exponential backoff capped to max retry timeout
|
||
var delay = Math.min(Math.pow(2, this.retryCount) * config.levelLoadingRetryDelay, config.levelLoadingMaxRetryTimeout); // Schedule level/track reload
|
||
|
||
this.timer = self.setTimeout(function () {
|
||
return _this2.loadPlaylist();
|
||
}, delay);
|
||
this.warn("retry playlist loading #" + this.retryCount + " in " + delay + " ms after \"" + errorEvent.details + "\"");
|
||
}
|
||
} else {
|
||
this.warn("cannot recover from error \"" + errorEvent.details + "\""); // stopping live reloading timer if any
|
||
|
||
this.clearTimer(); // switch error to fatal
|
||
|
||
errorEvent.fatal = true;
|
||
}
|
||
|
||
return retry;
|
||
};
|
||
|
||
return BasePlaylistController;
|
||
}();
|
||
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/controller/base-stream-controller.ts":
|
||
/*!**************************************************!*\
|
||
!*** ./src/controller/base-stream-controller.ts ***!
|
||
\**************************************************/
|
||
/*! exports provided: State, default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "State", function() { return State; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return BaseStreamController; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _task_loop__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../task-loop */ "./src/task-loop.ts");
|
||
/* harmony import */ var _fragment_tracker__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./fragment-tracker */ "./src/controller/fragment-tracker.ts");
|
||
/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _types_transmuxer__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../types/transmuxer */ "./src/types/transmuxer.ts");
|
||
/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
|
||
/* harmony import */ var _utils_discontinuities__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ../utils/discontinuities */ "./src/utils/discontinuities.ts");
|
||
/* harmony import */ var _fragment_finders__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ./fragment-finders */ "./src/controller/fragment-finders.ts");
|
||
/* harmony import */ var _level_helper__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ./level-helper */ "./src/controller/level-helper.ts");
|
||
/* harmony import */ var _loader_fragment_loader__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ../loader/fragment-loader */ "./src/loader/fragment-loader.ts");
|
||
/* harmony import */ var _crypt_decrypter__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(/*! ../crypt/decrypter */ "./src/crypt/decrypter.ts");
|
||
/* harmony import */ var _utils_time_ranges__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(/*! ../utils/time-ranges */ "./src/utils/time-ranges.ts");
|
||
/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_15__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
|
||
|
||
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
|
||
|
||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
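/*
  Finite states shared by the stream controllers that extend
  BaseStreamController. All transitions go through the `state` setter defined
  at the bottom of the class, which logs every change.
*/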
var State = {
STOPPED: 'STOPPED',
IDLE: 'IDLE',
KEY_LOADING: 'KEY_LOADING',
FRAG_LOADING: 'FRAG_LOADING',
FRAG_LOADING_WAITING_RETRY: 'FRAG_LOADING_WAITING_RETRY',
WAITING_TRACK: 'WAITING_TRACK',
PARSING: 'PARSING',
PARSED: 'PARSED',
BACKTRACKING: 'BACKTRACKING',
ENDED: 'ENDED',
ERROR: 'ERROR',
WAITING_INIT_PTS: 'WAITING_INIT_PTS',
WAITING_LEVEL: 'WAITING_LEVEL'
};
|
||
|
||
var BaseStreamController = /*#__PURE__*/function (_TaskLoop) {
|
||
_inheritsLoose(BaseStreamController, _TaskLoop);
|
||
|
||
function BaseStreamController(hls, fragmentTracker, logPrefix) {
|
||
var _this;
|
||
|
||
_this = _TaskLoop.call(this) || this;
|
||
_this.hls = void 0;
|
||
_this.fragPrevious = null;
|
||
_this.fragCurrent = null;
|
||
_this.fragmentTracker = void 0;
|
||
_this.transmuxer = null;
|
||
_this._state = State.STOPPED;
|
||
_this.media = void 0;
|
||
_this.mediaBuffer = void 0;
|
||
_this.config = void 0;
|
||
_this.bitrateTest = false;
|
||
_this.lastCurrentTime = 0;
|
||
_this.nextLoadPosition = 0;
|
||
_this.startPosition = 0;
|
||
_this.loadedmetadata = false;
|
||
_this.fragLoadError = 0;
|
||
_this.retryDate = 0;
|
||
_this.levels = null;
|
||
_this.fragmentLoader = void 0;
|
||
_this.levelLastLoaded = null;
|
||
_this.startFragRequested = false;
|
||
_this.decrypter = void 0;
|
||
_this.initPTS = [];
|
||
_this.onvseeking = null;
|
||
_this.onvended = null;
|
||
_this.logPrefix = '';
|
||
_this.log = void 0;
|
||
_this.warn = void 0;
|
||
_this.logPrefix = logPrefix;
|
||
_this.log = _utils_logger__WEBPACK_IMPORTED_MODULE_4__["logger"].log.bind(_utils_logger__WEBPACK_IMPORTED_MODULE_4__["logger"], logPrefix + ":");
|
||
_this.warn = _utils_logger__WEBPACK_IMPORTED_MODULE_4__["logger"].warn.bind(_utils_logger__WEBPACK_IMPORTED_MODULE_4__["logger"], logPrefix + ":");
|
||
_this.hls = hls;
|
||
_this.fragmentLoader = new _loader_fragment_loader__WEBPACK_IMPORTED_MODULE_12__["default"](hls.config);
|
||
_this.fragmentTracker = fragmentTracker;
|
||
_this.config = hls.config;
|
||
_this.decrypter = new _crypt_decrypter__WEBPACK_IMPORTED_MODULE_13__["default"](hls, hls.config);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].KEY_LOADED, _this.onKeyLoaded, _assertThisInitialized(_this));
|
||
return _this;
|
||
}
|
||
|
||
var _proto = BaseStreamController.prototype;
|
||
|
||
_proto.doTick = function doTick() {
|
||
this.onTickEnd();
|
||
};
|
||
|
||
_proto.onTickEnd = function onTickEnd() {} // eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||
;
|
||
|
||
_proto.startLoad = function startLoad(startPosition) {};
|
||
|
||
_proto.stopLoad = function stopLoad() {
|
||
this.fragmentLoader.abort();
|
||
var frag = this.fragCurrent;
|
||
|
||
if (frag) {
|
||
this.fragmentTracker.removeFragment(frag);
|
||
}
|
||
|
||
this.resetTransmuxer();
|
||
this.fragCurrent = null;
|
||
this.fragPrevious = null;
|
||
this.clearInterval();
|
||
this.clearNextTick();
|
||
this.state = State.STOPPED;
|
||
};
|
||
|
||
_proto._streamEnded = function _streamEnded(bufferInfo, levelDetails) {
|
||
var fragCurrent = this.fragCurrent,
|
||
fragmentTracker = this.fragmentTracker; // we just got done loading the final fragment and there is no other buffered range after ...
|
||
// rationale is that in case there are any buffered ranges after, it means that there are unbuffered portions in between
|
||
// so we should not switch to ENDED in that case, to be able to buffer them
|
||
|
||
if (!levelDetails.live && fragCurrent && // NOTE: Because of the way parts are currently parsed/represented in the playlist, we can end up
|
||
// in situations where the current fragment is actually greater than levelDetails.endSN. While
|
||
// this feels like the "wrong place" to account for that, this is a narrower/safer change than
|
||
// updating e.g. M3U8Parser::parseLevelPlaylist().
|
||
fragCurrent.sn >= levelDetails.endSN && !bufferInfo.nextStart) {
|
||
var partList = levelDetails.partList; // Since the last part isn't guaranteed to correspond to fragCurrent for ll-hls, check instead if the last part is buffered.
|
||
|
||
if (partList !== null && partList !== void 0 && partList.length) {
|
||
var lastPart = partList[partList.length - 1]; // Checking the midpoint of the part for potential margin of error and related issues.
|
||
// NOTE: Technically I believe parts could yield content that is < the computed duration (including potentially a duration of 0)
|
||
// and still be spec-compliant, so there may still be edge cases here. Likewise, there could be issues in end of stream
|
||
// part mismatches for independent audio and video playlists/segments.
|
||
|
||
var lastPartBuffered = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].isBuffered(this.media, lastPart.start + lastPart.duration / 2);
|
||
return lastPartBuffered;
|
||
}
|
||
|
||
var fragState = fragmentTracker.getState(fragCurrent);
|
||
return fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_2__["FragmentState"].PARTIAL || fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_2__["FragmentState"].OK;
|
||
}
|
||
|
||
return false;
|
||
};
|
||
|
||
_proto.onMediaAttached = function onMediaAttached(event, data) {
|
||
var media = this.media = this.mediaBuffer = data.media;
|
||
this.onvseeking = this.onMediaSeeking.bind(this);
|
||
this.onvended = this.onMediaEnded.bind(this);
|
||
media.addEventListener('seeking', this.onvseeking);
|
||
media.addEventListener('ended', this.onvended);
|
||
var config = this.config;
|
||
|
||
if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
|
||
this.startLoad(config.startPosition);
|
||
}
|
||
};
|
||
|
||
_proto.onMediaDetaching = function onMediaDetaching() {
|
||
var media = this.media;
|
||
|
||
if (media !== null && media !== void 0 && media.ended) {
|
||
this.log('MSE detaching and video ended, reset startPosition');
|
||
this.startPosition = this.lastCurrentTime = 0;
|
||
} // remove video listeners
|
||
|
||
|
||
if (media) {
|
||
media.removeEventListener('seeking', this.onvseeking);
|
||
media.removeEventListener('ended', this.onvended);
|
||
this.onvseeking = this.onvended = null;
|
||
}
|
||
|
||
this.media = this.mediaBuffer = null;
|
||
this.loadedmetadata = false;
|
||
this.fragmentTracker.removeAllFragments();
|
||
this.stopLoad();
|
||
};
|
||
|
||
_proto.onMediaSeeking = function onMediaSeeking() {
|
||
var config = this.config,
|
||
fragCurrent = this.fragCurrent,
|
||
media = this.media,
|
||
mediaBuffer = this.mediaBuffer,
|
||
state = this.state;
|
||
var currentTime = media ? media.currentTime : 0;
|
||
var bufferInfo = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(mediaBuffer || media, currentTime, config.maxBufferHole);
|
||
this.log("media seeking to " + (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(currentTime) ? currentTime.toFixed(3) : currentTime) + ", state: " + state);
|
||
|
||
if (state === State.ENDED) {
|
||
this.resetLoadingState();
|
||
} else if (fragCurrent && !bufferInfo.len) {
|
||
// check if we are seeking to an unbuffered area AND if frag loading is in progress
|
||
var tolerance = config.maxFragLookUpTolerance;
|
||
var fragStartOffset = fragCurrent.start - tolerance;
|
||
var fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
|
||
var pastFragment = currentTime > fragEndOffset; // check if the seek position is past current fragment, and if so abort loading
|
||
|
||
if (currentTime < fragStartOffset || pastFragment) {
|
||
if (pastFragment && fragCurrent.loader) {
|
||
this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
|
||
fragCurrent.loader.abort();
|
||
}
|
||
|
||
this.resetLoadingState();
|
||
}
|
||
}
|
||
|
||
if (media) {
|
||
this.lastCurrentTime = currentTime;
|
||
} // in case seeking occurs although no media is buffered, adjust startPosition and nextLoadPosition to seek target
|
||
|
||
|
||
if (!this.loadedmetadata && !bufferInfo.len) {
|
||
this.nextLoadPosition = this.startPosition = currentTime;
|
||
} // Async tick to speed up processing
|
||
|
||
|
||
this.tickImmediate();
|
||
};
|
||
|
||
_proto.onMediaEnded = function onMediaEnded() {
|
||
// reset startPosition and lastCurrentTime to restart playback @ stream beginning
|
||
this.startPosition = this.lastCurrentTime = 0;
|
||
};
|
||
|
||
_proto.onKeyLoaded = function onKeyLoaded(event, data) {
|
||
if (this.state !== State.KEY_LOADING || data.frag !== this.fragCurrent || !this.levels) {
|
||
return;
|
||
}
|
||
|
||
this.state = State.IDLE;
|
||
var levelDetails = this.levels[data.frag.level].details;
|
||
|
||
if (levelDetails) {
|
||
this.loadFragment(data.frag, levelDetails, data.frag.start);
|
||
}
|
||
};
|
||
|
||
_proto.onHandlerDestroying = function onHandlerDestroying() {
|
||
this.stopLoad();
|
||
|
||
_TaskLoop.prototype.onHandlerDestroying.call(this);
|
||
};
|
||
|
||
_proto.onHandlerDestroyed = function onHandlerDestroyed() {
|
||
this.state = State.STOPPED;
|
||
this.hls.off(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].KEY_LOADED, this.onKeyLoaded, this);
|
||
|
||
if (this.fragmentLoader) {
|
||
this.fragmentLoader.destroy();
|
||
}
|
||
|
||
if (this.decrypter) {
|
||
this.decrypter.destroy();
|
||
}
|
||
|
||
this.hls = this.log = this.warn = this.decrypter = this.fragmentLoader = this.fragmentTracker = null;
|
||
|
||
_TaskLoop.prototype.onHandlerDestroyed.call(this);
|
||
};
|
||
|
||
_proto.loadKey = function loadKey(frag, details) {
|
||
this.log("Loading key for " + frag.sn + " of [" + details.startSN + "-" + details.endSN + "], " + (this.logPrefix === '[stream-controller]' ? 'level' : 'track') + " " + frag.level);
|
||
this.state = State.KEY_LOADING;
|
||
this.fragCurrent = frag;
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].KEY_LOADING, {
|
||
frag: frag
|
||
});
|
||
};
|
||
|
||
_proto.loadFragment = function loadFragment(frag, levelDetails, targetBufferTime) {
|
||
this._loadFragForPlayback(frag, levelDetails, targetBufferTime);
|
||
};
|
||
|
||
_proto._loadFragForPlayback = function _loadFragForPlayback(frag, levelDetails, targetBufferTime) {
|
||
var _this2 = this;
|
||
|
||
var progressCallback = function progressCallback(data) {
|
||
if (_this2.fragContextChanged(frag)) {
|
||
_this2.warn("Fragment " + frag.sn + (data.part ? ' p: ' + data.part.index : '') + " of level " + frag.level + " was dropped during download.");
|
||
|
||
_this2.fragmentTracker.removeFragment(frag);
|
||
|
||
return;
|
||
}
|
||
|
||
frag.stats.chunkCount++;
|
||
|
||
_this2._handleFragmentLoadProgress(data);
|
||
};
|
||
|
||
this._doFragLoad(frag, levelDetails, targetBufferTime, progressCallback).then(function (data) {
|
||
if (!data) {
|
||
// if we're here we probably needed to backtrack or are waiting for more parts
|
||
return;
|
||
}
|
||
|
||
_this2.fragLoadError = 0;
|
||
var state = _this2.state;
|
||
|
||
if (_this2.fragContextChanged(frag)) {
|
||
if (state === State.FRAG_LOADING || state === State.BACKTRACKING || !_this2.fragCurrent && state === State.PARSING) {
|
||
_this2.fragmentTracker.removeFragment(frag);
|
||
|
||
_this2.state = State.IDLE;
|
||
}
|
||
|
||
return;
|
||
}
|
||
|
||
if ('payload' in data) {
|
||
_this2.log("Loaded fragment " + frag.sn + " of level " + frag.level);
|
||
|
||
_this2.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_LOADED, data); // Tracker backtrack must be called after onFragLoaded to update the fragment entity state to BACKTRACKED
|
||
// This happens after handleTransmuxComplete when the worker or progressive is disabled
|
||
|
||
|
||
if (_this2.state === State.BACKTRACKING) {
|
||
_this2.fragmentTracker.backtrack(frag, data);
|
||
|
||
_this2.resetFragmentLoading(frag);
|
||
|
||
return;
|
||
}
|
||
} // Pass through the whole payload; controllers not implementing progressive loading receive data from this callback
|
||
|
||
|
||
_this2._handleFragmentLoadComplete(data);
|
||
}).catch(function (reason) {
|
||
_this2.warn(reason);
|
||
|
||
_this2.resetFragmentLoading(frag);
|
||
});
|
||
};
|
||
|
||
_proto.flushMainBuffer = function flushMainBuffer(startOffset, endOffset, type) {
|
||
if (type === void 0) {
|
||
type = null;
|
||
}
|
||
|
||
if (!(startOffset - endOffset)) {
|
||
return;
|
||
} // When alternate audio is playing, the audio-stream-controller is responsible for the audio buffer. Otherwise,
|
||
// passing a null type flushes both buffers
|
||
|
||
|
||
var flushScope = {
|
||
startOffset: startOffset,
|
||
endOffset: endOffset,
|
||
type: type
|
||
}; // Reset load errors on flush
|
||
|
||
this.fragLoadError = 0;
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].BUFFER_FLUSHING, flushScope);
|
||
};
|
||
|
||
_proto._loadInitSegment = function _loadInitSegment(frag) {
|
||
var _this3 = this;
|
||
|
||
this._doFragLoad(frag).then(function (data) {
|
||
if (!data || _this3.fragContextChanged(frag) || !_this3.levels) {
|
||
throw new Error('init load aborted');
|
||
}
|
||
|
||
return data;
|
||
}).then(function (data) {
|
||
var hls = _this3.hls;
|
||
var payload = data.payload;
|
||
var decryptData = frag.decryptdata; // check to see if the payload needs to be decrypted
|
||
|
||
if (payload && payload.byteLength > 0 && decryptData && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
|
||
var startTime = self.performance.now(); // decrypt the subtitles
|
||
|
||
return _this3.decrypter.webCryptoDecrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).then(function (decryptedData) {
|
||
var endTime = self.performance.now();
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_DECRYPTED, {
|
||
frag: frag,
|
||
payload: decryptedData,
|
||
stats: {
|
||
tstart: startTime,
|
||
tdecrypt: endTime
|
||
}
|
||
});
|
||
data.payload = decryptedData;
|
||
return data;
|
||
});
|
||
}
|
||
|
||
return data;
|
||
}).then(function (data) {
|
||
var fragCurrent = _this3.fragCurrent,
|
||
hls = _this3.hls,
|
||
levels = _this3.levels;
|
||
|
||
if (!levels) {
|
||
throw new Error('init load aborted, missing levels');
|
||
}
|
||
|
||
var details = levels[frag.level].details;
|
||
console.assert(details, 'Level details are defined when init segment is loaded');
|
||
var stats = frag.stats;
|
||
_this3.state = State.IDLE;
|
||
_this3.fragLoadError = 0;
|
||
frag.data = new Uint8Array(data.payload);
|
||
stats.parsing.start = stats.buffering.start = self.performance.now();
|
||
stats.parsing.end = stats.buffering.end = self.performance.now(); // Silence FRAG_BUFFERED event if fragCurrent is null
|
||
|
||
if (data.frag === fragCurrent) {
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_BUFFERED, {
|
||
stats: stats,
|
||
frag: fragCurrent,
|
||
part: null,
|
||
id: frag.type
|
||
});
|
||
}
|
||
|
||
_this3.tick();
|
||
}).catch(function (reason) {
|
||
_this3.warn(reason);
|
||
|
||
_this3.resetFragmentLoading(frag);
|
||
});
|
||
};
|
||
|
||
_proto.fragContextChanged = function fragContextChanged(frag) {
|
||
var fragCurrent = this.fragCurrent;
|
||
return !frag || !fragCurrent || frag.level !== fragCurrent.level || frag.sn !== fragCurrent.sn || frag.urlId !== fragCurrent.urlId;
|
||
};
|
||
|
||
_proto.fragBufferedComplete = function fragBufferedComplete(frag, part) {
|
||
var media = this.mediaBuffer ? this.mediaBuffer : this.media;
|
||
this.log("Buffered " + frag.type + " sn: " + frag.sn + (part ? ' part: ' + part.index : '') + " of " + (this.logPrefix === '[stream-controller]' ? 'level' : 'track') + " " + frag.level + " " + _utils_time_ranges__WEBPACK_IMPORTED_MODULE_14__["default"].toString(_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].getBuffered(media)));
|
||
this.state = State.IDLE;
|
||
this.tick();
|
||
};
|
||
|
||
_proto._handleFragmentLoadComplete = function _handleFragmentLoadComplete(fragLoadedEndData) {
|
||
var transmuxer = this.transmuxer;
|
||
|
||
if (!transmuxer) {
|
||
return;
|
||
}
|
||
|
||
var frag = fragLoadedEndData.frag,
|
||
part = fragLoadedEndData.part,
|
||
partsLoaded = fragLoadedEndData.partsLoaded; // If we did not load parts, or loaded all parts, we have complete (not partial) fragment data
|
||
|
||
var complete = !partsLoaded || partsLoaded.length === 0 || partsLoaded.some(function (fragLoaded) {
|
||
return !fragLoaded;
|
||
});
|
||
var chunkMeta = new _types_transmuxer__WEBPACK_IMPORTED_MODULE_7__["ChunkMetadata"](frag.level, frag.sn, frag.stats.chunkCount + 1, 0, part ? part.index : -1, !complete);
|
||
transmuxer.flush(chunkMeta);
|
||
} // eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||
;
|
||
|
||
_proto._handleFragmentLoadProgress = function _handleFragmentLoadProgress(frag) {};
|
||
|
||
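/*
  Issues the actual fragment (or part) request. In low-latency mode, when the
  playlist exposes a part list, the next loadable part for this fragment is
  selected via getNextPart() and requested through doFragPartsLoad();
  otherwise the whole fragment is requested via fragmentLoader.load().
  FRAG_LOADING is triggered in both paths, load errors are routed to
  handleFragLoadError(), and the promise resolves with null when there is
  nothing to load yet (e.g. a fragment hint without loadable parts).
*/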
_proto._doFragLoad = function _doFragLoad(frag, details, targetBufferTime, progressCallback) {
|
||
var _this4 = this;
|
||
|
||
if (targetBufferTime === void 0) {
|
||
targetBufferTime = null;
|
||
}
|
||
|
||
if (!this.levels) {
|
||
throw new Error('frag load aborted, missing levels');
|
||
}
|
||
|
||
targetBufferTime = Math.max(frag.start, targetBufferTime || 0);
|
||
|
||
if (this.config.lowLatencyMode && details) {
|
||
var partList = details.partList;
|
||
|
||
if (partList && progressCallback) {
|
||
if (targetBufferTime > frag.end && details.fragmentHint) {
|
||
frag = details.fragmentHint;
|
||
}
|
||
|
||
var partIndex = this.getNextPart(partList, frag, targetBufferTime);
|
||
|
||
if (partIndex > -1) {
|
||
var part = partList[partIndex];
|
||
this.log("Loading part sn: " + frag.sn + " p: " + part.index + " cc: " + frag.cc + " of playlist [" + details.startSN + "-" + details.endSN + "] parts [0-" + partIndex + "-" + (partList.length - 1) + "] " + (this.logPrefix === '[stream-controller]' ? 'level' : 'track') + ": " + frag.level + ", target: " + parseFloat(targetBufferTime.toFixed(3)));
|
||
this.nextLoadPosition = part.start + part.duration;
|
||
this.state = State.FRAG_LOADING;
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_LOADING, {
|
||
frag: frag,
|
||
part: partList[partIndex],
|
||
targetBufferTime: targetBufferTime
|
||
});
|
||
return this.doFragPartsLoad(frag, partList, partIndex, progressCallback).catch(function (error) {
|
||
return _this4.handleFragLoadError(error);
|
||
});
|
||
} else if (!frag.url || this.loadedEndOfParts(partList, targetBufferTime)) {
|
||
// Fragment hint has no parts
|
||
return Promise.resolve(null);
|
||
}
|
||
}
|
||
}
|
||
|
||
this.log("Loading fragment " + frag.sn + " cc: " + frag.cc + " " + (details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : '') + (this.logPrefix === '[stream-controller]' ? 'level' : 'track') + ": " + frag.level + ", target: " + parseFloat(targetBufferTime.toFixed(3))); // Don't update nextLoadPosition for fragments which are not buffered
|
||
|
||
if (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(frag.sn) && !this.bitrateTest) {
|
||
this.nextLoadPosition = frag.start + frag.duration;
|
||
}
|
||
|
||
this.state = State.FRAG_LOADING;
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_LOADING, {
|
||
frag: frag,
|
||
targetBufferTime: targetBufferTime
|
||
});
|
||
return this.fragmentLoader.load(frag, progressCallback).catch(function (error) {
|
||
return _this4.handleFragLoadError(error);
|
||
});
|
||
};
|
||
|
||
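/*
  Loads the parts of a fragment sequentially, starting at partIndex. Each
  loaded part triggers FRAG_LOADED; the chain stops, and the promise resolves
  with the accumulated partsLoaded array, as soon as the next entry in the
  part list belongs to a different fragment. Any part load failure rejects
  the promise.
*/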
_proto.doFragPartsLoad = function doFragPartsLoad(frag, partList, partIndex, progressCallback) {
|
||
var _this5 = this;
|
||
|
||
return new Promise(function (resolve, reject) {
|
||
var partsLoaded = [];
|
||
|
||
var loadPartIndex = function loadPartIndex(index) {
|
||
var part = partList[index];
|
||
|
||
_this5.fragmentLoader.loadPart(frag, part, progressCallback).then(function (partLoadedData) {
|
||
partsLoaded[part.index] = partLoadedData;
|
||
var loadedPart = partLoadedData.part;
|
||
|
||
_this5.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_LOADED, partLoadedData);
|
||
|
||
var nextPart = partList[index + 1];
|
||
|
||
if (nextPart && nextPart.fragment === frag) {
|
||
loadPartIndex(index + 1);
|
||
} else {
|
||
return resolve({
|
||
frag: frag,
|
||
part: loadedPart,
|
||
partsLoaded: partsLoaded
|
||
});
|
||
}
|
||
}).catch(reject);
|
||
};
|
||
|
||
loadPartIndex(partIndex);
|
||
});
|
||
};
|
||
|
||
_proto.handleFragLoadError = function handleFragLoadError(_ref) {
|
||
var data = _ref.data;
|
||
|
||
if (data && data.details === _errors__WEBPACK_IMPORTED_MODULE_6__["ErrorDetails"].INTERNAL_ABORTED) {
|
||
this.handleFragLoadAborted(data.frag, data.part);
|
||
} else {
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].ERROR, data);
|
||
}
|
||
|
||
return null;
|
||
};
|
||
|
||
_proto._handleTransmuxerFlush = function _handleTransmuxerFlush(chunkMeta) {
|
||
var context = this.getCurrentContext(chunkMeta);
|
||
|
||
if (!context || this.state !== State.PARSING) {
|
||
if (!this.fragCurrent) {
|
||
this.state = State.IDLE;
|
||
}
|
||
|
||
return;
|
||
}
|
||
|
||
var frag = context.frag,
|
||
part = context.part,
|
||
level = context.level;
|
||
var now = self.performance.now();
|
||
frag.stats.parsing.end = now;
|
||
|
||
if (part) {
|
||
part.stats.parsing.end = now;
|
||
}
|
||
|
||
this.updateLevelTiming(frag, part, level, chunkMeta.partial);
|
||
};
|
||
|
||
_proto.getCurrentContext = function getCurrentContext(chunkMeta) {
|
||
var levels = this.levels;
|
||
var levelIndex = chunkMeta.level,
|
||
sn = chunkMeta.sn,
|
||
partIndex = chunkMeta.part;
|
||
|
||
if (!levels || !levels[levelIndex]) {
|
||
this.warn("Levels object was unset while buffering fragment " + sn + " of level " + levelIndex + ". The current chunk will not be buffered.");
|
||
return null;
|
||
}
|
||
|
||
var level = levels[levelIndex];
|
||
var part = partIndex > -1 ? Object(_level_helper__WEBPACK_IMPORTED_MODULE_11__["getPartWith"])(level, sn, partIndex) : null;
|
||
var frag = part ? part.fragment : Object(_level_helper__WEBPACK_IMPORTED_MODULE_11__["getFragmentWithSN"])(level, sn, this.fragCurrent);
|
||
|
||
if (!frag) {
|
||
return null;
|
||
}
|
||
|
||
return {
|
||
frag: frag,
|
||
part: part,
|
||
level: level
|
||
};
|
||
};
|
||
|
||
_proto.bufferFragmentData = function bufferFragmentData(data, frag, part, chunkMeta) {
|
||
if (!data || this.state !== State.PARSING) {
|
||
return;
|
||
}
|
||
|
||
var data1 = data.data1,
|
||
data2 = data.data2;
|
||
var buffer = data1;
|
||
|
||
if (data1 && data2) {
|
||
// Combine the moof + mdat so that we buffer with a single append
|
||
buffer = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_8__["appendUint8Array"])(data1, data2);
|
||
}
|
||
|
||
if (!buffer || !buffer.length) {
|
||
return;
|
||
}
|
||
|
||
var segment = {
|
||
type: data.type,
|
||
frag: frag,
|
||
part: part,
|
||
chunkMeta: chunkMeta,
|
||
parent: frag.type,
|
||
data: buffer
|
||
};
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].BUFFER_APPENDING, segment);
|
||
|
||
if (data.dropped && data.independent && !part) {
|
||
// Clear buffer so that we reload previous segments sequentially if required
|
||
this.flushBufferGap(frag);
|
||
}
|
||
};
|
||
|
||
_proto.flushBufferGap = function flushBufferGap(frag) {
|
||
var media = this.media;
|
||
|
||
if (!media) {
|
||
return;
|
||
} // If currentTime is not buffered, clear the back buffer so that we can backtrack as much as needed
|
||
|
||
|
||
if (!_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].isBuffered(media, media.currentTime)) {
|
||
this.flushMainBuffer(0, frag.start);
|
||
return;
|
||
} // Remove back-buffer without interrupting playback to allow backtracking
|
||
|
||
|
||
var currentTime = media.currentTime;
|
||
var bufferInfo = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(media, currentTime, 0);
|
||
var fragDuration = frag.duration;
|
||
var segmentFraction = Math.min(this.config.maxFragLookUpTolerance * 2, fragDuration * 0.25);
|
||
var start = Math.max(Math.min(frag.start - segmentFraction, bufferInfo.end - segmentFraction), currentTime + segmentFraction);
|
||
|
||
if (frag.start - start > segmentFraction) {
|
||
this.flushMainBuffer(start, frag.start);
|
||
}
|
||
};
|
||
|
||
_proto.getFwdBufferInfo = function getFwdBufferInfo(bufferable, type) {
|
||
var config = this.config;
|
||
var pos = this.getLoadPosition();
|
||
|
||
if (!Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(pos)) {
|
||
return null;
|
||
}
|
||
|
||
var bufferInfo = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(bufferable, pos, config.maxBufferHole); // Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
|
||
|
||
if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
|
||
var bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
|
||
|
||
if (bufferedFragAtPos && bufferInfo.nextStart < bufferedFragAtPos.end) {
|
||
return _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, config.maxBufferHole));
|
||
}
|
||
}
|
||
|
||
return bufferInfo;
|
||
};
|
||
|
||
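/*
  Target forward-buffer length in seconds. When a level bitrate is known the
  byte budget is converted to a duration: 8 * maxBufferSize (bytes) divided by
  levelBitrate (bits per second) gives seconds, with config.maxBufferLength as
  a floor and config.maxMaxBufferLength as a hard cap. For example, a 60 MB
  budget at 6 Mbps allows roughly 80 s of media before the cap applies.
*/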
_proto.getMaxBufferLength = function getMaxBufferLength(levelBitrate) {
|
||
var config = this.config;
|
||
var maxBufLen;
|
||
|
||
if (levelBitrate) {
|
||
maxBufLen = Math.max(8 * config.maxBufferSize / levelBitrate, config.maxBufferLength);
|
||
} else {
|
||
maxBufLen = config.maxBufferLength;
|
||
}
|
||
|
||
return Math.min(maxBufLen, config.maxMaxBufferLength);
|
||
};
|
||
|
||
_proto.reduceMaxBufferLength = function reduceMaxBufferLength(threshold) {
|
||
var config = this.config;
|
||
var minLength = threshold || config.maxBufferLength;
|
||
|
||
if (config.maxMaxBufferLength >= minLength) {
|
||
// reduce max buffer length as it might be too high. we do this to avoid loop flushing ...
|
||
config.maxMaxBufferLength /= 2;
|
||
this.warn("Reduce max buffer length to " + config.maxMaxBufferLength + "s");
|
||
return true;
|
||
}
|
||
|
||
return false;
|
||
};
|
||
|
||
_proto.getNextFragment = function getNextFragment(pos, levelDetails) {
|
||
var _frag, _frag2;
|
||
|
||
var fragments = levelDetails.fragments;
|
||
var fragLen = fragments.length;
|
||
|
||
if (!fragLen) {
|
||
return null;
|
||
} // find fragment index, contiguous with end of buffer position
|
||
|
||
|
||
var config = this.config;
|
||
var start = fragments[0].start;
|
||
var frag;
|
||
|
||
if (levelDetails.live) {
|
||
var initialLiveManifestSize = config.initialLiveManifestSize;
|
||
|
||
if (fragLen < initialLiveManifestSize) {
|
||
this.warn("Not enough fragments to start playback (have: " + fragLen + ", need: " + initialLiveManifestSize + ")");
|
||
return null;
|
||
} // The real fragment start times for a live stream are only known after the PTS range for that level is known.
|
||
// In order to discover the range, we load the best matching fragment for that level and demux it.
|
||
// Do not load using live logic if the starting frag is requested - we want to use getFragmentAtPosition() so that
|
||
// we get the fragment matching that start time
|
||
|
||
|
||
if (!levelDetails.PTSKnown && !this.startFragRequested && this.startPosition === -1) {
|
||
frag = this.getInitialLiveFragment(levelDetails, fragments);
|
||
this.startPosition = frag ? this.hls.liveSyncPosition || frag.start : pos;
|
||
}
|
||
} else if (pos <= start) {
|
||
// VoD playlist: if loadPosition before start of playlist, load first fragment
|
||
frag = fragments[0];
|
||
} // If we haven't run into any special cases already, just load the fragment most closely matching the requested position
|
||
|
||
|
||
if (!frag) {
|
||
var end = config.lowLatencyMode ? levelDetails.partEnd : levelDetails.fragmentEnd;
|
||
frag = this.getFragmentAtPosition(pos, end, levelDetails);
|
||
} // If an initSegment is present, it must be buffered first
|
||
|
||
|
||
if ((_frag = frag) !== null && _frag !== void 0 && _frag.initSegment && !((_frag2 = frag) !== null && _frag2 !== void 0 && _frag2.initSegment.data) && !this.bitrateTest) {
|
||
frag = frag.initSegment;
|
||
}
|
||
|
||
return frag;
|
||
};
|
||
|
||
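/*
  Picks the index of the next part of `frag` to load from the playlist's part
  list. A part qualifies if it has not been loaded yet and is either
  contiguous with the previously loaded part, flagged as independent, or no
  part seen so far carried the INDEPENDENT attribute at all. The scan stops
  once a candidate exists and targetBufferTime falls before the next part's
  start; -1 is returned when no suitable part is found.
*/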
_proto.getNextPart = function getNextPart(partList, frag, targetBufferTime) {
|
||
var nextPart = -1;
|
||
var contiguous = false;
|
||
var independentAttrOmitted = true;
|
||
|
||
for (var i = 0, len = partList.length; i < len; i++) {
|
||
var part = partList[i];
|
||
independentAttrOmitted = independentAttrOmitted && !part.independent;
|
||
|
||
if (nextPart > -1 && targetBufferTime < part.start) {
|
||
break;
|
||
}
|
||
|
||
var loaded = part.loaded;
|
||
|
||
if (!loaded && (contiguous || part.independent || independentAttrOmitted) && part.fragment === frag) {
|
||
nextPart = i;
|
||
}
|
||
|
||
contiguous = loaded;
|
||
}
|
||
|
||
return nextPart;
|
||
};
|
||
|
||
_proto.loadedEndOfParts = function loadedEndOfParts(partList, targetBufferTime) {
|
||
var lastPart = partList[partList.length - 1];
|
||
return lastPart && targetBufferTime > lastPart.start && lastPart.loaded;
|
||
}
|
||
/*
|
||
This method is used to find the best matching first fragment for a live playlist. This fragment is used to calculate the
|
||
"sliding" of the playlist, which is its offset from the start of playback. After sliding we can compute the real
|
||
start and end times for each fragment in the playlist (after which this method will not need to be called).
|
||
*/
|
||
;
|
||
|
||
_proto.getInitialLiveFragment = function getInitialLiveFragment(levelDetails, fragments) {
|
||
var fragPrevious = this.fragPrevious;
|
||
var frag = null;
|
||
|
||
if (fragPrevious) {
|
||
if (levelDetails.hasProgramDateTime) {
|
||
// Prefer using PDT, because it can be accurate enough to choose the correct fragment without knowing the level sliding
|
||
this.log("Live playlist, switching playlist, load frag with same PDT: " + fragPrevious.programDateTime);
|
||
frag = Object(_fragment_finders__WEBPACK_IMPORTED_MODULE_10__["findFragmentByPDT"])(fragments, fragPrevious.endProgramDateTime, this.config.maxFragLookUpTolerance);
|
||
}
|
||
|
||
if (!frag) {
|
||
// SN does not need to be accurate between renditions, but depending on the packaging it may be so.
|
||
var targetSN = fragPrevious.sn + 1;
|
||
|
||
if (targetSN >= levelDetails.startSN && targetSN <= levelDetails.endSN) {
|
||
var fragNext = fragments[targetSN - levelDetails.startSN]; // Ensure that we're staying within the continuity range, since PTS resets upon a new range
|
||
|
||
if (fragPrevious.cc === fragNext.cc) {
|
||
frag = fragNext;
|
||
this.log("Live playlist, switching playlist, load frag with next SN: " + frag.sn);
|
||
}
|
||
} // It's important to stay within the continuity range if available; otherwise the fragments in the playlist
|
||
// will have the wrong start times
|
||
|
||
|
||
if (!frag) {
|
||
frag = Object(_fragment_finders__WEBPACK_IMPORTED_MODULE_10__["findFragWithCC"])(fragments, fragPrevious.cc);
|
||
|
||
if (frag) {
|
||
this.log("Live playlist, switching playlist, load frag with same CC: " + frag.sn);
|
||
}
|
||
}
|
||
}
|
||
} else {
|
||
// Find a new start fragment when fragPrevious is null
|
||
var liveStart = this.hls.liveSyncPosition;
|
||
|
||
if (liveStart !== null) {
|
||
frag = this.getFragmentAtPosition(liveStart, this.bitrateTest ? levelDetails.fragmentEnd : levelDetails.edge, levelDetails);
|
||
}
|
||
}
|
||
|
||
return frag;
|
||
}
|
||
/*
|
||
This method finds the best matching fragment given the provided position.
|
||
*/
|
||
;
|
||
|
||
_proto.getFragmentAtPosition = function getFragmentAtPosition(bufferEnd, end, levelDetails) {
|
||
var config = this.config,
|
||
fragPrevious = this.fragPrevious;
|
||
var fragments = levelDetails.fragments,
|
||
endSN = levelDetails.endSN;
|
||
var fragmentHint = levelDetails.fragmentHint;
|
||
var tolerance = config.maxFragLookUpTolerance;
|
||
var loadingParts = !!(config.lowLatencyMode && levelDetails.partList && fragmentHint);
|
||
|
||
if (loadingParts && fragmentHint && !this.bitrateTest) {
|
||
// Include incomplete fragment with parts at end
|
||
fragments = fragments.concat(fragmentHint);
|
||
endSN = fragmentHint.sn;
|
||
}
|
||
|
||
var frag;
|
||
|
||
if (bufferEnd < end) {
|
||
var lookupTolerance = bufferEnd > end - tolerance ? 0 : tolerance; // Remove the tolerance if it would put the bufferEnd past the actual end of stream
|
||
// Uses buffer and sequence number to calculate switch segment (required if using EXT-X-DISCONTINUITY-SEQUENCE)
|
||
|
||
frag = Object(_fragment_finders__WEBPACK_IMPORTED_MODULE_10__["findFragmentByPTS"])(fragPrevious, fragments, bufferEnd, lookupTolerance);
|
||
} else {
|
||
// reach end of playlist
|
||
frag = fragments[fragments.length - 1];
|
||
}
|
||
|
||
if (frag) {
|
||
var curSNIdx = frag.sn - levelDetails.startSN;
|
||
var sameLevel = fragPrevious && frag.level === fragPrevious.level;
|
||
var nextFrag = fragments[curSNIdx + 1];
|
||
var fragState = this.fragmentTracker.getState(frag);
|
||
|
||
if (fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_2__["FragmentState"].BACKTRACKED) {
|
||
frag = null;
|
||
var i = curSNIdx;
|
||
|
||
while (fragments[i] && this.fragmentTracker.getState(fragments[i]) === _fragment_tracker__WEBPACK_IMPORTED_MODULE_2__["FragmentState"].BACKTRACKED) {
|
||
// When fragPrevious is null, backtrack to the first fragment that is not BACKTRACKED for loading
|
||
// When fragPrevious is set, we want the first BACKTRACKED fragment for parsing and buffering
|
||
if (!fragPrevious) {
|
||
frag = fragments[--i];
|
||
} else {
|
||
frag = fragments[i--];
|
||
}
|
||
}
|
||
|
||
if (!frag) {
|
||
frag = nextFrag;
|
||
}
|
||
} else if (fragPrevious && frag.sn === fragPrevious.sn && !loadingParts) {
|
||
// Force the next fragment to load if the previous one was already selected. This can occasionally happen with
|
||
// non-uniform fragment durations
|
||
if (sameLevel) {
|
||
if (frag.sn < endSN && this.fragmentTracker.getState(nextFrag) !== _fragment_tracker__WEBPACK_IMPORTED_MODULE_2__["FragmentState"].OK) {
|
||
this.log("SN " + frag.sn + " just loaded, load next one: " + nextFrag.sn);
|
||
frag = nextFrag;
|
||
} else {
|
||
frag = null;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
return frag;
|
||
};
|
||
|
||
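/*
  Keeps live playback close to the configured latency. If currentTime has
  fallen outside the playlist's sliding window, or lags the live edge by more
  than liveMaxLatencyDuration (or liveMaxLatencyDurationCount * target
  duration), currentTime is moved forward to hls.liveSyncPosition. The seek is
  only performed once media.readyState is non-zero; before metadata is loaded
  only nextLoadPosition is updated.
*/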
_proto.synchronizeToLiveEdge = function synchronizeToLiveEdge(levelDetails) {
|
||
var config = this.config,
|
||
media = this.media;
|
||
|
||
if (!media) {
|
||
return;
|
||
}
|
||
|
||
var liveSyncPosition = this.hls.liveSyncPosition;
|
||
var currentTime = media.currentTime;
|
||
var start = levelDetails.fragments[0].start;
|
||
var end = levelDetails.edge;
|
||
var withinSlidingWindow = currentTime >= start - config.maxFragLookUpTolerance && currentTime <= end; // Continue if we can seek forward to sync position or if current time is outside of sliding window
|
||
|
||
if (liveSyncPosition !== null && media.duration > liveSyncPosition && (currentTime < liveSyncPosition || !withinSlidingWindow)) {
|
||
// Continue if buffer is starving or if current time is behind max latency
|
||
var maxLatency = config.liveMaxLatencyDuration !== undefined ? config.liveMaxLatencyDuration : config.liveMaxLatencyDurationCount * levelDetails.targetduration;
|
||
|
||
if (!withinSlidingWindow && media.readyState < 4 || currentTime < end - maxLatency) {
|
||
if (!this.loadedmetadata) {
|
||
this.nextLoadPosition = liveSyncPosition;
|
||
} // Only seek if ready and there is not a significant forward buffer available for playback
|
||
|
||
|
||
if (media.readyState) {
|
||
this.warn("Playback: " + currentTime.toFixed(3) + " is located too far from the end of live sliding playlist: " + end + ", reset currentTime to : " + liveSyncPosition.toFixed(3));
|
||
media.currentTime = liveSyncPosition;
|
||
}
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.alignPlaylists = function alignPlaylists(details, previousDetails) {
|
||
var levels = this.levels,
|
||
levelLastLoaded = this.levelLastLoaded,
|
||
fragPrevious = this.fragPrevious;
|
||
var lastLevel = levelLastLoaded !== null ? levels[levelLastLoaded] : null; // FIXME: If not for `shouldAlignOnDiscontinuities` requiring fragPrevious.cc,
|
||
// this could all go in level-helper mergeDetails()
|
||
|
||
var length = details.fragments.length;
|
||
|
||
if (!length) {
|
||
this.warn("No fragments in live playlist");
|
||
return 0;
|
||
}
|
||
|
||
var slidingStart = details.fragments[0].start;
|
||
var firstLevelLoad = !previousDetails;
|
||
|
||
var aligned = details.alignedSliding && Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(slidingStart);
|
||
|
||
if (firstLevelLoad || !aligned && !slidingStart) {
|
||
Object(_utils_discontinuities__WEBPACK_IMPORTED_MODULE_9__["alignStream"])(fragPrevious, lastLevel, details);
|
||
var alignedSlidingStart = details.fragments[0].start;
|
||
this.log("Live playlist sliding: " + alignedSlidingStart.toFixed(2) + " start-sn: " + (previousDetails ? previousDetails.startSN : 'na') + "->" + details.startSN + " prev-sn: " + (fragPrevious ? fragPrevious.sn : 'na') + " fragments: " + length);
|
||
return alignedSlidingStart;
|
||
}
|
||
|
||
return slidingStart;
|
||
};
|
||
|
||
_proto.waitForCdnTuneIn = function waitForCdnTuneIn(details) {
|
||
// Wait for Low-Latency CDN Tune-in to get an updated playlist
|
||
var advancePartLimit = 3;
|
||
return details.live && details.canBlockReload && details.tuneInGoal > Math.max(details.partHoldBack, details.partTarget * advancePartLimit);
|
||
};
|
||
|
||
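/*
  Resolves the initial playback position once level details are available.
  A startTimeOffset carried by the playlist (typically EXT-X-START) wins when
  present; negative values are measured back from the end of the stream and
  the result is clamped to the playlist window. Live streams otherwise start
  from hls.liveSyncPosition and VOD falls back to 0. The result seeds
  lastCurrentTime and nextLoadPosition.
*/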
_proto.setStartPosition = function setStartPosition(details, sliding) {
|
||
// compute start position if set to -1. use it straight away if value is defined
|
||
var startPosition = this.startPosition;
|
||
|
||
if (startPosition < sliding) {
|
||
startPosition = -1;
|
||
}
|
||
|
||
if (startPosition === -1 || this.lastCurrentTime === -1) {
|
||
// first, check if start time offset has been set in playlist, if yes, use this value
|
||
var startTimeOffset = details.startTimeOffset;
|
||
|
||
if (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(startTimeOffset)) {
|
||
startPosition = sliding + startTimeOffset;
|
||
|
||
if (startTimeOffset < 0) {
|
||
startPosition += details.totalduration;
|
||
}
|
||
|
||
startPosition = Math.min(Math.max(sliding, startPosition), sliding + details.totalduration);
|
||
this.log("Start time offset " + startTimeOffset + " found in playlist, adjust startPosition to " + startPosition);
|
||
this.startPosition = startPosition;
|
||
} else if (details.live) {
|
||
// Leave this.startPosition at -1, so that we can use `getInitialLiveFragment` logic when startPosition has
|
||
// not been specified via the config or as an argument to startLoad (#3736).
|
||
startPosition = this.hls.liveSyncPosition || sliding;
|
||
} else {
|
||
this.startPosition = startPosition = 0;
|
||
}
|
||
|
||
this.lastCurrentTime = startPosition;
|
||
}
|
||
|
||
this.nextLoadPosition = startPosition;
|
||
};
|
||
|
||
_proto.getLoadPosition = function getLoadPosition() {
|
||
var media = this.media; // if we have not yet loaded any fragment, start loading from start position
|
||
|
||
var pos = 0;
|
||
|
||
if (this.loadedmetadata && media) {
|
||
pos = media.currentTime;
|
||
} else if (this.nextLoadPosition) {
|
||
pos = this.nextLoadPosition;
|
||
}
|
||
|
||
return pos;
|
||
};
|
||
|
||
_proto.handleFragLoadAborted = function handleFragLoadAborted(frag, part) {
|
||
if (this.transmuxer && frag.sn !== 'initSegment' && frag.stats.aborted) {
|
||
this.warn("Fragment " + frag.sn + (part ? ' part' + part.index : '') + " of level " + frag.level + " was aborted");
|
||
this.resetFragmentLoading(frag);
|
||
}
|
||
};
|
||
|
||
_proto.resetFragmentLoading = function resetFragmentLoading(frag) {
|
||
if (!this.fragCurrent || !this.fragContextChanged(frag)) {
|
||
this.state = State.IDLE;
|
||
}
|
||
};
|
||
|
||
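/*
  Non-fatal fragment/key load errors for this controller's playlist type are
  retried with exponential backoff (capped by fragLoadingMaxRetryTimeout) for
  up to config.fragLoadingMaxRetry attempts. When the error triggers a level
  retry or redundant fail-over instead, the controller returns to IDLE; once
  all options are exhausted the error is re-dispatched as fatal and loading
  stops.
*/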
_proto.onFragmentOrKeyLoadError = function onFragmentOrKeyLoadError(filterType, data) {
|
||
if (data.fatal) {
|
||
return;
|
||
}
|
||
|
||
var frag = data.frag; // Handle frag error related to caller's filterType
|
||
|
||
if (!frag || frag.type !== filterType) {
|
||
return;
|
||
}
|
||
|
||
var fragCurrent = this.fragCurrent;
|
||
console.assert(fragCurrent && frag.sn === fragCurrent.sn && frag.level === fragCurrent.level && frag.urlId === fragCurrent.urlId, 'Frag load error must match current frag to retry');
|
||
var config = this.config; // keep retrying until the retry limit is reached
|
||
|
||
if (this.fragLoadError + 1 <= config.fragLoadingMaxRetry) {
|
||
if (this.resetLiveStartWhenNotLoaded(frag.level)) {
|
||
return;
|
||
} // exponential backoff capped to config.fragLoadingMaxRetryTimeout
|
||
|
||
|
||
var delay = Math.min(Math.pow(2, this.fragLoadError) * config.fragLoadingRetryDelay, config.fragLoadingMaxRetryTimeout);
|
||
this.warn("Fragment " + frag.sn + " of " + filterType + " " + frag.level + " failed to load, retrying in " + delay + "ms");
|
||
this.retryDate = self.performance.now() + delay;
|
||
this.fragLoadError++;
|
||
this.state = State.FRAG_LOADING_WAITING_RETRY;
|
||
} else if (data.levelRetry) {
|
||
if (filterType === _types_loader__WEBPACK_IMPORTED_MODULE_15__["PlaylistLevelType"].AUDIO) {
|
||
// Reset current fragment since audio track audio is essential and may not have a fail-over track
|
||
this.fragCurrent = null;
|
||
} // Fragment errors that result in a level switch or redundant fail-over
|
||
// should reset the stream controller state to idle
|
||
|
||
|
||
this.fragLoadError = 0;
|
||
this.state = State.IDLE;
|
||
} else {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_4__["logger"].error(data.details + " reaches max retry, redispatch as fatal ..."); // switch error to fatal
|
||
|
||
data.fatal = true;
|
||
this.hls.stopLoad();
|
||
this.state = State.ERROR;
|
||
}
|
||
};
|
||
|
||
_proto.afterBufferFlushed = function afterBufferFlushed(media, bufferType, playlistType) {
|
||
if (!media) {
|
||
return;
|
||
} // After successful buffer flushing, filter flushed fragments from bufferedFrags; use mediaBuffered instead of media
|
||
// (so that we will check against video.buffered ranges in case of alt audio track)
|
||
|
||
|
||
var bufferedTimeRanges = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_3__["BufferHelper"].getBuffered(media);
|
||
this.fragmentTracker.detectEvictedFragments(bufferType, bufferedTimeRanges, playlistType);
|
||
|
||
if (this.state === State.ENDED) {
|
||
this.resetLoadingState();
|
||
}
|
||
};
|
||
|
||
_proto.resetLoadingState = function resetLoadingState() {
|
||
this.fragCurrent = null;
|
||
this.fragPrevious = null;
|
||
this.state = State.IDLE;
|
||
};
|
||
|
||
_proto.resetLiveStartWhenNotLoaded = function resetLiveStartWhenNotLoaded(level) {
|
||
// if loadedmetadata is not set, it means that we are doing an emergency switch-down on the first frag
|
||
// in that case, reset startFragRequested flag
|
||
if (!this.loadedmetadata) {
|
||
this.startFragRequested = false;
|
||
var details = this.levels ? this.levels[level].details : null;
|
||
|
||
if (details !== null && details !== void 0 && details.live) {
|
||
// We can't afford to retry after a delay in a live scenario. Update the start position and return to IDLE.
|
||
this.startPosition = -1;
|
||
this.setStartPosition(details, 0);
|
||
this.resetLoadingState();
|
||
return true;
|
||
}
|
||
|
||
this.nextLoadPosition = this.startPosition;
|
||
}
|
||
|
||
return false;
|
||
};
|
||
|
||
_proto.updateLevelTiming = function updateLevelTiming(frag, part, level, partial) {
|
||
var _this6 = this;
|
||
|
||
var details = level.details;
|
||
console.assert(!!details, 'level.details must be defined');
|
||
var parsed = Object.keys(frag.elementaryStreams).reduce(function (result, type) {
|
||
var info = frag.elementaryStreams[type];
|
||
|
||
if (info) {
|
||
var parsedDuration = info.endPTS - info.startPTS;
|
||
|
||
if (parsedDuration <= 0) {
|
||
// Destroy the transmuxer after its next time offset failed to advance because duration was <= 0.
|
||
// The new transmuxer will be configured with a time offset matching the next fragment start,
|
||
// preventing the timeline from shifting.
|
||
_this6.warn("Could not parse fragment " + frag.sn + " " + type + " duration reliably (" + parsedDuration + ") resetting transmuxer to fallback to playlist timing");
|
||
|
||
_this6.resetTransmuxer();
|
||
|
||
return result || false;
|
||
}
|
||
|
||
var drift = partial ? 0 : Object(_level_helper__WEBPACK_IMPORTED_MODULE_11__["updateFragPTSDTS"])(details, frag, info.startPTS, info.endPTS, info.startDTS, info.endDTS);
|
||
|
||
_this6.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].LEVEL_PTS_UPDATED, {
|
||
details: details,
|
||
level: level,
|
||
drift: drift,
|
||
type: type,
|
||
frag: frag,
|
||
start: info.startPTS,
|
||
end: info.endPTS
|
||
});
|
||
|
||
return true;
|
||
}
|
||
|
||
return result;
|
||
}, false);
|
||
|
||
if (parsed) {
|
||
this.state = State.PARSED;
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_5__["Events"].FRAG_PARSED, {
|
||
frag: frag,
|
||
part: part
|
||
});
|
||
} else {
|
||
this.resetLoadingState();
|
||
}
|
||
};
|
||
|
||
_proto.resetTransmuxer = function resetTransmuxer() {
|
||
if (this.transmuxer) {
|
||
this.transmuxer.destroy();
|
||
this.transmuxer = null;
|
||
}
|
||
};
|
||
|
||
_createClass(BaseStreamController, [{
|
||
key: "state",
|
||
get: function get() {
|
||
return this._state;
|
||
},
|
||
set: function set(nextState) {
|
||
var previousState = this._state;
|
||
|
||
if (previousState !== nextState) {
|
||
this._state = nextState;
|
||
this.log(previousState + "->" + nextState);
|
||
}
|
||
}
|
||
}]);
|
||
|
||
return BaseStreamController;
|
||
}(_task_loop__WEBPACK_IMPORTED_MODULE_1__["default"]);
/***/ }),
|
||
|
||
/***/ "./src/controller/buffer-controller.ts":
|
||
/*!*********************************************!*\
|
||
!*** ./src/controller/buffer-controller.ts ***!
|
||
\*********************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return BufferController; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
|
||
/* harmony import */ var _utils_mediasource_helper__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils/mediasource-helper */ "./src/utils/mediasource-helper.ts");
|
||
/* harmony import */ var _loader_fragment__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../loader/fragment */ "./src/loader/fragment.ts");
|
||
/* harmony import */ var _buffer_operation_queue__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./buffer-operation-queue */ "./src/controller/buffer-operation-queue.ts");
var MediaSource = Object(_utils_mediasource_helper__WEBPACK_IMPORTED_MODULE_5__["getMediaSource"])();
|
||
var VIDEO_CODEC_PROFILE_REPACE = /([ha]vc.)(?:\.[^.,]+)+/;
|
||
|
||
var BufferController = /*#__PURE__*/function () {
|
||
// The level details used to determine duration, target-duration and live
|
||
// cache the self generated object url to detect hijack of video tag
|
||
// A queue of buffer operations which require the SourceBuffer to not be updating upon execution
|
||
// References to event listeners for each SourceBuffer, so that they can be referenced for event removal
|
||
// The number of BUFFER_CODEC events received before any sourceBuffers are created
|
||
// The total number of BUFFER_CODEC events received
|
||
// A reference to the attached media element
|
||
// A reference to the active media source
|
||
// counters
|
||
function BufferController(_hls) {
|
||
var _this = this;
|
||
|
||
this.details = null;
|
||
this._objectUrl = null;
|
||
this.operationQueue = void 0;
|
||
this.listeners = void 0;
|
||
this.hls = void 0;
|
||
this.bufferCodecEventsExpected = 0;
|
||
this._bufferCodecEventsTotal = 0;
|
||
this.media = null;
|
||
this.mediaSource = null;
|
||
this.appendError = 0;
|
||
this.tracks = {};
|
||
this.pendingTracks = {};
|
||
this.sourceBuffer = void 0;
|
||
|
||
this._onMediaSourceOpen = function () {
|
||
var hls = _this.hls,
|
||
media = _this.media,
|
||
mediaSource = _this.mediaSource;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('[buffer-controller]: Media source opened');
|
||
|
||
if (media) {
|
||
_this.updateMediaElementDuration();
|
||
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHED, {
|
||
media: media
|
||
});
|
||
}
|
||
|
||
if (mediaSource) {
|
||
// once received, don't listen anymore to sourceopen event
|
||
mediaSource.removeEventListener('sourceopen', _this._onMediaSourceOpen);
|
||
}
|
||
|
||
_this.checkPendingTracks();
|
||
};
|
||
|
||
this._onMediaSourceClose = function () {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('[buffer-controller]: Media source closed');
|
||
};
|
||
|
||
this._onMediaSourceEnded = function () {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('[buffer-controller]: Media source ended');
|
||
};
|
||
|
||
this.hls = _hls;
|
||
|
||
this._initSourceBuffer();
|
||
|
||
this.registerListeners();
|
||
}
|
||
|
||
var _proto = BufferController.prototype;
|
||
|
||
_proto.hasSourceTypes = function hasSourceTypes() {
|
||
return this.getSourceBufferTypes().length > 0 || Object.keys(this.pendingTracks).length > 0;
|
||
};
|
||
|
||
_proto.destroy = function destroy() {
|
||
this.unregisterListeners();
|
||
this.details = null;
|
||
};
|
||
|
||
_proto.registerListeners = function registerListeners() {
|
||
var hls = this.hls;
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_RESET, this.onBufferReset, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_APPENDING, this.onBufferAppending, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_CODECS, this.onBufferCodecs, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_EOS, this.onBufferEos, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_UPDATED, this.onLevelUpdated, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_PARSED, this.onFragParsed, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_CHANGED, this.onFragChanged, this);
|
||
};
|
||
|
||
_proto.unregisterListeners = function unregisterListeners() {
|
||
var hls = this.hls;
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_RESET, this.onBufferReset, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_APPENDING, this.onBufferAppending, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_CODECS, this.onBufferCodecs, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_EOS, this.onBufferEos, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_UPDATED, this.onLevelUpdated, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_PARSED, this.onFragParsed, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_CHANGED, this.onFragChanged, this);
|
||
};
|
||
|
||
_proto._initSourceBuffer = function _initSourceBuffer() {
|
||
this.sourceBuffer = {};
|
||
this.operationQueue = new _buffer_operation_queue__WEBPACK_IMPORTED_MODULE_7__["default"](this.sourceBuffer);
|
||
this.listeners = {
|
||
audio: [],
|
||
video: [],
|
||
audiovideo: []
|
||
};
|
||
};
|
||
|
||
_proto.onManifestParsed = function onManifestParsed(event, data) {
|
||
// in case of alt audio 2 BUFFER_CODECS events will be triggered, one per stream controller
|
||
// sourcebuffers will be created all at once when the expected number of tracks is reached
|
||
// in case alt audio is not used, only one BUFFER_CODECS event will be fired from the main stream controller
|
||
// it will contain the expected number of source buffers, no need to compute it
|
||
var codecEvents = 2;
|
||
|
||
if (data.audio && !data.video || !data.altAudio) {
|
||
codecEvents = 1;
|
||
}
|
||
|
||
this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = codecEvents;
|
||
this.details = null;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log(this.bufferCodecEventsExpected + " bufferCodec event(s) expected");
|
||
};
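// Illustrative sketch, not part of the hls.js output: how the expected BUFFER_CODECS
// count above plays out for typical playlists. The helper below simply restates the
// condition used in onManifestParsed (the sample data objects are made up).
/*
function expectedCodecEvents(data) {
  // one event for audio-only, video-only, or muxed streams without alt audio;
  // two events when a separate alt-audio rendition feeds its own stream controller
  return (data.audio && !data.video) || !data.altAudio ? 1 : 2;
}
expectedCodecEvents({ audio: true, video: true, altAudio: false }); // 1 (muxed A/V)
expectedCodecEvents({ audio: true, video: true, altAudio: true });  // 2 (video + alt audio)
*/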
|
||
|
||
_proto.onMediaAttaching = function onMediaAttaching(event, data) {
|
||
var media = this.media = data.media;
|
||
|
||
if (media && MediaSource) {
|
||
var ms = this.mediaSource = new MediaSource(); // MediaSource listeners are arrow functions with a lexical scope, and do not need to be bound
|
||
|
||
ms.addEventListener('sourceopen', this._onMediaSourceOpen);
|
||
ms.addEventListener('sourceended', this._onMediaSourceEnded);
|
||
ms.addEventListener('sourceclose', this._onMediaSourceClose); // link video and media Source
|
||
|
||
media.src = self.URL.createObjectURL(ms); // cache the locally generated object url
|
||
|
||
this._objectUrl = media.src;
|
||
}
|
||
};
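// Illustrative sketch, not part of the hls.js output: the attach sequence above is the
// standard Media Source Extensions pattern, shown in isolation with a plain <video>
// element (videoElement is an illustrative name only).
/*
var mediaSource = new MediaSource();
videoElement.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function () {
  // readyState is now 'open'; it is safe to call mediaSource.addSourceBuffer(mimeType)
  // and to set mediaSource.duration, which is what checkPendingTracks() relies on.
});
*/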
|
||
|
||
_proto.onMediaDetaching = function onMediaDetaching() {
|
||
var media = this.media,
|
||
mediaSource = this.mediaSource,
|
||
_objectUrl = this._objectUrl;
|
||
|
||
if (mediaSource) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('[buffer-controller]: media source detaching');
|
||
|
||
if (mediaSource.readyState === 'open') {
|
||
try {
|
||
// endOfStream could trigger exception if any sourcebuffer is in updating state
|
||
// we don't really care about checking sourcebuffer state here,
|
||
// as we are anyway detaching the MediaSource
|
||
// let's just avoid letting this exception propagate
|
||
mediaSource.endOfStream();
|
||
} catch (err) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("[buffer-controller]: onMediaDetaching: " + err.message + " while calling endOfStream");
|
||
}
|
||
} // Clean up the SourceBuffers by invoking onBufferReset
|
||
|
||
|
||
this.onBufferReset();
|
||
mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen);
|
||
mediaSource.removeEventListener('sourceended', this._onMediaSourceEnded);
|
||
mediaSource.removeEventListener('sourceclose', this._onMediaSourceClose); // Detach properly the MediaSource from the HTMLMediaElement as
|
||
// suggested in https://github.com/w3c/media-source/issues/53.
|
||
|
||
if (media) {
|
||
if (_objectUrl) {
|
||
self.URL.revokeObjectURL(_objectUrl);
|
||
} // clean up video tag src only if it's our own url. some external libraries might
|
||
// hijack the video tag and change its 'src' without destroying the Hls instance first
|
||
|
||
|
||
if (media.src === _objectUrl) {
|
||
media.removeAttribute('src');
|
||
media.load();
|
||
} else {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn('[buffer-controller]: media.src was changed by a third party - skip cleanup');
|
||
}
|
||
}
|
||
|
||
this.mediaSource = null;
|
||
this.media = null;
|
||
this._objectUrl = null;
|
||
this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
|
||
this.pendingTracks = {};
|
||
this.tracks = {};
|
||
}
|
||
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHED, undefined);
|
||
};
|
||
|
||
_proto.onBufferReset = function onBufferReset() {
|
||
var _this2 = this;
|
||
|
||
this.getSourceBufferTypes().forEach(function (type) {
|
||
var sb = _this2.sourceBuffer[type];
|
||
|
||
try {
|
||
if (sb) {
|
||
_this2.removeBufferListeners(type);
|
||
|
||
if (_this2.mediaSource) {
|
||
_this2.mediaSource.removeSourceBuffer(sb);
|
||
} // Synchronously remove the SB from the map before the next call in order to prevent an async function from
|
||
// accessing it
|
||
|
||
|
||
_this2.sourceBuffer[type] = undefined;
|
||
}
|
||
} catch (err) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("[buffer-controller]: Failed to reset the " + type + " buffer", err);
|
||
}
|
||
});
|
||
|
||
this._initSourceBuffer();
|
||
};
|
||
|
||
_proto.onBufferCodecs = function onBufferCodecs(event, data) {
|
||
var _this3 = this;
|
||
|
||
var sourceBufferCount = this.getSourceBufferTypes().length;
|
||
Object.keys(data).forEach(function (trackName) {
|
||
if (sourceBufferCount) {
|
||
// check if SourceBuffer codec needs to change
|
||
var track = _this3.tracks[trackName];
|
||
|
||
if (track && typeof track.buffer.changeType === 'function') {
|
||
var _data$trackName = data[trackName],
|
||
id = _data$trackName.id,
|
||
codec = _data$trackName.codec,
|
||
levelCodec = _data$trackName.levelCodec,
|
||
container = _data$trackName.container,
|
||
metadata = _data$trackName.metadata;
|
||
var currentCodec = (track.levelCodec || track.codec).replace(VIDEO_CODEC_PROFILE_REPACE, '$1');
|
||
var nextCodec = (levelCodec || codec).replace(VIDEO_CODEC_PROFILE_REPACE, '$1');
|
||
|
||
if (currentCodec !== nextCodec) {
|
||
var mimeType = container + ";codecs=" + (levelCodec || codec);
|
||
|
||
_this3.appendChangeType(trackName, mimeType);
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("[buffer-controller]: switching codec " + currentCodec + " to " + nextCodec);
|
||
_this3.tracks[trackName] = {
|
||
buffer: track.buffer,
|
||
codec: codec,
|
||
container: container,
|
||
levelCodec: levelCodec,
|
||
metadata: metadata,
|
||
id: id
|
||
};
|
||
}
|
||
}
|
||
} else {
|
||
// if source buffer(s) not created yet, append buffer tracks to this.pendingTracks
|
||
_this3.pendingTracks[trackName] = data[trackName];
|
||
}
|
||
}); // if sourcebuffers already created, do nothing ...
|
||
|
||
if (sourceBufferCount) {
|
||
return;
|
||
}
|
||
|
||
this.bufferCodecEventsExpected = Math.max(this.bufferCodecEventsExpected - 1, 0);
|
||
|
||
if (this.mediaSource && this.mediaSource.readyState === 'open') {
|
||
this.checkPendingTracks();
|
||
}
|
||
};
|
||
|
||
_proto.appendChangeType = function appendChangeType(type, mimeType) {
|
||
var _this4 = this;
|
||
|
||
var operationQueue = this.operationQueue;
|
||
var operation = {
|
||
execute: function execute() {
|
||
var sb = _this4.sourceBuffer[type];
|
||
|
||
if (sb) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("[buffer-controller]: changing " + type + " sourceBuffer type to " + mimeType);
|
||
sb.changeType(mimeType);
|
||
}
|
||
|
||
operationQueue.shiftAndExecuteNext(type);
|
||
},
|
||
onStart: function onStart() {},
|
||
onComplete: function onComplete() {},
|
||
onError: function onError(e) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("[buffer-controller]: Failed to change " + type + " SourceBuffer type", e);
|
||
}
|
||
};
|
||
operationQueue.append(operation, type);
|
||
};
|
||
|
||
_proto.onBufferAppending = function onBufferAppending(event, eventData) {
|
||
var _this5 = this;
|
||
|
||
var hls = this.hls,
|
||
operationQueue = this.operationQueue,
|
||
tracks = this.tracks;
|
||
var data = eventData.data,
|
||
type = eventData.type,
|
||
frag = eventData.frag,
|
||
part = eventData.part,
|
||
chunkMeta = eventData.chunkMeta;
|
||
var chunkStats = chunkMeta.buffering[type];
|
||
var bufferAppendingStart = self.performance.now();
|
||
chunkStats.start = bufferAppendingStart;
|
||
var fragBuffering = frag.stats.buffering;
|
||
var partBuffering = part ? part.stats.buffering : null;
|
||
|
||
if (fragBuffering.start === 0) {
|
||
fragBuffering.start = bufferAppendingStart;
|
||
}
|
||
|
||
if (partBuffering && partBuffering.start === 0) {
|
||
partBuffering.start = bufferAppendingStart;
|
||
} // TODO: Only update timestampOffset when audio/mpeg fragment or part is not contiguous with previously appended
|
||
// Adjusting `SourceBuffer.timestampOffset` (desired point in the timeline where the next frames should be appended)
|
||
// in Chrome browser when we detect MPEG audio container and time delta between level PTS and `SourceBuffer.timestampOffset`
|
||
// is greater than 100ms (this is enough to handle seek for VOD or level change for LIVE videos).
|
||
// More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486
|
||
|
||
|
||
var audioTrack = tracks.audio;
|
||
var checkTimestampOffset = type === 'audio' && chunkMeta.id === 1 && (audioTrack === null || audioTrack === void 0 ? void 0 : audioTrack.container) === 'audio/mpeg';
|
||
var operation = {
|
||
execute: function execute() {
|
||
chunkStats.executeStart = self.performance.now();
|
||
|
||
if (checkTimestampOffset) {
|
||
var sb = _this5.sourceBuffer[type];
|
||
|
||
if (sb) {
|
||
var delta = frag.start - sb.timestampOffset;
|
||
|
||
if (Math.abs(delta) >= 0.1) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("[buffer-controller]: Updating audio SourceBuffer timestampOffset to " + frag.start + " (delta: " + delta + ") sn: " + frag.sn + ")");
|
||
sb.timestampOffset = frag.start;
|
||
}
|
||
}
|
||
}
|
||
|
||
_this5.appendExecutor(data, type);
|
||
},
|
||
onStart: function onStart() {// logger.debug(`[buffer-controller]: ${type} SourceBuffer updatestart`);
|
||
},
|
||
onComplete: function onComplete() {
|
||
// logger.debug(`[buffer-controller]: ${type} SourceBuffer updateend`);
|
||
var end = self.performance.now();
|
||
chunkStats.executeEnd = chunkStats.end = end;
|
||
|
||
if (fragBuffering.first === 0) {
|
||
fragBuffering.first = end;
|
||
}
|
||
|
||
if (partBuffering && partBuffering.first === 0) {
|
||
partBuffering.first = end;
|
||
}
|
||
|
||
var sourceBuffer = _this5.sourceBuffer;
|
||
var timeRanges = {};
|
||
|
||
for (var _type in sourceBuffer) {
|
||
timeRanges[_type] = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__["BufferHelper"].getBuffered(sourceBuffer[_type]);
|
||
}
|
||
|
||
_this5.appendError = 0;
|
||
|
||
_this5.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_APPENDED, {
|
||
type: type,
|
||
frag: frag,
|
||
part: part,
|
||
chunkMeta: chunkMeta,
|
||
parent: frag.type,
|
||
timeRanges: timeRanges
|
||
});
|
||
},
|
||
onError: function onError(err) {
|
||
// in case any error occurred while appending, put the segment back in the segments table
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("[buffer-controller]: Error encountered while trying to append to the " + type + " SourceBuffer", err);
|
||
var event = {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorTypes"].MEDIA_ERROR,
|
||
parent: frag.type,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorDetails"].BUFFER_APPEND_ERROR,
|
||
err: err,
|
||
fatal: false
|
||
};
|
||
|
||
if (err.code === DOMException.QUOTA_EXCEEDED_ERR) {
|
||
// QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
|
||
// let's stop appending any segments, and report BUFFER_FULL_ERROR error
|
||
event.details = _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorDetails"].BUFFER_FULL_ERROR;
|
||
} else {
|
||
_this5.appendError++;
|
||
event.details = _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorDetails"].BUFFER_APPEND_ERROR;
|
||
/* With UHD content, we could get a loop of quota-exceeded errors until the
|
||
browser is able to evict some data from the SourceBuffer. Retrying can help recover.
|
||
*/
|
||
|
||
if (_this5.appendError > hls.config.appendErrorMaxRetry) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("[buffer-controller]: Failed " + hls.config.appendErrorMaxRetry + " times to append segment in sourceBuffer");
|
||
event.fatal = true;
|
||
}
|
||
}
|
||
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, event);
|
||
}
|
||
};
|
||
operationQueue.append(operation, type);
|
||
};
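// Illustrative sketch, not part of the hls.js output: the MPEG-audio timestampOffset
// adjustment above with concrete numbers (values chosen only for the example).
/*
// frag.start = 120.2 s, sourceBuffer.timestampOffset = 120.0 s
// delta = 120.2 - 120.0 = 0.2, |delta| >= 0.1, so the controller sets
// sourceBuffer.timestampOffset = 120.2 before appending, keeping the appended
// audio aligned with the playlist timeline after a seek or level switch.
*/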
|
||
|
||
_proto.onBufferFlushing = function onBufferFlushing(event, data) {
|
||
var _this6 = this;
|
||
|
||
var operationQueue = this.operationQueue;
|
||
|
||
var flushOperation = function flushOperation(type) {
|
||
return {
|
||
execute: _this6.removeExecutor.bind(_this6, type, data.startOffset, data.endOffset),
|
||
onStart: function onStart() {// logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
|
||
},
|
||
onComplete: function onComplete() {
|
||
// logger.debug(`[buffer-controller]: Finished flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
|
||
_this6.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_FLUSHED, {
|
||
type: type
|
||
});
|
||
},
|
||
onError: function onError(e) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("[buffer-controller]: Failed to remove from " + type + " SourceBuffer", e);
|
||
}
|
||
};
|
||
};
|
||
|
||
if (data.type) {
|
||
operationQueue.append(flushOperation(data.type), data.type);
|
||
} else {
|
||
this.getSourceBufferTypes().forEach(function (type) {
|
||
operationQueue.append(flushOperation(type), type);
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto.onFragParsed = function onFragParsed(event, data) {
|
||
var _this7 = this;
|
||
|
||
var frag = data.frag,
|
||
part = data.part;
|
||
var buffersAppendedTo = [];
|
||
var elementaryStreams = part ? part.elementaryStreams : frag.elementaryStreams;
|
||
|
||
if (elementaryStreams[_loader_fragment__WEBPACK_IMPORTED_MODULE_6__["ElementaryStreamTypes"].AUDIOVIDEO]) {
|
||
buffersAppendedTo.push('audiovideo');
|
||
} else {
|
||
if (elementaryStreams[_loader_fragment__WEBPACK_IMPORTED_MODULE_6__["ElementaryStreamTypes"].AUDIO]) {
|
||
buffersAppendedTo.push('audio');
|
||
}
|
||
|
||
if (elementaryStreams[_loader_fragment__WEBPACK_IMPORTED_MODULE_6__["ElementaryStreamTypes"].VIDEO]) {
|
||
buffersAppendedTo.push('video');
|
||
}
|
||
}
|
||
|
||
var onUnblocked = function onUnblocked() {
|
||
var now = self.performance.now();
|
||
frag.stats.buffering.end = now;
|
||
|
||
if (part) {
|
||
part.stats.buffering.end = now;
|
||
}
|
||
|
||
var stats = part ? part.stats : frag.stats;
|
||
|
||
_this7.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_BUFFERED, {
|
||
frag: frag,
|
||
part: part,
|
||
stats: stats,
|
||
id: frag.type
|
||
});
|
||
};
|
||
|
||
if (buffersAppendedTo.length === 0) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("Fragments must have at least one ElementaryStreamType set. type: " + frag.type + " level: " + frag.level + " sn: " + frag.sn);
|
||
}
|
||
|
||
this.blockBuffers(onUnblocked, buffersAppendedTo);
|
||
};
|
||
|
||
_proto.onFragChanged = function onFragChanged(event, data) {
|
||
this.flushBackBuffer();
|
||
} // on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
|
||
// an undefined data.type will mark all buffers as EOS.
|
||
;
|
||
|
||
_proto.onBufferEos = function onBufferEos(event, data) {
|
||
var _this8 = this;
|
||
|
||
var ended = this.getSourceBufferTypes().reduce(function (acc, type) {
|
||
var sb = _this8.sourceBuffer[type];
|
||
|
||
if (!data.type || data.type === type) {
|
||
if (sb && !sb.ended) {
|
||
sb.ended = true;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("[buffer-controller]: " + type + " sourceBuffer now EOS");
|
||
}
|
||
}
|
||
|
||
return acc && !!(!sb || sb.ended);
|
||
}, true);
|
||
|
||
if (ended) {
|
||
this.blockBuffers(function () {
|
||
var mediaSource = _this8.mediaSource;
|
||
|
||
if (!mediaSource || mediaSource.readyState !== 'open') {
|
||
return;
|
||
} // Allow this to throw and be caught by the enqueueing function
|
||
|
||
|
||
mediaSource.endOfStream();
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto.onLevelUpdated = function onLevelUpdated(event, _ref) {
|
||
var details = _ref.details;
|
||
|
||
if (!details.fragments.length) {
|
||
return;
|
||
}
|
||
|
||
this.details = details;
|
||
|
||
if (this.getSourceBufferTypes().length) {
|
||
this.blockBuffers(this.updateMediaElementDuration.bind(this));
|
||
} else {
|
||
this.updateMediaElementDuration();
|
||
}
|
||
};
|
||
|
||
_proto.flushBackBuffer = function flushBackBuffer() {
|
||
var hls = this.hls,
|
||
details = this.details,
|
||
media = this.media,
|
||
sourceBuffer = this.sourceBuffer;
|
||
|
||
if (!media || details === null) {
|
||
return;
|
||
}
|
||
|
||
var sourceBufferTypes = this.getSourceBufferTypes();
|
||
|
||
if (!sourceBufferTypes.length) {
|
||
return;
|
||
} // Support for deprecated liveBackBufferLength
|
||
|
||
|
||
var backBufferLength = details.live && hls.config.liveBackBufferLength !== null ? hls.config.liveBackBufferLength : hls.config.backBufferLength;
|
||
|
||
if (!Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(backBufferLength) || backBufferLength < 0) {
|
||
return;
|
||
}
|
||
|
||
var currentTime = media.currentTime;
|
||
var targetDuration = details.levelTargetDuration;
|
||
var maxBackBufferLength = Math.max(backBufferLength, targetDuration);
|
||
var targetBackBufferPosition = Math.floor(currentTime / targetDuration) * targetDuration - maxBackBufferLength;
|
||
sourceBufferTypes.forEach(function (type) {
|
||
var sb = sourceBuffer[type];
|
||
|
||
if (sb) {
|
||
var buffered = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__["BufferHelper"].getBuffered(sb); // when target buffer start exceeds actual buffer start
|
||
|
||
if (buffered.length > 0 && targetBackBufferPosition > buffered.start(0)) {
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BACK_BUFFER_REACHED, {
|
||
bufferEnd: targetBackBufferPosition
|
||
}); // Support for deprecated event:
|
||
|
||
if (details.live) {
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LIVE_BACK_BUFFER_REACHED, {
|
||
bufferEnd: targetBackBufferPosition
|
||
});
|
||
}
|
||
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_FLUSHING, {
|
||
startOffset: 0,
|
||
endOffset: targetBackBufferPosition,
|
||
type: type
|
||
});
|
||
}
|
||
}
|
||
});
|
||
}
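// Illustrative sketch, not part of the hls.js output: the back-buffer target computed
// above, with example numbers (assuming backBufferLength is configured to 90 and the
// level target duration is 10; both values are assumptions for the example).
/*
// currentTime = 125, targetDuration = 10, backBufferLength = 90
// maxBackBufferLength      = Math.max(90, 10)                 = 90
// targetBackBufferPosition = Math.floor(125 / 10) * 10 - 90   = 30
// -> media buffered before t = 30 s triggers a BUFFER_FLUSHING event for [0, 30].
*/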
|
||
/**
|
||
* Update Media Source duration to current level duration or override to Infinity if configuration parameter
|
||
* `liveDurationInfinity` is set to `true`
|
||
* More details: https://github.com/video-dev/hls.js/issues/355
|
||
*/
|
||
;
|
||
|
||
_proto.updateMediaElementDuration = function updateMediaElementDuration() {
|
||
if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
|
||
return;
|
||
}
|
||
|
||
var details = this.details,
|
||
hls = this.hls,
|
||
media = this.media,
|
||
mediaSource = this.mediaSource;
|
||
var levelDuration = details.fragments[0].start + details.totalduration;
|
||
var mediaDuration = media.duration;
|
||
var msDuration = Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(mediaSource.duration) ? mediaSource.duration : 0;
|
||
|
||
if (details.live && hls.config.liveDurationInfinity) {
|
||
// Override duration to Infinity
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('[buffer-controller]: Media Source duration is set to Infinity');
|
||
mediaSource.duration = Infinity;
|
||
this.updateSeekableRange(details);
|
||
} else if (levelDuration > msDuration && levelDuration > mediaDuration || !Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(mediaDuration)) {
|
||
// levelDuration was the last value we set.
|
||
// not using mediaSource.duration as the browser may tweak this value
|
||
// only update Media Source duration if its value increases; this is to avoid
|
||
// flushing the already buffered portion when switching between quality levels
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("[buffer-controller]: Updating Media Source duration to " + levelDuration.toFixed(3));
|
||
mediaSource.duration = levelDuration;
|
||
}
|
||
};
|
||
|
||
_proto.updateSeekableRange = function updateSeekableRange(levelDetails) {
|
||
var mediaSource = this.mediaSource;
|
||
var fragments = levelDetails.fragments;
|
||
var len = fragments.length;
|
||
|
||
if (len && levelDetails.live && mediaSource !== null && mediaSource !== void 0 && mediaSource.setLiveSeekableRange) {
|
||
var start = Math.max(0, fragments[0].start);
|
||
var end = Math.max(start, start + levelDetails.totalduration);
|
||
mediaSource.setLiveSeekableRange(start, end);
|
||
}
|
||
};
|
||
|
||
_proto.checkPendingTracks = function checkPendingTracks() {
|
||
var bufferCodecEventsExpected = this.bufferCodecEventsExpected,
|
||
operationQueue = this.operationQueue,
|
||
pendingTracks = this.pendingTracks; // Check if we've received all of the expected bufferCodec events. When none remain, create all the sourceBuffers at once.
|
||
// This is important because the MSE spec allows implementations to throw QuotaExceededErrors if creating new sourceBuffers after
|
||
// data has been appended to existing ones.
|
||
// 2 tracks is the max (one for audio, one for video). If we've reached this max, go ahead and create the buffers.
|
||
|
||
var pendingTracksCount = Object.keys(pendingTracks).length;
|
||
|
||
if (pendingTracksCount && !bufferCodecEventsExpected || pendingTracksCount === 2) {
|
||
// ok, let's create them now !
|
||
this.createSourceBuffers(pendingTracks);
|
||
this.pendingTracks = {}; // append any pending segments now !
|
||
|
||
var buffers = this.getSourceBufferTypes();
|
||
|
||
if (buffers.length === 0) {
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorTypes"].MEDIA_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorDetails"].BUFFER_INCOMPATIBLE_CODECS_ERROR,
|
||
fatal: true,
|
||
reason: 'could not create source buffer for media codec(s)'
|
||
});
|
||
return;
|
||
}
|
||
|
||
buffers.forEach(function (type) {
|
||
operationQueue.executeNext(type);
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto.createSourceBuffers = function createSourceBuffers(tracks) {
|
||
var sourceBuffer = this.sourceBuffer,
|
||
mediaSource = this.mediaSource;
|
||
|
||
if (!mediaSource) {
|
||
throw Error('createSourceBuffers called when mediaSource was null');
|
||
}
|
||
|
||
var tracksCreated = 0;
|
||
|
||
for (var trackName in tracks) {
|
||
if (!sourceBuffer[trackName]) {
|
||
var track = tracks[trackName];
|
||
|
||
if (!track) {
|
||
throw Error("source buffer exists for track " + trackName + ", however track does not");
|
||
} // use levelCodec as first priority
|
||
|
||
|
||
var codec = track.levelCodec || track.codec;
|
||
var mimeType = track.container + ";codecs=" + codec;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("[buffer-controller]: creating sourceBuffer(" + mimeType + ")");
|
||
|
||
try {
|
||
var sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
|
||
var sbName = trackName;
|
||
this.addBufferListener(sbName, 'updatestart', this._onSBUpdateStart);
|
||
this.addBufferListener(sbName, 'updateend', this._onSBUpdateEnd);
|
||
this.addBufferListener(sbName, 'error', this._onSBUpdateError);
|
||
this.tracks[trackName] = {
|
||
buffer: sb,
|
||
codec: codec,
|
||
container: track.container,
|
||
levelCodec: track.levelCodec,
|
||
metadata: track.metadata,
|
||
id: track.id
|
||
};
|
||
tracksCreated++;
|
||
} catch (err) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("[buffer-controller]: error while trying to add sourceBuffer: " + err.message);
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorTypes"].MEDIA_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorDetails"].BUFFER_ADD_CODEC_ERROR,
|
||
fatal: false,
|
||
error: err,
|
||
mimeType: mimeType
|
||
});
|
||
}
|
||
}
|
||
}
|
||
|
||
if (tracksCreated) {
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_CREATED, {
|
||
tracks: this.tracks
|
||
});
|
||
}
|
||
} // Keep as arrow functions so that we can reference these functions directly as event listeners
|
||
;
|
||
|
||
_proto._onSBUpdateStart = function _onSBUpdateStart(type) {
|
||
var operationQueue = this.operationQueue;
|
||
var operation = operationQueue.current(type);
|
||
operation.onStart();
|
||
};
|
||
|
||
_proto._onSBUpdateEnd = function _onSBUpdateEnd(type) {
|
||
var operationQueue = this.operationQueue;
|
||
var operation = operationQueue.current(type);
|
||
operation.onComplete();
|
||
operationQueue.shiftAndExecuteNext(type);
|
||
};
|
||
|
||
_proto._onSBUpdateError = function _onSBUpdateError(type, event) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("[buffer-controller]: " + type + " SourceBuffer error", event); // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
|
||
// SourceBuffer errors are not necessarily fatal; if they are, the HTMLMediaElement will fire an error event
|
||
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorTypes"].MEDIA_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_3__["ErrorDetails"].BUFFER_APPENDING_ERROR,
|
||
fatal: false
|
||
}); // updateend is always fired after error, so we'll allow that to shift the current operation off of the queue
|
||
|
||
var operation = this.operationQueue.current(type);
|
||
|
||
if (operation) {
|
||
operation.onError(event);
|
||
}
|
||
} // This method must result in an updateend event; if remove is not called, _onSBUpdateEnd must be called manually
|
||
;
|
||
|
||
_proto.removeExecutor = function removeExecutor(type, startOffset, endOffset) {
|
||
var media = this.media,
|
||
mediaSource = this.mediaSource,
|
||
operationQueue = this.operationQueue,
|
||
sourceBuffer = this.sourceBuffer;
|
||
var sb = sourceBuffer[type];
|
||
|
||
if (!media || !mediaSource || !sb) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("[buffer-controller]: Attempting to remove from the " + type + " SourceBuffer, but it does not exist");
|
||
operationQueue.shiftAndExecuteNext(type);
|
||
return;
|
||
}
|
||
|
||
var mediaDuration = Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(media.duration) ? media.duration : Infinity;
|
||
var msDuration = Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(mediaSource.duration) ? mediaSource.duration : Infinity;
|
||
var removeStart = Math.max(0, startOffset);
|
||
var removeEnd = Math.min(endOffset, mediaDuration, msDuration);
|
||
|
||
if (removeEnd > removeStart) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("[buffer-controller]: Removing [" + removeStart + "," + removeEnd + "] from the " + type + " SourceBuffer");
|
||
console.assert(!sb.updating, type + " sourceBuffer must not be updating");
|
||
sb.remove(removeStart, removeEnd);
|
||
} else {
|
||
// Cycle the queue
|
||
operationQueue.shiftAndExecuteNext(type);
|
||
}
|
||
} // This method must result in an updateend event; if append is not called, _onSBUpdateEnd must be called manually
|
||
;
|
||
|
||
_proto.appendExecutor = function appendExecutor(data, type) {
|
||
var operationQueue = this.operationQueue,
|
||
sourceBuffer = this.sourceBuffer;
|
||
var sb = sourceBuffer[type];
|
||
|
||
if (!sb) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("[buffer-controller]: Attempting to append to the " + type + " SourceBuffer, but it does not exist");
|
||
operationQueue.shiftAndExecuteNext(type);
|
||
return;
|
||
}
|
||
|
||
sb.ended = false;
|
||
console.assert(!sb.updating, type + " sourceBuffer must not be updating");
|
||
sb.appendBuffer(data);
|
||
} // Enqueues an operation to each SourceBuffer queue which, upon execution, resolves a promise. When all promises
|
||
// resolve, the onUnblocked function is executed. Functions calling this method do not need to unblock the queue
|
||
// upon completion, since we already do it here
|
||
;
|
||
|
||
_proto.blockBuffers = function blockBuffers(onUnblocked, buffers) {
|
||
var _this9 = this;
|
||
|
||
if (buffers === void 0) {
|
||
buffers = this.getSourceBufferTypes();
|
||
}
|
||
|
||
if (!buffers.length) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('[buffer-controller]: Blocking operation requested, but no SourceBuffers exist');
|
||
Promise.resolve().then(onUnblocked); // still invoke the callback asynchronously even though there is nothing to block on
|
||
return;
|
||
}
|
||
|
||
var operationQueue = this.operationQueue; // logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
|
||
|
||
var blockingOperations = buffers.map(function (type) {
|
||
return operationQueue.appendBlocker(type);
|
||
});
|
||
Promise.all(blockingOperations).then(function () {
|
||
// logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
|
||
onUnblocked();
|
||
buffers.forEach(function (type) {
|
||
var sb = _this9.sourceBuffer[type]; // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
|
||
// true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
|
||
// While this is a workaround, it's probably useful to have around
|
||
|
||
if (!sb || !sb.updating) {
|
||
operationQueue.shiftAndExecuteNext(type);
|
||
}
|
||
});
|
||
});
|
||
};
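// Illustrative sketch, not part of the hls.js output: the blocking pattern used above,
// reduced to its core. Each SourceBuffer queue receives a no-op operation whose
// execute() resolves a promise; once every queue has drained up to its blocker,
// Promise.all() fires and the onUnblocked callback runs. The makeBlocker/queue names
// below are illustrative only.
/*
function makeBlocker() {
  var unblock;
  var blocked = new Promise(function (resolve) { unblock = resolve; });
  // 'execute' is called by the queue when this operation reaches the front
  return {
    blocked: blocked,
    operation: { execute: unblock, onStart: function () {}, onComplete: function () {}, onError: function () {} }
  };
}
// var blockers = ['audio', 'video'].map(function (t) { var b = makeBlocker(); queue.append(b.operation, t); return b.blocked; });
// Promise.all(blockers).then(onUnblocked);
*/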
|
||
|
||
_proto.getSourceBufferTypes = function getSourceBufferTypes() {
|
||
return Object.keys(this.sourceBuffer);
|
||
};
|
||
|
||
_proto.addBufferListener = function addBufferListener(type, event, fn) {
|
||
var buffer = this.sourceBuffer[type];
|
||
|
||
if (!buffer) {
|
||
return;
|
||
}
|
||
|
||
var listener = fn.bind(this, type);
|
||
this.listeners[type].push({
|
||
event: event,
|
||
listener: listener
|
||
});
|
||
buffer.addEventListener(event, listener);
|
||
};
|
||
|
||
_proto.removeBufferListeners = function removeBufferListeners(type) {
|
||
var buffer = this.sourceBuffer[type];
|
||
|
||
if (!buffer) {
|
||
return;
|
||
}
|
||
|
||
this.listeners[type].forEach(function (l) {
|
||
buffer.removeEventListener(l.event, l.listener);
|
||
});
|
||
};
|
||
|
||
return BufferController;
|
||
}();
/***/ }),
|
||
|
||
/***/ "./src/controller/buffer-operation-queue.ts":
|
||
/*!**************************************************!*\
|
||
!*** ./src/controller/buffer-operation-queue.ts ***!
|
||
\**************************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return BufferOperationQueue; });
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
|
||
|
||
var BufferOperationQueue = /*#__PURE__*/function () {
|
||
function BufferOperationQueue(sourceBufferReference) {
|
||
this.buffers = void 0;
|
||
this.queues = {
|
||
video: [],
|
||
audio: [],
|
||
audiovideo: []
|
||
};
|
||
this.buffers = sourceBufferReference;
|
||
}
|
||
|
||
var _proto = BufferOperationQueue.prototype;
|
||
|
||
_proto.append = function append(operation, type) {
|
||
var queue = this.queues[type];
|
||
queue.push(operation);
|
||
|
||
if (queue.length === 1 && this.buffers[type]) {
|
||
this.executeNext(type);
|
||
}
|
||
};
|
||
|
||
_proto.insertAbort = function insertAbort(operation, type) {
|
||
var queue = this.queues[type];
|
||
queue.unshift(operation);
|
||
this.executeNext(type);
|
||
};
|
||
|
||
_proto.appendBlocker = function appendBlocker(type) {
|
||
var execute;
|
||
var promise = new Promise(function (resolve) {
|
||
execute = resolve;
|
||
});
|
||
var operation = {
|
||
execute: execute,
|
||
onStart: function onStart() {},
|
||
onComplete: function onComplete() {},
|
||
onError: function onError() {}
|
||
};
|
||
this.append(operation, type);
|
||
return promise;
|
||
};
|
||
|
||
_proto.executeNext = function executeNext(type) {
|
||
var buffers = this.buffers,
|
||
queues = this.queues;
|
||
var sb = buffers[type];
|
||
var queue = queues[type];
|
||
|
||
if (queue.length) {
|
||
var operation = queue[0];
|
||
|
||
try {
|
||
// Operations are expected to result in an 'updateend' event being fired. If not, the queue will lock. Operations
|
||
// which do not end with this event must call _onSBUpdateEnd manually
|
||
operation.execute();
|
||
} catch (e) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].warn('[buffer-operation-queue]: Unhandled exception executing the current operation');
|
||
operation.onError(e); // Only shift the current operation off, otherwise the updateend handler will do this for us
|
||
|
||
if (!sb || !sb.updating) {
|
||
queue.shift();
|
||
this.executeNext(type);
|
||
}
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.shiftAndExecuteNext = function shiftAndExecuteNext(type) {
|
||
this.queues[type].shift();
|
||
this.executeNext(type);
|
||
};
|
||
|
||
_proto.current = function current(type) {
|
||
return this.queues[type][0];
|
||
};
|
||
|
||
return BufferOperationQueue;
|
||
}();
/***/ }),
|
||
|
||
/***/ "./src/controller/cap-level-controller.ts":
|
||
/*!************************************************!*\
|
||
!*** ./src/controller/cap-level-controller.ts ***!
|
||
\************************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
/*
|
||
* cap stream level to media size dimension controller
|
||
*/
|
||
|
||
|
||
var CapLevelController = /*#__PURE__*/function () {
|
||
function CapLevelController(hls) {
|
||
this.autoLevelCapping = void 0;
|
||
this.firstLevel = void 0;
|
||
this.media = void 0;
|
||
this.restrictedLevels = void 0;
|
||
this.timer = void 0;
|
||
this.hls = void 0;
|
||
this.streamController = void 0;
|
||
this.clientRect = void 0;
|
||
this.hls = hls;
|
||
this.autoLevelCapping = Number.POSITIVE_INFINITY;
|
||
this.firstLevel = -1;
|
||
this.media = null;
|
||
this.restrictedLevels = [];
|
||
this.timer = undefined;
|
||
this.clientRect = null;
|
||
this.registerListeners();
|
||
}
|
||
|
||
var _proto = CapLevelController.prototype;
|
||
|
||
_proto.setStreamController = function setStreamController(streamController) {
|
||
this.streamController = streamController;
|
||
};
|
||
|
||
_proto.destroy = function destroy() {
|
||
this.unregisterListener();
|
||
|
||
if (this.hls.config.capLevelToPlayerSize) {
|
||
this.stopCapping();
|
||
}
|
||
|
||
this.media = null;
|
||
this.clientRect = null; // @ts-ignore
|
||
|
||
this.hls = this.streamController = null;
|
||
};
|
||
|
||
_proto.registerListeners = function registerListeners() {
|
||
var hls = this.hls;
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FPS_DROP_LEVEL_CAPPING, this.onFpsDropLevelCapping, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_CODECS, this.onBufferCodecs, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
};
|
||
|
||
_proto.unregisterListener = function unregisterListener() {
|
||
var hls = this.hls;
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FPS_DROP_LEVEL_CAPPING, this.onFpsDropLevelCapping, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_CODECS, this.onBufferCodecs, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
};
|
||
|
||
_proto.onFpsDropLevelCapping = function onFpsDropLevelCapping(event, data) {
|
||
// Don't add a restricted level more than once
|
||
if (CapLevelController.isLevelAllowed(data.droppedLevel, this.restrictedLevels)) {
|
||
this.restrictedLevels.push(data.droppedLevel);
|
||
}
|
||
};
|
||
|
||
_proto.onMediaAttaching = function onMediaAttaching(event, data) {
|
||
this.media = data.media instanceof HTMLVideoElement ? data.media : null;
|
||
};
|
||
|
||
_proto.onManifestParsed = function onManifestParsed(event, data) {
|
||
var hls = this.hls;
|
||
this.restrictedLevels = [];
|
||
this.firstLevel = data.firstLevel;
|
||
|
||
if (hls.config.capLevelToPlayerSize && data.video) {
|
||
// Start capping immediately if the manifest has signaled video codecs
|
||
this.startCapping();
|
||
}
|
||
} // Only activate capping when playing a video stream; otherwise, multi-bitrate audio-only streams will be restricted
|
||
// to the first level
|
||
;
|
||
|
||
_proto.onBufferCodecs = function onBufferCodecs(event, data) {
|
||
var hls = this.hls;
|
||
|
||
if (hls.config.capLevelToPlayerSize && data.video) {
|
||
// If the manifest did not signal a video codec, capping has been deferred until we're certain video is present
|
||
this.startCapping();
|
||
}
|
||
};
|
||
|
||
_proto.onMediaDetaching = function onMediaDetaching() {
|
||
this.stopCapping();
|
||
};
|
||
|
||
_proto.detectPlayerSize = function detectPlayerSize() {
|
||
if (this.media && this.mediaHeight > 0 && this.mediaWidth > 0) {
|
||
var levels = this.hls.levels;
|
||
|
||
if (levels.length) {
|
||
var hls = this.hls;
|
||
hls.autoLevelCapping = this.getMaxLevel(levels.length - 1);
|
||
|
||
if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
|
||
// if auto level capping has a higher value than the previous one, flush the buffer using nextLevelSwitch;
|
||
// this usually happens when the user goes into fullscreen mode.
|
||
this.streamController.nextLevelSwitch();
|
||
}
|
||
|
||
this.autoLevelCapping = hls.autoLevelCapping;
|
||
}
|
||
}
|
||
}
|
||
/*
|
||
* The returned level should be the one whose dimensions are equal to or greater than the media (player) dimensions (so the video will be downscaled)
|
||
*/
|
||
;
|
||
|
||
_proto.getMaxLevel = function getMaxLevel(capLevelIndex) {
|
||
var _this = this;
|
||
|
||
var levels = this.hls.levels;
|
||
|
||
if (!levels.length) {
|
||
return -1;
|
||
}
|
||
|
||
var validLevels = levels.filter(function (level, index) {
|
||
return CapLevelController.isLevelAllowed(index, _this.restrictedLevels) && index <= capLevelIndex;
|
||
});
|
||
this.clientRect = null;
|
||
return CapLevelController.getMaxLevelByMediaSize(validLevels, this.mediaWidth, this.mediaHeight);
|
||
};
|
||
|
||
_proto.startCapping = function startCapping() {
|
||
if (this.timer) {
|
||
// Don't reset capping if started twice; this can happen if the manifest signals a video codec
|
||
return;
|
||
}
|
||
|
||
this.autoLevelCapping = Number.POSITIVE_INFINITY;
|
||
this.hls.firstLevel = this.getMaxLevel(this.firstLevel);
|
||
self.clearInterval(this.timer);
|
||
this.timer = self.setInterval(this.detectPlayerSize.bind(this), 1000);
|
||
this.detectPlayerSize();
|
||
};
|
||
|
||
_proto.stopCapping = function stopCapping() {
|
||
this.restrictedLevels = [];
|
||
this.firstLevel = -1;
|
||
this.autoLevelCapping = Number.POSITIVE_INFINITY;
|
||
|
||
if (this.timer) {
|
||
self.clearInterval(this.timer);
|
||
this.timer = undefined;
|
||
}
|
||
};
|
||
|
||
_proto.getDimensions = function getDimensions() {
|
||
if (this.clientRect) {
|
||
return this.clientRect;
|
||
}
|
||
|
||
var media = this.media;
|
||
var boundsRect = {
|
||
width: 0,
|
||
height: 0
|
||
};
|
||
|
||
if (media) {
|
||
var clientRect = media.getBoundingClientRect();
|
||
boundsRect.width = clientRect.width;
|
||
boundsRect.height = clientRect.height;
|
||
|
||
if (!boundsRect.width && !boundsRect.height) {
|
||
// When the media element has no width or height (equivalent to not being in the DOM),
|
||
// then use its width and height attributes (media.width, media.height)
|
||
boundsRect.width = clientRect.right - clientRect.left || media.width || 0;
|
||
boundsRect.height = clientRect.bottom - clientRect.top || media.height || 0;
|
||
}
|
||
}
|
||
|
||
this.clientRect = boundsRect;
|
||
return boundsRect;
|
||
};
|
||
|
||
CapLevelController.isLevelAllowed = function isLevelAllowed(level, restrictedLevels) {
|
||
if (restrictedLevels === void 0) {
|
||
restrictedLevels = [];
|
||
}
|
||
|
||
return restrictedLevels.indexOf(level) === -1;
|
||
};
|
||
|
||
CapLevelController.getMaxLevelByMediaSize = function getMaxLevelByMediaSize(levels, width, height) {
|
||
if (!levels || !levels.length) {
|
||
return -1;
|
||
} // Levels can have the same dimensions but differing bandwidths - since levels are ordered, we can look to the next
|
||
// to determine whether we've chosen the greatest bandwidth for the media's dimensions
|
||
|
||
|
||
var atGreatestBandwidth = function atGreatestBandwidth(curLevel, nextLevel) {
|
||
if (!nextLevel) {
|
||
return true;
|
||
}
|
||
|
||
return curLevel.width !== nextLevel.width || curLevel.height !== nextLevel.height;
|
||
}; // If we run through the loop without breaking, the media's dimensions are greater than every level, so default to
|
||
// the max level
|
||
|
||
|
||
var maxLevelIndex = levels.length - 1;
|
||
|
||
for (var i = 0; i < levels.length; i += 1) {
|
||
var level = levels[i];
|
||
|
||
if ((level.width >= width || level.height >= height) && atGreatestBandwidth(level, levels[i + 1])) {
|
||
maxLevelIndex = i;
|
||
break;
|
||
}
|
||
}
|
||
|
||
return maxLevelIndex;
|
||
};
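// Illustrative sketch, not part of the hls.js output: how getMaxLevelByMediaSize picks a
// level for a given player size (the levels and dimensions below are made up for the example).
/*
var levels = [
  { width: 640,  height: 360  },
  { width: 1280, height: 720  },
  { width: 1920, height: 1080 }
];
CapLevelController.getMaxLevelByMediaSize(levels, 960, 540);
// -> 1: 640x360 is smaller than the 960x540 player, 1280x720 is the first level that
//    covers it, and the next level has different dimensions, so 1280x720 is already the
//    greatest bandwidth at that size.
*/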
|
||
|
||
_createClass(CapLevelController, [{
|
||
key: "mediaWidth",
|
||
get: function get() {
|
||
return this.getDimensions().width * this.contentScaleFactor;
|
||
}
|
||
}, {
|
||
key: "mediaHeight",
|
||
get: function get() {
|
||
return this.getDimensions().height * this.contentScaleFactor;
|
||
}
|
||
}, {
|
||
key: "contentScaleFactor",
|
||
get: function get() {
|
||
var pixelRatio = 1;
|
||
|
||
if (!this.hls.config.ignoreDevicePixelRatio) {
|
||
try {
|
||
pixelRatio = self.devicePixelRatio;
|
||
} catch (e) {
|
||
/* no-op */
|
||
}
|
||
}
|
||
|
||
return pixelRatio;
|
||
}
|
||
}]);
|
||
|
||
return CapLevelController;
|
||
}();
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (CapLevelController);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/controller/cmcd-controller.ts":
|
||
/*!*******************************************!*\
|
||
!*** ./src/controller/cmcd-controller.ts ***!
|
||
\*******************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return CMCDController; });
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _types_cmcd__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../types/cmcd */ "./src/types/cmcd.ts");
|
||
/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
function _createForOfIteratorHelperLoose(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (it) return (it = it.call(o)).next.bind(it); if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; return function () { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
|
||
|
||
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
|
||
|
||
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
|
||
|
||
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
/**
|
||
* Controller to deal with Common Media Client Data (CMCD)
|
||
* @see https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf
|
||
*/
|
||
|
||
var CMCDController = /*#__PURE__*/function () {
|
||
// eslint-disable-line no-restricted-globals
|
||
// eslint-disable-line no-restricted-globals
|
||
function CMCDController(hls) {
|
||
var _this = this;
|
||
|
||
this.hls = void 0;
|
||
this.config = void 0;
|
||
this.media = void 0;
|
||
this.sid = void 0;
|
||
this.cid = void 0;
|
||
this.useHeaders = false;
|
||
this.initialized = false;
|
||
this.starved = false;
|
||
this.buffering = true;
|
||
this.audioBuffer = void 0;
|
||
this.videoBuffer = void 0;
|
||
|
||
this.onWaiting = function () {
|
||
if (_this.initialized) {
|
||
_this.starved = true;
|
||
}
|
||
|
||
_this.buffering = true;
|
||
};
|
||
|
||
this.onPlaying = function () {
|
||
if (!_this.initialized) {
|
||
_this.initialized = true;
|
||
}
|
||
|
||
_this.buffering = false;
|
||
};
|
||
|
||
this.applyPlaylistData = function (context) {
|
||
try {
|
||
_this.apply(context, {
|
||
ot: _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].MANIFEST,
|
||
su: !_this.initialized
|
||
});
|
||
} catch (error) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn('Could not generate manifest CMCD data.', error);
}
};
this.applyFragmentData = function (context) {
try {
var fragment = context.frag;
var level = _this.hls.levels[fragment.level];
var ot = _this.getObjectType(fragment);
var data = {
d: fragment.duration * 1000,
ot: ot
};
if (ot === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].VIDEO || ot === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].AUDIO || ot == _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].MUXED) {
data.br = level.bitrate / 1000;
data.tb = _this.getTopBandwidth(ot) / 1000;
data.bl = _this.getBufferLength(ot);
}
_this.apply(context, data);
} catch (error) {
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn('Could not generate segment CMCD data.', error);
}
};
this.hls = hls;
var config = this.config = hls.config;
var cmcd = config.cmcd;
if (cmcd != null) {
config.pLoader = this.createPlaylistLoader();
config.fLoader = this.createFragmentLoader();
this.sid = cmcd.sessionId || CMCDController.uuid();
this.cid = cmcd.contentId;
this.useHeaders = cmcd.useHeaders === true;
this.registerListeners();
}
}
var _proto = CMCDController.prototype;
_proto.registerListeners = function registerListeners() {
var hls = this.hls;
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHED, this.onMediaDetached, this);
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_CREATED, this.onBufferCreated, this);
};
_proto.unregisterListeners = function unregisterListeners() {
var hls = this.hls;
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHED, this.onMediaDetached, this);
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_CREATED, this.onBufferCreated, this);
this.onMediaDetached();
};
_proto.destroy = function destroy() {
this.unregisterListeners(); // @ts-ignore
this.hls = this.config = this.audioBuffer = this.videoBuffer = null;
};
_proto.onMediaAttached = function onMediaAttached(event, data) {
this.media = data.media;
this.media.addEventListener('waiting', this.onWaiting);
this.media.addEventListener('playing', this.onPlaying);
};
_proto.onMediaDetached = function onMediaDetached() {
if (!this.media) {
return;
}
this.media.removeEventListener('waiting', this.onWaiting);
this.media.removeEventListener('playing', this.onPlaying); // @ts-ignore
this.media = null;
};
_proto.onBufferCreated = function onBufferCreated(event, data) {
var _data$tracks$audio, _data$tracks$video;
this.audioBuffer = (_data$tracks$audio = data.tracks.audio) === null || _data$tracks$audio === void 0 ? void 0 : _data$tracks$audio.buffer;
this.videoBuffer = (_data$tracks$video = data.tracks.video) === null || _data$tracks$video === void 0 ? void 0 : _data$tracks$video.buffer;
};
/**
* Create baseline CMCD data
*/
_proto.createData = function createData() {
var _this$media;
return {
v: _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDVersion"],
sf: _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDStreamingFormat"].HLS,
sid: this.sid,
cid: this.cid,
pr: (_this$media = this.media) === null || _this$media === void 0 ? void 0 : _this$media.playbackRate,
mtp: this.hls.bandwidthEstimate / 1000
};
}
/**
* Apply CMCD data to a request.
*/
;
_proto.apply = function apply(context, data) {
|
||
if (data === void 0) {
|
||
data = {};
|
||
}
|
||
|
||
// apply baseline data
|
||
_extends(data, this.createData());
|
||
|
||
var isVideo = data.ot === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].INIT || data.ot === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].VIDEO || data.ot === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].MUXED;
|
||
|
||
if (this.starved && isVideo) {
|
||
data.bs = true;
|
||
data.su = true;
|
||
this.starved = false;
|
||
}
|
||
|
||
if (data.su == null) {
|
||
data.su = this.buffering;
|
||
} // TODO: Implement rtp, nrr, nor, dl
|
||
|
||
|
||
if (this.useHeaders) {
|
||
var headers = CMCDController.toHeaders(data);
|
||
|
||
if (!Object.keys(headers).length) {
|
||
return;
|
||
}
|
||
|
||
if (!context.headers) {
|
||
context.headers = {};
|
||
}
|
||
|
||
_extends(context.headers, headers);
|
||
} else {
|
||
var query = CMCDController.toQuery(data);
|
||
|
||
if (!query) {
|
||
return;
|
||
}
|
||
|
||
context.url = CMCDController.appendQueryToUri(context.url, query);
|
||
}
|
||
}
|
||
/**
|
||
* Apply CMCD data to a manifest request.
|
||
*/
|
||
;
|
||
|
||
/**
|
||
* The CMCD object type.
|
||
*/
|
||
_proto.getObjectType = function getObjectType(fragment) {
|
||
var type = fragment.type;
|
||
|
||
if (type === 'subtitle') {
|
||
return _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].TIMED_TEXT;
|
||
}
|
||
|
||
if (fragment.sn === 'initSegment') {
|
||
return _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].INIT;
|
||
}
|
||
|
||
if (type === 'audio') {
|
||
return _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].AUDIO;
|
||
}
|
||
|
||
if (type === 'main') {
|
||
if (!this.hls.audioTracks.length) {
|
||
return _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].MUXED;
|
||
}
|
||
|
||
return _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].VIDEO;
|
||
}
|
||
|
||
return undefined;
|
||
}
|
||
/**
|
||
* Get the highest bitrate.
|
||
*/
|
||
;
|
||
|
||
_proto.getTopBandwidth = function getTopBandwidth(type) {
|
||
var bitrate = 0;
|
||
var levels;
|
||
var hls = this.hls;
|
||
|
||
if (type === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].AUDIO) {
|
||
levels = hls.audioTracks;
|
||
} else {
|
||
var max = hls.maxAutoLevel;
|
||
var len = max > -1 ? max + 1 : hls.levels.length;
|
||
levels = hls.levels.slice(0, len);
|
||
}
|
||
|
||
for (var _iterator = _createForOfIteratorHelperLoose(levels), _step; !(_step = _iterator()).done;) {
|
||
var level = _step.value;
|
||
|
||
if (level.bitrate > bitrate) {
|
||
bitrate = level.bitrate;
|
||
}
|
||
}
|
||
|
||
return bitrate > 0 ? bitrate : NaN;
|
||
}
|
||
/**
|
||
* Get the buffer length for a media type in milliseconds
|
||
*/
|
||
;
|
||
|
||
_proto.getBufferLength = function getBufferLength(type) {
|
||
var media = this.hls.media;
|
||
var buffer = type === _types_cmcd__WEBPACK_IMPORTED_MODULE_1__["CMCDObjectType"].AUDIO ? this.audioBuffer : this.videoBuffer;
|
||
|
||
if (!buffer || !media) {
|
||
return NaN;
|
||
}
|
||
|
||
var info = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_2__["BufferHelper"].bufferInfo(buffer, media.currentTime, this.config.maxBufferHole);
|
||
return info.len * 1000;
|
||
}
|
||
/**
|
||
* Create a playlist loader
|
||
*/
|
||
;
|
||
|
||
_proto.createPlaylistLoader = function createPlaylistLoader() {
|
||
var pLoader = this.config.pLoader;
|
||
var apply = this.applyPlaylistData;
|
||
var Ctor = pLoader || this.config.loader;
|
||
return /*#__PURE__*/function () {
|
||
function CmcdPlaylistLoader(config) {
|
||
this.loader = void 0;
|
||
this.loader = new Ctor(config);
|
||
}
|
||
|
||
var _proto2 = CmcdPlaylistLoader.prototype;
|
||
|
||
_proto2.destroy = function destroy() {
|
||
this.loader.destroy();
|
||
};
|
||
|
||
_proto2.abort = function abort() {
|
||
this.loader.abort();
|
||
};
|
||
|
||
_proto2.load = function load(context, config, callbacks) {
|
||
apply(context);
|
||
this.loader.load(context, config, callbacks);
|
||
};
|
||
|
||
_createClass(CmcdPlaylistLoader, [{
|
||
key: "stats",
|
||
get: function get() {
|
||
return this.loader.stats;
|
||
}
|
||
}, {
|
||
key: "context",
|
||
get: function get() {
|
||
return this.loader.context;
|
||
}
|
||
}]);
|
||
|
||
return CmcdPlaylistLoader;
|
||
}();
|
||
}
|
||
/**
|
||
* Create a fragment loader
|
||
*/
|
||
;
|
||
|
||
_proto.createFragmentLoader = function createFragmentLoader() {
|
||
var fLoader = this.config.fLoader;
|
||
var apply = this.applyFragmentData;
|
||
var Ctor = fLoader || this.config.loader;
|
||
return /*#__PURE__*/function () {
|
||
function CmcdFragmentLoader(config) {
|
||
this.loader = void 0;
|
||
this.loader = new Ctor(config);
|
||
}
|
||
|
||
var _proto3 = CmcdFragmentLoader.prototype;
|
||
|
||
_proto3.destroy = function destroy() {
|
||
this.loader.destroy();
|
||
};
|
||
|
||
_proto3.abort = function abort() {
|
||
this.loader.abort();
|
||
};
|
||
|
||
_proto3.load = function load(context, config, callbacks) {
|
||
apply(context);
|
||
this.loader.load(context, config, callbacks);
|
||
};
|
||
|
||
_createClass(CmcdFragmentLoader, [{
|
||
key: "stats",
|
||
get: function get() {
|
||
return this.loader.stats;
|
||
}
|
||
}, {
|
||
key: "context",
|
||
get: function get() {
|
||
return this.loader.context;
|
||
}
|
||
}]);
|
||
|
||
return CmcdFragmentLoader;
|
||
}();
|
||
}
|
||
/**
|
||
* Generate a random v4 UUID
|
||
*
|
||
* @returns {string}
|
||
*/
|
||
;
|
||
|
||
CMCDController.uuid = function uuid() {
|
||
var url = URL.createObjectURL(new Blob());
|
||
var uuid = url.toString();
|
||
URL.revokeObjectURL(url);
|
||
return uuid.substr(uuid.lastIndexOf('/') + 1);
|
||
}
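/*
 * Note on the trick above: in current browsers `URL.createObjectURL(new Blob())`
 * returns a URL shaped like "blob:<origin>/<uuid>", e.g.
 * "blob:https://example.com/0a1b2c3d-...." (hypothetical), so taking the substring
 * after the last "/" yields a v4 UUID. The exact URL shape is a browser detail
 * rather than something this code controls.
 */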
|
||
/**
|
||
* Serialize a CMCD data object according to the rules defined in the
|
||
* section 3.2 of
|
||
* [CTA-5004](https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf).
|
||
*/
|
||
;
|
||
|
||
CMCDController.serialize = function serialize(data) {
|
||
var results = [];
|
||
|
||
var isValid = function isValid(value) {
|
||
return !Number.isNaN(value) && value != null && value !== '' && value !== false;
|
||
};
|
||
|
||
var toRounded = function toRounded(value) {
|
||
return Math.round(value);
|
||
};
|
||
|
||
var toHundred = function toHundred(value) {
|
||
return toRounded(value / 100) * 100;
|
||
};
|
||
|
||
var toUrlSafe = function toUrlSafe(value) {
|
||
return encodeURIComponent(value);
|
||
};
|
||
|
||
var formatters = {
|
||
br: toRounded,
|
||
d: toRounded,
|
||
bl: toHundred,
|
||
dl: toHundred,
|
||
mtp: toHundred,
|
||
nor: toUrlSafe,
|
||
rtp: toHundred,
|
||
tb: toRounded
|
||
};
|
||
var keys = Object.keys(data || {}).sort();
|
||
|
||
for (var _iterator2 = _createForOfIteratorHelperLoose(keys), _step2; !(_step2 = _iterator2()).done;) {
|
||
var key = _step2.value;
|
||
var value = data[key]; // ignore invalid values
|
||
|
||
if (!isValid(value)) {
|
||
continue;
|
||
} // Version should only be reported if not equal to 1.
|
||
|
||
|
||
if (key === 'v' && value === 1) {
|
||
continue;
|
||
} // Playback rate should only be sent if not equal to 1.
|
||
|
||
|
||
if (key == 'pr' && value === 1) {
|
||
continue;
|
||
} // Certain values require special formatting
|
||
|
||
|
||
var formatter = formatters[key];
|
||
|
||
if (formatter) {
|
||
value = formatter(value);
|
||
} // Serialize the key/value pair
|
||
|
||
|
||
var type = typeof value;
|
||
var result = void 0;
|
||
|
||
if (key === 'ot' || key === 'sf' || key === 'st') {
|
||
result = key + "=" + value;
|
||
} else if (type === 'boolean') {
|
||
result = key;
|
||
} else if (type === 'number') {
|
||
result = key + "=" + value;
|
||
} else {
|
||
result = key + "=" + JSON.stringify(value);
|
||
}
|
||
|
||
results.push(result);
|
||
}
|
||
|
||
return results.join(',');
|
||
}
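/*
 * Worked example of the serialization rules above (hypothetical input values):
 *
 *   CMCDController.serialize({ br: 2999.7, bl: 4360, ot: 'v', su: true, v: 1 })
 *   // keys are sorted; `bl` is rounded to the nearest 100 and `br` to the nearest
 *   // integer; `su` (a true boolean) is reduced to the bare key; `v` is dropped
 *   // because it equals 1:
 *   // => 'bl=4400,br=3000,ot=v,su'
 */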
|
||
/**
|
||
* Convert a CMCD data object to request headers according to the rules
|
||
* defined in the section 2.1 and 3.2 of
|
||
* [CTA-5004](https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf).
|
||
*/
|
||
;
|
||
|
||
CMCDController.toHeaders = function toHeaders(data) {
|
||
var keys = Object.keys(data);
|
||
var headers = {};
|
||
var headerNames = ['Object', 'Request', 'Session', 'Status'];
|
||
var headerGroups = [{}, {}, {}, {}];
|
||
var headerMap = {
|
||
br: 0,
|
||
d: 0,
|
||
ot: 0,
|
||
tb: 0,
|
||
bl: 1,
|
||
dl: 1,
|
||
mtp: 1,
|
||
nor: 1,
|
||
nrr: 1,
|
||
su: 1,
|
||
cid: 2,
|
||
pr: 2,
|
||
sf: 2,
|
||
sid: 2,
|
||
st: 2,
|
||
v: 2,
|
||
bs: 3,
|
||
rtp: 3
|
||
};
|
||
|
||
for (var _i = 0, _keys = keys; _i < _keys.length; _i++) {
|
||
var key = _keys[_i];
|
||
// Unmapped fields are mapped to the Request header
|
||
var index = headerMap[key] != null ? headerMap[key] : 1;
|
||
headerGroups[index][key] = data[key];
|
||
}
|
||
|
||
for (var i = 0; i < headerGroups.length; i++) {
|
||
var value = CMCDController.serialize(headerGroups[i]);
|
||
|
||
if (value) {
|
||
headers["CMCD-" + headerNames[i]] = value;
|
||
}
|
||
}
|
||
|
||
return headers;
|
||
}
|
||
/**
|
||
* Convert a CMCD data object to query args according to the rules
|
||
* defined in the section 2.2 and 3.2 of
|
||
* [CTA-5004](https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf).
|
||
*/
|
||
;
|
||
|
||
CMCDController.toQuery = function toQuery(data) {
|
||
return "CMCD=" + encodeURIComponent(CMCDController.serialize(data));
|
||
}
|
||
/**
|
||
* Append query args to a uri.
|
||
*/
|
||
;
|
||
|
||
CMCDController.appendQueryToUri = function appendQueryToUri(uri, query) {
|
||
if (!query) {
|
||
return uri;
|
||
}
|
||
|
||
var separator = uri.includes('?') ? '&' : '?';
|
||
return "" + uri + separator + query;
|
||
};
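/*
 * Example of the query-arg transport built from the two helpers above
 * (hypothetical data and URL):
 *
 *   var query = CMCDController.toQuery({ ot: 'm', su: true });
 *   // serialize() gives 'ot=m,su', which is then percent-encoded:
 *   // query === 'CMCD=ot%3Dm%2Csu'
 *   CMCDController.appendQueryToUri('https://example.com/playlist.m3u8', query);
 *   // => 'https://example.com/playlist.m3u8?CMCD=ot%3Dm%2Csu'
 *   // ('&' is used instead of '?' if the URI already contains a query string)
 */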
|
||
|
||
return CMCDController;
|
||
}();
|
||
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/controller/eme-controller.ts":
|
||
/*!******************************************!*\
|
||
!*** ./src/controller/eme-controller.ts ***!
|
||
\******************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/mediakeys-helper */ "./src/utils/mediakeys-helper.ts");
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
/**
|
||
* @author Stephan Hesse <disparat@gmail.com> | <tchakabam@gmail.com>
|
||
*
|
||
* DRM support for Hls.js
|
||
*/
|
||
|
||
|
||
|
||
|
||
var MAX_LICENSE_REQUEST_FAILURES = 3;
|
||
/**
|
||
* @see https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemConfiguration
|
||
* @param {Array<string>} audioCodecs List of required audio codecs to support
|
||
* @param {Array<string>} videoCodecs List of required video codecs to support
|
||
* @param {object} drmSystemOptions Optional parameters/requirements for the key-system
|
||
* @returns {Array<MediaSystemConfiguration>} An array of supported configurations
|
||
*/
|
||
|
||
var createWidevineMediaKeySystemConfigurations = function createWidevineMediaKeySystemConfigurations(audioCodecs, videoCodecs, drmSystemOptions) {
|
||
/* jshint ignore:line */
|
||
var baseConfig = {
|
||
// initDataTypes: ['keyids', 'mp4'],
|
||
// label: "",
|
||
// persistentState: "not-allowed", // or "required" ?
|
||
// distinctiveIdentifier: "not-allowed", // or "required" ?
|
||
// sessionTypes: ['temporary'],
|
||
audioCapabilities: [],
|
||
// { contentType: 'audio/mp4; codecs="mp4a.40.2"' }
|
||
videoCapabilities: [] // { contentType: 'video/mp4; codecs="avc1.42E01E"' }
|
||
|
||
};
|
||
audioCodecs.forEach(function (codec) {
|
||
baseConfig.audioCapabilities.push({
|
||
contentType: "audio/mp4; codecs=\"" + codec + "\"",
|
||
robustness: drmSystemOptions.audioRobustness || ''
|
||
});
|
||
});
|
||
videoCodecs.forEach(function (codec) {
|
||
baseConfig.videoCapabilities.push({
|
||
contentType: "video/mp4; codecs=\"" + codec + "\"",
|
||
robustness: drmSystemOptions.videoRobustness || ''
|
||
});
|
||
});
|
||
return [baseConfig];
|
||
};
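/*
 * Shape of the configuration returned above for one audio and one video codec
 * (codec strings taken from the inline comments; robustness defaults to ''):
 *
 *   [{
 *     audioCapabilities: [{ contentType: 'audio/mp4; codecs="mp4a.40.2"', robustness: '' }],
 *     videoCapabilities: [{ contentType: 'video/mp4; codecs="avc1.42E01E"', robustness: '' }]
 *   }]
 */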
|
||
/**
|
||
* The idea here is to handle key-system (and their respective platforms) specific configuration differences
|
||
* in order to work with the local requestMediaKeySystemAccess method.
|
||
*
|
||
* We can also rule-out platform-related key-system support at this point by throwing an error.
|
||
*
|
||
* @param {string} keySystem Identifier for the key-system, see `KeySystems` enum
|
||
* @param {Array<string>} audioCodecs List of required audio codecs to support
|
||
* @param {Array<string>} videoCodecs List of required video codecs to support
|
||
* @throws will throw an error if an unknown key-system is passed
|
||
* @returns {Array<MediaSystemConfiguration>} A non-empty Array of MediaKeySystemConfiguration objects
|
||
*/
|
||
|
||
|
||
var getSupportedMediaKeySystemConfigurations = function getSupportedMediaKeySystemConfigurations(keySystem, audioCodecs, videoCodecs, drmSystemOptions) {
|
||
switch (keySystem) {
|
||
case _utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_3__["KeySystems"].WIDEVINE:
|
||
return createWidevineMediaKeySystemConfigurations(audioCodecs, videoCodecs, drmSystemOptions);
|
||
|
||
default:
|
||
throw new Error("Unknown key-system: " + keySystem);
|
||
}
|
||
};
|
||
|
||
/**
|
||
* Controller to deal with encrypted media extensions (EME)
|
||
* @see https://developer.mozilla.org/en-US/docs/Web/API/Encrypted_Media_Extensions_API
|
||
*
|
||
* @class
|
||
* @constructor
|
||
*/
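/*
 * Illustrative sketch of the config fields this controller reads in its
 * constructor (the license URL and header are hypothetical values):
 *
 *   var hls = new Hls({
 *     emeEnabled: true,
 *     widevineLicenseUrl: 'https://license.example.com/widevine',
 *     drmSystemOptions: { audioRobustness: '', videoRobustness: '' },
 *     licenseXhrSetup: function (xhr, url) {
 *       xhr.setRequestHeader('X-Example-Token', 'token');
 *     }
 *   });
 */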
|
||
var EMEController = /*#__PURE__*/function () {
|
||
/**
|
||
* @constructs
|
||
* @param {Hls} hls Our Hls.js instance
|
||
*/
|
||
function EMEController(hls) {
|
||
this.hls = void 0;
|
||
this._widevineLicenseUrl = void 0;
|
||
this._licenseXhrSetup = void 0;
|
||
this._licenseResponseCallback = void 0;
|
||
this._emeEnabled = void 0;
|
||
this._requestMediaKeySystemAccess = void 0;
|
||
this._drmSystemOptions = void 0;
|
||
this._config = void 0;
|
||
this._mediaKeysList = [];
|
||
this._media = null;
|
||
this._hasSetMediaKeys = false;
|
||
this._requestLicenseFailureCount = 0;
|
||
this.mediaKeysPromise = null;
|
||
this._onMediaEncrypted = this.onMediaEncrypted.bind(this);
|
||
this.hls = hls;
|
||
this._config = hls.config;
|
||
this._widevineLicenseUrl = this._config.widevineLicenseUrl;
|
||
this._licenseXhrSetup = this._config.licenseXhrSetup;
|
||
this._licenseResponseCallback = this._config.licenseResponseCallback;
|
||
this._emeEnabled = this._config.emeEnabled;
|
||
this._requestMediaKeySystemAccess = this._config.requestMediaKeySystemAccessFunc;
|
||
this._drmSystemOptions = this._config.drmSystemOptions;
|
||
|
||
this._registerListeners();
|
||
}
|
||
|
||
var _proto = EMEController.prototype;
|
||
|
||
_proto.destroy = function destroy() {
|
||
this._unregisterListeners(); // @ts-ignore
|
||
|
||
|
||
this.hls = this._onMediaEncrypted = null;
|
||
this._requestMediaKeySystemAccess = null;
|
||
};
|
||
|
||
_proto._registerListeners = function _registerListeners() {
|
||
this.hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
|
||
this.hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHED, this.onMediaDetached, this);
|
||
this.hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
|
||
};
|
||
|
||
_proto._unregisterListeners = function _unregisterListeners() {
|
||
this.hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
|
||
this.hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHED, this.onMediaDetached, this);
|
||
this.hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
|
||
}
|
||
/**
|
||
* @param {string} keySystem Identifier for the key-system, see `KeySystems` enum
|
||
* @returns {string} License server URL for key-system (if any configured, otherwise causes error)
|
||
* @throws if an unsupported key-system is passed
|
||
*/
|
||
;
|
||
|
||
_proto.getLicenseServerUrl = function getLicenseServerUrl(keySystem) {
|
||
switch (keySystem) {
|
||
case _utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_3__["KeySystems"].WIDEVINE:
|
||
if (!this._widevineLicenseUrl) {
|
||
break;
|
||
}
|
||
|
||
return this._widevineLicenseUrl;
|
||
}
|
||
|
||
throw new Error("no license server URL configured for key-system \"" + keySystem + "\"");
|
||
}
|
||
/**
|
||
* Requests access object and adds it to our list upon success
|
||
* @private
|
||
* @param {string} keySystem System ID (see `KeySystems`)
|
||
* @param {Array<string>} audioCodecs List of required audio codecs to support
|
||
* @param {Array<string>} videoCodecs List of required video codecs to support
|
||
* @throws When an unsupported KeySystem is passed
|
||
*/
|
||
;
|
||
|
||
_proto._attemptKeySystemAccess = function _attemptKeySystemAccess(keySystem, audioCodecs, videoCodecs) {
|
||
var _this = this;
|
||
|
||
// This can throw, but is caught in event handler callpath
|
||
var mediaKeySystemConfigs = getSupportedMediaKeySystemConfigurations(keySystem, audioCodecs, videoCodecs, this._drmSystemOptions);
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('Requesting encrypted media key-system access'); // expecting interface like window.navigator.requestMediaKeySystemAccess
|
||
|
||
var keySystemAccessPromise = this.requestMediaKeySystemAccess(keySystem, mediaKeySystemConfigs);
|
||
this.mediaKeysPromise = keySystemAccessPromise.then(function (mediaKeySystemAccess) {
|
||
return _this._onMediaKeySystemAccessObtained(keySystem, mediaKeySystemAccess);
|
||
});
|
||
keySystemAccessPromise.catch(function (err) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("Failed to obtain key-system \"" + keySystem + "\" access:", err);
|
||
});
|
||
};
|
||
|
||
/**
|
||
* Handles obtaining access to a key-system
|
||
* @private
|
||
* @param {string} keySystem
|
||
* @param {MediaKeySystemAccess} mediaKeySystemAccess https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemAccess
|
||
*/
|
||
_proto._onMediaKeySystemAccessObtained = function _onMediaKeySystemAccessObtained(keySystem, mediaKeySystemAccess) {
|
||
var _this2 = this;
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("Access for key-system \"" + keySystem + "\" obtained");
|
||
var mediaKeysListItem = {
|
||
mediaKeysSessionInitialized: false,
|
||
mediaKeySystemAccess: mediaKeySystemAccess,
|
||
mediaKeySystemDomain: keySystem
|
||
};
|
||
|
||
this._mediaKeysList.push(mediaKeysListItem);
|
||
|
||
var mediaKeysPromise = Promise.resolve().then(function () {
|
||
return mediaKeySystemAccess.createMediaKeys();
|
||
}).then(function (mediaKeys) {
|
||
mediaKeysListItem.mediaKeys = mediaKeys;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("Media-keys created for key-system \"" + keySystem + "\"");
|
||
|
||
_this2._onMediaKeysCreated();
|
||
|
||
return mediaKeys;
|
||
});
|
||
mediaKeysPromise.catch(function (err) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Failed to create media-keys:', err);
|
||
});
|
||
return mediaKeysPromise;
|
||
}
|
||
/**
|
||
* Handles key-creation (represents access to CDM). We are going to create key-sessions upon this
|
||
* for all existing keys where no session exists yet.
|
||
*
|
||
* @private
|
||
*/
|
||
;
|
||
|
||
_proto._onMediaKeysCreated = function _onMediaKeysCreated() {
|
||
var _this3 = this;
|
||
|
||
// check for all key-list items if a session exists, otherwise, create one
|
||
this._mediaKeysList.forEach(function (mediaKeysListItem) {
|
||
if (!mediaKeysListItem.mediaKeysSession) {
|
||
// mediaKeys is definitely initialized here
|
||
mediaKeysListItem.mediaKeysSession = mediaKeysListItem.mediaKeys.createSession();
|
||
|
||
_this3._onNewMediaKeySession(mediaKeysListItem.mediaKeysSession);
|
||
}
|
||
});
|
||
}
|
||
/**
|
||
* @private
|
||
* @param {*} keySession
|
||
*/
|
||
;
|
||
|
||
_proto._onNewMediaKeySession = function _onNewMediaKeySession(keySession) {
|
||
var _this4 = this;
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("New key-system session " + keySession.sessionId);
|
||
keySession.addEventListener('message', function (event) {
|
||
_this4._onKeySessionMessage(keySession, event.message);
|
||
}, false);
|
||
}
|
||
/**
|
||
* @private
|
||
* @param {MediaKeySession} keySession
|
||
* @param {ArrayBuffer} message
|
||
*/
|
||
;
|
||
|
||
_proto._onKeySessionMessage = function _onKeySessionMessage(keySession, message) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('Got EME message event, creating license request');
|
||
|
||
this._requestLicense(message, function (data) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("Received license data (length: " + (data ? data.byteLength : data) + "), updating key-session");
|
||
keySession.update(data);
|
||
});
|
||
}
|
||
/**
|
||
* @private
|
||
* @param e {MediaEncryptedEvent}
|
||
*/
|
||
;
|
||
|
||
_proto.onMediaEncrypted = function onMediaEncrypted(e) {
|
||
var _this5 = this;
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("Media is encrypted using \"" + e.initDataType + "\" init data type");
|
||
|
||
if (!this.mediaKeysPromise) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Fatal: Media is encrypted but no CDM access or no keys have been requested');
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_KEYS,
|
||
fatal: true
|
||
});
|
||
return;
|
||
}
|
||
|
||
var finallySetKeyAndStartSession = function finallySetKeyAndStartSession(mediaKeys) {
|
||
if (!_this5._media) {
|
||
return;
|
||
}
|
||
|
||
_this5._attemptSetMediaKeys(mediaKeys);
|
||
|
||
_this5._generateRequestWithPreferredKeySession(e.initDataType, e.initData);
|
||
}; // Could use `Promise.finally` but some Promise polyfills are missing it
|
||
|
||
|
||
this.mediaKeysPromise.then(finallySetKeyAndStartSession).catch(finallySetKeyAndStartSession);
|
||
}
|
||
/**
|
||
* @private
|
||
*/
|
||
;
|
||
|
||
_proto._attemptSetMediaKeys = function _attemptSetMediaKeys(mediaKeys) {
|
||
if (!this._media) {
|
||
throw new Error('Attempted to set mediaKeys without first attaching a media element');
|
||
}
|
||
|
||
if (!this._hasSetMediaKeys) {
|
||
// FIXME: see if we can/want/need-to really to deal with several potential key-sessions?
|
||
var keysListItem = this._mediaKeysList[0];
|
||
|
||
if (!keysListItem || !keysListItem.mediaKeys) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Fatal: Media is encrypted but no CDM access or no keys have been obtained yet');
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_KEYS,
|
||
fatal: true
|
||
});
|
||
return;
|
||
}
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('Setting keys for encrypted media');
|
||
|
||
this._media.setMediaKeys(keysListItem.mediaKeys);
|
||
|
||
this._hasSetMediaKeys = true;
|
||
}
|
||
}
|
||
/**
|
||
* @private
|
||
*/
|
||
;
|
||
|
||
_proto._generateRequestWithPreferredKeySession = function _generateRequestWithPreferredKeySession(initDataType, initData) {
|
||
var _this6 = this;
|
||
|
||
// FIXME: see if we can/want/need-to really to deal with several potential key-sessions?
|
||
var keysListItem = this._mediaKeysList[0];
|
||
|
||
if (!keysListItem) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Fatal: Media is encrypted but not any key-system access has been obtained yet');
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_ACCESS,
|
||
fatal: true
|
||
});
|
||
return;
|
||
}
|
||
|
||
if (keysListItem.mediaKeysSessionInitialized) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn('Key-Session already initialized but requested again');
|
||
return;
|
||
}
|
||
|
||
var keySession = keysListItem.mediaKeysSession;
|
||
|
||
if (!keySession) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Fatal: Media is encrypted but no key-session existing');
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_SESSION,
|
||
fatal: true
|
||
});
|
||
return;
|
||
} // initData is null if the media is not CORS-same-origin
|
||
|
||
|
||
if (!initData) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn('Fatal: initData required for generating a key session is null');
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_INIT_DATA,
|
||
fatal: true
|
||
});
|
||
return;
|
||
}
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("Generating key-session request for \"" + initDataType + "\" init data type");
|
||
keysListItem.mediaKeysSessionInitialized = true;
|
||
keySession.generateRequest(initDataType, initData).then(function () {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].debug('Key-session generation succeeded');
|
||
}).catch(function (err) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Error generating key-session request:', err);
|
||
|
||
_this6.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_SESSION,
|
||
fatal: false
|
||
});
|
||
});
|
||
}
|
||
/**
|
||
* @private
|
||
* @param {string} url License server URL
|
||
* @param {ArrayBuffer} keyMessage Message data issued by key-system
|
||
* @param {function} callback Called when XHR has succeeded
|
||
* @returns {XMLHttpRequest} Unsent (but opened state) XHR object
|
||
* @throws if XMLHttpRequest construction failed
|
||
*/
|
||
;
|
||
|
||
_proto._createLicenseXhr = function _createLicenseXhr(url, keyMessage, callback) {
|
||
var xhr = new XMLHttpRequest();
|
||
xhr.responseType = 'arraybuffer';
|
||
xhr.onreadystatechange = this._onLicenseRequestReadyStageChange.bind(this, xhr, url, keyMessage, callback);
|
||
var licenseXhrSetup = this._licenseXhrSetup;
|
||
|
||
if (licenseXhrSetup) {
|
||
try {
|
||
licenseXhrSetup.call(this.hls, xhr, url);
|
||
licenseXhrSetup = undefined;
|
||
} catch (e) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error(e);
|
||
}
|
||
}
|
||
|
||
try {
|
||
// if licenseXhrSetup did not yet call open, let's do it now
|
||
if (!xhr.readyState) {
|
||
xhr.open('POST', url, true);
|
||
}
|
||
|
||
if (licenseXhrSetup) {
|
||
licenseXhrSetup.call(this.hls, xhr, url);
|
||
}
|
||
} catch (e) {
|
||
// IE11 throws an exception on xhr.open if attempting to access an HTTP resource over HTTPS
|
||
throw new Error("issue setting up KeySystem license XHR " + e);
|
||
}
|
||
|
||
return xhr;
|
||
}
|
||
/**
|
||
* @private
|
||
* @param {XMLHttpRequest} xhr
|
||
* @param {string} url License server URL
|
||
* @param {ArrayBuffer} keyMessage Message data issued by key-system
|
||
* @param {function} callback Called when XHR has succeeded
|
||
*/
|
||
;
|
||
|
||
_proto._onLicenseRequestReadyStageChange = function _onLicenseRequestReadyStageChange(xhr, url, keyMessage, callback) {
|
||
switch (xhr.readyState) {
|
||
case 4:
|
||
if (xhr.status === 200) {
|
||
this._requestLicenseFailureCount = 0;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('License request succeeded');
|
||
var _data = xhr.response;
|
||
var licenseResponseCallback = this._licenseResponseCallback;
|
||
|
||
if (licenseResponseCallback) {
|
||
try {
|
||
_data = licenseResponseCallback.call(this.hls, xhr, url);
|
||
} catch (e) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error(e);
|
||
}
|
||
}
|
||
|
||
callback(_data);
|
||
} else {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("License Request XHR failed (" + url + "). Status: " + xhr.status + " (" + xhr.statusText + ")");
|
||
this._requestLicenseFailureCount++;
|
||
|
||
if (this._requestLicenseFailureCount > MAX_LICENSE_REQUEST_FAILURES) {
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_LICENSE_REQUEST_FAILED,
|
||
fatal: true
|
||
});
|
||
return;
|
||
}
|
||
|
||
var attemptsLeft = MAX_LICENSE_REQUEST_FAILURES - this._requestLicenseFailureCount + 1;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("Retrying license request, " + attemptsLeft + " attempts left");
|
||
|
||
this._requestLicense(keyMessage, callback);
|
||
}
|
||
|
||
break;
|
||
}
|
||
}
|
||
/**
|
||
* @private
|
||
* @param {MediaKeysListItem} keysListItem
|
||
* @param {ArrayBuffer} keyMessage
|
||
* @returns {ArrayBuffer} Challenge data posted to license server
|
||
* @throws if KeySystem is unsupported
|
||
*/
|
||
;
|
||
|
||
_proto._generateLicenseRequestChallenge = function _generateLicenseRequestChallenge(keysListItem, keyMessage) {
|
||
switch (keysListItem.mediaKeySystemDomain) {
|
||
// case KeySystems.PLAYREADY:
|
||
// from https://github.com/MicrosoftEdge/Demos/blob/master/eme/scripts/demo.js
|
||
|
||
/*
|
||
if (this.licenseType !== this.LICENSE_TYPE_WIDEVINE) {
|
||
// For PlayReady CDMs, we need to dig the Challenge out of the XML.
|
||
var keyMessageXml = new DOMParser().parseFromString(String.fromCharCode.apply(null, new Uint16Array(keyMessage)), 'application/xml');
|
||
if (keyMessageXml.getElementsByTagName('Challenge')[0]) {
|
||
challenge = atob(keyMessageXml.getElementsByTagName('Challenge')[0].childNodes[0].nodeValue);
|
||
} else {
|
||
throw 'Cannot find <Challenge> in key message';
|
||
}
|
||
var headerNames = keyMessageXml.getElementsByTagName('name');
|
||
var headerValues = keyMessageXml.getElementsByTagName('value');
|
||
if (headerNames.length !== headerValues.length) {
|
||
throw 'Mismatched header <name>/<value> pair in key message';
|
||
}
|
||
for (var i = 0; i < headerNames.length; i++) {
|
||
xhr.setRequestHeader(headerNames[i].childNodes[0].nodeValue, headerValues[i].childNodes[0].nodeValue);
|
||
}
|
||
}
|
||
break;
|
||
*/
|
||
case _utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_3__["KeySystems"].WIDEVINE:
|
||
// For Widevine CDMs, the challenge is the keyMessage.
|
||
return keyMessage;
|
||
}
|
||
|
||
throw new Error("unsupported key-system: " + keysListItem.mediaKeySystemDomain);
|
||
}
|
||
/**
|
||
* @private
|
||
* @param keyMessage
|
||
* @param callback
|
||
*/
|
||
;
|
||
|
||
_proto._requestLicense = function _requestLicense(keyMessage, callback) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('Requesting content license for key-system');
|
||
var keysListItem = this._mediaKeysList[0];
|
||
|
||
if (!keysListItem) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('Fatal error: Media is encrypted but no key-system access has been obtained yet');
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_NO_ACCESS,
|
||
fatal: true
|
||
});
|
||
return;
|
||
}
|
||
|
||
try {
|
||
var _url = this.getLicenseServerUrl(keysListItem.mediaKeySystemDomain);
|
||
|
||
var _xhr = this._createLicenseXhr(_url, keyMessage, callback);
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log("Sending license request to URL: " + _url);
|
||
|
||
var challenge = this._generateLicenseRequestChallenge(keysListItem, keyMessage);
|
||
|
||
_xhr.send(challenge);
|
||
} catch (e) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error("Failure requesting DRM license: " + e);
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].KEY_SYSTEM_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_SYSTEM_LICENSE_REQUEST_FAILED,
|
||
fatal: true
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto.onMediaAttached = function onMediaAttached(event, data) {
|
||
if (!this._emeEnabled) {
|
||
return;
|
||
}
|
||
|
||
var media = data.media; // keep reference of media
|
||
|
||
this._media = media;
|
||
media.addEventListener('encrypted', this._onMediaEncrypted);
|
||
};
|
||
|
||
_proto.onMediaDetached = function onMediaDetached() {
|
||
var media = this._media;
|
||
var mediaKeysList = this._mediaKeysList;
|
||
|
||
if (!media) {
|
||
return;
|
||
}
|
||
|
||
media.removeEventListener('encrypted', this._onMediaEncrypted);
|
||
this._media = null;
|
||
this._mediaKeysList = []; // Close all sessions and remove media keys from the video element.
|
||
|
||
Promise.all(mediaKeysList.map(function (mediaKeysListItem) {
|
||
if (mediaKeysListItem.mediaKeysSession) {
|
||
return mediaKeysListItem.mediaKeysSession.close().catch(function () {// Ignore errors when closing the sessions. Closing a session that
|
||
// generated no key requests will throw an error.
|
||
});
|
||
}
|
||
})).then(function () {
|
||
return media.setMediaKeys(null);
|
||
}).catch(function () {// Ignore any failures while removing media keys from the video element.
|
||
});
|
||
};
|
||
|
||
_proto.onManifestParsed = function onManifestParsed(event, data) {
|
||
if (!this._emeEnabled) {
|
||
return;
|
||
}
|
||
|
||
var audioCodecs = data.levels.map(function (level) {
|
||
return level.audioCodec;
|
||
}).filter(function (audioCodec) {
|
||
return !!audioCodec;
|
||
});
|
||
var videoCodecs = data.levels.map(function (level) {
|
||
return level.videoCodec;
|
||
}).filter(function (videoCodec) {
|
||
return !!videoCodec;
|
||
});
|
||
|
||
this._attemptKeySystemAccess(_utils_mediakeys_helper__WEBPACK_IMPORTED_MODULE_3__["KeySystems"].WIDEVINE, audioCodecs, videoCodecs);
|
||
};
|
||
|
||
_createClass(EMEController, [{
|
||
key: "requestMediaKeySystemAccess",
|
||
get: function get() {
|
||
if (!this._requestMediaKeySystemAccess) {
|
||
throw new Error('No requestMediaKeySystemAccess function configured');
|
||
}
|
||
|
||
return this._requestMediaKeySystemAccess;
|
||
}
|
||
}]);
|
||
|
||
return EMEController;
|
||
}();
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (EMEController);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/controller/fps-controller.ts":
|
||
/*!******************************************!*\
|
||
!*** ./src/controller/fps-controller.ts ***!
|
||
\******************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
|
||
|
||
|
||
var FPSController = /*#__PURE__*/function () {
|
||
// stream controller must be provided as a dependency!
|
||
function FPSController(hls) {
|
||
this.hls = void 0;
|
||
this.isVideoPlaybackQualityAvailable = false;
|
||
this.timer = void 0;
|
||
this.media = null;
|
||
this.lastTime = void 0;
|
||
this.lastDroppedFrames = 0;
|
||
this.lastDecodedFrames = 0;
|
||
this.streamController = void 0;
|
||
this.hls = hls;
|
||
this.registerListeners();
|
||
}
|
||
|
||
var _proto = FPSController.prototype;
|
||
|
||
_proto.setStreamController = function setStreamController(streamController) {
|
||
this.streamController = streamController;
|
||
};
|
||
|
||
_proto.registerListeners = function registerListeners() {
|
||
this.hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
|
||
};
|
||
|
||
_proto.unregisterListeners = function unregisterListeners() {
|
||
this.hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHING, this.onMediaAttaching);
|
||
};
|
||
|
||
_proto.destroy = function destroy() {
|
||
if (this.timer) {
|
||
clearInterval(this.timer);
|
||
}
|
||
|
||
this.unregisterListeners();
|
||
this.isVideoPlaybackQualityAvailable = false;
|
||
this.media = null;
|
||
};
|
||
|
||
_proto.onMediaAttaching = function onMediaAttaching(event, data) {
|
||
var config = this.hls.config;
|
||
|
||
if (config.capLevelOnFPSDrop) {
|
||
var media = data.media instanceof self.HTMLVideoElement ? data.media : null;
|
||
this.media = media;
|
||
|
||
if (media && typeof media.getVideoPlaybackQuality === 'function') {
|
||
this.isVideoPlaybackQualityAvailable = true;
|
||
}
|
||
|
||
self.clearInterval(this.timer);
|
||
this.timer = self.setInterval(this.checkFPSInterval.bind(this), config.fpsDroppedMonitoringPeriod);
|
||
}
|
||
};
|
||
|
||
_proto.checkFPS = function checkFPS(video, decodedFrames, droppedFrames) {
|
||
var currentTime = performance.now();
|
||
|
||
if (decodedFrames) {
|
||
if (this.lastTime) {
|
||
var currentPeriod = currentTime - this.lastTime;
|
||
var currentDropped = droppedFrames - this.lastDroppedFrames;
|
||
var currentDecoded = decodedFrames - this.lastDecodedFrames;
|
||
var droppedFPS = 1000 * currentDropped / currentPeriod;
|
||
var hls = this.hls;
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FPS_DROP, {
|
||
currentDropped: currentDropped,
|
||
currentDecoded: currentDecoded,
|
||
totalDroppedFrames: droppedFrames
|
||
});
|
||
|
||
if (droppedFPS > 0) {
|
||
// logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
|
||
if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
|
||
var currentLevel = hls.currentLevel;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_1__["logger"].warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
|
||
|
||
if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
|
||
currentLevel = currentLevel - 1;
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FPS_DROP_LEVEL_CAPPING, {
|
||
level: currentLevel,
|
||
droppedLevel: hls.currentLevel
|
||
});
|
||
hls.autoLevelCapping = currentLevel;
|
||
this.streamController.nextLevelSwitch();
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
this.lastTime = currentTime;
|
||
this.lastDroppedFrames = droppedFrames;
|
||
this.lastDecodedFrames = decodedFrames;
|
||
}
|
||
};
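/*
 * Worked example of the drop heuristic above (hypothetical numbers): over a
 * 5000 ms monitoring period with 100 newly decoded and 30 newly dropped frames,
 * droppedFPS = 1000 * 30 / 5000 = 6 (> 0), and 30 > fpsDroppedMonitoringThreshold * 100
 * whenever the threshold is below 0.3, so FPS_DROP_LEVEL_CAPPING fires and
 * autoLevelCapping is lowered by one level (provided currentLevel > 0 and the
 * existing cap allows it).
 */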
|
||
|
||
_proto.checkFPSInterval = function checkFPSInterval() {
|
||
var video = this.media;
|
||
|
||
if (video) {
|
||
if (this.isVideoPlaybackQualityAvailable) {
|
||
var videoPlaybackQuality = video.getVideoPlaybackQuality();
|
||
this.checkFPS(video, videoPlaybackQuality.totalVideoFrames, videoPlaybackQuality.droppedVideoFrames);
|
||
} else {
|
||
// HTMLVideoElement doesn't include the webkit types
|
||
this.checkFPS(video, video.webkitDecodedFrameCount, video.webkitDroppedFrameCount);
|
||
}
|
||
}
|
||
};
|
||
|
||
return FPSController;
|
||
}();
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (FPSController);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/controller/fragment-finders.ts":
|
||
/*!********************************************!*\
|
||
!*** ./src/controller/fragment-finders.ts ***!
|
||
\********************************************/
|
||
/*! exports provided: findFragmentByPDT, findFragmentByPTS, fragmentWithinToleranceTest, pdtWithinToleranceTest, findFragWithCC */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "findFragmentByPDT", function() { return findFragmentByPDT; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "findFragmentByPTS", function() { return findFragmentByPTS; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "fragmentWithinToleranceTest", function() { return fragmentWithinToleranceTest; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "pdtWithinToleranceTest", function() { return pdtWithinToleranceTest; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "findFragWithCC", function() { return findFragWithCC; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _utils_binary_search__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/binary-search */ "./src/utils/binary-search.ts");
|
||
|
||
|
||
|
||
/**
|
||
* Returns first fragment whose endPdt value exceeds the given PDT.
|
||
* @param {Array<Fragment>} fragments - The array of candidate fragments
|
||
* @param {number|null} [PDTValue = null] - The PDT value which must be exceeded
|
||
* @param {number} [maxFragLookUpTolerance = 0] - The amount of time that a fragment's start/end can be within in order to be considered contiguous
|
||
* @returns {*|null} fragment - The best matching fragment
|
||
*/
|
||
function findFragmentByPDT(fragments, PDTValue, maxFragLookUpTolerance) {
|
||
if (PDTValue === null || !Array.isArray(fragments) || !fragments.length || !Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(PDTValue)) {
|
||
return null;
|
||
} // if less than start
|
||
|
||
|
||
var startPDT = fragments[0].programDateTime;
|
||
|
||
if (PDTValue < (startPDT || 0)) {
|
||
return null;
|
||
}
|
||
|
||
var endPDT = fragments[fragments.length - 1].endProgramDateTime;
|
||
|
||
if (PDTValue >= (endPDT || 0)) {
|
||
return null;
|
||
}
|
||
|
||
maxFragLookUpTolerance = maxFragLookUpTolerance || 0;
|
||
|
||
for (var seg = 0; seg < fragments.length; ++seg) {
|
||
var frag = fragments[seg];
|
||
|
||
if (pdtWithinToleranceTest(PDTValue, maxFragLookUpTolerance, frag)) {
|
||
return frag;
|
||
}
|
||
}
|
||
|
||
return null;
|
||
}
|
||
/**
|
||
* Finds a fragment based on the SN of the previous fragment; or based on the needs of the current buffer.
|
||
* This method compensates for small buffer gaps by applying a tolerance to the start of any candidate fragment, thus
|
||
* breaking any traps which would cause the same fragment to be continuously selected within a small range.
|
||
* @param {*} fragPrevious - The last frag successfully appended
|
||
* @param {Array} fragments - The array of candidate fragments
|
||
* @param {number} [bufferEnd = 0] - The end of the contiguous buffered range the playhead is currently within
|
||
* @param {number} maxFragLookUpTolerance - The amount of time that a fragment's start/end can be within in order to be considered contiguous
|
||
* @returns {*} foundFrag - The best matching fragment
|
||
*/
|
||
|
||
function findFragmentByPTS(fragPrevious, fragments, bufferEnd, maxFragLookUpTolerance) {
|
||
if (bufferEnd === void 0) {
|
||
bufferEnd = 0;
|
||
}
|
||
|
||
if (maxFragLookUpTolerance === void 0) {
|
||
maxFragLookUpTolerance = 0;
|
||
}
|
||
|
||
var fragNext = null;
|
||
|
||
if (fragPrevious) {
|
||
fragNext = fragments[fragPrevious.sn - fragments[0].sn + 1] || null;
|
||
} else if (bufferEnd === 0 && fragments[0].start === 0) {
|
||
fragNext = fragments[0];
|
||
} // Prefer the next fragment if it's within tolerance
|
||
|
||
|
||
if (fragNext && fragmentWithinToleranceTest(bufferEnd, maxFragLookUpTolerance, fragNext) === 0) {
|
||
return fragNext;
|
||
} // We might be seeking past the tolerance so find the best match
|
||
|
||
|
||
var foundFragment = _utils_binary_search__WEBPACK_IMPORTED_MODULE_1__["default"].search(fragments, fragmentWithinToleranceTest.bind(null, bufferEnd, maxFragLookUpTolerance));
|
||
|
||
if (foundFragment) {
|
||
return foundFragment;
|
||
} // If no match was found return the next fragment after fragPrevious, or null
|
||
|
||
|
||
return fragNext;
|
||
}
|
||
/**
|
||
* The test function used by findFragmentByPTS's BinarySearch to look for the best match to the current buffer conditions.
|
||
* @param {*} candidate - The fragment to test
|
||
* @param {number} [bufferEnd = 0] - The end of the current buffered range the playhead is currently within
|
||
* @param {number} [maxFragLookUpTolerance = 0] - The amount of time that a fragment's start can be within in order to be considered contiguous
|
||
* @returns {number} - 0 if it matches, 1 if too low, -1 if too high
|
||
*/
|
||
|
||
function fragmentWithinToleranceTest(bufferEnd, maxFragLookUpTolerance, candidate) {
|
||
if (bufferEnd === void 0) {
|
||
bufferEnd = 0;
|
||
}
|
||
|
||
if (maxFragLookUpTolerance === void 0) {
|
||
maxFragLookUpTolerance = 0;
|
||
}
|
||
|
||
// offset should be within fragment boundary - config.maxFragLookUpTolerance
|
||
// this is to cope with situations like
|
||
// bufferEnd = 9.991
|
||
// frag[Ø] : [0,10]
|
||
// frag[1] : [10,20]
|
||
// bufferEnd is within frag[0] range ... although what we are expecting is to return frag[1] here
|
||
// frag start frag start+duration
|
||
// |-----------------------------|
|
||
// <---> <--->
|
||
// ...--------><-----------------------------><---------....
|
||
// previous frag matching fragment next frag
|
||
// return -1 return 0 return 1
|
||
// logger.log(`level/sn/start/end/bufEnd:${level}/${candidate.sn}/${candidate.start}/${(candidate.start+candidate.duration)}/${bufferEnd}`);
|
||
// Set the lookup tolerance to be small enough to detect the current segment - ensures we don't skip over very small segments
|
||
var candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0));
|
||
|
||
if (candidate.start + candidate.duration - candidateLookupTolerance <= bufferEnd) {
|
||
return 1;
|
||
} else if (candidate.start - candidateLookupTolerance > bufferEnd && candidate.start) {
|
||
// if maxFragLookUpTolerance will have negative value then don't return -1 for first element
|
||
return -1;
|
||
}
|
||
|
||
return 0;
|
||
}
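/*
 * Worked example of the tolerance test above, matching the buffer scenario in the
 * comments (the tolerance value of 0.25 is illustrative): with bufferEnd = 9.991
 * and maxFragLookUpTolerance = 0.25, a fragment spanning [0, 10] gives
 * 0 + 10 - 0.25 = 9.75 <= 9.991, so the test returns 1 (look further ahead),
 * while the fragment spanning [10, 20] returns 0 and is selected.
 */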
|
||
/**
|
||
* The test function used by the findFragmentByPdt's BinarySearch to look for the best match to the current buffer conditions.
|
||
* This function tests the candidate's program date time values, as represented in Unix time
|
||
* @param {*} candidate - The fragment to test
|
||
* @param {number} [pdtBufferEnd = 0] - The Unix time representing the end of the current buffered range
|
||
* @param {number} [maxFragLookUpTolerance = 0] - The amount of time that a fragment's start can be within in order to be considered contiguous
|
||
* @returns {boolean} True if contiguous, false otherwise
|
||
*/
|
||
|
||
function pdtWithinToleranceTest(pdtBufferEnd, maxFragLookUpTolerance, candidate) {
|
||
var candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0)) * 1000; // endProgramDateTime can be null, default to zero
|
||
|
||
var endProgramDateTime = candidate.endProgramDateTime || 0;
|
||
return endProgramDateTime - candidateLookupTolerance > pdtBufferEnd;
|
||
}
|
||
function findFragWithCC(fragments, cc) {
|
||
return _utils_binary_search__WEBPACK_IMPORTED_MODULE_1__["default"].search(fragments, function (candidate) {
|
||
if (candidate.cc < cc) {
|
||
return 1;
|
||
} else if (candidate.cc > cc) {
|
||
return -1;
|
||
} else {
|
||
return 0;
|
||
}
|
||
});
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/controller/fragment-tracker.ts":
|
||
/*!********************************************!*\
|
||
!*** ./src/controller/fragment-tracker.ts ***!
|
||
\********************************************/
|
||
/*! exports provided: FragmentState, FragmentTracker */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "FragmentState", function() { return FragmentState; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "FragmentTracker", function() { return FragmentTracker; });
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
|
||
|
||
|
||
var FragmentState;
|
||
|
||
(function (FragmentState) {
|
||
FragmentState["NOT_LOADED"] = "NOT_LOADED";
|
||
FragmentState["BACKTRACKED"] = "BACKTRACKED";
|
||
FragmentState["APPENDING"] = "APPENDING";
|
||
FragmentState["PARTIAL"] = "PARTIAL";
|
||
FragmentState["OK"] = "OK";
|
||
})(FragmentState || (FragmentState = {}));
|
||
|
||
var FragmentTracker = /*#__PURE__*/function () {
|
||
function FragmentTracker(hls) {
|
||
this.activeFragment = null;
|
||
this.activeParts = null;
|
||
this.fragments = Object.create(null);
|
||
this.timeRanges = Object.create(null);
|
||
this.bufferPadding = 0.2;
|
||
this.hls = void 0;
|
||
this.hls = hls;
|
||
|
||
this._registerListeners();
|
||
}
|
||
|
||
var _proto = FragmentTracker.prototype;
|
||
|
||
_proto._registerListeners = function _registerListeners() {
|
||
var hls = this.hls;
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_APPENDED, this.onBufferAppended, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_LOADED, this.onFragLoaded, this);
|
||
};
|
||
|
||
_proto._unregisterListeners = function _unregisterListeners() {
|
||
var hls = this.hls;
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_APPENDED, this.onBufferAppended, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_LOADED, this.onFragLoaded, this);
|
||
};
|
||
|
||
_proto.destroy = function destroy() {
|
||
this._unregisterListeners(); // @ts-ignore
|
||
|
||
|
||
this.fragments = this.timeRanges = null;
|
||
}
|
||
/**
|
||
* Return a Fragment with an appended range that matches the position and levelType.
|
||
* If no matching Fragment is found, return null
|
||
*/
|
||
;
|
||
|
||
_proto.getAppendedFrag = function getAppendedFrag(position, levelType) {
|
||
if (levelType === _types_loader__WEBPACK_IMPORTED_MODULE_1__["PlaylistLevelType"].MAIN) {
|
||
var activeFragment = this.activeFragment,
|
||
activeParts = this.activeParts;
|
||
|
||
if (!activeFragment) {
|
||
return null;
|
||
}
|
||
|
||
if (activeParts) {
|
||
for (var i = activeParts.length; i--;) {
|
||
var activePart = activeParts[i];
|
||
var appendedPTS = activePart ? activePart.end : activeFragment.appendedPTS;
|
||
|
||
if (activePart.start <= position && appendedPTS !== undefined && position <= appendedPTS) {
|
||
// 9 is a magic number. remove parts from lookup after a match but keep some short seeks back.
|
||
if (i > 9) {
|
||
this.activeParts = activeParts.slice(i - 9);
|
||
}
|
||
|
||
return activePart;
|
||
}
|
||
}
|
||
} else if (activeFragment.start <= position && activeFragment.appendedPTS !== undefined && position <= activeFragment.appendedPTS) {
|
||
return activeFragment;
|
||
}
|
||
}
|
||
|
||
return this.getBufferedFrag(position, levelType);
|
||
}
|
||
/**
* Return a buffered Fragment that matches the position and levelType.
* A buffered Fragment is one whose loading, parsing and appending is done (completed or "partial" meaning aborted).
* If no matching Fragment is found, return null
*/
;
|
||
|
||
_proto.getBufferedFrag = function getBufferedFrag(position, levelType) {
|
||
var fragments = this.fragments;
|
||
var keys = Object.keys(fragments);
|
||
|
||
for (var i = keys.length; i--;) {
|
||
var fragmentEntity = fragments[keys[i]];
|
||
|
||
if ((fragmentEntity === null || fragmentEntity === void 0 ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) {
|
||
var frag = fragmentEntity.body;
|
||
|
||
if (frag.start <= position && position <= frag.end) {
|
||
return frag;
|
||
}
|
||
}
|
||
}
|
||
|
||
return null;
|
||
}
|
||
/**
* Partial fragments affected by coded frame eviction will be removed.
* The browser will unload parts of the buffer to free up memory for new buffer data.
* Fragments will need to be reloaded when the buffer is freed up; removing partial fragments allows them to be reloaded (since there might be parts that are still playable).
*/
;
|
||
|
||
_proto.detectEvictedFragments = function detectEvictedFragments(elementaryStream, timeRange, playlistType) {
|
||
var _this = this;
|
||
|
||
// Check if any flagged fragments have been unloaded
|
||
Object.keys(this.fragments).forEach(function (key) {
|
||
var fragmentEntity = _this.fragments[key];
|
||
|
||
if (!fragmentEntity) {
|
||
return;
|
||
}
|
||
|
||
if (!fragmentEntity.buffered) {
|
||
if (fragmentEntity.body.type === playlistType) {
|
||
_this.removeFragment(fragmentEntity.body);
|
||
}
|
||
|
||
return;
|
||
}
|
||
|
||
var esData = fragmentEntity.range[elementaryStream];
|
||
|
||
if (!esData) {
|
||
return;
|
||
}
|
||
|
||
esData.time.some(function (time) {
|
||
var isNotBuffered = !_this.isTimeBuffered(time.startPTS, time.endPTS, timeRange);
|
||
|
||
if (isNotBuffered) {
|
||
// Unregister partial fragment as it needs to load again to be reused
|
||
_this.removeFragment(fragmentEntity.body);
|
||
}
|
||
|
||
return isNotBuffered;
|
||
});
|
||
});
|
||
}
|
||
/**
* Checks if the fragment passed in is loaded in the buffer properly
* Partially loaded fragments will be registered as a partial fragment
*/
;
|
||
|
||
_proto.detectPartialFragments = function detectPartialFragments(data) {
|
||
var _this2 = this;
|
||
|
||
var timeRanges = this.timeRanges;
|
||
var frag = data.frag,
|
||
part = data.part;
|
||
|
||
if (!timeRanges || frag.sn === 'initSegment') {
|
||
return;
|
||
}
|
||
|
||
var fragKey = getFragmentKey(frag);
|
||
var fragmentEntity = this.fragments[fragKey];
|
||
|
||
if (!fragmentEntity) {
|
||
return;
|
||
}
|
||
|
||
Object.keys(timeRanges).forEach(function (elementaryStream) {
|
||
var streamInfo = frag.elementaryStreams[elementaryStream];
|
||
|
||
if (!streamInfo) {
|
||
return;
|
||
}
|
||
|
||
var timeRange = timeRanges[elementaryStream];
|
||
var partial = part !== null || streamInfo.partial === true;
|
||
fragmentEntity.range[elementaryStream] = _this2.getBufferedTimes(frag, part, partial, timeRange);
|
||
});
|
||
fragmentEntity.backtrack = fragmentEntity.loaded = null;
|
||
|
||
if (Object.keys(fragmentEntity.range).length) {
|
||
fragmentEntity.buffered = true;
|
||
} else {
|
||
// remove fragment if nothing was appended
|
||
this.removeFragment(fragmentEntity.body);
|
||
}
|
||
};
|
||
|
||
_proto.fragBuffered = function fragBuffered(frag) {
|
||
var fragKey = getFragmentKey(frag);
|
||
var fragmentEntity = this.fragments[fragKey];
|
||
|
||
if (fragmentEntity) {
|
||
fragmentEntity.backtrack = fragmentEntity.loaded = null;
|
||
fragmentEntity.buffered = true;
|
||
}
|
||
};
|
||
|
||
_proto.getBufferedTimes = function getBufferedTimes(fragment, part, partial, timeRange) {
|
||
var buffered = {
|
||
time: [],
|
||
partial: partial
|
||
};
|
||
var startPTS = part ? part.start : fragment.start;
|
||
var endPTS = part ? part.end : fragment.end;
|
||
var minEndPTS = fragment.minEndPTS || endPTS;
|
||
var maxStartPTS = fragment.maxStartPTS || startPTS;
|
||
|
||
for (var i = 0; i < timeRange.length; i++) {
|
||
var startTime = timeRange.start(i) - this.bufferPadding;
|
||
var endTime = timeRange.end(i) + this.bufferPadding;
|
||
|
||
if (maxStartPTS >= startTime && minEndPTS <= endTime) {
|
||
// Fragment is entirely contained in buffer
|
||
// No need to check the other timeRange times since it's completely playable
|
||
buffered.time.push({
|
||
startPTS: Math.max(startPTS, timeRange.start(i)),
|
||
endPTS: Math.min(endPTS, timeRange.end(i))
|
||
});
|
||
break;
|
||
} else if (startPTS < endTime && endPTS > startTime) {
|
||
buffered.partial = true; // Check for intersection with buffer
|
||
// Get playable sections of the fragment
|
||
|
||
buffered.time.push({
|
||
startPTS: Math.max(startPTS, timeRange.start(i)),
|
||
endPTS: Math.min(endPTS, timeRange.end(i))
|
||
});
|
||
} else if (endPTS <= startTime) {
|
||
// No need to check the rest of the timeRange as it is in order
|
||
break;
|
||
}
|
||
}
|
||
|
||
return buffered;
|
||
}
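/*
 * Illustrative numbers for the containment test above (assuming maxStartPTS/minEndPTS equal the
 * fragment's start/end): with bufferPadding = 0.2, a fragment spanning PTS 10-16 counts as entirely
 * buffered once a buffered range covers at least 10.2-15.8, while a range that only overlaps part
 * of it marks the fragment as partial.
 */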
/**
* Gets the partial fragment for a certain time
*/
;
|
||
|
||
_proto.getPartialFragment = function getPartialFragment(time) {
|
||
var bestFragment = null;
|
||
var timePadding;
|
||
var startTime;
|
||
var endTime;
|
||
var bestOverlap = 0;
|
||
var bufferPadding = this.bufferPadding,
|
||
fragments = this.fragments;
|
||
Object.keys(fragments).forEach(function (key) {
|
||
var fragmentEntity = fragments[key];
|
||
|
||
if (!fragmentEntity) {
|
||
return;
|
||
}
|
||
|
||
if (isPartial(fragmentEntity)) {
|
||
startTime = fragmentEntity.body.start - bufferPadding;
|
||
endTime = fragmentEntity.body.end + bufferPadding;
|
||
|
||
if (time >= startTime && time <= endTime) {
|
||
// Use the fragment that has the most padding from start and end time
|
||
timePadding = Math.min(time - startTime, endTime - time);
|
||
|
||
if (bestOverlap <= timePadding) {
|
||
bestFragment = fragmentEntity.body;
|
||
bestOverlap = timePadding;
|
||
}
|
||
}
|
||
}
|
||
});
|
||
return bestFragment;
|
||
};
|
||
|
||
_proto.getState = function getState(fragment) {
|
||
var fragKey = getFragmentKey(fragment);
|
||
var fragmentEntity = this.fragments[fragKey];
|
||
|
||
if (fragmentEntity) {
|
||
if (!fragmentEntity.buffered) {
|
||
if (fragmentEntity.backtrack) {
|
||
return FragmentState.BACKTRACKED;
|
||
}
|
||
|
||
return FragmentState.APPENDING;
|
||
} else if (isPartial(fragmentEntity)) {
|
||
return FragmentState.PARTIAL;
|
||
} else {
|
||
return FragmentState.OK;
|
||
}
|
||
}
|
||
|
||
return FragmentState.NOT_LOADED;
|
||
};
|
||
|
||
_proto.backtrack = function backtrack(frag, data) {
|
||
var fragKey = getFragmentKey(frag);
|
||
var fragmentEntity = this.fragments[fragKey];
|
||
|
||
if (!fragmentEntity || fragmentEntity.backtrack) {
|
||
return null;
|
||
}
|
||
|
||
var backtrack = fragmentEntity.backtrack = data ? data : fragmentEntity.loaded;
|
||
fragmentEntity.loaded = null;
|
||
return backtrack;
|
||
};
|
||
|
||
_proto.getBacktrackData = function getBacktrackData(fragment) {
|
||
var fragKey = getFragmentKey(fragment);
|
||
var fragmentEntity = this.fragments[fragKey];
|
||
|
||
if (fragmentEntity) {
|
||
var _backtrack$payload;
|
||
|
||
var backtrack = fragmentEntity.backtrack; // If data was already sent to the Worker it is detached and no longer available
|
||
|
||
if (backtrack !== null && backtrack !== void 0 && (_backtrack$payload = backtrack.payload) !== null && _backtrack$payload !== void 0 && _backtrack$payload.byteLength) {
|
||
return backtrack;
|
||
} else {
|
||
this.removeFragment(fragment);
|
||
}
|
||
}
|
||
|
||
return null;
|
||
};
|
||
|
||
_proto.isTimeBuffered = function isTimeBuffered(startPTS, endPTS, timeRange) {
|
||
var startTime;
|
||
var endTime;
|
||
|
||
for (var i = 0; i < timeRange.length; i++) {
|
||
startTime = timeRange.start(i) - this.bufferPadding;
|
||
endTime = timeRange.end(i) + this.bufferPadding;
|
||
|
||
if (startPTS >= startTime && endPTS <= endTime) {
|
||
return true;
|
||
}
|
||
|
||
if (endPTS <= startTime) {
|
||
// No need to check the rest of the timeRange as it is in order
|
||
return false;
|
||
}
|
||
}
|
||
|
||
return false;
|
||
};
|
||
|
||
_proto.onFragLoaded = function onFragLoaded(event, data) {
|
||
var frag = data.frag,
|
||
part = data.part; // don't track initsegment (for which sn is not a number)
|
||
// don't track frags used for bitrateTest, they're irrelevant.
|
||
// don't track parts for memory efficiency
|
||
|
||
if (frag.sn === 'initSegment' || frag.bitrateTest || part) {
|
||
return;
|
||
}
|
||
|
||
var fragKey = getFragmentKey(frag);
|
||
this.fragments[fragKey] = {
|
||
body: frag,
|
||
loaded: data,
|
||
backtrack: null,
|
||
buffered: false,
|
||
range: Object.create(null)
|
||
};
|
||
};
|
||
|
||
_proto.onBufferAppended = function onBufferAppended(event, data) {
|
||
var _this3 = this;
|
||
|
||
var frag = data.frag,
|
||
part = data.part,
|
||
timeRanges = data.timeRanges;
|
||
|
||
if (frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_1__["PlaylistLevelType"].MAIN) {
|
||
this.activeFragment = frag;
|
||
|
||
if (part) {
|
||
var activeParts = this.activeParts;
|
||
|
||
if (!activeParts) {
|
||
this.activeParts = activeParts = [];
|
||
}
|
||
|
||
activeParts.push(part);
|
||
} else {
|
||
this.activeParts = null;
|
||
}
|
||
} // Store the latest timeRanges loaded in the buffer
|
||
|
||
|
||
this.timeRanges = timeRanges;
|
||
Object.keys(timeRanges).forEach(function (elementaryStream) {
|
||
var timeRange = timeRanges[elementaryStream];
|
||
|
||
_this3.detectEvictedFragments(elementaryStream, timeRange);
|
||
|
||
if (!part) {
|
||
for (var i = 0; i < timeRange.length; i++) {
|
||
frag.appendedPTS = Math.max(timeRange.end(i), frag.appendedPTS || 0);
|
||
}
|
||
}
|
||
});
|
||
};
|
||
|
||
_proto.onFragBuffered = function onFragBuffered(event, data) {
|
||
this.detectPartialFragments(data);
|
||
};
|
||
|
||
_proto.hasFragment = function hasFragment(fragment) {
|
||
var fragKey = getFragmentKey(fragment);
|
||
return !!this.fragments[fragKey];
|
||
};
|
||
|
||
_proto.removeFragmentsInRange = function removeFragmentsInRange(start, end, playlistType) {
|
||
var _this4 = this;
|
||
|
||
Object.keys(this.fragments).forEach(function (key) {
|
||
var fragmentEntity = _this4.fragments[key];
|
||
|
||
if (!fragmentEntity) {
|
||
return;
|
||
}
|
||
|
||
if (fragmentEntity.buffered) {
|
||
var frag = fragmentEntity.body;
|
||
|
||
if (frag.type === playlistType && frag.start < end && frag.end > start) {
|
||
_this4.removeFragment(frag);
|
||
}
|
||
}
|
||
});
|
||
};
|
||
|
||
_proto.removeFragment = function removeFragment(fragment) {
|
||
var fragKey = getFragmentKey(fragment);
|
||
fragment.stats.loaded = 0;
|
||
fragment.clearElementaryStreamInfo();
|
||
delete this.fragments[fragKey];
|
||
};
|
||
|
||
_proto.removeAllFragments = function removeAllFragments() {
|
||
this.fragments = Object.create(null);
|
||
this.activeFragment = null;
|
||
this.activeParts = null;
|
||
};
|
||
|
||
return FragmentTracker;
|
||
}();
|
||
|
||
function isPartial(fragmentEntity) {
|
||
var _fragmentEntity$range, _fragmentEntity$range2;
|
||
|
||
return fragmentEntity.buffered && (((_fragmentEntity$range = fragmentEntity.range.video) === null || _fragmentEntity$range === void 0 ? void 0 : _fragmentEntity$range.partial) || ((_fragmentEntity$range2 = fragmentEntity.range.audio) === null || _fragmentEntity$range2 === void 0 ? void 0 : _fragmentEntity$range2.partial));
|
||
}
|
||
|
||
function getFragmentKey(fragment) {
|
||
return fragment.type + "_" + fragment.level + "_" + fragment.urlId + "_" + fragment.sn;
|
||
}
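// Illustrative key (made-up values): a main-playlist fragment at level 2, url-id 0, sn 14 would
// yield "main_2_0_14"; the pieces come straight from the fragment fields joined above.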
|
||
|
||
/***/ }),

/***/ "./src/controller/gap-controller.ts":
/*!******************************************!*\
!*** ./src/controller/gap-controller.ts ***!
\******************************************/
/*! exports provided: STALL_MINIMUM_DURATION_MS, MAX_START_GAP_JUMP, SKIP_BUFFER_HOLE_STEP_SECONDS, SKIP_BUFFER_RANGE_START, default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "STALL_MINIMUM_DURATION_MS", function() { return STALL_MINIMUM_DURATION_MS; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "MAX_START_GAP_JUMP", function() { return MAX_START_GAP_JUMP; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "SKIP_BUFFER_HOLE_STEP_SECONDS", function() { return SKIP_BUFFER_HOLE_STEP_SECONDS; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "SKIP_BUFFER_RANGE_START", function() { return SKIP_BUFFER_RANGE_START; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return GapController; });
/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../events */ "./src/events.ts");
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");

var STALL_MINIMUM_DURATION_MS = 250;
var MAX_START_GAP_JUMP = 2.0;
var SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
var SKIP_BUFFER_RANGE_START = 0.05;

var GapController = /*#__PURE__*/function () {
|
||
function GapController(config, media, fragmentTracker, hls) {
|
||
this.config = void 0;
|
||
this.media = void 0;
|
||
this.fragmentTracker = void 0;
|
||
this.hls = void 0;
|
||
this.nudgeRetry = 0;
|
||
this.stallReported = false;
|
||
this.stalled = null;
|
||
this.moved = false;
|
||
this.seeking = false;
|
||
this.config = config;
|
||
this.media = media;
|
||
this.fragmentTracker = fragmentTracker;
|
||
this.hls = hls;
|
||
}
|
||
|
||
var _proto = GapController.prototype;
|
||
|
||
_proto.destroy = function destroy() {
|
||
// @ts-ignore
|
||
this.hls = this.fragmentTracker = this.media = null;
|
||
}
|
||
/**
* Checks if the playhead is stuck within a gap, and if so, attempts to free it.
* A gap is an unbuffered range between two buffered ranges (or the start and the first buffered range).
*
* @param {number} lastCurrentTime Previously read playhead position
*/
;
|
||
|
||
_proto.poll = function poll(lastCurrentTime) {
|
||
var config = this.config,
|
||
media = this.media,
|
||
stalled = this.stalled;
|
||
var currentTime = media.currentTime,
|
||
seeking = media.seeking;
|
||
var seeked = this.seeking && !seeking;
|
||
var beginSeek = !this.seeking && seeking;
|
||
this.seeking = seeking; // The playhead is moving, no-op
|
||
|
||
if (currentTime !== lastCurrentTime) {
|
||
this.moved = true;
|
||
|
||
if (stalled !== null) {
|
||
// The playhead is now moving, but was previously stalled
|
||
if (this.stallReported) {
|
||
var _stalledDuration = self.performance.now() - stalled;
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn("playback not stuck anymore @" + currentTime + ", after " + Math.round(_stalledDuration) + "ms");
|
||
this.stallReported = false;
|
||
}
|
||
|
||
this.stalled = null;
|
||
this.nudgeRetry = 0;
|
||
}
|
||
|
||
return;
|
||
} // Clear stalled state when beginning or finishing seeking so that we don't report stalls coming out of a seek
|
||
|
||
|
||
if (beginSeek || seeked) {
|
||
this.stalled = null;
|
||
} // The playhead should not be moving
|
||
|
||
|
||
if (media.paused || media.ended || media.playbackRate === 0 || !_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_0__["BufferHelper"].getBuffered(media).length) {
|
||
return;
|
||
}
|
||
|
||
var bufferInfo = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_0__["BufferHelper"].bufferInfo(media, currentTime, 0);
|
||
var isBuffered = bufferInfo.len > 0;
|
||
var nextStart = bufferInfo.nextStart || 0; // There is no playable buffer (seeked, waiting for buffer)
|
||
|
||
if (!isBuffered && !nextStart) {
|
||
return;
|
||
}
|
||
|
||
if (seeking) {
|
||
// Waiting for seeking in a buffered range to complete
|
||
var hasEnoughBuffer = bufferInfo.len > MAX_START_GAP_JUMP; // Next buffered range is too far ahead to jump to while still seeking
|
||
|
||
var noBufferGap = !nextStart || nextStart - currentTime > MAX_START_GAP_JUMP && !this.fragmentTracker.getPartialFragment(currentTime);
|
||
|
||
if (hasEnoughBuffer || noBufferGap) {
|
||
return;
|
||
} // Reset moved state when seeking to a point in or before a gap
|
||
|
||
|
||
this.moved = false;
|
||
} // Skip start gaps if we haven't played, but the last poll detected the start of a stall
|
||
// The additional poll gives the browser a chance to jump the gap for us
|
||
|
||
|
||
if (!this.moved && this.stalled !== null) {
|
||
var _level$details;
|
||
|
||
// Jump start gaps within jump threshold
|
||
var startJump = Math.max(nextStart, bufferInfo.start || 0) - currentTime; // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
|
||
// a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
|
||
// that begins over 1 target duration after the video start position.
|
||
|
||
var level = this.hls.levels ? this.hls.levels[this.hls.currentLevel] : null;
|
||
var isLive = level === null || level === void 0 ? void 0 : (_level$details = level.details) === null || _level$details === void 0 ? void 0 : _level$details.live;
|
||
var maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
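// e.g. a live playlist with targetduration = 6 (illustrative value) tolerates start-gap jumps of up
// to 12s here, while VOD stays capped at MAX_START_GAP_JUMP (2s).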
|
||
|
||
if (startJump > 0 && startJump <= maxStartGapJump) {
|
||
this._trySkipBufferHole(null);
|
||
|
||
return;
|
||
}
|
||
} // Start tracking stall time
|
||
|
||
|
||
var tnow = self.performance.now();
|
||
|
||
if (stalled === null) {
|
||
this.stalled = tnow;
|
||
return;
|
||
}
|
||
|
||
var stalledDuration = tnow - stalled;
|
||
|
||
if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
|
||
// Report stalling after trying to fix
|
||
this._reportStall(bufferInfo.len);
|
||
}
|
||
|
||
var bufferedWithHoles = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_0__["BufferHelper"].bufferInfo(media, currentTime, config.maxBufferHole);
|
||
|
||
this._tryFixBufferStall(bufferedWithHoles, stalledDuration);
|
||
}
|
||
/**
* Detects and attempts to fix known buffer stalling issues.
* @param bufferInfo - The properties of the current buffer.
* @param stalledDurationMs - The amount of time Hls.js has been stalling for.
* @private
*/
;
|
||
|
||
_proto._tryFixBufferStall = function _tryFixBufferStall(bufferInfo, stalledDurationMs) {
|
||
var config = this.config,
|
||
fragmentTracker = this.fragmentTracker,
|
||
media = this.media;
|
||
var currentTime = media.currentTime;
|
||
var partial = fragmentTracker.getPartialFragment(currentTime);
|
||
|
||
if (partial) {
|
||
// Try to skip over the buffer hole caused by a partial fragment
|
||
// This method isn't limited by the size of the gap between buffered ranges
|
||
var targetTime = this._trySkipBufferHole(partial); // we return here in this case, meaning
|
||
// the branch below only executes when we don't handle a partial fragment
|
||
|
||
|
||
if (targetTime) {
|
||
return;
|
||
}
|
||
} // if we haven't had to skip over a buffer hole of a partial fragment
|
||
// we may just have to "nudge" the playlist as the browser decoding/rendering engine
|
||
// needs to cross some sort of threshold covering all source-buffers content
|
||
// to start playing properly.
|
||
|
||
|
||
if (bufferInfo.len > config.maxBufferHole && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn('Trying to nudge playhead over buffer-hole'); // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
|
||
// We only try to jump the hole if it's under the configured size
|
||
// Reset stalled so to rearm watchdog timer
|
||
|
||
this.stalled = null;
|
||
|
||
this._tryNudgeBuffer();
|
||
}
|
||
}
|
||
/**
* Triggers a BUFFER_STALLED_ERROR event, but only once per stall period.
* @param bufferLen - The playhead distance from the end of the current buffer segment.
* @private
*/
;
|
||
|
||
_proto._reportStall = function _reportStall(bufferLen) {
|
||
var hls = this.hls,
|
||
media = this.media,
|
||
stallReported = this.stallReported;
|
||
|
||
if (!stallReported) {
|
||
// Report stalled error once
|
||
this.stallReported = true;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn("Playback stalling at @" + media.currentTime + " due to low buffer (buffer=" + bufferLen + ")");
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].BUFFER_STALLED_ERROR,
|
||
fatal: false,
|
||
buffer: bufferLen
|
||
});
|
||
}
|
||
}
|
||
/**
* Attempts to fix buffer stalls by jumping over known gaps caused by partial fragments
* @param partial - The partial fragment found at the current time (where playback is stalling).
* @private
*/
;
|
||
|
||
_proto._trySkipBufferHole = function _trySkipBufferHole(partial) {
|
||
var config = this.config,
|
||
hls = this.hls,
|
||
media = this.media;
|
||
var currentTime = media.currentTime;
|
||
var lastEndTime = 0; // Check if currentTime is between unbuffered regions of partial fragments
|
||
|
||
var buffered = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_0__["BufferHelper"].getBuffered(media);
|
||
|
||
for (var i = 0; i < buffered.length; i++) {
|
||
var startTime = buffered.start(i);
|
||
|
||
if (currentTime + config.maxBufferHole >= lastEndTime && currentTime < startTime) {
|
||
var targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, media.currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn("skipping hole, adjusting currentTime from " + currentTime + " to " + targetTime);
|
||
this.moved = true;
|
||
this.stalled = null;
|
||
media.currentTime = targetTime;
|
||
|
||
if (partial) {
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].BUFFER_SEEK_OVER_HOLE,
|
||
fatal: false,
|
||
reason: "fragment loaded with buffer holes, seeking from " + currentTime + " to " + targetTime,
|
||
frag: partial
|
||
});
|
||
}
|
||
|
||
return targetTime;
|
||
}
|
||
|
||
lastEndTime = buffered.end(i);
|
||
}
|
||
|
||
return 0;
|
||
}
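/*
 * Illustrative numbers for the skip above: with currentTime = 10.0 and the next buffered range
 * starting at 10.3, targetTime = max(10.3 + SKIP_BUFFER_RANGE_START, 10.0 + SKIP_BUFFER_HOLE_STEP_SECONDS)
 * = max(10.35, 10.1) = 10.35, so playback resumes just inside the next range rather than at its exact edge.
 */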
/**
* Attempts to fix buffer stalls by advancing the mediaElement's current time by a small amount.
* @private
*/
;
|
||
|
||
_proto._tryNudgeBuffer = function _tryNudgeBuffer() {
|
||
var config = this.config,
|
||
hls = this.hls,
|
||
media = this.media,
|
||
nudgeRetry = this.nudgeRetry;
|
||
var currentTime = media.currentTime;
|
||
this.nudgeRetry++;
|
||
|
||
if (nudgeRetry < config.nudgeMaxRetry) {
|
||
var targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset; // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn("Nudging 'currentTime' from " + currentTime + " to " + targetTime);
|
||
media.currentTime = targetTime;
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].BUFFER_NUDGE_ON_STALL,
|
||
fatal: false
|
||
});
|
||
} else {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].error("Playhead still not moving while enough data buffered @" + currentTime + " after " + config.nudgeMaxRetry + " nudges");
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].BUFFER_STALLED_ERROR,
|
||
fatal: true
|
||
});
|
||
}
|
||
};
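/*
 * Illustrative arithmetic for the nudge above, assuming the commonly used defaults nudgeOffset = 0.1
 * and nudgeMaxRetry = 3 (both come from config, so actual values may differ): the first three calls
 * move currentTime forward by 0.1s, 0.2s and 0.3s ((nudgeRetry + 1) * nudgeOffset with nudgeRetry = 0, 1, 2);
 * a fourth call gives up and raises a fatal BUFFER_STALLED_ERROR instead.
 */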
|
||
|
||
return GapController;
|
||
}();
|
||
|
||
|
||
|
||
/***/ }),

/***/ "./src/controller/id3-track-controller.ts":
/*!************************************************!*\
!*** ./src/controller/id3-track-controller.ts ***!
\************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/texttrack-utils */ "./src/utils/texttrack-utils.ts");
|
||
/* harmony import */ var _demux_id3__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../demux/id3 */ "./src/demux/id3.ts");
|
||
|
||
|
||
|
||
var MIN_CUE_DURATION = 0.25;
|
||
|
||
var ID3TrackController = /*#__PURE__*/function () {
|
||
function ID3TrackController(hls) {
|
||
this.hls = void 0;
|
||
this.id3Track = null;
|
||
this.media = null;
|
||
this.hls = hls;
|
||
|
||
this._registerListeners();
|
||
}
|
||
|
||
var _proto = ID3TrackController.prototype;
|
||
|
||
_proto.destroy = function destroy() {
|
||
this._unregisterListeners();
|
||
};
|
||
|
||
_proto._registerListeners = function _registerListeners() {
|
||
var hls = this.hls;
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
|
||
};
|
||
|
||
_proto._unregisterListeners = function _unregisterListeners() {
|
||
var hls = this.hls;
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
|
||
} // Add ID3 metadata text track.
|
||
;
|
||
|
||
_proto.onMediaAttached = function onMediaAttached(event, data) {
|
||
this.media = data.media;
|
||
};
|
||
|
||
_proto.onMediaDetaching = function onMediaDetaching() {
|
||
if (!this.id3Track) {
|
||
return;
|
||
}
|
||
|
||
Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_1__["clearCurrentCues"])(this.id3Track);
|
||
this.id3Track = null;
|
||
this.media = null;
|
||
};
|
||
|
||
_proto.getID3Track = function getID3Track(textTracks) {
|
||
if (!this.media) {
|
||
return;
|
||
}
|
||
|
||
for (var i = 0; i < textTracks.length; i++) {
|
||
var textTrack = textTracks[i];
|
||
|
||
if (textTrack.kind === 'metadata' && textTrack.label === 'id3') {
|
||
// send 'addtrack' when reusing the textTrack for metadata,
|
||
// same as what we do for captions
|
||
Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_1__["sendAddTrackEvent"])(textTrack, this.media);
|
||
return textTrack;
|
||
}
|
||
}
|
||
|
||
return this.media.addTextTrack('metadata', 'id3');
|
||
};
|
||
|
||
_proto.onFragParsingMetadata = function onFragParsingMetadata(event, data) {
|
||
if (!this.media) {
|
||
return;
|
||
}
|
||
|
||
var fragment = data.frag;
|
||
var samples = data.samples; // create track dynamically
|
||
|
||
if (!this.id3Track) {
|
||
this.id3Track = this.getID3Track(this.media.textTracks);
|
||
this.id3Track.mode = 'hidden';
|
||
} // Attempt to recreate Safari functionality by creating
|
||
// WebKitDataCue objects when available and store the decoded
|
||
// ID3 data in the value property of the cue
|
||
|
||
|
||
var Cue = self.WebKitDataCue || self.VTTCue || self.TextTrackCue;
|
||
|
||
for (var i = 0; i < samples.length; i++) {
|
||
var frames = _demux_id3__WEBPACK_IMPORTED_MODULE_2__["getID3Frames"](samples[i].data);
|
||
|
||
if (frames) {
|
||
var startTime = samples[i].pts;
|
||
var endTime = i < samples.length - 1 ? samples[i + 1].pts : fragment.end;
|
||
var timeDiff = endTime - startTime;
|
||
|
||
if (timeDiff <= 0) {
|
||
endTime = startTime + MIN_CUE_DURATION;
|
||
}
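// If the next sample's PTS (or the fragment end, for the last sample) does not advance past startTime,
// the cue would have zero or negative length, so it is clamped to MIN_CUE_DURATION (0.25s) above.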
|
||
|
||
for (var j = 0; j < frames.length; j++) {
|
||
var frame = frames[j]; // Safari doesn't put the timestamp frame in the TextTrack
|
||
|
||
if (!_demux_id3__WEBPACK_IMPORTED_MODULE_2__["isTimeStampFrame"](frame)) {
|
||
var cue = new Cue(startTime, endTime, '');
|
||
cue.value = frame;
|
||
this.id3Track.addCue(cue);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.onBufferFlushing = function onBufferFlushing(event, _ref) {
|
||
var startOffset = _ref.startOffset,
|
||
endOffset = _ref.endOffset,
|
||
type = _ref.type;
|
||
|
||
if (!type || type === 'audio') {
|
||
// id3 cues come from parsed audio; only remove cues when the audio buffer is cleared
|
||
var id3Track = this.id3Track;
|
||
|
||
if (id3Track) {
|
||
Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_1__["removeCuesInRange"])(id3Track, startOffset, endOffset);
|
||
}
|
||
}
|
||
};
|
||
|
||
return ID3TrackController;
|
||
}();
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (ID3TrackController);
|
||
|
||
/***/ }),

/***/ "./src/controller/latency-controller.ts":
/*!**********************************************!*\
!*** ./src/controller/latency-controller.ts ***!
\**********************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return LatencyController; });
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
|
||
|
||
|
||
|
||
var LatencyController = /*#__PURE__*/function () {
|
||
function LatencyController(hls) {
|
||
var _this = this;
|
||
|
||
this.hls = void 0;
|
||
this.config = void 0;
|
||
this.media = null;
|
||
this.levelDetails = null;
|
||
this.currentTime = 0;
|
||
this.stallCount = 0;
|
||
this._latency = null;
|
||
|
||
this.timeupdateHandler = function () {
|
||
return _this.timeupdate();
|
||
};
|
||
|
||
this.hls = hls;
|
||
this.config = hls.config;
|
||
this.registerListeners();
|
||
}
|
||
|
||
var _proto = LatencyController.prototype;
|
||
|
||
_proto.destroy = function destroy() {
|
||
this.unregisterListeners();
|
||
this.onMediaDetaching();
|
||
this.levelDetails = null; // @ts-ignore
|
||
|
||
this.hls = this.timeupdateHandler = null;
|
||
};
|
||
|
||
_proto.registerListeners = function registerListeners() {
|
||
this.hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
|
||
this.hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
this.hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
this.hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_UPDATED, this.onLevelUpdated, this);
|
||
this.hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, this.onError, this);
|
||
};
|
||
|
||
_proto.unregisterListeners = function unregisterListeners() {
|
||
this.hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHED, this.onMediaAttached);
|
||
this.hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHING, this.onMediaDetaching);
|
||
this.hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADING, this.onManifestLoading);
|
||
this.hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_UPDATED, this.onLevelUpdated);
|
||
this.hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, this.onError);
|
||
};
|
||
|
||
_proto.onMediaAttached = function onMediaAttached(event, data) {
|
||
this.media = data.media;
|
||
this.media.addEventListener('timeupdate', this.timeupdateHandler);
|
||
};
|
||
|
||
_proto.onMediaDetaching = function onMediaDetaching() {
|
||
if (this.media) {
|
||
this.media.removeEventListener('timeupdate', this.timeupdateHandler);
|
||
this.media = null;
|
||
}
|
||
};
|
||
|
||
_proto.onManifestLoading = function onManifestLoading() {
|
||
this.levelDetails = null;
|
||
this._latency = null;
|
||
this.stallCount = 0;
|
||
};
|
||
|
||
_proto.onLevelUpdated = function onLevelUpdated(event, _ref) {
|
||
var details = _ref.details;
|
||
this.levelDetails = details;
|
||
|
||
if (details.advanced) {
|
||
this.timeupdate();
|
||
}
|
||
|
||
if (!details.live && this.media) {
|
||
this.media.removeEventListener('timeupdate', this.timeupdateHandler);
|
||
}
|
||
};
|
||
|
||
_proto.onError = function onError(event, data) {
|
||
if (data.details !== _errors__WEBPACK_IMPORTED_MODULE_0__["ErrorDetails"].BUFFER_STALLED_ERROR) {
|
||
return;
|
||
}
|
||
|
||
this.stallCount++;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn('[playback-rate-controller]: Stall detected, adjusting target latency');
|
||
};
|
||
|
||
_proto.timeupdate = function timeupdate() {
|
||
var media = this.media,
|
||
levelDetails = this.levelDetails;
|
||
|
||
if (!media || !levelDetails) {
|
||
return;
|
||
}
|
||
|
||
this.currentTime = media.currentTime;
|
||
var latency = this.computeLatency();
|
||
|
||
if (latency === null) {
|
||
return;
|
||
}
|
||
|
||
this._latency = latency; // Adapt playbackRate to meet target latency in low-latency mode
|
||
|
||
var _this$config = this.config,
|
||
lowLatencyMode = _this$config.lowLatencyMode,
|
||
maxLiveSyncPlaybackRate = _this$config.maxLiveSyncPlaybackRate;
|
||
|
||
if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1) {
|
||
return;
|
||
}
|
||
|
||
var targetLatency = this.targetLatency;
|
||
|
||
if (targetLatency === null) {
|
||
return;
|
||
}
|
||
|
||
var distanceFromTarget = latency - targetLatency; // Only adjust playbackRate when within one target duration of targetLatency
|
||
// and more than one second from under-buffering.
|
||
// Playback further than one target duration from target can be considered DVR playback.
|
||
|
||
var liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
|
||
var inLiveRange = distanceFromTarget < liveMinLatencyDuration;
|
||
|
||
if (levelDetails.live && inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
|
||
var max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
|
||
var rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
|
||
media.playbackRate = Math.min(max, Math.max(1, rate));
|
||
} else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
|
||
media.playbackRate = 1;
|
||
}
|
||
};
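/*
 * Worked example for the rate curve above (illustrative, and only reachable when lowLatencyMode is on
 * and maxLiveSyncPlaybackRate has been raised above 1, say to 2): with edgeStalled = 0 and the playhead
 * 2s behind target (distanceFromTarget = 2), 2 / (1 + e^(-0.75 * 2)) ~= 1.635, which rounds to the
 * nearest 0.05 as 1.65, so playbackRate is set to 1.65 until the latency closes in on the target.
 */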
|
||
|
||
_proto.estimateLiveEdge = function estimateLiveEdge() {
|
||
var levelDetails = this.levelDetails;
|
||
|
||
if (levelDetails === null) {
|
||
return null;
|
||
}
|
||
|
||
return levelDetails.edge + levelDetails.age;
|
||
};
|
||
|
||
_proto.computeLatency = function computeLatency() {
|
||
var liveEdge = this.estimateLiveEdge();
|
||
|
||
if (liveEdge === null) {
|
||
return null;
|
||
}
|
||
|
||
return liveEdge - this.currentTime;
|
||
};
|
||
|
||
_createClass(LatencyController, [{
|
||
key: "latency",
|
||
get: function get() {
|
||
return this._latency || 0;
|
||
}
|
||
}, {
|
||
key: "maxLatency",
|
||
get: function get() {
|
||
var config = this.config,
|
||
levelDetails = this.levelDetails;
|
||
|
||
if (config.liveMaxLatencyDuration !== undefined) {
|
||
return config.liveMaxLatencyDuration;
|
||
}
|
||
|
||
return levelDetails ? config.liveMaxLatencyDurationCount * levelDetails.targetduration : 0;
|
||
}
|
||
}, {
|
||
key: "targetLatency",
|
||
get: function get() {
|
||
var levelDetails = this.levelDetails;
|
||
|
||
if (levelDetails === null) {
|
||
return null;
|
||
}
|
||
|
||
var holdBack = levelDetails.holdBack,
|
||
partHoldBack = levelDetails.partHoldBack,
|
||
targetduration = levelDetails.targetduration;
|
||
var _this$config2 = this.config,
|
||
liveSyncDuration = _this$config2.liveSyncDuration,
|
||
liveSyncDurationCount = _this$config2.liveSyncDurationCount,
|
||
lowLatencyMode = _this$config2.lowLatencyMode;
|
||
var userConfig = this.hls.userConfig;
|
||
var targetLatency = lowLatencyMode ? partHoldBack || holdBack : holdBack;
|
||
|
||
if (userConfig.liveSyncDuration || userConfig.liveSyncDurationCount || targetLatency === 0) {
|
||
targetLatency = liveSyncDuration !== undefined ? liveSyncDuration : liveSyncDurationCount * targetduration;
|
||
}
|
||
|
||
var maxLiveSyncOnStallIncrease = targetduration;
|
||
var liveSyncOnStallIncrease = 1.0;
|
||
return targetLatency + Math.min(this.stallCount * liveSyncOnStallIncrease, maxLiveSyncOnStallIncrease);
|
||
}
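/*
 * Illustrative numbers for the getter above: with a playlist HOLD-BACK of 6s, no user
 * liveSyncDuration/liveSyncDurationCount override, a 4s target duration and 2 stalls counted so far,
 * the target latency works out to 6 + min(2 * 1.0, 4) = 8 seconds.
 */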
|
||
}, {
|
||
key: "liveSyncPosition",
|
||
get: function get() {
|
||
var liveEdge = this.estimateLiveEdge();
|
||
var targetLatency = this.targetLatency;
|
||
var levelDetails = this.levelDetails;
|
||
|
||
if (liveEdge === null || targetLatency === null || levelDetails === null) {
|
||
return null;
|
||
}
|
||
|
||
var edge = levelDetails.edge;
|
||
var syncPosition = liveEdge - targetLatency - this.edgeStalled;
|
||
var min = edge - levelDetails.totalduration;
|
||
var max = edge - (this.config.lowLatencyMode && levelDetails.partTarget || levelDetails.targetduration);
|
||
return Math.min(Math.max(min, syncPosition), max);
|
||
}
|
||
}, {
|
||
key: "drift",
|
||
get: function get() {
|
||
var levelDetails = this.levelDetails;
|
||
|
||
if (levelDetails === null) {
|
||
return 1;
|
||
}
|
||
|
||
return levelDetails.drift;
|
||
}
|
||
}, {
|
||
key: "edgeStalled",
|
||
get: function get() {
|
||
var levelDetails = this.levelDetails;
|
||
|
||
if (levelDetails === null) {
|
||
return 0;
|
||
}
|
||
|
||
var maxLevelUpdateAge = (this.config.lowLatencyMode && levelDetails.partTarget || levelDetails.targetduration) * 3;
|
||
return Math.max(levelDetails.age - maxLevelUpdateAge, 0);
|
||
}
|
||
}, {
|
||
key: "forwardBufferLength",
|
||
get: function get() {
|
||
var media = this.media,
|
||
levelDetails = this.levelDetails;
|
||
|
||
if (!media || !levelDetails) {
|
||
return 0;
|
||
}
|
||
|
||
var bufferedRanges = media.buffered.length;
|
||
return (bufferedRanges ? media.buffered.end(bufferedRanges - 1) : levelDetails.edge) - this.currentTime;
|
||
}
|
||
}]);
|
||
|
||
return LatencyController;
|
||
}();
|
||
|
||
|
||
|
||
/***/ }),

/***/ "./src/controller/level-controller.ts":
/*!********************************************!*\
!*** ./src/controller/level-controller.ts ***!
\********************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return LevelController; });
|
||
/* harmony import */ var _types_level__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../types/level */ "./src/types/level.ts");
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _utils_codecs__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/codecs */ "./src/utils/codecs.ts");
|
||
/* harmony import */ var _level_helper__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./level-helper */ "./src/controller/level-helper.ts");
|
||
/* harmony import */ var _base_playlist_controller__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./base-playlist-controller */ "./src/controller/base-playlist-controller.ts");
|
||
/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
|
||
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
|
||
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
|
||
|
||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
|
||
|
||
/*
|
||
* Level Controller
|
||
*/
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
var chromeOrFirefox = /chrome|firefox/.test(navigator.userAgent.toLowerCase());
|
||
|
||
var LevelController = /*#__PURE__*/function (_BasePlaylistControll) {
|
||
_inheritsLoose(LevelController, _BasePlaylistControll);
|
||
|
||
function LevelController(hls) {
|
||
var _this;
|
||
|
||
_this = _BasePlaylistControll.call(this, hls, '[level-controller]') || this;
|
||
_this._levels = [];
|
||
_this._firstLevel = -1;
|
||
_this._startLevel = void 0;
|
||
_this.currentLevelIndex = -1;
|
||
_this.manualLevelIndex = -1;
|
||
_this.onParsedComplete = void 0;
|
||
|
||
_this._registerListeners();
|
||
|
||
return _this;
|
||
}
|
||
|
||
var _proto = LevelController.prototype;
|
||
|
||
_proto._registerListeners = function _registerListeners() {
|
||
var hls = this.hls;
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADED, this.onManifestLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADED, this.onFragLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, this.onError, this);
|
||
};
|
||
|
||
_proto._unregisterListeners = function _unregisterListeners() {
|
||
var hls = this.hls;
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADED, this.onManifestLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADED, this.onFragLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, this.onError, this);
|
||
};
|
||
|
||
_proto.destroy = function destroy() {
|
||
this._unregisterListeners();
|
||
|
||
this.manualLevelIndex = -1;
|
||
this._levels.length = 0;
|
||
|
||
_BasePlaylistControll.prototype.destroy.call(this);
|
||
};
|
||
|
||
_proto.startLoad = function startLoad() {
|
||
var levels = this._levels; // clean up live level details to force reload them, and reset load errors
|
||
|
||
levels.forEach(function (level) {
|
||
level.loadError = 0;
|
||
});
|
||
|
||
_BasePlaylistControll.prototype.startLoad.call(this);
|
||
};
|
||
|
||
_proto.onManifestLoaded = function onManifestLoaded(event, data) {
|
||
var levels = [];
|
||
var audioTracks = [];
|
||
var subtitleTracks = [];
|
||
var bitrateStart;
|
||
var levelSet = {};
|
||
var levelFromSet;
|
||
var resolutionFound = false;
|
||
var videoCodecFound = false;
|
||
var audioCodecFound = false; // regroup redundant levels together
|
||
|
||
data.levels.forEach(function (levelParsed) {
|
||
var attributes = levelParsed.attrs;
|
||
resolutionFound = resolutionFound || !!(levelParsed.width && levelParsed.height);
|
||
videoCodecFound = videoCodecFound || !!levelParsed.videoCodec;
|
||
audioCodecFound = audioCodecFound || !!levelParsed.audioCodec; // erase audio codec info if browser does not support mp4a.40.34.
|
||
// demuxer will autodetect codec and fallback to mpeg/audio
|
||
|
||
if (chromeOrFirefox && levelParsed.audioCodec && levelParsed.audioCodec.indexOf('mp4a.40.34') !== -1) {
|
||
levelParsed.audioCodec = undefined;
|
||
}
|
||
|
||
var levelKey = levelParsed.bitrate + "-" + levelParsed.attrs.RESOLUTION + "-" + levelParsed.attrs.CODECS;
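// Illustrative key shape (values made up): an 800000 bps variant at 640x360 using
// "avc1.42c01e,mp4a.40.2" groups under the key "800000-640x360-avc1.42c01e,mp4a.40.2".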
|
||
levelFromSet = levelSet[levelKey];
|
||
|
||
if (!levelFromSet) {
|
||
levelFromSet = new _types_level__WEBPACK_IMPORTED_MODULE_0__["Level"](levelParsed);
|
||
levelSet[levelKey] = levelFromSet;
|
||
levels.push(levelFromSet);
|
||
} else {
|
||
levelFromSet.url.push(levelParsed.url);
|
||
}
|
||
|
||
if (attributes) {
|
||
if (attributes.AUDIO) {
|
||
Object(_level_helper__WEBPACK_IMPORTED_MODULE_4__["addGroupId"])(levelFromSet, 'audio', attributes.AUDIO);
|
||
}
|
||
|
||
if (attributes.SUBTITLES) {
|
||
Object(_level_helper__WEBPACK_IMPORTED_MODULE_4__["addGroupId"])(levelFromSet, 'text', attributes.SUBTITLES);
|
||
}
|
||
}
|
||
}); // remove audio-only level if we also have levels with video codecs or RESOLUTION signalled
|
||
|
||
if ((resolutionFound || videoCodecFound) && audioCodecFound) {
|
||
levels = levels.filter(function (_ref) {
|
||
var videoCodec = _ref.videoCodec,
|
||
width = _ref.width,
|
||
height = _ref.height;
|
||
return !!videoCodec || !!(width && height);
|
||
});
|
||
} // only keep levels with supported audio/video codecs
|
||
|
||
|
||
levels = levels.filter(function (_ref2) {
|
||
var audioCodec = _ref2.audioCodec,
|
||
videoCodec = _ref2.videoCodec;
|
||
return (!audioCodec || Object(_utils_codecs__WEBPACK_IMPORTED_MODULE_3__["isCodecSupportedInMp4"])(audioCodec, 'audio')) && (!videoCodec || Object(_utils_codecs__WEBPACK_IMPORTED_MODULE_3__["isCodecSupportedInMp4"])(videoCodec, 'video'));
|
||
});
|
||
|
||
if (data.audioTracks) {
|
||
audioTracks = data.audioTracks.filter(function (track) {
|
||
return !track.audioCodec || Object(_utils_codecs__WEBPACK_IMPORTED_MODULE_3__["isCodecSupportedInMp4"])(track.audioCodec, 'audio');
|
||
}); // Assign ids after filtering as array indices by group-id
|
||
|
||
Object(_level_helper__WEBPACK_IMPORTED_MODULE_4__["assignTrackIdsByGroup"])(audioTracks);
|
||
}
|
||
|
||
if (data.subtitles) {
|
||
subtitleTracks = data.subtitles;
|
||
Object(_level_helper__WEBPACK_IMPORTED_MODULE_4__["assignTrackIdsByGroup"])(subtitleTracks);
|
||
}
|
||
|
||
if (levels.length > 0) {
|
||
// start bitrate is the first bitrate of the manifest
|
||
bitrateStart = levels[0].bitrate; // sort level on bitrate
|
||
|
||
levels.sort(function (a, b) {
|
||
return a.bitrate - b.bitrate;
|
||
});
|
||
this._levels = levels; // find index of first level in sorted levels
|
||
|
||
for (var i = 0; i < levels.length; i++) {
|
||
if (levels[i].bitrate === bitrateStart) {
|
||
this._firstLevel = i;
|
||
this.log("manifest loaded, " + levels.length + " level(s) found, first bitrate: " + bitrateStart);
|
||
break;
|
||
}
|
||
} // Audio is only alternate if the manifest includes a URI along with the audio group tag,
// and this is not an audio-only stream where the levels themselves contain only audio
|
||
|
||
|
||
var audioOnly = audioCodecFound && !videoCodecFound;
|
||
var edata = {
|
||
levels: levels,
|
||
audioTracks: audioTracks,
|
||
subtitleTracks: subtitleTracks,
|
||
firstLevel: this._firstLevel,
|
||
stats: data.stats,
|
||
audio: audioCodecFound,
|
||
video: videoCodecFound,
|
||
altAudio: !audioOnly && audioTracks.some(function (t) {
|
||
return !!t.url;
|
||
})
|
||
};
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_PARSED, edata); // Initiate loading after all controllers have received MANIFEST_PARSED
|
||
|
||
if (this.hls.config.autoStartLoad || this.hls.forceStartLoad) {
|
||
this.hls.startLoad(this.hls.config.startPosition);
|
||
}
|
||
} else {
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorTypes"].MEDIA_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].MANIFEST_INCOMPATIBLE_CODECS_ERROR,
|
||
fatal: true,
|
||
url: data.url,
|
||
reason: 'no level with compatible codecs found in manifest'
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto.onError = function onError(event, data) {
|
||
_BasePlaylistControll.prototype.onError.call(this, event, data);
|
||
|
||
if (data.fatal) {
|
||
return;
|
||
} // Switch to redundant level when track fails to load
|
||
|
||
|
||
var context = data.context;
|
||
var level = this._levels[this.currentLevelIndex];
|
||
|
||
if (context && (context.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK && level.audioGroupIds && context.groupId === level.audioGroupIds[level.urlId] || context.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK && level.textGroupIds && context.groupId === level.textGroupIds[level.urlId])) {
|
||
this.redundantFailover(this.currentLevelIndex);
|
||
return;
|
||
}
|
||
|
||
var levelError = false;
|
||
var levelSwitch = true;
|
||
var levelIndex; // try to recover not fatal errors
|
||
|
||
switch (data.details) {
|
||
case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].FRAG_LOAD_ERROR:
|
||
case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].FRAG_LOAD_TIMEOUT:
|
||
case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].KEY_LOAD_ERROR:
|
||
case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].KEY_LOAD_TIMEOUT:
|
||
if (data.frag) {
|
||
var _level = this._levels[data.frag.level]; // Set levelIndex when we're out of fragment retries
|
||
|
||
if (_level) {
|
||
_level.fragmentError++;
|
||
|
||
if (_level.fragmentError > this.hls.config.fragLoadingMaxRetry) {
|
||
levelIndex = data.frag.level;
|
||
}
|
||
} else {
|
||
levelIndex = data.frag.level;
|
||
}
|
||
}
|
||
|
||
break;
|
||
|
||
case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].LEVEL_LOAD_ERROR:
|
||
case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].LEVEL_LOAD_TIMEOUT:
|
||
// Do not perform level switch if an error occurred using delivery directives
|
||
// Attempt to reload level without directives first
|
||
if (context) {
|
||
if (context.deliveryDirectives) {
|
||
levelSwitch = false;
|
||
}
|
||
|
||
levelIndex = context.level;
|
||
}
|
||
|
||
levelError = true;
|
||
break;
|
||
|
||
case _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].REMUX_ALLOC_ERROR:
|
||
levelIndex = data.level;
|
||
levelError = true;
|
||
break;
|
||
}
|
||
|
||
if (levelIndex !== undefined) {
|
||
this.recoverLevel(data, levelIndex, levelError, levelSwitch);
|
||
}
|
||
}
|
||
/**
|
||
* Switch to a redundant stream if any available.
|
||
* If redundant stream is not available, emergency switch down if ABR mode is enabled.
|
||
*/
|
||
;
|
||
|
||
_proto.recoverLevel = function recoverLevel(errorEvent, levelIndex, levelError, levelSwitch) {
|
||
var errorDetails = errorEvent.details;
|
||
var level = this._levels[levelIndex];
|
||
level.loadError++;
|
||
|
||
if (levelError) {
|
||
var retrying = this.retryLoadingOrFail(errorEvent);
|
||
|
||
if (retrying) {
|
||
// boolean used to inform stream controller not to switch back to IDLE on non fatal error
|
||
errorEvent.levelRetry = true;
|
||
} else {
|
||
this.currentLevelIndex = -1;
|
||
return;
|
||
}
|
||
}
|
||
|
||
if (levelSwitch) {
|
||
var redundantLevels = level.url.length; // Try redundant fail-over until level.loadError reaches redundantLevels
|
||
|
||
if (redundantLevels > 1 && level.loadError < redundantLevels) {
|
||
errorEvent.levelRetry = true;
|
||
this.redundantFailover(levelIndex);
|
||
} else if (this.manualLevelIndex === -1) {
|
||
// Search for available level in auto level selection mode, cycling from highest to lowest bitrate
|
||
var nextLevel = levelIndex === 0 ? this._levels.length - 1 : levelIndex - 1;
|
||
|
||
if (this.currentLevelIndex !== nextLevel && this._levels[nextLevel].loadError === 0) {
|
||
this.warn(errorDetails + ": switch to " + nextLevel);
|
||
errorEvent.levelRetry = true;
|
||
this.hls.nextAutoLevel = nextLevel;
|
||
}
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.redundantFailover = function redundantFailover(levelIndex) {
|
||
var level = this._levels[levelIndex];
|
||
var redundantLevels = level.url.length;
|
||
|
||
if (redundantLevels > 1) {
|
||
// Update the url id of all levels so that we stay on the same set of variants when level switching
|
||
var newUrlId = (level.urlId + 1) % redundantLevels;
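// e.g. with three redundant URLs the id cycles 0 -> 1 -> 2 -> 0 on successive failovers (illustrative).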
|
||
this.warn("Switching to redundant URL-id " + newUrlId);
|
||
|
||
this._levels.forEach(function (level) {
|
||
level.urlId = newUrlId;
|
||
});
|
||
|
||
this.level = levelIndex;
|
||
}
|
||
} // reset errors on the successful load of a fragment
|
||
;
|
||
|
||
_proto.onFragLoaded = function onFragLoaded(event, _ref3) {
|
||
var frag = _ref3.frag;
|
||
|
||
if (frag !== undefined && frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN) {
|
||
var level = this._levels[frag.level];
|
||
|
||
if (level !== undefined) {
|
||
level.fragmentError = 0;
|
||
level.loadError = 0;
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.onLevelLoaded = function onLevelLoaded(event, data) {
  var _data$deliveryDirecti2;

  var level = data.level,
      details = data.details;
  var curLevel = this._levels[level];

  if (!curLevel) {
    var _data$deliveryDirecti;

    this.warn("Invalid level index " + level);

    if ((_data$deliveryDirecti = data.deliveryDirectives) !== null && _data$deliveryDirecti !== void 0 && _data$deliveryDirecti.skip) {
      details.deltaUpdateFailed = true;
    }

    return;
  } // only process level loaded events matching with expected level


  if (level === this.currentLevelIndex) {
    // reset the level load error counter on a successful level load only if there are no issues with fragments
    if (curLevel.fragmentError === 0) {
      curLevel.loadError = 0;
      this.retryCount = 0;
    }

    this.playlistLoaded(level, data, curLevel.details);
  } else if ((_data$deliveryDirecti2 = data.deliveryDirectives) !== null && _data$deliveryDirecti2 !== void 0 && _data$deliveryDirecti2.skip) {
    // received a delta playlist update that cannot be merged
    details.deltaUpdateFailed = true;
  }
};

_proto.onAudioTrackSwitched = function onAudioTrackSwitched(event, data) {
  var currentLevel = this.hls.levels[this.currentLevelIndex];

  if (!currentLevel) {
    return;
  }

  if (currentLevel.audioGroupIds) {
    var urlId = -1;
    var audioGroupId = this.hls.audioTracks[data.id].groupId;

    for (var i = 0; i < currentLevel.audioGroupIds.length; i++) {
      if (currentLevel.audioGroupIds[i] === audioGroupId) {
        urlId = i;
        break;
      }
    }

    if (urlId !== currentLevel.urlId) {
      currentLevel.urlId = urlId;
      this.startLoad();
    }
  }
};

_proto.loadPlaylist = function loadPlaylist(hlsUrlParameters) {
  var level = this.currentLevelIndex;
  var currentLevel = this._levels[level];

  if (this.canLoad && currentLevel && currentLevel.url.length > 0) {
    var id = currentLevel.urlId;
    var url = currentLevel.url[id];

    if (hlsUrlParameters) {
      try {
        url = hlsUrlParameters.addDirectives(url);
      } catch (error) {
        this.warn("Could not construct new URL with HLS Delivery Directives: " + error);
      }
    }

    this.log("Attempt loading level index " + level + (hlsUrlParameters ? ' at sn ' + hlsUrlParameters.msn + ' part ' + hlsUrlParameters.part : '') + " with URL-id " + id + " " + url); // console.log('Current audio track group ID:', this.hls.audioTracks[this.hls.audioTrack].groupId);
    // console.log('New video quality level audio group id:', levelObject.attrs.AUDIO, level);

    this.clearTimer();
    this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_LOADING, {
      url: url,
      level: level,
      id: id,
      deliveryDirectives: hlsUrlParameters || null
    });
  }
};

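// Note on hlsUrlParameters (illustrative, based on the fields referenced above): the object carries the
// blocking-reload state for Low-Latency HLS (msn = media sequence number, part = partial segment index,
// plus an optional skip flag), and addDirectives(url) is expected to append the corresponding LL-HLS
// query parameters (_HLS_msn, _HLS_part, _HLS_skip) to the playlist URL before LEVEL_LOADING is triggered.
// The exact parameter names come from the HLS delivery-directives specification, not from this bundle.
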
_proto.removeLevel = function removeLevel(levelIndex, urlId) {
  var filterLevelAndGroupByIdIndex = function filterLevelAndGroupByIdIndex(url, id) {
    return id !== urlId;
  };

  var levels = this._levels.filter(function (level, index) {
    if (index !== levelIndex) {
      return true;
    }

    if (level.url.length > 1 && urlId !== undefined) {
      level.url = level.url.filter(filterLevelAndGroupByIdIndex);

      if (level.audioGroupIds) {
        level.audioGroupIds = level.audioGroupIds.filter(filterLevelAndGroupByIdIndex);
      }

      if (level.textGroupIds) {
        level.textGroupIds = level.textGroupIds.filter(filterLevelAndGroupByIdIndex);
      }

      level.urlId = 0;
      return true;
    }

    return false;
  }).map(function (level, index) {
    var details = level.details;

    if (details !== null && details !== void 0 && details.fragments) {
      details.fragments.forEach(function (fragment) {
        fragment.level = index;
      });
    }

    return level;
  });

  this._levels = levels;
  this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVELS_UPDATED, {
    levels: levels
  });
};

_createClass(LevelController, [{
|
||
key: "levels",
|
||
get: function get() {
|
||
if (this._levels.length === 0) {
|
||
return null;
|
||
}
|
||
|
||
return this._levels;
|
||
}
|
||
}, {
|
||
key: "level",
|
||
get: function get() {
|
||
return this.currentLevelIndex;
|
||
},
|
||
set: function set(newLevel) {
|
||
var _levels$newLevel;
|
||
|
||
var levels = this._levels;
|
||
|
||
if (levels.length === 0) {
|
||
return;
|
||
}
|
||
|
||
if (this.currentLevelIndex === newLevel && (_levels$newLevel = levels[newLevel]) !== null && _levels$newLevel !== void 0 && _levels$newLevel.details) {
|
||
return;
|
||
} // check if level idx is valid
|
||
|
||
|
||
if (newLevel < 0 || newLevel >= levels.length) {
|
||
// invalid level id given, trigger error
|
||
var fatal = newLevel < 0;
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorTypes"].OTHER_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].LEVEL_SWITCH_ERROR,
|
||
level: newLevel,
|
||
fatal: fatal,
|
||
reason: 'invalid level idx'
|
||
});
|
||
|
||
if (fatal) {
|
||
return;
|
||
}
|
||
|
||
newLevel = Math.min(newLevel, levels.length - 1);
|
||
} // stopping live reloading timer if any
|
||
|
||
|
||
this.clearTimer();
|
||
var lastLevelIndex = this.currentLevelIndex;
|
||
var lastLevel = levels[lastLevelIndex];
|
||
var level = levels[newLevel];
|
||
this.log("switching to level " + newLevel + " from " + lastLevelIndex);
|
||
this.currentLevelIndex = newLevel;
|
||
|
||
var levelSwitchingData = _extends({}, level, {
|
||
level: newLevel,
|
||
maxBitrate: level.maxBitrate,
|
||
uri: level.uri,
|
||
urlId: level.urlId
|
||
}); // @ts-ignore
|
||
|
||
|
||
delete levelSwitchingData._urlId;
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_SWITCHING, levelSwitchingData); // check if we need to load playlist for this level
|
||
|
||
var levelDetails = level.details;
|
||
|
||
if (!levelDetails || levelDetails.live) {
|
||
// level not retrieved yet, or live playlist we need to (re)load it
|
||
var hlsUrlParameters = this.switchParams(level.uri, lastLevel === null || lastLevel === void 0 ? void 0 : lastLevel.details);
|
||
this.loadPlaylist(hlsUrlParameters);
|
||
}
|
||
}
|
||
}, {
|
||
key: "manualLevel",
|
||
get: function get() {
|
||
return this.manualLevelIndex;
|
||
},
|
||
set: function set(newLevel) {
|
||
this.manualLevelIndex = newLevel;
|
||
|
||
if (this._startLevel === undefined) {
|
||
this._startLevel = newLevel;
|
||
}
|
||
|
||
if (newLevel !== -1) {
|
||
this.level = newLevel;
|
||
}
|
||
}
|
||
}, {
|
||
key: "firstLevel",
|
||
get: function get() {
|
||
return this._firstLevel;
|
||
},
|
||
set: function set(newLevel) {
|
||
this._firstLevel = newLevel;
|
||
}
|
||
}, {
|
||
key: "startLevel",
|
||
get: function get() {
|
||
// hls.startLevel takes precedence over config.startLevel
|
||
// if none of these values are defined, fallback on this._firstLevel (first quality level appearing in variant manifest)
|
||
if (this._startLevel === undefined) {
|
||
var configStartLevel = this.hls.config.startLevel;
|
||
|
||
if (configStartLevel !== undefined) {
|
||
return configStartLevel;
|
||
} else {
|
||
return this._firstLevel;
|
||
}
|
||
} else {
|
||
return this._startLevel;
|
||
}
|
||
},
|
||
set: function set(newLevel) {
|
||
this._startLevel = newLevel;
|
||
}
|
||
}, {
|
||
key: "nextLoadLevel",
|
||
get: function get() {
|
||
if (this.manualLevelIndex !== -1) {
|
||
return this.manualLevelIndex;
|
||
} else {
|
||
return this.hls.nextAutoLevel;
|
||
}
|
||
},
|
||
set: function set(nextLevel) {
|
||
this.level = nextLevel;
|
||
|
||
if (this.manualLevelIndex === -1) {
|
||
this.hls.nextAutoLevel = nextLevel;
|
||
}
|
||
}
|
||
}]);
|
||
|
||
return LevelController;
|
||
}(_base_playlist_controller__WEBPACK_IMPORTED_MODULE_5__["default"]);
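// Usage sketch (illustrative; assumes the public Hls facade wires through to the accessors above):
//
//   var hls = new Hls();
//   hls.loadSource('https://example.com/master.m3u8'); // hypothetical manifest URL
//   hls.attachMedia(videoElement);
//   hls.on(Hls.Events.MANIFEST_PARSED, function () {
//     console.log(hls.levels.map(function (l) { return l.height; })); // LevelController "levels" getter
//     hls.startLevel = -1;  // -1 keeps automatic start level selection
//     hls.currentLevel = 2; // forcing a level pins manualLevelIndex, disabling ABR until it is set back to -1
//   });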
|
||
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/controller/level-helper.ts":
|
||
/*!****************************************!*\
|
||
!*** ./src/controller/level-helper.ts ***!
|
||
\****************************************/
|
||
/*! exports provided: addGroupId, assignTrackIdsByGroup, updatePTS, updateFragPTSDTS, mergeDetails, mapPartIntersection, mapFragmentIntersection, adjustSliding, addSliding, computeReloadInterval, getFragmentWithSN, getPartWith */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "addGroupId", function() { return addGroupId; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "assignTrackIdsByGroup", function() { return assignTrackIdsByGroup; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "updatePTS", function() { return updatePTS; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "updateFragPTSDTS", function() { return updateFragPTSDTS; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "mergeDetails", function() { return mergeDetails; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "mapPartIntersection", function() { return mapPartIntersection; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "mapFragmentIntersection", function() { return mapFragmentIntersection; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "adjustSliding", function() { return adjustSliding; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "addSliding", function() { return addSliding; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "computeReloadInterval", function() { return computeReloadInterval; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getFragmentWithSN", function() { return getFragmentWithSN; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getPartWith", function() { return getPartWith; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
|
||
|
||
|
||
|
||
|
||
|
||
/**
 * @module LevelHelper
 * Provides methods for dealing with playlist sliding and drift
 * */

function addGroupId(level, type, id) {
  switch (type) {
    case 'audio':
      if (!level.audioGroupIds) {
        level.audioGroupIds = [];
      }

      level.audioGroupIds.push(id);
      break;

    case 'text':
      if (!level.textGroupIds) {
        level.textGroupIds = [];
      }

      level.textGroupIds.push(id);
      break;
  }
}
function assignTrackIdsByGroup(tracks) {
  var groups = {};
  tracks.forEach(function (track) {
    var groupId = track.groupId || '';
    track.id = groups[groupId] = groups[groupId] || 0;
    groups[groupId]++;
  });
}
function updatePTS(fragments, fromIdx, toIdx) {
|
||
var fragFrom = fragments[fromIdx];
|
||
var fragTo = fragments[toIdx];
|
||
updateFromToPTS(fragFrom, fragTo);
|
||
}
|
||
|
||
function updateFromToPTS(fragFrom, fragTo) {
|
||
var fragToPTS = fragTo.startPTS; // if we know startPTS[toIdx]
|
||
|
||
if (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(fragToPTS)) {
|
||
// update fragment duration.
|
||
// it helps to fix drifts between playlist reported duration and fragment real duration
|
||
var duration = 0;
|
||
var frag;
|
||
|
||
if (fragTo.sn > fragFrom.sn) {
|
||
duration = fragToPTS - fragFrom.start;
|
||
frag = fragFrom;
|
||
} else {
|
||
duration = fragFrom.start - fragToPTS;
|
||
frag = fragTo;
|
||
} // TODO? Drift can go either way, or the playlist could be completely accurate
|
||
// console.assert(duration > 0,
|
||
// `duration of ${duration} computed for frag ${frag.sn}, level ${frag.level}, there should be some duration drift between playlist and fragment!`);
|
||
|
||
|
||
if (frag.duration !== duration) {
|
||
frag.duration = duration;
|
||
} // we don't know startPTS[toIdx]
|
||
|
||
} else if (fragTo.sn > fragFrom.sn) {
|
||
var contiguous = fragFrom.cc === fragTo.cc; // TODO: With part-loading end/durations we need to confirm the whole fragment is loaded before using (or setting) minEndPTS
|
||
|
||
if (contiguous && fragFrom.minEndPTS) {
|
||
fragTo.start = fragFrom.start + (fragFrom.minEndPTS - fragFrom.start);
|
||
} else {
|
||
fragTo.start = fragFrom.start + fragFrom.duration;
|
||
}
|
||
} else {
|
||
fragTo.start = Math.max(fragFrom.start - fragTo.duration, 0);
|
||
}
|
||
}
|
||
|
||
function updateFragPTSDTS(details, frag, startPTS, endPTS, startDTS, endDTS) {
|
||
var parsedMediaDuration = endPTS - startPTS;
|
||
|
||
if (parsedMediaDuration <= 0) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_1__["logger"].warn('Fragment should have a positive duration', frag);
|
||
endPTS = startPTS + frag.duration;
|
||
endDTS = startDTS + frag.duration;
|
||
}
|
||
|
||
var maxStartPTS = startPTS;
|
||
var minEndPTS = endPTS;
|
||
var fragStartPts = frag.startPTS;
|
||
var fragEndPts = frag.endPTS;
|
||
|
||
if (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(fragStartPts)) {
|
||
// delta PTS between audio and video
|
||
var deltaPTS = Math.abs(fragStartPts - startPTS);
|
||
|
||
if (!Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(frag.deltaPTS)) {
|
||
frag.deltaPTS = deltaPTS;
|
||
} else {
|
||
frag.deltaPTS = Math.max(deltaPTS, frag.deltaPTS);
|
||
}
|
||
|
||
maxStartPTS = Math.max(startPTS, fragStartPts);
|
||
startPTS = Math.min(startPTS, fragStartPts);
|
||
startDTS = Math.min(startDTS, frag.startDTS);
|
||
minEndPTS = Math.min(endPTS, fragEndPts);
|
||
endPTS = Math.max(endPTS, fragEndPts);
|
||
endDTS = Math.max(endDTS, frag.endDTS);
|
||
}
|
||
|
||
frag.duration = endPTS - startPTS;
|
||
var drift = startPTS - frag.start;
|
||
frag.appendedPTS = endPTS;
|
||
frag.start = frag.startPTS = startPTS;
|
||
frag.maxStartPTS = maxStartPTS;
|
||
frag.startDTS = startDTS;
|
||
frag.endPTS = endPTS;
|
||
frag.minEndPTS = minEndPTS;
|
||
frag.endDTS = endDTS;
|
||
var sn = frag.sn; // 'initSegment'
|
||
// exit if sn out of range
|
||
|
||
if (!details || sn < details.startSN || sn > details.endSN) {
|
||
return 0;
|
||
}
|
||
|
||
var i;
|
||
var fragIdx = sn - details.startSN;
|
||
var fragments = details.fragments; // update frag reference in fragments array
|
||
// rationale is that fragments array might not contain this frag object.
|
||
// this will happen if playlist has been refreshed between frag loading and call to updateFragPTSDTS()
|
||
// if we don't update frag, we won't be able to propagate PTS info on the playlist
|
||
// resulting in invalid sliding computation
|
||
|
||
fragments[fragIdx] = frag; // adjust fragment PTS/duration from seqnum-1 to frag 0
|
||
|
||
for (i = fragIdx; i > 0; i--) {
|
||
updateFromToPTS(fragments[i], fragments[i - 1]);
|
||
} // adjust fragment PTS/duration from seqnum to last frag
|
||
|
||
|
||
for (i = fragIdx; i < fragments.length - 1; i++) {
|
||
updateFromToPTS(fragments[i], fragments[i + 1]);
|
||
}
|
||
|
||
if (details.fragmentHint) {
|
||
updateFromToPTS(fragments[fragments.length - 1], details.fragmentHint);
|
||
}
|
||
|
||
details.PTSKnown = details.alignedSliding = true;
|
||
return drift;
|
||
}
|
||
function mergeDetails(oldDetails, newDetails) {
|
||
// Track the last initSegment processed. Initialize it to the last one on the timeline.
|
||
var currentInitSegment = null;
|
||
var oldFragments = oldDetails.fragments;
|
||
|
||
for (var i = oldFragments.length - 1; i >= 0; i--) {
|
||
var oldInit = oldFragments[i].initSegment;
|
||
|
||
if (oldInit) {
|
||
currentInitSegment = oldInit;
|
||
break;
|
||
}
|
||
}
|
||
|
||
if (oldDetails.fragmentHint) {
|
||
// prevent PTS and duration from being adjusted on the next hint
|
||
delete oldDetails.fragmentHint.endPTS;
|
||
} // check if old/new playlists have fragments in common
|
||
// loop through overlapping SN and update startPTS , cc, and duration if any found
|
||
|
||
|
||
var ccOffset = 0;
|
||
var PTSFrag;
|
||
mapFragmentIntersection(oldDetails, newDetails, function (oldFrag, newFrag) {
|
||
if (oldFrag.relurl) {
|
||
// Do not compare CC if the old fragment has no url. This is a level.fragmentHint used by LL-HLS parts.
|
||
// It may be off by 1 if it was created before any parts or discontinuity tags were appended to the end
|
||
// of the playlist.
|
||
ccOffset = oldFrag.cc - newFrag.cc;
|
||
}
|
||
|
||
if (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(oldFrag.startPTS) && Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(oldFrag.endPTS)) {
|
||
newFrag.start = newFrag.startPTS = oldFrag.startPTS;
|
||
newFrag.startDTS = oldFrag.startDTS;
|
||
newFrag.appendedPTS = oldFrag.appendedPTS;
|
||
newFrag.maxStartPTS = oldFrag.maxStartPTS;
|
||
newFrag.endPTS = oldFrag.endPTS;
|
||
newFrag.endDTS = oldFrag.endDTS;
|
||
newFrag.minEndPTS = oldFrag.minEndPTS;
|
||
newFrag.duration = oldFrag.endPTS - oldFrag.startPTS;
|
||
|
||
if (newFrag.duration) {
|
||
PTSFrag = newFrag;
|
||
} // PTS is known when any segment has startPTS and endPTS
|
||
|
||
|
||
newDetails.PTSKnown = newDetails.alignedSliding = true;
|
||
}
|
||
|
||
newFrag.elementaryStreams = oldFrag.elementaryStreams;
|
||
newFrag.loader = oldFrag.loader;
|
||
newFrag.stats = oldFrag.stats;
|
||
newFrag.urlId = oldFrag.urlId;
|
||
|
||
if (oldFrag.initSegment) {
|
||
newFrag.initSegment = oldFrag.initSegment;
|
||
currentInitSegment = oldFrag.initSegment;
|
||
}
|
||
});
|
||
|
||
if (currentInitSegment) {
|
||
var fragmentsToCheck = newDetails.fragmentHint ? newDetails.fragments.concat(newDetails.fragmentHint) : newDetails.fragments;
|
||
fragmentsToCheck.forEach(function (frag) {
|
||
var _currentInitSegment;
|
||
|
||
if (!frag.initSegment || frag.initSegment.relurl === ((_currentInitSegment = currentInitSegment) === null || _currentInitSegment === void 0 ? void 0 : _currentInitSegment.relurl)) {
|
||
frag.initSegment = currentInitSegment;
|
||
}
|
||
});
|
||
}
|
||
|
||
if (newDetails.skippedSegments) {
|
||
newDetails.deltaUpdateFailed = newDetails.fragments.some(function (frag) {
|
||
return !frag;
|
||
});
|
||
|
||
if (newDetails.deltaUpdateFailed) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_1__["logger"].warn('[level-helper] Previous playlist missing segments skipped in delta playlist');
|
||
|
||
for (var _i = newDetails.skippedSegments; _i--;) {
|
||
newDetails.fragments.shift();
|
||
}
|
||
|
||
newDetails.startSN = newDetails.fragments[0].sn;
|
||
newDetails.startCC = newDetails.fragments[0].cc;
|
||
}
|
||
}
|
||
|
||
var newFragments = newDetails.fragments;
|
||
|
||
if (ccOffset) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_1__["logger"].warn('discontinuity sliding from playlist, take drift into account');
|
||
|
||
for (var _i2 = 0; _i2 < newFragments.length; _i2++) {
|
||
newFragments[_i2].cc += ccOffset;
|
||
}
|
||
}
|
||
|
||
if (newDetails.skippedSegments) {
|
||
newDetails.startCC = newDetails.fragments[0].cc;
|
||
} // Merge parts
|
||
|
||
|
||
mapPartIntersection(oldDetails.partList, newDetails.partList, function (oldPart, newPart) {
|
||
newPart.elementaryStreams = oldPart.elementaryStreams;
|
||
newPart.stats = oldPart.stats;
|
||
}); // if at least one fragment contains PTS info, recompute PTS information for all fragments
|
||
|
||
if (PTSFrag) {
|
||
updateFragPTSDTS(newDetails, PTSFrag, PTSFrag.startPTS, PTSFrag.endPTS, PTSFrag.startDTS, PTSFrag.endDTS);
|
||
} else {
|
||
// ensure that delta is within oldFragments range
|
||
// also adjust sliding in case delta is 0 (we could have old=[50-60] and new=old=[50-61])
|
||
// in that case we also need to adjust start offset of all fragments
|
||
adjustSliding(oldDetails, newDetails);
|
||
}
|
||
|
||
if (newFragments.length) {
|
||
newDetails.totalduration = newDetails.edge - newFragments[0].start;
|
||
}
|
||
|
||
newDetails.driftStartTime = oldDetails.driftStartTime;
|
||
newDetails.driftStart = oldDetails.driftStart;
|
||
var advancedDateTime = newDetails.advancedDateTime;
|
||
|
||
if (newDetails.advanced && advancedDateTime) {
|
||
var edge = newDetails.edge;
|
||
|
||
if (!newDetails.driftStart) {
|
||
newDetails.driftStartTime = advancedDateTime;
|
||
newDetails.driftStart = edge;
|
||
}
|
||
|
||
newDetails.driftEndTime = advancedDateTime;
|
||
newDetails.driftEnd = edge;
|
||
} else {
|
||
newDetails.driftEndTime = oldDetails.driftEndTime;
|
||
newDetails.driftEnd = oldDetails.driftEnd;
|
||
newDetails.advancedDateTime = oldDetails.advancedDateTime;
|
||
}
|
||
}
|
||
function mapPartIntersection(oldParts, newParts, intersectionFn) {
|
||
if (oldParts && newParts) {
|
||
var delta = 0;
|
||
|
||
for (var i = 0, len = oldParts.length; i <= len; i++) {
|
||
var _oldPart = oldParts[i];
|
||
var _newPart = newParts[i + delta];
|
||
|
||
if (_oldPart && _newPart && _oldPart.index === _newPart.index && _oldPart.fragment.sn === _newPart.fragment.sn) {
|
||
intersectionFn(_oldPart, _newPart);
|
||
} else {
|
||
delta--;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
function mapFragmentIntersection(oldDetails, newDetails, intersectionFn) {
|
||
var skippedSegments = newDetails.skippedSegments;
|
||
var start = Math.max(oldDetails.startSN, newDetails.startSN) - newDetails.startSN;
|
||
var end = (oldDetails.fragmentHint ? 1 : 0) + (skippedSegments ? newDetails.endSN : Math.min(oldDetails.endSN, newDetails.endSN)) - newDetails.startSN;
|
||
var delta = newDetails.startSN - oldDetails.startSN;
|
||
var newFrags = newDetails.fragmentHint ? newDetails.fragments.concat(newDetails.fragmentHint) : newDetails.fragments;
|
||
var oldFrags = oldDetails.fragmentHint ? oldDetails.fragments.concat(oldDetails.fragmentHint) : oldDetails.fragments;
|
||
|
||
for (var i = start; i <= end; i++) {
|
||
var _oldFrag = oldFrags[delta + i];
|
||
var _newFrag = newFrags[i];
|
||
|
||
if (skippedSegments && !_newFrag && i < skippedSegments) {
|
||
// Fill in skipped segments in delta playlist
|
||
_newFrag = newDetails.fragments[i] = _oldFrag;
|
||
}
|
||
|
||
if (_oldFrag && _newFrag) {
|
||
intersectionFn(_oldFrag, _newFrag);
|
||
}
|
||
}
|
||
}
|
||
function adjustSliding(oldDetails, newDetails) {
|
||
var delta = newDetails.startSN + newDetails.skippedSegments - oldDetails.startSN;
|
||
var oldFragments = oldDetails.fragments;
|
||
|
||
if (delta < 0 || delta >= oldFragments.length) {
|
||
return;
|
||
}
|
||
|
||
addSliding(newDetails, oldFragments[delta].start);
|
||
}
|
||
function addSliding(details, start) {
|
||
if (start) {
|
||
var fragments = details.fragments;
|
||
|
||
for (var i = details.skippedSegments; i < fragments.length; i++) {
|
||
fragments[i].start += start;
|
||
}
|
||
|
||
if (details.fragmentHint) {
|
||
details.fragmentHint.start += start;
|
||
}
|
||
}
|
||
}
|
||
function computeReloadInterval(newDetails, stats) {
  var reloadInterval = 1000 * newDetails.levelTargetDuration;
  var reloadIntervalAfterMiss = reloadInterval / 2;
  var timeSinceLastModified = newDetails.age;
  var useLastModified = timeSinceLastModified > 0 && timeSinceLastModified < reloadInterval * 3;
  var roundTrip = stats.loading.end - stats.loading.start;
  var estimatedTimeUntilUpdate;
  var availabilityDelay = newDetails.availabilityDelay; // let estimate = 'average';

  if (newDetails.updated === false) {
    if (useLastModified) {
      // estimate = 'miss round trip';
      // We should have had a hit so try again in the time it takes to get a response,
      // but no less than 1/3 second.
      var minRetry = 333 * newDetails.misses;
      estimatedTimeUntilUpdate = Math.max(Math.min(reloadIntervalAfterMiss, roundTrip * 2), minRetry);
      newDetails.availabilityDelay = (newDetails.availabilityDelay || 0) + estimatedTimeUntilUpdate;
    } else {
      // estimate = 'miss half average';
      // follow the HLS Spec: if the client reloads a Playlist file and finds that it has not
      // changed, then it MUST wait for a period of one-half the target
      // duration before retrying.
      estimatedTimeUntilUpdate = reloadIntervalAfterMiss;
    }
  } else if (useLastModified) {
    // estimate = 'next modified date';
    // Get the closest we've been to timeSinceLastModified on update
    availabilityDelay = Math.min(availabilityDelay || reloadInterval / 2, timeSinceLastModified);
    newDetails.availabilityDelay = availabilityDelay;
    estimatedTimeUntilUpdate = availabilityDelay + reloadInterval - timeSinceLastModified;
  } else {
    estimatedTimeUntilUpdate = reloadInterval - roundTrip;
  } // console.log(`[computeReloadInterval] live reload ${newDetails.updated ? 'REFRESHED' : 'MISSED'}`,
  // '\n method', estimate,
  // '\n estimated time until update =>', estimatedTimeUntilUpdate,
  // '\n average target duration', reloadInterval,
  // '\n time since modified', timeSinceLastModified,
  // '\n time round trip', roundTrip,
  // '\n availability delay', availabilityDelay);


  return Math.round(estimatedTimeUntilUpdate);
}
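// Worked example (illustrative numbers, assuming a 6s EXT-X-TARGETDURATION and a 500ms round trip):
//   reloadInterval = 6000ms, reloadIntervalAfterMiss = 3000ms.
//   - playlist refreshed, no usable Last-Modified age  -> 6000 - 500 = 5500ms until the next reload
//   - playlist unchanged, no usable Last-Modified age  -> reloadIntervalAfterMiss = 3000ms (half the target duration, per the HLS spec)
//   - playlist unchanged, Last-Modified usable, 1 miss -> max(min(3000, 2 * 500), 333) = 1000ms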
function getFragmentWithSN(level, sn, fragCurrent) {
|
||
if (!level || !level.details) {
|
||
return null;
|
||
}
|
||
|
||
var levelDetails = level.details;
|
||
var fragment = levelDetails.fragments[sn - levelDetails.startSN];
|
||
|
||
if (fragment) {
|
||
return fragment;
|
||
}
|
||
|
||
fragment = levelDetails.fragmentHint;
|
||
|
||
if (fragment && fragment.sn === sn) {
|
||
return fragment;
|
||
}
|
||
|
||
if (sn < levelDetails.startSN && fragCurrent && fragCurrent.sn === sn) {
|
||
return fragCurrent;
|
||
}
|
||
|
||
return null;
|
||
}
|
||
function getPartWith(level, sn, partIndex) {
|
||
if (!level || !level.details) {
|
||
return null;
|
||
}
|
||
|
||
var partList = level.details.partList;
|
||
|
||
if (partList) {
|
||
for (var i = partList.length; i--;) {
|
||
var part = partList[i];
|
||
|
||
if (part.index === partIndex && part.fragment.sn === sn) {
|
||
return part;
|
||
}
|
||
}
|
||
}
|
||
|
||
return null;
|
||
}
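// Indexing note (illustrative): getFragmentWithSN() relies on media sequence numbers being contiguous,
// so for details.startSN = 100 a request for sn = 103 resolves to details.fragments[103 - 100] = fragments[3].
// A sequence number just past endSN may still match details.fragmentHint (the LL-HLS look-ahead fragment),
// and an sn that has already slid out of the playlist can only be served from the fragCurrent argument.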
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/controller/stream-controller.ts":
|
||
/*!*********************************************!*\
|
||
!*** ./src/controller/stream-controller.ts ***!
|
||
\*********************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return StreamController; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./base-stream-controller */ "./src/controller/base-stream-controller.ts");
|
||
/* harmony import */ var _is_supported__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../is-supported */ "./src/is-supported.ts");
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
|
||
/* harmony import */ var _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./fragment-tracker */ "./src/controller/fragment-tracker.ts");
|
||
/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
|
||
/* harmony import */ var _loader_fragment__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../loader/fragment */ "./src/loader/fragment.ts");
|
||
/* harmony import */ var _demux_transmuxer_interface__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../demux/transmuxer-interface */ "./src/demux/transmuxer-interface.ts");
|
||
/* harmony import */ var _types_transmuxer__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ../types/transmuxer */ "./src/types/transmuxer.ts");
|
||
/* harmony import */ var _gap_controller__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ./gap-controller */ "./src/controller/gap-controller.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
|
||
|
||
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
|
||
|
||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
var TICK_INTERVAL = 100; // how often to tick in ms
|
||
|
||
var StreamController = /*#__PURE__*/function (_BaseStreamController) {
|
||
_inheritsLoose(StreamController, _BaseStreamController);
|
||
|
||
function StreamController(hls, fragmentTracker) {
|
||
var _this;
|
||
|
||
_this = _BaseStreamController.call(this, hls, fragmentTracker, '[stream-controller]') || this;
|
||
_this.audioCodecSwap = false;
|
||
_this.gapController = null;
|
||
_this.level = -1;
|
||
_this._forceStartLoad = false;
|
||
_this.altAudio = false;
|
||
_this.audioOnly = false;
|
||
_this.fragPlaying = null;
|
||
_this.onvplaying = null;
|
||
_this.onvseeked = null;
|
||
_this.fragLastKbps = 0;
|
||
_this.stalled = false;
|
||
_this.couldBacktrack = false;
|
||
_this.audioCodecSwitch = false;
|
||
_this.videoBuffer = null;
|
||
|
||
_this._registerListeners();
|
||
|
||
return _this;
|
||
}
|
||
|
||
var _proto = StreamController.prototype;
|
||
|
||
_proto._registerListeners = function _registerListeners() {
|
||
var hls = this.hls;
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].ERROR, this.onError, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_CREATED, this.onBufferCreated, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_FLUSHED, this.onBufferFlushed, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVELS_UPDATED, this.onLevelsUpdated, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
|
||
};
|
||
|
||
_proto._unregisterListeners = function _unregisterListeners() {
|
||
var hls = this.hls;
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].ERROR, this.onError, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_CREATED, this.onBufferCreated, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_FLUSHED, this.onBufferFlushed, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVELS_UPDATED, this.onLevelsUpdated, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_BUFFERED, this.onFragBuffered, this);
|
||
};
|
||
|
||
_proto.onHandlerDestroying = function onHandlerDestroying() {
|
||
this._unregisterListeners();
|
||
|
||
this.onMediaDetaching();
|
||
};
|
||
|
||
_proto.startLoad = function startLoad(startPosition) {
|
||
if (this.levels) {
|
||
var lastCurrentTime = this.lastCurrentTime,
|
||
hls = this.hls;
|
||
this.stopLoad();
|
||
this.setInterval(TICK_INTERVAL);
|
||
this.level = -1;
|
||
this.fragLoadError = 0;
|
||
|
||
if (!this.startFragRequested) {
|
||
// determine load level
|
||
var startLevel = hls.startLevel;
|
||
|
||
if (startLevel === -1) {
|
||
if (hls.config.testBandwidth) {
|
||
// -1 : guess start Level by doing a bitrate test by loading first fragment of lowest quality level
|
||
startLevel = 0;
|
||
this.bitrateTest = true;
|
||
} else {
|
||
startLevel = hls.nextAutoLevel;
|
||
}
|
||
} // set new level to playlist loader : this will trigger start level load
|
||
// hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded
|
||
|
||
|
||
this.level = hls.nextLoadLevel = startLevel;
|
||
this.loadedmetadata = false;
|
||
} // if startPosition undefined but lastCurrentTime set, set startPosition to last currentTime
|
||
|
||
|
||
if (lastCurrentTime > 0 && startPosition === -1) {
|
||
this.log("Override startPosition with lastCurrentTime @" + lastCurrentTime.toFixed(3));
|
||
startPosition = lastCurrentTime;
|
||
}
|
||
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
this.nextLoadPosition = this.startPosition = this.lastCurrentTime = startPosition;
|
||
this.tick();
|
||
} else {
|
||
this._forceStartLoad = true;
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].STOPPED;
|
||
}
|
||
};
|
||
|
||
_proto.stopLoad = function stopLoad() {
|
||
this._forceStartLoad = false;
|
||
|
||
_BaseStreamController.prototype.stopLoad.call(this);
|
||
};
|
||
|
||
_proto.doTick = function doTick() {
|
||
switch (this.state) {
|
||
case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE:
|
||
this.doTickIdle();
|
||
break;
|
||
|
||
case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_LEVEL:
|
||
{
|
||
var _levels$level;
|
||
|
||
var levels = this.levels,
|
||
level = this.level;
|
||
var details = levels === null || levels === void 0 ? void 0 : (_levels$level = levels[level]) === null || _levels$level === void 0 ? void 0 : _levels$level.details;
|
||
|
||
if (details && (!details.live || this.levelLastLoaded === this.level)) {
|
||
if (this.waitForCdnTuneIn(details)) {
|
||
break;
|
||
}
|
||
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
break;
|
||
}
|
||
|
||
break;
|
||
}
|
||
|
||
case _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].FRAG_LOADING_WAITING_RETRY:
|
||
{
|
||
var _this$media;
|
||
|
||
var now = self.performance.now();
|
||
var retryDate = this.retryDate; // if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
|
||
|
||
if (!retryDate || now >= retryDate || (_this$media = this.media) !== null && _this$media !== void 0 && _this$media.seeking) {
|
||
this.log('retryDate reached, switch back to IDLE state');
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
}
|
||
}
|
||
break;
|
||
|
||
default:
|
||
break;
|
||
} // check buffer
|
||
// check/update current fragment
|
||
|
||
|
||
this.onTickEnd();
|
||
};
|
||
|
||
_proto.onTickEnd = function onTickEnd() {
|
||
_BaseStreamController.prototype.onTickEnd.call(this);
|
||
|
||
this.checkBuffer();
|
||
this.checkFragmentChanged();
|
||
};
|
||
|
||
_proto.doTickIdle = function doTickIdle() {
|
||
var _frag$decryptdata, _frag$decryptdata2;
|
||
|
||
var hls = this.hls,
|
||
levelLastLoaded = this.levelLastLoaded,
|
||
levels = this.levels,
|
||
media = this.media;
|
||
var config = hls.config,
|
||
level = hls.nextLoadLevel; // if start level not parsed yet OR
|
||
// if video not attached AND start fragment already requested OR start frag prefetch not enabled
|
||
// exit loop, as we either need more info (level not parsed) or we need media to be attached to load new fragment
|
||
|
||
if (levelLastLoaded === null || !media && (this.startFragRequested || !config.startFragPrefetch)) {
|
||
return;
|
||
} // If the "main" level is audio-only but we are loading an alternate track in the same group, do not load anything
|
||
|
||
|
||
if (this.altAudio && this.audioOnly) {
|
||
return;
|
||
}
|
||
|
||
if (!levels || !levels[level]) {
|
||
return;
|
||
}
|
||
|
||
var levelInfo = levels[level]; // if buffer length is less than maxBufLen try to load a new fragment
|
||
// set next load level : this will trigger a playlist load if needed
|
||
|
||
this.level = hls.nextLoadLevel = level;
|
||
var levelDetails = levelInfo.details; // if level info not retrieved yet, switch state and wait for level retrieval
|
||
// if live playlist, ensure that new playlist has been refreshed to avoid loading/try to load
|
||
// a useless and outdated fragment (that might even introduce load error if it is already out of the live playlist)
|
||
|
||
if (!levelDetails || this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_LEVEL || levelDetails.live && this.levelLastLoaded !== level) {
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_LEVEL;
|
||
return;
|
||
}
|
||
|
||
var bufferInfo = this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : media, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN);
|
||
|
||
if (bufferInfo === null) {
|
||
return;
|
||
}
|
||
|
||
var bufferLen = bufferInfo.len; // compute max Buffer Length that we could get from this load level, based on level bitrate. don't buffer more than 60 MB and more than 30s
|
||
|
||
var maxBufLen = this.getMaxBufferLength(levelInfo.maxBitrate); // Stay idle if we are still with buffer margins
|
||
|
||
if (bufferLen >= maxBufLen) {
|
||
return;
|
||
}
|
||
|
||
if (this._streamEnded(bufferInfo, levelDetails)) {
|
||
var data = {};
|
||
|
||
if (this.altAudio) {
|
||
data.type = 'video';
|
||
}
|
||
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_EOS, data);
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].ENDED;
|
||
return;
|
||
}
|
||
|
||
var targetBufferTime = bufferInfo.end;
|
||
var frag = this.getNextFragment(targetBufferTime, levelDetails); // Avoid backtracking after seeking or switching by loading an earlier segment in streams that could backtrack
|
||
|
||
if (this.couldBacktrack && !this.fragPrevious && frag && frag.sn !== 'initSegment') {
|
||
var fragIdx = frag.sn - levelDetails.startSN;
|
||
|
||
if (fragIdx > 1) {
|
||
frag = levelDetails.fragments[fragIdx - 1];
|
||
this.fragmentTracker.removeFragment(frag);
|
||
}
|
||
} // Avoid loop loading by using nextLoadPosition set for backtracking
|
||
|
||
|
||
if (frag && this.fragmentTracker.getState(frag) === _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].OK && this.nextLoadPosition > targetBufferTime) {
|
||
// Cleanup the fragment tracker before trying to find the next unbuffered fragment
|
||
var type = this.audioOnly && !this.altAudio ? _loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO : _loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].VIDEO;
|
||
this.afterBufferFlushed(media, type, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN);
|
||
frag = this.getNextFragment(this.nextLoadPosition, levelDetails);
|
||
}
|
||
|
||
if (!frag) {
|
||
return;
|
||
}
|
||
|
||
if (frag.initSegment && !frag.initSegment.data && !this.bitrateTest) {
|
||
frag = frag.initSegment;
|
||
} // We want to load the key if we're dealing with an identity key, because we will decrypt
|
||
// this content using the key we fetch. Other keys will be handled by the DRM CDM via EME.
|
||
|
||
|
||
if (((_frag$decryptdata = frag.decryptdata) === null || _frag$decryptdata === void 0 ? void 0 : _frag$decryptdata.keyFormat) === 'identity' && !((_frag$decryptdata2 = frag.decryptdata) !== null && _frag$decryptdata2 !== void 0 && _frag$decryptdata2.key)) {
|
||
this.loadKey(frag, levelDetails);
|
||
} else {
|
||
this.loadFragment(frag, levelDetails, targetBufferTime);
|
||
}
|
||
};
|
||
|
||
_proto.loadFragment = function loadFragment(frag, levelDetails, targetBufferTime) {
|
||
var _this$media2;
|
||
|
||
// Check if fragment is not loaded
|
||
var fragState = this.fragmentTracker.getState(frag);
|
||
this.fragCurrent = frag; // Use data from loaded backtracked fragment if available
|
||
|
||
if (fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].BACKTRACKED) {
|
||
var data = this.fragmentTracker.getBacktrackData(frag);
|
||
|
||
if (data) {
|
||
this._handleFragmentLoadProgress(data);
|
||
|
||
this._handleFragmentLoadComplete(data);
|
||
|
||
return;
|
||
} else {
|
||
fragState = _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].NOT_LOADED;
|
||
}
|
||
}
|
||
|
||
if (fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].NOT_LOADED || fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].PARTIAL) {
|
||
if (frag.sn === 'initSegment') {
|
||
this._loadInitSegment(frag);
|
||
} else if (this.bitrateTest) {
|
||
frag.bitrateTest = true;
|
||
this.log("Fragment " + frag.sn + " of level " + frag.level + " is being downloaded to test bitrate and will not be buffered");
|
||
|
||
this._loadBitrateTestFrag(frag);
|
||
} else {
|
||
this.startFragRequested = true;
|
||
|
||
_BaseStreamController.prototype.loadFragment.call(this, frag, levelDetails, targetBufferTime);
|
||
}
|
||
} else if (fragState === _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].APPENDING) {
|
||
// Lower the buffer size and try again
|
||
if (this.reduceMaxBufferLength(frag.duration)) {
|
||
this.fragmentTracker.removeFragment(frag);
|
||
}
|
||
} else if (((_this$media2 = this.media) === null || _this$media2 === void 0 ? void 0 : _this$media2.buffered.length) === 0) {
|
||
// Stop gap for bad tracker / buffer flush behavior
|
||
this.fragmentTracker.removeAllFragments();
|
||
}
|
||
};
|
||
|
||
_proto.getAppendedFrag = function getAppendedFrag(position) {
|
||
var fragOrPart = this.fragmentTracker.getAppendedFrag(position, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN);
|
||
|
||
if (fragOrPart && 'fragment' in fragOrPart) {
|
||
return fragOrPart.fragment;
|
||
}
|
||
|
||
return fragOrPart;
|
||
};
|
||
|
||
_proto.getBufferedFrag = function getBufferedFrag(position) {
|
||
return this.fragmentTracker.getBufferedFrag(position, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN);
|
||
};
|
||
|
||
_proto.followingBufferedFrag = function followingBufferedFrag(frag) {
|
||
if (frag) {
|
||
// try to get range of next fragment (500ms after this range)
|
||
return this.getBufferedFrag(frag.end + 0.5);
|
||
}
|
||
|
||
return null;
|
||
}
|
||
/*
|
||
on immediate level switch :
|
||
- pause playback if playing
|
||
- cancel any pending load request
|
||
- and trigger a buffer flush
|
||
*/
|
||
;
|
||
|
||
_proto.immediateLevelSwitch = function immediateLevelSwitch() {
|
||
this.abortCurrentFrag();
|
||
this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
|
||
}
|
||
/**
|
||
* try to switch ASAP without breaking video playback:
|
||
* in order to ensure smooth but quick level switching,
|
||
* we need to find the next flushable buffer range
|
||
* we should take into account new segment fetch time
|
||
*/
|
||
;
|
||
|
||
_proto.nextLevelSwitch = function nextLevelSwitch() {
|
||
var levels = this.levels,
|
||
media = this.media; // ensure that media is defined and that metadata are available (to retrieve currentTime)
|
||
|
||
if (media !== null && media !== void 0 && media.readyState) {
|
||
var fetchdelay;
|
||
var fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
|
||
|
||
if (fragPlayingCurrent && fragPlayingCurrent.start > 1) {
|
||
// flush buffer preceding current fragment (flush until current fragment start offset)
|
||
// minus 1s to avoid video freezing, which could happen if we flush the keyframe of the current video ...
|
||
this.flushMainBuffer(0, fragPlayingCurrent.start - 1);
|
||
}
|
||
|
||
if (!media.paused && levels) {
|
||
// add a safety delay of 1s
|
||
var nextLevelId = this.hls.nextLoadLevel;
|
||
var nextLevel = levels[nextLevelId];
|
||
var fragLastKbps = this.fragLastKbps;
|
||
|
||
if (fragLastKbps && this.fragCurrent) {
|
||
fetchdelay = this.fragCurrent.duration * nextLevel.maxBitrate / (1000 * fragLastKbps) + 1;
|
||
} else {
|
||
fetchdelay = 0;
|
||
}
|
||
} else {
|
||
fetchdelay = 0;
|
||
} // this.log('fetchdelay:'+fetchdelay);
|
||
// find buffer range that will be reached once new fragment will be fetched
|
||
|
||
|
||
var bufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay);
|
||
|
||
if (bufferedFrag) {
|
||
// we can flush buffer range following this one without stalling playback
|
||
var nextBufferedFrag = this.followingBufferedFrag(bufferedFrag);
|
||
|
||
if (nextBufferedFrag) {
|
||
// if we are here, we can also cancel any loading/demuxing in progress, as they are useless
|
||
this.abortCurrentFrag(); // start flush position is in next buffered frag. Leave some padding for non-independent segments and smoother playback.
|
||
|
||
var maxStart = nextBufferedFrag.maxStartPTS ? nextBufferedFrag.maxStartPTS : nextBufferedFrag.start;
|
||
var fragDuration = nextBufferedFrag.duration;
|
||
var startPts = Math.max(bufferedFrag.end, maxStart + Math.min(Math.max(fragDuration - this.config.maxFragLookUpTolerance, fragDuration * 0.5), fragDuration * 0.75));
|
||
this.flushMainBuffer(startPts, Number.POSITIVE_INFINITY);
|
||
}
|
||
}
|
||
}
|
||
};
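// fetchdelay sketch (illustrative numbers): with a 6s fragment currently loading, a next level whose
// maxBitrate is 3,000,000 bps and a last measured throughput of 4,000 kbps, the estimate above is
//   6 * 3000000 / (1000 * 4000) + 1 = 5.5 seconds,
// i.e. the time to re-fetch the fragment at the new quality plus a 1s safety margin. The flush then starts
// just past the fragment that playback will have reached by currentTime + fetchdelay, so the switch does
// not stall playback.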
|
||
|
||
_proto.abortCurrentFrag = function abortCurrentFrag() {
|
||
var fragCurrent = this.fragCurrent;
|
||
this.fragCurrent = null;
|
||
|
||
if (fragCurrent !== null && fragCurrent !== void 0 && fragCurrent.loader) {
|
||
fragCurrent.loader.abort();
|
||
}
|
||
|
||
if (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].KEY_LOADING) {
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
}
|
||
|
||
this.nextLoadPosition = this.getLoadPosition();
|
||
};
|
||
|
||
_proto.flushMainBuffer = function flushMainBuffer(startOffset, endOffset) {
|
||
_BaseStreamController.prototype.flushMainBuffer.call(this, startOffset, endOffset, this.altAudio ? 'video' : null);
|
||
};
|
||
|
||
_proto.onMediaAttached = function onMediaAttached(event, data) {
|
||
_BaseStreamController.prototype.onMediaAttached.call(this, event, data);
|
||
|
||
var media = data.media;
|
||
this.onvplaying = this.onMediaPlaying.bind(this);
|
||
this.onvseeked = this.onMediaSeeked.bind(this);
|
||
media.addEventListener('playing', this.onvplaying);
|
||
media.addEventListener('seeked', this.onvseeked);
|
||
this.gapController = new _gap_controller__WEBPACK_IMPORTED_MODULE_10__["default"](this.config, media, this.fragmentTracker, this.hls);
|
||
};
|
||
|
||
_proto.onMediaDetaching = function onMediaDetaching() {
|
||
var media = this.media;
|
||
|
||
if (media) {
|
||
media.removeEventListener('playing', this.onvplaying);
|
||
media.removeEventListener('seeked', this.onvseeked);
|
||
this.onvplaying = this.onvseeked = null;
|
||
this.videoBuffer = null;
|
||
}
|
||
|
||
this.fragPlaying = null;
|
||
|
||
if (this.gapController) {
|
||
this.gapController.destroy();
|
||
this.gapController = null;
|
||
}
|
||
|
||
_BaseStreamController.prototype.onMediaDetaching.call(this);
|
||
};
|
||
|
||
_proto.onMediaPlaying = function onMediaPlaying() {
|
||
// tick to speed up FRAG_CHANGED triggering
|
||
this.tick();
|
||
};
|
||
|
||
_proto.onMediaSeeked = function onMediaSeeked() {
|
||
var media = this.media;
|
||
var currentTime = media ? media.currentTime : null;
|
||
|
||
if (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(currentTime)) {
|
||
this.log("Media seeked to " + currentTime.toFixed(3));
|
||
} // tick to speed up FRAG_CHANGED triggering
|
||
|
||
|
||
this.tick();
|
||
};
|
||
|
||
_proto.onManifestLoading = function onManifestLoading() {
|
||
// reset buffer on manifest loading
|
||
this.log('Trigger BUFFER_RESET');
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_RESET, undefined);
|
||
this.fragmentTracker.removeAllFragments();
|
||
this.couldBacktrack = this.stalled = false;
|
||
this.startPosition = this.lastCurrentTime = 0;
|
||
this.fragPlaying = null;
|
||
};
|
||
|
||
_proto.onManifestParsed = function onManifestParsed(event, data) {
|
||
var aac = false;
|
||
var heaac = false;
|
||
var codec;
|
||
data.levels.forEach(function (level) {
|
||
// detect if different kinds of audio codecs are used amongst the playlists
|
||
codec = level.audioCodec;
|
||
|
||
if (codec) {
|
||
if (codec.indexOf('mp4a.40.2') !== -1) {
|
||
aac = true;
|
||
}
|
||
|
||
if (codec.indexOf('mp4a.40.5') !== -1) {
|
||
heaac = true;
|
||
}
|
||
}
|
||
});
|
||
this.audioCodecSwitch = aac && heaac && !Object(_is_supported__WEBPACK_IMPORTED_MODULE_2__["changeTypeSupported"])();
|
||
|
||
if (this.audioCodecSwitch) {
|
||
this.log('Both AAC/HE-AAC audio found in levels; declaring level codec as HE-AAC');
|
||
}
|
||
|
||
this.levels = data.levels;
|
||
this.startFragRequested = false;
|
||
};
|
||
|
||
_proto.onLevelLoading = function onLevelLoading(event, data) {
|
||
var levels = this.levels;
|
||
|
||
if (!levels || this.state !== _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE) {
|
||
return;
|
||
}
|
||
|
||
var level = levels[data.level];
|
||
|
||
if (!level.details || level.details.live && this.levelLastLoaded !== data.level || this.waitForCdnTuneIn(level.details)) {
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_LEVEL;
|
||
}
|
||
};
|
||
|
||
_proto.onLevelLoaded = function onLevelLoaded(event, data) {
|
||
var _curLevel$details;
|
||
|
||
var levels = this.levels;
|
||
var newLevelId = data.level;
|
||
var newDetails = data.details;
|
||
var duration = newDetails.totalduration;
|
||
|
||
if (!levels) {
|
||
this.warn("Levels were reset while loading level " + newLevelId);
|
||
return;
|
||
}
|
||
|
||
this.log("Level " + newLevelId + " loaded [" + newDetails.startSN + "," + newDetails.endSN + "], cc [" + newDetails.startCC + ", " + newDetails.endCC + "] duration:" + duration);
|
||
var fragCurrent = this.fragCurrent;
|
||
|
||
if (fragCurrent && (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].FRAG_LOADING || this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].FRAG_LOADING_WAITING_RETRY)) {
|
||
if (fragCurrent.level !== data.level && fragCurrent.loader) {
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
fragCurrent.loader.abort();
|
||
}
|
||
}
|
||
|
||
var curLevel = levels[newLevelId];
|
||
var sliding = 0;
|
||
|
||
if (newDetails.live || (_curLevel$details = curLevel.details) !== null && _curLevel$details !== void 0 && _curLevel$details.live) {
|
||
if (!newDetails.fragments[0]) {
|
||
newDetails.deltaUpdateFailed = true;
|
||
}
|
||
|
||
if (newDetails.deltaUpdateFailed) {
|
||
return;
|
||
}
|
||
|
||
sliding = this.alignPlaylists(newDetails, curLevel.details);
|
||
} // override level info
|
||
|
||
|
||
curLevel.details = newDetails;
|
||
this.levelLastLoaded = newLevelId;
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVEL_UPDATED, {
|
||
details: newDetails,
|
||
level: newLevelId
|
||
}); // only switch back to IDLE state if we were waiting for level to start downloading a new fragment
|
||
|
||
if (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_LEVEL) {
|
||
if (this.waitForCdnTuneIn(newDetails)) {
|
||
// Wait for Low-Latency CDN Tune-in
|
||
return;
|
||
}
|
||
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
}
|
||
|
||
if (!this.startFragRequested) {
|
||
this.setStartPosition(newDetails, sliding);
|
||
} else if (newDetails.live) {
|
||
this.synchronizeToLiveEdge(newDetails);
|
||
} // trigger handler right now
|
||
|
||
|
||
this.tick();
|
||
};
|
||
|
||
_proto._handleFragmentLoadProgress = function _handleFragmentLoadProgress(data) {
|
||
var _frag$initSegment;
|
||
|
||
var frag = data.frag,
|
||
part = data.part,
|
||
payload = data.payload;
|
||
var levels = this.levels;
|
||
|
||
if (!levels) {
|
||
this.warn("Levels were reset while fragment load was in progress. Fragment " + frag.sn + " of level " + frag.level + " will not be buffered");
|
||
return;
|
||
}
|
||
|
||
var currentLevel = levels[frag.level];
|
||
var details = currentLevel.details;
|
||
|
||
if (!details) {
|
||
this.warn("Dropping fragment " + frag.sn + " of level " + frag.level + " after level details were reset");
|
||
return;
|
||
}
|
||
|
||
var videoCodec = currentLevel.videoCodec; // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
|
||
|
||
var accurateTimeOffset = details.PTSKnown || !details.live;
|
||
var initSegmentData = (_frag$initSegment = frag.initSegment) === null || _frag$initSegment === void 0 ? void 0 : _frag$initSegment.data;
|
||
|
||
var audioCodec = this._getAudioCodec(currentLevel); // transmux the MPEG-TS data to ISO-BMFF segments
|
||
// this.log(`Transmuxing ${frag.sn} of [${details.startSN} ,${details.endSN}],level ${frag.level}, cc ${frag.cc}`);
|
||
|
||
|
||
var transmuxer = this.transmuxer = this.transmuxer || new _demux_transmuxer_interface__WEBPACK_IMPORTED_MODULE_8__["default"](this.hls, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN, this._handleTransmuxComplete.bind(this), this._handleTransmuxerFlush.bind(this));
|
||
var partIndex = part ? part.index : -1;
|
||
var partial = partIndex !== -1;
|
||
var chunkMeta = new _types_transmuxer__WEBPACK_IMPORTED_MODULE_9__["ChunkMetadata"](frag.level, frag.sn, frag.stats.chunkCount, payload.byteLength, partIndex, partial);
|
||
var initPTS = this.initPTS[frag.cc];
|
||
transmuxer.push(payload, initSegmentData, audioCodec, videoCodec, frag, part, details.totalduration, accurateTimeOffset, chunkMeta, initPTS);
|
||
};
|
||
|
||
_proto.onAudioTrackSwitching = function onAudioTrackSwitching(event, data) {
|
||
// if any URL found on new audio track, it is an alternate audio track
|
||
var fromAltAudio = this.altAudio;
|
||
var altAudio = !!data.url;
|
||
var trackId = data.id; // if we switch on main audio, ensure that main fragment scheduling is synced with media.buffered
|
||
// don't do anything if we switch to alt audio: audio stream controller is handling it.
|
||
// we will just have to change buffer scheduling on audioTrackSwitched
|
||
|
||
if (!altAudio) {
|
||
if (this.mediaBuffer !== this.media) {
|
||
this.log('Switching on main audio, use media.buffered to schedule main fragment loading');
|
||
this.mediaBuffer = this.media;
|
||
var fragCurrent = this.fragCurrent; // we need to refill audio buffer from main: cancel any frag loading to speed up audio switch
|
||
|
||
if (fragCurrent !== null && fragCurrent !== void 0 && fragCurrent.loader) {
|
||
this.log('Switching to main audio track, cancel main fragment load');
|
||
fragCurrent.loader.abort();
|
||
} // destroy transmuxer to force init segment generation (following audio switch)
|
||
|
||
|
||
this.resetTransmuxer(); // switch to IDLE state to load new fragment
|
||
|
||
this.resetLoadingState();
|
||
} else if (this.audioOnly) {
|
||
// Reset audio transmuxer so when switching back to main audio we're not still appending where we left off
|
||
this.resetTransmuxer();
|
||
}
|
||
|
||
var hls = this.hls; // If switching from alt to main audio, flush all audio and trigger track switched
|
||
|
||
if (fromAltAudio) {
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_FLUSHING, {
|
||
startOffset: 0,
|
||
endOffset: Number.POSITIVE_INFINITY,
|
||
type: 'audio'
|
||
});
|
||
}
|
||
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].AUDIO_TRACK_SWITCHED, {
|
||
id: trackId
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto.onAudioTrackSwitched = function onAudioTrackSwitched(event, data) {
|
||
var trackId = data.id;
|
||
var altAudio = !!this.hls.audioTracks[trackId].url;
|
||
|
||
if (altAudio) {
|
||
var videoBuffer = this.videoBuffer; // if we switched on alternate audio, ensure that main fragment scheduling is synced with video sourcebuffer buffered
|
||
|
||
if (videoBuffer && this.mediaBuffer !== videoBuffer) {
|
||
this.log('Switching on alternate audio, use video.buffered to schedule main fragment loading');
|
||
this.mediaBuffer = videoBuffer;
|
||
}
|
||
}
|
||
|
||
this.altAudio = altAudio;
|
||
this.tick();
|
||
};
|
||
|
||
_proto.onBufferCreated = function onBufferCreated(event, data) {
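// Note: chooses which buffered ranges drive main fragment scheduling: the 'main' track's
// SourceBuffer when an alternate track exists, otherwise the media element itself.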
|
||
var tracks = data.tracks;
|
||
var mediaTrack;
|
||
var name;
|
||
var alternate = false;
|
||
|
||
for (var type in tracks) {
|
||
var track = tracks[type];
|
||
|
||
if (track.id === 'main') {
|
||
name = type;
|
||
mediaTrack = track; // keep video source buffer reference
|
||
|
||
if (type === 'video') {
|
||
var videoTrack = tracks[type];
|
||
|
||
if (videoTrack) {
|
||
this.videoBuffer = videoTrack.buffer;
|
||
}
|
||
}
|
||
} else {
|
||
alternate = true;
|
||
}
|
||
}
|
||
|
||
if (alternate && mediaTrack) {
|
||
this.log("Alternate track found, use " + name + ".buffered to schedule main fragment loading");
|
||
this.mediaBuffer = mediaTrack.buffer;
|
||
} else {
|
||
this.mediaBuffer = this.media;
|
||
}
|
||
};
|
||
|
||
_proto.onFragBuffered = function onFragBuffered(event, data) {
|
||
var frag = data.frag,
|
||
part = data.part;
|
||
|
||
if (frag && frag.type !== _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN) {
|
||
return;
|
||
}
|
||
|
||
if (this.fragContextChanged(frag)) {
|
||
// If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
|
||
// Avoid setting state back to IDLE, since that will interfere with a level switch
|
||
this.warn("Fragment " + frag.sn + (part ? ' p: ' + part.index : '') + " of level " + frag.level + " finished buffering, but was aborted. state: " + this.state);
|
||
|
||
if (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSED) {
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
}
|
||
|
||
return;
|
||
}
|
||
|
||
var stats = part ? part.stats : frag.stats;
|
||
this.fragLastKbps = Math.round(8 * stats.total / (stats.buffering.end - stats.loading.first));
|
||
|
||
if (frag.sn !== 'initSegment') {
|
||
this.fragPrevious = frag;
|
||
}
|
||
|
||
this.fragBufferedComplete(frag, part);
|
||
};
|
||
|
||
_proto.onError = function onError(event, data) {
|
||
switch (data.details) {
|
||
case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].FRAG_LOAD_ERROR:
|
||
case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].FRAG_LOAD_TIMEOUT:
|
||
case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].KEY_LOAD_ERROR:
|
||
case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].KEY_LOAD_TIMEOUT:
|
||
this.onFragmentOrKeyLoadError(_types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN, data);
|
||
break;
|
||
|
||
case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].LEVEL_LOAD_ERROR:
|
||
case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].LEVEL_LOAD_TIMEOUT:
|
||
if (this.state !== _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].ERROR) {
|
||
if (data.fatal) {
|
||
// if fatal error, stop processing
|
||
this.warn("" + data.details);
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].ERROR;
|
||
} else {
|
||
// in case of a non-fatal error while loading a level, if the level controller is not retrying to load the level, switch back to IDLE
|
||
if (!data.levelRetry && this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].WAITING_LEVEL) {
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
}
|
||
}
|
||
}
|
||
|
||
break;
|
||
|
||
case _errors__WEBPACK_IMPORTED_MODULE_11__["ErrorDetails"].BUFFER_FULL_ERROR:
|
||
// if in appending state
|
||
if (data.parent === 'main' && (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSING || this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSED)) {
|
||
var flushBuffer = true;
|
||
var bufferedInfo = this.getFwdBufferInfo(this.media, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN); // 0.5 : tolerance needed as some browsers stall playback before reaching buffered end
|
||
// reduce max buf len if current position is buffered
|
||
|
||
if (bufferedInfo && bufferedInfo.len > 0.5) {
|
||
flushBuffer = !this.reduceMaxBufferLength(bufferedInfo.len);
|
||
}
|
||
|
||
if (flushBuffer) {
|
||
// current position is not buffered, but browser is still complaining about buffer full error
|
||
// this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
|
||
// in that case flush the whole buffer to recover
|
||
this.warn('buffer full error also media.currentTime is not buffered, flush main'); // flush main buffer
|
||
|
||
this.immediateLevelSwitch();
|
||
}
|
||
|
||
this.resetLoadingState();
|
||
}
|
||
|
||
break;
|
||
|
||
default:
|
||
break;
|
||
}
|
||
} // Checks the health of the buffer and attempts to resolve playback stalls.
|
||
;
|
||
|
||
_proto.checkBuffer = function checkBuffer() {
|
||
var media = this.media,
|
||
gapController = this.gapController;
|
||
|
||
if (!media || !gapController || !media.readyState) {
|
||
// Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
|
||
return;
|
||
} // Check combined buffer
|
||
|
||
|
||
var buffered = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__["BufferHelper"].getBuffered(media);
|
||
|
||
if (!this.loadedmetadata && buffered.length) {
|
||
this.loadedmetadata = true;
|
||
this.seekToStartPos();
|
||
} else {
|
||
// Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
|
||
gapController.poll(this.lastCurrentTime);
|
||
}
|
||
|
||
this.lastCurrentTime = media.currentTime;
|
||
};
|
||
|
||
_proto.onFragLoadEmergencyAborted = function onFragLoadEmergencyAborted() {
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE; // if loadedmetadata is not set, it means that we are doing an emergency switch-down on the first frag
|
||
// in that case, reset startFragRequested flag
|
||
|
||
if (!this.loadedmetadata) {
|
||
this.startFragRequested = false;
|
||
this.nextLoadPosition = this.startPosition;
|
||
}
|
||
|
||
this.tickImmediate();
|
||
};
|
||
|
||
_proto.onBufferFlushed = function onBufferFlushed(event, _ref) {
|
||
var type = _ref.type;
|
||
|
||
if (type !== _loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO || this.audioOnly && !this.altAudio) {
|
||
var media = (type === _loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].VIDEO ? this.videoBuffer : this.mediaBuffer) || this.media;
|
||
this.afterBufferFlushed(media, type, _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN);
|
||
}
|
||
};
|
||
|
||
_proto.onLevelsUpdated = function onLevelsUpdated(event, data) {
|
||
this.levels = data.levels;
|
||
};
|
||
|
||
_proto.swapAudioCodec = function swapAudioCodec() {
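// Note: toggles between AAC-LC ('mp4a.40.2') and HE-AAC ('mp4a.40.5') for subsequent init segments;
// typically used as a recovery step when audio fails to decode with the signalled codec.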
|
||
this.audioCodecSwap = !this.audioCodecSwap;
|
||
}
|
||
/**
|
||
* Seeks to the set startPosition if not equal to the mediaElement's current time.
|
||
* @private
|
||
*/
|
||
;
|
||
|
||
_proto.seekToStartPos = function seekToStartPos() {
|
||
var media = this.media;
|
||
var currentTime = media.currentTime;
|
||
var startPosition = this.startPosition; // only adjust currentTime if different from startPosition or if startPosition not buffered
|
||
// at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered
|
||
|
||
if (startPosition >= 0 && currentTime < startPosition) {
|
||
if (media.seeking) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_12__["logger"].log("could not seek to " + startPosition + ", already seeking at " + currentTime);
|
||
return;
|
||
}
|
||
|
||
var buffered = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__["BufferHelper"].getBuffered(media);
|
||
var bufferStart = buffered.length ? buffered.start(0) : 0;
|
||
var delta = bufferStart - startPosition;
|
||
|
||
if (delta > 0 && (delta < this.config.maxBufferHole || delta < this.config.maxFragLookUpTolerance)) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_12__["logger"].log("adjusting start position by " + delta + " to match buffer start");
|
||
startPosition += delta;
|
||
this.startPosition = startPosition;
|
||
}
|
||
|
||
this.log("seek to target start position " + startPosition + " from current time " + currentTime);
|
||
media.currentTime = startPosition;
|
||
}
|
||
};
|
||
|
||
_proto._getAudioCodec = function _getAudioCodec(currentLevel) {
|
||
var audioCodec = this.config.defaultAudioCodec || currentLevel.audioCodec;
|
||
|
||
if (this.audioCodecSwap && audioCodec) {
|
||
this.log('Swapping audio codec');
|
||
|
||
if (audioCodec.indexOf('mp4a.40.5') !== -1) {
|
||
audioCodec = 'mp4a.40.2';
|
||
} else {
|
||
audioCodec = 'mp4a.40.5';
|
||
}
|
||
}
|
||
|
||
return audioCodec;
|
||
};
|
||
|
||
_proto._loadBitrateTestFrag = function _loadBitrateTestFrag(frag) {
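// Note: loads a fragment purely to measure bandwidth; the result is re-emitted as FRAG_LOADED
// without being parsed or buffered (parsing/buffering stats are stubbed with the current time).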
|
||
var _this2 = this;
|
||
|
||
this._doFragLoad(frag).then(function (data) {
|
||
var hls = _this2.hls;
|
||
|
||
if (!data || hls.nextLoadLevel || _this2.fragContextChanged(frag)) {
|
||
return;
|
||
}
|
||
|
||
_this2.fragLoadError = 0;
|
||
_this2.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].IDLE;
|
||
_this2.startFragRequested = false;
|
||
_this2.bitrateTest = false;
|
||
var stats = frag.stats; // Bitrate tests fragments are neither parsed nor buffered
|
||
|
||
stats.parsing.start = stats.parsing.end = stats.buffering.start = stats.buffering.end = self.performance.now();
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_LOADED, data);
|
||
});
|
||
};
|
||
|
||
_proto._handleTransmuxComplete = function _handleTransmuxComplete(transmuxResult) {
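// Note: invoked for each transmuxed chunk; buffers init segments and video/audio data, forwards
// ID3/text samples, and backtracks when a chunk is not independent (no leading keyframe).
// Chunks whose loading context has changed in the meantime are dropped.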
|
||
var _id3$samples;
|
||
|
||
var id = 'main';
|
||
var hls = this.hls;
|
||
var remuxResult = transmuxResult.remuxResult,
|
||
chunkMeta = transmuxResult.chunkMeta;
|
||
var context = this.getCurrentContext(chunkMeta);
|
||
|
||
if (!context) {
|
||
this.warn("The loading context changed while buffering fragment " + chunkMeta.sn + " of level " + chunkMeta.level + ". This chunk will not be buffered.");
|
||
this.resetLiveStartWhenNotLoaded(chunkMeta.level);
|
||
return;
|
||
}
|
||
|
||
var frag = context.frag,
|
||
part = context.part,
|
||
level = context.level;
|
||
var video = remuxResult.video,
|
||
text = remuxResult.text,
|
||
id3 = remuxResult.id3,
|
||
initSegment = remuxResult.initSegment; // The audio-stream-controller handles audio buffering if Hls.js is playing an alternate audio track
|
||
|
||
var audio = this.altAudio ? undefined : remuxResult.audio; // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
|
||
// If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
|
||
|
||
if (this.fragContextChanged(frag)) {
|
||
return;
|
||
}
|
||
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSING;
|
||
|
||
if (initSegment) {
|
||
if (initSegment.tracks) {
|
||
this._bufferInitSegment(level, initSegment.tracks, frag, chunkMeta);
|
||
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_PARSING_INIT_SEGMENT, {
|
||
frag: frag,
|
||
id: id,
|
||
tracks: initSegment.tracks
|
||
});
|
||
} // It would be nice if Number.isFinite acted as a type guard, but it doesn't. See: https://github.com/Microsoft/TypeScript/issues/10038
|
||
|
||
|
||
var initPTS = initSegment.initPTS;
|
||
var timescale = initSegment.timescale;
|
||
|
||
if (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(initPTS)) {
|
||
this.initPTS[frag.cc] = initPTS;
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].INIT_PTS_FOUND, {
|
||
frag: frag,
|
||
id: id,
|
||
initPTS: initPTS,
|
||
timescale: timescale
|
||
});
|
||
}
|
||
} // Avoid buffering if backtracking this fragment
|
||
|
||
|
||
if (video && remuxResult.independent !== false) {
|
||
if (level.details) {
|
||
var startPTS = video.startPTS,
|
||
endPTS = video.endPTS,
|
||
startDTS = video.startDTS,
|
||
endDTS = video.endDTS;
|
||
|
||
if (part) {
|
||
part.elementaryStreams[video.type] = {
|
||
startPTS: startPTS,
|
||
endPTS: endPTS,
|
||
startDTS: startDTS,
|
||
endDTS: endDTS
|
||
};
|
||
} else {
|
||
if (video.firstKeyFrame && video.independent) {
|
||
this.couldBacktrack = true;
|
||
}
|
||
|
||
if (video.dropped && video.independent) {
|
||
// Backtrack if dropped frames create a gap after currentTime
|
||
var pos = this.getLoadPosition() + this.config.maxBufferHole;
|
||
|
||
if (pos < startPTS) {
|
||
this.backtrack(frag);
|
||
return;
|
||
} // Set video stream start to fragment start so that truncated samples do not distort the timeline, and mark it partial
|
||
|
||
|
||
frag.setElementaryStreamInfo(video.type, frag.start, endPTS, frag.start, endDTS, true);
|
||
}
|
||
}
|
||
|
||
frag.setElementaryStreamInfo(video.type, startPTS, endPTS, startDTS, endDTS);
|
||
this.bufferFragmentData(video, frag, part, chunkMeta);
|
||
}
|
||
} else if (remuxResult.independent === false) {
|
||
this.backtrack(frag);
|
||
return;
|
||
}
|
||
|
||
if (audio) {
|
||
var _startPTS = audio.startPTS,
|
||
_endPTS = audio.endPTS,
|
||
_startDTS = audio.startDTS,
|
||
_endDTS = audio.endDTS;
|
||
|
||
if (part) {
|
||
part.elementaryStreams[_loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO] = {
|
||
startPTS: _startPTS,
|
||
endPTS: _endPTS,
|
||
startDTS: _startDTS,
|
||
endDTS: _endDTS
|
||
};
|
||
}
|
||
|
||
frag.setElementaryStreamInfo(_loader_fragment__WEBPACK_IMPORTED_MODULE_7__["ElementaryStreamTypes"].AUDIO, _startPTS, _endPTS, _startDTS, _endDTS);
|
||
this.bufferFragmentData(audio, frag, part, chunkMeta);
|
||
}
|
||
|
||
if (id3 !== null && id3 !== void 0 && (_id3$samples = id3.samples) !== null && _id3$samples !== void 0 && _id3$samples.length) {
|
||
var emittedID3 = {
|
||
frag: frag,
|
||
id: id,
|
||
samples: id3.samples
|
||
};
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_PARSING_METADATA, emittedID3);
|
||
}
|
||
|
||
if (text) {
|
||
var emittedText = {
|
||
frag: frag,
|
||
id: id,
|
||
samples: text.samples
|
||
};
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_PARSING_USERDATA, emittedText);
|
||
}
|
||
};
|
||
|
||
_proto._bufferInitSegment = function _bufferInitSegment(currentLevel, tracks, frag, chunkMeta) {
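// Note: resolves the audio/video codecs to signal (including the AAC/HE-AAC swap and the Android
// workaround), triggers BUFFER_CODECS, then appends each track's init segment via BUFFER_APPENDING.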
|
||
var _this3 = this;
|
||
|
||
if (this.state !== _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].PARSING) {
|
||
return;
|
||
}
|
||
|
||
this.audioOnly = !!tracks.audio && !tracks.video; // if audio track is expected to come from audio stream controller, discard any coming from main
|
||
|
||
if (this.altAudio && !this.audioOnly) {
|
||
delete tracks.audio;
|
||
} // include levelCodec in audio and video tracks
|
||
|
||
|
||
var audio = tracks.audio,
|
||
video = tracks.video,
|
||
audiovideo = tracks.audiovideo;
|
||
|
||
if (audio) {
|
||
var audioCodec = currentLevel.audioCodec;
|
||
var ua = navigator.userAgent.toLowerCase();
|
||
|
||
if (this.audioCodecSwitch) {
|
||
if (audioCodec) {
|
||
if (audioCodec.indexOf('mp4a.40.5') !== -1) {
|
||
audioCodec = 'mp4a.40.2';
|
||
} else {
|
||
audioCodec = 'mp4a.40.5';
|
||
}
|
||
} // In the case that AAC and HE-AAC audio codecs are signalled in manifest,
|
||
// force HE-AAC, as it seems that most browsers prefer it.
|
||
// don't force HE-AAC if mono stream, or in Firefox
|
||
|
||
|
||
if (audio.metadata.channelCount !== 1 && ua.indexOf('firefox') === -1) {
|
||
audioCodec = 'mp4a.40.5';
|
||
}
|
||
} // HE-AAC is broken on Android, always signal audio codec as AAC even if variant manifest states otherwise
|
||
|
||
|
||
if (ua.indexOf('android') !== -1 && audio.container !== 'audio/mpeg') {
|
||
// Exclude mpeg audio
|
||
audioCodec = 'mp4a.40.2';
|
||
this.log("Android: force audio codec to " + audioCodec);
|
||
}
|
||
|
||
if (currentLevel.audioCodec && currentLevel.audioCodec !== audioCodec) {
|
||
this.log("Swapping manifest audio codec \"" + currentLevel.audioCodec + "\" for \"" + audioCodec + "\"");
|
||
}
|
||
|
||
audio.levelCodec = audioCodec;
|
||
audio.id = 'main';
|
||
this.log("Init audio buffer, container:" + audio.container + ", codecs[selected/level/parsed]=[" + (audioCodec || '') + "/" + (currentLevel.audioCodec || '') + "/" + audio.codec + "]");
|
||
}
|
||
|
||
if (video) {
|
||
video.levelCodec = currentLevel.videoCodec;
|
||
video.id = 'main';
|
||
this.log("Init video buffer, container:" + video.container + ", codecs[level/parsed]=[" + (currentLevel.videoCodec || '') + "/" + video.codec + "]");
|
||
}
|
||
|
||
if (audiovideo) {
|
||
this.log("Init audiovideo buffer, container:" + audiovideo.container + ", codecs[level/parsed]=[" + (currentLevel.attrs.CODECS || '') + "/" + audiovideo.codec + "]");
|
||
}
|
||
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_CODECS, tracks); // loop through tracks that are going to be provided to bufferController
|
||
|
||
Object.keys(tracks).forEach(function (trackName) {
|
||
var track = tracks[trackName];
|
||
var initSegment = track.initSegment;
|
||
|
||
if (initSegment !== null && initSegment !== void 0 && initSegment.byteLength) {
|
||
_this3.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].BUFFER_APPENDING, {
|
||
type: trackName,
|
||
data: initSegment,
|
||
frag: frag,
|
||
part: null,
|
||
chunkMeta: chunkMeta,
|
||
parent: frag.type
|
||
});
|
||
}
|
||
}); // trigger handler right now
|
||
|
||
this.tick();
|
||
};
|
||
|
||
_proto.backtrack = function backtrack(frag) {
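// Note: rewinds loading to the fragment's start so fragment selection can walk back to a keyframe;
// the transmuxer is reset and the buffer gap is flushed before reloading.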
|
||
this.couldBacktrack = true; // Causes findFragments to backtrack through fragments to find the keyframe
|
||
|
||
this.resetTransmuxer();
|
||
this.flushBufferGap(frag);
|
||
var data = this.fragmentTracker.backtrack(frag);
|
||
this.fragPrevious = null;
|
||
this.nextLoadPosition = frag.start;
|
||
|
||
if (data) {
|
||
this.resetFragmentLoading(frag);
|
||
} else {
|
||
// Change state to BACKTRACKING so that fragmentEntity.backtrack data can be added after _doFragLoad
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["State"].BACKTRACKING;
|
||
}
|
||
};
|
||
|
||
_proto.checkFragmentChanged = function checkFragmentChanged() {
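// Note: derives the fragment currently being played back from media.currentTime and emits
// FRAG_CHANGED (and LEVEL_SWITCHED on a level change) whenever it differs from the last one seen.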
|
||
var video = this.media;
|
||
var fragPlayingCurrent = null;
|
||
|
||
if (video && video.readyState > 1 && video.seeking === false) {
|
||
var currentTime = video.currentTime;
|
||
/* if video element is in seeked state, currentTime can only increase.
|
||
(assuming that playback rate is positive ...)
|
||
As sometimes currentTime jumps back to zero after a
|
||
media decode error, check this, to avoid seeking back to
|
||
wrong position after a media decode error
|
||
*/
|
||
|
||
if (_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__["BufferHelper"].isBuffered(video, currentTime)) {
|
||
fragPlayingCurrent = this.getAppendedFrag(currentTime);
|
||
} else if (_utils_buffer_helper__WEBPACK_IMPORTED_MODULE_4__["BufferHelper"].isBuffered(video, currentTime + 0.1)) {
|
||
/* ensure that FRAG_CHANGED event is triggered at startup,
|
||
when first video frame is displayed and playback is paused.
|
||
add a tolerance of 100ms, in case current position is not buffered,
|
||
check if current pos+100ms is buffered and use that buffer range
|
||
for FRAG_CHANGED event reporting */
|
||
fragPlayingCurrent = this.getAppendedFrag(currentTime + 0.1);
|
||
}
|
||
|
||
if (fragPlayingCurrent) {
|
||
var fragPlaying = this.fragPlaying;
|
||
var fragCurrentLevel = fragPlayingCurrent.level;
|
||
|
||
if (!fragPlaying || fragPlayingCurrent.sn !== fragPlaying.sn || fragPlaying.level !== fragCurrentLevel || fragPlayingCurrent.urlId !== fragPlaying.urlId) {
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].FRAG_CHANGED, {
|
||
frag: fragPlayingCurrent
|
||
});
|
||
|
||
if (!fragPlaying || fragPlaying.level !== fragCurrentLevel) {
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].LEVEL_SWITCHED, {
|
||
level: fragCurrentLevel
|
||
});
|
||
}
|
||
|
||
this.fragPlaying = fragPlayingCurrent;
|
||
}
|
||
}
|
||
}
|
||
};
|
||
|
||
_createClass(StreamController, [{
|
||
key: "nextLevel",
|
||
get: function get() {
|
||
var frag = this.nextBufferedFrag;
|
||
|
||
if (frag) {
|
||
return frag.level;
|
||
} else {
|
||
return -1;
|
||
}
|
||
}
|
||
}, {
|
||
key: "currentLevel",
|
||
get: function get() {
|
||
var media = this.media;
|
||
|
||
if (media) {
|
||
var fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
|
||
|
||
if (fragPlayingCurrent) {
|
||
return fragPlayingCurrent.level;
|
||
}
|
||
}
|
||
|
||
return -1;
|
||
}
|
||
}, {
|
||
key: "nextBufferedFrag",
|
||
get: function get() {
|
||
var media = this.media;
|
||
|
||
if (media) {
|
||
// first get end range of current fragment
|
||
var fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
|
||
return this.followingBufferedFrag(fragPlayingCurrent);
|
||
} else {
|
||
return null;
|
||
}
|
||
}
|
||
}, {
|
||
key: "forceStartLoad",
|
||
get: function get() {
|
||
return this._forceStartLoad;
|
||
}
|
||
}]);
|
||
|
||
return StreamController;
|
||
}(_base_stream_controller__WEBPACK_IMPORTED_MODULE_1__["default"]);
|
||
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/controller/subtitle-stream-controller.ts":
|
||
/*!******************************************************!*\
|
||
!*** ./src/controller/subtitle-stream-controller.ts ***!
|
||
\******************************************************/
|
||
/*! exports provided: SubtitleStreamController */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "SubtitleStreamController", function() { return SubtitleStreamController; });
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.ts");
|
||
/* harmony import */ var _fragment_finders__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./fragment-finders */ "./src/controller/fragment-finders.ts");
|
||
/* harmony import */ var _utils_discontinuities__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/discontinuities */ "./src/utils/discontinuities.ts");
|
||
/* harmony import */ var _level_helper__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./level-helper */ "./src/controller/level-helper.ts");
|
||
/* harmony import */ var _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./fragment-tracker */ "./src/controller/fragment-tracker.ts");
|
||
/* harmony import */ var _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./base-stream-controller */ "./src/controller/base-stream-controller.ts");
|
||
/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
|
||
/* harmony import */ var _types_level__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../types/level */ "./src/types/level.ts");
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
|
||
|
||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
var TICK_INTERVAL = 500; // how often to tick in ms
|
||
|
||
var SubtitleStreamController = /*#__PURE__*/function (_BaseStreamController) {
|
||
_inheritsLoose(SubtitleStreamController, _BaseStreamController);
|
||
|
||
function SubtitleStreamController(hls, fragmentTracker) {
|
||
var _this;
|
||
|
||
_this = _BaseStreamController.call(this, hls, fragmentTracker, '[subtitle-stream-controller]') || this;
|
||
_this.levels = [];
|
||
_this.currentTrackId = -1;
|
||
_this.tracksBuffered = [];
|
||
_this.mainDetails = null;
|
||
|
||
_this._registerListeners();
|
||
|
||
return _this;
|
||
}
|
||
|
||
var _proto = SubtitleStreamController.prototype;
|
||
|
||
_proto.onHandlerDestroying = function onHandlerDestroying() {
|
||
this._unregisterListeners();
|
||
|
||
this.mainDetails = null;
|
||
};
|
||
|
||
_proto._registerListeners = function _registerListeners() {
|
||
var hls = this.hls;
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, this.onError, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_SWITCH, this.onSubtitleTrackSwitch, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_FRAG_PROCESSED, this.onSubtitleFragProcessed, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
|
||
};
|
||
|
||
_proto._unregisterListeners = function _unregisterListeners() {
|
||
var hls = this.hls;
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_LOADED, this.onLevelLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, this.onError, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_SWITCH, this.onSubtitleTrackSwitch, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_FRAG_PROCESSED, this.onSubtitleFragProcessed, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
|
||
};
|
||
|
||
_proto.startLoad = function startLoad() {
|
||
this.stopLoad();
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["State"].IDLE;
|
||
this.setInterval(TICK_INTERVAL);
|
||
this.tick();
|
||
};
|
||
|
||
_proto.onManifestLoading = function onManifestLoading() {
|
||
this.mainDetails = null;
|
||
this.fragmentTracker.removeAllFragments();
|
||
};
|
||
|
||
_proto.onLevelLoaded = function onLevelLoaded(event, data) {
|
||
this.mainDetails = data.details;
|
||
};
|
||
|
||
_proto.onSubtitleFragProcessed = function onSubtitleFragProcessed(event, data) {
|
||
var frag = data.frag,
|
||
success = data.success;
|
||
this.fragPrevious = frag;
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["State"].IDLE;
|
||
|
||
if (!success) {
|
||
return;
|
||
}
|
||
|
||
var buffered = this.tracksBuffered[this.currentTrackId];
|
||
|
||
if (!buffered) {
|
||
return;
|
||
} // Create/update a buffered array matching the interface used by BufferHelper.bufferedInfo
|
||
// so we can re-use the logic used to detect how much has been buffered
|
||
|
||
|
||
var timeRange;
|
||
var fragStart = frag.start;
|
||
|
||
for (var i = 0; i < buffered.length; i++) {
|
||
if (fragStart >= buffered[i].start && fragStart <= buffered[i].end) {
|
||
timeRange = buffered[i];
|
||
break;
|
||
}
|
||
}
|
||
|
||
var fragEnd = frag.start + frag.duration;
|
||
|
||
if (timeRange) {
|
||
timeRange.end = fragEnd;
|
||
} else {
|
||
timeRange = {
|
||
start: fragStart,
|
||
end: fragEnd
|
||
};
|
||
buffered.push(timeRange);
|
||
}
|
||
|
||
this.fragmentTracker.fragBuffered(frag);
|
||
};
|
||
|
||
_proto.onBufferFlushing = function onBufferFlushing(event, data) {
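// Note: when the media buffer is flushed from position 0, trims the tracked subtitle time ranges
// and the fragment tracker up to the flush end minus one target duration.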
|
||
var startOffset = data.startOffset,
|
||
endOffset = data.endOffset;
|
||
|
||
if (startOffset === 0 && endOffset !== Number.POSITIVE_INFINITY) {
|
||
var currentTrackId = this.currentTrackId,
|
||
levels = this.levels;
|
||
|
||
if (!levels.length || !levels[currentTrackId] || !levels[currentTrackId].details) {
|
||
return;
|
||
}
|
||
|
||
var trackDetails = levels[currentTrackId].details;
|
||
var targetDuration = trackDetails.targetduration;
|
||
var endOffsetSubtitles = endOffset - targetDuration;
|
||
|
||
if (endOffsetSubtitles <= 0) {
|
||
return;
|
||
}
|
||
|
||
data.endOffsetSubtitles = Math.max(0, endOffsetSubtitles);
|
||
this.tracksBuffered.forEach(function (buffered) {
|
||
for (var i = 0; i < buffered.length;) {
|
||
if (buffered[i].end <= endOffsetSubtitles) {
|
||
buffered.shift();
|
||
continue;
|
||
} else if (buffered[i].start < endOffsetSubtitles) {
|
||
buffered[i].start = endOffsetSubtitles;
|
||
} else {
|
||
break;
|
||
}
|
||
|
||
i++;
|
||
}
|
||
});
|
||
this.fragmentTracker.removeFragmentsInRange(startOffset, endOffsetSubtitles, _types_loader__WEBPACK_IMPORTED_MODULE_7__["PlaylistLevelType"].SUBTITLE);
|
||
}
|
||
} // If something goes wrong, proceed to next frag, if we were processing one.
|
||
;
|
||
|
||
_proto.onError = function onError(event, data) {
|
||
var _this$fragCurrent;
|
||
|
||
var frag = data.frag; // don't handle error not related to subtitle fragment
|
||
|
||
if (!frag || frag.type !== _types_loader__WEBPACK_IMPORTED_MODULE_7__["PlaylistLevelType"].SUBTITLE) {
|
||
return;
|
||
}
|
||
|
||
if ((_this$fragCurrent = this.fragCurrent) !== null && _this$fragCurrent !== void 0 && _this$fragCurrent.loader) {
|
||
this.fragCurrent.loader.abort();
|
||
}
|
||
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["State"].IDLE;
|
||
} // Got all new subtitle levels.
|
||
;
|
||
|
||
_proto.onSubtitleTracksUpdated = function onSubtitleTracksUpdated(event, _ref) {
|
||
var _this2 = this;
|
||
|
||
var subtitleTracks = _ref.subtitleTracks;
|
||
this.tracksBuffered = [];
|
||
this.levels = subtitleTracks.map(function (mediaPlaylist) {
|
||
return new _types_level__WEBPACK_IMPORTED_MODULE_8__["Level"](mediaPlaylist);
|
||
});
|
||
this.fragmentTracker.removeAllFragments();
|
||
this.fragPrevious = null;
|
||
this.levels.forEach(function (level) {
|
||
_this2.tracksBuffered[level.id] = [];
|
||
});
|
||
this.mediaBuffer = null;
|
||
};
|
||
|
||
_proto.onSubtitleTrackSwitch = function onSubtitleTrackSwitch(event, data) {
|
||
this.currentTrackId = data.id;
|
||
|
||
if (!this.levels.length || this.currentTrackId === -1) {
|
||
this.clearInterval();
|
||
return;
|
||
} // Check if track has the necessary details to load fragments
|
||
|
||
|
||
var currentTrack = this.levels[this.currentTrackId];
|
||
|
||
if (currentTrack !== null && currentTrack !== void 0 && currentTrack.details) {
|
||
this.mediaBuffer = this.mediaBufferTimeRanges;
|
||
} else {
|
||
this.mediaBuffer = null;
|
||
}
|
||
|
||
if (currentTrack) {
|
||
this.setInterval(TICK_INTERVAL);
|
||
}
|
||
} // Got a new set of subtitle fragments.
|
||
;
|
||
|
||
_proto.onSubtitleTrackLoaded = function onSubtitleTrackLoaded(event, data) {
|
||
var _track$details;
|
||
|
||
var newDetails = data.details,
|
||
trackId = data.id;
|
||
var currentTrackId = this.currentTrackId,
|
||
levels = this.levels;
|
||
|
||
if (!levels.length) {
|
||
return;
|
||
}
|
||
|
||
var track = levels[currentTrackId];
|
||
|
||
if (trackId >= levels.length || trackId !== currentTrackId || !track) {
|
||
return;
|
||
}
|
||
|
||
this.mediaBuffer = this.mediaBufferTimeRanges;
|
||
|
||
if (newDetails.live || (_track$details = track.details) !== null && _track$details !== void 0 && _track$details.live) {
|
||
var mainDetails = this.mainDetails;
|
||
|
||
if (newDetails.deltaUpdateFailed || !mainDetails) {
|
||
return;
|
||
}
|
||
|
||
var mainSlidingStartFragment = mainDetails.fragments[0];
|
||
|
||
if (!track.details) {
|
||
if (newDetails.hasProgramDateTime && mainDetails.hasProgramDateTime) {
|
||
Object(_utils_discontinuities__WEBPACK_IMPORTED_MODULE_3__["alignMediaPlaylistByPDT"])(newDetails, mainDetails);
|
||
} else if (mainSlidingStartFragment) {
|
||
// line up live playlist with main so that fragments in range are loaded
|
||
Object(_level_helper__WEBPACK_IMPORTED_MODULE_4__["addSliding"])(newDetails, mainSlidingStartFragment.start);
|
||
}
|
||
} else {
|
||
var sliding = this.alignPlaylists(newDetails, track.details);
|
||
|
||
if (sliding === 0 && mainSlidingStartFragment) {
|
||
// realign with main when there is no overlap with last refresh
|
||
Object(_level_helper__WEBPACK_IMPORTED_MODULE_4__["addSliding"])(newDetails, mainSlidingStartFragment.start);
|
||
}
|
||
}
|
||
}
|
||
|
||
track.details = newDetails;
|
||
this.levelLastLoaded = trackId; // trigger handler right now
|
||
|
||
this.tick(); // If playlist is misaligned because of bad PDT or drift, delete details to resync with main on reload
|
||
|
||
if (newDetails.live && !this.fragCurrent && this.media && this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["State"].IDLE) {
|
||
var foundFrag = Object(_fragment_finders__WEBPACK_IMPORTED_MODULE_2__["findFragmentByPTS"])(null, newDetails.fragments, this.media.currentTime, 0);
|
||
|
||
if (!foundFrag) {
|
||
this.warn('Subtitle playlist not aligned with playback');
|
||
track.details = undefined;
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto._handleFragmentLoadComplete = function _handleFragmentLoadComplete(fragLoadedData) {
|
||
var frag = fragLoadedData.frag,
|
||
payload = fragLoadedData.payload;
|
||
var decryptData = frag.decryptdata;
|
||
var hls = this.hls;
|
||
|
||
if (this.fragContextChanged(frag)) {
|
||
return;
|
||
} // check to see if the payload needs to be decrypted
|
||
|
||
|
||
if (payload && payload.byteLength > 0 && decryptData && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
|
||
var startTime = performance.now(); // decrypt the subtitles
|
||
|
||
this.decrypter.webCryptoDecrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).then(function (decryptedData) {
|
||
var endTime = performance.now();
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].FRAG_DECRYPTED, {
|
||
frag: frag,
|
||
payload: decryptedData,
|
||
stats: {
|
||
tstart: startTime,
|
||
tdecrypt: endTime
|
||
}
|
||
});
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto.doTick = function doTick() {
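// Note: on each IDLE tick, measures the buffered subtitle range around currentTime (widened by one
// target duration) and loads the next fragment that is not yet loaded, or its key if encrypted.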
|
||
if (!this.media) {
|
||
this.state = _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["State"].IDLE;
|
||
return;
|
||
}
|
||
|
||
if (this.state === _base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["State"].IDLE) {
|
||
var _foundFrag;
|
||
|
||
var currentTrackId = this.currentTrackId,
|
||
levels = this.levels;
|
||
|
||
if (!levels.length || !levels[currentTrackId] || !levels[currentTrackId].details) {
|
||
return;
|
||
} // Expand range of subs loaded by one target-duration in either direction to make up for misaligned playlists
|
||
|
||
|
||
var trackDetails = levels[currentTrackId].details;
|
||
var targetDuration = trackDetails.targetduration;
|
||
var config = this.config,
|
||
media = this.media;
|
||
var bufferedInfo = _utils_buffer_helper__WEBPACK_IMPORTED_MODULE_1__["BufferHelper"].bufferedInfo(this.mediaBufferTimeRanges, media.currentTime - targetDuration, config.maxBufferHole);
|
||
var targetBufferTime = bufferedInfo.end,
|
||
bufferLen = bufferedInfo.len;
|
||
var maxBufLen = this.getMaxBufferLength() + targetDuration;
|
||
|
||
if (bufferLen > maxBufLen) {
|
||
return;
|
||
}
|
||
|
||
console.assert(trackDetails, 'Subtitle track details are defined on idle subtitle stream controller tick');
|
||
var fragments = trackDetails.fragments;
|
||
var fragLen = fragments.length;
|
||
var end = trackDetails.edge;
|
||
var foundFrag;
|
||
var fragPrevious = this.fragPrevious;
|
||
|
||
if (targetBufferTime < end) {
|
||
var maxFragLookUpTolerance = config.maxFragLookUpTolerance;
|
||
foundFrag = Object(_fragment_finders__WEBPACK_IMPORTED_MODULE_2__["findFragmentByPTS"])(fragPrevious, fragments, targetBufferTime, maxFragLookUpTolerance);
|
||
|
||
if (!foundFrag && fragPrevious && fragPrevious.start < fragments[0].start) {
|
||
foundFrag = fragments[0];
|
||
}
|
||
} else {
|
||
foundFrag = fragments[fragLen - 1];
|
||
}
|
||
|
||
if ((_foundFrag = foundFrag) !== null && _foundFrag !== void 0 && _foundFrag.encrypted) {
|
||
this.loadKey(foundFrag, trackDetails);
|
||
} else if (foundFrag && this.fragmentTracker.getState(foundFrag) === _fragment_tracker__WEBPACK_IMPORTED_MODULE_5__["FragmentState"].NOT_LOADED) {
|
||
// only load if fragment is not loaded
|
||
this.loadFragment(foundFrag, trackDetails, targetBufferTime);
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.loadFragment = function loadFragment(frag, levelDetails, targetBufferTime) {
|
||
this.fragCurrent = frag;
|
||
|
||
_BaseStreamController.prototype.loadFragment.call(this, frag, levelDetails, targetBufferTime);
|
||
};
|
||
|
||
_createClass(SubtitleStreamController, [{
|
||
key: "mediaBufferTimeRanges",
|
||
get: function get() {
|
||
return this.tracksBuffered[this.currentTrackId] || [];
|
||
}
|
||
}]);
|
||
|
||
return SubtitleStreamController;
|
||
}(_base_stream_controller__WEBPACK_IMPORTED_MODULE_6__["default"]);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/controller/subtitle-track-controller.ts":
|
||
/*!*****************************************************!*\
|
||
!*** ./src/controller/subtitle-track-controller.ts ***!
|
||
\*****************************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/texttrack-utils */ "./src/utils/texttrack-utils.ts");
|
||
/* harmony import */ var _base_playlist_controller__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./base-playlist-controller */ "./src/controller/base-playlist-controller.ts");
|
||
/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
|
||
|
||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
|
||
|
||
|
||
|
||
|
||
|
||
|
||
var SubtitleTrackController = /*#__PURE__*/function (_BasePlaylistControll) {
|
||
_inheritsLoose(SubtitleTrackController, _BasePlaylistControll);
|
||
|
||
// Enable/disable subtitle display rendering
|
||
function SubtitleTrackController(hls) {
|
||
var _this;
|
||
|
||
_this = _BasePlaylistControll.call(this, hls, '[subtitle-track-controller]') || this;
|
||
_this.media = null;
|
||
_this.tracks = [];
|
||
_this.groupId = null;
|
||
_this.tracksInGroup = [];
|
||
_this.trackId = -1;
|
||
_this.selectDefaultTrack = true;
|
||
_this.queuedDefaultTrack = -1;
|
||
|
||
_this.trackChangeListener = function () {
|
||
return _this.onTextTracksChanged();
|
||
};
|
||
|
||
_this.asyncPollTrackChange = function () {
|
||
return _this.pollTrackChange(0);
|
||
};
|
||
|
||
_this.useTextTrackPolling = false;
|
||
_this.subtitlePollingInterval = -1;
|
||
_this.subtitleDisplay = true;
|
||
|
||
_this.registerListeners();
|
||
|
||
return _this;
|
||
}
|
||
|
||
var _proto = SubtitleTrackController.prototype;
|
||
|
||
_proto.destroy = function destroy() {
|
||
this.unregisterListeners();
|
||
this.tracks.length = 0;
|
||
this.tracksInGroup.length = 0;
|
||
this.trackChangeListener = this.asyncPollTrackChange = null;
|
||
|
||
_BasePlaylistControll.prototype.destroy.call(this);
|
||
};
|
||
|
||
_proto.registerListeners = function registerListeners() {
|
||
var hls = this.hls;
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_SWITCHING, this.onLevelSwitching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, this.onError, this);
|
||
};
|
||
|
||
_proto.unregisterListeners = function unregisterListeners() {
|
||
var hls = this.hls;
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_ATTACHED, this.onMediaAttached, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].MANIFEST_PARSED, this.onManifestParsed, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].LEVEL_SWITCHING, this.onLevelSwitching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, this.onError, this);
|
||
} // Listen for subtitle track change, then extract the current track ID.
|
||
;
|
||
|
||
_proto.onMediaAttached = function onMediaAttached(event, data) {
|
||
this.media = data.media;
|
||
|
||
if (!this.media) {
|
||
return;
|
||
}
|
||
|
||
if (this.queuedDefaultTrack > -1) {
|
||
this.subtitleTrack = this.queuedDefaultTrack;
|
||
this.queuedDefaultTrack = -1;
|
||
}
|
||
|
||
this.useTextTrackPolling = !(this.media.textTracks && 'onchange' in this.media.textTracks);
|
||
|
||
if (this.useTextTrackPolling) {
|
||
this.pollTrackChange(500);
|
||
} else {
|
||
this.media.textTracks.addEventListener('change', this.asyncPollTrackChange);
|
||
}
|
||
};
|
||
|
||
_proto.pollTrackChange = function pollTrackChange(timeout) {
|
||
self.clearInterval(this.subtitlePollingInterval);
|
||
this.subtitlePollingInterval = self.setInterval(this.trackChangeListener, timeout);
|
||
};
|
||
|
||
_proto.onMediaDetaching = function onMediaDetaching() {
|
||
if (!this.media) {
|
||
return;
|
||
}
|
||
|
||
self.clearInterval(this.subtitlePollingInterval);
|
||
|
||
if (!this.useTextTrackPolling) {
|
||
this.media.textTracks.removeEventListener('change', this.asyncPollTrackChange);
|
||
}
|
||
|
||
if (this.trackId > -1) {
|
||
this.queuedDefaultTrack = this.trackId;
|
||
}
|
||
|
||
var textTracks = filterSubtitleTracks(this.media.textTracks); // Clear loaded cues from these tracks on media detachment
|
||
|
||
textTracks.forEach(function (track) {
|
||
Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_1__["clearCurrentCues"])(track);
|
||
}); // Disable all subtitle tracks before detachment so that, when media is reattached, only tracks belonging to that content are enabled.
|
||
|
||
this.subtitleTrack = -1;
|
||
this.media = null;
|
||
};
|
||
|
||
_proto.onManifestLoading = function onManifestLoading() {
|
||
this.tracks = [];
|
||
this.groupId = null;
|
||
this.tracksInGroup = [];
|
||
this.trackId = -1;
|
||
this.selectDefaultTrack = true;
|
||
} // Fired whenever a new manifest is loaded.
|
||
;
|
||
|
||
_proto.onManifestParsed = function onManifestParsed(event, data) {
|
||
this.tracks = data.subtitleTracks;
|
||
};
|
||
|
||
_proto.onSubtitleTrackLoaded = function onSubtitleTrackLoaded(event, data) {
|
||
var id = data.id,
|
||
details = data.details;
|
||
var trackId = this.trackId;
|
||
var currentTrack = this.tracksInGroup[trackId];
|
||
|
||
if (!currentTrack) {
|
||
this.warn("Invalid subtitle track id " + id);
|
||
return;
|
||
}
|
||
|
||
var curDetails = currentTrack.details;
|
||
currentTrack.details = data.details;
|
||
this.log("subtitle track " + id + " loaded [" + details.startSN + "-" + details.endSN + "]");
|
||
|
||
if (id === this.trackId) {
|
||
this.retryCount = 0;
|
||
this.playlistLoaded(id, data, curDetails);
|
||
}
|
||
};
|
||
|
||
_proto.onLevelLoading = function onLevelLoading(event, data) {
|
||
this.switchLevel(data.level);
|
||
};
|
||
|
||
_proto.onLevelSwitching = function onLevelSwitching(event, data) {
|
||
this.switchLevel(data.level);
|
||
};
|
||
|
||
_proto.switchLevel = function switchLevel(levelIndex) {
|
||
var levelInfo = this.hls.levels[levelIndex];
|
||
|
||
if (!(levelInfo !== null && levelInfo !== void 0 && levelInfo.textGroupIds)) {
|
||
return;
|
||
}
|
||
|
||
var textGroupId = levelInfo.textGroupIds[levelInfo.urlId];
|
||
|
||
if (this.groupId !== textGroupId) {
|
||
var lastTrack = this.tracksInGroup ? this.tracksInGroup[this.trackId] : undefined;
|
||
var subtitleTracks = this.tracks.filter(function (track) {
|
||
return !textGroupId || track.groupId === textGroupId;
|
||
});
|
||
this.tracksInGroup = subtitleTracks;
|
||
var initialTrackId = this.findTrackId(lastTrack === null || lastTrack === void 0 ? void 0 : lastTrack.name) || this.findTrackId();
|
||
this.groupId = textGroupId;
|
||
var subtitleTracksUpdated = {
|
||
subtitleTracks: subtitleTracks
|
||
};
|
||
this.log("Updating subtitle tracks, " + subtitleTracks.length + " track(s) found in \"" + textGroupId + "\" group-id");
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACKS_UPDATED, subtitleTracksUpdated);
|
||
|
||
if (initialTrackId !== -1) {
|
||
this.setSubtitleTrack(initialTrackId, lastTrack);
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.findTrackId = function findTrackId(name) {
|
||
var textTracks = this.tracksInGroup;
|
||
|
||
for (var i = 0; i < textTracks.length; i++) {
|
||
var track = textTracks[i];
|
||
|
||
if (!this.selectDefaultTrack || track.default) {
|
||
if (!name || name === track.name) {
|
||
return track.id;
|
||
}
|
||
}
|
||
}
|
||
|
||
return -1;
|
||
};
|
||
|
||
_proto.onError = function onError(event, data) {
|
||
_BasePlaylistControll.prototype.onError.call(this, event, data);
|
||
|
||
if (data.fatal || !data.context) {
|
||
return;
|
||
}
|
||
|
||
if (data.context.type === _types_loader__WEBPACK_IMPORTED_MODULE_3__["PlaylistContextType"].SUBTITLE_TRACK && data.context.id === this.trackId && data.context.groupId === this.groupId) {
|
||
this.retryLoadingOrFail(data);
|
||
}
|
||
}
|
||
/** get alternate subtitle tracks list from playlist **/
|
||
;
|
||
|
||
_proto.loadPlaylist = function loadPlaylist(hlsUrlParameters) {
|
||
var currentTrack = this.tracksInGroup[this.trackId];
|
||
|
||
if (this.shouldLoadTrack(currentTrack)) {
|
||
var id = currentTrack.id;
|
||
var groupId = currentTrack.groupId;
|
||
var url = currentTrack.url;
|
||
|
||
if (hlsUrlParameters) {
|
||
try {
|
||
url = hlsUrlParameters.addDirectives(url);
|
||
} catch (error) {
|
||
this.warn("Could not construct new URL with HLS Delivery Directives: " + error);
|
||
}
|
||
}
|
||
|
||
this.log("Loading subtitle playlist for id " + id);
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_LOADING, {
|
||
url: url,
|
||
id: id,
|
||
groupId: groupId,
|
||
deliveryDirectives: hlsUrlParameters || null
|
||
});
|
||
}
|
||
}
|
||
/**
|
||
* Disables the old subtitleTrack and sets current mode on the next subtitleTrack.
|
||
* This operates on the DOM textTracks.
|
||
* A value of -1 will disable all subtitle tracks.
|
||
*/
|
||
;
|
||
|
||
_proto.toggleTrackModes = function toggleTrackModes(newId) {
|
||
var _this2 = this;
|
||
|
||
var media = this.media,
|
||
subtitleDisplay = this.subtitleDisplay,
|
||
trackId = this.trackId;
|
||
|
||
if (!media) {
|
||
return;
|
||
}
|
||
|
||
var textTracks = filterSubtitleTracks(media.textTracks);
|
||
var groupTracks = textTracks.filter(function (track) {
|
||
return track.groupId === _this2.groupId;
|
||
});
|
||
|
||
if (newId === -1) {
|
||
[].slice.call(textTracks).forEach(function (track) {
|
||
track.mode = 'disabled';
|
||
});
|
||
} else {
|
||
var oldTrack = groupTracks[trackId];
|
||
|
||
if (oldTrack) {
|
||
oldTrack.mode = 'disabled';
|
||
}
|
||
}
|
||
|
||
var nextTrack = groupTracks[newId];
|
||
|
||
if (nextTrack) {
|
||
nextTrack.mode = subtitleDisplay ? 'showing' : 'hidden';
|
||
}
|
||
}
|
||
/**
|
||
* This method is responsible for validating the subtitle index and periodically reloading if live.
|
||
* Dispatches the SUBTITLE_TRACK_SWITCH event, which instructs the subtitle-stream-controller to load the selected track.
|
||
*/
|
||
;
|
||
|
||
_proto.setSubtitleTrack = function setSubtitleTrack(newId, lastTrack) {
|
||
var _tracks$newId;
|
||
|
||
var tracks = this.tracksInGroup; // setting this.subtitleTrack will trigger internal logic
|
||
// if media has not been attached yet, it will fail
|
||
// we keep a reference to the default track id
|
||
// and we'll set subtitleTrack when onMediaAttached is triggered
|
||
|
||
if (!this.media) {
|
||
this.queuedDefaultTrack = newId;
|
||
return;
|
||
}
|
||
|
||
if (this.trackId !== newId) {
|
||
this.toggleTrackModes(newId);
|
||
} // exit if track id is already set or invalid
|
||
|
||
|
||
if (this.trackId === newId && (newId === -1 || (_tracks$newId = tracks[newId]) !== null && _tracks$newId !== void 0 && _tracks$newId.details) || newId < -1 || newId >= tracks.length) {
|
||
return;
|
||
} // stopping live reloading timer if any
|
||
|
||
|
||
this.clearTimer();
|
||
var track = tracks[newId];
|
||
this.log("Switching to subtitle track " + newId);
|
||
this.trackId = newId;
|
||
|
||
if (track) {
|
||
var id = track.id,
|
||
_track$groupId = track.groupId,
|
||
groupId = _track$groupId === void 0 ? '' : _track$groupId,
|
||
name = track.name,
|
||
type = track.type,
|
||
url = track.url;
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_SWITCH, {
|
||
id: id,
|
||
groupId: groupId,
|
||
name: name,
|
||
type: type,
|
||
url: url
|
||
});
|
||
var hlsUrlParameters = this.switchParams(track.url, lastTrack === null || lastTrack === void 0 ? void 0 : lastTrack.details);
|
||
this.loadPlaylist(hlsUrlParameters);
|
||
} else {
|
||
// switch to -1
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].SUBTITLE_TRACK_SWITCH, {
|
||
id: newId
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto.onTextTracksChanged = function onTextTracksChanged() {
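// Note: syncs hls.js's subtitleTrack with whichever native TextTrack the user enabled
// ('showing' takes precedence over 'hidden'); only applies when rendering text tracks natively.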
|
||
if (!this.useTextTrackPolling) {
|
||
self.clearInterval(this.subtitlePollingInterval);
|
||
} // Media is undefined when switching streams via loadSource()
|
||
|
||
|
||
if (!this.media || !this.hls.config.renderTextTracksNatively) {
|
||
return;
|
||
}
|
||
|
||
var trackId = -1;
|
||
var tracks = filterSubtitleTracks(this.media.textTracks);
|
||
|
||
for (var id = 0; id < tracks.length; id++) {
|
||
if (tracks[id].mode === 'hidden') {
|
||
// Do not break, in case there is a following track with mode 'showing'.
|
||
trackId = id;
|
||
} else if (tracks[id].mode === 'showing') {
|
||
trackId = id;
|
||
break;
|
||
}
|
||
} // Setting current subtitleTrack will invoke code.
|
||
|
||
|
||
if (this.subtitleTrack !== trackId) {
|
||
this.subtitleTrack = trackId;
|
||
}
|
||
};
|
||
|
||
_createClass(SubtitleTrackController, [{
|
||
key: "subtitleTracks",
|
||
get: function get() {
|
||
return this.tracksInGroup;
|
||
}
|
||
/** get/set index of the selected subtitle track (based on index in subtitle track lists) **/
|
||
|
||
}, {
|
||
key: "subtitleTrack",
|
||
get: function get() {
|
||
return this.trackId;
|
||
},
|
||
set: function set(newId) {
|
||
this.selectDefaultTrack = false;
|
||
var lastTrack = this.tracksInGroup ? this.tracksInGroup[this.trackId] : undefined;
|
||
this.setSubtitleTrack(newId, lastTrack);
|
||
}
|
||
}]);
|
||
|
||
return SubtitleTrackController;
|
||
}(_base_playlist_controller__WEBPACK_IMPORTED_MODULE_2__["default"]);
|
||
|
||
function filterSubtitleTracks(textTrackList) {
|
||
var tracks = [];
|
||
|
||
for (var i = 0; i < textTrackList.length; i++) {
|
||
var track = textTrackList[i]; // Edge adds a track without a label; we don't want to use it
|
||
|
||
if (track.kind === 'subtitles' && track.label) {
|
||
tracks.push(textTrackList[i]);
|
||
}
|
||
}
|
||
|
||
return tracks;
|
||
}
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (SubtitleTrackController);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/controller/timeline-controller.ts":
|
||
/*!***********************************************!*\
|
||
!*** ./src/controller/timeline-controller.ts ***!
|
||
\***********************************************/
|
||
/*! exports provided: TimelineController */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "TimelineController", function() { return TimelineController; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _utils_cea_608_parser__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/cea-608-parser */ "./src/utils/cea-608-parser.ts");
|
||
/* harmony import */ var _utils_output_filter__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/output-filter */ "./src/utils/output-filter.ts");
|
||
/* harmony import */ var _utils_webvtt_parser__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/webvtt-parser */ "./src/utils/webvtt-parser.ts");
|
||
/* harmony import */ var _utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils/texttrack-utils */ "./src/utils/texttrack-utils.ts");
|
||
/* harmony import */ var _utils_imsc1_ttml_parser__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../utils/imsc1-ttml-parser */ "./src/utils/imsc1-ttml-parser.ts");
|
||
/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
var TimelineController = /*#__PURE__*/function () {
|
||
function TimelineController(hls) {
|
||
this.hls = void 0;
|
||
this.media = null;
|
||
this.config = void 0;
|
||
this.enabled = true;
|
||
this.Cues = void 0;
|
||
this.textTracks = [];
|
||
this.tracks = [];
|
||
this.initPTS = [];
|
||
this.timescale = [];
|
||
this.unparsedVttFrags = [];
|
||
this.captionsTracks = {};
|
||
this.nonNativeCaptionsTracks = {};
|
||
this.cea608Parser1 = void 0;
|
||
this.cea608Parser2 = void 0;
|
||
this.lastSn = -1;
|
||
this.lastPartIndex = -1;
|
||
this.prevCC = -1;
|
||
this.vttCCs = newVTTCCs();
|
||
this.captionsProperties = void 0;
|
||
this.hls = hls;
|
||
this.config = hls.config;
|
||
this.Cues = hls.config.cueHandler;
|
||
this.captionsProperties = {
|
||
textTrack1: {
|
||
label: this.config.captionsTextTrack1Label,
|
||
languageCode: this.config.captionsTextTrack1LanguageCode
|
||
},
|
||
textTrack2: {
|
||
label: this.config.captionsTextTrack2Label,
|
||
languageCode: this.config.captionsTextTrack2LanguageCode
|
||
},
|
||
textTrack3: {
|
||
label: this.config.captionsTextTrack3Label,
|
||
languageCode: this.config.captionsTextTrack3LanguageCode
|
||
},
|
||
textTrack4: {
|
||
label: this.config.captionsTextTrack4Label,
|
||
languageCode: this.config.captionsTextTrack4LanguageCode
|
||
}
|
||
};
|
||
|
||
if (this.config.enableCEA708Captions) {
|
||
var channel1 = new _utils_output_filter__WEBPACK_IMPORTED_MODULE_3__["default"](this, 'textTrack1');
|
||
var channel2 = new _utils_output_filter__WEBPACK_IMPORTED_MODULE_3__["default"](this, 'textTrack2');
|
||
var channel3 = new _utils_output_filter__WEBPACK_IMPORTED_MODULE_3__["default"](this, 'textTrack3');
|
||
var channel4 = new _utils_output_filter__WEBPACK_IMPORTED_MODULE_3__["default"](this, 'textTrack4');
|
||
this.cea608Parser1 = new _utils_cea_608_parser__WEBPACK_IMPORTED_MODULE_2__["default"](1, channel1, channel2);
|
||
this.cea608Parser2 = new _utils_cea_608_parser__WEBPACK_IMPORTED_MODULE_2__["default"](3, channel3, channel4);
|
||
}
|
||
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADED, this.onManifestLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADING, this.onFragLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADED, this.onFragLoaded, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_PARSING_USERDATA, this.onFragParsingUserdata, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_DECRYPTED, this.onFragDecrypted, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].INIT_PTS_FOUND, this.onInitPtsFound, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACKS_CLEARED, this.onSubtitleTracksCleared, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this);
|
||
}
|
||
|
||
var _proto = TimelineController.prototype;
|
||
|
||
_proto.destroy = function destroy() {
|
||
var hls = this.hls;
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_ATTACHING, this.onMediaAttaching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MEDIA_DETACHING, this.onMediaDetaching, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADED, this.onManifestLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADING, this.onFragLoading, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADED, this.onFragLoaded, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_PARSING_USERDATA, this.onFragParsingUserdata, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_DECRYPTED, this.onFragDecrypted, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].INIT_PTS_FOUND, this.onInitPtsFound, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACKS_CLEARED, this.onSubtitleTracksCleared, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].BUFFER_FLUSHING, this.onBufferFlushing, this); // @ts-ignore
|
||
|
||
this.hls = this.config = this.cea608Parser1 = this.cea608Parser2 = null;
|
||
};
|
||
|
||
_proto.addCues = function addCues(trackName, startTime, endTime, screen, cueRanges) {
|
||
// skip cues which overlap more than 50% with previously parsed time ranges
|
||
var merged = false;
|
||
|
||
for (var i = cueRanges.length; i--;) {
|
||
var cueRange = cueRanges[i];
|
||
var overlap = intersection(cueRange[0], cueRange[1], startTime, endTime);
|
||
|
||
if (overlap >= 0) {
|
||
cueRange[0] = Math.min(cueRange[0], startTime);
|
||
cueRange[1] = Math.max(cueRange[1], endTime);
|
||
merged = true;
|
||
|
||
if (overlap / (endTime - startTime) > 0.5) {
|
||
return;
|
||
}
|
||
}
|
||
}
|
||
|
||
if (!merged) {
|
||
cueRanges.push([startTime, endTime]);
|
||
}
|
||
|
||
if (this.config.renderTextTracksNatively) {
|
||
var track = this.captionsTracks[trackName];
|
||
this.Cues.newCue(track, startTime, endTime, screen);
|
||
} else {
|
||
var cues = this.Cues.newCue(null, startTime, endTime, screen);
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].CUES_PARSED, {
|
||
type: 'captions',
|
||
cues: cues,
|
||
track: trackName
|
||
});
|
||
}
|
||
} // Triggered when an initial PTS is found; used for synchronisation of WebVTT.
|
||
;
|
||
|
||
_proto.onInitPtsFound = function onInitPtsFound(event, _ref) {
|
||
var _this = this;
|
||
|
||
var frag = _ref.frag,
|
||
id = _ref.id,
|
||
initPTS = _ref.initPTS,
|
||
timescale = _ref.timescale;
|
||
var unparsedVttFrags = this.unparsedVttFrags;
|
||
|
||
if (id === 'main') {
|
||
this.initPTS[frag.cc] = initPTS;
|
||
this.timescale[frag.cc] = timescale;
|
||
} // Due to asynchronous processing, initial PTS may arrive later than the first VTT fragments are loaded.
|
||
// Parse any unparsed fragments upon receiving the initial PTS.
|
||
|
||
|
||
if (unparsedVttFrags.length) {
|
||
this.unparsedVttFrags = [];
|
||
unparsedVttFrags.forEach(function (frag) {
|
||
_this.onFragLoaded(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADED, frag);
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto.getExistingTrack = function getExistingTrack(trackName) {
|
||
var media = this.media;
|
||
|
||
if (media) {
|
||
for (var i = 0; i < media.textTracks.length; i++) {
|
||
var textTrack = media.textTracks[i];
|
||
|
||
if (textTrack[trackName]) {
|
||
return textTrack;
|
||
}
|
||
}
|
||
}
|
||
|
||
return null;
|
||
};
|
||
|
||
_proto.createCaptionsTrack = function createCaptionsTrack(trackName) {
|
||
if (this.config.renderTextTracksNatively) {
|
||
this.createNativeTrack(trackName);
|
||
} else {
|
||
this.createNonNativeTrack(trackName);
|
||
}
|
||
};
|
||
|
||
_proto.createNativeTrack = function createNativeTrack(trackName) {
|
||
if (this.captionsTracks[trackName]) {
|
||
return;
|
||
}
|
||
|
||
var captionsProperties = this.captionsProperties,
|
||
captionsTracks = this.captionsTracks,
|
||
media = this.media;
|
||
var _captionsProperties$t = captionsProperties[trackName],
|
||
label = _captionsProperties$t.label,
|
||
languageCode = _captionsProperties$t.languageCode; // Enable reuse of existing text track.
|
||
|
||
var existingTrack = this.getExistingTrack(trackName);
|
||
|
||
if (!existingTrack) {
|
||
var textTrack = this.createTextTrack('captions', label, languageCode);
|
||
|
||
if (textTrack) {
|
||
// Set a special property on the track so we know it's managed by Hls.js
|
||
textTrack[trackName] = true;
|
||
captionsTracks[trackName] = textTrack;
|
||
}
|
||
} else {
|
||
captionsTracks[trackName] = existingTrack;
|
||
Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["clearCurrentCues"])(captionsTracks[trackName]);
|
||
Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["sendAddTrackEvent"])(captionsTracks[trackName], media);
|
||
}
|
||
};
|
||
|
||
_proto.createNonNativeTrack = function createNonNativeTrack(trackName) {
|
||
if (this.nonNativeCaptionsTracks[trackName]) {
|
||
return;
|
||
} // Create a list of a single track for the provider to consume
|
||
|
||
|
||
var trackProperties = this.captionsProperties[trackName];
|
||
|
||
if (!trackProperties) {
|
||
return;
|
||
}
|
||
|
||
var label = trackProperties.label;
|
||
var track = {
|
||
_id: trackName,
|
||
label: label,
|
||
kind: 'captions',
|
||
default: trackProperties.media ? !!trackProperties.media.default : false,
|
||
closedCaptions: trackProperties.media
|
||
};
|
||
this.nonNativeCaptionsTracks[trackName] = track;
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].NON_NATIVE_TEXT_TRACKS_FOUND, {
|
||
tracks: [track]
|
||
});
|
||
};
|
||
|
||
_proto.createTextTrack = function createTextTrack(kind, label, lang) {
|
||
var media = this.media;
|
||
|
||
if (!media) {
|
||
return;
|
||
}
|
||
|
||
return media.addTextTrack(kind, label, lang);
|
||
};
|
||
|
||
_proto.onMediaAttaching = function onMediaAttaching(event, data) {
|
||
this.media = data.media;
|
||
|
||
this._cleanTracks();
|
||
};
|
||
|
||
_proto.onMediaDetaching = function onMediaDetaching() {
|
||
var captionsTracks = this.captionsTracks;
|
||
Object.keys(captionsTracks).forEach(function (trackName) {
|
||
Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["clearCurrentCues"])(captionsTracks[trackName]);
|
||
delete captionsTracks[trackName];
|
||
});
|
||
this.nonNativeCaptionsTracks = {};
|
||
};
|
||
|
||
_proto.onManifestLoading = function onManifestLoading() {
|
||
this.lastSn = -1; // Detect discontinuity in fragment parsing
|
||
|
||
this.lastPartIndex = -1;
|
||
this.prevCC = -1;
|
||
this.vttCCs = newVTTCCs(); // Detect discontinuity in subtitle manifests
|
||
|
||
this._cleanTracks();
|
||
|
||
this.tracks = [];
|
||
this.captionsTracks = {};
|
||
this.nonNativeCaptionsTracks = {};
|
||
this.textTracks = [];
|
||
this.unparsedVttFrags = this.unparsedVttFrags || [];
|
||
this.initPTS = [];
|
||
this.timescale = [];
|
||
|
||
if (this.cea608Parser1 && this.cea608Parser2) {
|
||
this.cea608Parser1.reset();
|
||
this.cea608Parser2.reset();
|
||
}
|
||
};
|
||
|
||
_proto._cleanTracks = function _cleanTracks() {
|
||
// clear outdated subtitles
|
||
var media = this.media;
|
||
|
||
if (!media) {
|
||
return;
|
||
}
|
||
|
||
var textTracks = media.textTracks;
|
||
|
||
if (textTracks) {
|
||
for (var i = 0; i < textTracks.length; i++) {
|
||
Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["clearCurrentCues"])(textTracks[i]);
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.onSubtitleTracksUpdated = function onSubtitleTracksUpdated(event, data) {
|
||
var _this2 = this;
|
||
|
||
this.textTracks = [];
|
||
var tracks = data.subtitleTracks || [];
|
||
var hasIMSC1 = tracks.some(function (track) {
|
||
return track.textCodec === _utils_imsc1_ttml_parser__WEBPACK_IMPORTED_MODULE_6__["IMSC1_CODEC"];
|
||
});
|
||
|
||
if (this.config.enableWebVTT || hasIMSC1 && this.config.enableIMSC1) {
|
||
var sameTracks = this.tracks && tracks && this.tracks.length === tracks.length;
|
||
this.tracks = tracks || [];
|
||
|
||
if (this.config.renderTextTracksNatively) {
|
||
var inUseTracks = this.media ? this.media.textTracks : [];
|
||
this.tracks.forEach(function (track, index) {
|
||
var textTrack;
|
||
|
||
if (index < inUseTracks.length) {
|
||
var inUseTrack = null;
|
||
|
||
for (var i = 0; i < inUseTracks.length; i++) {
|
||
if (canReuseVttTextTrack(inUseTracks[i], track)) {
|
||
inUseTrack = inUseTracks[i];
|
||
break;
|
||
}
|
||
} // Reuse tracks with the same label, but do not reuse 608/708 tracks
|
||
|
||
|
||
if (inUseTrack) {
|
||
textTrack = inUseTrack;
|
||
}
|
||
}
|
||
|
||
if (textTrack) {
|
||
Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["clearCurrentCues"])(textTrack);
|
||
} else {
|
||
textTrack = _this2.createTextTrack('subtitles', track.name, track.lang);
|
||
|
||
if (textTrack) {
|
||
textTrack.mode = 'disabled';
|
||
}
|
||
}
|
||
|
||
if (textTrack) {
|
||
textTrack.groupId = track.groupId;
|
||
|
||
_this2.textTracks.push(textTrack);
|
||
}
|
||
});
|
||
} else if (!sameTracks && this.tracks && this.tracks.length) {
|
||
// Create a list of tracks for the provider to consume
|
||
var tracksList = this.tracks.map(function (track) {
|
||
return {
|
||
label: track.name,
|
||
kind: track.type.toLowerCase(),
|
||
default: track.default,
|
||
subtitleTrack: track
|
||
};
|
||
});
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].NON_NATIVE_TEXT_TRACKS_FOUND, {
|
||
tracks: tracksList
|
||
});
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.onManifestLoaded = function onManifestLoaded(event, data) {
|
||
var _this3 = this;
|
||
|
||
if (this.config.enableCEA708Captions && data.captions) {
|
||
data.captions.forEach(function (captionsTrack) {
|
||
var instreamIdMatch = /(?:CC|SERVICE)([1-4])/.exec(captionsTrack.instreamId);
|
||
|
||
if (!instreamIdMatch) {
|
||
return;
|
||
}
|
||
|
||
var trackName = "textTrack" + instreamIdMatch[1];
|
||
var trackProperties = _this3.captionsProperties[trackName];
|
||
|
||
if (!trackProperties) {
|
||
return;
|
||
}
|
||
|
||
trackProperties.label = captionsTrack.name;
|
||
|
||
if (captionsTrack.lang) {
|
||
// optional attribute
|
||
trackProperties.languageCode = captionsTrack.lang;
|
||
}
|
||
|
||
trackProperties.media = captionsTrack;
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto.onFragLoading = function onFragLoading(event, data) {
|
||
var cea608Parser1 = this.cea608Parser1,
|
||
cea608Parser2 = this.cea608Parser2,
|
||
lastSn = this.lastSn,
|
||
lastPartIndex = this.lastPartIndex;
|
||
|
||
if (!this.enabled || !(cea608Parser1 && cea608Parser2)) {
|
||
return;
|
||
} // if this frag isn't contiguous, clear the parser so cues with bad start/end times aren't added to the textTrack
|
||
|
||
|
||
if (data.frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_7__["PlaylistLevelType"].MAIN) {
|
||
var _data$part$index, _data$part;
|
||
|
||
var sn = data.frag.sn;
|
||
var partIndex = (_data$part$index = data === null || data === void 0 ? void 0 : (_data$part = data.part) === null || _data$part === void 0 ? void 0 : _data$part.index) != null ? _data$part$index : -1;
|
||
|
||
if (!(sn === lastSn + 1 || sn === lastSn && partIndex === lastPartIndex + 1)) {
|
||
cea608Parser1.reset();
|
||
cea608Parser2.reset();
|
||
}
|
||
|
||
this.lastSn = sn;
|
||
this.lastPartIndex = partIndex;
|
||
}
|
||
};
|
||
|
||
_proto.onFragLoaded = function onFragLoaded(event, data) {
|
||
var frag = data.frag,
|
||
payload = data.payload;
|
||
var initPTS = this.initPTS,
|
||
unparsedVttFrags = this.unparsedVttFrags;
|
||
|
||
if (frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_7__["PlaylistLevelType"].SUBTITLE) {
|
||
// If fragment is subtitle type, parse as WebVTT.
|
||
if (payload.byteLength) {
|
||
// We need an initial synchronisation PTS. Store fragments as long as none has arrived.
|
||
if (!Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(initPTS[frag.cc])) {
|
||
unparsedVttFrags.push(data);
|
||
|
||
if (initPTS.length) {
|
||
// finish unsuccessfully, otherwise the subtitle-stream-controller could be blocked from loading new frags.
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_FRAG_PROCESSED, {
|
||
success: false,
|
||
frag: frag,
|
||
error: new Error('Missing initial subtitle PTS')
|
||
});
|
||
}
|
||
|
||
return;
|
||
}
|
||
|
||
var decryptData = frag.decryptdata; // fragment after decryption has a stats object
|
||
|
||
var decrypted = ('stats' in data); // If the subtitles are not encrypted, parse VTTs now. Otherwise, we need to wait.
|
||
|
||
if (decryptData == null || decryptData.key == null || decryptData.method !== 'AES-128' || decrypted) {
|
||
var trackPlaylistMedia = this.tracks[frag.level];
|
||
var vttCCs = this.vttCCs;
|
||
|
||
if (!vttCCs[frag.cc]) {
|
||
vttCCs[frag.cc] = {
|
||
start: frag.start,
|
||
prevCC: this.prevCC,
|
||
new: true
|
||
};
|
||
this.prevCC = frag.cc;
|
||
}
|
||
|
||
if (trackPlaylistMedia && trackPlaylistMedia.textCodec === _utils_imsc1_ttml_parser__WEBPACK_IMPORTED_MODULE_6__["IMSC1_CODEC"]) {
|
||
this._parseIMSC1(frag, payload);
|
||
} else {
|
||
this._parseVTTs(frag, payload, vttCCs);
|
||
}
|
||
}
|
||
} else {
|
||
// In case there is no payload, finish unsuccessfully.
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_FRAG_PROCESSED, {
|
||
success: false,
|
||
frag: frag,
|
||
error: new Error('Empty subtitle payload')
|
||
});
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto._parseIMSC1 = function _parseIMSC1(frag, payload) {
|
||
var _this4 = this;
|
||
|
||
var hls = this.hls;
|
||
Object(_utils_imsc1_ttml_parser__WEBPACK_IMPORTED_MODULE_6__["parseIMSC1"])(payload, this.initPTS[frag.cc], this.timescale[frag.cc], function (cues) {
|
||
_this4._appendCues(cues, frag.level);
|
||
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_FRAG_PROCESSED, {
|
||
success: true,
|
||
frag: frag
|
||
});
|
||
}, function (error) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_8__["logger"].log("Failed to parse IMSC1: " + error);
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_FRAG_PROCESSED, {
|
||
success: false,
|
||
frag: frag,
|
||
error: error
|
||
});
|
||
});
|
||
};
|
||
|
||
_proto._parseVTTs = function _parseVTTs(frag, payload, vttCCs) {
|
||
var _this5 = this;
|
||
|
||
var hls = this.hls; // Parse the WebVTT file contents.
|
||
|
||
Object(_utils_webvtt_parser__WEBPACK_IMPORTED_MODULE_4__["parseWebVTT"])(payload, this.initPTS[frag.cc], this.timescale[frag.cc], vttCCs, frag.cc, frag.start, function (cues) {
|
||
_this5._appendCues(cues, frag.level);
|
||
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_FRAG_PROCESSED, {
|
||
success: true,
|
||
frag: frag
|
||
});
|
||
}, function (error) {
|
||
_this5._fallbackToIMSC1(frag, payload); // Something went wrong while parsing. Trigger event with success false.
|
||
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_8__["logger"].log("Failed to parse VTT cue: " + error);
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_FRAG_PROCESSED, {
|
||
success: false,
|
||
frag: frag,
|
||
error: error
|
||
});
|
||
});
|
||
};
|
||
|
||
_proto._fallbackToIMSC1 = function _fallbackToIMSC1(frag, payload) {
|
||
var _this6 = this;
|
||
|
||
// If textCodec is unknown, try parsing as IMSC1. Set textCodec based on the result
|
||
var trackPlaylistMedia = this.tracks[frag.level];
|
||
|
||
if (!trackPlaylistMedia.textCodec) {
|
||
Object(_utils_imsc1_ttml_parser__WEBPACK_IMPORTED_MODULE_6__["parseIMSC1"])(payload, this.initPTS[frag.cc], this.timescale[frag.cc], function () {
|
||
trackPlaylistMedia.textCodec = _utils_imsc1_ttml_parser__WEBPACK_IMPORTED_MODULE_6__["IMSC1_CODEC"];
|
||
|
||
_this6._parseIMSC1(frag, payload);
|
||
}, function () {
|
||
trackPlaylistMedia.textCodec = 'wvtt';
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto._appendCues = function _appendCues(cues, fragLevel) {
|
||
var hls = this.hls;
|
||
|
||
if (this.config.renderTextTracksNatively) {
|
||
var textTrack = this.textTracks[fragLevel]; // WebVTTParser.parse is asynchronous; if the currently selected text track's mode is set to "disabled"
// before parsing is done, don't access currentTrack.cues.getCueById, as cues will be null
// and calling getCueById on it would throw an exception.
// Because we return early when the mode is disabled, `cues` can safely be treated as non-null below.
|
||
|
||
if (textTrack.mode === 'disabled') {
|
||
return;
|
||
}
|
||
|
||
cues.forEach(function (cue) {
|
||
return Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["addCueToTrack"])(textTrack, cue);
|
||
});
|
||
} else {
|
||
var currentTrack = this.tracks[fragLevel];
|
||
var track = currentTrack.default ? 'default' : 'subtitles' + fragLevel;
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].CUES_PARSED, {
|
||
type: 'subtitles',
|
||
cues: cues,
|
||
track: track
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto.onFragDecrypted = function onFragDecrypted(event, data) {
|
||
var frag = data.frag;
|
||
|
||
if (frag.type === _types_loader__WEBPACK_IMPORTED_MODULE_7__["PlaylistLevelType"].SUBTITLE) {
|
||
if (!Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(this.initPTS[frag.cc])) {
|
||
this.unparsedVttFrags.push(data);
|
||
return;
|
||
}
|
||
|
||
this.onFragLoaded(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_LOADED, data);
|
||
}
|
||
};
|
||
|
||
_proto.onSubtitleTracksCleared = function onSubtitleTracksCleared() {
|
||
this.tracks = [];
|
||
this.captionsTracks = {};
|
||
};
|
||
|
||
_proto.onFragParsingUserdata = function onFragParsingUserdata(event, data) {
|
||
var cea608Parser1 = this.cea608Parser1,
|
||
cea608Parser2 = this.cea608Parser2;
|
||
|
||
if (!this.enabled || !(cea608Parser1 && cea608Parser2)) {
|
||
return;
|
||
} // If the event contains captions (found in the bytes property), push all bytes into the parser immediately
|
||
// It will create the proper timestamps based on the PTS value
|
||
|
||
|
||
for (var i = 0; i < data.samples.length; i++) {
|
||
var ccBytes = data.samples[i].bytes;
|
||
|
||
if (ccBytes) {
|
||
var ccdatas = this.extractCea608Data(ccBytes);
|
||
cea608Parser1.addData(data.samples[i].pts, ccdatas[0]);
|
||
cea608Parser2.addData(data.samples[i].pts, ccdatas[1]);
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.onBufferFlushing = function onBufferFlushing(event, _ref2) {
|
||
var startOffset = _ref2.startOffset,
|
||
endOffset = _ref2.endOffset,
|
||
endOffsetSubtitles = _ref2.endOffsetSubtitles,
|
||
type = _ref2.type;
|
||
var media = this.media;
|
||
|
||
if (!media || media.currentTime < endOffset) {
|
||
return;
|
||
} // Clear 608 caption cues from the captions TextTracks when the video back buffer is flushed
|
||
// Forward cues are never removed because we can lose streamed 608 content from recent fragments
|
||
|
||
|
||
if (!type || type === 'video') {
|
||
var captionsTracks = this.captionsTracks;
|
||
Object.keys(captionsTracks).forEach(function (trackName) {
|
||
return Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["removeCuesInRange"])(captionsTracks[trackName], startOffset, endOffset);
|
||
});
|
||
}
|
||
|
||
if (this.config.renderTextTracksNatively) {
|
||
// Clear VTT/IMSC1 subtitle cues from the subtitle TextTracks when the back buffer is flushed
|
||
if (startOffset === 0 && endOffsetSubtitles !== undefined) {
|
||
var textTracks = this.textTracks;
|
||
Object.keys(textTracks).forEach(function (trackName) {
|
||
return Object(_utils_texttrack_utils__WEBPACK_IMPORTED_MODULE_5__["removeCuesInRange"])(textTracks[trackName], startOffset, endOffsetSubtitles);
|
||
});
|
||
}
|
||
}
|
||
};
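// For reference, a summary of the cc_data layout that extractCea608Data below walks: byte 0 & 0x1f is
// cc_count, byte 1 is skipped, and each following 3-byte tuple holds a header byte (bit 0x04 = cc_valid,
// bits 0x03 = cc_type) plus two data bytes masked with 0x7f. cc_type 0 and 1 are CEA-608 field 1 and
// field 2 (fed to cea608Parser1 and cea608Parser2 respectively); cc_type 2 and 3 carry CEA-708 DTVCC
// packet data and are ignored by this controller.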
|
||
|
||
_proto.extractCea608Data = function extractCea608Data(byteArray) {
|
||
var actualCCBytes = [[], []];
|
||
var count = byteArray[0] & 0x1f;
|
||
var position = 2;
|
||
|
||
for (var j = 0; j < count; j++) {
|
||
var tmpByte = byteArray[position++];
|
||
var ccbyte1 = 0x7f & byteArray[position++];
|
||
var ccbyte2 = 0x7f & byteArray[position++];
|
||
|
||
if (ccbyte1 === 0 && ccbyte2 === 0) {
|
||
continue;
|
||
}
|
||
|
||
var ccValid = (0x04 & tmpByte) !== 0; // Support all four channels
|
||
|
||
if (ccValid) {
|
||
var ccType = 0x03 & tmpByte;
|
||
|
||
if (0x00 /* CEA608 field1*/ === ccType || 0x01 /* CEA608 field2*/ === ccType) {
|
||
// Exclude CEA708 CC data.
|
||
actualCCBytes[ccType].push(ccbyte1);
|
||
actualCCBytes[ccType].push(ccbyte2);
|
||
}
|
||
}
|
||
}
|
||
|
||
return actualCCBytes;
|
||
};
|
||
|
||
return TimelineController;
|
||
}();
|
||
|
||
function canReuseVttTextTrack(inUseTrack, manifestTrack) {
|
||
return inUseTrack && inUseTrack.label === manifestTrack.name && !(inUseTrack.textTrack1 || inUseTrack.textTrack2);
|
||
}
|
||
|
||
function intersection(x1, x2, y1, y2) {
|
||
return Math.min(x2, y2) - Math.max(x1, y1);
|
||
}
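// Worked example (for reference): intersection(0, 10, 5, 20) === Math.min(10, 20) - Math.max(0, 5) === 5,
// i.e. the ranges [0, 10] and [5, 20] overlap by 5 units. Disjoint ranges give a negative result
// (intersection(0, 1, 2, 3) === -1), which is why addCues() treats overlap >= 0 as touching or overlapping.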
|
||
|
||
function newVTTCCs() {
|
||
return {
|
||
ccOffset: 0,
|
||
presentationOffset: 0,
|
||
0: {
|
||
start: 0,
|
||
prevCC: -1,
|
||
new: false
|
||
}
|
||
};
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/crypt/aes-crypto.ts":
|
||
/*!*********************************!*\
|
||
!*** ./src/crypt/aes-crypto.ts ***!
|
||
\*********************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return AESCrypto; });
|
||
var AESCrypto = /*#__PURE__*/function () {
|
||
function AESCrypto(subtle, iv) {
|
||
this.subtle = void 0;
|
||
this.aesIV = void 0;
|
||
this.subtle = subtle;
|
||
this.aesIV = iv;
|
||
}
|
||
|
||
var _proto = AESCrypto.prototype;
|
||
|
||
_proto.decrypt = function decrypt(data, key) {
|
||
return this.subtle.decrypt({
|
||
name: 'AES-CBC',
|
||
iv: this.aesIV
|
||
}, key, data);
|
||
};
|
||
|
||
return AESCrypto;
|
||
}();
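// Illustrative usage sketch (not executed; variable names are hypothetical): given a SubtleCrypto
// instance, a 16-byte IV and an AES-CBC CryptoKey, decrypt() resolves to an ArrayBuffer of plaintext:
//
//   var aes = new AESCrypto(self.crypto.subtle, ivUint8Array);
//   aes.decrypt(encryptedArrayBuffer, cryptoKey).then(function (clearBuffer) { /* ... */ });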
/***/ }),
|
||
|
||
/***/ "./src/crypt/aes-decryptor.ts":
|
||
/*!************************************!*\
|
||
!*** ./src/crypt/aes-decryptor.ts ***!
|
||
\************************************/
|
||
/*! exports provided: removePadding, default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "removePadding", function() { return removePadding; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return AESDecryptor; });
|
||
/* harmony import */ var _utils_typed_array__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/typed-array */ "./src/utils/typed-array.ts");
|
||
// PKCS7
|
||
|
||
function removePadding(array) {
|
||
var outputBytes = array.byteLength;
|
||
var paddingBytes = outputBytes && new DataView(array.buffer).getUint8(outputBytes - 1);
|
||
|
||
if (paddingBytes) {
|
||
return Object(_utils_typed_array__WEBPACK_IMPORTED_MODULE_0__["sliceUint8"])(array, 0, outputBytes - paddingBytes);
|
||
}
|
||
|
||
return array;
|
||
}
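// Worked example (for reference): with PKCS7 the last byte encodes the padding length, so a 32-byte
// buffer ending in 0x04 0x04 0x04 0x04 is trimmed to its first 28 bytes. If the last byte is 0x00, or
// the input is empty, `paddingBytes` is falsy and the array is returned unchanged.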
|
||
|
||
var AESDecryptor = /*#__PURE__*/function () {
|
||
function AESDecryptor() {
|
||
this.rcon = [0x0, 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36];
|
||
this.subMix = [new Uint32Array(256), new Uint32Array(256), new Uint32Array(256), new Uint32Array(256)];
|
||
this.invSubMix = [new Uint32Array(256), new Uint32Array(256), new Uint32Array(256), new Uint32Array(256)];
|
||
this.sBox = new Uint32Array(256);
|
||
this.invSBox = new Uint32Array(256);
|
||
this.key = new Uint32Array(0);
|
||
this.ksRows = 0;
|
||
this.keySize = 0;
|
||
this.keySchedule = void 0;
|
||
this.invKeySchedule = void 0;
|
||
this.initTable();
|
||
} // Using view.getUint32() also swaps the byte order.
|
||
|
||
|
||
var _proto = AESDecryptor.prototype;
|
||
|
||
_proto.uint8ArrayToUint32Array_ = function uint8ArrayToUint32Array_(arrayBuffer) {
|
||
var view = new DataView(arrayBuffer);
|
||
var newArray = new Uint32Array(4);
|
||
|
||
for (var i = 0; i < 4; i++) {
|
||
newArray[i] = view.getUint32(i * 4);
|
||
}
|
||
|
||
return newArray;
|
||
};
|
||
|
||
_proto.initTable = function initTable() {
|
||
var sBox = this.sBox;
|
||
var invSBox = this.invSBox;
|
||
var subMix = this.subMix;
|
||
var subMix0 = subMix[0];
|
||
var subMix1 = subMix[1];
|
||
var subMix2 = subMix[2];
|
||
var subMix3 = subMix[3];
|
||
var invSubMix = this.invSubMix;
|
||
var invSubMix0 = invSubMix[0];
|
||
var invSubMix1 = invSubMix[1];
|
||
var invSubMix2 = invSubMix[2];
|
||
var invSubMix3 = invSubMix[3];
|
||
var d = new Uint32Array(256);
|
||
var x = 0;
|
||
var xi = 0;
|
||
var i = 0;
|
||
|
||
for (i = 0; i < 256; i++) {
|
||
if (i < 128) {
|
||
d[i] = i << 1;
|
||
} else {
|
||
d[i] = i << 1 ^ 0x11b;
|
||
}
|
||
}
|
||
|
||
for (i = 0; i < 256; i++) {
|
||
var sx = xi ^ xi << 1 ^ xi << 2 ^ xi << 3 ^ xi << 4;
|
||
sx = sx >>> 8 ^ sx & 0xff ^ 0x63;
|
||
sBox[x] = sx;
|
||
invSBox[sx] = x; // Compute multiplication
|
||
|
||
var x2 = d[x];
|
||
var x4 = d[x2];
|
||
var x8 = d[x4]; // Compute sub/invSub bytes, mix columns tables
|
||
|
||
var t = d[sx] * 0x101 ^ sx * 0x1010100;
|
||
subMix0[x] = t << 24 | t >>> 8;
|
||
subMix1[x] = t << 16 | t >>> 16;
|
||
subMix2[x] = t << 8 | t >>> 24;
|
||
subMix3[x] = t; // Compute inv sub bytes, inv mix columns tables
|
||
|
||
t = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
|
||
invSubMix0[sx] = t << 24 | t >>> 8;
|
||
invSubMix1[sx] = t << 16 | t >>> 16;
|
||
invSubMix2[sx] = t << 8 | t >>> 24;
|
||
invSubMix3[sx] = t; // Compute next counter
|
||
|
||
if (!x) {
|
||
x = xi = 1;
|
||
} else {
|
||
x = x2 ^ d[d[d[x8 ^ x2]]];
|
||
xi ^= d[d[xi]];
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.expandKey = function expandKey(keyBuffer) {
|
||
// convert keyBuffer to Uint32Array
|
||
var key = this.uint8ArrayToUint32Array_(keyBuffer);
|
||
var sameKey = true;
|
||
var offset = 0;
|
||
|
||
while (offset < key.length && sameKey) {
|
||
sameKey = key[offset] === this.key[offset];
|
||
offset++;
|
||
}
|
||
|
||
if (sameKey) {
|
||
return;
|
||
}
|
||
|
||
this.key = key;
|
||
var keySize = this.keySize = key.length;
|
||
|
||
if (keySize !== 4 && keySize !== 6 && keySize !== 8) {
|
||
throw new Error('Invalid aes key size=' + keySize);
|
||
}
|
||
|
||
var ksRows = this.ksRows = (keySize + 6 + 1) * 4;
|
||
var ksRow;
|
||
var invKsRow;
|
||
var keySchedule = this.keySchedule = new Uint32Array(ksRows);
|
||
var invKeySchedule = this.invKeySchedule = new Uint32Array(ksRows);
|
||
var sbox = this.sBox;
|
||
var rcon = this.rcon;
|
||
var invSubMix = this.invSubMix;
|
||
var invSubMix0 = invSubMix[0];
|
||
var invSubMix1 = invSubMix[1];
|
||
var invSubMix2 = invSubMix[2];
|
||
var invSubMix3 = invSubMix[3];
|
||
var prev;
|
||
var t;
|
||
|
||
for (ksRow = 0; ksRow < ksRows; ksRow++) {
|
||
if (ksRow < keySize) {
|
||
prev = keySchedule[ksRow] = key[ksRow];
|
||
continue;
|
||
}
|
||
|
||
t = prev;
|
||
|
||
if (ksRow % keySize === 0) {
|
||
// Rot word
|
||
t = t << 8 | t >>> 24; // Sub word
|
||
|
||
t = sbox[t >>> 24] << 24 | sbox[t >>> 16 & 0xff] << 16 | sbox[t >>> 8 & 0xff] << 8 | sbox[t & 0xff]; // Mix Rcon
|
||
|
||
t ^= rcon[ksRow / keySize | 0] << 24;
|
||
} else if (keySize > 6 && ksRow % keySize === 4) {
|
||
// Sub word
|
||
t = sbox[t >>> 24] << 24 | sbox[t >>> 16 & 0xff] << 16 | sbox[t >>> 8 & 0xff] << 8 | sbox[t & 0xff];
|
||
}
|
||
|
||
keySchedule[ksRow] = prev = (keySchedule[ksRow - keySize] ^ t) >>> 0;
|
||
}
|
||
|
||
for (invKsRow = 0; invKsRow < ksRows; invKsRow++) {
|
||
ksRow = ksRows - invKsRow;
|
||
|
||
if (invKsRow & 3) {
|
||
t = keySchedule[ksRow];
|
||
} else {
|
||
t = keySchedule[ksRow - 4];
|
||
}
|
||
|
||
if (invKsRow < 4 || ksRow <= 4) {
|
||
invKeySchedule[invKsRow] = t;
|
||
} else {
|
||
invKeySchedule[invKsRow] = invSubMix0[sbox[t >>> 24]] ^ invSubMix1[sbox[t >>> 16 & 0xff]] ^ invSubMix2[sbox[t >>> 8 & 0xff]] ^ invSubMix3[sbox[t & 0xff]];
|
||
}
|
||
|
||
invKeySchedule[invKsRow] = invKeySchedule[invKsRow] >>> 0;
|
||
}
|
||
} // Adding this as a method greatly improves performance.
|
||
;
|
||
|
||
_proto.networkToHostOrderSwap = function networkToHostOrderSwap(word) {
|
||
return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
|
||
};
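// Worked example (for reference): networkToHostOrderSwap(0x11223344) === 0x44332211, i.e. the four
// bytes of the word are reversed. decrypt() below applies it to every word read from the input
// Int32Array and again before writing results, matching the big-endian words produced by
// uint8ArrayToUint32Array_() (DataView.getUint32) for the key schedule and IV.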
|
||
|
||
_proto.decrypt = function decrypt(inputArrayBuffer, offset, aesIV) {
|
||
var nRounds = this.keySize + 6;
|
||
var invKeySchedule = this.invKeySchedule;
|
||
var invSBOX = this.invSBox;
|
||
var invSubMix = this.invSubMix;
|
||
var invSubMix0 = invSubMix[0];
|
||
var invSubMix1 = invSubMix[1];
|
||
var invSubMix2 = invSubMix[2];
|
||
var invSubMix3 = invSubMix[3];
|
||
var initVector = this.uint8ArrayToUint32Array_(aesIV);
|
||
var initVector0 = initVector[0];
|
||
var initVector1 = initVector[1];
|
||
var initVector2 = initVector[2];
|
||
var initVector3 = initVector[3];
|
||
var inputInt32 = new Int32Array(inputArrayBuffer);
|
||
var outputInt32 = new Int32Array(inputInt32.length);
|
||
var t0, t1, t2, t3;
|
||
var s0, s1, s2, s3;
|
||
var inputWords0, inputWords1, inputWords2, inputWords3;
|
||
var ksRow, i;
|
||
var swapWord = this.networkToHostOrderSwap;
|
||
|
||
while (offset < inputInt32.length) {
|
||
inputWords0 = swapWord(inputInt32[offset]);
|
||
inputWords1 = swapWord(inputInt32[offset + 1]);
|
||
inputWords2 = swapWord(inputInt32[offset + 2]);
|
||
inputWords3 = swapWord(inputInt32[offset + 3]);
|
||
s0 = inputWords0 ^ invKeySchedule[0];
|
||
s1 = inputWords3 ^ invKeySchedule[1];
|
||
s2 = inputWords2 ^ invKeySchedule[2];
|
||
s3 = inputWords1 ^ invKeySchedule[3];
|
||
ksRow = 4; // Iterate through the rounds of decryption
|
||
|
||
for (i = 1; i < nRounds; i++) {
|
||
t0 = invSubMix0[s0 >>> 24] ^ invSubMix1[s1 >> 16 & 0xff] ^ invSubMix2[s2 >> 8 & 0xff] ^ invSubMix3[s3 & 0xff] ^ invKeySchedule[ksRow];
|
||
t1 = invSubMix0[s1 >>> 24] ^ invSubMix1[s2 >> 16 & 0xff] ^ invSubMix2[s3 >> 8 & 0xff] ^ invSubMix3[s0 & 0xff] ^ invKeySchedule[ksRow + 1];
|
||
t2 = invSubMix0[s2 >>> 24] ^ invSubMix1[s3 >> 16 & 0xff] ^ invSubMix2[s0 >> 8 & 0xff] ^ invSubMix3[s1 & 0xff] ^ invKeySchedule[ksRow + 2];
|
||
t3 = invSubMix0[s3 >>> 24] ^ invSubMix1[s0 >> 16 & 0xff] ^ invSubMix2[s1 >> 8 & 0xff] ^ invSubMix3[s2 & 0xff] ^ invKeySchedule[ksRow + 3]; // Update state
|
||
|
||
s0 = t0;
|
||
s1 = t1;
|
||
s2 = t2;
|
||
s3 = t3;
|
||
ksRow = ksRow + 4;
|
||
} // Shift rows, sub bytes, add round key
|
||
|
||
|
||
t0 = invSBOX[s0 >>> 24] << 24 ^ invSBOX[s1 >> 16 & 0xff] << 16 ^ invSBOX[s2 >> 8 & 0xff] << 8 ^ invSBOX[s3 & 0xff] ^ invKeySchedule[ksRow];
|
||
t1 = invSBOX[s1 >>> 24] << 24 ^ invSBOX[s2 >> 16 & 0xff] << 16 ^ invSBOX[s3 >> 8 & 0xff] << 8 ^ invSBOX[s0 & 0xff] ^ invKeySchedule[ksRow + 1];
|
||
t2 = invSBOX[s2 >>> 24] << 24 ^ invSBOX[s3 >> 16 & 0xff] << 16 ^ invSBOX[s0 >> 8 & 0xff] << 8 ^ invSBOX[s1 & 0xff] ^ invKeySchedule[ksRow + 2];
|
||
t3 = invSBOX[s3 >>> 24] << 24 ^ invSBOX[s0 >> 16 & 0xff] << 16 ^ invSBOX[s1 >> 8 & 0xff] << 8 ^ invSBOX[s2 & 0xff] ^ invKeySchedule[ksRow + 3]; // Write
|
||
|
||
outputInt32[offset] = swapWord(t0 ^ initVector0);
|
||
outputInt32[offset + 1] = swapWord(t3 ^ initVector1);
|
||
outputInt32[offset + 2] = swapWord(t2 ^ initVector2);
|
||
outputInt32[offset + 3] = swapWord(t1 ^ initVector3); // reset initVector to last 4 unsigned int
|
||
|
||
initVector0 = inputWords0;
|
||
initVector1 = inputWords1;
|
||
initVector2 = inputWords2;
|
||
initVector3 = inputWords3;
|
||
offset = offset + 4;
|
||
}
|
||
|
||
return outputInt32.buffer;
|
||
};
|
||
|
||
return AESDecryptor;
|
||
}();
/***/ }),
|
||
|
||
/***/ "./src/crypt/decrypter.ts":
|
||
/*!********************************!*\
|
||
!*** ./src/crypt/decrypter.ts ***!
|
||
\********************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return Decrypter; });
|
||
/* harmony import */ var _aes_crypto__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./aes-crypto */ "./src/crypt/aes-crypto.ts");
|
||
/* harmony import */ var _fast_aes_key__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./fast-aes-key */ "./src/crypt/fast-aes-key.ts");
|
||
/* harmony import */ var _aes_decryptor__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./aes-decryptor */ "./src/crypt/aes-decryptor.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
|
||
/* harmony import */ var _utils_typed_array__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils/typed-array */ "./src/utils/typed-array.ts");
var CHUNK_SIZE = 16; // 16 bytes, 128 bits
|
||
|
||
var Decrypter = /*#__PURE__*/function () {
|
||
function Decrypter(observer, config, _temp) {
|
||
var _ref = _temp === void 0 ? {} : _temp,
|
||
_ref$removePKCS7Paddi = _ref.removePKCS7Padding,
|
||
removePKCS7Padding = _ref$removePKCS7Paddi === void 0 ? true : _ref$removePKCS7Paddi;
|
||
|
||
this.logEnabled = true;
|
||
this.observer = void 0;
|
||
this.config = void 0;
|
||
this.removePKCS7Padding = void 0;
|
||
this.subtle = null;
|
||
this.softwareDecrypter = null;
|
||
this.key = null;
|
||
this.fastAesKey = null;
|
||
this.remainderData = null;
|
||
this.currentIV = null;
|
||
this.currentResult = null;
|
||
this.observer = observer;
|
||
this.config = config;
|
||
this.removePKCS7Padding = removePKCS7Padding; // built-in decryptor expects PKCS7 padding
|
||
|
||
if (removePKCS7Padding) {
|
||
try {
|
||
var browserCrypto = self.crypto;
|
||
|
||
if (browserCrypto) {
|
||
this.subtle = browserCrypto.subtle || browserCrypto.webkitSubtle;
|
||
}
|
||
} catch (e) {
|
||
/* no-op */
|
||
}
|
||
}
|
||
|
||
if (this.subtle === null) {
|
||
this.config.enableSoftwareAES = true;
|
||
}
|
||
}
|
||
|
||
var _proto = Decrypter.prototype;
|
||
|
||
_proto.destroy = function destroy() {
|
||
// @ts-ignore
|
||
this.observer = null;
|
||
};
|
||
|
||
_proto.isSync = function isSync() {
|
||
return this.config.enableSoftwareAES;
|
||
};
|
||
|
||
_proto.flush = function flush() {
|
||
var currentResult = this.currentResult;
|
||
|
||
if (!currentResult) {
|
||
this.reset();
|
||
return;
|
||
}
|
||
|
||
var data = new Uint8Array(currentResult);
|
||
this.reset();
|
||
|
||
if (this.removePKCS7Padding) {
|
||
return Object(_aes_decryptor__WEBPACK_IMPORTED_MODULE_2__["removePadding"])(data);
|
||
}
|
||
|
||
return data;
|
||
};
|
||
|
||
_proto.reset = function reset() {
|
||
this.currentResult = null;
|
||
this.currentIV = null;
|
||
this.remainderData = null;
|
||
|
||
if (this.softwareDecrypter) {
|
||
this.softwareDecrypter = null;
|
||
}
|
||
};
|
||
|
||
_proto.decrypt = function decrypt(data, key, iv, callback) {
|
||
if (this.config.enableSoftwareAES) {
|
||
this.softwareDecrypt(new Uint8Array(data), key, iv);
|
||
var decryptResult = this.flush();
|
||
|
||
if (decryptResult) {
|
||
callback(decryptResult.buffer);
|
||
}
|
||
} else {
|
||
this.webCryptoDecrypt(new Uint8Array(data), key, iv).then(callback);
|
||
}
|
||
};
|
||
|
||
_proto.softwareDecrypt = function softwareDecrypt(data, key, iv) {
|
||
var currentIV = this.currentIV,
|
||
currentResult = this.currentResult,
|
||
remainderData = this.remainderData;
|
||
this.logOnce('JS AES decrypt'); // The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
|
||
// This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
|
||
// the end on flush(), but by that time we have already received all bytes for the segment.
|
||
// Progressive decryption does not work with WebCrypto
|
||
|
||
if (remainderData) {
|
||
data = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_4__["appendUint8Array"])(remainderData, data);
|
||
this.remainderData = null;
|
||
} // Byte length must be a multiple of 16 (AES-128 = 128 bit blocks = 16 bytes)
|
||
|
||
|
||
var currentChunk = this.getValidChunk(data);
|
||
|
||
if (!currentChunk.length) {
|
||
return null;
|
||
}
|
||
|
||
if (currentIV) {
|
||
iv = currentIV;
|
||
}
|
||
|
||
var softwareDecrypter = this.softwareDecrypter;
|
||
|
||
if (!softwareDecrypter) {
|
||
softwareDecrypter = this.softwareDecrypter = new _aes_decryptor__WEBPACK_IMPORTED_MODULE_2__["default"]();
|
||
}
|
||
|
||
softwareDecrypter.expandKey(key);
|
||
var result = currentResult;
|
||
this.currentResult = softwareDecrypter.decrypt(currentChunk.buffer, 0, iv);
|
||
this.currentIV = Object(_utils_typed_array__WEBPACK_IMPORTED_MODULE_5__["sliceUint8"])(currentChunk, -16).buffer;
|
||
|
||
if (!result) {
|
||
return null;
|
||
}
|
||
|
||
return result;
|
||
};
|
||
|
||
_proto.webCryptoDecrypt = function webCryptoDecrypt(data, key, iv) {
|
||
var _this = this;
|
||
|
||
var subtle = this.subtle;
|
||
|
||
if (this.key !== key || !this.fastAesKey) {
|
||
this.key = key;
|
||
this.fastAesKey = new _fast_aes_key__WEBPACK_IMPORTED_MODULE_1__["default"](subtle, key);
|
||
}
|
||
|
||
return this.fastAesKey.expandKey().then(function (aesKey) {
|
||
// decrypt using web crypto
|
||
if (!subtle) {
|
||
return Promise.reject(new Error('web crypto not initialized'));
|
||
}
|
||
|
||
var crypto = new _aes_crypto__WEBPACK_IMPORTED_MODULE_0__["default"](subtle, iv);
|
||
return crypto.decrypt(data.buffer, aesKey);
|
||
}).catch(function (err) {
|
||
return _this.onWebCryptoError(err, data, key, iv);
|
||
});
|
||
};
|
||
|
||
_proto.onWebCryptoError = function onWebCryptoError(err, data, key, iv) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn('[decrypter.ts]: WebCrypto Error, disable WebCrypto API:', err);
|
||
this.config.enableSoftwareAES = true;
|
||
this.logEnabled = true;
|
||
return this.softwareDecrypt(data, key, iv);
|
||
};
|
||
|
||
_proto.getValidChunk = function getValidChunk(data) {
|
||
var currentChunk = data;
|
||
var splitPoint = data.length - data.length % CHUNK_SIZE;
|
||
|
||
if (splitPoint !== data.length) {
|
||
currentChunk = Object(_utils_typed_array__WEBPACK_IMPORTED_MODULE_5__["sliceUint8"])(data, 0, splitPoint);
|
||
this.remainderData = Object(_utils_typed_array__WEBPACK_IMPORTED_MODULE_5__["sliceUint8"])(data, splitPoint);
|
||
}
|
||
|
||
return currentChunk;
|
||
};
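// Worked example (for reference): AES-128-CBC works on 16-byte blocks, so for a 100-byte input
// getValidChunk() returns the first 96 bytes (100 - 100 % 16) and stores the trailing 4 bytes in
// this.remainderData, which softwareDecrypt() prepends to the next chunk it receives.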
|
||
|
||
_proto.logOnce = function logOnce(msg) {
|
||
if (!this.logEnabled) {
|
||
return;
|
||
}
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].log("[decrypter.ts]: " + msg);
|
||
this.logEnabled = false;
|
||
};
|
||
|
||
return Decrypter;
|
||
}();
/***/ }),
|
||
|
||
/***/ "./src/crypt/fast-aes-key.ts":
|
||
/*!***********************************!*\
|
||
!*** ./src/crypt/fast-aes-key.ts ***!
|
||
\***********************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return FastAESKey; });
|
||
var FastAESKey = /*#__PURE__*/function () {
|
||
function FastAESKey(subtle, key) {
|
||
this.subtle = void 0;
|
||
this.key = void 0;
|
||
this.subtle = subtle;
|
||
this.key = key;
|
||
}
|
||
|
||
var _proto = FastAESKey.prototype;
|
||
|
||
_proto.expandKey = function expandKey() {
|
||
return this.subtle.importKey('raw', this.key, {
|
||
name: 'AES-CBC'
|
||
}, false, ['encrypt', 'decrypt']);
|
||
};
|
||
|
||
return FastAESKey;
|
||
}();
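// Illustrative usage sketch (not executed; variable names are hypothetical): expandKey() resolves to a
// CryptoKey for AES-CBC via SubtleCrypto.importKey('raw', ...):
//
//   var fastKey = new FastAESKey(self.crypto.subtle, rawKeyArrayBuffer);
//   fastKey.expandKey().then(function (cryptoKey) { /* hand the CryptoKey to AESCrypto#decrypt */ });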
/***/ }),
|
||
|
||
/***/ "./src/demux/aacdemuxer.ts":
|
||
/*!*********************************!*\
|
||
!*** ./src/demux/aacdemuxer.ts ***!
|
||
\*********************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _base_audio_demuxer__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./base-audio-demuxer */ "./src/demux/base-audio-demuxer.ts");
|
||
/* harmony import */ var _adts__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./adts */ "./src/demux/adts.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _demux_id3__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../demux/id3 */ "./src/demux/id3.ts");
|
||
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
|
||
|
||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
|
||
|
||
/**
|
||
* AAC demuxer
|
||
*/
var AACDemuxer = /*#__PURE__*/function (_BaseAudioDemuxer) {
|
||
_inheritsLoose(AACDemuxer, _BaseAudioDemuxer);
|
||
|
||
function AACDemuxer(observer, config) {
|
||
var _this;
|
||
|
||
_this = _BaseAudioDemuxer.call(this) || this;
|
||
_this.observer = void 0;
|
||
_this.config = void 0;
|
||
_this.observer = observer;
|
||
_this.config = config;
|
||
return _this;
|
||
}
|
||
|
||
var _proto = AACDemuxer.prototype;
|
||
|
||
_proto.resetInitSegment = function resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) {
|
||
_BaseAudioDemuxer.prototype.resetInitSegment.call(this, initSegment, audioCodec, videoCodec, trackDuration);
|
||
|
||
this._audioTrack = {
|
||
container: 'audio/adts',
|
||
type: 'audio',
|
||
id: 2,
|
||
pid: -1,
|
||
sequenceNumber: 0,
|
||
isAAC: true,
|
||
samples: [],
|
||
manifestCodec: audioCodec,
|
||
duration: trackDuration,
|
||
inputTimeScale: 90000,
|
||
dropped: 0
|
||
};
|
||
} // Source for probe info - https://wiki.multimedia.cx/index.php?title=ADTS
|
||
;
|
||
|
||
AACDemuxer.probe = function probe(data) {
|
||
if (!data) {
|
||
return false;
|
||
} // Check for the ADTS sync word
|
||
// Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
|
||
// Layer bits (position 14 and 15) in header should be always 0 for ADTS
|
||
// More info https://wiki.multimedia.cx/index.php?title=ADTS
|
||
|
||
|
||
var id3Data = _demux_id3__WEBPACK_IMPORTED_MODULE_3__["getID3Data"](data, 0) || [];
|
||
var offset = id3Data.length;
|
||
|
||
for (var length = data.length; offset < length; offset++) {
|
||
if (_adts__WEBPACK_IMPORTED_MODULE_1__["probe"](data, offset)) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('ADTS sync word found !');
|
||
return true;
|
||
}
|
||
}
|
||
|
||
return false;
|
||
};
|
||
|
||
_proto.canParse = function canParse(data, offset) {
|
||
return _adts__WEBPACK_IMPORTED_MODULE_1__["canParse"](data, offset);
|
||
};
|
||
|
||
_proto.appendFrame = function appendFrame(track, data, offset) {
|
||
_adts__WEBPACK_IMPORTED_MODULE_1__["initTrackConfig"](track, this.observer, data, offset, track.manifestCodec);
|
||
var frame = _adts__WEBPACK_IMPORTED_MODULE_1__["appendFrame"](track, data, offset, this.initPTS, this.frameIndex);
|
||
|
||
if (frame && frame.missing === 0) {
|
||
return frame;
|
||
}
|
||
};
|
||
|
||
return AACDemuxer;
|
||
}(_base_audio_demuxer__WEBPACK_IMPORTED_MODULE_0__["default"]);
|
||
|
||
AACDemuxer.minProbeByteLength = 9;
|
||
/* harmony default export */ __webpack_exports__["default"] = (AACDemuxer);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/adts.ts":
|
||
/*!***************************!*\
|
||
!*** ./src/demux/adts.ts ***!
|
||
\***************************/
|
||
/*! exports provided: getAudioConfig, isHeaderPattern, getHeaderLength, getFullFrameLength, canGetFrameLength, isHeader, canParse, probe, initTrackConfig, getFrameDuration, parseFrameHeader, appendFrame */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getAudioConfig", function() { return getAudioConfig; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isHeaderPattern", function() { return isHeaderPattern; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getHeaderLength", function() { return getHeaderLength; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getFullFrameLength", function() { return getFullFrameLength; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "canGetFrameLength", function() { return canGetFrameLength; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isHeader", function() { return isHeader; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "canParse", function() { return canParse; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "probe", function() { return probe; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "initTrackConfig", function() { return initTrackConfig; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getFrameDuration", function() { return getFrameDuration; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseFrameHeader", function() { return parseFrameHeader; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "appendFrame", function() { return appendFrame; });
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/**
|
||
* ADTS parser helper
|
||
* @link https://wiki.multimedia.cx/index.php?title=ADTS
|
||
*/
function getAudioConfig(observer, data, offset, audioCodec) {
|
||
var adtsObjectType;
|
||
var adtsExtensionSamplingIndex;
|
||
var adtsChanelConfig;
|
||
var config;
|
||
var userAgent = navigator.userAgent.toLowerCase();
|
||
var manifestCodec = audioCodec;
|
||
var adtsSampleingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350]; // byte 2
|
||
|
||
adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
|
||
var adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
|
||
|
||
if (adtsSamplingIndex > adtsSampleingRates.length - 1) {
|
||
observer.trigger(_events__WEBPACK_IMPORTED_MODULE_2__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_PARSING_ERROR,
|
||
fatal: true,
|
||
reason: "invalid ADTS sampling index:" + adtsSamplingIndex
|
||
});
|
||
return;
|
||
}
|
||
|
||
adtsChanelConfig = (data[offset + 2] & 0x01) << 2; // byte 3
|
||
|
||
adtsChanelConfig |= (data[offset + 3] & 0xc0) >>> 6;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].log("manifest codec:" + audioCodec + ", ADTS type:" + adtsObjectType + ", samplingIndex:" + adtsSamplingIndex); // firefox: freq less than 24kHz = AAC SBR (HE-AAC)
|
||
|
||
if (/firefox/i.test(userAgent)) {
|
||
if (adtsSamplingIndex >= 6) {
|
||
adtsObjectType = 5;
|
||
config = new Array(4); // HE-AAC uses SBR (Spectral Band Replication), high frequencies are constructed from low frequencies
// there is a factor 2 between frame sample rate and output sample rate
// multiply frequency by 2 (see table below, equivalent to subtract 3)
|
||
|
||
adtsExtensionSamplingIndex = adtsSamplingIndex - 3;
|
||
} else {
|
||
adtsObjectType = 2;
|
||
config = new Array(2);
|
||
adtsExtensionSamplingIndex = adtsSamplingIndex;
|
||
} // Android : always use AAC
|
||
|
||
} else if (userAgent.indexOf('android') !== -1) {
|
||
adtsObjectType = 2;
|
||
config = new Array(2);
|
||
adtsExtensionSamplingIndex = adtsSamplingIndex;
|
||
} else {
|
||
/* for other browsers (Chrome/Vivaldi/Opera ...)
|
||
always force audio type to be HE-AAC SBR, as some browsers do not support audio codec switch properly (like Chrome ...)
|
||
*/
|
||
adtsObjectType = 5;
|
||
config = new Array(4); // if (manifest codec is HE-AAC or HE-AACv2) OR (manifest codec not specified AND frequency less than 24kHz)
|
||
|
||
if (audioCodec && (audioCodec.indexOf('mp4a.40.29') !== -1 || audioCodec.indexOf('mp4a.40.5') !== -1) || !audioCodec && adtsSamplingIndex >= 6) {
|
||
// HE-AAC uses SBR (Spectral Band Replication), high frequencies are constructed from low frequencies
// there is a factor 2 between frame sample rate and output sample rate
// multiply frequency by 2 (see table below, equivalent to subtract 3)
|
||
adtsExtensionSamplingIndex = adtsSamplingIndex - 3;
|
||
} else {
|
||
// if (manifest codec is AAC) AND (frequency less than 24kHz AND nb channel is 1) OR (manifest codec not specified and mono audio)
|
||
// Chrome fails to play back with low frequency AAC LC mono when initialized with HE-AAC. This is not a problem with stereo.
|
||
if (audioCodec && audioCodec.indexOf('mp4a.40.2') !== -1 && (adtsSamplingIndex >= 6 && adtsChanelConfig === 1 || /vivaldi/i.test(userAgent)) || !audioCodec && adtsChanelConfig === 1) {
|
||
adtsObjectType = 2;
|
||
config = new Array(2);
|
||
}
|
||
|
||
adtsExtensionSamplingIndex = adtsSamplingIndex;
|
||
}
|
||
}
|
||
/* refer to http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
|
||
ISO 14496-3 (AAC).pdf - Table 1.13 — Syntax of AudioSpecificConfig()
|
||
Audio Profile / Audio Object Type
|
||
0: Null
|
||
1: AAC Main
|
||
2: AAC LC (Low Complexity)
|
||
3: AAC SSR (Scalable Sample Rate)
|
||
4: AAC LTP (Long Term Prediction)
|
||
5: SBR (Spectral Band Replication)
|
||
6: AAC Scalable
|
||
sampling freq
|
||
0: 96000 Hz
|
||
1: 88200 Hz
|
||
2: 64000 Hz
|
||
3: 48000 Hz
|
||
4: 44100 Hz
|
||
5: 32000 Hz
|
||
6: 24000 Hz
|
||
7: 22050 Hz
|
||
8: 16000 Hz
|
||
9: 12000 Hz
|
||
10: 11025 Hz
|
||
11: 8000 Hz
|
||
12: 7350 Hz
|
||
13: Reserved
|
||
14: Reserved
|
||
15: frequency is written explicitly
Channel Configurations
These are the channel configurations:
0: Defined in AOT Specific Config
|
||
1: 1 channel: front-center
|
||
2: 2 channels: front-left, front-right
|
||
*/
|
||
// audioObjectType = profile => profile, the MPEG-4 Audio Object Type minus 1
|
||
|
||
|
||
config[0] = adtsObjectType << 3; // samplingFrequencyIndex
|
||
|
||
config[0] |= (adtsSamplingIndex & 0x0e) >> 1;
|
||
config[1] |= (adtsSamplingIndex & 0x01) << 7; // channelConfiguration
|
||
|
||
config[1] |= adtsChanelConfig << 3;
|
||
|
||
if (adtsObjectType === 5) {
// adtsExtensionSamplingIndex
config[1] |= (adtsExtensionSamplingIndex & 0x0e) >> 1;
config[2] = (adtsExtensionSamplingIndex & 0x01) << 7; // adtsObjectType (forced to 2; Chrome checks that the object type is less than 5,
// see https://chromium.googlesource.com/chromium/src.git/+/master/media/formats/mp4/aac.cc)
config[2] |= 2 << 2;
config[3] = 0;
}
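// For illustration: with AAC-LC (object type 2), 44.1 kHz (sampling index 4) and stereo
// (channel config 2), the packing above yields config[0] = (2 << 3) | ((4 & 0x0e) >> 1) = 0x12
// and config[1] = ((4 & 0x01) << 7) | (2 << 3) = 0x10, i.e. the AudioSpecificConfig bytes [0x12, 0x10].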
return {
|
||
config: config,
|
||
samplerate: adtsSampleingRates[adtsSamplingIndex],
|
||
channelCount: adtsChanelConfig,
|
||
codec: 'mp4a.40.' + adtsObjectType,
|
||
manifestCodec: manifestCodec
|
||
};
|
||
}
|
||
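// The two-byte check below matches the ADTS syncword: 0xff followed by 1111 X00X. The 0xf6 mask
// ignores the MPEG version bit and the protection_absent bit but requires the layer bits to be 00,
// so both 0xff 0xf1 (MPEG-4, no CRC) and 0xff 0xf9 (MPEG-2, no CRC) match.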
function isHeaderPattern(data, offset) {
|
||
return data[offset] === 0xff && (data[offset + 1] & 0xf6) === 0xf0;
|
||
}
|
||
function getHeaderLength(data, offset) {
|
||
return data[offset + 1] & 0x01 ? 7 : 9;
|
||
}
|
||
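// The ADTS frame_length field is 13 bits spread across header bytes 3-5: the low 2 bits of byte 3,
// all 8 bits of byte 4 and the top 3 bits of byte 5, which is what the shifts below extract.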
function getFullFrameLength(data, offset) {
|
||
return (data[offset + 3] & 0x03) << 11 | data[offset + 4] << 3 | (data[offset + 5] & 0xe0) >>> 5;
|
||
}
|
||
function canGetFrameLength(data, offset) {
|
||
return offset + 5 < data.length;
|
||
}
|
||
function isHeader(data, offset) {
|
||
// Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
|
||
// Layer bits (position 14 and 15) in header should be always 0 for ADTS
|
||
// More info https://wiki.multimedia.cx/index.php?title=ADTS
|
||
return offset + 1 < data.length && isHeaderPattern(data, offset);
|
||
}
|
||
function canParse(data, offset) {
|
||
return canGetFrameLength(data, offset) && isHeaderPattern(data, offset) && getFullFrameLength(data, offset) <= data.length - offset;
|
||
}
|
||
function probe(data, offset) {
|
||
// same as isHeader but we also check that ADTS frame follows last ADTS frame
|
||
// or end of data is reached
|
||
if (isHeader(data, offset)) {
|
||
// ADTS header Length
|
||
var headerLength = getHeaderLength(data, offset);
|
||
|
||
if (offset + headerLength >= data.length) {
|
||
return false;
|
||
} // ADTS frame Length
|
||
|
||
|
||
var frameLength = getFullFrameLength(data, offset);
|
||
|
||
if (frameLength <= headerLength) {
|
||
return false;
|
||
}
|
||
|
||
var newOffset = offset + frameLength;
|
||
return newOffset === data.length || isHeader(data, newOffset);
|
||
}
|
||
|
||
return false;
|
||
}
|
||
function initTrackConfig(track, observer, data, offset, audioCodec) {
|
||
if (!track.samplerate) {
|
||
var config = getAudioConfig(observer, data, offset, audioCodec);
|
||
|
||
if (!config) {
|
||
return;
|
||
}
|
||
|
||
track.config = config.config;
|
||
track.samplerate = config.samplerate;
|
||
track.channelCount = config.channelCount;
|
||
track.codec = config.codec;
|
||
track.manifestCodec = config.manifestCodec;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].log("parsed codec:" + track.codec + ", rate:" + config.samplerate + ", channels:" + config.channelCount);
|
||
}
|
||
}
|
||
function getFrameDuration(samplerate) {
|
||
return 1024 * 90000 / samplerate;
|
||
}
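// e.g. at 44100 Hz each AAC frame (1024 samples) lasts 1024 * 90000 / 44100 ≈ 2090 ticks of
// the 90 kHz MPEG-TS clock, roughly 23.2 ms.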
|
||
function parseFrameHeader(data, offset, pts, frameIndex, frameDuration) {
|
||
// The protection skip bit tells us if we have 2 bytes of CRC data at the end of the ADTS header
|
||
var headerLength = getHeaderLength(data, offset); // retrieve frame size
|
||
|
||
var frameLength = getFullFrameLength(data, offset);
|
||
frameLength -= headerLength;
|
||
|
||
if (frameLength > 0) {
|
||
var stamp = pts + frameIndex * frameDuration; // logger.log(`AAC frame, offset/length/total/pts:${offset+headerLength}/${frameLength}/${data.byteLength}/${(stamp/90).toFixed(0)}`);
|
||
|
||
return {
|
||
headerLength: headerLength,
|
||
frameLength: frameLength,
|
||
stamp: stamp
|
||
};
|
||
}
|
||
}
|
||
function appendFrame(track, data, offset, pts, frameIndex) {
|
||
var frameDuration = getFrameDuration(track.samplerate);
|
||
var header = parseFrameHeader(data, offset, pts, frameIndex, frameDuration);
|
||
|
||
if (header) {
|
||
var frameLength = header.frameLength,
|
||
headerLength = header.headerLength,
|
||
stamp = header.stamp;
|
||
var length = headerLength + frameLength;
|
||
var missing = Math.max(0, offset + length - data.length); // logger.log(`AAC frame ${frameIndex}, pts:${stamp} length@offset/total: ${frameLength}@${offset+headerLength}/${data.byteLength} missing: ${missing}`);
|
||
|
||
var unit;
|
||
|
||
if (missing) {
|
||
unit = new Uint8Array(length - headerLength);
|
||
unit.set(data.subarray(offset + headerLength, data.length), 0);
|
||
} else {
|
||
unit = data.subarray(offset + headerLength, offset + length);
|
||
}
|
||
|
||
var sample = {
|
||
unit: unit,
|
||
pts: stamp
|
||
};
|
||
|
||
if (!missing) {
|
||
track.samples.push(sample);
|
||
}
|
||
|
||
return {
|
||
sample: sample,
|
||
length: length,
|
||
missing: missing
|
||
};
|
||
}
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/base-audio-demuxer.ts":
|
||
/*!*****************************************!*\
|
||
!*** ./src/demux/base-audio-demuxer.ts ***!
|
||
\*****************************************/
|
||
/*! exports provided: initPTSFn, default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "initPTSFn", function() { return initPTSFn; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _demux_id3__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../demux/id3 */ "./src/demux/id3.ts");
|
||
/* harmony import */ var _dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./dummy-demuxed-track */ "./src/demux/dummy-demuxed-track.ts");
|
||
/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
|
||
/* harmony import */ var _utils_typed_array__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/typed-array */ "./src/utils/typed-array.ts");
|
||
|
||
|
||
|
||
|
||
|
||
|
||
var BaseAudioDemuxer = /*#__PURE__*/function () {
|
||
function BaseAudioDemuxer() {
|
||
this._audioTrack = void 0;
|
||
this._id3Track = void 0;
|
||
this.frameIndex = 0;
|
||
this.cachedData = null;
|
||
this.initPTS = null;
|
||
}
|
||
|
||
var _proto = BaseAudioDemuxer.prototype;
|
||
|
||
_proto.resetInitSegment = function resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) {
|
||
this._id3Track = {
|
||
type: 'id3',
|
||
id: 3,
|
||
pid: -1,
|
||
inputTimeScale: 90000,
|
||
sequenceNumber: 0,
|
||
samples: [],
|
||
dropped: 0
|
||
};
|
||
};
|
||
|
||
_proto.resetTimeStamp = function resetTimeStamp() {};
|
||
|
||
_proto.resetContiguity = function resetContiguity() {};
|
||
|
||
_proto.canParse = function canParse(data, offset) {
|
||
return false;
|
||
};
|
||
|
||
_proto.appendFrame = function appendFrame(track, data, offset) {} // feed incoming data to the front of the parsing pipeline
|
||
;
|
||
|
||
_proto.demux = function demux(data, timeOffset) {
|
||
if (this.cachedData) {
|
||
data = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_3__["appendUint8Array"])(this.cachedData, data);
|
||
this.cachedData = null;
|
||
}
|
||
|
||
var id3Data = _demux_id3__WEBPACK_IMPORTED_MODULE_1__["getID3Data"](data, 0);
|
||
var offset = id3Data ? id3Data.length : 0;
|
||
var lastDataIndex;
|
||
var pts;
|
||
var track = this._audioTrack;
|
||
var id3Track = this._id3Track;
|
||
var timestamp = id3Data ? _demux_id3__WEBPACK_IMPORTED_MODULE_1__["getTimeStamp"](id3Data) : undefined;
|
||
var length = data.length;
|
||
|
||
if (this.frameIndex === 0 || this.initPTS === null) {
|
||
this.initPTS = initPTSFn(timestamp, timeOffset);
|
||
} // more expressive than alternative: id3Data?.length
|
||
|
||
|
||
if (id3Data && id3Data.length > 0) {
|
||
id3Track.samples.push({
|
||
pts: this.initPTS,
|
||
dts: this.initPTS,
|
||
data: id3Data
|
||
});
|
||
}
|
||
|
||
pts = this.initPTS;
|
||
|
||
while (offset < length) {
|
||
if (this.canParse(data, offset)) {
|
||
var frame = this.appendFrame(track, data, offset);
|
||
|
||
if (frame) {
|
||
this.frameIndex++;
|
||
pts = frame.sample.pts;
|
||
offset += frame.length;
|
||
lastDataIndex = offset;
|
||
} else {
|
||
offset = length;
|
||
}
|
||
} else if (_demux_id3__WEBPACK_IMPORTED_MODULE_1__["canParse"](data, offset)) {
// after ID3.canParse returns true, a call to ID3.getID3Data *should* always return some data
id3Data = _demux_id3__WEBPACK_IMPORTED_MODULE_1__["getID3Data"](data, offset);
id3Track.samples.push({
|
||
pts: pts,
|
||
dts: pts,
|
||
data: id3Data
|
||
});
|
||
offset += id3Data.length;
|
||
lastDataIndex = offset;
|
||
} else {
|
||
offset++;
|
||
}
|
||
|
||
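// If the end of the chunk was reached without landing exactly on a frame boundary, keep the
// trailing bytes so they can be prepended to the next chunk (or consumed by flush()).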
if (offset === length && lastDataIndex !== length) {
|
||
var partialData = Object(_utils_typed_array__WEBPACK_IMPORTED_MODULE_4__["sliceUint8"])(data, lastDataIndex);
|
||
|
||
if (this.cachedData) {
|
||
this.cachedData = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_3__["appendUint8Array"])(this.cachedData, partialData);
|
||
} else {
|
||
this.cachedData = partialData;
|
||
}
|
||
}
|
||
}
|
||
|
||
return {
|
||
audioTrack: track,
|
||
videoTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])(),
|
||
id3Track: id3Track,
|
||
textTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])()
|
||
};
|
||
};
|
||
|
||
_proto.demuxSampleAes = function demuxSampleAes(data, keyData, timeOffset) {
|
||
return Promise.reject(new Error("[" + this + "] This demuxer does not support Sample-AES decryption"));
|
||
};
|
||
|
||
_proto.flush = function flush(timeOffset) {
|
||
// Parse cache in case of remaining frames.
|
||
var cachedData = this.cachedData;
|
||
|
||
if (cachedData) {
|
||
this.cachedData = null;
|
||
this.demux(cachedData, 0);
|
||
}
|
||
|
||
this.frameIndex = 0;
|
||
return {
|
||
audioTrack: this._audioTrack,
|
||
videoTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])(),
|
||
id3Track: this._id3Track,
|
||
textTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])()
|
||
};
|
||
};
|
||
|
||
_proto.destroy = function destroy() {};
|
||
|
||
return BaseAudioDemuxer;
|
||
}();
|
||
/**
|
||
* Initialize PTS
|
||
* <p>
|
||
* use timestamp unless it is undefined, NaN or Infinity
|
||
* </p>
|
||
*/
|
||
|
||
|
||
var initPTSFn = function initPTSFn(timestamp, timeOffset) {
|
||
return Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(timestamp) ? timestamp * 90 : timeOffset * 90000;
|
||
};
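// e.g. a finite ID3 timestamp of 10000 (milliseconds) becomes 10000 * 90 = 900000 in the 90 kHz
// timebase; when no usable timestamp is present, the PTS falls back to timeOffset (seconds) * 90000.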
|
||
/* harmony default export */ __webpack_exports__["default"] = (BaseAudioDemuxer);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/chunk-cache.ts":
|
||
/*!**********************************!*\
|
||
!*** ./src/demux/chunk-cache.ts ***!
|
||
\**********************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return ChunkCache; });
|
||
var ChunkCache = /*#__PURE__*/function () {
|
||
function ChunkCache() {
|
||
this.chunks = [];
|
||
this.dataLength = 0;
|
||
}
|
||
|
||
var _proto = ChunkCache.prototype;
|
||
|
||
_proto.push = function push(chunk) {
|
||
this.chunks.push(chunk);
|
||
this.dataLength += chunk.length;
|
||
};
|
||
|
||
_proto.flush = function flush() {
|
||
var chunks = this.chunks,
|
||
dataLength = this.dataLength;
|
||
var result;
|
||
|
||
if (!chunks.length) {
|
||
return new Uint8Array(0);
|
||
} else if (chunks.length === 1) {
|
||
result = chunks[0];
|
||
} else {
|
||
result = concatUint8Arrays(chunks, dataLength);
|
||
}
|
||
|
||
this.reset();
|
||
return result;
|
||
};
|
||
|
||
_proto.reset = function reset() {
|
||
this.chunks.length = 0;
|
||
this.dataLength = 0;
|
||
};
|
||
|
||
return ChunkCache;
|
||
}();
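// Typical usage: push() each incoming chunk as it arrives, then flush() once to obtain a single
// contiguous Uint8Array (concatenated via concatUint8Arrays below) and reset the cache.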
|
||
|
||
|
||
|
||
function concatUint8Arrays(chunks, dataLength) {
|
||
var result = new Uint8Array(dataLength);
|
||
var offset = 0;
|
||
|
||
for (var i = 0; i < chunks.length; i++) {
|
||
var chunk = chunks[i];
|
||
result.set(chunk, offset);
|
||
offset += chunk.length;
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/dummy-demuxed-track.ts":
|
||
/*!******************************************!*\
|
||
!*** ./src/demux/dummy-demuxed-track.ts ***!
|
||
\******************************************/
|
||
/*! exports provided: dummyTrack */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "dummyTrack", function() { return dummyTrack; });
|
||
function dummyTrack(type, inputTimeScale) {
|
||
if (type === void 0) {
|
||
type = '';
|
||
}
|
||
|
||
if (inputTimeScale === void 0) {
|
||
inputTimeScale = 90000;
|
||
}
|
||
|
||
return {
|
||
type: type,
|
||
id: -1,
|
||
pid: -1,
|
||
inputTimeScale: inputTimeScale,
|
||
sequenceNumber: -1,
|
||
samples: [],
|
||
dropped: 0
|
||
};
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/exp-golomb.ts":
|
||
/*!*********************************!*\
|
||
!*** ./src/demux/exp-golomb.ts ***!
|
||
\*********************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/**
|
||
* Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264.
|
||
*/
|
||
|
||
|
||
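// Decoding examples: the bit strings "1", "010", "011" and "00100" decode to the unsigned
// Exp-Golomb values 0, 1, 2 and 3 (readUEG), which readEG maps to the signed values 0, 1, -1 and 2.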
var ExpGolomb = /*#__PURE__*/function () {
|
||
function ExpGolomb(data) {
|
||
this.data = void 0;
|
||
this.bytesAvailable = void 0;
|
||
this.word = void 0;
|
||
this.bitsAvailable = void 0;
|
||
this.data = data; // the number of bytes left to examine in this.data
|
||
|
||
this.bytesAvailable = data.byteLength; // the current word being examined
|
||
|
||
this.word = 0; // :uint
|
||
// the number of bits left to examine in the current word
|
||
|
||
this.bitsAvailable = 0; // :uint
|
||
} // ():void
|
||
|
||
|
||
var _proto = ExpGolomb.prototype;
|
||
|
||
_proto.loadWord = function loadWord() {
|
||
var data = this.data;
|
||
var bytesAvailable = this.bytesAvailable;
|
||
var position = data.byteLength - bytesAvailable;
|
||
var workingBytes = new Uint8Array(4);
|
||
var availableBytes = Math.min(4, bytesAvailable);
|
||
|
||
if (availableBytes === 0) {
|
||
throw new Error('no bytes available');
|
||
}
|
||
|
||
workingBytes.set(data.subarray(position, position + availableBytes));
|
||
this.word = new DataView(workingBytes.buffer).getUint32(0); // track the amount of this.data that has been processed
|
||
|
||
this.bitsAvailable = availableBytes * 8;
|
||
this.bytesAvailable -= availableBytes;
|
||
} // (count:int):void
|
||
;
|
||
|
||
_proto.skipBits = function skipBits(count) {
|
||
var skipBytes; // :int
|
||
|
||
if (this.bitsAvailable > count) {
|
||
this.word <<= count;
|
||
this.bitsAvailable -= count;
|
||
} else {
|
||
count -= this.bitsAvailable;
|
||
skipBytes = count >> 3;
|
||
count -= skipBytes << 3; // subtract the bits covered by the skipped whole bytes
|
||
this.bytesAvailable -= skipBytes;
|
||
this.loadWord();
|
||
this.word <<= count;
|
||
this.bitsAvailable -= count;
|
||
}
|
||
} // (size:int):uint
|
||
;
|
||
|
||
_proto.readBits = function readBits(size) {
|
||
var bits = Math.min(this.bitsAvailable, size); // :uint
|
||
|
||
var valu = this.word >>> 32 - bits; // :uint
|
||
|
||
if (size > 32) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].error('Cannot read more than 32 bits at a time');
|
||
}
|
||
|
||
this.bitsAvailable -= bits;
|
||
|
||
if (this.bitsAvailable > 0) {
|
||
this.word <<= bits;
|
||
} else if (this.bytesAvailable > 0) {
|
||
this.loadWord();
|
||
}
|
||
|
||
bits = size - bits;
|
||
|
||
if (bits > 0 && this.bitsAvailable) {
|
||
return valu << bits | this.readBits(bits);
|
||
} else {
|
||
return valu;
|
||
}
|
||
} // ():uint
|
||
;
|
||
|
||
_proto.skipLZ = function skipLZ() {
|
||
var leadingZeroCount; // :uint
|
||
|
||
for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) {
|
||
if ((this.word & 0x80000000 >>> leadingZeroCount) !== 0) {
|
||
// the first bit of working word is 1
|
||
this.word <<= leadingZeroCount;
|
||
this.bitsAvailable -= leadingZeroCount;
|
||
return leadingZeroCount;
|
||
}
|
||
} // we exhausted word and still have not found a 1
|
||
|
||
|
||
this.loadWord();
|
||
return leadingZeroCount + this.skipLZ();
|
||
} // ():void
|
||
;
|
||
|
||
_proto.skipUEG = function skipUEG() {
|
||
this.skipBits(1 + this.skipLZ());
|
||
} // ():void
|
||
;
|
||
|
||
_proto.skipEG = function skipEG() {
|
||
this.skipBits(1 + this.skipLZ());
|
||
} // ():uint
|
||
;
|
||
|
||
_proto.readUEG = function readUEG() {
|
||
var clz = this.skipLZ(); // :uint
|
||
|
||
return this.readBits(clz + 1) - 1;
|
||
} // ():int
|
||
;
|
||
|
||
_proto.readEG = function readEG() {
|
||
var valu = this.readUEG(); // :int
|
||
|
||
if (0x01 & valu) {
|
||
// the number is odd if the low order bit is set
|
||
return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
|
||
} else {
|
||
return -1 * (valu >>> 1); // divide by two then make it negative
|
||
}
|
||
} // Some convenience functions
|
||
// :Boolean
|
||
;
|
||
|
||
_proto.readBoolean = function readBoolean() {
|
||
return this.readBits(1) === 1;
|
||
} // ():int
|
||
;
|
||
|
||
_proto.readUByte = function readUByte() {
|
||
return this.readBits(8);
|
||
} // ():int
|
||
;
|
||
|
||
_proto.readUShort = function readUShort() {
|
||
return this.readBits(16);
|
||
} // ():int
|
||
;
|
||
|
||
_proto.readUInt = function readUInt() {
|
||
return this.readBits(32);
|
||
}
|
||
/**
|
||
* Advance the ExpGolomb decoder past a scaling list. The scaling
|
||
* list is optionally transmitted as part of a sequence parameter
|
||
* set and is not relevant to transmuxing.
|
||
* @param count the number of entries in this scaling list
|
||
* @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
|
||
*/
|
||
;
|
||
|
||
_proto.skipScalingList = function skipScalingList(count) {
|
||
var lastScale = 8;
|
||
var nextScale = 8;
|
||
var deltaScale;
|
||
|
||
for (var j = 0; j < count; j++) {
|
||
if (nextScale !== 0) {
|
||
deltaScale = this.readEG();
|
||
nextScale = (lastScale + deltaScale + 256) % 256;
|
||
}
|
||
|
||
lastScale = nextScale === 0 ? lastScale : nextScale;
|
||
}
|
||
}
|
||
/**
|
||
* Read a sequence parameter set and return some interesting video
|
||
* properties. A sequence parameter set is the H264 metadata that
|
||
* describes the properties of upcoming video frames.
|
||
* @param data {Uint8Array} the bytes of a sequence parameter set
|
||
* @return {object} an object with configuration parsed from the
|
||
* sequence parameter set, including the dimensions of the
|
||
* associated video frames.
|
||
*/
|
||
;
|
||
|
||
_proto.readSPS = function readSPS() {
|
||
var frameCropLeftOffset = 0;
|
||
var frameCropRightOffset = 0;
|
||
var frameCropTopOffset = 0;
|
||
var frameCropBottomOffset = 0;
|
||
var numRefFramesInPicOrderCntCycle;
|
||
var scalingListCount;
|
||
var i;
|
||
var readUByte = this.readUByte.bind(this);
|
||
var readBits = this.readBits.bind(this);
|
||
var readUEG = this.readUEG.bind(this);
|
||
var readBoolean = this.readBoolean.bind(this);
|
||
var skipBits = this.skipBits.bind(this);
|
||
var skipEG = this.skipEG.bind(this);
|
||
var skipUEG = this.skipUEG.bind(this);
|
||
var skipScalingList = this.skipScalingList.bind(this);
|
||
readUByte();
|
||
var profileIdc = readUByte(); // profile_idc
|
||
|
||
readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
|
||
|
||
skipBits(3); // reserved_zero_3bits u(3),
|
||
|
||
readUByte(); // level_idc u(8)
|
||
|
||
skipUEG(); // seq_parameter_set_id
|
||
// some profiles have more optional data we don't need
|
||
|
||
if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) {
|
||
var chromaFormatIdc = readUEG();
|
||
|
||
if (chromaFormatIdc === 3) {
|
||
skipBits(1);
|
||
} // separate_colour_plane_flag
|
||
|
||
|
||
skipUEG(); // bit_depth_luma_minus8
|
||
|
||
skipUEG(); // bit_depth_chroma_minus8
|
||
|
||
skipBits(1); // qpprime_y_zero_transform_bypass_flag
|
||
|
||
if (readBoolean()) {
|
||
// seq_scaling_matrix_present_flag
|
||
scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
|
||
|
||
for (i = 0; i < scalingListCount; i++) {
|
||
if (readBoolean()) {
|
||
// seq_scaling_list_present_flag[ i ]
|
||
if (i < 6) {
|
||
skipScalingList(16);
|
||
} else {
|
||
skipScalingList(64);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
skipUEG(); // log2_max_frame_num_minus4
|
||
|
||
var picOrderCntType = readUEG();
|
||
|
||
if (picOrderCntType === 0) {
|
||
readUEG(); // log2_max_pic_order_cnt_lsb_minus4
|
||
} else if (picOrderCntType === 1) {
|
||
skipBits(1); // delta_pic_order_always_zero_flag
|
||
|
||
skipEG(); // offset_for_non_ref_pic
|
||
|
||
skipEG(); // offset_for_top_to_bottom_field
|
||
|
||
numRefFramesInPicOrderCntCycle = readUEG();
|
||
|
||
for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
|
||
skipEG();
|
||
} // offset_for_ref_frame[ i ]
|
||
|
||
}
|
||
|
||
skipUEG(); // max_num_ref_frames
|
||
|
||
skipBits(1); // gaps_in_frame_num_value_allowed_flag
|
||
|
||
var picWidthInMbsMinus1 = readUEG();
|
||
var picHeightInMapUnitsMinus1 = readUEG();
|
||
var frameMbsOnlyFlag = readBits(1);
|
||
|
||
if (frameMbsOnlyFlag === 0) {
|
||
skipBits(1);
|
||
} // mb_adaptive_frame_field_flag
|
||
|
||
|
||
skipBits(1); // direct_8x8_inference_flag
|
||
|
||
if (readBoolean()) {
|
||
// frame_cropping_flag
|
||
frameCropLeftOffset = readUEG();
|
||
frameCropRightOffset = readUEG();
|
||
frameCropTopOffset = readUEG();
|
||
frameCropBottomOffset = readUEG();
|
||
}
|
||
|
||
var pixelRatio = [1, 1];
|
||
|
||
if (readBoolean()) {
|
||
// vui_parameters_present_flag
|
||
if (readBoolean()) {
|
||
// aspect_ratio_info_present_flag
|
||
var aspectRatioIdc = readUByte();
|
||
|
||
switch (aspectRatioIdc) {
|
||
case 1:
|
||
pixelRatio = [1, 1];
|
||
break;
|
||
|
||
case 2:
|
||
pixelRatio = [12, 11];
|
||
break;
|
||
|
||
case 3:
|
||
pixelRatio = [10, 11];
|
||
break;
|
||
|
||
case 4:
|
||
pixelRatio = [16, 11];
|
||
break;
|
||
|
||
case 5:
|
||
pixelRatio = [40, 33];
|
||
break;
|
||
|
||
case 6:
|
||
pixelRatio = [24, 11];
|
||
break;
|
||
|
||
case 7:
|
||
pixelRatio = [20, 11];
|
||
break;
|
||
|
||
case 8:
|
||
pixelRatio = [32, 11];
|
||
break;
|
||
|
||
case 9:
|
||
pixelRatio = [80, 33];
|
||
break;
|
||
|
||
case 10:
|
||
pixelRatio = [18, 11];
|
||
break;
|
||
|
||
case 11:
|
||
pixelRatio = [15, 11];
|
||
break;
|
||
|
||
case 12:
|
||
pixelRatio = [64, 33];
|
||
break;
|
||
|
||
case 13:
|
||
pixelRatio = [160, 99];
|
||
break;
|
||
|
||
case 14:
|
||
pixelRatio = [4, 3];
|
||
break;
|
||
|
||
case 15:
|
||
pixelRatio = [3, 2];
|
||
break;
|
||
|
||
case 16:
|
||
pixelRatio = [2, 1];
|
||
break;
|
||
|
||
case 255:
|
||
{
|
||
pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
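// For example, a typical 1080p SPS signals pic_width_in_mbs_minus1 = 119,
// pic_height_in_map_units_minus1 = 67, frame_mbs_only_flag = 1 and frame_crop_bottom_offset = 4,
// which the formulas below turn into width = 120 * 16 = 1920 and height = 68 * 16 - 2 * 4 = 1080.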
return {
|
||
width: Math.ceil((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2),
|
||
height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
|
||
pixelRatio: pixelRatio
|
||
};
|
||
};
|
||
|
||
_proto.readSliceType = function readSliceType() {
|
||
// skip NALu type
|
||
this.readUByte(); // discard first_mb_in_slice
|
||
|
||
this.readUEG(); // return slice_type
|
||
|
||
return this.readUEG();
|
||
};
|
||
|
||
return ExpGolomb;
|
||
}();
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (ExpGolomb);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/id3.ts":
|
||
/*!**************************!*\
|
||
!*** ./src/demux/id3.ts ***!
|
||
\**************************/
|
||
/*! exports provided: isHeader, isFooter, getID3Data, canParse, getTimeStamp, isTimeStampFrame, getID3Frames, decodeFrame, utf8ArrayToStr, testables */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isHeader", function() { return isHeader; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isFooter", function() { return isFooter; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getID3Data", function() { return getID3Data; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "canParse", function() { return canParse; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getTimeStamp", function() { return getTimeStamp; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isTimeStampFrame", function() { return isTimeStampFrame; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getID3Frames", function() { return getID3Frames; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "decodeFrame", function() { return decodeFrame; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "utf8ArrayToStr", function() { return utf8ArrayToStr; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "testables", function() { return testables; });
|
||
// breaking up those two types in order to clarify what is happening in the decoding path.
|
||
|
||
/**
|
||
* Returns true if an ID3 header can be found at offset in data
|
||
* @param {Uint8Array} data - The data to search in
|
||
* @param {number} offset - The offset at which to start searching
|
||
* @return {boolean} - True if an ID3 header is found
|
||
*/
|
||
var isHeader = function isHeader(data, offset) {
|
||
/*
|
||
* http://id3.org/id3v2.3.0
|
||
* [0] = 'I'
|
||
* [1] = 'D'
|
||
* [2] = '3'
|
||
* [3,4] = {Version}
|
||
* [5] = {Flags}
|
||
* [6-9] = {ID3 Size}
|
||
*
|
||
* An ID3v2 tag can be detected with the following pattern:
|
||
* $49 44 33 yy yy xx zz zz zz zz
|
||
* Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
|
||
*/
|
||
if (offset + 10 <= data.length) {
|
||
// look for 'ID3' identifier
|
||
if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
|
||
// check version is within range
|
||
if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
|
||
// check size is within range
|
||
if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
|
||
return true;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
return false;
|
||
};
|
||
/**
|
||
* Returns true if an ID3 footer can be found at offset in data
|
||
* @param {Uint8Array} data - The data to search in
|
||
* @param {number} offset - The offset at which to start searching
|
||
* @return {boolean} - True if an ID3 footer is found
|
||
*/
|
||
|
||
var isFooter = function isFooter(data, offset) {
|
||
/*
|
||
* The footer is a copy of the header, but with a different identifier
|
||
*/
|
||
if (offset + 10 <= data.length) {
|
||
// look for '3DI' identifier
|
||
if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
|
||
// check version is within range
|
||
if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
|
||
// check size is within range
|
||
if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
|
||
return true;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
return false;
|
||
};
|
||
/**
|
||
* Returns any adjacent ID3 tags found in data starting at offset, as one block of data
|
||
* @param {Uint8Array} data - The data to search in
|
||
* @param {number} offset - The offset at which to start searching
|
||
* @return {Uint8Array | undefined} - The block of data containing any ID3 tags found
|
||
* or *undefined* if no header is found at the starting offset
|
||
*/
|
||
|
||
var getID3Data = function getID3Data(data, offset) {
|
||
var front = offset;
|
||
var length = 0;
|
||
|
||
while (isHeader(data, offset)) {
|
||
// ID3 header is 10 bytes
|
||
length += 10;
|
||
var size = readSize(data, offset + 6);
|
||
length += size;
|
||
|
||
if (isFooter(data, offset + 10)) {
|
||
// ID3 footer is 10 bytes
|
||
length += 10;
|
||
}
|
||
|
||
offset += length;
|
||
}
|
||
|
||
if (length > 0) {
|
||
return data.subarray(front, front + length);
|
||
}
|
||
|
||
return undefined;
|
||
};
|
||
|
||
var readSize = function readSize(data, offset) {
|
||
var size = 0;
|
||
size = (data[offset] & 0x7f) << 21;
|
||
size |= (data[offset + 1] & 0x7f) << 14;
|
||
size |= (data[offset + 2] & 0x7f) << 7;
|
||
size |= data[offset + 3] & 0x7f;
|
||
return size;
|
||
};
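// ID3 tag sizes are stored as "synchsafe" integers: four bytes whose high bits are always zero,
// leaving 7 useful bits each. For instance the bytes 0x00 0x00 0x02 0x01 decode to (2 << 7) | 1 = 257.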
|
||
|
||
var canParse = function canParse(data, offset) {
|
||
return isHeader(data, offset) && readSize(data, offset + 6) + 10 <= data.length - offset;
|
||
};
|
||
/**
|
||
* Searches for the Elementary Stream timestamp found in the ID3 data chunk
|
||
* @param {Uint8Array} data - Block of data containing one or more ID3 tags
|
||
* @return {number | undefined} - The timestamp
|
||
*/
|
||
|
||
var getTimeStamp = function getTimeStamp(data) {
|
||
var frames = getID3Frames(data);
|
||
|
||
for (var i = 0; i < frames.length; i++) {
|
||
var frame = frames[i];
|
||
|
||
if (isTimeStampFrame(frame)) {
|
||
return readTimeStamp(frame);
|
||
}
|
||
}
|
||
|
||
return undefined;
|
||
};
|
||
/**
|
||
* Returns true if the ID3 frame is an Elementary Stream timestamp frame
|
||
* @param {ID3 frame} frame
|
||
*/
|
||
|
||
var isTimeStampFrame = function isTimeStampFrame(frame) {
|
||
return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
|
||
};
|
||
|
||
var getFrameData = function getFrameData(data) {
|
||
/*
|
||
Frame ID $xx xx xx xx (four characters)
|
||
Size $xx xx xx xx
|
||
Flags $xx xx
|
||
*/
|
||
var type = String.fromCharCode(data[0], data[1], data[2], data[3]);
|
||
var size = readSize(data, 4); // skip frame id, size, and flags
|
||
|
||
var offset = 10;
|
||
return {
|
||
type: type,
|
||
size: size,
|
||
data: data.subarray(offset, offset + size)
|
||
};
|
||
};
|
||
/**
|
||
* Returns an array of ID3 frames found in all the ID3 tags in the id3Data
|
||
* @param {Uint8Array} id3Data - The ID3 data containing one or more ID3 tags
|
||
* @return {ID3.Frame[]} - Array of ID3 frame objects
|
||
*/
|
||
|
||
|
||
var getID3Frames = function getID3Frames(id3Data) {
|
||
var offset = 0;
|
||
var frames = [];
|
||
|
||
while (isHeader(id3Data, offset)) {
|
||
var size = readSize(id3Data, offset + 6); // skip past ID3 header
|
||
|
||
offset += 10;
|
||
var end = offset + size; // loop through frames in the ID3 tag
|
||
|
||
while (offset + 8 < end) {
|
||
var frameData = getFrameData(id3Data.subarray(offset));
|
||
var frame = decodeFrame(frameData);
|
||
|
||
if (frame) {
|
||
frames.push(frame);
|
||
} // skip frame header and frame data
|
||
|
||
|
||
offset += frameData.size + 10;
|
||
}
|
||
|
||
if (isFooter(id3Data, offset)) {
|
||
offset += 10;
|
||
}
|
||
}
|
||
|
||
return frames;
|
||
};
|
||
var decodeFrame = function decodeFrame(frame) {
|
||
if (frame.type === 'PRIV') {
|
||
return decodePrivFrame(frame);
|
||
} else if (frame.type[0] === 'W') {
|
||
return decodeURLFrame(frame);
|
||
}
|
||
|
||
return decodeTextFrame(frame);
|
||
};
|
||
|
||
var decodePrivFrame = function decodePrivFrame(frame) {
|
||
/*
|
||
Format: <text string>\0<binary data>
|
||
*/
|
||
if (frame.size < 2) {
|
||
return undefined;
|
||
}
|
||
|
||
var owner = utf8ArrayToStr(frame.data, true);
|
||
var privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
|
||
return {
|
||
key: frame.type,
|
||
info: owner,
|
||
data: privateData.buffer
|
||
};
|
||
};
|
||
|
||
var decodeTextFrame = function decodeTextFrame(frame) {
|
||
if (frame.size < 2) {
|
||
return undefined;
|
||
}
|
||
|
||
if (frame.type === 'TXXX') {
|
||
/*
|
||
Format:
|
||
[0] = {Text Encoding}
|
||
[1-?] = {Description}\0{Value}
|
||
*/
|
||
var index = 1;
|
||
var description = utf8ArrayToStr(frame.data.subarray(index), true);
|
||
index += description.length + 1;
|
||
var value = utf8ArrayToStr(frame.data.subarray(index));
|
||
return {
|
||
key: frame.type,
|
||
info: description,
|
||
data: value
|
||
};
|
||
}
|
||
/*
|
||
Format:
|
||
[0] = {Text Encoding}
|
||
[1-?] = {Value}
|
||
*/
|
||
|
||
|
||
var text = utf8ArrayToStr(frame.data.subarray(1));
|
||
return {
|
||
key: frame.type,
|
||
data: text
|
||
};
|
||
};
|
||
|
||
var decodeURLFrame = function decodeURLFrame(frame) {
|
||
if (frame.type === 'WXXX') {
|
||
/*
|
||
Format:
|
||
[0] = {Text Encoding}
|
||
[1-?] = {Description}\0{URL}
|
||
*/
|
||
if (frame.size < 2) {
|
||
return undefined;
|
||
}
|
||
|
||
var index = 1;
|
||
var description = utf8ArrayToStr(frame.data.subarray(index), true);
|
||
index += description.length + 1;
|
||
var value = utf8ArrayToStr(frame.data.subarray(index));
|
||
return {
|
||
key: frame.type,
|
||
info: description,
|
||
data: value
|
||
};
|
||
}
|
||
/*
|
||
Format:
|
||
[0-?] = {URL}
|
||
*/
|
||
|
||
|
||
var url = utf8ArrayToStr(frame.data);
|
||
return {
|
||
key: frame.type,
|
||
data: url
|
||
};
|
||
};
|
||
|
||
var readTimeStamp = function readTimeStamp(timeStampFrame) {
|
||
if (timeStampFrame.data.byteLength === 8) {
|
||
var data = new Uint8Array(timeStampFrame.data); // timestamp is 33 bit expressed as a big-endian eight-octet number,
|
||
// with the upper 31 bits set to zero.
|
||
|
||
var pts33Bit = data[3] & 0x1;
|
||
var timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
|
||
timestamp /= 45;
|
||
|
||
if (pts33Bit) {
|
||
timestamp += 47721858.84;
|
||
} // 2^32 / 90
|
||
|
||
|
||
return Math.round(timestamp);
|
||
}
|
||
|
||
return undefined;
|
||
}; // http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
|
||
// http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
|
||
|
||
/* utf.js - UTF-8 <=> UTF-16 conversion
|
||
*
|
||
* Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
|
||
* Version: 1.0
|
||
* LastModified: Dec 25 1999
|
||
* This library is free. You can redistribute it and/or modify it.
|
||
*/
|
||
|
||
|
||
var utf8ArrayToStr = function utf8ArrayToStr(array, exitOnNull) {
|
||
if (exitOnNull === void 0) {
|
||
exitOnNull = false;
|
||
}
|
||
|
||
var decoder = getTextDecoder();
|
||
|
||
if (decoder) {
|
||
var decoded = decoder.decode(array);
|
||
|
||
if (exitOnNull) {
|
||
// grab up to the first null
|
||
var idx = decoded.indexOf('\0');
|
||
return idx !== -1 ? decoded.substring(0, idx) : decoded;
|
||
} // remove any null characters
|
||
|
||
|
||
return decoded.replace(/\0/g, '');
|
||
}
|
||
|
||
var len = array.length;
|
||
var c;
|
||
var char2;
|
||
var char3;
|
||
var out = '';
|
||
var i = 0;
|
||
|
||
while (i < len) {
|
||
c = array[i++];
|
||
|
||
if (c === 0x00 && exitOnNull) {
|
||
return out;
|
||
} else if (c === 0x00 || c === 0x03) {
|
||
// If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
|
||
continue;
|
||
}
|
||
|
||
switch (c >> 4) {
|
||
case 0:
|
||
case 1:
|
||
case 2:
|
||
case 3:
|
||
case 4:
|
||
case 5:
|
||
case 6:
|
||
case 7:
|
||
// 0xxxxxxx
|
||
out += String.fromCharCode(c);
|
||
break;
|
||
|
||
case 12:
|
||
case 13:
|
||
// 110x xxxx 10xx xxxx
|
||
char2 = array[i++];
|
||
out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
|
||
break;
|
||
|
||
case 14:
|
||
// 1110 xxxx 10xx xxxx 10xx xxxx
|
||
char2 = array[i++];
|
||
char3 = array[i++];
|
||
out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
|
||
break;
|
||
|
||
default:
|
||
}
|
||
}
|
||
|
||
return out;
|
||
};
|
||
var testables = {
|
||
decodeTextFrame: decodeTextFrame
|
||
};
|
||
var decoder;
|
||
|
||
function getTextDecoder() {
|
||
if (!decoder && typeof self.TextDecoder !== 'undefined') {
|
||
decoder = new self.TextDecoder('utf-8');
|
||
}
|
||
|
||
return decoder;
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/mp3demuxer.ts":
|
||
/*!*********************************!*\
|
||
!*** ./src/demux/mp3demuxer.ts ***!
|
||
\*********************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _base_audio_demuxer__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./base-audio-demuxer */ "./src/demux/base-audio-demuxer.ts");
|
||
/* harmony import */ var _demux_id3__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../demux/id3 */ "./src/demux/id3.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _mpegaudio__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./mpegaudio */ "./src/demux/mpegaudio.ts");
|
||
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
|
||
|
||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
|
||
|
||
/**
|
||
* MP3 demuxer
|
||
*/
|
||
|
||
|
||
|
||
|
||
|
||
var MP3Demuxer = /*#__PURE__*/function (_BaseAudioDemuxer) {
|
||
_inheritsLoose(MP3Demuxer, _BaseAudioDemuxer);
|
||
|
||
function MP3Demuxer() {
|
||
return _BaseAudioDemuxer.apply(this, arguments) || this;
|
||
}
|
||
|
||
var _proto = MP3Demuxer.prototype;
|
||
|
||
_proto.resetInitSegment = function resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) {
|
||
_BaseAudioDemuxer.prototype.resetInitSegment.call(this, initSegment, audioCodec, videoCodec, trackDuration);
|
||
|
||
this._audioTrack = {
|
||
container: 'audio/mpeg',
|
||
type: 'audio',
|
||
id: 2,
|
||
pid: -1,
|
||
sequenceNumber: 0,
|
||
isAAC: false,
|
||
samples: [],
|
||
manifestCodec: audioCodec,
|
||
duration: trackDuration,
|
||
inputTimeScale: 90000,
|
||
dropped: 0
|
||
};
|
||
};
|
||
|
||
MP3Demuxer.probe = function probe(data) {
|
||
if (!data) {
|
||
return false;
|
||
} // check if data contains ID3 timestamp and MPEG sync word
|
||
// Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
|
||
// Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
|
||
// More info http://www.mp3-tech.org/programmer/frame_header.html
|
||
|
||
|
||
var id3Data = _demux_id3__WEBPACK_IMPORTED_MODULE_1__["getID3Data"](data, 0) || [];
|
||
var offset = id3Data.length;
|
||
|
||
for (var length = data.length; offset < length; offset++) {
|
||
if (_mpegaudio__WEBPACK_IMPORTED_MODULE_3__["probe"](data, offset)) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].log('MPEG Audio sync word found !');
|
||
return true;
|
||
}
|
||
}
|
||
|
||
return false;
|
||
};
|
||
|
||
_proto.canParse = function canParse(data, offset) {
|
||
return _mpegaudio__WEBPACK_IMPORTED_MODULE_3__["canParse"](data, offset);
|
||
};
|
||
|
||
_proto.appendFrame = function appendFrame(track, data, offset) {
|
||
if (this.initPTS === null) {
|
||
return;
|
||
}
|
||
|
||
return _mpegaudio__WEBPACK_IMPORTED_MODULE_3__["appendFrame"](track, data, offset, this.initPTS, this.frameIndex);
|
||
};
|
||
|
||
return MP3Demuxer;
|
||
}(_base_audio_demuxer__WEBPACK_IMPORTED_MODULE_0__["default"]);
|
||
|
||
MP3Demuxer.minProbeByteLength = 4;
|
||
/* harmony default export */ __webpack_exports__["default"] = (MP3Demuxer);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/mp4demuxer.ts":
|
||
/*!*********************************!*\
|
||
!*** ./src/demux/mp4demuxer.ts ***!
|
||
\*********************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
|
||
/* harmony import */ var _dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./dummy-demuxed-track */ "./src/demux/dummy-demuxed-track.ts");
|
||
|
||
|
||
/**
|
||
* MP4 demuxer
|
||
*/
|
||
|
||
|
||
var emsgSchemePattern = /\/emsg[-/]ID3/i;
|
||
|
||
var MP4Demuxer = /*#__PURE__*/function () {
|
||
function MP4Demuxer(observer, config) {
|
||
this.remainderData = null;
|
||
this.timeOffset = 0;
|
||
this.config = void 0;
|
||
this.videoTrack = void 0;
|
||
this.audioTrack = void 0;
|
||
this.id3Track = void 0;
|
||
this.txtTrack = void 0;
|
||
this.config = config;
|
||
}
|
||
|
||
var _proto = MP4Demuxer.prototype;
|
||
|
||
_proto.resetTimeStamp = function resetTimeStamp() {};
|
||
|
||
_proto.resetInitSegment = function resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) {
|
||
var initData = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__["parseInitSegment"])(initSegment);
|
||
var videoTrack = this.videoTrack = Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])('video', 1);
|
||
var audioTrack = this.audioTrack = Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])('audio', 1);
|
||
var captionTrack = this.txtTrack = Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])('text', 1);
|
||
this.id3Track = Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])('id3', 1);
|
||
this.timeOffset = 0;
|
||
|
||
if (initData.video) {
|
||
var _initData$video = initData.video,
|
||
id = _initData$video.id,
|
||
timescale = _initData$video.timescale,
|
||
codec = _initData$video.codec;
|
||
videoTrack.id = id;
|
||
videoTrack.timescale = captionTrack.timescale = timescale;
|
||
videoTrack.codec = codec;
|
||
}
|
||
|
||
if (initData.audio) {
|
||
var _initData$audio = initData.audio,
|
||
_id = _initData$audio.id,
|
||
_timescale = _initData$audio.timescale,
|
||
_codec = _initData$audio.codec;
|
||
audioTrack.id = _id;
|
||
audioTrack.timescale = _timescale;
|
||
audioTrack.codec = _codec;
|
||
}
|
||
|
||
captionTrack.id = _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__["RemuxerTrackIdConfig"].text;
|
||
videoTrack.sampleDuration = 0;
|
||
videoTrack.duration = audioTrack.duration = trackDuration;
|
||
};
|
||
|
||
_proto.resetContiguity = function resetContiguity() {};
|
||
|
||
MP4Demuxer.probe = function probe(data) {
|
||
// ensure we find a moof box in the first 16 kB
|
||
data = data.length > 16384 ? data.subarray(0, 16384) : data;
|
||
return Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__["findBox"])(data, ['moof']).length > 0;
|
||
};
|
||
|
||
_proto.demux = function demux(data, timeOffset) {
|
||
this.timeOffset = timeOffset; // Load all data into the avc track. The CMAF remuxer will look for the data in the samples object; the rest of the fields do not matter
|
||
|
||
var videoSamples = data;
|
||
var videoTrack = this.videoTrack;
|
||
var textTrack = this.txtTrack;
|
||
|
||
if (this.config.progressive) {
|
||
// Split the bytestream into two ranges: one encompassing all data up until the start of the last moof, and everything else.
|
||
// This is done to guarantee that we're sending valid data to MSE - when demuxing progressively, we have no guarantee
|
||
// that the fetch loader gives us flush moof+mdat pairs. If we push jagged data to MSE, it will throw an exception.
|
||
if (this.remainderData) {
|
||
videoSamples = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__["appendUint8Array"])(this.remainderData, data);
|
||
}
|
||
|
||
var segmentedData = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__["segmentValidRange"])(videoSamples);
|
||
this.remainderData = segmentedData.remainder;
|
||
videoTrack.samples = segmentedData.valid || new Uint8Array();
|
||
} else {
|
||
videoTrack.samples = videoSamples;
|
||
}
|
||
|
||
var id3Track = this.extractID3Track(videoTrack, timeOffset);
|
||
textTrack.samples = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__["parseSamples"])(timeOffset, videoTrack);
|
||
return {
|
||
videoTrack: videoTrack,
|
||
audioTrack: this.audioTrack,
|
||
id3Track: id3Track,
|
||
textTrack: this.txtTrack
|
||
};
|
||
};
|
||
|
||
_proto.flush = function flush() {
|
||
var timeOffset = this.timeOffset;
|
||
var videoTrack = this.videoTrack;
|
||
var textTrack = this.txtTrack;
|
||
videoTrack.samples = this.remainderData || new Uint8Array();
|
||
this.remainderData = null;
|
||
var id3Track = this.extractID3Track(videoTrack, this.timeOffset);
|
||
textTrack.samples = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__["parseSamples"])(timeOffset, videoTrack);
|
||
return {
|
||
videoTrack: videoTrack,
|
||
audioTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])(),
|
||
id3Track: id3Track,
|
||
textTrack: Object(_dummy_demuxed_track__WEBPACK_IMPORTED_MODULE_2__["dummyTrack"])()
|
||
};
|
||
};
|
||
|
||
_proto.extractID3Track = function extractID3Track(videoTrack, timeOffset) {
|
||
var id3Track = this.id3Track;
|
||
|
||
if (videoTrack.samples.length) {
|
||
var emsgs = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__["findBox"])(videoTrack.samples, ['emsg']);
|
||
|
||
if (emsgs) {
|
||
emsgs.forEach(function (data) {
|
||
var emsgInfo = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_1__["parseEmsg"])(data);
|
||
|
||
if (emsgSchemePattern.test(emsgInfo.schemeIdUri)) {
|
||
var pts = Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(emsgInfo.presentationTime) ? emsgInfo.presentationTime / emsgInfo.timeScale : timeOffset + emsgInfo.presentationTimeDelta / emsgInfo.timeScale;
|
||
var payload = emsgInfo.payload;
|
||
id3Track.samples.push({
|
||
data: payload,
|
||
len: payload.byteLength,
|
||
dts: pts,
|
||
pts: pts
|
||
});
|
||
}
|
||
});
|
||
}
|
||
}
|
||
|
||
return id3Track;
|
||
};
|
||
|
||
_proto.demuxSampleAes = function demuxSampleAes(data, keyData, timeOffset) {
|
||
return Promise.reject(new Error('The MP4 demuxer does not support SAMPLE-AES decryption'));
|
||
};
|
||
|
||
_proto.destroy = function destroy() {};
|
||
|
||
return MP4Demuxer;
|
||
}();
|
||
|
||
MP4Demuxer.minProbeByteLength = 1024;
|
||
/* harmony default export */ __webpack_exports__["default"] = (MP4Demuxer);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/mpegaudio.ts":
|
||
/*!********************************!*\
|
||
!*** ./src/demux/mpegaudio.ts ***!
|
||
\********************************/
|
||
/*! exports provided: appendFrame, parseHeader, isHeaderPattern, isHeader, canParse, probe */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "appendFrame", function() { return appendFrame; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseHeader", function() { return parseHeader; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isHeaderPattern", function() { return isHeaderPattern; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isHeader", function() { return isHeader; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "canParse", function() { return canParse; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "probe", function() { return probe; });
|
||
/**
|
||
* MPEG parser helper
|
||
*/
|
||
var chromeVersion = null;
|
||
var BitratesMap = [32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160];
|
||
var SamplingRateMap = [44100, 48000, 32000, 22050, 24000, 16000, 11025, 12000, 8000];
|
||
var SamplesCoefficients = [// MPEG 2.5
|
||
[0, // Reserved
|
||
72, // Layer3
|
||
144, // Layer2
|
||
12 // Layer1
|
||
], // Reserved
|
||
[0, // Reserved
|
||
0, // Layer3
|
||
0, // Layer2
|
||
0 // Layer1
|
||
], // MPEG 2
|
||
[0, // Reserved
|
||
72, // Layer3
|
||
144, // Layer2
|
||
12 // Layer1
|
||
], // MPEG 1
|
||
[0, // Reserved
|
||
144, // Layer3
|
||
144, // Layer2
|
||
12 // Layer1
|
||
]];
|
||
var BytesInSlot = [0, // Reserved
|
||
1, // Layer3
|
||
1, // Layer2
|
||
4 // Layer1
|
||
];
|
||
function appendFrame(track, data, offset, pts, frameIndex) {
|
||
// Using http://www.datavoyage.com/mpgscript/mpeghdr.htm as a reference
|
||
if (offset + 24 > data.length) {
|
||
return;
|
||
}
|
||
|
||
var header = parseHeader(data, offset);
|
||
|
||
if (header && offset + header.frameLength <= data.length) {
|
||
var frameDuration = header.samplesPerFrame * 90000 / header.sampleRate;
|
||
var stamp = pts + frameIndex * frameDuration;
|
||
var sample = {
|
||
unit: data.subarray(offset, offset + header.frameLength),
|
||
pts: stamp,
|
||
dts: stamp
|
||
};
|
||
track.config = [];
|
||
track.channelCount = header.channelCount;
|
||
track.samplerate = header.sampleRate;
|
||
track.samples.push(sample);
|
||
return {
|
||
sample: sample,
|
||
length: header.frameLength,
|
||
missing: 0
|
||
};
|
||
}
|
||
}
|
||
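// Example: an MPEG-1 Layer III header with bitrate index 9 (128 kbps) and sample rate index 0
// (44100 Hz) gives samplesPerFrame = 1152 and frameLength = floor(144 * 128000 / 44100 + padding),
// i.e. 417 bytes without padding or 418 with it.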
function parseHeader(data, offset) {
|
||
var mpegVersion = data[offset + 1] >> 3 & 3;
|
||
var mpegLayer = data[offset + 1] >> 1 & 3;
|
||
var bitRateIndex = data[offset + 2] >> 4 & 15;
|
||
var sampleRateIndex = data[offset + 2] >> 2 & 3;
|
||
|
||
if (mpegVersion !== 1 && bitRateIndex !== 0 && bitRateIndex !== 15 && sampleRateIndex !== 3) {
|
||
var paddingBit = data[offset + 2] >> 1 & 1;
|
||
var channelMode = data[offset + 3] >> 6;
|
||
var columnInBitrates = mpegVersion === 3 ? 3 - mpegLayer : mpegLayer === 3 ? 3 : 4;
|
||
var bitRate = BitratesMap[columnInBitrates * 14 + bitRateIndex - 1] * 1000;
|
||
var columnInSampleRates = mpegVersion === 3 ? 0 : mpegVersion === 2 ? 1 : 2;
|
||
var sampleRate = SamplingRateMap[columnInSampleRates * 3 + sampleRateIndex];
|
||
var channelCount = channelMode === 3 ? 1 : 2; // If bits of channel mode are `11` then it is a single channel (Mono)
|
||
|
||
var sampleCoefficient = SamplesCoefficients[mpegVersion][mpegLayer];
|
||
var bytesInSlot = BytesInSlot[mpegLayer];
|
||
var samplesPerFrame = sampleCoefficient * 8 * bytesInSlot;
|
||
var frameLength = Math.floor(sampleCoefficient * bitRate / sampleRate + paddingBit) * bytesInSlot;
|
||
|
||
if (chromeVersion === null) {
|
||
var userAgent = navigator.userAgent || '';
|
||
var result = userAgent.match(/Chrome\/(\d+)/i);
|
||
chromeVersion = result ? parseInt(result[1]) : 0;
|
||
}
|
||
|
||
var needChromeFix = !!chromeVersion && chromeVersion <= 87;
|
||
|
||
if (needChromeFix && mpegLayer === 2 && bitRate >= 224000 && channelMode === 0) {
|
||
// Work around bug in Chromium by setting channelMode to dual-channel (10) instead of stereo (00)
|
||
data[offset + 3] = data[offset + 3] | 0x80;
|
||
}
|
||
|
||
return {
|
||
sampleRate: sampleRate,
|
||
channelCount: channelCount,
|
||
frameLength: frameLength,
|
||
samplesPerFrame: samplesPerFrame
|
||
};
|
||
}
|
||
}
|
||
function isHeaderPattern(data, offset) {
|
||
return data[offset] === 0xff && (data[offset + 1] & 0xe0) === 0xe0 && (data[offset + 1] & 0x06) !== 0x00;
|
||
}
|
||
function isHeader(data, offset) {
|
||
// Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
|
||
// Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
|
||
// More info http://www.mp3-tech.org/programmer/frame_header.html
|
||
return offset + 1 < data.length && isHeaderPattern(data, offset);
|
||
}
|
||
function canParse(data, offset) {
|
||
var headerSize = 4;
|
||
return isHeaderPattern(data, offset) && headerSize <= data.length - offset;
|
||
}
|
||
function probe(data, offset) {
|
||
// same as isHeader but we also check that MPEG frame follows last MPEG frame
|
||
// or end of data is reached
|
||
if (offset + 1 < data.length && isHeaderPattern(data, offset)) {
|
||
// MPEG header Length
|
||
var headerLength = 4; // MPEG frame Length
|
||
|
||
var header = parseHeader(data, offset);
|
||
var frameLength = headerLength;
|
||
|
||
if (header !== null && header !== void 0 && header.frameLength) {
|
||
frameLength = header.frameLength;
|
||
}
|
||
|
||
var newOffset = offset + frameLength;
|
||
return newOffset === data.length || isHeader(data, newOffset);
|
||
}
|
||
|
||
return false;
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/sample-aes.ts":
|
||
/*!*********************************!*\
|
||
!*** ./src/demux/sample-aes.ts ***!
|
||
\*********************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _crypt_decrypter__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../crypt/decrypter */ "./src/crypt/decrypter.ts");
|
||
/* harmony import */ var _tsdemuxer__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./tsdemuxer */ "./src/demux/tsdemuxer.ts");
|
||
/**
|
||
* SAMPLE-AES decrypter
|
||
*/
|
||
|
||
|
||
|
||
var SampleAesDecrypter = /*#__PURE__*/function () {
|
||
function SampleAesDecrypter(observer, config, keyData) {
|
||
this.keyData = void 0;
|
||
this.decrypter = void 0;
|
||
this.keyData = keyData;
|
||
this.decrypter = new _crypt_decrypter__WEBPACK_IMPORTED_MODULE_0__["default"](observer, config, {
|
||
removePKCS7Padding: false
|
||
});
|
||
}
|
||
|
||
var _proto = SampleAesDecrypter.prototype;
|
||
|
||
_proto.decryptBuffer = function decryptBuffer(encryptedData, callback) {
|
||
this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer, callback);
|
||
} // AAC - encrypt all full 16 bytes blocks starting from offset 16
|
||
;
|
||
|
||
_proto.decryptAacSample = function decryptAacSample(samples, sampleIndex, callback, sync) {
|
||
var curUnit = samples[sampleIndex].unit;
|
||
|
||
if (curUnit.length <= 16) {
// No encrypted portion in this sample (the first 16 bytes are not
// encrypted, see https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/HLS_Sample_Encryption/Encryption/Encryption.html)
return;
}
|
||
|
||
var encryptedData = curUnit.subarray(16, curUnit.length - curUnit.length % 16);
|
||
var encryptedBuffer = encryptedData.buffer.slice(encryptedData.byteOffset, encryptedData.byteOffset + encryptedData.length);
|
||
var localthis = this;
|
||
this.decryptBuffer(encryptedBuffer, function (decryptedBuffer) {
|
||
var decryptedData = new Uint8Array(decryptedBuffer);
|
||
curUnit.set(decryptedData, 16);
|
||
|
||
if (!sync) {
|
||
localthis.decryptAacSamples(samples, sampleIndex + 1, callback);
|
||
}
|
||
});
|
||
};
|
||
|
||
_proto.decryptAacSamples = function decryptAacSamples(samples, sampleIndex, callback) {
|
||
for (;; sampleIndex++) {
|
||
if (sampleIndex >= samples.length) {
|
||
callback();
|
||
return;
|
||
}
|
||
|
||
if (samples[sampleIndex].unit.length < 32) {
|
||
continue;
|
||
}
|
||
|
||
var sync = this.decrypter.isSync();
|
||
this.decryptAacSample(samples, sampleIndex, callback, sync);
|
||
|
||
if (!sync) {
|
||
return;
|
||
}
|
||
}
|
||
} // AVC - encrypt one 16 bytes block out of ten, starting from offset 32
|
||
;
|
||
|
||
_proto.getAvcEncryptedData = function getAvcEncryptedData(decodedData) {
|
||
var encryptedDataLen = Math.floor((decodedData.length - 48) / 160) * 16 + 16;
|
||
var encryptedData = new Int8Array(encryptedDataLen);
|
||
var outputPos = 0;
|
||
|
||
for (var inputPos = 32; inputPos < decodedData.length - 16; inputPos += 160, outputPos += 16) {
|
||
encryptedData.set(decodedData.subarray(inputPos, inputPos + 16), outputPos);
|
||
}
|
||
|
||
return encryptedData;
|
||
};
|
||
|
||
_proto.getAvcDecryptedUnit = function getAvcDecryptedUnit(decodedData, decryptedData) {
|
||
var uint8DecryptedData = new Uint8Array(decryptedData);
|
||
var inputPos = 0;
|
||
|
||
for (var outputPos = 32; outputPos < decodedData.length - 16; outputPos += 160, inputPos += 16) {
|
||
decodedData.set(uint8DecryptedData.subarray(inputPos, inputPos + 16), outputPos);
|
||
}
|
||
|
||
return decodedData;
|
||
};
|
||
|
||
_proto.decryptAvcSample = function decryptAvcSample(samples, sampleIndex, unitIndex, callback, curUnit, sync) {
|
||
var decodedData = Object(_tsdemuxer__WEBPACK_IMPORTED_MODULE_1__["discardEPB"])(curUnit.data);
|
||
var encryptedData = this.getAvcEncryptedData(decodedData);
|
||
var localthis = this;
|
||
this.decryptBuffer(encryptedData.buffer, function (decryptedBuffer) {
|
||
curUnit.data = localthis.getAvcDecryptedUnit(decodedData, decryptedBuffer);
|
||
|
||
if (!sync) {
|
||
localthis.decryptAvcSamples(samples, sampleIndex, unitIndex + 1, callback);
|
||
}
|
||
});
|
||
};
|
||
|
||
_proto.decryptAvcSamples = function decryptAvcSamples(samples, sampleIndex, unitIndex, callback) {
|
||
if (samples instanceof Uint8Array) {
|
||
throw new Error('Cannot decrypt samples of type Uint8Array');
|
||
}
|
||
|
||
for (;; sampleIndex++, unitIndex = 0) {
|
||
if (sampleIndex >= samples.length) {
|
||
callback();
|
||
return;
|
||
}
|
||
|
||
var curUnits = samples[sampleIndex].units;
|
||
|
||
for (;; unitIndex++) {
|
||
if (unitIndex >= curUnits.length) {
|
||
break;
|
||
}
|
||
|
||
var curUnit = curUnits[unitIndex];
|
||
|
||
if (curUnit.data.length <= 48 || curUnit.type !== 1 && curUnit.type !== 5) {
|
||
continue;
|
||
}
|
||
|
||
var sync = this.decrypter.isSync();
|
||
this.decryptAvcSample(samples, sampleIndex, unitIndex, callback, curUnit, sync);
|
||
|
||
if (!sync) {
|
||
return;
|
||
}
|
||
}
|
||
}
|
||
};
|
||
|
||
return SampleAesDecrypter;
|
||
}();
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (SampleAesDecrypter);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/transmuxer-interface.ts":
|
||
/*!*******************************************!*\
|
||
!*** ./src/demux/transmuxer-interface.ts ***!
|
||
\*******************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return TransmuxerInterface; });
|
||
/* harmony import */ var webworkify_webpack__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! webworkify-webpack */ "./node_modules/webworkify-webpack/index.js");
|
||
/* harmony import */ var webworkify_webpack__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(webworkify_webpack__WEBPACK_IMPORTED_MODULE_0__);
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../demux/transmuxer */ "./src/demux/transmuxer.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _utils_mediasource_helper__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils/mediasource-helper */ "./src/utils/mediasource-helper.ts");
|
||
/* harmony import */ var eventemitter3__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! eventemitter3 */ "./node_modules/eventemitter3/index.js");
|
||
/* harmony import */ var eventemitter3__WEBPACK_IMPORTED_MODULE_6___default = /*#__PURE__*/__webpack_require__.n(eventemitter3__WEBPACK_IMPORTED_MODULE_6__);
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
var MediaSource = Object(_utils_mediasource_helper__WEBPACK_IMPORTED_MODULE_5__["getMediaSource"])() || {
|
||
isTypeSupported: function isTypeSupported() {
|
||
return false;
|
||
}
|
||
};
|
||
|
||
var TransmuxerInterface = /*#__PURE__*/function () {
|
||
function TransmuxerInterface(hls, id, onTransmuxComplete, onFlush) {
|
||
var _this = this;
|
||
|
||
this.hls = void 0;
|
||
this.id = void 0;
|
||
this.observer = void 0;
|
||
this.frag = null;
|
||
this.part = null;
|
||
this.worker = void 0;
|
||
this.onwmsg = void 0;
|
||
this.transmuxer = null;
|
||
this.onTransmuxComplete = void 0;
|
||
this.onFlush = void 0;
|
||
this.hls = hls;
|
||
this.id = id;
|
||
this.onTransmuxComplete = onTransmuxComplete;
|
||
this.onFlush = onFlush;
|
||
var config = hls.config;
|
||
|
||
var forwardMessage = function forwardMessage(ev, data) {
|
||
data = data || {};
|
||
data.frag = _this.frag;
|
||
data.id = _this.id;
|
||
hls.trigger(ev, data);
|
||
}; // forward events to main thread
|
||
|
||
|
||
this.observer = new eventemitter3__WEBPACK_IMPORTED_MODULE_6__["EventEmitter"]();
|
||
this.observer.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_DECRYPTED, forwardMessage);
|
||
this.observer.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, forwardMessage);
|
||
var typeSupported = {
|
||
mp4: MediaSource.isTypeSupported('video/mp4'),
|
||
mpeg: MediaSource.isTypeSupported('audio/mpeg'),
|
||
mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"')
|
||
}; // navigator.vendor is not always available in Web Worker
|
||
// refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
|
||
|
||
var vendor = navigator.vendor;
|
||
|
||
if (config.enableWorker && typeof Worker !== 'undefined') {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].log('demuxing in webworker');
|
||
var worker;
|
||
|
||
try {
|
||
worker = this.worker = webworkify_webpack__WEBPACK_IMPORTED_MODULE_0__(/*require.resolve*/(/*! ../demux/transmuxer-worker.ts */ "./src/demux/transmuxer-worker.ts"));
|
||
this.onwmsg = this.onWorkerMessage.bind(this);
|
||
worker.addEventListener('message', this.onwmsg);
|
||
|
||
worker.onerror = function (event) {
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorTypes"].OTHER_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorDetails"].INTERNAL_EXCEPTION,
|
||
fatal: true,
|
||
event: 'demuxerWorker',
|
||
error: new Error(event.message + " (" + event.filename + ":" + event.lineno + ")")
|
||
});
|
||
};
|
||
|
||
worker.postMessage({
|
||
cmd: 'init',
|
||
typeSupported: typeSupported,
|
||
vendor: vendor,
|
||
id: id,
|
||
config: JSON.stringify(config)
|
||
});
|
||
} catch (err) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn('Error in worker:', err);
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].error('Error while initializing DemuxerWorker, fallback to inline');
|
||
|
||
if (worker) {
|
||
// revoke the Object URL that was used to create transmuxer worker, so as not to leak it
|
||
self.URL.revokeObjectURL(worker.objectURL);
|
||
}
|
||
|
||
this.transmuxer = new _demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__["default"](this.observer, typeSupported, config, vendor, id);
|
||
this.worker = null;
|
||
}
|
||
} else {
|
||
this.transmuxer = new _demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__["default"](this.observer, typeSupported, config, vendor, id);
|
||
}
|
||
}
|
||
|
||
var _proto = TransmuxerInterface.prototype;
|
||
|
||
_proto.destroy = function destroy() {
|
||
var w = this.worker;
|
||
|
||
if (w) {
|
||
w.removeEventListener('message', this.onwmsg);
|
||
w.terminate();
|
||
this.worker = null;
|
||
} else {
|
||
var transmuxer = this.transmuxer;
|
||
|
||
if (transmuxer) {
|
||
transmuxer.destroy();
|
||
this.transmuxer = null;
|
||
}
|
||
}
|
||
|
||
var observer = this.observer;
|
||
|
||
if (observer) {
|
||
observer.removeAllListeners();
|
||
} // @ts-ignore
|
||
|
||
|
||
this.observer = null;
|
||
};
|
||
|
||
_proto.push = function push(data, initSegmentData, audioCodec, videoCodec, frag, part, duration, accurateTimeOffset, chunkMeta, defaultInitPTS) {
|
||
var _frag$initSegment,
|
||
_lastFrag$initSegment,
|
||
_this2 = this;
|
||
|
||
chunkMeta.transmuxing.start = self.performance.now();
|
||
var transmuxer = this.transmuxer,
|
||
worker = this.worker;
|
||
var timeOffset = part ? part.start : frag.start;
|
||
var decryptdata = frag.decryptdata;
|
||
var lastFrag = this.frag;
|
||
var discontinuity = !(lastFrag && frag.cc === lastFrag.cc);
|
||
var trackSwitch = !(lastFrag && chunkMeta.level === lastFrag.level);
|
||
var snDiff = lastFrag ? chunkMeta.sn - lastFrag.sn : -1;
|
||
var partDiff = this.part ? chunkMeta.part - this.part.index : 1;
|
||
var contiguous = !trackSwitch && (snDiff === 1 || snDiff === 0 && partDiff === 1);
|
||
var now = self.performance.now();
|
||
|
||
if (trackSwitch || snDiff || frag.stats.parsing.start === 0) {
|
||
frag.stats.parsing.start = now;
|
||
}
|
||
|
||
if (part && (partDiff || !contiguous)) {
|
||
part.stats.parsing.start = now;
|
||
}
|
||
|
||
var initSegmentChange = !(lastFrag && ((_frag$initSegment = frag.initSegment) === null || _frag$initSegment === void 0 ? void 0 : _frag$initSegment.url) === ((_lastFrag$initSegment = lastFrag.initSegment) === null || _lastFrag$initSegment === void 0 ? void 0 : _lastFrag$initSegment.url));
|
||
var state = new _demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__["TransmuxState"](discontinuity, contiguous, accurateTimeOffset, trackSwitch, timeOffset, initSegmentChange);
|
||
|
||
if (!contiguous || discontinuity || initSegmentChange) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].log("[transmuxer-interface, " + frag.type + "]: Starting new transmux session for sn: " + chunkMeta.sn + " p: " + chunkMeta.part + " level: " + chunkMeta.level + " id: " + chunkMeta.id + "\n discontinuity: " + discontinuity + "\n trackSwitch: " + trackSwitch + "\n contiguous: " + contiguous + "\n accurateTimeOffset: " + accurateTimeOffset + "\n timeOffset: " + timeOffset + "\n initSegmentChange: " + initSegmentChange);
|
||
var config = new _demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__["TransmuxConfig"](audioCodec, videoCodec, initSegmentData, duration, defaultInitPTS);
|
||
this.configureTransmuxer(config);
|
||
}
|
||
|
||
this.frag = frag;
|
||
this.part = part; // Frags with sn of 'initSegment' are not transmuxed
|
||
|
||
if (worker) {
|
||
// post fragment payload as transferable objects for ArrayBuffer (no copy)
|
||
worker.postMessage({
|
||
cmd: 'demux',
|
||
data: data,
|
||
decryptdata: decryptdata,
|
||
chunkMeta: chunkMeta,
|
||
state: state
|
||
}, data instanceof ArrayBuffer ? [data] : []);
|
||
} else if (transmuxer) {
|
||
var _transmuxResult = transmuxer.push(data, decryptdata, chunkMeta, state);
|
||
|
||
if (Object(_demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__["isPromise"])(_transmuxResult)) {
|
||
_transmuxResult.then(function (data) {
|
||
_this2.handleTransmuxComplete(data);
|
||
});
|
||
} else {
|
||
this.handleTransmuxComplete(_transmuxResult);
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.flush = function flush(chunkMeta) {
|
||
var _this3 = this;
|
||
|
||
chunkMeta.transmuxing.start = self.performance.now();
|
||
var transmuxer = this.transmuxer,
|
||
worker = this.worker;
|
||
|
||
if (worker) {
|
||
worker.postMessage({
|
||
cmd: 'flush',
|
||
chunkMeta: chunkMeta
|
||
});
|
||
} else if (transmuxer) {
|
||
var _transmuxResult2 = transmuxer.flush(chunkMeta);
|
||
|
||
if (Object(_demux_transmuxer__WEBPACK_IMPORTED_MODULE_2__["isPromise"])(_transmuxResult2)) {
|
||
_transmuxResult2.then(function (data) {
|
||
_this3.handleFlushResult(data, chunkMeta);
|
||
});
|
||
} else {
|
||
this.handleFlushResult(_transmuxResult2, chunkMeta);
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.handleFlushResult = function handleFlushResult(results, chunkMeta) {
|
||
var _this4 = this;
|
||
|
||
results.forEach(function (result) {
|
||
_this4.handleTransmuxComplete(result);
|
||
});
|
||
this.onFlush(chunkMeta);
|
||
};
|
||
|
||
_proto.onWorkerMessage = function onWorkerMessage(ev) {
|
||
var data = ev.data;
|
||
var hls = this.hls;
|
||
|
||
switch (data.event) {
|
||
case 'init':
|
||
{
|
||
// revoke the Object URL that was used to create transmuxer worker, so as not to leak it
|
||
self.URL.revokeObjectURL(this.worker.objectURL);
|
||
break;
|
||
}
|
||
|
||
case 'transmuxComplete':
|
||
{
|
||
this.handleTransmuxComplete(data.data);
|
||
break;
|
||
}
|
||
|
||
case 'flush':
|
||
{
|
||
this.onFlush(data.data);
|
||
break;
|
||
}
|
||
|
||
/* falls through */
|
||
|
||
default:
|
||
{
|
||
data.data = data.data || {};
|
||
data.data.frag = this.frag;
|
||
data.data.id = this.id;
|
||
hls.trigger(data.event, data.data);
|
||
break;
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.configureTransmuxer = function configureTransmuxer(config) {
|
||
var worker = this.worker,
|
||
transmuxer = this.transmuxer;
|
||
|
||
if (worker) {
|
||
worker.postMessage({
|
||
cmd: 'configure',
|
||
config: config
|
||
});
|
||
} else if (transmuxer) {
|
||
transmuxer.configure(config);
|
||
}
|
||
};
|
||
|
||
_proto.handleTransmuxComplete = function handleTransmuxComplete(result) {
|
||
result.chunkMeta.transmuxing.end = self.performance.now();
|
||
this.onTransmuxComplete(result);
|
||
};
|
||
|
||
return TransmuxerInterface;
|
||
}();
|
||
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/transmuxer-worker.ts":
|
||
/*!****************************************!*\
|
||
!*** ./src/demux/transmuxer-worker.ts ***!
|
||
\****************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return TransmuxerWorker; });
|
||
/* harmony import */ var _demux_transmuxer__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../demux/transmuxer */ "./src/demux/transmuxer.ts");
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var eventemitter3__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! eventemitter3 */ "./node_modules/eventemitter3/index.js");
|
||
/* harmony import */ var eventemitter3__WEBPACK_IMPORTED_MODULE_3___default = /*#__PURE__*/__webpack_require__.n(eventemitter3__WEBPACK_IMPORTED_MODULE_3__);
|
||
|
||
|
||
|
||
|
||
function TransmuxerWorker(self) {
|
||
var observer = new eventemitter3__WEBPACK_IMPORTED_MODULE_3__["EventEmitter"]();
|
||
|
||
var forwardMessage = function forwardMessage(ev, data) {
|
||
self.postMessage({
|
||
event: ev,
|
||
data: data
|
||
});
|
||
}; // forward events to main thread
|
||
|
||
|
||
observer.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].FRAG_DECRYPTED, forwardMessage);
|
||
observer.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, forwardMessage);
|
||
self.addEventListener('message', function (ev) {
|
||
var data = ev.data;
|
||
|
||
switch (data.cmd) {
|
||
case 'init':
|
||
{
|
||
var config = JSON.parse(data.config);
|
||
self.transmuxer = new _demux_transmuxer__WEBPACK_IMPORTED_MODULE_0__["default"](observer, data.typeSupported, config, data.vendor, data.id);
|
||
Object(_utils_logger__WEBPACK_IMPORTED_MODULE_2__["enableLogs"])(config.debug);
|
||
forwardMessage('init', null);
|
||
break;
|
||
}
|
||
|
||
case 'configure':
|
||
{
|
||
self.transmuxer.configure(data.config);
|
||
break;
|
||
}
|
||
|
||
case 'demux':
|
||
{
|
||
var transmuxResult = self.transmuxer.push(data.data, data.decryptdata, data.chunkMeta, data.state);
|
||
|
||
if (Object(_demux_transmuxer__WEBPACK_IMPORTED_MODULE_0__["isPromise"])(transmuxResult)) {
|
||
transmuxResult.then(function (data) {
|
||
emitTransmuxComplete(self, data);
|
||
});
|
||
} else {
|
||
emitTransmuxComplete(self, transmuxResult);
|
||
}
|
||
|
||
break;
|
||
}
|
||
|
||
case 'flush':
|
||
{
|
||
var id = data.chunkMeta;
|
||
|
||
var _transmuxResult = self.transmuxer.flush(id);
|
||
|
||
if (Object(_demux_transmuxer__WEBPACK_IMPORTED_MODULE_0__["isPromise"])(_transmuxResult)) {
|
||
_transmuxResult.then(function (results) {
|
||
handleFlushResult(self, results, id);
|
||
});
|
||
} else {
|
||
handleFlushResult(self, _transmuxResult, id);
|
||
}
|
||
|
||
break;
|
||
}
|
||
|
||
default:
|
||
break;
|
||
}
|
||
});
|
||
}
|
||
|
||
function emitTransmuxComplete(self, transmuxResult) {
|
||
if (isEmptyResult(transmuxResult.remuxResult)) {
|
||
return;
|
||
}
|
||
|
||
var transferable = [];
|
||
var _transmuxResult$remux = transmuxResult.remuxResult,
|
||
audio = _transmuxResult$remux.audio,
|
||
video = _transmuxResult$remux.video;
|
||
|
||
if (audio) {
|
||
addToTransferable(transferable, audio);
|
||
}
|
||
|
||
if (video) {
|
||
addToTransferable(transferable, video);
|
||
}
|
||
|
||
self.postMessage({
|
||
event: 'transmuxComplete',
|
||
data: transmuxResult
|
||
}, transferable);
|
||
} // Converts data to a transferable object https://developers.google.com/web/updates/2011/12/Transferable-Objects-Lightning-Fast)
|
||
// in order to minimize message passing overhead
|
||
|
||
|
||
function addToTransferable(transferable, track) {
|
||
if (track.data1) {
|
||
transferable.push(track.data1.buffer);
|
||
}
|
||
|
||
if (track.data2) {
|
||
transferable.push(track.data2.buffer);
|
||
}
|
||
}
|
||
|
||
function handleFlushResult(self, results, chunkMeta) {
|
||
results.forEach(function (result) {
|
||
emitTransmuxComplete(self, result);
|
||
});
|
||
self.postMessage({
|
||
event: 'flush',
|
||
data: chunkMeta
|
||
});
|
||
}
|
||
|
||
function isEmptyResult(remuxResult) {
|
||
return !remuxResult.audio && !remuxResult.video && !remuxResult.text && !remuxResult.id3 && !remuxResult.initSegment;
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/transmuxer.ts":
|
||
/*!*********************************!*\
|
||
!*** ./src/demux/transmuxer.ts ***!
|
||
\*********************************/
|
||
/*! exports provided: default, isPromise, TransmuxConfig, TransmuxState */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return Transmuxer; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isPromise", function() { return isPromise; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "TransmuxConfig", function() { return TransmuxConfig; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "TransmuxState", function() { return TransmuxState; });
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _crypt_decrypter__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../crypt/decrypter */ "./src/crypt/decrypter.ts");
|
||
/* harmony import */ var _demux_aacdemuxer__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../demux/aacdemuxer */ "./src/demux/aacdemuxer.ts");
|
||
/* harmony import */ var _demux_mp4demuxer__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../demux/mp4demuxer */ "./src/demux/mp4demuxer.ts");
|
||
/* harmony import */ var _demux_tsdemuxer__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../demux/tsdemuxer */ "./src/demux/tsdemuxer.ts");
|
||
/* harmony import */ var _demux_mp3demuxer__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../demux/mp3demuxer */ "./src/demux/mp3demuxer.ts");
|
||
/* harmony import */ var _remux_mp4_remuxer__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../remux/mp4-remuxer */ "./src/remux/mp4-remuxer.ts");
|
||
/* harmony import */ var _remux_passthrough_remuxer__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../remux/passthrough-remuxer */ "./src/remux/passthrough-remuxer.ts");
|
||
/* harmony import */ var _chunk_cache__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./chunk-cache */ "./src/demux/chunk-cache.ts");
|
||
/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
var now; // performance.now() not available on WebWorker, at least on Safari Desktop
|
||
|
||
try {
|
||
now = self.performance.now.bind(self.performance);
|
||
} catch (err) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_11__["logger"].debug('Unable to use Performance API on this environment');
|
||
now = self.Date.now;
|
||
}
|
||
|
||
var muxConfig = [{
|
||
demux: _demux_tsdemuxer__WEBPACK_IMPORTED_MODULE_5__["default"],
|
||
remux: _remux_mp4_remuxer__WEBPACK_IMPORTED_MODULE_7__["default"]
|
||
}, {
|
||
demux: _demux_mp4demuxer__WEBPACK_IMPORTED_MODULE_4__["default"],
|
||
remux: _remux_passthrough_remuxer__WEBPACK_IMPORTED_MODULE_8__["default"]
|
||
}, {
|
||
demux: _demux_aacdemuxer__WEBPACK_IMPORTED_MODULE_3__["default"],
|
||
remux: _remux_mp4_remuxer__WEBPACK_IMPORTED_MODULE_7__["default"]
|
||
}, {
|
||
demux: _demux_mp3demuxer__WEBPACK_IMPORTED_MODULE_6__["default"],
|
||
remux: _remux_mp4_remuxer__WEBPACK_IMPORTED_MODULE_7__["default"]
|
||
}];
|
||
var minProbeByteLength = 1024;
|
||
muxConfig.forEach(function (_ref) {
|
||
var demux = _ref.demux;
|
||
minProbeByteLength = Math.max(minProbeByteLength, demux.minProbeByteLength);
|
||
});
|
||
|
||
var Transmuxer = /*#__PURE__*/function () {
|
||
function Transmuxer(observer, typeSupported, config, vendor, id) {
|
||
this.observer = void 0;
|
||
this.typeSupported = void 0;
|
||
this.config = void 0;
|
||
this.vendor = void 0;
|
||
this.id = void 0;
|
||
this.demuxer = void 0;
|
||
this.remuxer = void 0;
|
||
this.decrypter = void 0;
|
||
this.probe = void 0;
|
||
this.decryptionPromise = null;
|
||
this.transmuxConfig = void 0;
|
||
this.currentTransmuxState = void 0;
|
||
this.cache = new _chunk_cache__WEBPACK_IMPORTED_MODULE_9__["default"]();
|
||
this.observer = observer;
|
||
this.typeSupported = typeSupported;
|
||
this.config = config;
|
||
this.vendor = vendor;
|
||
this.id = id;
|
||
}
|
||
|
||
var _proto = Transmuxer.prototype;
|
||
|
||
_proto.configure = function configure(transmuxConfig) {
|
||
this.transmuxConfig = transmuxConfig;
|
||
|
||
if (this.decrypter) {
|
||
this.decrypter.reset();
|
||
}
|
||
};
|
||
|
||
_proto.push = function push(data, decryptdata, chunkMeta, state) {
|
||
var _this = this;
|
||
|
||
var stats = chunkMeta.transmuxing;
|
||
stats.executeStart = now();
|
||
var uintData = new Uint8Array(data);
|
||
var cache = this.cache,
|
||
config = this.config,
|
||
currentTransmuxState = this.currentTransmuxState,
|
||
transmuxConfig = this.transmuxConfig;
|
||
|
||
if (state) {
|
||
this.currentTransmuxState = state;
|
||
}
|
||
|
||
var keyData = getEncryptionType(uintData, decryptdata);
|
||
|
||
if (keyData && keyData.method === 'AES-128') {
|
||
var decrypter = this.getDecrypter(); // Software decryption is synchronous; webCrypto is not
|
||
|
||
if (config.enableSoftwareAES) {
|
||
// Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
|
||
// data is handled in the flush() call
|
||
var decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
|
||
|
||
if (!decryptedData) {
|
||
stats.executeEnd = now();
|
||
return emptyResult(chunkMeta);
|
||
}
|
||
|
||
uintData = new Uint8Array(decryptedData);
|
||
} else {
|
||
this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(function (decryptedData) {
|
||
// Calling push here is important; if flush() is called while this is still resolving, this ensures that
|
||
// the decrypted data has been transmuxed
|
||
var result = _this.push(decryptedData, null, chunkMeta);
|
||
|
||
_this.decryptionPromise = null;
|
||
return result;
|
||
});
|
||
return this.decryptionPromise;
|
||
}
|
||
}
|
||
|
||
var _ref2 = state || currentTransmuxState,
|
||
contiguous = _ref2.contiguous,
|
||
discontinuity = _ref2.discontinuity,
|
||
trackSwitch = _ref2.trackSwitch,
|
||
accurateTimeOffset = _ref2.accurateTimeOffset,
|
||
timeOffset = _ref2.timeOffset,
|
||
initSegmentChange = _ref2.initSegmentChange;
|
||
|
||
var audioCodec = transmuxConfig.audioCodec,
|
||
videoCodec = transmuxConfig.videoCodec,
|
||
defaultInitPts = transmuxConfig.defaultInitPts,
|
||
duration = transmuxConfig.duration,
|
||
initSegmentData = transmuxConfig.initSegmentData; // Reset muxers before probing to ensure that their state is clean, even if flushing occurs before a successful probe
|
||
|
||
if (discontinuity || trackSwitch || initSegmentChange) {
|
||
this.resetInitSegment(initSegmentData, audioCodec, videoCodec, duration);
|
||
}
|
||
|
||
if (discontinuity || initSegmentChange) {
|
||
this.resetInitialTimestamp(defaultInitPts);
|
||
}
|
||
|
||
if (!contiguous) {
|
||
this.resetContiguity();
|
||
}
|
||
|
||
if (this.needsProbing(uintData, discontinuity, trackSwitch)) {
|
||
if (cache.dataLength) {
|
||
var cachedData = cache.flush();
|
||
uintData = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_10__["appendUint8Array"])(cachedData, uintData);
|
||
}
|
||
|
||
this.configureTransmuxer(uintData, transmuxConfig);
|
||
}
|
||
|
||
var result = this.transmux(uintData, keyData, timeOffset, accurateTimeOffset, chunkMeta);
|
||
var currentState = this.currentTransmuxState;
|
||
currentState.contiguous = true;
|
||
currentState.discontinuity = false;
|
||
currentState.trackSwitch = false;
|
||
stats.executeEnd = now();
|
||
return result;
|
||
} // Due to data caching, flush calls can produce more than one TransmuxerResult (hence the Array type)
|
||
;
|
||
|
||
_proto.flush = function flush(chunkMeta) {
|
||
var _this2 = this;
|
||
|
||
var stats = chunkMeta.transmuxing;
|
||
stats.executeStart = now();
|
||
var decrypter = this.decrypter,
|
||
cache = this.cache,
|
||
currentTransmuxState = this.currentTransmuxState,
|
||
decryptionPromise = this.decryptionPromise;
|
||
|
||
if (decryptionPromise) {
|
||
// Upon resolution, the decryption promise calls push() and returns its TransmuxerResult up the stack. Therefore
|
||
// only flushing is required for async decryption
|
||
return decryptionPromise.then(function () {
|
||
return _this2.flush(chunkMeta);
|
||
});
|
||
}
|
||
|
||
var transmuxResults = [];
|
||
var timeOffset = currentTransmuxState.timeOffset;
|
||
|
||
if (decrypter) {
|
||
// The decrypter may have data cached, which needs to be demuxed. In this case we'll have two TransmuxResults
|
||
// This happens in the case that we receive only 1 push call for a segment (either for non-progressive downloads,
|
||
// or for progressive downloads with small segments)
|
||
var decryptedData = decrypter.flush();
|
||
|
||
if (decryptedData) {
|
||
// Push always returns a TransmuxerResult if decryptdata is null
|
||
transmuxResults.push(this.push(decryptedData, null, chunkMeta));
|
||
}
|
||
}
|
||
|
||
var bytesSeen = cache.dataLength;
|
||
cache.reset();
|
||
var demuxer = this.demuxer,
|
||
remuxer = this.remuxer;
|
||
|
||
if (!demuxer || !remuxer) {
|
||
// If probing failed, and each demuxer saw enough bytes to be able to probe, then Hls.js has been given content its not able to handle
|
||
if (bytesSeen >= minProbeByteLength) {
|
||
this.observer.emit(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, _events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].MEDIA_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_PARSING_ERROR,
|
||
fatal: true,
|
||
reason: 'no demux matching with content found'
|
||
});
|
||
}
|
||
|
||
stats.executeEnd = now();
|
||
return [emptyResult(chunkMeta)];
|
||
}
|
||
|
||
var demuxResultOrPromise = demuxer.flush(timeOffset);
|
||
|
||
if (isPromise(demuxResultOrPromise)) {
|
||
// Decrypt final SAMPLE-AES samples
|
||
return demuxResultOrPromise.then(function (demuxResult) {
|
||
_this2.flushRemux(transmuxResults, demuxResult, chunkMeta);
|
||
|
||
return transmuxResults;
|
||
});
|
||
}
|
||
|
||
this.flushRemux(transmuxResults, demuxResultOrPromise, chunkMeta);
|
||
return transmuxResults;
|
||
};
|
||
|
||
_proto.flushRemux = function flushRemux(transmuxResults, demuxResult, chunkMeta) {
|
||
var audioTrack = demuxResult.audioTrack,
|
||
videoTrack = demuxResult.videoTrack,
|
||
id3Track = demuxResult.id3Track,
|
||
textTrack = demuxResult.textTrack;
|
||
var _this$currentTransmux = this.currentTransmuxState,
|
||
accurateTimeOffset = _this$currentTransmux.accurateTimeOffset,
|
||
timeOffset = _this$currentTransmux.timeOffset;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_11__["logger"].log("[transmuxer.ts]: Flushed fragment " + chunkMeta.sn + (chunkMeta.part > -1 ? ' p: ' + chunkMeta.part : '') + " of level " + chunkMeta.level);
|
||
var remuxResult = this.remuxer.remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, accurateTimeOffset, true, this.id);
|
||
transmuxResults.push({
|
||
remuxResult: remuxResult,
|
||
chunkMeta: chunkMeta
|
||
});
|
||
chunkMeta.transmuxing.executeEnd = now();
|
||
};
|
||
|
||
_proto.resetInitialTimestamp = function resetInitialTimestamp(defaultInitPts) {
|
||
var demuxer = this.demuxer,
|
||
remuxer = this.remuxer;
|
||
|
||
if (!demuxer || !remuxer) {
|
||
return;
|
||
}
|
||
|
||
demuxer.resetTimeStamp(defaultInitPts);
|
||
remuxer.resetTimeStamp(defaultInitPts);
|
||
};
|
||
|
||
_proto.resetContiguity = function resetContiguity() {
|
||
var demuxer = this.demuxer,
|
||
remuxer = this.remuxer;
|
||
|
||
if (!demuxer || !remuxer) {
|
||
return;
|
||
}
|
||
|
||
demuxer.resetContiguity();
|
||
remuxer.resetNextTimestamp();
|
||
};
|
||
|
||
_proto.resetInitSegment = function resetInitSegment(initSegmentData, audioCodec, videoCodec, trackDuration) {
|
||
var demuxer = this.demuxer,
|
||
remuxer = this.remuxer;
|
||
|
||
if (!demuxer || !remuxer) {
|
||
return;
|
||
}
|
||
|
||
demuxer.resetInitSegment(initSegmentData, audioCodec, videoCodec, trackDuration);
|
||
remuxer.resetInitSegment(initSegmentData, audioCodec, videoCodec);
|
||
};
|
||
|
||
_proto.destroy = function destroy() {
|
||
if (this.demuxer) {
|
||
this.demuxer.destroy();
|
||
this.demuxer = undefined;
|
||
}
|
||
|
||
if (this.remuxer) {
|
||
this.remuxer.destroy();
|
||
this.remuxer = undefined;
|
||
}
|
||
};
|
||
|
||
_proto.transmux = function transmux(data, keyData, timeOffset, accurateTimeOffset, chunkMeta) {
|
||
var result;
|
||
|
||
if (keyData && keyData.method === 'SAMPLE-AES') {
|
||
result = this.transmuxSampleAes(data, keyData, timeOffset, accurateTimeOffset, chunkMeta);
|
||
} else {
|
||
result = this.transmuxUnencrypted(data, timeOffset, accurateTimeOffset, chunkMeta);
|
||
}
|
||
|
||
return result;
|
||
};
|
||
|
||
_proto.transmuxUnencrypted = function transmuxUnencrypted(data, timeOffset, accurateTimeOffset, chunkMeta) {
|
||
var _demux = this.demuxer.demux(data, timeOffset, false, !this.config.progressive),
|
||
audioTrack = _demux.audioTrack,
|
||
videoTrack = _demux.videoTrack,
|
||
id3Track = _demux.id3Track,
|
||
textTrack = _demux.textTrack;
|
||
|
||
var remuxResult = this.remuxer.remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, accurateTimeOffset, false, this.id);
|
||
return {
|
||
remuxResult: remuxResult,
|
||
chunkMeta: chunkMeta
|
||
};
|
||
};
|
||
|
||
_proto.transmuxSampleAes = function transmuxSampleAes(data, decryptData, timeOffset, accurateTimeOffset, chunkMeta) {
|
||
var _this3 = this;
|
||
|
||
return this.demuxer.demuxSampleAes(data, decryptData, timeOffset).then(function (demuxResult) {
|
||
var remuxResult = _this3.remuxer.remux(demuxResult.audioTrack, demuxResult.videoTrack, demuxResult.id3Track, demuxResult.textTrack, timeOffset, accurateTimeOffset, false, _this3.id);
|
||
|
||
return {
|
||
remuxResult: remuxResult,
|
||
chunkMeta: chunkMeta
|
||
};
|
||
});
|
||
};
|
||
|
||
_proto.configureTransmuxer = function configureTransmuxer(data, transmuxConfig) {
|
||
var config = this.config,
|
||
observer = this.observer,
|
||
typeSupported = this.typeSupported,
|
||
vendor = this.vendor;
|
||
var audioCodec = transmuxConfig.audioCodec,
|
||
defaultInitPts = transmuxConfig.defaultInitPts,
|
||
duration = transmuxConfig.duration,
|
||
initSegmentData = transmuxConfig.initSegmentData,
|
||
videoCodec = transmuxConfig.videoCodec; // probe for content type
|
||
|
||
var mux;
|
||
|
||
for (var i = 0, len = muxConfig.length; i < len; i++) {
|
||
if (muxConfig[i].demux.probe(data)) {
|
||
mux = muxConfig[i];
|
||
break;
|
||
}
|
||
}
|
||
|
||
if (!mux) {
|
||
// If probing previous configs fail, use mp4 passthrough
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_11__["logger"].warn('Failed to find demuxer by probing frag, treating as mp4 passthrough');
|
||
mux = {
|
||
demux: _demux_mp4demuxer__WEBPACK_IMPORTED_MODULE_4__["default"],
|
||
remux: _remux_passthrough_remuxer__WEBPACK_IMPORTED_MODULE_8__["default"]
|
||
};
|
||
} // so let's check that current remuxer and demuxer are still valid
|
||
|
||
|
||
var demuxer = this.demuxer;
|
||
var remuxer = this.remuxer;
|
||
var Remuxer = mux.remux;
|
||
var Demuxer = mux.demux;
|
||
|
||
if (!remuxer || !(remuxer instanceof Remuxer)) {
|
||
this.remuxer = new Remuxer(observer, config, typeSupported, vendor);
|
||
}
|
||
|
||
if (!demuxer || !(demuxer instanceof Demuxer)) {
|
||
this.demuxer = new Demuxer(observer, config, typeSupported);
|
||
this.probe = Demuxer.probe;
|
||
} // Ensure that muxers are always initialized with an initSegment
|
||
|
||
|
||
this.resetInitSegment(initSegmentData, audioCodec, videoCodec, duration);
|
||
this.resetInitialTimestamp(defaultInitPts);
|
||
};
|
||
|
||
_proto.needsProbing = function needsProbing(data, discontinuity, trackSwitch) {
|
||
// in case of continuity change, or track switch
|
||
// we might switch from content type (AAC container to TS container, or TS to fmp4 for example)
|
||
return !this.demuxer || !this.remuxer || discontinuity || trackSwitch;
|
||
};
|
||
|
||
_proto.getDecrypter = function getDecrypter() {
|
||
var decrypter = this.decrypter;
|
||
|
||
if (!decrypter) {
|
||
decrypter = this.decrypter = new _crypt_decrypter__WEBPACK_IMPORTED_MODULE_2__["default"](this.observer, this.config);
|
||
}
|
||
|
||
return decrypter;
|
||
};
|
||
|
||
return Transmuxer;
|
||
}();
|
||
|
||
|
||
|
||
function getEncryptionType(data, decryptData) {
|
||
var encryptionType = null;
|
||
|
||
if (data.byteLength > 0 && decryptData != null && decryptData.key != null && decryptData.iv !== null && decryptData.method != null) {
|
||
encryptionType = decryptData;
|
||
}
|
||
|
||
return encryptionType;
|
||
}
|
||
|
||
var emptyResult = function emptyResult(chunkMeta) {
|
||
return {
|
||
remuxResult: {},
|
||
chunkMeta: chunkMeta
|
||
};
|
||
};
|
||
|
||
function isPromise(p) {
|
||
return 'then' in p && p.then instanceof Function;
|
||
}
|
||
var TransmuxConfig = function TransmuxConfig(audioCodec, videoCodec, initSegmentData, duration, defaultInitPts) {
|
||
this.audioCodec = void 0;
|
||
this.videoCodec = void 0;
|
||
this.initSegmentData = void 0;
|
||
this.duration = void 0;
|
||
this.defaultInitPts = void 0;
|
||
this.audioCodec = audioCodec;
|
||
this.videoCodec = videoCodec;
|
||
this.initSegmentData = initSegmentData;
|
||
this.duration = duration;
|
||
this.defaultInitPts = defaultInitPts;
|
||
};
|
||
var TransmuxState = function TransmuxState(discontinuity, contiguous, accurateTimeOffset, trackSwitch, timeOffset, initSegmentChange) {
|
||
this.discontinuity = void 0;
|
||
this.contiguous = void 0;
|
||
this.accurateTimeOffset = void 0;
|
||
this.trackSwitch = void 0;
|
||
this.timeOffset = void 0;
|
||
this.initSegmentChange = void 0;
|
||
this.discontinuity = discontinuity;
|
||
this.contiguous = contiguous;
|
||
this.accurateTimeOffset = accurateTimeOffset;
|
||
this.trackSwitch = trackSwitch;
|
||
this.timeOffset = timeOffset;
|
||
this.initSegmentChange = initSegmentChange;
|
||
};
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/demux/tsdemuxer.ts":
|
||
/*!********************************!*\
|
||
!*** ./src/demux/tsdemuxer.ts ***!
|
||
\********************************/
|
||
/*! exports provided: discardEPB, default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "discardEPB", function() { return discardEPB; });
|
||
/* harmony import */ var _adts__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./adts */ "./src/demux/adts.ts");
|
||
/* harmony import */ var _mpegaudio__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./mpegaudio */ "./src/demux/mpegaudio.ts");
|
||
/* harmony import */ var _exp_golomb__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./exp-golomb */ "./src/demux/exp-golomb.ts");
|
||
/* harmony import */ var _sample_aes__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./sample-aes */ "./src/demux/sample-aes.ts");
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/**
|
||
* highly optimized TS demuxer:
|
||
* parse PAT, PMT
|
||
* extract PES packet from audio and video PIDs
|
||
* extract AVC/H264 NAL units and AAC/ADTS samples from PES packet
|
||
* trigger the remuxer upon parsing completion
|
||
* it also tries to workaround as best as it can audio codec switch (HE-AAC to AAC and vice versa), without having to restart the MediaSource.
|
||
* it also controls the remuxing process :
|
||
* upon discontinuity or level switch detection, it will also notifies the remuxer so that it can reset its state.
|
||
*/
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
var TSDemuxer = /*#__PURE__*/function () {
|
||
function TSDemuxer(observer, config, typeSupported) {
|
||
this.observer = void 0;
|
||
this.config = void 0;
|
||
this.typeSupported = void 0;
|
||
this.sampleAes = null;
|
||
this.pmtParsed = false;
|
||
this.audioCodec = void 0;
|
||
this.videoCodec = void 0;
|
||
this._duration = 0;
|
||
this._pmtId = -1;
|
||
this._avcTrack = void 0;
|
||
this._audioTrack = void 0;
|
||
this._id3Track = void 0;
|
||
this._txtTrack = void 0;
|
||
this.aacOverFlow = null;
|
||
this.avcSample = null;
|
||
this.remainderData = null;
|
||
this.observer = observer;
|
||
this.config = config;
|
||
this.typeSupported = typeSupported;
|
||
}
|
||
|
||
TSDemuxer.probe = function probe(data) {
|
||
var syncOffset = TSDemuxer.syncOffset(data);
|
||
|
||
if (syncOffset < 0) {
|
||
return false;
|
||
} else {
|
||
if (syncOffset) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn("MPEG2-TS detected but first sync word found @ offset " + syncOffset + ", junk ahead ?");
|
||
}
|
||
|
||
return true;
|
||
}
|
||
};
|
||
|
||
TSDemuxer.syncOffset = function syncOffset(data) {
|
||
// scan 1000 first bytes
|
||
var scanwindow = Math.min(1000, data.length - 3 * 188);
|
||
var i = 0;
|
||
|
||
while (i < scanwindow) {
|
||
// a TS fragment should contain at least 3 TS packets, a PAT, a PMT, and one PID, each starting with 0x47
|
||
if (data[i] === 0x47 && data[i + 188] === 0x47 && data[i + 2 * 188] === 0x47) {
|
||
return i;
|
||
} else {
|
||
i++;
|
||
}
|
||
}
|
||
|
||
return -1;
|
||
}
|
||
/**
|
||
* Creates a track model internal to demuxer used to drive remuxing input
|
||
*
|
||
* @param type 'audio' | 'video' | 'id3' | 'text'
|
||
* @param duration
|
||
* @return TSDemuxer's internal track model
|
||
*/
|
||
;
|
||
|
||
TSDemuxer.createTrack = function createTrack(type, duration) {
|
||
return {
|
||
container: type === 'video' || type === 'audio' ? 'video/mp2t' : undefined,
|
||
type: type,
|
||
id: _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_5__["RemuxerTrackIdConfig"][type],
|
||
pid: -1,
|
||
inputTimeScale: 90000,
|
||
sequenceNumber: 0,
|
||
samples: [],
|
||
dropped: 0,
|
||
duration: type === 'audio' ? duration : undefined
|
||
};
|
||
}
|
||
/**
|
||
* Initializes a new init segment on the demuxer/remuxer interface. Needed for discontinuities/track-switches (or at stream start)
|
||
* Resets all internal track instances of the demuxer.
|
||
*/
|
||
;
|
||
|
||
var _proto = TSDemuxer.prototype;
|
||
|
||
_proto.resetInitSegment = function resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) {
|
||
this.pmtParsed = false;
|
||
this._pmtId = -1;
|
||
this._avcTrack = TSDemuxer.createTrack('video');
|
||
this._audioTrack = TSDemuxer.createTrack('audio', trackDuration);
|
||
this._id3Track = TSDemuxer.createTrack('id3');
|
||
this._txtTrack = TSDemuxer.createTrack('text');
|
||
this._audioTrack.isAAC = true; // flush any partial content
|
||
|
||
this.aacOverFlow = null;
|
||
this.avcSample = null;
|
||
this.audioCodec = audioCodec;
|
||
this.videoCodec = videoCodec;
|
||
this._duration = trackDuration;
|
||
};
|
||
|
||
_proto.resetTimeStamp = function resetTimeStamp() {};
|
||
|
||
_proto.resetContiguity = function resetContiguity() {
|
||
var _audioTrack = this._audioTrack,
|
||
_avcTrack = this._avcTrack,
|
||
_id3Track = this._id3Track;
|
||
|
||
if (_audioTrack) {
|
||
_audioTrack.pesData = null;
|
||
}
|
||
|
||
if (_avcTrack) {
|
||
_avcTrack.pesData = null;
|
||
}
|
||
|
||
if (_id3Track) {
|
||
_id3Track.pesData = null;
|
||
}
|
||
|
||
this.aacOverFlow = null;
|
||
};
|
||
|
||
_proto.demux = function demux(data, timeOffset, isSampleAes, flush) {
|
||
if (isSampleAes === void 0) {
|
||
isSampleAes = false;
|
||
}
|
||
|
||
if (flush === void 0) {
|
||
flush = false;
|
||
}
|
||
|
||
if (!isSampleAes) {
|
||
this.sampleAes = null;
|
||
}
|
||
|
||
var pes;
|
||
var videoTrack = this._avcTrack;
|
||
var audioTrack = this._audioTrack;
|
||
var id3Track = this._id3Track;
|
||
var textTrack = this._txtTrack;
|
||
var avcId = videoTrack.pid;
|
||
var avcData = videoTrack.pesData;
|
||
var audioId = audioTrack.pid;
|
||
var id3Id = id3Track.pid;
|
||
var audioData = audioTrack.pesData;
|
||
var id3Data = id3Track.pesData;
|
||
var unknownPIDs = false;
|
||
var pmtParsed = this.pmtParsed;
|
||
var pmtId = this._pmtId;
|
||
var len = data.length;
|
||
|
||
if (this.remainderData) {
|
||
data = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_5__["appendUint8Array"])(this.remainderData, data);
|
||
len = data.length;
|
||
this.remainderData = null;
|
||
}
|
||
|
||
if (len < 188 && !flush) {
|
||
this.remainderData = data;
|
||
return {
|
||
audioTrack: audioTrack,
|
||
videoTrack: videoTrack,
|
||
id3Track: id3Track,
|
||
textTrack: textTrack
|
||
};
|
||
}
|
||
|
||
var syncOffset = Math.max(0, TSDemuxer.syncOffset(data));
|
||
len -= (len + syncOffset) % 188;
|
||
|
||
if (len < data.byteLength && !flush) {
|
||
this.remainderData = new Uint8Array(data.buffer, len, data.buffer.byteLength - len);
|
||
} // loop through TS packets
|
||
|
||
|
||
var tsPacketErrors = 0;
|
||
|
||
for (var start = syncOffset; start < len; start += 188) {
|
||
if (data[start] === 0x47) {
|
||
var stt = !!(data[start + 1] & 0x40); // pid is a 13-bit field starting at the last bit of TS[1]
|
||
|
||
var pid = ((data[start + 1] & 0x1f) << 8) + data[start + 2];
|
||
var atf = (data[start + 3] & 0x30) >> 4; // if an adaption field is present, its length is specified by the fifth byte of the TS packet header.
|
||
|
||
var offset = void 0;
|
||
|
||
if (atf > 1) {
|
||
offset = start + 5 + data[start + 4]; // continue if there is only adaptation field
|
||
|
||
if (offset === start + 188) {
|
||
continue;
|
||
}
|
||
} else {
|
||
offset = start + 4;
|
||
}
|
||
|
||
switch (pid) {
|
||
case avcId:
|
||
if (stt) {
|
||
if (avcData && (pes = parsePES(avcData))) {
|
||
this.parseAVCPES(videoTrack, textTrack, pes, false);
|
||
}
|
||
|
||
avcData = {
|
||
data: [],
|
||
size: 0
|
||
};
|
||
}
|
||
|
||
if (avcData) {
|
||
avcData.data.push(data.subarray(offset, start + 188));
|
||
avcData.size += start + 188 - offset;
|
||
}
|
||
|
||
break;
|
||
|
||
case audioId:
|
||
if (stt) {
|
||
if (audioData && (pes = parsePES(audioData))) {
|
||
if (audioTrack.isAAC) {
|
||
this.parseAACPES(audioTrack, pes);
|
||
} else {
|
||
this.parseMPEGPES(audioTrack, pes);
|
||
}
|
||
}
|
||
|
||
audioData = {
|
||
data: [],
|
||
size: 0
|
||
};
|
||
}
|
||
|
||
if (audioData) {
|
||
audioData.data.push(data.subarray(offset, start + 188));
|
||
audioData.size += start + 188 - offset;
|
||
}
|
||
|
||
break;
|
||
|
||
case id3Id:
|
||
if (stt) {
|
||
if (id3Data && (pes = parsePES(id3Data))) {
|
||
this.parseID3PES(id3Track, pes);
|
||
}
|
||
|
||
id3Data = {
|
||
data: [],
|
||
size: 0
|
||
};
|
||
}
|
||
|
||
if (id3Data) {
|
||
id3Data.data.push(data.subarray(offset, start + 188));
|
||
id3Data.size += start + 188 - offset;
|
||
}
|
||
|
||
break;
|
||
|
||
case 0:
|
||
if (stt) {
|
||
offset += data[offset] + 1;
|
||
}
|
||
|
||
pmtId = this._pmtId = parsePAT(data, offset);
|
||
break;
|
||
|
||
case pmtId:
|
||
{
|
||
if (stt) {
|
||
offset += data[offset] + 1;
|
||
}
|
||
|
||
var parsedPIDs = parsePMT(data, offset, this.typeSupported.mpeg === true || this.typeSupported.mp3 === true, isSampleAes); // only update track id if track PID found while parsing PMT
|
||
// this is to avoid resetting the PID to -1 in case
|
||
// track PID transiently disappears from the stream
|
||
// this could happen in case of transient missing audio samples for example
|
||
// NOTE this is only the PID of the track as found in TS,
|
||
// but we are not using this for MP4 track IDs.
|
||
|
||
avcId = parsedPIDs.avc;
|
||
|
||
if (avcId > 0) {
|
||
videoTrack.pid = avcId;
|
||
}
|
||
|
||
audioId = parsedPIDs.audio;
|
||
|
||
if (audioId > 0) {
|
||
audioTrack.pid = audioId;
|
||
audioTrack.isAAC = parsedPIDs.isAAC;
|
||
}
|
||
|
||
id3Id = parsedPIDs.id3;
|
||
|
||
if (id3Id > 0) {
|
||
id3Track.pid = id3Id;
|
||
}
|
||
|
||
if (unknownPIDs && !pmtParsed) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].log('reparse from beginning');
|
||
unknownPIDs = false; // we set it to -188, the += 188 in the for loop will reset start to 0
|
||
|
||
start = syncOffset - 188;
|
||
}
|
||
|
||
pmtParsed = this.pmtParsed = true;
|
||
break;
|
||
}
|
||
|
||
case 17:
|
||
case 0x1fff:
|
||
break;
|
||
|
||
default:
|
||
unknownPIDs = true;
|
||
break;
|
||
}
|
||
} else {
|
||
tsPacketErrors++;
|
||
}
|
||
}
|
||
|
||
if (tsPacketErrors > 0) {
|
||
this.observer.emit(_events__WEBPACK_IMPORTED_MODULE_4__["Events"].ERROR, _events__WEBPACK_IMPORTED_MODULE_4__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_7__["ErrorTypes"].MEDIA_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_7__["ErrorDetails"].FRAG_PARSING_ERROR,
|
||
fatal: false,
|
||
reason: "Found " + tsPacketErrors + " TS packet/s that do not start with 0x47"
|
||
});
|
||
}
|
||
|
||
videoTrack.pesData = avcData;
|
||
audioTrack.pesData = audioData;
|
||
id3Track.pesData = id3Data;
|
||
var demuxResult = {
|
||
audioTrack: audioTrack,
|
||
videoTrack: videoTrack,
|
||
id3Track: id3Track,
|
||
textTrack: textTrack
|
||
};
|
||
|
||
if (flush) {
|
||
this.extractRemainingSamples(demuxResult);
|
||
}
|
||
|
||
return demuxResult;
|
||
};
|
||
|
||
_proto.flush = function flush() {
|
||
var remainderData = this.remainderData;
|
||
this.remainderData = null;
|
||
var result;
|
||
|
||
if (remainderData) {
|
||
result = this.demux(remainderData, -1, false, true);
|
||
} else {
|
||
result = {
|
||
videoTrack: this._avcTrack,
|
||
audioTrack: this._audioTrack,
|
||
id3Track: this._id3Track,
|
||
textTrack: this._txtTrack
|
||
};
|
||
}
|
||
|
||
this.extractRemainingSamples(result);
|
||
|
||
if (this.sampleAes) {
|
||
return this.decrypt(result, this.sampleAes);
|
||
}
|
||
|
||
return result;
|
||
};
|
||
|
||
_proto.extractRemainingSamples = function extractRemainingSamples(demuxResult) {
|
||
var audioTrack = demuxResult.audioTrack,
|
||
videoTrack = demuxResult.videoTrack,
|
||
id3Track = demuxResult.id3Track,
|
||
textTrack = demuxResult.textTrack;
|
||
var avcData = videoTrack.pesData;
|
||
var audioData = audioTrack.pesData;
|
||
var id3Data = id3Track.pesData; // try to parse last PES packets
|
||
|
||
var pes;
|
||
|
||
if (avcData && (pes = parsePES(avcData))) {
|
||
this.parseAVCPES(videoTrack, textTrack, pes, true);
|
||
videoTrack.pesData = null;
|
||
} else {
|
||
// either avcData null or PES truncated, keep it for next frag parsing
|
||
videoTrack.pesData = avcData;
|
||
}
|
||
|
||
if (audioData && (pes = parsePES(audioData))) {
|
||
if (audioTrack.isAAC) {
|
||
this.parseAACPES(audioTrack, pes);
|
||
} else {
|
||
this.parseMPEGPES(audioTrack, pes);
|
||
}
|
||
|
||
audioTrack.pesData = null;
|
||
} else {
|
||
if (audioData !== null && audioData !== void 0 && audioData.size) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].log('last AAC PES packet truncated,might overlap between fragments');
|
||
} // either audioData null or PES truncated, keep it for next frag parsing
|
||
|
||
|
||
audioTrack.pesData = audioData;
|
||
}
|
||
|
||
if (id3Data && (pes = parsePES(id3Data))) {
|
||
this.parseID3PES(id3Track, pes);
|
||
id3Track.pesData = null;
|
||
} else {
|
||
// either id3Data null or PES truncated, keep it for next frag parsing
|
||
id3Track.pesData = id3Data;
|
||
}
|
||
};
|
||
|
||
_proto.demuxSampleAes = function demuxSampleAes(data, keyData, timeOffset) {
|
||
var demuxResult = this.demux(data, timeOffset, true, !this.config.progressive);
|
||
var sampleAes = this.sampleAes = new _sample_aes__WEBPACK_IMPORTED_MODULE_3__["default"](this.observer, this.config, keyData);
|
||
return this.decrypt(demuxResult, sampleAes);
|
||
};
|
||
|
||
_proto.decrypt = function decrypt(demuxResult, sampleAes) {
|
||
return new Promise(function (resolve) {
|
||
var audioTrack = demuxResult.audioTrack,
|
||
videoTrack = demuxResult.videoTrack;
|
||
|
||
if (audioTrack.samples && audioTrack.isAAC) {
|
||
sampleAes.decryptAacSamples(audioTrack.samples, 0, function () {
|
||
if (videoTrack.samples) {
|
||
sampleAes.decryptAvcSamples(videoTrack.samples, 0, 0, function () {
|
||
resolve(demuxResult);
|
||
});
|
||
} else {
|
||
resolve(demuxResult);
|
||
}
|
||
});
|
||
} else if (videoTrack.samples) {
|
||
sampleAes.decryptAvcSamples(videoTrack.samples, 0, 0, function () {
|
||
resolve(demuxResult);
|
||
});
|
||
}
|
||
});
|
||
};
|
||
|
||
_proto.destroy = function destroy() {
|
||
this._duration = 0;
|
||
};
|
||
|
||
_proto.parseAVCPES = function parseAVCPES(track, textTrack, pes, last) {
|
||
var _this = this;
|
||
|
||
var units = this.parseAVCNALu(track, pes.data);
|
||
var debug = false;
|
||
var avcSample = this.avcSample;
|
||
var push;
|
||
var spsfound = false; // free pes.data to save up some memory
|
||
|
||
pes.data = null; // if new NAL units found and last sample still there, let's push ...
|
||
// this helps parsing streams with missing AUD (only do this if AUD never found)
|
||
|
||
if (avcSample && units.length && !track.audFound) {
|
||
pushAccessUnit(avcSample, track);
|
||
avcSample = this.avcSample = createAVCSample(false, pes.pts, pes.dts, '');
|
||
}
|
||
|
||
units.forEach(function (unit) {
|
||
switch (unit.type) {
|
||
// NDR
|
||
case 1:
|
||
{
|
||
push = true;
|
||
|
||
if (!avcSample) {
|
||
avcSample = _this.avcSample = createAVCSample(true, pes.pts, pes.dts, '');
|
||
}
|
||
|
||
if (debug) {
|
||
avcSample.debug += 'NDR ';
|
||
}
|
||
|
||
avcSample.frame = true;
|
||
var data = unit.data; // only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
|
||
|
||
if (spsfound && data.length > 4) {
|
||
// retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
|
||
var sliceType = new _exp_golomb__WEBPACK_IMPORTED_MODULE_2__["default"](data).readSliceType(); // 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
|
||
// SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
|
||
// An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
|
||
// I slice: A slice that is not an SI slice that is decoded using intra prediction only.
|
||
// if (sliceType === 2 || sliceType === 7) {
|
||
|
||
if (sliceType === 2 || sliceType === 4 || sliceType === 7 || sliceType === 9) {
|
||
avcSample.key = true;
|
||
}
|
||
}
|
||
|
||
break; // IDR
|
||
}
|
||
|
||
case 5:
|
||
push = true; // handle PES not starting with AUD
|
||
|
||
if (!avcSample) {
|
||
avcSample = _this.avcSample = createAVCSample(true, pes.pts, pes.dts, '');
|
||
}
|
||
|
||
if (debug) {
|
||
avcSample.debug += 'IDR ';
|
||
}
|
||
|
||
avcSample.key = true;
|
||
avcSample.frame = true;
|
||
break;
|
||
// SEI
|
||
|
||
case 6:
|
||
{
|
||
push = true;
|
||
|
||
if (debug && avcSample) {
|
||
avcSample.debug += 'SEI ';
|
||
}
|
||
|
||
Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_5__["parseSEIMessageFromNALu"])(discardEPB(unit.data), pes.pts, textTrack.samples);
|
||
break; // SPS
|
||
}
|
||
|
||
case 7:
|
||
push = true;
|
||
spsfound = true;
|
||
|
||
if (debug && avcSample) {
|
||
avcSample.debug += 'SPS ';
|
||
}
|
||
|
||
if (!track.sps) {
|
||
var expGolombDecoder = new _exp_golomb__WEBPACK_IMPORTED_MODULE_2__["default"](unit.data);
|
||
var config = expGolombDecoder.readSPS();
|
||
track.width = config.width;
|
||
track.height = config.height;
|
||
track.pixelRatio = config.pixelRatio; // TODO: `track.sps` is defined as a `number[]`, but we're setting it to a `Uint8Array[]`.
|
||
|
||
track.sps = [unit.data];
|
||
track.duration = _this._duration;
|
||
var codecarray = unit.data.subarray(1, 4);
|
||
var codecstring = 'avc1.';
|
||
|
||
for (var i = 0; i < 3; i++) {
|
||
var h = codecarray[i].toString(16);
|
||
|
||
if (h.length < 2) {
|
||
h = '0' + h;
|
||
}
|
||
|
||
codecstring += h;
|
||
}
|
||
|
||
track.codec = codecstring;
|
||
}
|
||
|
||
break;
|
||
// PPS
|
||
|
||
case 8:
|
||
push = true;
|
||
|
||
if (debug && avcSample) {
|
||
avcSample.debug += 'PPS ';
|
||
}
|
||
|
||
if (!track.pps) {
|
||
// TODO: `track.pss` is defined as a `number[]`, but we're setting it to a `Uint8Array[]`.
|
||
track.pps = [unit.data];
|
||
}
|
||
|
||
break;
|
||
// AUD
|
||
|
||
case 9:
|
||
push = false;
|
||
track.audFound = true;
|
||
|
||
if (avcSample) {
|
||
pushAccessUnit(avcSample, track);
|
||
}
|
||
|
||
avcSample = _this.avcSample = createAVCSample(false, pes.pts, pes.dts, debug ? 'AUD ' : '');
|
||
break;
|
||
// Filler Data
|
||
|
||
case 12:
|
||
push = false;
|
||
break;
|
||
|
||
default:
|
||
push = false;
|
||
|
||
if (avcSample) {
|
||
avcSample.debug += 'unknown NAL ' + unit.type + ' ';
|
||
}
|
||
|
||
break;
|
||
}
|
||
|
||
if (avcSample && push) {
|
||
var _units = avcSample.units;
|
||
|
||
_units.push(unit);
|
||
}
|
||
}); // if last PES packet, push samples
|
||
|
||
if (last && avcSample) {
|
||
pushAccessUnit(avcSample, track);
|
||
this.avcSample = null;
|
||
}
|
||
};
|
||
|
||
_proto.getLastNalUnit = function getLastNalUnit(samples) {
|
||
var _avcSample;
|
||
|
||
var avcSample = this.avcSample;
|
||
var lastUnit; // try to fallback to previous sample if current one is empty
|
||
|
||
if (!avcSample || avcSample.units.length === 0) {
|
||
avcSample = samples[samples.length - 1];
|
||
}
|
||
|
||
if ((_avcSample = avcSample) !== null && _avcSample !== void 0 && _avcSample.units) {
|
||
var units = avcSample.units;
|
||
lastUnit = units[units.length - 1];
|
||
}
|
||
|
||
return lastUnit;
|
||
};
|
||
|
||
_proto.parseAVCNALu = function parseAVCNALu(track, array) {
|
||
var len = array.byteLength;
|
||
var state = track.naluState || 0;
|
||
var lastState = state;
|
||
var units = [];
|
||
var i = 0;
|
||
var value;
|
||
var overflow;
|
||
var unitType;
|
||
var lastUnitStart = -1;
|
||
var lastUnitType = 0; // logger.log('PES:' + Hex.hexDump(array));
|
||
|
||
if (state === -1) {
|
||
// special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
|
||
lastUnitStart = 0; // NALu type is value read from offset 0
|
||
|
||
lastUnitType = array[0] & 0x1f;
|
||
state = 0;
|
||
i = 1;
|
||
}
|
||
|
||
while (i < len) {
|
||
value = array[i++]; // optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
|
||
|
||
if (!state) {
|
||
state = value ? 0 : 1;
|
||
continue;
|
||
}
|
||
|
||
if (state === 1) {
|
||
state = value ? 0 : 2;
|
||
continue;
|
||
} // here we have state either equal to 2 or 3
|
||
|
||
|
||
if (!value) {
|
||
state = 3;
|
||
} else if (value === 1) {
|
||
if (lastUnitStart >= 0) {
|
||
var unit = {
|
||
data: array.subarray(lastUnitStart, i - state - 1),
|
||
type: lastUnitType
|
||
}; // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
|
||
|
||
units.push(unit);
|
||
} else {
|
||
// lastUnitStart is still -1 => this is the first start code found in this PES packet.
// First check whether the start code delimiter overlaps two PES packets,
// i.e. it started in the previous packet (lastState not zero)
// and ended at the beginning of this PES packet (i <= 4 - lastState)
|
||
var lastUnit = this.getLastNalUnit(track.samples);
|
||
|
||
if (lastUnit) {
|
||
if (lastState && i <= 4 - lastState) {
|
||
// start delimiter overlapping between PES packets
|
||
// strip start delimiter bytes from the end of last NAL unit
|
||
// check if lastUnit had a state different from zero
|
||
if (lastUnit.state) {
|
||
// strip last bytes
|
||
lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
|
||
}
|
||
} // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
|
||
|
||
|
||
overflow = i - state - 1;
|
||
|
||
if (overflow > 0) {
|
||
// logger.log('first NALU found with overflow:' + overflow);
|
||
var tmp = new Uint8Array(lastUnit.data.byteLength + overflow);
|
||
tmp.set(lastUnit.data, 0);
|
||
tmp.set(array.subarray(0, overflow), lastUnit.data.byteLength);
|
||
lastUnit.data = tmp;
|
||
lastUnit.state = 0;
|
||
}
|
||
}
|
||
} // check if we can read unit type
|
||
|
||
|
||
if (i < len) {
|
||
unitType = array[i] & 0x1f; // logger.log('find NALU @ offset:' + i + ',type:' + unitType);
|
||
|
||
lastUnitStart = i;
|
||
lastUnitType = unitType;
|
||
state = 0;
|
||
} else {
|
||
// not enough bytes to read the unit type; read it during the next PES parsing
|
||
state = -1;
|
||
}
|
||
} else {
|
||
state = 0;
|
||
}
|
||
}
|
||
|
||
if (lastUnitStart >= 0 && state >= 0) {
|
||
var _unit = {
|
||
data: array.subarray(lastUnitStart, len),
|
||
type: lastUnitType,
|
||
state: state
|
||
};
|
||
units.push(_unit); // logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
|
||
} // no NALu found
|
||
|
||
|
||
if (units.length === 0) {
|
||
// append pes.data to previous NAL unit
|
||
var _lastUnit = this.getLastNalUnit(track.samples);
|
||
|
||
if (_lastUnit) {
|
||
var _tmp = new Uint8Array(_lastUnit.data.byteLength + array.byteLength);
|
||
|
||
_tmp.set(_lastUnit.data, 0);
|
||
|
||
_tmp.set(array, _lastUnit.data.byteLength);
|
||
|
||
_lastUnit.data = _tmp;
|
||
}
|
||
}
|
||
|
||
track.naluState = state;
|
||
return units;
|
||
};
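// A short sketch of the scanner above, assuming the input is raw Annex-B data:
// `state` counts consecutive zero bytes (0 = none, 1 = one, 2 = two, 3 = three or more).
// A value of 1 while state >= 2 completes a 00 00 01 / 00 00 00 01 start code, so the bytes
// accumulated since the previous start code are emitted as one NAL unit, and the byte after
// the start code provides the next unit's type (array[i] & 0x1f). A state of -1 persisted in
// track.naluState means the start code ended exactly at the PES boundary, so the unit type
// has to be read from the first byte of the next PES packet (handled at the top of the loop).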
|
||
|
||
_proto.parseAACPES = function parseAACPES(track, pes) {
|
||
var startOffset = 0;
|
||
var aacOverFlow = this.aacOverFlow;
|
||
var data = pes.data;
|
||
|
||
if (aacOverFlow) {
|
||
this.aacOverFlow = null;
|
||
var sampleLength = aacOverFlow.sample.unit.byteLength;
|
||
var frameMissingBytes = Math.min(aacOverFlow.missing, sampleLength);
|
||
var frameOverflowBytes = sampleLength - frameMissingBytes;
|
||
aacOverFlow.sample.unit.set(data.subarray(0, frameMissingBytes), frameOverflowBytes);
|
||
track.samples.push(aacOverFlow.sample); // logger.log(`AAC: append overflowing ${frameOverflowBytes} bytes to beginning of new PES`);
|
||
|
||
startOffset = aacOverFlow.missing;
|
||
} // look for ADTS header (0xFFFx)
|
||
|
||
|
||
var offset;
|
||
var len;
|
||
|
||
for (offset = startOffset, len = data.length; offset < len - 1; offset++) {
|
||
if (_adts__WEBPACK_IMPORTED_MODULE_0__["isHeader"](data, offset)) {
|
||
break;
|
||
}
|
||
} // if ADTS header does not start straight from the beginning of the PES payload, raise an error
|
||
|
||
|
||
if (offset !== startOffset) {
|
||
var reason;
|
||
var fatal;
|
||
|
||
if (offset < len - 1) {
|
||
reason = "AAC PES did not start with ADTS header,offset:" + offset;
|
||
fatal = false;
|
||
} else {
|
||
reason = 'no ADTS header found in AAC PES';
|
||
fatal = true;
|
||
}
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn("parsing error:" + reason);
|
||
this.observer.emit(_events__WEBPACK_IMPORTED_MODULE_4__["Events"].ERROR, _events__WEBPACK_IMPORTED_MODULE_4__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_7__["ErrorTypes"].MEDIA_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_7__["ErrorDetails"].FRAG_PARSING_ERROR,
|
||
fatal: fatal,
|
||
reason: reason
|
||
});
|
||
|
||
if (fatal) {
|
||
return;
|
||
}
|
||
}
|
||
|
||
_adts__WEBPACK_IMPORTED_MODULE_0__["initTrackConfig"](track, this.observer, data, offset, this.audioCodec);
|
||
var pts;
|
||
|
||
if (pes.pts !== undefined) {
|
||
pts = pes.pts;
|
||
} else if (aacOverFlow) {
|
||
// if last AAC frame is overflowing, we should ensure timestamps are contiguous:
|
||
// first sample PTS should be equal to last sample PTS + frameDuration
|
||
var frameDuration = _adts__WEBPACK_IMPORTED_MODULE_0__["getFrameDuration"](track.samplerate);
|
||
pts = aacOverFlow.sample.pts + frameDuration;
|
||
} else {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn('[tsdemuxer]: AAC PES unknown PTS');
|
||
return;
|
||
} // scan for aac samples
|
||
|
||
|
||
var frameIndex = 0;
|
||
|
||
while (offset < len) {
|
||
if (_adts__WEBPACK_IMPORTED_MODULE_0__["isHeader"](data, offset)) {
|
||
if (offset + 5 < len) {
|
||
var frame = _adts__WEBPACK_IMPORTED_MODULE_0__["appendFrame"](track, data, offset, pts, frameIndex);
|
||
|
||
if (frame) {
|
||
if (frame.missing) {
|
||
this.aacOverFlow = frame;
|
||
} else {
|
||
offset += frame.length;
|
||
frameIndex++;
|
||
continue;
|
||
}
|
||
}
|
||
} // We are at an ADTS header, but do not have enough data for a frame
|
||
// Remaining data will be added to aacOverFlow
|
||
|
||
|
||
break;
|
||
} else {
|
||
// nothing found, keep looking
|
||
offset++;
|
||
}
|
||
}
|
||
};
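// A sketch of the timing assumption used above when an ADTS frame spans two PES packets:
// each AAC frame carries 1024 PCM samples, so on the 90 kHz MPEG-TS clock
//   frameDuration = 1024 * 90000 / samplerate
// e.g. 1920 ticks (~21.3 ms) at 48 kHz. When the new PES carries no PTS, the first frame of
// the new packet is stamped at the carried-over sample's PTS plus that duration, which keeps
// audio timestamps contiguous across the overflow.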
|
||
|
||
_proto.parseMPEGPES = function parseMPEGPES(track, pes) {
|
||
var data = pes.data;
|
||
var length = data.length;
|
||
var frameIndex = 0;
|
||
var offset = 0;
|
||
var pts = pes.pts;
|
||
|
||
if (pts === undefined) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn('[tsdemuxer]: MPEG PES unknown PTS');
|
||
return;
|
||
}
|
||
|
||
while (offset < length) {
|
||
if (_mpegaudio__WEBPACK_IMPORTED_MODULE_1__["isHeader"](data, offset)) {
|
||
var frame = _mpegaudio__WEBPACK_IMPORTED_MODULE_1__["appendFrame"](track, data, offset, pts, frameIndex);
|
||
|
||
if (frame) {
|
||
offset += frame.length;
|
||
frameIndex++;
|
||
} else {
|
||
// logger.log('Unable to parse Mpeg audio frame');
|
||
break;
|
||
}
|
||
} else {
|
||
// nothing found, keep looking
|
||
offset++;
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto.parseID3PES = function parseID3PES(id3Track, pes) {
|
||
if (pes.pts === undefined) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn('[tsdemuxer]: ID3 PES unknown PTS');
|
||
return;
|
||
}
|
||
|
||
id3Track.samples.push(pes);
|
||
};
|
||
|
||
return TSDemuxer;
|
||
}();
|
||
|
||
TSDemuxer.minProbeByteLength = 188;
|
||
|
||
function createAVCSample(key, pts, dts, debug) {
  return {
    key: key,
    frame: false,
    pts: pts,
    dts: dts,
    units: [],
    debug: debug,
    length: 0
  };
}

function parsePAT(data, offset) {
  // skip the PSI header and parse the first PMT entry
  return (data[offset + 10] & 0x1f) << 8 | data[offset + 11]; // logger.log('PMT PID:' + this._pmtId);
}
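// A sketch of the PAT layout assumed above (offset points at the start of the PSI section):
// bytes 0-7 are the section header (table_id, section_length, transport_stream_id,
// version/section numbers), bytes 8-9 hold the first program_number, and bytes 10-11 hold
// that program's program_map_PID (13 bits, hence the 0x1f mask). Only the first PMT entry
// is used here.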
|
||
|
||
function parsePMT(data, offset, mpegSupported, isSampleAes) {
|
||
var result = {
|
||
audio: -1,
|
||
avc: -1,
|
||
id3: -1,
|
||
isAAC: true
|
||
};
|
||
var sectionLength = (data[offset + 1] & 0x0f) << 8 | data[offset + 2];
|
||
var tableEnd = offset + 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
|
||
// long the program info descriptors are
|
||
|
||
var programInfoLength = (data[offset + 10] & 0x0f) << 8 | data[offset + 11]; // advance the offset to the first entry in the mapping table
|
||
|
||
offset += 12 + programInfoLength;
|
||
|
||
while (offset < tableEnd) {
|
||
var pid = (data[offset + 1] & 0x1f) << 8 | data[offset + 2];
|
||
|
||
switch (data[offset]) {
|
||
case 0xcf:
|
||
// SAMPLE-AES AAC
|
||
if (!isSampleAes) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].log('ADTS AAC with AES-128-CBC frame encryption found in unencrypted stream');
|
||
break;
|
||
}
|
||
|
||
/* falls through */
|
||
|
||
case 0x0f:
|
||
// ISO/IEC 13818-7 ADTS AAC (MPEG-2 lower bit-rate audio)
|
||
// logger.log('AAC PID:' + pid);
|
||
if (result.audio === -1) {
|
||
result.audio = pid;
|
||
}
|
||
|
||
break;
|
||
// Packetized metadata (ID3)
|
||
|
||
case 0x15:
|
||
// logger.log('ID3 PID:' + pid);
|
||
if (result.id3 === -1) {
|
||
result.id3 = pid;
|
||
}
|
||
|
||
break;
|
||
|
||
case 0xdb:
|
||
// SAMPLE-AES AVC
|
||
if (!isSampleAes) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].log('H.264 with AES-128-CBC slice encryption found in unencrypted stream');
|
||
break;
|
||
}
|
||
|
||
/* falls through */
|
||
|
||
case 0x1b:
|
||
// ITU-T Rec. H.264 and ISO/IEC 14496-10 (lower bit-rate video)
|
||
// logger.log('AVC PID:' + pid);
|
||
if (result.avc === -1) {
|
||
result.avc = pid;
|
||
}
|
||
|
||
break;
|
||
// ISO/IEC 11172-3 (MPEG-1 audio)
|
||
// or ISO/IEC 13818-3 (MPEG-2 halved sample rate audio)
|
||
|
||
case 0x03:
|
||
case 0x04:
|
||
// logger.log('MPEG PID:' + pid);
|
||
if (!mpegSupported) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].log('MPEG audio found, not supported in this browser');
|
||
} else if (result.audio === -1) {
|
||
result.audio = pid;
|
||
result.isAAC = false;
|
||
}
|
||
|
||
break;
|
||
|
||
case 0x24:
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn('Unsupported HEVC stream type found');
|
||
break;
|
||
|
||
default:
|
||
// logger.log('unknown stream type:' + data[offset]);
|
||
break;
|
||
} // move to the next table entry
|
||
// skip past the elementary stream descriptors, if present
|
||
|
||
|
||
offset += ((data[offset + 3] & 0x0f) << 8 | data[offset + 4]) + 5;
|
||
}
|
||
|
||
return result;
|
||
}
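// A sketch of the loop arithmetic above: each elementary-stream entry is
//   stream_type (1 byte) + elementary_PID (2 bytes, 13 bits masked with 0x1f)
//   + ES_info_length (2 bytes, low 12 bits) + that many descriptor bytes,
// so `offset += ES_info_length + 5` advances to the next entry. The first matching PID per
// category wins (result.audio / result.avc / result.id3), and the SAMPLE-AES stream types
// (0xcf, 0xdb) only fall through to their clear counterparts when isSampleAes is set.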
|
||
|
||
function parsePES(stream) {
  var i = 0;
  var frag;
  var pesLen;
  var pesHdrLen;
  var pesPts;
  var pesDts; // safety check: bail out before dereferencing stream.data

  if (!stream || stream.size === 0) {
    return null;
  }

  var data = stream.data; // we might need up to 19 bytes to read PES header
|
||
// if first chunk of data is less than 19 bytes, let's merge it with following ones until we get 19 bytes
|
||
// usually only one merge is needed (and this is rare ...)
|
||
|
||
|
||
while (data[0].length < 19 && data.length > 1) {
|
||
var newData = new Uint8Array(data[0].length + data[1].length);
|
||
newData.set(data[0]);
|
||
newData.set(data[1], data[0].length);
|
||
data[0] = newData;
|
||
data.splice(1, 1);
|
||
} // retrieve PTS/DTS from first fragment
|
||
|
||
|
||
frag = data[0];
|
||
var pesPrefix = (frag[0] << 16) + (frag[1] << 8) + frag[2];
|
||
|
||
if (pesPrefix === 1) {
|
||
pesLen = (frag[4] << 8) + frag[5]; // if PES parsed length is not zero and greater than total received length, stop parsing. PES might be truncated
|
||
// minus 6 : PES header size
|
||
|
||
if (pesLen && pesLen > stream.size - 6) {
|
||
return null;
|
||
}
|
||
|
||
var pesFlags = frag[7];
|
||
|
||
if (pesFlags & 0xc0) {
|
||
/* PES header described here : http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
|
||
as PTS / DTS is 33 bit we cannot use bitwise operator in JS,
|
||
as Bitwise operators treat their operands as a sequence of 32 bits */
|
||
pesPts = (frag[9] & 0x0e) * 536870912 + // 1 << 29
|
||
(frag[10] & 0xff) * 4194304 + // 1 << 22
|
||
(frag[11] & 0xfe) * 16384 + // 1 << 14
|
||
(frag[12] & 0xff) * 128 + // 1 << 7
|
||
(frag[13] & 0xfe) / 2;
|
||
|
||
if (pesFlags & 0x40) {
|
||
pesDts = (frag[14] & 0x0e) * 536870912 + // 1 << 29
|
||
(frag[15] & 0xff) * 4194304 + // 1 << 22
|
||
(frag[16] & 0xfe) * 16384 + // 1 << 14
|
||
(frag[17] & 0xff) * 128 + // 1 << 7
|
||
(frag[18] & 0xfe) / 2;
|
||
|
||
if (pesPts - pesDts > 60 * 90000) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn(Math.round((pesPts - pesDts) / 90000) + "s delta between PTS and DTS, align them");
|
||
pesPts = pesDts;
|
||
}
|
||
} else {
|
||
pesDts = pesPts;
|
||
}
|
||
}
|
||
|
||
pesHdrLen = frag[8]; // 9 bytes : 6 bytes for PES header + 3 bytes for PES extension
|
||
|
||
var payloadStartOffset = pesHdrLen + 9;
|
||
|
||
if (stream.size <= payloadStartOffset) {
|
||
return null;
|
||
}
|
||
|
||
stream.size -= payloadStartOffset; // reassemble PES packet
|
||
|
||
var pesData = new Uint8Array(stream.size);
|
||
|
||
for (var j = 0, dataLen = data.length; j < dataLen; j++) {
|
||
frag = data[j];
|
||
var len = frag.byteLength;
|
||
|
||
if (payloadStartOffset) {
|
||
if (payloadStartOffset > len) {
|
||
// trim full frag if PES header bigger than frag
|
||
payloadStartOffset -= len;
|
||
continue;
|
||
} else {
|
||
// trim partial frag if PES header smaller than frag
|
||
frag = frag.subarray(payloadStartOffset);
|
||
len -= payloadStartOffset;
|
||
payloadStartOffset = 0;
|
||
}
|
||
}
|
||
|
||
pesData.set(frag, i);
|
||
i += len;
|
||
}
|
||
|
||
if (pesLen) {
|
||
// payload size : remove PES header + PES extension
|
||
pesLen -= pesHdrLen + 3;
|
||
}
|
||
|
||
return {
|
||
data: pesData,
|
||
pts: pesPts,
|
||
dts: pesDts,
|
||
len: pesLen
|
||
};
|
||
}
|
||
|
||
return null;
|
||
}
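// A minimal, self-contained sketch (unused by this bundle, kept only for illustration) of the
// 33-bit PTS arithmetic above: PTS/DTS are 33-bit values and JS bitwise operators truncate to
// 32 bits, so multiplications stand in for the left shifts. `header` is assumed to be the PES
// header bytes, with the PTS stored in bytes 9..13.
function _examplePtsFromPesHeader(header) {
  return (header[9] & 0x0e) * 536870912 + // 1 << 29
  (header[10] & 0xff) * 4194304 + // 1 << 22
  (header[11] & 0xfe) * 16384 + // 1 << 14
  (header[12] & 0xff) * 128 + // 1 << 7
  (header[13] & 0xfe) / 2;
}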
|
||
|
||
function pushAccessUnit(avcSample, avcTrack) {
|
||
if (avcSample.units.length && avcSample.frame) {
|
||
// if sample does not have PTS/DTS, patch with last sample PTS/DTS
|
||
if (avcSample.pts === undefined) {
|
||
var samples = avcTrack.samples;
|
||
var nbSamples = samples.length;
|
||
|
||
if (nbSamples) {
|
||
var lastSample = samples[nbSamples - 1];
|
||
avcSample.pts = lastSample.pts;
|
||
avcSample.dts = lastSample.dts;
|
||
} else {
|
||
// dropping samples, no timestamp found
|
||
avcTrack.dropped++;
|
||
return;
|
||
}
|
||
}
|
||
|
||
avcTrack.samples.push(avcSample);
|
||
}
|
||
|
||
if (avcSample.debug.length) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].log(avcSample.pts + '/' + avcSample.dts + ':' + avcSample.debug);
|
||
}
|
||
}
|
||
/**
|
||
* Remove emulation prevention bytes from an RBSP (raw byte sequence payload)
|
||
*/
|
||
|
||
|
||
function discardEPB(data) {
|
||
var length = data.byteLength;
|
||
var EPBPositions = [];
|
||
var i = 1; // Find all `Emulation Prevention Bytes`
|
||
|
||
while (i < length - 2) {
|
||
if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
|
||
EPBPositions.push(i + 2);
|
||
i += 2;
|
||
} else {
|
||
i++;
|
||
}
|
||
} // If no Emulation Prevention Bytes were found just return the original
|
||
// array
|
||
|
||
|
||
if (EPBPositions.length === 0) {
|
||
return data;
|
||
} // Create a new array to hold the NAL unit data
|
||
|
||
|
||
var newLength = length - EPBPositions.length;
|
||
var newData = new Uint8Array(newLength);
|
||
var sourceIndex = 0;
|
||
|
||
for (i = 0; i < newLength; sourceIndex++, i++) {
|
||
if (sourceIndex === EPBPositions[0]) {
|
||
// Skip this byte
|
||
sourceIndex++; // Remove this position index
|
||
|
||
EPBPositions.shift();
|
||
}
|
||
|
||
newData[i] = data[sourceIndex];
|
||
}
|
||
|
||
return newData;
|
||
}
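// A quick illustration of the transform above (illustrative bytes only): the encoder inserts
// 0x03 after any 0x00 0x00 pair inside an RBSP so the payload can never mimic a start code,
// e.g. the escaped payload [0x00, 0x00, 0x03, 0x01] becomes [0x00, 0x00, 0x01] once the
// emulation prevention byte is dropped. This must happen before parseSEIMessageFromNALu
// reads the SEI payload, which is why the SEI case above wraps unit.data in discardEPB().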
|
||
/* harmony default export */ __webpack_exports__["default"] = (TSDemuxer);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/errors.ts":
|
||
/*!***********************!*\
|
||
!*** ./src/errors.ts ***!
|
||
\***********************/
|
||
/*! exports provided: ErrorTypes, ErrorDetails */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "ErrorTypes", function() { return ErrorTypes; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "ErrorDetails", function() { return ErrorDetails; });
|
||
var ErrorTypes;
|
||
/**
|
||
* @enum {ErrorDetails}
|
||
* @typedef {string} ErrorDetail
|
||
*/
|
||
|
||
(function (ErrorTypes) {
|
||
ErrorTypes["NETWORK_ERROR"] = "networkError";
|
||
ErrorTypes["MEDIA_ERROR"] = "mediaError";
|
||
ErrorTypes["KEY_SYSTEM_ERROR"] = "keySystemError";
|
||
ErrorTypes["MUX_ERROR"] = "muxError";
|
||
ErrorTypes["OTHER_ERROR"] = "otherError";
|
||
})(ErrorTypes || (ErrorTypes = {}));
|
||
|
||
var ErrorDetails;
|
||
|
||
(function (ErrorDetails) {
|
||
ErrorDetails["KEY_SYSTEM_NO_KEYS"] = "keySystemNoKeys";
|
||
ErrorDetails["KEY_SYSTEM_NO_ACCESS"] = "keySystemNoAccess";
|
||
ErrorDetails["KEY_SYSTEM_NO_SESSION"] = "keySystemNoSession";
|
||
ErrorDetails["KEY_SYSTEM_LICENSE_REQUEST_FAILED"] = "keySystemLicenseRequestFailed";
|
||
ErrorDetails["KEY_SYSTEM_NO_INIT_DATA"] = "keySystemNoInitData";
|
||
ErrorDetails["MANIFEST_LOAD_ERROR"] = "manifestLoadError";
|
||
ErrorDetails["MANIFEST_LOAD_TIMEOUT"] = "manifestLoadTimeOut";
|
||
ErrorDetails["MANIFEST_PARSING_ERROR"] = "manifestParsingError";
|
||
ErrorDetails["MANIFEST_INCOMPATIBLE_CODECS_ERROR"] = "manifestIncompatibleCodecsError";
|
||
ErrorDetails["LEVEL_EMPTY_ERROR"] = "levelEmptyError";
|
||
ErrorDetails["LEVEL_LOAD_ERROR"] = "levelLoadError";
|
||
ErrorDetails["LEVEL_LOAD_TIMEOUT"] = "levelLoadTimeOut";
|
||
ErrorDetails["LEVEL_SWITCH_ERROR"] = "levelSwitchError";
|
||
ErrorDetails["AUDIO_TRACK_LOAD_ERROR"] = "audioTrackLoadError";
|
||
ErrorDetails["AUDIO_TRACK_LOAD_TIMEOUT"] = "audioTrackLoadTimeOut";
|
||
ErrorDetails["SUBTITLE_LOAD_ERROR"] = "subtitleTrackLoadError";
|
||
ErrorDetails["SUBTITLE_TRACK_LOAD_TIMEOUT"] = "subtitleTrackLoadTimeOut";
|
||
ErrorDetails["FRAG_LOAD_ERROR"] = "fragLoadError";
|
||
ErrorDetails["FRAG_LOAD_TIMEOUT"] = "fragLoadTimeOut";
|
||
ErrorDetails["FRAG_DECRYPT_ERROR"] = "fragDecryptError";
|
||
ErrorDetails["FRAG_PARSING_ERROR"] = "fragParsingError";
|
||
ErrorDetails["REMUX_ALLOC_ERROR"] = "remuxAllocError";
|
||
ErrorDetails["KEY_LOAD_ERROR"] = "keyLoadError";
|
||
ErrorDetails["KEY_LOAD_TIMEOUT"] = "keyLoadTimeOut";
|
||
ErrorDetails["BUFFER_ADD_CODEC_ERROR"] = "bufferAddCodecError";
|
||
ErrorDetails["BUFFER_INCOMPATIBLE_CODECS_ERROR"] = "bufferIncompatibleCodecsError";
|
||
ErrorDetails["BUFFER_APPEND_ERROR"] = "bufferAppendError";
|
||
ErrorDetails["BUFFER_APPENDING_ERROR"] = "bufferAppendingError";
|
||
ErrorDetails["BUFFER_STALLED_ERROR"] = "bufferStalledError";
|
||
ErrorDetails["BUFFER_FULL_ERROR"] = "bufferFullError";
|
||
ErrorDetails["BUFFER_SEEK_OVER_HOLE"] = "bufferSeekOverHole";
|
||
ErrorDetails["BUFFER_NUDGE_ON_STALL"] = "bufferNudgeOnStall";
|
||
ErrorDetails["INTERNAL_EXCEPTION"] = "internalException";
|
||
ErrorDetails["INTERNAL_ABORTED"] = "aborted";
|
||
ErrorDetails["UNKNOWN"] = "unknown";
|
||
})(ErrorDetails || (ErrorDetails = {}));
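// A typical consumer-side sketch (not part of this module) of how these enums are used for
// recovery; `hls` is an assumed application variable holding an Hls instance:
//
//   hls.on(Hls.Events.ERROR, function (event, data) {
//     if (!data.fatal) return;
//     switch (data.type) {
//       case Hls.ErrorTypes.NETWORK_ERROR: hls.startLoad(); break;       // e.g. manifestLoadError
//       case Hls.ErrorTypes.MEDIA_ERROR: hls.recoverMediaError(); break; // e.g. bufferAppendError
//       default: hls.destroy(); break;
//     }
//   });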
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/events.ts":
|
||
/*!***********************!*\
|
||
!*** ./src/events.ts ***!
|
||
\***********************/
|
||
/*! exports provided: Events */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "Events", function() { return Events; });
|
||
/**
|
||
* @readonly
|
||
* @enum {string}
|
||
*/
|
||
var Events;
|
||
|
||
(function (Events) {
|
||
Events["MEDIA_ATTACHING"] = "hlsMediaAttaching";
|
||
Events["MEDIA_ATTACHED"] = "hlsMediaAttached";
|
||
Events["MEDIA_DETACHING"] = "hlsMediaDetaching";
|
||
Events["MEDIA_DETACHED"] = "hlsMediaDetached";
|
||
Events["BUFFER_RESET"] = "hlsBufferReset";
|
||
Events["BUFFER_CODECS"] = "hlsBufferCodecs";
|
||
Events["BUFFER_CREATED"] = "hlsBufferCreated";
|
||
Events["BUFFER_APPENDING"] = "hlsBufferAppending";
|
||
Events["BUFFER_APPENDED"] = "hlsBufferAppended";
|
||
Events["BUFFER_EOS"] = "hlsBufferEos";
|
||
Events["BUFFER_FLUSHING"] = "hlsBufferFlushing";
|
||
Events["BUFFER_FLUSHED"] = "hlsBufferFlushed";
|
||
Events["MANIFEST_LOADING"] = "hlsManifestLoading";
|
||
Events["MANIFEST_LOADED"] = "hlsManifestLoaded";
|
||
Events["MANIFEST_PARSED"] = "hlsManifestParsed";
|
||
Events["LEVEL_SWITCHING"] = "hlsLevelSwitching";
|
||
Events["LEVEL_SWITCHED"] = "hlsLevelSwitched";
|
||
Events["LEVEL_LOADING"] = "hlsLevelLoading";
|
||
Events["LEVEL_LOADED"] = "hlsLevelLoaded";
|
||
Events["LEVEL_UPDATED"] = "hlsLevelUpdated";
|
||
Events["LEVEL_PTS_UPDATED"] = "hlsLevelPtsUpdated";
|
||
Events["LEVELS_UPDATED"] = "hlsLevelsUpdated";
|
||
Events["AUDIO_TRACKS_UPDATED"] = "hlsAudioTracksUpdated";
|
||
Events["AUDIO_TRACK_SWITCHING"] = "hlsAudioTrackSwitching";
|
||
Events["AUDIO_TRACK_SWITCHED"] = "hlsAudioTrackSwitched";
|
||
Events["AUDIO_TRACK_LOADING"] = "hlsAudioTrackLoading";
|
||
Events["AUDIO_TRACK_LOADED"] = "hlsAudioTrackLoaded";
|
||
Events["SUBTITLE_TRACKS_UPDATED"] = "hlsSubtitleTracksUpdated";
|
||
Events["SUBTITLE_TRACKS_CLEARED"] = "hlsSubtitleTracksCleared";
|
||
Events["SUBTITLE_TRACK_SWITCH"] = "hlsSubtitleTrackSwitch";
|
||
Events["SUBTITLE_TRACK_LOADING"] = "hlsSubtitleTrackLoading";
|
||
Events["SUBTITLE_TRACK_LOADED"] = "hlsSubtitleTrackLoaded";
|
||
Events["SUBTITLE_FRAG_PROCESSED"] = "hlsSubtitleFragProcessed";
|
||
Events["CUES_PARSED"] = "hlsCuesParsed";
|
||
Events["NON_NATIVE_TEXT_TRACKS_FOUND"] = "hlsNonNativeTextTracksFound";
|
||
Events["INIT_PTS_FOUND"] = "hlsInitPtsFound";
|
||
Events["FRAG_LOADING"] = "hlsFragLoading";
|
||
Events["FRAG_LOAD_EMERGENCY_ABORTED"] = "hlsFragLoadEmergencyAborted";
|
||
Events["FRAG_LOADED"] = "hlsFragLoaded";
|
||
Events["FRAG_DECRYPTED"] = "hlsFragDecrypted";
|
||
Events["FRAG_PARSING_INIT_SEGMENT"] = "hlsFragParsingInitSegment";
|
||
Events["FRAG_PARSING_USERDATA"] = "hlsFragParsingUserdata";
|
||
Events["FRAG_PARSING_METADATA"] = "hlsFragParsingMetadata";
|
||
Events["FRAG_PARSED"] = "hlsFragParsed";
|
||
Events["FRAG_BUFFERED"] = "hlsFragBuffered";
|
||
Events["FRAG_CHANGED"] = "hlsFragChanged";
|
||
Events["FPS_DROP"] = "hlsFpsDrop";
|
||
Events["FPS_DROP_LEVEL_CAPPING"] = "hlsFpsDropLevelCapping";
|
||
Events["ERROR"] = "hlsError";
|
||
Events["DESTROYING"] = "hlsDestroying";
|
||
Events["KEY_LOADING"] = "hlsKeyLoading";
|
||
Events["KEY_LOADED"] = "hlsKeyLoaded";
|
||
Events["LIVE_BACK_BUFFER_REACHED"] = "hlsLiveBackBufferReached";
|
||
Events["BACK_BUFFER_REACHED"] = "hlsBackBufferReached";
|
||
})(Events || (Events = {}));
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/hls.ts":
|
||
/*!********************!*\
|
||
!*** ./src/hls.ts ***!
|
||
\********************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return Hls; });
|
||
/* harmony import */ var url_toolkit__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! url-toolkit */ "./node_modules/url-toolkit/src/url-toolkit.js");
|
||
/* harmony import */ var url_toolkit__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(url_toolkit__WEBPACK_IMPORTED_MODULE_0__);
|
||
/* harmony import */ var _loader_playlist_loader__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./loader/playlist-loader */ "./src/loader/playlist-loader.ts");
|
||
/* harmony import */ var _loader_key_loader__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./loader/key-loader */ "./src/loader/key-loader.ts");
|
||
/* harmony import */ var _controller_id3_track_controller__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./controller/id3-track-controller */ "./src/controller/id3-track-controller.ts");
|
||
/* harmony import */ var _controller_latency_controller__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./controller/latency-controller */ "./src/controller/latency-controller.ts");
|
||
/* harmony import */ var _controller_level_controller__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./controller/level-controller */ "./src/controller/level-controller.ts");
|
||
/* harmony import */ var _controller_fragment_tracker__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./controller/fragment-tracker */ "./src/controller/fragment-tracker.ts");
|
||
/* harmony import */ var _controller_stream_controller__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./controller/stream-controller */ "./src/controller/stream-controller.ts");
|
||
/* harmony import */ var _is_supported__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./is-supported */ "./src/is-supported.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _config__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ./config */ "./src/config.ts");
|
||
/* harmony import */ var eventemitter3__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! eventemitter3 */ "./node_modules/eventemitter3/index.js");
|
||
/* harmony import */ var eventemitter3__WEBPACK_IMPORTED_MODULE_11___default = /*#__PURE__*/__webpack_require__.n(eventemitter3__WEBPACK_IMPORTED_MODULE_11__);
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ./events */ "./src/events.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(/*! ./errors */ "./src/errors.ts");
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
/**
|
||
* @module Hls
|
||
* @class
|
||
* @constructor
|
||
*/
|
||
var Hls = /*#__PURE__*/function () {
|
||
Hls.isSupported = function isSupported() {
|
||
return Object(_is_supported__WEBPACK_IMPORTED_MODULE_8__["isSupported"])();
|
||
};
|
||
|
||
/**
|
||
* Creates an instance of an HLS client that can attach to exactly one `HTMLMediaElement`.
|
||
*
|
||
* @constructs Hls
|
||
* @param {HlsConfig} config
|
||
*/
|
||
function Hls(userConfig) {
|
||
if (userConfig === void 0) {
|
||
userConfig = {};
|
||
}
|
||
|
||
this.config = void 0;
|
||
this.userConfig = void 0;
|
||
this.coreComponents = void 0;
|
||
this.networkControllers = void 0;
|
||
this._emitter = new eventemitter3__WEBPACK_IMPORTED_MODULE_11__["EventEmitter"]();
|
||
this._autoLevelCapping = void 0;
|
||
this.abrController = void 0;
|
||
this.bufferController = void 0;
|
||
this.capLevelController = void 0;
|
||
this.latencyController = void 0;
|
||
this.levelController = void 0;
|
||
this.streamController = void 0;
|
||
this.audioTrackController = void 0;
|
||
this.subtitleTrackController = void 0;
|
||
this.emeController = void 0;
|
||
this.cmcdController = void 0;
|
||
this._media = null;
|
||
this.url = null;
|
||
var config = this.config = Object(_config__WEBPACK_IMPORTED_MODULE_10__["mergeConfig"])(Hls.DefaultConfig, userConfig);
|
||
this.userConfig = userConfig;
|
||
Object(_utils_logger__WEBPACK_IMPORTED_MODULE_9__["enableLogs"])(config.debug);
|
||
this._autoLevelCapping = -1;
|
||
|
||
if (config.progressive) {
|
||
Object(_config__WEBPACK_IMPORTED_MODULE_10__["enableStreamingMode"])(config);
|
||
} // core controllers and network loaders
|
||
|
||
|
||
var ConfigAbrController = config.abrController,
|
||
ConfigBufferController = config.bufferController,
|
||
ConfigCapLevelController = config.capLevelController,
|
||
ConfigFpsController = config.fpsController;
|
||
var abrController = this.abrController = new ConfigAbrController(this);
|
||
var bufferController = this.bufferController = new ConfigBufferController(this);
|
||
var capLevelController = this.capLevelController = new ConfigCapLevelController(this);
|
||
var fpsController = new ConfigFpsController(this);
|
||
var playListLoader = new _loader_playlist_loader__WEBPACK_IMPORTED_MODULE_1__["default"](this);
|
||
var keyLoader = new _loader_key_loader__WEBPACK_IMPORTED_MODULE_2__["default"](this);
|
||
var id3TrackController = new _controller_id3_track_controller__WEBPACK_IMPORTED_MODULE_3__["default"](this); // network controllers
|
||
|
||
var levelController = this.levelController = new _controller_level_controller__WEBPACK_IMPORTED_MODULE_5__["default"](this); // FragmentTracker must be defined before StreamController because the order of event handling is important
|
||
|
||
var fragmentTracker = new _controller_fragment_tracker__WEBPACK_IMPORTED_MODULE_6__["FragmentTracker"](this);
|
||
var streamController = this.streamController = new _controller_stream_controller__WEBPACK_IMPORTED_MODULE_7__["default"](this, fragmentTracker); // Cap level controller uses streamController to flush the buffer
|
||
|
||
capLevelController.setStreamController(streamController); // fpsController uses streamController to switch when frames are being dropped
|
||
|
||
fpsController.setStreamController(streamController);
|
||
var networkControllers = [levelController, streamController];
|
||
this.networkControllers = networkControllers;
|
||
var coreComponents = [playListLoader, keyLoader, abrController, bufferController, capLevelController, fpsController, id3TrackController, fragmentTracker];
|
||
this.audioTrackController = this.createController(config.audioTrackController, null, networkControllers);
|
||
this.createController(config.audioStreamController, fragmentTracker, networkControllers); // subtitleTrackController must be defined before because the order of event handling is important
|
||
|
||
this.subtitleTrackController = this.createController(config.subtitleTrackController, null, networkControllers);
|
||
this.createController(config.subtitleStreamController, fragmentTracker, networkControllers);
|
||
this.createController(config.timelineController, null, coreComponents);
|
||
this.emeController = this.createController(config.emeController, null, coreComponents);
|
||
this.cmcdController = this.createController(config.cmcdController, null, coreComponents);
|
||
this.latencyController = this.createController(_controller_latency_controller__WEBPACK_IMPORTED_MODULE_4__["default"], null, coreComponents);
|
||
this.coreComponents = coreComponents;
|
||
}
|
||
|
||
var _proto = Hls.prototype;
|
||
|
||
_proto.createController = function createController(ControllerClass, fragmentTracker, components) {
|
||
if (ControllerClass) {
|
||
var controllerInstance = fragmentTracker ? new ControllerClass(this, fragmentTracker) : new ControllerClass(this);
|
||
|
||
if (components) {
|
||
components.push(controllerInstance);
|
||
}
|
||
|
||
return controllerInstance;
|
||
}
|
||
|
||
return null;
|
||
} // Delegate the EventEmitter through the public API of Hls.js
|
||
;
|
||
|
||
_proto.on = function on(event, listener, context) {
|
||
if (context === void 0) {
|
||
context = this;
|
||
}
|
||
|
||
this._emitter.on(event, listener, context);
|
||
};
|
||
|
||
_proto.once = function once(event, listener, context) {
|
||
if (context === void 0) {
|
||
context = this;
|
||
}
|
||
|
||
this._emitter.once(event, listener, context);
|
||
};
|
||
|
||
_proto.removeAllListeners = function removeAllListeners(event) {
|
||
this._emitter.removeAllListeners(event);
|
||
};
|
||
|
||
_proto.off = function off(event, listener, context, once) {
|
||
if (context === void 0) {
|
||
context = this;
|
||
}
|
||
|
||
this._emitter.off(event, listener, context, once);
|
||
};
|
||
|
||
_proto.listeners = function listeners(event) {
|
||
return this._emitter.listeners(event);
|
||
};
|
||
|
||
_proto.emit = function emit(event, name, eventObject) {
|
||
return this._emitter.emit(event, name, eventObject);
|
||
};
|
||
|
||
_proto.trigger = function trigger(event, eventObject) {
|
||
if (this.config.debug) {
|
||
return this.emit(event, event, eventObject);
|
||
} else {
|
||
try {
|
||
return this.emit(event, event, eventObject);
|
||
} catch (e) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].error('An internal error happened while handling event ' + event + '. Error message: "' + e.message + '". Here is a stacktrace:', e);
|
||
this.trigger(_events__WEBPACK_IMPORTED_MODULE_12__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorTypes"].OTHER_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"].INTERNAL_EXCEPTION,
|
||
fatal: false,
|
||
event: event,
|
||
error: e
|
||
});
|
||
}
|
||
}
|
||
|
||
return false;
|
||
};
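// In short: with config.debug enabled, exceptions thrown by event listeners propagate to the
// caller so they surface during development; otherwise they are caught, logged, and re-emitted
// as a non-fatal OTHER_ERROR / INTERNAL_EXCEPTION event so a single faulty listener cannot
// break the player.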
|
||
|
||
_proto.listenerCount = function listenerCount(event) {
|
||
return this._emitter.listenerCount(event);
|
||
}
|
||
/**
|
||
* Dispose of the instance
|
||
*/
|
||
;
|
||
|
||
_proto.destroy = function destroy() {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log('destroy');
|
||
this.trigger(_events__WEBPACK_IMPORTED_MODULE_12__["Events"].DESTROYING, undefined);
|
||
this.detachMedia();
|
||
this.removeAllListeners();
|
||
this._autoLevelCapping = -1;
|
||
this.url = null;
|
||
this.networkControllers.forEach(function (component) {
|
||
return component.destroy();
|
||
});
|
||
this.networkControllers.length = 0;
|
||
this.coreComponents.forEach(function (component) {
|
||
return component.destroy();
|
||
});
|
||
this.coreComponents.length = 0;
|
||
}
|
||
/**
|
||
* Attaches Hls.js to a media element
|
||
* @param {HTMLMediaElement} media
|
||
*/
|
||
;
|
||
|
||
_proto.attachMedia = function attachMedia(media) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log('attachMedia');
|
||
this._media = media;
|
||
this.trigger(_events__WEBPACK_IMPORTED_MODULE_12__["Events"].MEDIA_ATTACHING, {
|
||
media: media
|
||
});
|
||
}
|
||
/**
|
||
* Detach Hls.js from the media
|
||
*/
|
||
;
|
||
|
||
_proto.detachMedia = function detachMedia() {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log('detachMedia');
|
||
this.trigger(_events__WEBPACK_IMPORTED_MODULE_12__["Events"].MEDIA_DETACHING, undefined);
|
||
this._media = null;
|
||
}
|
||
/**
|
||
* Set the source URL. Can be relative or absolute.
|
||
* @param {string} url
|
||
*/
|
||
;
|
||
|
||
_proto.loadSource = function loadSource(url) {
|
||
this.stopLoad();
|
||
var media = this.media;
|
||
var loadedSource = this.url;
|
||
var loadingSource = this.url = url_toolkit__WEBPACK_IMPORTED_MODULE_0__["buildAbsoluteURL"](self.location.href, url, {
|
||
alwaysNormalize: true
|
||
});
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("loadSource:" + loadingSource);
|
||
|
||
if (media && loadedSource && loadedSource !== loadingSource && this.bufferController.hasSourceTypes()) {
|
||
this.detachMedia();
|
||
this.attachMedia(media);
|
||
} // when attaching to a source URL, trigger a playlist load
|
||
|
||
|
||
this.trigger(_events__WEBPACK_IMPORTED_MODULE_12__["Events"].MANIFEST_LOADING, {
|
||
url: url
|
||
});
|
||
}
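// A minimal usage sketch of the public API above (illustrative only; `video` is an assumed
// HTMLMediaElement and the URL is a placeholder):
//
//   if (Hls.isSupported()) {
//     var hls = new Hls();
//     hls.attachMedia(video);
//     hls.on(Hls.Events.MEDIA_ATTACHED, function () {
//       hls.loadSource('https://example.com/playlist.m3u8');
//     });
//     hls.on(Hls.Events.MANIFEST_PARSED, function () {
//       video.play();
//     });
//   }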
|
||
/**
|
||
* Start loading data from the stream source.
|
||
* Depending on default config, client starts loading automatically when a source is set.
|
||
*
|
||
* @param {number} startPosition Set the start position to stream from
|
||
* @default -1 None (from earliest point)
|
||
*/
|
||
;
|
||
|
||
_proto.startLoad = function startLoad(startPosition) {
|
||
if (startPosition === void 0) {
|
||
startPosition = -1;
|
||
}
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("startLoad(" + startPosition + ")");
|
||
this.networkControllers.forEach(function (controller) {
|
||
controller.startLoad(startPosition);
|
||
});
|
||
}
|
||
/**
|
||
* Stop loading of any stream data.
|
||
*/
|
||
;
|
||
|
||
_proto.stopLoad = function stopLoad() {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log('stopLoad');
|
||
this.networkControllers.forEach(function (controller) {
|
||
controller.stopLoad();
|
||
});
|
||
}
|
||
/**
|
||
* Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
|
||
*/
|
||
;
|
||
|
||
_proto.swapAudioCodec = function swapAudioCodec() {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log('swapAudioCodec');
|
||
this.streamController.swapAudioCodec();
|
||
}
|
||
/**
|
||
* When the media element fails, this allows detaching and then re-attaching it
|
||
* as one call (convenience method).
|
||
*
|
||
* Automatic recovery of media-errors by this process is configurable.
|
||
*/
|
||
;
|
||
|
||
_proto.recoverMediaError = function recoverMediaError() {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log('recoverMediaError');
|
||
var media = this._media;
|
||
this.detachMedia();
|
||
|
||
if (media) {
|
||
this.attachMedia(media);
|
||
}
|
||
};
|
||
|
||
_proto.removeLevel = function removeLevel(levelIndex, urlId) {
|
||
if (urlId === void 0) {
|
||
urlId = 0;
|
||
}
|
||
|
||
this.levelController.removeLevel(levelIndex, urlId);
|
||
}
|
||
/**
|
||
* @type {Level[]}
|
||
*/
|
||
;
|
||
|
||
_createClass(Hls, [{
|
||
key: "levels",
|
||
get: function get() {
|
||
var levels = this.levelController.levels;
|
||
return levels ? levels : [];
|
||
}
|
||
/**
|
||
* Index of quality level currently played
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "currentLevel",
|
||
get: function get() {
|
||
return this.streamController.currentLevel;
|
||
}
|
||
/**
|
||
* Set quality level index immediately.
* This will flush the current buffer to replace the quality asap.
* That means playback will be interrupted at least briefly to re-buffer and re-sync.
|
||
* @type {number} -1 for automatic level selection
|
||
*/
|
||
,
|
||
set: function set(newLevel) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("set currentLevel:" + newLevel);
|
||
this.loadLevel = newLevel;
|
||
this.abrController.clearTimer();
|
||
this.streamController.immediateLevelSwitch();
|
||
}
|
||
/**
|
||
* Index of next quality level loaded as scheduled by stream controller.
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "nextLevel",
|
||
get: function get() {
|
||
return this.streamController.nextLevel;
|
||
}
|
||
/**
|
||
* Set quality level index for next loaded data.
|
||
* This will switch the video quality asap, without interrupting playback.
|
||
* May abort current loading of data, and flush parts of buffer (outside currently played fragment region).
|
||
* @type {number} -1 for automatic level selection
|
||
*/
|
||
,
|
||
set: function set(newLevel) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("set nextLevel:" + newLevel);
|
||
this.levelController.manualLevel = newLevel;
|
||
this.streamController.nextLevelSwitch();
|
||
}
|
||
/**
|
||
* Return the quality level of the current segment, or of the last loaded segment if none is loading currently
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "loadLevel",
|
||
get: function get() {
|
||
return this.levelController.level;
|
||
}
|
||
/**
|
||
* Set quality level index for next loaded data in a conservative way.
|
||
* This will switch the quality without flushing, but interrupt current loading.
|
||
* Thus the quality switch will only take effect once the already buffered media has been played.
|
||
* @type {number} newLevel -1 for automatic level selection
|
||
*/
|
||
,
|
||
set: function set(newLevel) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("set loadLevel:" + newLevel);
|
||
this.levelController.manualLevel = newLevel;
|
||
}
|
||
/**
|
||
* get next quality level loaded
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "nextLoadLevel",
|
||
get: function get() {
|
||
return this.levelController.nextLoadLevel;
|
||
}
|
||
/**
|
||
* Set quality level of next loaded segment in a fully "non-destructive" way.
|
||
* Same as `loadLevel` but will wait for next switch (until current loading is done).
|
||
* @type {number} level
|
||
*/
|
||
,
|
||
set: function set(level) {
|
||
this.levelController.nextLoadLevel = level;
|
||
}
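// A summary of the four level knobs defined in this accessor list, as documented above:
//   currentLevel  - switch now; flushes the buffer, so playback may briefly interrupt.
//   nextLevel     - switch asap without interrupting; may abort in-flight loads and flush
//                   forward buffer outside the currently played fragment.
//   loadLevel     - conservative switch; no flush, takes effect after the existing buffer.
//   nextLoadLevel - like loadLevel but waits until the current load completes.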
|
||
/**
|
||
* Return "first level": like a default level, if not set,
|
||
* falls back to index of first level referenced in manifest
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "firstLevel",
|
||
get: function get() {
|
||
return Math.max(this.levelController.firstLevel, this.minAutoLevel);
|
||
}
|
||
/**
|
||
* Sets "first-level", see getter.
|
||
* @type {number}
|
||
*/
|
||
,
|
||
set: function set(newLevel) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("set firstLevel:" + newLevel);
|
||
this.levelController.firstLevel = newLevel;
|
||
}
|
||
/**
|
||
* Return start level (level of first fragment that will be played back)
|
||
* if not overridden by the user, the first level appearing in the manifest will be used as the start level
|
||
* if -1 : automatic start level selection, playback will start from level matching download bandwidth
|
||
* (determined from download of first segment)
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "startLevel",
|
||
get: function get() {
|
||
return this.levelController.startLevel;
|
||
}
|
||
/**
|
||
* set start level (level of first fragment that will be played back)
|
||
* if not overridden by the user, the first level appearing in the manifest will be used as the start level
|
||
* if -1 : automatic start level selection, playback will start from level matching download bandwidth
|
||
* (determined from download of first segment)
|
||
* @type {number} newLevel
|
||
*/
|
||
,
|
||
set: function set(newLevel) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("set startLevel:" + newLevel); // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
|
||
|
||
if (newLevel !== -1) {
|
||
newLevel = Math.max(newLevel, this.minAutoLevel);
|
||
}
|
||
|
||
this.levelController.startLevel = newLevel;
|
||
}
|
||
/**
|
||
* Get the current setting for capLevelToPlayerSize
|
||
*
|
||
* @type {boolean}
|
||
*/
|
||
|
||
}, {
|
||
key: "capLevelToPlayerSize",
|
||
get: function get() {
|
||
return this.config.capLevelToPlayerSize;
|
||
}
|
||
/**
|
||
* Dynamically enable/disable capLevelToPlayerSize (handled by `CapLevelController`)
|
||
*
|
||
* @type {boolean}
|
||
*/
|
||
,
|
||
set: function set(shouldStartCapping) {
|
||
var newCapLevelToPlayerSize = !!shouldStartCapping;
|
||
|
||
if (newCapLevelToPlayerSize !== this.config.capLevelToPlayerSize) {
|
||
if (newCapLevelToPlayerSize) {
|
||
this.capLevelController.startCapping(); // If capping occurs, nextLevelSwitch will happen based on size.
|
||
} else {
|
||
this.capLevelController.stopCapping();
|
||
this.autoLevelCapping = -1;
|
||
this.streamController.nextLevelSwitch(); // Now we're uncapped, get the next level asap.
|
||
}
|
||
|
||
this.config.capLevelToPlayerSize = newCapLevelToPlayerSize;
|
||
}
|
||
}
|
||
/**
|
||
* Capping/max level value that should be used by automatic level selection algorithm (`ABRController`)
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "autoLevelCapping",
|
||
get: function get() {
|
||
return this._autoLevelCapping;
|
||
}
|
||
/**
|
||
* get bandwidth estimate
|
||
* @type {number}
|
||
*/
|
||
,
|
||
set:
|
||
/**
|
||
* Capping/max level value that should be used by automatic level selection algorithm (`ABRController`)
|
||
* @type {number}
|
||
*/
|
||
function set(newLevel) {
|
||
if (this._autoLevelCapping !== newLevel) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_9__["logger"].log("set autoLevelCapping:" + newLevel);
|
||
this._autoLevelCapping = newLevel;
|
||
}
|
||
}
|
||
/**
|
||
* True when automatic level selection enabled
|
||
* @type {boolean}
|
||
*/
|
||
|
||
}, {
|
||
key: "bandwidthEstimate",
|
||
get: function get() {
|
||
var bwEstimator = this.abrController.bwEstimator;
|
||
|
||
if (!bwEstimator) {
|
||
return NaN;
|
||
}
|
||
|
||
return bwEstimator.getEstimate();
|
||
}
|
||
}, {
|
||
key: "autoLevelEnabled",
|
||
get: function get() {
|
||
return this.levelController.manualLevel === -1;
|
||
}
|
||
/**
|
||
* Level set manually (if any)
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "manualLevel",
|
||
get: function get() {
|
||
return this.levelController.manualLevel;
|
||
}
|
||
/**
|
||
* min level selectable in auto mode according to config.minAutoBitrate
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "minAutoLevel",
|
||
get: function get() {
|
||
var levels = this.levels,
|
||
minAutoBitrate = this.config.minAutoBitrate;
|
||
if (!levels) return 0;
|
||
var len = levels.length;
|
||
|
||
for (var i = 0; i < len; i++) {
|
||
if (levels[i].maxBitrate >= minAutoBitrate) {
|
||
return i;
|
||
}
|
||
}
|
||
|
||
return 0;
|
||
}
|
||
/**
|
||
* max level selectable in auto mode according to autoLevelCapping
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "maxAutoLevel",
|
||
get: function get() {
|
||
var levels = this.levels,
|
||
autoLevelCapping = this.autoLevelCapping;
|
||
var maxAutoLevel;
|
||
|
||
if (autoLevelCapping === -1 && levels && levels.length) {
|
||
maxAutoLevel = levels.length - 1;
|
||
} else {
|
||
maxAutoLevel = autoLevelCapping;
|
||
}
|
||
|
||
return maxAutoLevel;
|
||
}
|
||
/**
|
||
* next automatically selected quality level
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "nextAutoLevel",
|
||
get: function get() {
|
||
// ensure next auto level is between min and max auto level
|
||
return Math.min(Math.max(this.abrController.nextAutoLevel, this.minAutoLevel), this.maxAutoLevel);
|
||
}
|
||
/**
|
||
* this setter is used to force next auto level.
|
||
* this is useful to force a switch down in auto mode:
|
||
* (in case of a load error on level N, hls.js can set nextAutoLevel to N-1, for example).
|
||
* forced value is valid for one fragment. upon successful frag loading at forced level,
|
||
* this value will be reset to -1 by the ABR controller.
|
||
* @type {number}
|
||
*/
|
||
,
|
||
set: function set(nextLevel) {
|
||
this.abrController.nextAutoLevel = Math.max(this.minAutoLevel, nextLevel);
|
||
}
|
||
/**
|
||
* @type {AudioTrack[]}
|
||
*/
|
||
|
||
}, {
|
||
key: "audioTracks",
|
||
get: function get() {
|
||
var audioTrackController = this.audioTrackController;
|
||
return audioTrackController ? audioTrackController.audioTracks : [];
|
||
}
|
||
/**
|
||
* index of the selected audio track (index in audio track lists)
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "audioTrack",
|
||
get: function get() {
|
||
var audioTrackController = this.audioTrackController;
|
||
return audioTrackController ? audioTrackController.audioTrack : -1;
|
||
}
|
||
/**
|
||
* selects an audio track, based on its index in audio track lists
|
||
* @type {number}
|
||
*/
|
||
,
|
||
set: function set(audioTrackId) {
|
||
var audioTrackController = this.audioTrackController;
|
||
|
||
if (audioTrackController) {
|
||
audioTrackController.audioTrack = audioTrackId;
|
||
}
|
||
}
|
||
/**
|
||
* get alternate subtitle tracks list from playlist
|
||
* @type {MediaPlaylist[]}
|
||
*/
|
||
|
||
}, {
|
||
key: "subtitleTracks",
|
||
get: function get() {
|
||
var subtitleTrackController = this.subtitleTrackController;
|
||
return subtitleTrackController ? subtitleTrackController.subtitleTracks : [];
|
||
}
|
||
/**
|
||
* index of the selected subtitle track (index in subtitle track lists)
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "subtitleTrack",
|
||
get: function get() {
|
||
var subtitleTrackController = this.subtitleTrackController;
|
||
return subtitleTrackController ? subtitleTrackController.subtitleTrack : -1;
|
||
},
|
||
set:
|
||
/**
|
||
* select a subtitle track, based on its index in the subtitle track list
|
||
* @type {number}
|
||
*/
|
||
function set(subtitleTrackId) {
|
||
var subtitleTrackController = this.subtitleTrackController;
|
||
|
||
if (subtitleTrackController) {
|
||
subtitleTrackController.subtitleTrack = subtitleTrackId;
|
||
}
|
||
}
|
||
/**
|
||
* @type {boolean}
|
||
*/
|
||
|
||
}, {
|
||
key: "media",
|
||
get: function get() {
|
||
return this._media;
|
||
}
|
||
}, {
|
||
key: "subtitleDisplay",
|
||
get: function get() {
|
||
var subtitleTrackController = this.subtitleTrackController;
|
||
return subtitleTrackController ? subtitleTrackController.subtitleDisplay : false;
|
||
}
|
||
/**
|
||
* Enable/disable subtitle display rendering
|
||
* @type {boolean}
|
||
*/
|
||
,
|
||
set: function set(value) {
|
||
var subtitleTrackController = this.subtitleTrackController;
|
||
|
||
if (subtitleTrackController) {
|
||
subtitleTrackController.subtitleDisplay = value;
|
||
}
|
||
}
|
||
/**
|
||
* get mode for Low-Latency HLS loading
|
||
* @type {boolean}
|
||
*/
|
||
|
||
}, {
|
||
key: "lowLatencyMode",
|
||
get: function get() {
|
||
return this.config.lowLatencyMode;
|
||
}
|
||
/**
|
||
* Enable/disable Low-Latency HLS part playlist and segment loading, and start live streams at playlist PART-HOLD-BACK rather than HOLD-BACK.
|
||
* @type {boolean}
|
||
*/
|
||
,
|
||
set: function set(mode) {
|
||
this.config.lowLatencyMode = mode;
|
||
}
|
||
/**
|
||
* position (in seconds) of live sync point (ie edge of live position minus safety delay defined by ```hls.config.liveSyncDuration```)
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "liveSyncPosition",
|
||
get: function get() {
|
||
return this.latencyController.liveSyncPosition;
|
||
}
|
||
/**
|
||
* estimated position (in seconds) of the live edge (i.e. the edge of the live playlist plus the time the playlist has advanced since it was loaded)
|
||
* returns 0 before first playlist is loaded
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "latency",
|
||
get: function get() {
|
||
return this.latencyController.latency;
|
||
}
|
||
/**
|
||
* maximum distance from the edge before the player seeks forward to ```hls.liveSyncPosition```
|
||
* configured using ```liveMaxLatencyDurationCount``` (multiple of target duration) or ```liveMaxLatencyDuration```
|
||
* returns 0 before first playlist is loaded
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "maxLatency",
|
||
get: function get() {
|
||
return this.latencyController.maxLatency;
|
||
}
|
||
/**
|
||
* target distance from the edge as calculated by the latency controller
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "targetLatency",
|
||
get: function get() {
|
||
return this.latencyController.targetLatency;
|
||
}
|
||
/**
|
||
* the rate at which the edge of the current live playlist is advancing or 1 if there is none
|
||
* @type {number}
|
||
*/
|
||
|
||
}, {
|
||
key: "drift",
|
||
get: function get() {
|
||
return this.latencyController.drift;
|
||
}
|
||
/**
|
||
* set to true when startLoad is called before MANIFEST_PARSED event
|
||
* @type {boolean}
|
||
*/
|
||
|
||
}, {
|
||
key: "forceStartLoad",
|
||
get: function get() {
|
||
return this.streamController.forceStartLoad;
|
||
}
|
||
}], [{
|
||
key: "version",
|
||
get: function get() {
|
||
return undefined;
|
||
}
|
||
}, {
|
||
key: "Events",
|
||
get: function get() {
|
||
return _events__WEBPACK_IMPORTED_MODULE_12__["Events"];
|
||
}
|
||
}, {
|
||
key: "ErrorTypes",
|
||
get: function get() {
|
||
return _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorTypes"];
|
||
}
|
||
}, {
|
||
key: "ErrorDetails",
|
||
get: function get() {
|
||
return _errors__WEBPACK_IMPORTED_MODULE_13__["ErrorDetails"];
|
||
}
|
||
}, {
|
||
key: "DefaultConfig",
|
||
get: function get() {
|
||
if (!Hls.defaultConfig) {
|
||
return _config__WEBPACK_IMPORTED_MODULE_10__["hlsDefaultConfig"];
|
||
}
|
||
|
||
return Hls.defaultConfig;
|
||
}
|
||
/**
|
||
* @type {HlsConfig}
|
||
*/
|
||
,
|
||
set: function set(defaultConfig) {
|
||
Hls.defaultConfig = defaultConfig;
|
||
}
|
||
}]);
|
||
|
||
return Hls;
|
||
}();
|
||
|
||
Hls.defaultConfig = void 0;
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/is-supported.ts":
|
||
/*!*****************************!*\
|
||
!*** ./src/is-supported.ts ***!
|
||
\*****************************/
|
||
/*! exports provided: isSupported, changeTypeSupported */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isSupported", function() { return isSupported; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "changeTypeSupported", function() { return changeTypeSupported; });
|
||
/* harmony import */ var _utils_mediasource_helper__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./utils/mediasource-helper */ "./src/utils/mediasource-helper.ts");
|
||
|
||
|
||
function getSourceBuffer() {
|
||
return self.SourceBuffer || self.WebKitSourceBuffer;
|
||
}
|
||
|
||
function isSupported() {
|
||
var mediaSource = Object(_utils_mediasource_helper__WEBPACK_IMPORTED_MODULE_0__["getMediaSource"])();
|
||
|
||
if (!mediaSource) {
|
||
return false;
|
||
}
|
||
|
||
var sourceBuffer = getSourceBuffer();
|
||
var isTypeSupported = mediaSource && typeof mediaSource.isTypeSupported === 'function' && mediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E,mp4a.40.2"'); // if SourceBuffer is exposed ensure its API is valid
|
||
// Safari and older versions of Chrome do not expose SourceBuffer globally, so checking SourceBuffer.prototype is not always possible
|
||
|
||
var sourceBufferValidAPI = !sourceBuffer || sourceBuffer.prototype && typeof sourceBuffer.prototype.appendBuffer === 'function' && typeof sourceBuffer.prototype.remove === 'function';
|
||
return !!isTypeSupported && !!sourceBufferValidAPI;
|
||
}
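// For reference: the MIME type probed above, 'video/mp4; codecs="avc1.42E01E,mp4a.40.2"',
// corresponds to H.264 Baseline profile level 3.0 plus AAC-LC, a combination virtually every
// MSE implementation accepts, which makes it a reasonable capability probe.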
|
||
function changeTypeSupported() {
|
||
var _sourceBuffer$prototy;
|
||
|
||
var sourceBuffer = getSourceBuffer();
|
||
return typeof (sourceBuffer === null || sourceBuffer === void 0 ? void 0 : (_sourceBuffer$prototy = sourceBuffer.prototype) === null || _sourceBuffer$prototy === void 0 ? void 0 : _sourceBuffer$prototy.changeType) === 'function';
|
||
}
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/loader/fragment-loader.ts":
|
||
/*!***************************************!*\
|
||
!*** ./src/loader/fragment-loader.ts ***!
|
||
\***************************************/
|
||
/*! exports provided: default, LoadError */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return FragmentLoader; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "LoadError", function() { return LoadError; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
|
||
|
||
|
||
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
|
||
|
||
function _wrapNativeSuper(Class) { var _cache = typeof Map === "function" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== "function") { throw new TypeError("Super expression must either be null or a function"); } if (typeof _cache !== "undefined") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }
|
||
|
||
function _construct(Parent, args, Class) { if (_isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }
|
||
|
||
function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {})); return true; } catch (e) { return false; } }
|
||
|
||
function _isNativeFunction(fn) { return Function.toString.call(fn).indexOf("[native code]") !== -1; }
|
||
|
||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
|
||
|
||
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
|
||
|
||
|
||
var MIN_CHUNK_SIZE = Math.pow(2, 17); // 128 KiB (131072 bytes)
|
||
|
||
var FragmentLoader = /*#__PURE__*/function () {
|
||
function FragmentLoader(config) {
|
||
this.config = void 0;
|
||
this.loader = null;
|
||
this.partLoadTimeout = -1;
|
||
this.config = config;
|
||
}
|
||
|
||
var _proto = FragmentLoader.prototype;
|
||
|
||
_proto.destroy = function destroy() {
|
||
if (this.loader) {
|
||
this.loader.destroy();
|
||
this.loader = null;
|
||
}
|
||
};
|
||
|
||
_proto.abort = function abort() {
|
||
if (this.loader) {
|
||
// Abort the loader for the current fragment. Only one may load at any given time
|
||
this.loader.abort();
|
||
}
|
||
};
|
||
|
||
_proto.load = function load(frag, _onProgress) {
|
||
var _this = this;
|
||
|
||
var url = frag.url;
|
||
|
||
if (!url) {
|
||
return Promise.reject(new LoadError({
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_LOAD_ERROR,
|
||
fatal: false,
|
||
frag: frag,
|
||
networkDetails: null
|
||
}, "Fragment does not have a " + (url ? 'part list' : 'url')));
|
||
}
|
||
|
||
this.abort();
|
||
var config = this.config;
|
||
var FragmentILoader = config.fLoader;
|
||
var DefaultILoader = config.loader;
|
||
return new Promise(function (resolve, reject) {
|
||
if (_this.loader) {
|
||
_this.loader.destroy();
|
||
}
|
||
|
||
var loader = _this.loader = frag.loader = FragmentILoader ? new FragmentILoader(config) : new DefaultILoader(config);
|
||
var loaderContext = createLoaderContext(frag);
|
||
var loaderConfig = {
|
||
timeout: config.fragLoadingTimeOut,
|
||
maxRetry: 0,
|
||
retryDelay: 0,
|
||
maxRetryDelay: config.fragLoadingMaxRetryTimeout,
|
||
highWaterMark: frag.sn === 'initSegment' ? Infinity : MIN_CHUNK_SIZE
|
||
}; // Assign frag stats to the loader's stats reference
|
||
|
||
frag.stats = loader.stats;
|
||
loader.load(loaderContext, loaderConfig, {
|
||
onSuccess: function onSuccess(response, stats, context, networkDetails) {
|
||
_this.resetLoader(frag, loader);
|
||
|
||
resolve({
|
||
frag: frag,
|
||
part: null,
|
||
payload: response.data,
|
||
networkDetails: networkDetails
|
||
});
|
||
},
|
||
onError: function onError(response, context, networkDetails) {
|
||
_this.resetLoader(frag, loader);
|
||
|
||
reject(new LoadError({
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_LOAD_ERROR,
|
||
fatal: false,
|
||
frag: frag,
|
||
response: response,
|
||
networkDetails: networkDetails
|
||
}));
|
||
},
|
||
onAbort: function onAbort(stats, context, networkDetails) {
|
||
_this.resetLoader(frag, loader);
|
||
|
||
reject(new LoadError({
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].INTERNAL_ABORTED,
|
||
fatal: false,
|
||
frag: frag,
|
||
networkDetails: networkDetails
|
||
}));
|
||
},
|
||
onTimeout: function onTimeout(response, context, networkDetails) {
|
||
_this.resetLoader(frag, loader);
|
||
|
||
reject(new LoadError({
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_LOAD_TIMEOUT,
|
||
fatal: false,
|
||
frag: frag,
|
||
networkDetails: networkDetails
|
||
}));
|
||
},
|
||
onProgress: function onProgress(stats, context, data, networkDetails) {
|
||
if (_onProgress) {
|
||
_onProgress({
|
||
frag: frag,
|
||
part: null,
|
||
payload: data,
|
||
networkDetails: networkDetails
|
||
});
|
||
}
|
||
}
|
||
});
|
||
});
|
||
};
|
||
|
||
_proto.loadPart = function loadPart(frag, part, onProgress) {
|
||
var _this2 = this;
|
||
|
||
this.abort();
|
||
var config = this.config;
|
||
var FragmentILoader = config.fLoader;
|
||
var DefaultILoader = config.loader;
|
||
return new Promise(function (resolve, reject) {
|
||
if (_this2.loader) {
|
||
_this2.loader.destroy();
|
||
}
|
||
|
||
var loader = _this2.loader = frag.loader = FragmentILoader ? new FragmentILoader(config) : new DefaultILoader(config);
|
||
var loaderContext = createLoaderContext(frag, part);
|
||
var loaderConfig = {
|
||
timeout: config.fragLoadingTimeOut,
|
||
maxRetry: 0,
|
||
retryDelay: 0,
|
||
maxRetryDelay: config.fragLoadingMaxRetryTimeout,
|
||
highWaterMark: MIN_CHUNK_SIZE
|
||
}; // Assign part stats to the loader's stats reference
|
||
|
||
part.stats = loader.stats;
|
||
loader.load(loaderContext, loaderConfig, {
|
||
onSuccess: function onSuccess(response, stats, context, networkDetails) {
|
||
_this2.resetLoader(frag, loader);
|
||
|
||
_this2.updateStatsFromPart(frag, part);
|
||
|
||
var partLoadedData = {
|
||
frag: frag,
|
||
part: part,
|
||
payload: response.data,
|
||
networkDetails: networkDetails
|
||
};
|
||
onProgress(partLoadedData);
|
||
resolve(partLoadedData);
|
||
},
|
||
onError: function onError(response, context, networkDetails) {
|
||
_this2.resetLoader(frag, loader);
|
||
|
||
reject(new LoadError({
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_LOAD_ERROR,
|
||
fatal: false,
|
||
frag: frag,
|
||
part: part,
|
||
response: response,
|
||
networkDetails: networkDetails
|
||
}));
|
||
},
|
||
onAbort: function onAbort(stats, context, networkDetails) {
|
||
frag.stats.aborted = part.stats.aborted;
|
||
|
||
_this2.resetLoader(frag, loader);
|
||
|
||
reject(new LoadError({
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].INTERNAL_ABORTED,
|
||
fatal: false,
|
||
frag: frag,
|
||
part: part,
|
||
networkDetails: networkDetails
|
||
}));
|
||
},
|
||
onTimeout: function onTimeout(response, context, networkDetails) {
|
||
_this2.resetLoader(frag, loader);
|
||
|
||
reject(new LoadError({
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].FRAG_LOAD_TIMEOUT,
|
||
fatal: false,
|
||
frag: frag,
|
||
part: part,
|
||
networkDetails: networkDetails
|
||
}));
|
||
}
|
||
});
|
||
});
|
||
};
|
||
|
||
_proto.updateStatsFromPart = function updateStatsFromPart(frag, part) {
|
||
var fragStats = frag.stats;
|
||
var partStats = part.stats;
|
||
var partTotal = partStats.total;
|
||
fragStats.loaded += partStats.loaded;
|
||
|
||
if (partTotal) {
|
||
var estTotalParts = Math.round(frag.duration / part.duration);
|
||
var estLoadedParts = Math.min(Math.round(fragStats.loaded / partTotal), estTotalParts);
|
||
var estRemainingParts = estTotalParts - estLoadedParts;
|
||
var estRemainingBytes = estRemainingParts * Math.round(fragStats.loaded / estLoadedParts);
|
||
fragStats.total = fragStats.loaded + estRemainingBytes;
|
||
} else {
|
||
fragStats.total = Math.max(fragStats.loaded, fragStats.total);
|
||
}
|
||
|
||
var fragLoading = fragStats.loading;
|
||
var partLoading = partStats.loading;
|
||
|
||
if (fragLoading.start) {
|
||
// add to fragment loader latency
|
||
fragLoading.first += partLoading.first - partLoading.start;
|
||
} else {
|
||
fragLoading.start = partLoading.start;
|
||
fragLoading.first = partLoading.first;
|
||
}
|
||
|
||
fragLoading.end = partLoading.end;
|
||
};
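
// Worked example of the estimate above (hypothetical numbers): with frag.duration = 6,
// part.duration = 1 and partStats.total = 100000 bytes, after two loaded parts
// fragStats.loaded = 200000, so estTotalParts = 6, estLoadedParts = 2,
// estRemainingParts = 4, estRemainingBytes = 4 * 100000, and fragStats.total is
// estimated as 600000 until more parts arrive.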
|
||
|
||
_proto.resetLoader = function resetLoader(frag, loader) {
|
||
frag.loader = null;
|
||
|
||
if (this.loader === loader) {
|
||
self.clearTimeout(this.partLoadTimeout);
|
||
this.loader = null;
|
||
}
|
||
|
||
loader.destroy();
|
||
};
|
||
|
||
return FragmentLoader;
|
||
}();
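
// Illustrative sketch (not part of the library's exports): how a caller might drive
// FragmentLoader directly. `config` is assumed to be a prepared hls.js config object and
// `frag` an already-parsed fragment with a resolvable url; the progress callback is optional.
function exampleLoadFragment(config, frag) {
  var fragmentLoader = new FragmentLoader(config);
  return fragmentLoader.load(frag, function (progressData) {
    // progressData.payload exposes fragment data as it is streamed in
  }).then(function (result) {
    return result.payload; // the complete fragment payload (ArrayBuffer)
  }).catch(function (error) {
    // error is a LoadError carrying { type, details, fatal, frag, networkDetails }
    throw error;
  });
}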
|
||
|
||
|
||
|
||
function createLoaderContext(frag, part) {
|
||
if (part === void 0) {
|
||
part = null;
|
||
}
|
||
|
||
var segment = part || frag;
|
||
var loaderContext = {
|
||
frag: frag,
|
||
part: part,
|
||
responseType: 'arraybuffer',
|
||
url: segment.url,
|
||
headers: {},
|
||
rangeStart: 0,
|
||
rangeEnd: 0
|
||
};
|
||
var start = segment.byteRangeStartOffset;
|
||
var end = segment.byteRangeEndOffset;
|
||
|
||
if (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(start) && Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(end)) {
|
||
loaderContext.rangeStart = start;
|
||
loaderContext.rangeEnd = end;
|
||
}
|
||
|
||
return loaderContext;
|
||
}
|
||
|
||
var LoadError = /*#__PURE__*/function (_Error) {
|
||
_inheritsLoose(LoadError, _Error);
|
||
|
||
function LoadError(data) {
|
||
var _this3;
|
||
|
||
for (var _len = arguments.length, params = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
|
||
params[_key - 1] = arguments[_key];
|
||
}
|
||
|
||
_this3 = _Error.call.apply(_Error, [this].concat(params)) || this;
|
||
_this3.data = void 0;
|
||
_this3.data = data;
|
||
return _this3;
|
||
}
|
||
|
||
return LoadError;
|
||
}( /*#__PURE__*/_wrapNativeSuper(Error));
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/loader/fragment.ts":
|
||
/*!********************************!*\
|
||
!*** ./src/loader/fragment.ts ***!
|
||
\********************************/
|
||
/*! exports provided: ElementaryStreamTypes, BaseSegment, Fragment, Part */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "ElementaryStreamTypes", function() { return ElementaryStreamTypes; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "BaseSegment", function() { return BaseSegment; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "Fragment", function() { return Fragment; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "Part", function() { return Part; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var url_toolkit__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! url-toolkit */ "./node_modules/url-toolkit/src/url-toolkit.js");
|
||
/* harmony import */ var url_toolkit__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(url_toolkit__WEBPACK_IMPORTED_MODULE_1__);
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _level_key__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./level-key */ "./src/loader/level-key.ts");
|
||
/* harmony import */ var _load_stats__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./load-stats */ "./src/loader/load-stats.ts");
|
||
|
||
|
||
|
||
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
|
||
|
||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
|
||
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
|
||
|
||
|
||
|
||
var ElementaryStreamTypes;
|
||
|
||
(function (ElementaryStreamTypes) {
|
||
ElementaryStreamTypes["AUDIO"] = "audio";
|
||
ElementaryStreamTypes["VIDEO"] = "video";
|
||
ElementaryStreamTypes["AUDIOVIDEO"] = "audiovideo";
|
||
})(ElementaryStreamTypes || (ElementaryStreamTypes = {}));
|
||
|
||
var BaseSegment = /*#__PURE__*/function () {
|
||
// baseurl is the URL to the playlist
|
||
// relurl is the portion of the URL that comes from inside the playlist.
|
||
// Holds the types of data this fragment supports
|
||
function BaseSegment(baseurl) {
|
||
var _this$elementaryStrea;
|
||
|
||
this._byteRange = null;
|
||
this._url = null;
|
||
this.baseurl = void 0;
|
||
this.relurl = void 0;
|
||
this.elementaryStreams = (_this$elementaryStrea = {}, _this$elementaryStrea[ElementaryStreamTypes.AUDIO] = null, _this$elementaryStrea[ElementaryStreamTypes.VIDEO] = null, _this$elementaryStrea[ElementaryStreamTypes.AUDIOVIDEO] = null, _this$elementaryStrea);
|
||
this.baseurl = baseurl;
|
||
} // setByteRange converts an EXT-X-BYTERANGE attribute into a two-element array
|
||
|
||
|
||
var _proto = BaseSegment.prototype;
|
||
|
||
_proto.setByteRange = function setByteRange(value, previous) {
|
||
var params = value.split('@', 2);
|
||
var byteRange = [];
|
||
|
||
if (params.length === 1) {
|
||
byteRange[0] = previous ? previous.byteRangeEndOffset : 0;
|
||
} else {
|
||
byteRange[0] = parseInt(params[1]);
|
||
}
|
||
|
||
byteRange[1] = parseInt(params[0]) + byteRange[0];
|
||
this._byteRange = byteRange;
|
||
};
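
// Worked example: for the tag value '1000@2000' (length@offset) byteRange becomes
// [2000, 3000], so byteRangeStartOffset is 2000 and byteRangeEndOffset is 3000.
// Without an '@offset' part, the range starts at the previous segment's end offset (or 0).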
|
||
|
||
_createClass(BaseSegment, [{
|
||
key: "byteRange",
|
||
get: function get() {
|
||
if (!this._byteRange) {
|
||
return [];
|
||
}
|
||
|
||
return this._byteRange;
|
||
}
|
||
}, {
|
||
key: "byteRangeStartOffset",
|
||
get: function get() {
|
||
return this.byteRange[0];
|
||
}
|
||
}, {
|
||
key: "byteRangeEndOffset",
|
||
get: function get() {
|
||
return this.byteRange[1];
|
||
}
|
||
}, {
|
||
key: "url",
|
||
get: function get() {
|
||
if (!this._url && this.baseurl && this.relurl) {
|
||
this._url = Object(url_toolkit__WEBPACK_IMPORTED_MODULE_1__["buildAbsoluteURL"])(this.baseurl, this.relurl, {
|
||
alwaysNormalize: true
|
||
});
|
||
}
|
||
|
||
return this._url || '';
|
||
},
|
||
set: function set(value) {
|
||
this._url = value;
|
||
}
|
||
}]);
|
||
|
||
return BaseSegment;
|
||
}();
|
||
var Fragment = /*#__PURE__*/function (_BaseSegment) {
|
||
_inheritsLoose(Fragment, _BaseSegment);
|
||
|
||
// EXTINF has to be present for an m3u8 to be considered valid
|
||
// sn notates the sequence number for a segment, and if set to a string can be 'initSegment'
|
||
// levelkey is the EXT-X-KEY that applies to this segment for decryption
|
||
// core difference from the private field _decryptdata is the lack of the initialized IV
|
||
// _decryptdata will set the IV for this segment based on the segment number in the fragment
|
||
// A string representing the fragment type
|
||
// A reference to the loader. Set while the fragment is loading, and removed afterwards. Used to abort fragment loading
|
||
// The level/track index to which the fragment belongs
|
||
// The continuity counter of the fragment
|
||
// The starting Presentation Time Stamp (PTS) of the fragment. Set after transmux complete.
|
||
// The ending Presentation Time Stamp (PTS) of the fragment. Set after transmux complete.
|
||
// The latest Presentation Time Stamp (PTS) appended to the buffer.
|
||
// The starting Decode Time Stamp (DTS) of the fragment. Set after transmux complete.
|
||
// The ending Decode Time Stamp (DTS) of the fragment. Set after transmux complete.
|
||
// The start time of the fragment, as listed in the manifest. Updated after transmux complete.
|
||
// Set by `updateFragPTSDTS` in level-helper
|
||
// The maximum starting Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
|
||
// The minimum ending Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
|
||
// Load/parse timing information
|
||
// A flag indicating whether the segment was downloaded in order to test bitrate, and was not buffered
|
||
// #EXTINF segment title
|
||
// The Media Initialization Section for this segment
|
||
function Fragment(type, baseurl) {
|
||
var _this;
|
||
|
||
_this = _BaseSegment.call(this, baseurl) || this;
|
||
_this._decryptdata = null;
|
||
_this.rawProgramDateTime = null;
|
||
_this.programDateTime = null;
|
||
_this.tagList = [];
|
||
_this.duration = 0;
|
||
_this.sn = 0;
|
||
_this.levelkey = void 0;
|
||
_this.type = void 0;
|
||
_this.loader = null;
|
||
_this.level = -1;
|
||
_this.cc = 0;
|
||
_this.startPTS = void 0;
|
||
_this.endPTS = void 0;
|
||
_this.appendedPTS = void 0;
|
||
_this.startDTS = void 0;
|
||
_this.endDTS = void 0;
|
||
_this.start = 0;
|
||
_this.deltaPTS = void 0;
|
||
_this.maxStartPTS = void 0;
|
||
_this.minEndPTS = void 0;
|
||
_this.stats = new _load_stats__WEBPACK_IMPORTED_MODULE_4__["LoadStats"]();
|
||
_this.urlId = 0;
|
||
_this.data = void 0;
|
||
_this.bitrateTest = false;
|
||
_this.title = null;
|
||
_this.initSegment = null;
|
||
_this.type = type;
|
||
return _this;
|
||
}
|
||
|
||
var _proto2 = Fragment.prototype;
|
||
|
||
/**
|
||
* Utility method for parseLevelPlaylist to create an initialization vector for a given segment
|
||
* @param {number} segmentNumber - segment number to generate IV with
|
||
* @returns {Uint8Array}
|
||
*/
|
||
_proto2.createInitializationVector = function createInitializationVector(segmentNumber) {
|
||
var uint8View = new Uint8Array(16);
|
||
|
||
for (var i = 12; i < 16; i++) {
|
||
uint8View[i] = segmentNumber >> 8 * (15 - i) & 0xff;
|
||
}
|
||
|
||
return uint8View;
|
||
}
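
// Worked example: for segmentNumber 300 (0x12C) the IV is 16 bytes that are all zero
// except the big-endian 32-bit tail in bytes 12-15: 0x00, 0x00, 0x01, 0x2C.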
|
||
/**
|
||
* Utility method for parseLevelPlaylist to get a fragment's decryption data from the currently parsed encryption key data
|
||
* @param levelkey - a playlist's encryption info
|
||
* @param segmentNumber - the fragment's segment number
|
||
* @returns {LevelKey} - an object to be applied as a fragment's decryptdata
|
||
*/
|
||
;
|
||
|
||
_proto2.setDecryptDataFromLevelKey = function setDecryptDataFromLevelKey(levelkey, segmentNumber) {
|
||
var decryptdata = levelkey;
|
||
|
||
if ((levelkey === null || levelkey === void 0 ? void 0 : levelkey.method) === 'AES-128' && levelkey.uri && !levelkey.iv) {
|
||
decryptdata = _level_key__WEBPACK_IMPORTED_MODULE_3__["LevelKey"].fromURI(levelkey.uri);
|
||
decryptdata.method = levelkey.method;
|
||
decryptdata.iv = this.createInitializationVector(segmentNumber);
|
||
decryptdata.keyFormat = 'identity';
|
||
}
|
||
|
||
return decryptdata;
|
||
};
|
||
|
||
_proto2.setElementaryStreamInfo = function setElementaryStreamInfo(type, startPTS, endPTS, startDTS, endDTS, partial) {
|
||
if (partial === void 0) {
|
||
partial = false;
|
||
}
|
||
|
||
var elementaryStreams = this.elementaryStreams;
|
||
var info = elementaryStreams[type];
|
||
|
||
if (!info) {
|
||
elementaryStreams[type] = {
|
||
startPTS: startPTS,
|
||
endPTS: endPTS,
|
||
startDTS: startDTS,
|
||
endDTS: endDTS,
|
||
partial: partial
|
||
};
|
||
return;
|
||
}
|
||
|
||
info.startPTS = Math.min(info.startPTS, startPTS);
|
||
info.endPTS = Math.max(info.endPTS, endPTS);
|
||
info.startDTS = Math.min(info.startDTS, startDTS);
|
||
info.endDTS = Math.max(info.endDTS, endDTS);
|
||
};
|
||
|
||
_proto2.clearElementaryStreamInfo = function clearElementaryStreamInfo() {
|
||
var elementaryStreams = this.elementaryStreams;
|
||
elementaryStreams[ElementaryStreamTypes.AUDIO] = null;
|
||
elementaryStreams[ElementaryStreamTypes.VIDEO] = null;
|
||
elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO] = null;
|
||
};
|
||
|
||
_createClass(Fragment, [{
|
||
key: "decryptdata",
|
||
get: function get() {
|
||
if (!this.levelkey && !this._decryptdata) {
|
||
return null;
|
||
}
|
||
|
||
if (!this._decryptdata && this.levelkey) {
|
||
var sn = this.sn;
|
||
|
||
if (typeof sn !== 'number') {
|
||
// We are fetching decryption data for an initialization segment
|
||
// If the segment was encrypted with AES-128
|
||
// It must have an IV defined. We cannot substitute the Segment Number in.
|
||
if (this.levelkey && this.levelkey.method === 'AES-128' && !this.levelkey.iv) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("missing IV for initialization segment with method=\"" + this.levelkey.method + "\" - compliance issue");
|
||
}
|
||
/*
  'initSegment' is a string sn: implicit conversion to a Number yields NaN, and NaN
  passed through ToInt32() becomes +0. Explicitly set sn to 0 so that IV generation
  for the init segment uses that value rather than relying on the implicit conversion.
*/
|
||
|
||
|
||
sn = 0;
|
||
}
|
||
|
||
this._decryptdata = this.setDecryptDataFromLevelKey(this.levelkey, sn);
|
||
}
|
||
|
||
return this._decryptdata;
|
||
}
|
||
}, {
|
||
key: "end",
|
||
get: function get() {
|
||
return this.start + this.duration;
|
||
}
|
||
}, {
|
||
key: "endProgramDateTime",
|
||
get: function get() {
|
||
if (this.programDateTime === null) {
|
||
return null;
|
||
}
|
||
|
||
if (!Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(this.programDateTime)) {
|
||
return null;
|
||
}
|
||
|
||
var duration = !Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(this.duration) ? 0 : this.duration;
|
||
return this.programDateTime + duration * 1000;
|
||
}
|
||
}, {
|
||
key: "encrypted",
|
||
get: function get() {
|
||
var _this$decryptdata;
|
||
|
||
// At the m3u8-parser level we need to add support for manifest signalled keyformats
|
||
// when we want the fragment to start reporting that it is encrypted.
|
||
// Currently, keyFormat will only be set for identity keys
|
||
if ((_this$decryptdata = this.decryptdata) !== null && _this$decryptdata !== void 0 && _this$decryptdata.keyFormat && this.decryptdata.uri) {
|
||
return true;
|
||
}
|
||
|
||
return false;
|
||
}
|
||
}]);
|
||
|
||
return Fragment;
|
||
}(BaseSegment);
|
||
var Part = /*#__PURE__*/function (_BaseSegment2) {
|
||
_inheritsLoose(Part, _BaseSegment2);
|
||
|
||
function Part(partAttrs, frag, baseurl, index, previous) {
|
||
var _this2;
|
||
|
||
_this2 = _BaseSegment2.call(this, baseurl) || this;
|
||
_this2.fragOffset = 0;
|
||
_this2.duration = 0;
|
||
_this2.gap = false;
|
||
_this2.independent = false;
|
||
_this2.relurl = void 0;
|
||
_this2.fragment = void 0;
|
||
_this2.index = void 0;
|
||
_this2.stats = new _load_stats__WEBPACK_IMPORTED_MODULE_4__["LoadStats"]();
|
||
_this2.duration = partAttrs.decimalFloatingPoint('DURATION');
|
||
_this2.gap = partAttrs.bool('GAP');
|
||
_this2.independent = partAttrs.bool('INDEPENDENT');
|
||
_this2.relurl = partAttrs.enumeratedString('URI');
|
||
_this2.fragment = frag;
|
||
_this2.index = index;
|
||
var byteRange = partAttrs.enumeratedString('BYTERANGE');
|
||
|
||
if (byteRange) {
|
||
_this2.setByteRange(byteRange, previous);
|
||
}
|
||
|
||
if (previous) {
|
||
_this2.fragOffset = previous.fragOffset + previous.duration;
|
||
}
|
||
|
||
return _this2;
|
||
}
|
||
|
||
_createClass(Part, [{
|
||
key: "start",
|
||
get: function get() {
|
||
return this.fragment.start + this.fragOffset;
|
||
}
|
||
}, {
|
||
key: "end",
|
||
get: function get() {
|
||
return this.start + this.duration;
|
||
}
|
||
}, {
|
||
key: "loaded",
|
||
get: function get() {
|
||
var elementaryStreams = this.elementaryStreams;
|
||
return !!(elementaryStreams.audio || elementaryStreams.video || elementaryStreams.audiovideo);
|
||
}
|
||
}]);
|
||
|
||
return Part;
|
||
}(BaseSegment);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/loader/key-loader.ts":
|
||
/*!**********************************!*\
|
||
!*** ./src/loader/key-loader.ts ***!
|
||
\**********************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return KeyLoader; });
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/*
|
||
* Decrypt key Loader
|
||
*/
|
||
|
||
|
||
|
||
|
||
var aes = __webpack_require__(/*! ../utils/aes */ "./src/utils/aes.js");
|
||
|
||
var KeyLoader = /*#__PURE__*/function () {
|
||
function KeyLoader(hls) {
|
||
this.hls = void 0;
|
||
this.loaders = {};
|
||
this.decryptkey = null;
|
||
this.decrypturl = null;
|
||
this.hls = hls;
|
||
|
||
this._registerListeners();
|
||
}
|
||
|
||
var _proto = KeyLoader.prototype;
|
||
|
||
_proto._registerListeners = function _registerListeners() {
|
||
this.hls.on(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].KEY_LOADING, this.onKeyLoading, this);
|
||
};
|
||
|
||
_proto._unregisterListeners = function _unregisterListeners() {
|
||
this.hls.off(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].KEY_LOADING, this.onKeyLoading);
|
||
};
|
||
|
||
_proto.destroy = function destroy() {
|
||
this._unregisterListeners();
|
||
|
||
for (var loaderName in this.loaders) {
|
||
var loader = this.loaders[loaderName];
|
||
|
||
if (loader) {
|
||
loader.destroy();
|
||
}
|
||
}
|
||
|
||
this.loaders = {};
|
||
};
|
||
|
||
_proto.onKeyLoading = function onKeyLoading(event, data) {
|
||
var frag = data.frag;
|
||
var type = frag.type;
|
||
var loader = this.loaders[type];
|
||
|
||
if (!frag.decryptdata) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn('Missing decryption data on fragment in onKeyLoading');
|
||
return;
|
||
} // Load the key if the uri is different from the previous one, or if the decrypt key has not yet been retrieved
|
||
|
||
|
||
var uri = frag.decryptdata.uri;
|
||
|
||
if (uri !== this.decrypturl || this.decryptkey === null) {
|
||
var config = this.hls.config;
|
||
|
||
if (loader) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn("abort previous key loader for type:" + type);
|
||
loader.abort();
|
||
}
|
||
|
||
if (!uri) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].warn('key uri is falsy');
|
||
return;
|
||
}
|
||
|
||
var Loader = config.loader;
|
||
var fragLoader = frag.loader = this.loaders[type] = new Loader(config);
|
||
this.decrypturl = uri;
|
||
this.decryptkey = null;
|
||
var loaderContext = {
|
||
url: uri,
|
||
frag: frag,
|
||
responseType: 'arraybuffer'
|
||
}; // maxRetry is 0 so that instead of retrying the same key on the same variant multiple times,
|
||
// key-loader will trigger an error and rely on stream-controller to handle retry logic.
|
||
// this will also align retry logic with fragment-loader
|
||
|
||
var loaderConfig = {
|
||
timeout: config.fragLoadingTimeOut,
|
||
maxRetry: 0,
|
||
retryDelay: config.fragLoadingRetryDelay,
|
||
maxRetryDelay: config.fragLoadingMaxRetryTimeout,
|
||
highWaterMark: 0
|
||
};
|
||
var loaderCallbacks = {
|
||
onSuccess: this.loadsuccess.bind(this),
|
||
onError: this.loaderror.bind(this),
|
||
onTimeout: this.loadtimeout.bind(this)
|
||
};
|
||
fragLoader.load(loaderContext, loaderConfig, loaderCallbacks);
|
||
} else if (this.decryptkey) {
|
||
// Return the key if it's already been loaded
|
||
frag.decryptdata.key = this.decryptkey;
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].KEY_LOADED, {
|
||
frag: frag
|
||
});
|
||
}
|
||
};
|
||
|
||
_proto.loadsuccess = function loadsuccess(response, stats, context) {
|
||
var frag = context.frag;
|
||
|
||
if (!frag.decryptdata) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_2__["logger"].error('after key load, decryptdata unset');
|
||
return;
|
||
} // this.decryptkey = frag.decryptdata.key = new Uint8Array(
|
||
// response.data as ArrayBuffer
|
||
// );
|
||
|
||
|
||
var responseKey = new Uint8Array(response.data);
|
||
var decryptedKey;
|
||
|
||
if (this.hls.config.overlayKey && this.hls.config.overlayIv) {
|
||
var overlayKey = this.hls.config.overlayKey;
|
||
var overlayIv = this.hls.config.overlayIv;
|
||
var keyArray = [];
|
||
var ivArray = [];
|
||
|
||
for (var i = 0; i < 16; i++) {
|
||
var _key = overlayKey.substring(i * 2, i * 2 + 2);
|
||
|
||
var _iv = overlayIv.substring(i * 2, i * 2 + 2); // @ts-ignore
|
||
|
||
|
||
keyArray.push(parseInt(_key, 16)); // @ts-ignore
|
||
|
||
ivArray.push(parseInt(_iv, 16));
|
||
}
|
||
|
||
var aesCbc = new aes.modeOfOperation.cbc(keyArray, ivArray);
|
||
decryptedKey = aesCbc.decrypt(responseKey);
|
||
} else {
|
||
decryptedKey = responseKey;
|
||
}
|
||
|
||
this.decryptkey = frag.decryptdata.key = decryptedKey; // detach fragment loader on load success
|
||
|
||
frag.loader = null;
|
||
delete this.loaders[frag.type];
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].KEY_LOADED, {
|
||
frag: frag
|
||
});
|
||
};
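
// Illustrative helper (not part of the library's exports): the overlay-key branch above
// assumes config.overlayKey and config.overlayIv are 32-character hex strings (16 bytes
// each). A standalone equivalent of that hex-to-byte parsing step:
function exampleHexToBytes(hex) {
  var bytes = [];

  for (var j = 0; j < hex.length; j += 2) {
    bytes.push(parseInt(hex.substring(j, j + 2), 16));
  }

  return bytes; // e.g. exampleHexToBytes('0001ff') -> [0, 1, 255]
}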
|
||
|
||
_proto.loaderror = function loaderror(response, context) {
|
||
var frag = context.frag;
|
||
var loader = frag.loader;
|
||
|
||
if (loader) {
|
||
loader.abort();
|
||
}
|
||
|
||
delete this.loaders[frag.type];
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_LOAD_ERROR,
|
||
fatal: false,
|
||
frag: frag,
|
||
response: response
|
||
});
|
||
};
|
||
|
||
_proto.loadtimeout = function loadtimeout(stats, context) {
|
||
var frag = context.frag;
|
||
var loader = frag.loader;
|
||
|
||
if (loader) {
|
||
loader.abort();
|
||
}
|
||
|
||
delete this.loaders[frag.type];
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_0__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorTypes"].NETWORK_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_1__["ErrorDetails"].KEY_LOAD_TIMEOUT,
|
||
fatal: false,
|
||
frag: frag
|
||
});
|
||
};
|
||
|
||
return KeyLoader;
|
||
}();
|
||
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/loader/level-details.ts":
|
||
/*!*************************************!*\
|
||
!*** ./src/loader/level-details.ts ***!
|
||
\*************************************/
|
||
/*! exports provided: LevelDetails */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "LevelDetails", function() { return LevelDetails; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
|
||
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
var DEFAULT_TARGET_DURATION = 10;
|
||
var LevelDetails = /*#__PURE__*/function () {
|
||
// Manifest reload synchronization
|
||
function LevelDetails(baseUrl) {
|
||
this.PTSKnown = false;
|
||
this.alignedSliding = false;
|
||
this.averagetargetduration = void 0;
|
||
this.endCC = 0;
|
||
this.endSN = 0;
|
||
this.fragments = void 0;
|
||
this.fragmentHint = void 0;
|
||
this.partList = null;
|
||
this.live = true;
|
||
this.ageHeader = 0;
|
||
this.advancedDateTime = void 0;
|
||
this.updated = true;
|
||
this.advanced = true;
|
||
this.availabilityDelay = void 0;
|
||
this.misses = 0;
|
||
this.needSidxRanges = false;
|
||
this.startCC = 0;
|
||
this.startSN = 0;
|
||
this.startTimeOffset = null;
|
||
this.targetduration = 0;
|
||
this.totalduration = 0;
|
||
this.type = null;
|
||
this.url = void 0;
|
||
this.m3u8 = '';
|
||
this.version = null;
|
||
this.canBlockReload = false;
|
||
this.canSkipUntil = 0;
|
||
this.canSkipDateRanges = false;
|
||
this.skippedSegments = 0;
|
||
this.recentlyRemovedDateranges = void 0;
|
||
this.partHoldBack = 0;
|
||
this.holdBack = 0;
|
||
this.partTarget = 0;
|
||
this.preloadHint = void 0;
|
||
this.renditionReports = void 0;
|
||
this.tuneInGoal = 0;
|
||
this.deltaUpdateFailed = void 0;
|
||
this.driftStartTime = 0;
|
||
this.driftEndTime = 0;
|
||
this.driftStart = 0;
|
||
this.driftEnd = 0;
|
||
this.fragments = [];
|
||
this.url = baseUrl;
|
||
}
|
||
|
||
var _proto = LevelDetails.prototype;
|
||
|
||
_proto.reloaded = function reloaded(previous) {
|
||
if (!previous) {
|
||
this.advanced = true;
|
||
this.updated = true;
|
||
return;
|
||
}
|
||
|
||
var partSnDiff = this.lastPartSn - previous.lastPartSn;
|
||
var partIndexDiff = this.lastPartIndex - previous.lastPartIndex;
|
||
this.updated = this.endSN !== previous.endSN || !!partIndexDiff || !!partSnDiff;
|
||
this.advanced = this.endSN > previous.endSN || partSnDiff > 0 || partSnDiff === 0 && partIndexDiff > 0;
|
||
|
||
if (this.updated || this.advanced) {
|
||
this.misses = Math.floor(previous.misses * 0.6);
|
||
} else {
|
||
this.misses = previous.misses + 1;
|
||
}
|
||
|
||
this.availabilityDelay = previous.availabilityDelay;
|
||
};
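
// Example of the bookkeeping above: when the reloaded playlist changed (updated or
// advanced), misses decays, e.g. Math.floor(5 * 0.6) === 3; when nothing changed it is
// incremented instead (5 -> 6), a signal that can be used to back off reload polling.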
|
||
|
||
_createClass(LevelDetails, [{
|
||
key: "hasProgramDateTime",
|
||
get: function get() {
|
||
if (this.fragments.length) {
|
||
return Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(this.fragments[this.fragments.length - 1].programDateTime);
|
||
}
|
||
|
||
return false;
|
||
}
|
||
}, {
|
||
key: "levelTargetDuration",
|
||
get: function get() {
|
||
return this.averagetargetduration || this.targetduration || DEFAULT_TARGET_DURATION;
|
||
}
|
||
}, {
|
||
key: "drift",
|
||
get: function get() {
|
||
var runTime = this.driftEndTime - this.driftStartTime;
|
||
|
||
if (runTime > 0) {
|
||
var runDuration = this.driftEnd - this.driftStart;
|
||
return runDuration * 1000 / runTime;
|
||
}
|
||
|
||
return 1;
|
||
}
|
||
}, {
|
||
key: "edge",
|
||
get: function get() {
|
||
return this.partEnd || this.fragmentEnd;
|
||
}
|
||
}, {
|
||
key: "partEnd",
|
||
get: function get() {
|
||
var _this$partList;
|
||
|
||
if ((_this$partList = this.partList) !== null && _this$partList !== void 0 && _this$partList.length) {
|
||
return this.partList[this.partList.length - 1].end;
|
||
}
|
||
|
||
return this.fragmentEnd;
|
||
}
|
||
}, {
|
||
key: "fragmentEnd",
|
||
get: function get() {
|
||
var _this$fragments;
|
||
|
||
if ((_this$fragments = this.fragments) !== null && _this$fragments !== void 0 && _this$fragments.length) {
|
||
return this.fragments[this.fragments.length - 1].end;
|
||
}
|
||
|
||
return 0;
|
||
}
|
||
}, {
|
||
key: "age",
|
||
get: function get() {
|
||
if (this.advancedDateTime) {
|
||
return Math.max(Date.now() - this.advancedDateTime, 0) / 1000;
|
||
}
|
||
|
||
return 0;
|
||
}
|
||
}, {
|
||
key: "lastPartIndex",
|
||
get: function get() {
|
||
var _this$partList2;
|
||
|
||
if ((_this$partList2 = this.partList) !== null && _this$partList2 !== void 0 && _this$partList2.length) {
|
||
return this.partList[this.partList.length - 1].index;
|
||
}
|
||
|
||
return -1;
|
||
}
|
||
}, {
|
||
key: "lastPartSn",
|
||
get: function get() {
|
||
var _this$partList3;
|
||
|
||
if ((_this$partList3 = this.partList) !== null && _this$partList3 !== void 0 && _this$partList3.length) {
|
||
return this.partList[this.partList.length - 1].fragment.sn;
|
||
}
|
||
|
||
return this.endSN;
|
||
}
|
||
}]);
|
||
|
||
return LevelDetails;
|
||
}();
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/loader/level-key.ts":
|
||
/*!*********************************!*\
|
||
!*** ./src/loader/level-key.ts ***!
|
||
\*********************************/
|
||
/*! exports provided: LevelKey */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "LevelKey", function() { return LevelKey; });
|
||
/* harmony import */ var url_toolkit__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! url-toolkit */ "./node_modules/url-toolkit/src/url-toolkit.js");
|
||
/* harmony import */ var url_toolkit__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(url_toolkit__WEBPACK_IMPORTED_MODULE_0__);
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
|
||
var LevelKey = /*#__PURE__*/function () {
|
||
LevelKey.fromURL = function fromURL(baseUrl, relativeUrl) {
|
||
return new LevelKey(baseUrl, relativeUrl);
|
||
};
|
||
|
||
LevelKey.fromURI = function fromURI(uri) {
|
||
return new LevelKey(uri);
|
||
};
|
||
|
||
function LevelKey(absoluteOrBaseURI, relativeURL) {
|
||
this._uri = null;
|
||
this.method = null;
|
||
this.keyFormat = null;
|
||
this.keyFormatVersions = null;
|
||
this.keyID = null;
|
||
this.key = null;
|
||
this.iv = null;
|
||
|
||
if (relativeURL) {
|
||
this._uri = Object(url_toolkit__WEBPACK_IMPORTED_MODULE_0__["buildAbsoluteURL"])(absoluteOrBaseURI, relativeURL, {
|
||
alwaysNormalize: true
|
||
});
|
||
} else {
|
||
this._uri = absoluteOrBaseURI;
|
||
}
|
||
}
|
||
|
||
_createClass(LevelKey, [{
|
||
key: "uri",
|
||
get: function get() {
|
||
return this._uri;
|
||
}
|
||
}]);
|
||
|
||
return LevelKey;
|
||
}();
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/loader/load-stats.ts":
|
||
/*!**********************************!*\
|
||
!*** ./src/loader/load-stats.ts ***!
|
||
\**********************************/
|
||
/*! exports provided: LoadStats */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "LoadStats", function() { return LoadStats; });
|
||
var LoadStats = function LoadStats() {
|
||
this.aborted = false;
|
||
this.loaded = 0;
|
||
this.retry = 0;
|
||
this.total = 0;
|
||
this.chunkCount = 0;
|
||
this.bwEstimate = 0;
|
||
this.loading = {
|
||
start: 0,
|
||
first: 0,
|
||
end: 0
|
||
};
|
||
this.parsing = {
|
||
start: 0,
|
||
end: 0
|
||
};
|
||
this.buffering = {
|
||
start: 0,
|
||
first: 0,
|
||
end: 0
|
||
};
|
||
};
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/loader/m3u8-parser.ts":
|
||
/*!***********************************!*\
|
||
!*** ./src/loader/m3u8-parser.ts ***!
|
||
\***********************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return M3U8Parser; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var url_toolkit__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! url-toolkit */ "./node_modules/url-toolkit/src/url-toolkit.js");
|
||
/* harmony import */ var url_toolkit__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(url_toolkit__WEBPACK_IMPORTED_MODULE_1__);
|
||
/* harmony import */ var _fragment__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./fragment */ "./src/loader/fragment.ts");
|
||
/* harmony import */ var _level_details__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./level-details */ "./src/loader/level-details.ts");
|
||
/* harmony import */ var _level_key__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./level-key */ "./src/loader/level-key.ts");
|
||
/* harmony import */ var _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils/attr-list */ "./src/utils/attr-list.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _utils_codecs__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../utils/codecs */ "./src/utils/codecs.ts");
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
// https://regex101.com is your friend
|
||
var MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-SESSION-DATA:([^\r\n]*)[\r\n]+/g;
|
||
var MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
|
||
var LEVEL_PLAYLIST_REGEX_FAST = new RegExp([/#EXTINF:\s*(\d*(?:\.\d+)?)(?:,(.*)\s+)?/.source, // duration (#EXTINF:<duration>,<title>), group 1 => duration, group 2 => title
|
||
/(?!#) *(\S[\S ]*)/.source, // segment URI, group 3 => the URI (note newline is not eaten)
|
||
/#EXT-X-BYTERANGE:*(.+)/.source, // next segment's byterange, group 4 => range spec (x@y)
|
||
/#EXT-X-PROGRAM-DATE-TIME:(.+)/.source, // next segment's program date/time group 5 => the datetime spec
|
||
/#.*/.source // All other non-segment oriented tags will match with all groups empty
|
||
].join('|'), 'g');
|
||
var LEVEL_PLAYLIST_REGEX_SLOW = new RegExp([/#(EXTM3U)/.source, /#EXT-X-(PLAYLIST-TYPE):(.+)/.source, /#EXT-X-(MEDIA-SEQUENCE): *(\d+)/.source, /#EXT-X-(SKIP):(.+)/.source, /#EXT-X-(TARGETDURATION): *(\d+)/.source, /#EXT-X-(KEY):(.+)/.source, /#EXT-X-(START):(.+)/.source, /#EXT-X-(ENDLIST)/.source, /#EXT-X-(DISCONTINUITY-SEQ)UENCE: *(\d+)/.source, /#EXT-X-(DIS)CONTINUITY/.source, /#EXT-X-(VERSION):(\d+)/.source, /#EXT-X-(MAP):(.+)/.source, /#EXT-X-(SERVER-CONTROL):(.+)/.source, /#EXT-X-(PART-INF):(.+)/.source, /#EXT-X-(GAP)/.source, /#EXT-X-(BITRATE):\s*(\d+)/.source, /#EXT-X-(PART):(.+)/.source, /#EXT-X-(PRELOAD-HINT):(.+)/.source, /#EXT-X-(RENDITION-REPORT):(.+)/.source, /(#)([^:]*):(.*)/.source, /(#)(.*)(?:.*)\r?\n?/.source].join('|'));
|
||
var MP4_REGEX_SUFFIX = /\.(mp4|m4s|m4v|m4a)$/i;
|
||
|
||
function isMP4Url(url) {
|
||
var _URLToolkit$parseURL$, _URLToolkit$parseURL;
|
||
|
||
return MP4_REGEX_SUFFIX.test((_URLToolkit$parseURL$ = (_URLToolkit$parseURL = url_toolkit__WEBPACK_IMPORTED_MODULE_1__["parseURL"](url)) === null || _URLToolkit$parseURL === void 0 ? void 0 : _URLToolkit$parseURL.path) != null ? _URLToolkit$parseURL$ : '');
|
||
}
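
// Example (hypothetical URL): isMP4Url('https://cdn.example.com/v/seg-0001.m4s?token=abc')
// returns true, because only the parsed path ('/v/seg-0001.m4s') is tested against the
// .mp4/.m4s/.m4v/.m4a suffix; query strings do not affect the result.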
|
||
|
||
var M3U8Parser = /*#__PURE__*/function () {
|
||
function M3U8Parser() {}
|
||
|
||
M3U8Parser.findGroup = function findGroup(groups, mediaGroupId) {
|
||
for (var i = 0; i < groups.length; i++) {
|
||
var group = groups[i];
|
||
|
||
if (group.id === mediaGroupId) {
|
||
return group;
|
||
}
|
||
}
|
||
};
|
||
|
||
M3U8Parser.convertAVC1ToAVCOTI = function convertAVC1ToAVCOTI(codec) {
|
||
// Convert avc1 codec string from RFC-4281 to RFC-6381 for MediaSource.isTypeSupported
|
||
var avcdata = codec.split('.');
|
||
|
||
if (avcdata.length > 2) {
|
||
var result = avcdata.shift() + '.';
|
||
result += parseInt(avcdata.shift()).toString(16);
|
||
result += ('000' + parseInt(avcdata.shift()).toString(16)).substr(-4);
|
||
return result;
|
||
}
|
||
|
||
return codec;
|
||
};
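
// Worked example: 'avc1.66.30' (RFC 4281 style) becomes 'avc1.42001e', since 66 -> 0x42
// and 30 -> 0x1e padded to four hex digits; a two-part string such as 'avc1.64001f' is
// already in RFC 6381 form and is returned unchanged.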
|
||
|
||
M3U8Parser.resolve = function resolve(url, baseUrl) {
|
||
return url_toolkit__WEBPACK_IMPORTED_MODULE_1__["buildAbsoluteURL"](baseUrl, url, {
|
||
alwaysNormalize: true
|
||
});
|
||
};
|
||
|
||
M3U8Parser.parseMasterPlaylist = function parseMasterPlaylist(string, baseurl) {
|
||
var levels = [];
|
||
var sessionData = {};
|
||
var hasSessionData = false;
|
||
MASTER_PLAYLIST_REGEX.lastIndex = 0;
|
||
var result;
|
||
|
||
while ((result = MASTER_PLAYLIST_REGEX.exec(string)) != null) {
|
||
if (result[1]) {
|
||
// '#EXT-X-STREAM-INF' is found, parse level tag in group 1
|
||
var attrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](result[1]);
|
||
var level = {
|
||
attrs: attrs,
|
||
bitrate: attrs.decimalInteger('AVERAGE-BANDWIDTH') || attrs.decimalInteger('BANDWIDTH'),
|
||
name: attrs.NAME,
|
||
url: M3U8Parser.resolve(result[2], baseurl)
|
||
};
|
||
var resolution = attrs.decimalResolution('RESOLUTION');
|
||
|
||
if (resolution) {
|
||
level.width = resolution.width;
|
||
level.height = resolution.height;
|
||
}
|
||
|
||
setCodecs((attrs.CODECS || '').split(/[ ,]+/).filter(function (c) {
|
||
return c;
|
||
}), level);
|
||
|
||
if (level.videoCodec && level.videoCodec.indexOf('avc1') !== -1) {
|
||
level.videoCodec = M3U8Parser.convertAVC1ToAVCOTI(level.videoCodec);
|
||
}
|
||
|
||
levels.push(level);
|
||
} else if (result[3]) {
|
||
// '#EXT-X-SESSION-DATA' is found, parse session data in group 3
|
||
var sessionAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](result[3]);
|
||
|
||
if (sessionAttrs['DATA-ID']) {
|
||
hasSessionData = true;
|
||
sessionData[sessionAttrs['DATA-ID']] = sessionAttrs;
|
||
}
|
||
}
|
||
}
|
||
|
||
return {
|
||
levels: levels,
|
||
sessionData: hasSessionData ? sessionData : null
|
||
};
|
||
};
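
// Illustrative note (hypothetical manifest): given
//   #EXT-X-STREAM-INF:BANDWIDTH=1400000,RESOLUTION=1280x720,CODECS="avc1.64001f,mp4a.40.2"
//   chunklist_720p.m3u8
// parseMasterPlaylist would typically yield one level with bitrate 1400000, width 1280,
// height 720, videoCodec 'avc1.64001f', audioCodec 'mp4a.40.2', and url resolved against baseurl.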
|
||
|
||
M3U8Parser.parseMasterPlaylistMedia = function parseMasterPlaylistMedia(string, baseurl, type, groups) {
|
||
if (groups === void 0) {
|
||
groups = [];
|
||
}
|
||
|
||
var result;
|
||
var medias = [];
|
||
var id = 0;
|
||
MASTER_PLAYLIST_MEDIA_REGEX.lastIndex = 0;
|
||
|
||
while ((result = MASTER_PLAYLIST_MEDIA_REGEX.exec(string)) !== null) {
|
||
var attrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](result[1]);
|
||
|
||
if (attrs.TYPE === type) {
|
||
var media = {
|
||
attrs: attrs,
|
||
bitrate: 0,
|
||
id: id++,
|
||
groupId: attrs['GROUP-ID'],
|
||
instreamId: attrs['INSTREAM-ID'],
|
||
name: attrs.NAME || attrs.LANGUAGE || '',
|
||
type: type,
|
||
default: attrs.bool('DEFAULT'),
|
||
autoselect: attrs.bool('AUTOSELECT'),
|
||
forced: attrs.bool('FORCED'),
|
||
lang: attrs.LANGUAGE,
|
||
url: attrs.URI ? M3U8Parser.resolve(attrs.URI, baseurl) : ''
|
||
};
|
||
|
||
if (groups.length) {
|
||
// If there are audio or text groups signalled in the manifest, let's look for a matching codec string for this track
|
||
// If we don't find the track signalled, let's use the first audio group's codec we have
|
||
// Acting as a best guess
|
||
var groupCodec = M3U8Parser.findGroup(groups, media.groupId) || groups[0];
|
||
assignCodec(media, groupCodec, 'audioCodec');
|
||
assignCodec(media, groupCodec, 'textCodec');
|
||
}
|
||
|
||
medias.push(media);
|
||
}
|
||
}
|
||
|
||
return medias;
|
||
};
|
||
|
||
M3U8Parser.parseLevelPlaylist = function parseLevelPlaylist(string, baseurl, id, type, levelUrlId) {
|
||
var level = new _level_details__WEBPACK_IMPORTED_MODULE_3__["LevelDetails"](baseurl);
|
||
var fragments = level.fragments; // The most recent init segment seen (applies to all subsequent segments)
|
||
|
||
var currentInitSegment = null;
|
||
var currentSN = 0;
|
||
var currentPart = 0;
|
||
var totalduration = 0;
|
||
var discontinuityCounter = 0;
|
||
var prevFrag = null;
|
||
var frag = new _fragment__WEBPACK_IMPORTED_MODULE_2__["Fragment"](type, baseurl);
|
||
var result;
|
||
var i;
|
||
var levelkey;
|
||
var firstPdtIndex = -1;
|
||
var createNextFrag = false;
|
||
LEVEL_PLAYLIST_REGEX_FAST.lastIndex = 0;
|
||
level.m3u8 = string;
|
||
|
||
while ((result = LEVEL_PLAYLIST_REGEX_FAST.exec(string)) !== null) {
|
||
if (createNextFrag) {
|
||
createNextFrag = false;
|
||
frag = new _fragment__WEBPACK_IMPORTED_MODULE_2__["Fragment"](type, baseurl); // setup the next fragment for part loading
|
||
|
||
frag.start = totalduration;
|
||
frag.sn = currentSN;
|
||
frag.cc = discontinuityCounter;
|
||
frag.level = id;
|
||
|
||
if (currentInitSegment) {
|
||
frag.initSegment = currentInitSegment;
|
||
frag.rawProgramDateTime = currentInitSegment.rawProgramDateTime;
|
||
currentInitSegment.rawProgramDateTime = null;
|
||
}
|
||
}
|
||
|
||
var duration = result[1];
|
||
|
||
if (duration) {
|
||
// INF
|
||
frag.duration = parseFloat(duration); // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
|
||
|
||
var title = (' ' + result[2]).slice(1);
|
||
frag.title = title || null;
|
||
frag.tagList.push(title ? ['INF', duration, title] : ['INF', duration]);
|
||
} else if (result[3]) {
|
||
// url
|
||
if (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(frag.duration)) {
|
||
frag.start = totalduration;
|
||
|
||
if (levelkey) {
|
||
frag.levelkey = levelkey;
|
||
}
|
||
|
||
frag.sn = currentSN;
|
||
frag.level = id;
|
||
frag.cc = discontinuityCounter;
|
||
frag.urlId = levelUrlId;
|
||
fragments.push(frag); // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
|
||
|
||
frag.relurl = (' ' + result[3]).slice(1);
|
||
assignProgramDateTime(frag, prevFrag);
|
||
prevFrag = frag;
|
||
totalduration += frag.duration;
|
||
currentSN++;
|
||
currentPart = 0;
|
||
createNextFrag = true;
|
||
}
|
||
} else if (result[4]) {
|
||
// X-BYTERANGE
|
||
var data = (' ' + result[4]).slice(1);
|
||
|
||
if (prevFrag) {
|
||
frag.setByteRange(data, prevFrag);
|
||
} else {
|
||
frag.setByteRange(data);
|
||
}
|
||
} else if (result[5]) {
|
||
// PROGRAM-DATE-TIME
|
||
// avoid sliced strings https://github.com/video-dev/hls.js/issues/939
|
||
frag.rawProgramDateTime = (' ' + result[5]).slice(1);
|
||
frag.tagList.push(['PROGRAM-DATE-TIME', frag.rawProgramDateTime]);
|
||
|
||
if (firstPdtIndex === -1) {
|
||
firstPdtIndex = fragments.length;
|
||
}
|
||
} else {
|
||
result = result[0].match(LEVEL_PLAYLIST_REGEX_SLOW);
|
||
|
||
if (!result) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn('No matches on slow regex match for level playlist!');
|
||
continue;
|
||
}
|
||
|
||
for (i = 1; i < result.length; i++) {
|
||
if (typeof result[i] !== 'undefined') {
|
||
break;
|
||
}
|
||
} // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
|
||
|
||
|
||
var tag = (' ' + result[i]).slice(1);
|
||
var value1 = (' ' + result[i + 1]).slice(1);
|
||
var value2 = result[i + 2] ? (' ' + result[i + 2]).slice(1) : '';
|
||
|
||
switch (tag) {
|
||
case 'PLAYLIST-TYPE':
|
||
level.type = value1.toUpperCase();
|
||
break;
|
||
|
||
case 'MEDIA-SEQUENCE':
|
||
currentSN = level.startSN = parseInt(value1);
|
||
break;
|
||
|
||
case 'SKIP':
|
||
{
|
||
var skipAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
|
||
var skippedSegments = skipAttrs.decimalInteger('SKIPPED-SEGMENTS');
|
||
|
||
if (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(skippedSegments)) {
|
||
level.skippedSegments = skippedSegments; // This will result in fragments[] containing undefined values, which we will fill in with `mergeDetails`
|
||
|
||
for (var _i = skippedSegments; _i--;) {
|
||
fragments.unshift(null);
|
||
}
|
||
|
||
currentSN += skippedSegments;
|
||
}
|
||
|
||
var recentlyRemovedDateranges = skipAttrs.enumeratedString('RECENTLY-REMOVED-DATERANGES');
|
||
|
||
if (recentlyRemovedDateranges) {
|
||
level.recentlyRemovedDateranges = recentlyRemovedDateranges.split('\t');
|
||
}
|
||
|
||
break;
|
||
}
|
||
|
||
case 'TARGETDURATION':
|
||
level.targetduration = parseFloat(value1);
|
||
break;
|
||
|
||
case 'VERSION':
|
||
level.version = parseInt(value1);
|
||
break;
|
||
|
||
case 'EXTM3U':
|
||
break;
|
||
|
||
case 'ENDLIST':
|
||
level.live = false;
|
||
break;
|
||
|
||
case '#':
|
||
if (value1 || value2) {
|
||
frag.tagList.push(value2 ? [value1, value2] : [value1]);
|
||
}
|
||
|
||
break;
|
||
|
||
case 'DIS':
|
||
discontinuityCounter++;
|
||
|
||
/* falls through */
|
||
|
||
case 'GAP':
|
||
frag.tagList.push([tag]);
|
||
break;
|
||
|
||
case 'BITRATE':
|
||
frag.tagList.push([tag, value1]);
|
||
break;
|
||
|
||
case 'DISCONTINUITY-SEQ':
|
||
discontinuityCounter = parseInt(value1);
|
||
break;
|
||
|
||
case 'KEY':
|
||
{
|
||
var _keyAttrs$enumeratedS;
|
||
|
||
// https://tools.ietf.org/html/rfc8216#section-4.3.2.4
|
||
var keyAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
|
||
var decryptmethod = keyAttrs.enumeratedString('METHOD');
|
||
var decrypturi = keyAttrs.URI;
|
||
var decryptiv = keyAttrs.hexadecimalInteger('IV');
|
||
var decryptkeyformatversions = keyAttrs.enumeratedString('KEYFORMATVERSIONS');
|
||
var decryptkeyid = keyAttrs.enumeratedString('KEYID'); // From RFC: This attribute is OPTIONAL; its absence indicates an implicit value of "identity".
|
||
|
||
var decryptkeyformat = (_keyAttrs$enumeratedS = keyAttrs.enumeratedString('KEYFORMAT')) != null ? _keyAttrs$enumeratedS : 'identity';
|
||
var unsupportedKnownKeyformatsInManifest = ['com.apple.streamingkeydelivery', 'com.microsoft.playready', 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed', // widevine (v2)
|
||
'com.widevine' // earlier widevine (v1)
|
||
];
|
||
|
||
if (unsupportedKnownKeyformatsInManifest.indexOf(decryptkeyformat) > -1) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn("Keyformat " + decryptkeyformat + " is not supported from the manifest");
|
||
continue;
|
||
} else if (decryptkeyformat !== 'identity') {
|
||
// We are supposed to skip keys we don't understand.
|
||
// As we currently only officially support identity keys
|
||
// from the manifest we shouldn't save any other key.
|
||
continue;
|
||
} // TODO: multiple keys can be defined on a fragment, and we need to support this
|
||
// for clients that support both playready and widevine
|
||
|
||
|
||
if (decryptmethod) {
|
||
// TODO: need to determine if the level key is actually a relative URL
|
||
// if it isn't, then we should instead construct the LevelKey using fromURI.
|
||
levelkey = _level_key__WEBPACK_IMPORTED_MODULE_4__["LevelKey"].fromURL(baseurl, decrypturi);
|
||
|
||
if (decrypturi && ['AES-128', 'SAMPLE-AES', 'SAMPLE-AES-CENC'].indexOf(decryptmethod) >= 0) {
|
||
levelkey.method = decryptmethod;
|
||
levelkey.keyFormat = decryptkeyformat;
|
||
|
||
if (decryptkeyid) {
|
||
levelkey.keyID = decryptkeyid;
|
||
}
|
||
|
||
if (decryptkeyformatversions) {
|
||
levelkey.keyFormatVersions = decryptkeyformatversions;
|
||
} // Initialization Vector (IV)
|
||
|
||
|
||
levelkey.iv = decryptiv;
|
||
}
|
||
}
|
||
|
||
break;
|
||
}
|
||
|
||
case 'START':
{
var startAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
var startTimeOffset = startAttrs.decimalFloatingPoint('TIME-OFFSET'); // TIME-OFFSET can be 0

if (Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(startTimeOffset)) {
level.startTimeOffset = startTimeOffset;
}

break;
}

case 'MAP':
{
var mapAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
frag.relurl = mapAttrs.URI;

if (mapAttrs.BYTERANGE) {
frag.setByteRange(mapAttrs.BYTERANGE);
}

frag.level = id;
frag.sn = 'initSegment';

if (levelkey) {
frag.levelkey = levelkey;
}

frag.initSegment = null;
currentInitSegment = frag;
createNextFrag = true;
break;
}

case 'SERVER-CONTROL':
{
var serverControlAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
level.canBlockReload = serverControlAttrs.bool('CAN-BLOCK-RELOAD');
level.canSkipUntil = serverControlAttrs.optionalFloat('CAN-SKIP-UNTIL', 0);
level.canSkipDateRanges = level.canSkipUntil > 0 && serverControlAttrs.bool('CAN-SKIP-DATERANGES');
level.partHoldBack = serverControlAttrs.optionalFloat('PART-HOLD-BACK', 0);
level.holdBack = serverControlAttrs.optionalFloat('HOLD-BACK', 0);
break;
}

case 'PART-INF':
{
var partInfAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
level.partTarget = partInfAttrs.decimalFloatingPoint('PART-TARGET');
break;
}

case 'PART':
{
var partList = level.partList;

if (!partList) {
partList = level.partList = [];
}

var previousFragmentPart = currentPart > 0 ? partList[partList.length - 1] : undefined;
var index = currentPart++;
var part = new _fragment__WEBPACK_IMPORTED_MODULE_2__["Part"](new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1), frag, baseurl, index, previousFragmentPart);
partList.push(part);
frag.duration += part.duration;
break;
}

case 'PRELOAD-HINT':
{
var preloadHintAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
level.preloadHint = preloadHintAttrs;
break;
}

case 'RENDITION-REPORT':
{
var renditionReportAttrs = new _utils_attr_list__WEBPACK_IMPORTED_MODULE_5__["AttrList"](value1);
level.renditionReports = level.renditionReports || [];
level.renditionReports.push(renditionReportAttrs);
break;
}

default:
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn("line parsed but not handled: " + result);
break;
}
}
}

if (prevFrag && !prevFrag.relurl) {
fragments.pop();
totalduration -= prevFrag.duration;

if (level.partList) {
level.fragmentHint = prevFrag;
}
} else if (level.partList) {
assignProgramDateTime(frag, prevFrag);
frag.cc = discontinuityCounter;
level.fragmentHint = frag;
}

var fragmentLength = fragments.length;
var firstFragment = fragments[0];
var lastFragment = fragments[fragmentLength - 1];
totalduration += level.skippedSegments * level.targetduration;

if (totalduration > 0 && fragmentLength && lastFragment) {
level.averagetargetduration = totalduration / fragmentLength;
var lastSn = lastFragment.sn;
level.endSN = lastSn !== 'initSegment' ? lastSn : 0;

if (firstFragment) {
level.startCC = firstFragment.cc;

if (!firstFragment.initSegment) {
// this is a bit lurky but HLS really has no other way to tell us
// if the fragments are TS or MP4, except if we download them :/
// but this is to be able to handle SIDX.
if (level.fragments.every(function (frag) {
return frag.relurl && isMP4Url(frag.relurl);
})) {
_utils_logger__WEBPACK_IMPORTED_MODULE_6__["logger"].warn('MP4 fragments found but no init segment (probably no MAP, incomplete M3U8), trying to fetch SIDX');
frag = new _fragment__WEBPACK_IMPORTED_MODULE_2__["Fragment"](type, baseurl);
frag.relurl = lastFragment.relurl;
frag.level = id;
frag.sn = 'initSegment';
firstFragment.initSegment = frag;
level.needSidxRanges = true;
}
}
}
} else {
level.endSN = 0;
level.startCC = 0;
}

if (level.fragmentHint) {
totalduration += level.fragmentHint.duration;
}

level.totalduration = totalduration;
level.endCC = discontinuityCounter;
/**
* Backfill any missing PDT values
* "If the first EXT-X-PROGRAM-DATE-TIME tag in a Playlist appears after
* one or more Media Segment URIs, the client SHOULD extrapolate
* backward from that tag (using EXTINF durations and/or media
* timestamps) to associate dates with those segments."
* We have already extrapolated forward, but all fragments up to the first instance of PDT do not have their PDTs
* computed.
*/

if (firstPdtIndex > 0) {
backfillProgramDateTimes(fragments, firstPdtIndex);
}

return level;
};

return M3U8Parser;
}();


function setCodecs(codecs, level) {
['video', 'audio', 'text'].forEach(function (type) {
var filtered = codecs.filter(function (codec) {
return Object(_utils_codecs__WEBPACK_IMPORTED_MODULE_7__["isCodecType"])(codec, type);
});

if (filtered.length) {
var preferred = filtered.filter(function (codec) {
return codec.lastIndexOf('avc1', 0) === 0 || codec.lastIndexOf('mp4a', 0) === 0;
});
level[type + "Codec"] = preferred.length > 0 ? preferred[0] : filtered[0]; // remove from list

codecs = codecs.filter(function (codec) {
return filtered.indexOf(codec) === -1;
});
}
});
level.unknownCodecs = codecs;
}

function assignCodec(media, groupItem, codecProperty) {
var codecValue = groupItem[codecProperty];

if (codecValue) {
media[codecProperty] = codecValue;
}
}

function backfillProgramDateTimes(fragments, firstPdtIndex) {
var fragPrev = fragments[firstPdtIndex];

for (var i = firstPdtIndex; i--;) {
var frag = fragments[i]; // Exit on delta-playlist skipped segments

if (!frag) {
return;
}

frag.programDateTime = fragPrev.programDateTime - frag.duration * 1000;
fragPrev = frag;
}
}

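// Worked example of the backward extrapolation above (illustrative values only,
// not part of the library): with 6 s EXTINF durations and the first
// EXT-X-PROGRAM-DATE-TIME parsed on fragments[2] as 2020-01-01T00:00:12.000Z,
// backfillProgramDateTimes(fragments, 2) assigns fragments[1]
// 2020-01-01T00:00:06.000Z and fragments[0] 2020-01-01T00:00:00.000Z, i.e. each
// previous programDateTime (in epoch milliseconds) minus frag.duration * 1000.
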
function assignProgramDateTime(frag, prevFrag) {
if (frag.rawProgramDateTime) {
frag.programDateTime = Date.parse(frag.rawProgramDateTime);
} else if (prevFrag !== null && prevFrag !== void 0 && prevFrag.programDateTime) {
frag.programDateTime = prevFrag.endProgramDateTime;
}

if (!Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(frag.programDateTime)) {
frag.programDateTime = null;
frag.rawProgramDateTime = null;
}
}

/***/ }),

/***/ "./src/loader/playlist-loader.ts":
/*!***************************************!*\
!*** ./src/loader/playlist-loader.ts ***!
\***************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../events */ "./src/events.ts");
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
/* harmony import */ var _m3u8_parser__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./m3u8-parser */ "./src/loader/m3u8-parser.ts");
/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
/* harmony import */ var _utils_attr_list__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../utils/attr-list */ "./src/utils/attr-list.ts");



/**
* PlaylistLoader - delegate for media manifest/playlist loading tasks. Takes care of parsing media to internal data-models.
*
* Once loaded, dispatches events with parsed data-models of manifest/levels/audio/subtitle tracks.
*
* Uses loader(s) set in config to do actual internal loading of resource tasks.
*
* @module
*
*/



function mapContextToLevelType(context) {
var type = context.type;

switch (type) {
case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK:
return _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO;

case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK:
return _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].SUBTITLE;

default:
return _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].MAIN;
}
}

function getResponseUrl(response, context) {
var url = response.url; // responseURL not supported on some browsers (it is used to detect URL redirection)
// data-uri mode also not supported (but no need to detect redirection)

if (url === undefined || url.indexOf('data:') === 0) {
// fallback to initial URL
url = context.url;
}

return url;
}

var PlaylistLoader = /*#__PURE__*/function () {
|
||
function PlaylistLoader(hls) {
|
||
this.hls = void 0;
|
||
this.loaders = Object.create(null);
|
||
this.hls = hls;
|
||
this.registerListeners();
|
||
}
|
||
|
||
var _proto = PlaylistLoader.prototype;
|
||
|
||
_proto.registerListeners = function registerListeners() {
|
||
var hls = this.hls;
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].AUDIO_TRACK_LOADING, this.onAudioTrackLoading, this);
|
||
hls.on(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACK_LOADING, this.onSubtitleTrackLoading, this);
|
||
};
|
||
|
||
_proto.unregisterListeners = function unregisterListeners() {
|
||
var hls = this.hls;
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADING, this.onManifestLoading, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_LOADING, this.onLevelLoading, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].AUDIO_TRACK_LOADING, this.onAudioTrackLoading, this);
|
||
hls.off(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACK_LOADING, this.onSubtitleTrackLoading, this);
|
||
}
|
||
/**
|
||
* Returns defaults or configured loader-type overloads (pLoader and loader config params)
|
||
*/
|
||
;
|
||
|
||
_proto.createInternalLoader = function createInternalLoader(context) {
|
||
var config = this.hls.config;
|
||
var PLoader = config.pLoader;
|
||
var Loader = config.loader;
|
||
var InternalLoader = PLoader || Loader;
|
||
var loader = new InternalLoader(config);
|
||
context.loader = loader;
|
||
this.loaders[context.type] = loader;
|
||
return loader;
|
||
};
|
||
|
||
_proto.getInternalLoader = function getInternalLoader(context) {
|
||
return this.loaders[context.type];
|
||
};
|
||
|
||
_proto.resetInternalLoader = function resetInternalLoader(contextType) {
|
||
if (this.loaders[contextType]) {
|
||
delete this.loaders[contextType];
|
||
}
|
||
}
|
||
/**
|
||
* Call `destroy` on all internal loader instances mapped (one per context type)
|
||
*/
|
||
;
|
||
|
||
_proto.destroyInternalLoaders = function destroyInternalLoaders() {
|
||
for (var contextType in this.loaders) {
|
||
var loader = this.loaders[contextType];
|
||
|
||
if (loader) {
|
||
loader.destroy();
|
||
}
|
||
|
||
this.resetInternalLoader(contextType);
|
||
}
|
||
};
|
||
|
||
_proto.destroy = function destroy() {
|
||
this.unregisterListeners();
|
||
this.destroyInternalLoaders();
|
||
};
|
||
|
||
_proto.onManifestLoading = function onManifestLoading(event, data) {
|
||
var url = data.url;
|
||
this.load({
|
||
id: null,
|
||
groupId: null,
|
||
level: 0,
|
||
responseType: 'text',
|
||
type: _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].MANIFEST,
|
||
url: url,
|
||
deliveryDirectives: null
|
||
});
|
||
};
|
||
|
||
_proto.onLevelLoading = function onLevelLoading(event, data) {
|
||
var id = data.id,
|
||
level = data.level,
|
||
url = data.url,
|
||
deliveryDirectives = data.deliveryDirectives;
|
||
this.load({
|
||
id: id,
|
||
groupId: null,
|
||
level: level,
|
||
responseType: 'text',
|
||
type: _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].LEVEL,
|
||
url: url,
|
||
deliveryDirectives: deliveryDirectives
|
||
});
|
||
};
|
||
|
||
_proto.onAudioTrackLoading = function onAudioTrackLoading(event, data) {
|
||
var id = data.id,
|
||
groupId = data.groupId,
|
||
url = data.url,
|
||
deliveryDirectives = data.deliveryDirectives;
|
||
this.load({
|
||
id: id,
|
||
groupId: groupId,
|
||
level: null,
|
||
responseType: 'text',
|
||
type: _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK,
|
||
url: url,
|
||
deliveryDirectives: deliveryDirectives
|
||
});
|
||
};
|
||
|
||
_proto.onSubtitleTrackLoading = function onSubtitleTrackLoading(event, data) {
|
||
var id = data.id,
|
||
groupId = data.groupId,
|
||
url = data.url,
|
||
deliveryDirectives = data.deliveryDirectives;
|
||
this.load({
|
||
id: id,
|
||
groupId: groupId,
|
||
level: null,
|
||
responseType: 'text',
|
||
type: _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK,
|
||
url: url,
|
||
deliveryDirectives: deliveryDirectives
|
||
});
|
||
};
|
||
|
||
_proto.load = function load(context) {
|
||
var _context$deliveryDire;
|
||
|
||
var config = this.hls.config; // logger.debug(`[playlist-loader]: Loading playlist of type ${context.type}, level: ${context.level}, id: ${context.id}`);
|
||
// Check if a loader for this context already exists
|
||
|
||
var loader = this.getInternalLoader(context);
|
||
|
||
if (loader) {
|
||
var loaderContext = loader.context;
|
||
|
||
if (loaderContext && loaderContext.url === context.url) {
|
||
// same URL can't overlap
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].trace('[playlist-loader]: playlist request ongoing');
|
||
return;
|
||
}
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].log("[playlist-loader]: aborting previous loader for type: " + context.type);
|
||
loader.abort();
|
||
}
|
||
|
||
var maxRetry;
|
||
var timeout;
|
||
var retryDelay;
|
||
var maxRetryDelay; // apply different configs for retries depending on
|
||
// context (manifest, level, audio/subs playlist)
|
||
|
||
switch (context.type) {
|
||
case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].MANIFEST:
|
||
maxRetry = config.manifestLoadingMaxRetry;
|
||
timeout = config.manifestLoadingTimeOut;
|
||
retryDelay = config.manifestLoadingRetryDelay;
|
||
maxRetryDelay = config.manifestLoadingMaxRetryTimeout;
|
||
break;
|
||
|
||
case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].LEVEL:
|
||
case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK:
|
||
case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK:
|
||
// Manage retries in Level/Track Controller
|
||
maxRetry = 0;
|
||
timeout = config.levelLoadingTimeOut;
|
||
break;
|
||
|
||
default:
|
||
maxRetry = config.levelLoadingMaxRetry;
|
||
timeout = config.levelLoadingTimeOut;
|
||
retryDelay = config.levelLoadingRetryDelay;
|
||
maxRetryDelay = config.levelLoadingMaxRetryTimeout;
|
||
break;
|
||
}
|
||
|
||
loader = this.createInternalLoader(context); // Override level/track timeout for LL-HLS requests
|
||
// (the default of 10000ms is counter productive to blocking playlist reload requests)
|
||
|
||
if ((_context$deliveryDire = context.deliveryDirectives) !== null && _context$deliveryDire !== void 0 && _context$deliveryDire.part) {
|
||
var levelDetails;
|
||
|
||
if (context.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].LEVEL && context.level !== null) {
|
||
levelDetails = this.hls.levels[context.level].details;
|
||
} else if (context.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK && context.id !== null) {
|
||
levelDetails = this.hls.audioTracks[context.id].details;
|
||
} else if (context.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK && context.id !== null) {
|
||
levelDetails = this.hls.subtitleTracks[context.id].details;
|
||
}
|
||
|
||
if (levelDetails) {
|
||
var partTarget = levelDetails.partTarget;
|
||
var targetDuration = levelDetails.targetduration;
|
||
|
||
if (partTarget && targetDuration) {
|
||
timeout = Math.min(Math.max(partTarget * 3, targetDuration * 0.8) * 1000, timeout);
|
||
}
|
||
}
|
||
}
|
||
|
||
var loaderConfig = {
|
||
timeout: timeout,
|
||
maxRetry: maxRetry,
|
||
retryDelay: retryDelay,
|
||
maxRetryDelay: maxRetryDelay,
|
||
highWaterMark: 0
|
||
};
|
||
var loaderCallbacks = {
|
||
onSuccess: this.loadsuccess.bind(this),
|
||
onError: this.loaderror.bind(this),
|
||
onTimeout: this.loadtimeout.bind(this)
|
||
}; // logger.debug(`[playlist-loader]: Calling internal loader delegate for URL: ${context.url}`);
|
||
|
||
loader.load(context, loaderConfig, loaderCallbacks);
|
||
};
|
||
|
||
_proto.loadsuccess = function loadsuccess(response, stats, context, networkDetails) {
|
||
if (networkDetails === void 0) {
|
||
networkDetails = null;
|
||
}
|
||
|
||
if (context.isSidxRequest) {
|
||
this.handleSidxRequest(response, context);
|
||
this.handlePlaylistLoaded(response, stats, context, networkDetails);
|
||
return;
|
||
}
|
||
|
||
this.resetInternalLoader(context.type);
|
||
var string = response.data; // Validate if it is an M3U8 at all
|
||
|
||
if (string.indexOf('#EXTM3U') !== 0) {
|
||
this.handleManifestParsingError(response, context, 'no EXTM3U delimiter', networkDetails);
|
||
return;
|
||
}
|
||
|
||
stats.parsing.start = performance.now(); // Check if chunk-list or master. handle empty chunk list case (first EXTINF not signaled, but TARGETDURATION present)
|
||
|
||
if (string.indexOf('#EXTINF:') > 0 || string.indexOf('#EXT-X-TARGETDURATION:') > 0) {
|
||
this.handleTrackOrLevelPlaylist(response, stats, context, networkDetails);
|
||
} else {
|
||
this.handleMasterPlaylist(response, stats, context, networkDetails);
|
||
}
|
||
};
|
||
|
||
_proto.loaderror = function loaderror(response, context, networkDetails) {
|
||
if (networkDetails === void 0) {
|
||
networkDetails = null;
|
||
}
|
||
|
||
this.handleNetworkError(context, networkDetails, false, response);
|
||
};
|
||
|
||
_proto.loadtimeout = function loadtimeout(stats, context, networkDetails) {
|
||
if (networkDetails === void 0) {
|
||
networkDetails = null;
|
||
}
|
||
|
||
this.handleNetworkError(context, networkDetails, true);
|
||
};
|
||
|
||
_proto.handleMasterPlaylist = function handleMasterPlaylist(response, stats, context, networkDetails) {
|
||
var hls = this.hls;
|
||
var string = response.data;
|
||
var url = getResponseUrl(response, context);
|
||
|
||
var _M3U8Parser$parseMast = _m3u8_parser__WEBPACK_IMPORTED_MODULE_5__["default"].parseMasterPlaylist(string, url),
|
||
levels = _M3U8Parser$parseMast.levels,
|
||
sessionData = _M3U8Parser$parseMast.sessionData;
|
||
|
||
if (!levels.length) {
|
||
this.handleManifestParsingError(response, context, 'no level found in manifest', networkDetails);
|
||
return;
|
||
} // multi level playlist, parse level info
|
||
|
||
|
||
var audioGroups = levels.map(function (level) {
|
||
return {
|
||
id: level.attrs.AUDIO,
|
||
audioCodec: level.audioCodec
|
||
};
|
||
});
|
||
var subtitleGroups = levels.map(function (level) {
|
||
return {
|
||
id: level.attrs.SUBTITLES,
|
||
textCodec: level.textCodec
|
||
};
|
||
});
|
||
var audioTracks = _m3u8_parser__WEBPACK_IMPORTED_MODULE_5__["default"].parseMasterPlaylistMedia(string, url, 'AUDIO', audioGroups);
|
||
var subtitles = _m3u8_parser__WEBPACK_IMPORTED_MODULE_5__["default"].parseMasterPlaylistMedia(string, url, 'SUBTITLES', subtitleGroups);
|
||
var captions = _m3u8_parser__WEBPACK_IMPORTED_MODULE_5__["default"].parseMasterPlaylistMedia(string, url, 'CLOSED-CAPTIONS');
|
||
|
||
if (audioTracks.length) {
|
||
// check if we have found an audio track embedded in main playlist (audio track without URI attribute)
|
||
var embeddedAudioFound = audioTracks.some(function (audioTrack) {
|
||
return !audioTrack.url;
|
||
}); // if no embedded audio track defined, but audio codec signaled in quality level,
|
||
// we need to signal this main audio track this could happen with playlists with
|
||
// alt audio rendition in which quality levels (main)
|
||
// contains both audio+video. but with mixed audio track not signaled
|
||
|
||
if (!embeddedAudioFound && levels[0].audioCodec && !levels[0].attrs.AUDIO) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');
|
||
audioTracks.unshift({
|
||
type: 'main',
|
||
name: 'main',
|
||
default: false,
|
||
autoselect: false,
|
||
forced: false,
|
||
id: -1,
|
||
attrs: new _utils_attr_list__WEBPACK_IMPORTED_MODULE_7__["AttrList"]({}),
|
||
bitrate: 0,
|
||
url: ''
|
||
});
|
||
}
|
||
}
|
||
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADED, {
|
||
levels: levels,
|
||
audioTracks: audioTracks,
|
||
subtitles: subtitles,
|
||
captions: captions,
|
||
url: url,
|
||
stats: stats,
|
||
networkDetails: networkDetails,
|
||
sessionData: sessionData
|
||
});
|
||
};
|
||
|
||
_proto.handleTrackOrLevelPlaylist = function handleTrackOrLevelPlaylist(response, stats, context, networkDetails) {
|
||
var hls = this.hls;
|
||
var id = context.id,
|
||
level = context.level,
|
||
type = context.type;
|
||
var url = getResponseUrl(response, context);
|
||
var levelUrlId = Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(id) ? id : 0;
|
||
var levelId = Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(level) ? level : levelUrlId;
|
||
var levelType = mapContextToLevelType(context);
|
||
var levelDetails = _m3u8_parser__WEBPACK_IMPORTED_MODULE_5__["default"].parseLevelPlaylist(response.data, url, levelId, levelType, levelUrlId);
|
||
|
||
if (!levelDetails.fragments.length) {
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorTypes"].NETWORK_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].LEVEL_EMPTY_ERROR,
|
||
fatal: false,
|
||
url: url,
|
||
reason: 'no fragments found in level',
|
||
level: typeof context.level === 'number' ? context.level : undefined
|
||
});
|
||
return;
|
||
} // We have done our first request (Manifest-type) and receive
|
||
// not a master playlist but a chunk-list (track/level)
|
||
// We fire the manifest-loaded event anyway with the parsed level-details
|
||
// by creating a single-level structure for it.
|
||
|
||
|
||
if (type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].MANIFEST) {
|
||
var singleLevel = {
|
||
attrs: new _utils_attr_list__WEBPACK_IMPORTED_MODULE_7__["AttrList"]({}),
|
||
bitrate: 0,
|
||
details: levelDetails,
|
||
name: '',
|
||
url: url
|
||
};
|
||
hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].MANIFEST_LOADED, {
|
||
levels: [singleLevel],
|
||
audioTracks: [],
|
||
url: url,
|
||
stats: stats,
|
||
networkDetails: networkDetails,
|
||
sessionData: null
|
||
});
|
||
} // save parsing time
|
||
|
||
|
||
stats.parsing.end = performance.now(); // in case we need SIDX ranges
|
||
// return early after calling load for
|
||
// the SIDX box.
|
||
|
||
if (levelDetails.needSidxRanges) {
|
||
var _levelDetails$fragmen;
|
||
|
||
var sidxUrl = (_levelDetails$fragmen = levelDetails.fragments[0].initSegment) === null || _levelDetails$fragmen === void 0 ? void 0 : _levelDetails$fragmen.url;
|
||
this.load({
|
||
url: sidxUrl,
|
||
isSidxRequest: true,
|
||
type: type,
|
||
level: level,
|
||
levelDetails: levelDetails,
|
||
id: id,
|
||
groupId: null,
|
||
rangeStart: 0,
|
||
rangeEnd: 2048,
|
||
responseType: 'arraybuffer',
|
||
deliveryDirectives: null
|
||
});
|
||
return;
|
||
} // extend the context with the new levelDetails property
|
||
|
||
|
||
context.levelDetails = levelDetails;
|
||
this.handlePlaylistLoaded(response, stats, context, networkDetails);
|
||
};
|
||
|
||
_proto.handleSidxRequest = function handleSidxRequest(response, context) {
|
||
var sidxInfo = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_4__["parseSegmentIndex"])(new Uint8Array(response.data)); // if provided fragment does not contain sidx, early return
|
||
|
||
if (!sidxInfo) {
|
||
return;
|
||
}
|
||
|
||
var sidxReferences = sidxInfo.references;
|
||
var levelDetails = context.levelDetails;
|
||
sidxReferences.forEach(function (segmentRef, index) {
|
||
var segRefInfo = segmentRef.info;
|
||
var frag = levelDetails.fragments[index];
|
||
|
||
if (frag.byteRange.length === 0) {
|
||
frag.setByteRange(String(1 + segRefInfo.end - segRefInfo.start) + '@' + String(segRefInfo.start));
|
||
}
|
||
|
||
if (frag.initSegment) {
|
||
frag.initSegment.setByteRange(String(sidxInfo.moovEndOffset) + '@0');
|
||
}
|
||
});
|
||
};
|
||
|
||
_proto.handleManifestParsingError = function handleManifestParsingError(response, context, reason, networkDetails) {
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorTypes"].NETWORK_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].MANIFEST_PARSING_ERROR,
|
||
fatal: context.type === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].MANIFEST,
|
||
url: response.url,
|
||
reason: reason,
|
||
response: response,
|
||
context: context,
|
||
networkDetails: networkDetails
|
||
});
|
||
};
|
||
|
||
_proto.handleNetworkError = function handleNetworkError(context, networkDetails, timeout, response) {
|
||
if (timeout === void 0) {
|
||
timeout = false;
|
||
}
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_3__["logger"].warn("[playlist-loader]: A network " + (timeout ? 'timeout' : 'error') + " occurred while loading " + context.type + " level: " + context.level + " id: " + context.id + " group-id: \"" + context.groupId + "\"");
|
||
var details = _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].UNKNOWN;
|
||
var fatal = false;
|
||
var loader = this.getInternalLoader(context);
|
||
|
||
switch (context.type) {
|
||
case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].MANIFEST:
|
||
details = timeout ? _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].MANIFEST_LOAD_TIMEOUT : _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].MANIFEST_LOAD_ERROR;
|
||
fatal = true;
|
||
break;
|
||
|
||
case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].LEVEL:
|
||
details = timeout ? _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].LEVEL_LOAD_TIMEOUT : _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].LEVEL_LOAD_ERROR;
|
||
fatal = false;
|
||
break;
|
||
|
||
case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK:
|
||
details = timeout ? _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].AUDIO_TRACK_LOAD_TIMEOUT : _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].AUDIO_TRACK_LOAD_ERROR;
|
||
fatal = false;
|
||
break;
|
||
|
||
case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK:
|
||
details = timeout ? _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].SUBTITLE_TRACK_LOAD_TIMEOUT : _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorDetails"].SUBTITLE_LOAD_ERROR;
|
||
fatal = false;
|
||
break;
|
||
}
|
||
|
||
if (loader) {
|
||
this.resetInternalLoader(context.type);
|
||
}
|
||
|
||
var errorData = {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_2__["ErrorTypes"].NETWORK_ERROR,
|
||
details: details,
|
||
fatal: fatal,
|
||
url: context.url,
|
||
loader: loader,
|
||
context: context,
|
||
networkDetails: networkDetails
|
||
};
|
||
|
||
if (response) {
|
||
errorData.response = response;
|
||
}
|
||
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].ERROR, errorData);
|
||
};
|
||
|
||
_proto.handlePlaylistLoaded = function handlePlaylistLoaded(response, stats, context, networkDetails) {
|
||
var type = context.type,
|
||
level = context.level,
|
||
id = context.id,
|
||
groupId = context.groupId,
|
||
loader = context.loader,
|
||
levelDetails = context.levelDetails,
|
||
deliveryDirectives = context.deliveryDirectives;
|
||
|
||
if (!(levelDetails !== null && levelDetails !== void 0 && levelDetails.targetduration)) {
|
||
this.handleManifestParsingError(response, context, 'invalid target duration', networkDetails);
|
||
return;
|
||
}
|
||
|
||
if (!loader) {
|
||
return;
|
||
}
|
||
|
||
if (levelDetails.live) {
|
||
if (loader.getCacheAge) {
|
||
levelDetails.ageHeader = loader.getCacheAge() || 0;
|
||
}
|
||
|
||
if (!loader.getCacheAge || isNaN(levelDetails.ageHeader)) {
|
||
levelDetails.ageHeader = 0;
|
||
}
|
||
}
|
||
|
||
switch (type) {
|
||
case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].MANIFEST:
|
||
case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].LEVEL:
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].LEVEL_LOADED, {
|
||
details: levelDetails,
|
||
level: level || 0,
|
||
id: id || 0,
|
||
stats: stats,
|
||
networkDetails: networkDetails,
|
||
deliveryDirectives: deliveryDirectives
|
||
});
|
||
break;
|
||
|
||
case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].AUDIO_TRACK:
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].AUDIO_TRACK_LOADED, {
|
||
details: levelDetails,
|
||
id: id || 0,
|
||
groupId: groupId || '',
|
||
stats: stats,
|
||
networkDetails: networkDetails,
|
||
deliveryDirectives: deliveryDirectives
|
||
});
|
||
break;
|
||
|
||
case _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistContextType"].SUBTITLE_TRACK:
|
||
this.hls.trigger(_events__WEBPACK_IMPORTED_MODULE_1__["Events"].SUBTITLE_TRACK_LOADED, {
|
||
details: levelDetails,
|
||
id: id || 0,
|
||
groupId: groupId || '',
|
||
stats: stats,
|
||
networkDetails: networkDetails,
|
||
deliveryDirectives: deliveryDirectives
|
||
});
|
||
break;
|
||
}
|
||
};
|
||
|
||
return PlaylistLoader;
|
||
}();
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (PlaylistLoader);
|
||
|
||
/***/ }),

/***/ "./src/polyfills/number.ts":
/*!*********************************!*\
!*** ./src/polyfills/number.ts ***!
\*********************************/
/*! exports provided: isFiniteNumber, MAX_SAFE_INTEGER */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isFiniteNumber", function() { return isFiniteNumber; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "MAX_SAFE_INTEGER", function() { return MAX_SAFE_INTEGER; });
var isFiniteNumber = Number.isFinite || function (value) {
return typeof value === 'number' && isFinite(value);
};
var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991;

/***/ }),
|
||
|
||
/***/ "./src/remux/aac-helper.ts":
|
||
/*!*********************************!*\
|
||
!*** ./src/remux/aac-helper.ts ***!
|
||
\*********************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/**
|
||
* AAC helper
|
||
*/
|
||
var AAC = /*#__PURE__*/function () {
|
||
function AAC() {}
|
||
|
||
AAC.getSilentFrame = function getSilentFrame(codec, channelCount) {
|
||
switch (codec) {
|
||
case 'mp4a.40.2':
|
||
if (channelCount === 1) {
|
||
return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x23, 0x80]);
|
||
} else if (channelCount === 2) {
|
||
return new Uint8Array([0x21, 0x00, 0x49, 0x90, 0x02, 0x19, 0x00, 0x23, 0x80]);
|
||
} else if (channelCount === 3) {
|
||
return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x8e]);
|
||
} else if (channelCount === 4) {
|
||
return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x80, 0x2c, 0x80, 0x08, 0x02, 0x38]);
|
||
} else if (channelCount === 5) {
|
||
return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x38]);
|
||
} else if (channelCount === 6) {
|
||
return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x00, 0xb2, 0x00, 0x20, 0x08, 0xe0]);
|
||
}
|
||
|
||
break;
|
||
// handle HE-AAC below (mp4a.40.5 / mp4a.40.29)
|
||
|
||
default:
|
||
if (channelCount === 1) {
|
||
// ffmpeg -y -f lavfi -i "aevalsrc=0:d=0.05" -c:a libfdk_aac -profile:a aac_he -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
|
||
return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x4e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x1c, 0x6, 0xf1, 0xc1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]);
|
||
} else if (channelCount === 2) {
|
||
// ffmpeg -y -f lavfi -i "aevalsrc=0|0:d=0.05" -c:a libfdk_aac -profile:a aac_he_v2 -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
|
||
return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x0, 0x95, 0x0, 0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]);
|
||
} else if (channelCount === 3) {
|
||
// ffmpeg -y -f lavfi -i "aevalsrc=0|0|0:d=0.05" -c:a libfdk_aac -profile:a aac_he_v2 -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
|
||
return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x0, 0x95, 0x0, 0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]);
|
||
}
|
||
|
||
break;
|
||
}
|
||
|
||
return undefined;
|
||
};
|
||
|
||
return AAC;
|
||
}();
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (AAC);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/remux/mp4-generator.ts":
|
||
/*!************************************!*\
|
||
!*** ./src/remux/mp4-generator.ts ***!
|
||
\************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/**
|
||
* Generate MP4 Box
|
||
*/
|
||
var UINT32_MAX = Math.pow(2, 32) - 1;
|
||
|
||
var MP4 = /*#__PURE__*/function () {
|
||
function MP4() {}
|
||
|
||
MP4.init = function init() {
|
||
MP4.types = {
|
||
avc1: [],
|
||
// codingname
|
||
avcC: [],
|
||
btrt: [],
|
||
dinf: [],
|
||
dref: [],
|
||
esds: [],
|
||
ftyp: [],
|
||
hdlr: [],
|
||
mdat: [],
|
||
mdhd: [],
|
||
mdia: [],
|
||
mfhd: [],
|
||
minf: [],
|
||
moof: [],
|
||
moov: [],
|
||
mp4a: [],
|
||
'.mp3': [],
|
||
mvex: [],
|
||
mvhd: [],
|
||
pasp: [],
|
||
sdtp: [],
|
||
stbl: [],
|
||
stco: [],
|
||
stsc: [],
|
||
stsd: [],
|
||
stsz: [],
|
||
stts: [],
|
||
tfdt: [],
|
||
tfhd: [],
|
||
traf: [],
|
||
trak: [],
|
||
trun: [],
|
||
trex: [],
|
||
tkhd: [],
|
||
vmhd: [],
|
||
smhd: []
|
||
};
|
||
var i;
|
||
|
||
for (i in MP4.types) {
|
||
if (MP4.types.hasOwnProperty(i)) {
|
||
MP4.types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
|
||
}
|
||
}
|
||
|
||
var videoHdlr = new Uint8Array([0x00, // version 0
|
||
0x00, 0x00, 0x00, // flags
|
||
0x00, 0x00, 0x00, 0x00, // pre_defined
|
||
0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
|
||
0x00, 0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x00, 0x00, 0x00, // reserved
|
||
0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
|
||
]);
|
||
var audioHdlr = new Uint8Array([0x00, // version 0
|
||
0x00, 0x00, 0x00, // flags
|
||
0x00, 0x00, 0x00, 0x00, // pre_defined
|
||
0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
|
||
0x00, 0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x00, 0x00, 0x00, // reserved
|
||
0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
|
||
]);
|
||
MP4.HDLR_TYPES = {
|
||
video: videoHdlr,
|
||
audio: audioHdlr
|
||
};
|
||
var dref = new Uint8Array([0x00, // version 0
|
||
0x00, 0x00, 0x00, // flags
|
||
0x00, 0x00, 0x00, 0x01, // entry_count
|
||
0x00, 0x00, 0x00, 0x0c, // entry_size
|
||
0x75, 0x72, 0x6c, 0x20, // 'url' type
|
||
0x00, // version 0
|
||
0x00, 0x00, 0x01 // entry_flags
|
||
]);
|
||
var stco = new Uint8Array([0x00, // version
|
||
0x00, 0x00, 0x00, // flags
|
||
0x00, 0x00, 0x00, 0x00 // entry_count
|
||
]);
|
||
MP4.STTS = MP4.STSC = MP4.STCO = stco;
|
||
MP4.STSZ = new Uint8Array([0x00, // version
|
||
0x00, 0x00, 0x00, // flags
|
||
0x00, 0x00, 0x00, 0x00, // sample_size
|
||
0x00, 0x00, 0x00, 0x00 // sample_count
|
||
]);
|
||
MP4.VMHD = new Uint8Array([0x00, // version
|
||
0x00, 0x00, 0x01, // flags
|
||
0x00, 0x00, // graphicsmode
|
||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
|
||
]);
|
||
MP4.SMHD = new Uint8Array([0x00, // version
|
||
0x00, 0x00, 0x00, // flags
|
||
0x00, 0x00, // balance
|
||
0x00, 0x00 // reserved
|
||
]);
|
||
MP4.STSD = new Uint8Array([0x00, // version 0
|
||
0x00, 0x00, 0x00, // flags
|
||
0x00, 0x00, 0x00, 0x01]); // entry_count
|
||
|
||
var majorBrand = new Uint8Array([105, 115, 111, 109]); // isom
|
||
|
||
var avc1Brand = new Uint8Array([97, 118, 99, 49]); // avc1
|
||
|
||
var minorVersion = new Uint8Array([0, 0, 0, 1]);
|
||
MP4.FTYP = MP4.box(MP4.types.ftyp, majorBrand, minorVersion, majorBrand, avc1Brand);
|
||
MP4.DINF = MP4.box(MP4.types.dinf, MP4.box(MP4.types.dref, dref));
|
||
};
|
||
|
||
MP4.box = function box(type) {
|
||
var size = 8;
|
||
|
||
for (var _len = arguments.length, payload = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
|
||
payload[_key - 1] = arguments[_key];
|
||
}
|
||
|
||
var i = payload.length;
|
||
var len = i; // calculate the total size we need to allocate
|
||
|
||
while (i--) {
|
||
size += payload[i].byteLength;
|
||
}
|
||
|
||
var result = new Uint8Array(size);
|
||
result[0] = size >> 24 & 0xff;
|
||
result[1] = size >> 16 & 0xff;
|
||
result[2] = size >> 8 & 0xff;
|
||
result[3] = size & 0xff;
|
||
result.set(type, 4); // copy the payload into the result
|
||
|
||
for (i = 0, size = 8; i < len; i++) {
|
||
// copy payload[i] array @ offset size
|
||
result.set(payload[i], size);
|
||
size += payload[i].byteLength;
|
||
}
|
||
|
||
return result;
|
||
};
|
||
|
||
MP4.hdlr = function hdlr(type) {
|
||
return MP4.box(MP4.types.hdlr, MP4.HDLR_TYPES[type]);
|
||
};
|
||
|
||
MP4.mdat = function mdat(data) {
|
||
return MP4.box(MP4.types.mdat, data);
|
||
};
|
||
|
||
MP4.mdhd = function mdhd(timescale, duration) {
|
||
duration *= timescale;
|
||
var upperWordDuration = Math.floor(duration / (UINT32_MAX + 1));
|
||
var lowerWordDuration = Math.floor(duration % (UINT32_MAX + 1));
|
||
return MP4.box(MP4.types.mdhd, new Uint8Array([0x01, // version 1
|
||
0x00, 0x00, 0x00, // flags
|
||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, // creation_time
|
||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, // modification_time
|
||
timescale >> 24 & 0xff, timescale >> 16 & 0xff, timescale >> 8 & 0xff, timescale & 0xff, // timescale
|
||
upperWordDuration >> 24, upperWordDuration >> 16 & 0xff, upperWordDuration >> 8 & 0xff, upperWordDuration & 0xff, lowerWordDuration >> 24, lowerWordDuration >> 16 & 0xff, lowerWordDuration >> 8 & 0xff, lowerWordDuration & 0xff, 0x55, 0xc4, // 'und' language (undetermined)
|
||
0x00, 0x00]));
|
||
};
|
||
|
||
MP4.mdia = function mdia(track) {
|
||
return MP4.box(MP4.types.mdia, MP4.mdhd(track.timescale, track.duration), MP4.hdlr(track.type), MP4.minf(track));
|
||
};
|
||
|
||
MP4.mfhd = function mfhd(sequenceNumber) {
|
||
return MP4.box(MP4.types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
|
||
sequenceNumber >> 24, sequenceNumber >> 16 & 0xff, sequenceNumber >> 8 & 0xff, sequenceNumber & 0xff // sequence_number
|
||
]));
|
||
};
|
||
|
||
MP4.minf = function minf(track) {
|
||
if (track.type === 'audio') {
|
||
return MP4.box(MP4.types.minf, MP4.box(MP4.types.smhd, MP4.SMHD), MP4.DINF, MP4.stbl(track));
|
||
} else {
|
||
return MP4.box(MP4.types.minf, MP4.box(MP4.types.vmhd, MP4.VMHD), MP4.DINF, MP4.stbl(track));
|
||
}
|
||
};
|
||
|
||
MP4.moof = function moof(sn, baseMediaDecodeTime, track) {
|
||
return MP4.box(MP4.types.moof, MP4.mfhd(sn), MP4.traf(track, baseMediaDecodeTime));
|
||
}
|
||
/**
|
||
* @param tracks... (optional) {array} the tracks associated with this movie
|
||
*/
|
||
;
|
||
|
||
MP4.moov = function moov(tracks) {
|
||
var i = tracks.length;
|
||
var boxes = [];
|
||
|
||
while (i--) {
|
||
boxes[i] = MP4.trak(tracks[i]);
|
||
}
|
||
|
||
return MP4.box.apply(null, [MP4.types.moov, MP4.mvhd(tracks[0].timescale, tracks[0].duration)].concat(boxes).concat(MP4.mvex(tracks)));
|
||
};
|
||
|
||
MP4.mvex = function mvex(tracks) {
|
||
var i = tracks.length;
|
||
var boxes = [];
|
||
|
||
while (i--) {
|
||
boxes[i] = MP4.trex(tracks[i]);
|
||
}
|
||
|
||
return MP4.box.apply(null, [MP4.types.mvex].concat(boxes));
|
||
};
|
||
|
||
MP4.mvhd = function mvhd(timescale, duration) {
|
||
duration *= timescale;
|
||
var upperWordDuration = Math.floor(duration / (UINT32_MAX + 1));
|
||
var lowerWordDuration = Math.floor(duration % (UINT32_MAX + 1));
|
||
var bytes = new Uint8Array([0x01, // version 1
|
||
0x00, 0x00, 0x00, // flags
|
||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, // creation_time
|
||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, // modification_time
|
||
timescale >> 24 & 0xff, timescale >> 16 & 0xff, timescale >> 8 & 0xff, timescale & 0xff, // timescale
|
||
upperWordDuration >> 24, upperWordDuration >> 16 & 0xff, upperWordDuration >> 8 & 0xff, upperWordDuration & 0xff, lowerWordDuration >> 24, lowerWordDuration >> 16 & 0xff, lowerWordDuration >> 8 & 0xff, lowerWordDuration & 0xff, 0x00, 0x01, 0x00, 0x00, // 1.0 rate
|
||
0x01, 0x00, // 1.0 volume
|
||
0x00, 0x00, // reserved
|
||
0x00, 0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
|
||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
|
||
0xff, 0xff, 0xff, 0xff // next_track_ID
|
||
]);
|
||
return MP4.box(MP4.types.mvhd, bytes);
|
||
};
|
||
|
||
MP4.sdtp = function sdtp(track) {
|
||
var samples = track.samples || [];
|
||
var bytes = new Uint8Array(4 + samples.length);
|
||
var i;
|
||
var flags; // leave the full box header (4 bytes) all zero
|
||
// write the sample table
|
||
|
||
for (i = 0; i < samples.length; i++) {
|
||
flags = samples[i].flags;
|
||
bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
|
||
}
|
||
|
||
return MP4.box(MP4.types.sdtp, bytes);
|
||
};
|
||
|
||
MP4.stbl = function stbl(track) {
|
||
return MP4.box(MP4.types.stbl, MP4.stsd(track), MP4.box(MP4.types.stts, MP4.STTS), MP4.box(MP4.types.stsc, MP4.STSC), MP4.box(MP4.types.stsz, MP4.STSZ), MP4.box(MP4.types.stco, MP4.STCO));
|
||
};
|
||
|
||
MP4.avc1 = function avc1(track) {
|
||
var sps = [];
|
||
var pps = [];
|
||
var i;
|
||
var data;
|
||
var len; // assemble the SPSs
|
||
|
||
for (i = 0; i < track.sps.length; i++) {
|
||
data = track.sps[i];
|
||
len = data.byteLength;
|
||
sps.push(len >>> 8 & 0xff);
|
||
sps.push(len & 0xff); // SPS
|
||
|
||
sps = sps.concat(Array.prototype.slice.call(data));
|
||
} // assemble the PPSs
|
||
|
||
|
||
for (i = 0; i < track.pps.length; i++) {
|
||
data = track.pps[i];
|
||
len = data.byteLength;
|
||
pps.push(len >>> 8 & 0xff);
|
||
pps.push(len & 0xff);
|
||
pps = pps.concat(Array.prototype.slice.call(data));
|
||
}
|
||
|
||
var avcc = MP4.box(MP4.types.avcC, new Uint8Array([0x01, // version
|
||
sps[3], // profile
|
||
sps[4], // profile compat
|
||
sps[5], // level
|
||
0xfc | 3, // lengthSizeMinusOne, hard-coded to 4 bytes
|
||
0xe0 | track.sps.length // 3bit reserved (111) + numOfSequenceParameterSets
|
||
].concat(sps).concat([track.pps.length // numOfPictureParameterSets
|
||
]).concat(pps))); // "PPS"
|
||
|
||
var width = track.width;
|
||
var height = track.height;
|
||
var hSpacing = track.pixelRatio[0];
|
||
var vSpacing = track.pixelRatio[1];
|
||
return MP4.box(MP4.types.avc1, new Uint8Array([0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x01, // data_reference_index
|
||
0x00, 0x00, // pre_defined
|
||
0x00, 0x00, // reserved
|
||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
|
||
width >> 8 & 0xff, width & 0xff, // width
|
||
height >> 8 & 0xff, height & 0xff, // height
|
||
0x00, 0x48, 0x00, 0x00, // horizresolution
|
||
0x00, 0x48, 0x00, 0x00, // vertresolution
|
||
0x00, 0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x01, // frame_count
|
||
0x12, 0x64, 0x61, 0x69, 0x6c, // dailymotion/hls.js
|
||
0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
|
||
0x00, 0x18, // depth = 24
|
||
0x11, 0x11]), // pre_defined = -1
|
||
avcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
|
||
0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
|
||
0x00, 0x2d, 0xc6, 0xc0])), // avgBitrate
|
||
MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24, // hSpacing
|
||
hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24, // vSpacing
|
||
vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff])));
|
||
};
|
||
|
||
MP4.esds = function esds(track) {
|
||
var configlen = track.config.length;
|
||
return new Uint8Array([0x00, // version 0
|
||
0x00, 0x00, 0x00, // flags
|
||
0x03, // descriptor_type
|
||
0x17 + configlen, // length
|
||
0x00, 0x01, // es_id
|
||
0x00, // stream_priority
|
||
0x04, // descriptor_type
|
||
0x0f + configlen, // length
|
||
0x40, // codec : mpeg4_audio
|
||
0x15, // stream_type
|
||
0x00, 0x00, 0x00, // buffer_size
|
||
0x00, 0x00, 0x00, 0x00, // maxBitrate
|
||
0x00, 0x00, 0x00, 0x00, // avgBitrate
|
||
0x05 // descriptor_type
|
||
].concat([configlen]).concat(track.config).concat([0x06, 0x01, 0x02])); // GASpecificConfig)); // length + audio config descriptor
|
||
};
|
||
|
||
MP4.mp4a = function mp4a(track) {
|
||
var samplerate = track.samplerate;
|
||
return MP4.box(MP4.types.mp4a, new Uint8Array([0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x01, // data_reference_index
|
||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
|
||
0x00, track.channelCount, // channelcount
|
||
0x00, 0x10, // sampleSize:16bits
|
||
0x00, 0x00, 0x00, 0x00, // reserved2
|
||
samplerate >> 8 & 0xff, samplerate & 0xff, //
|
||
0x00, 0x00]), MP4.box(MP4.types.esds, MP4.esds(track)));
|
||
};
|
||
|
||
MP4.mp3 = function mp3(track) {
|
||
var samplerate = track.samplerate;
|
||
return MP4.box(MP4.types['.mp3'], new Uint8Array([0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x01, // data_reference_index
|
||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
|
||
0x00, track.channelCount, // channelcount
|
||
0x00, 0x10, // sampleSize:16bits
|
||
0x00, 0x00, 0x00, 0x00, // reserved2
|
||
samplerate >> 8 & 0xff, samplerate & 0xff, //
|
||
0x00, 0x00]));
|
||
};
|
||
|
||
MP4.stsd = function stsd(track) {
|
||
if (track.type === 'audio') {
|
||
if (!track.isAAC && track.codec === 'mp3') {
|
||
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp3(track));
|
||
}
|
||
|
||
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
|
||
} else {
|
||
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
|
||
}
|
||
};
|
||
|
||
MP4.tkhd = function tkhd(track) {
|
||
var id = track.id;
|
||
var duration = track.duration * track.timescale;
|
||
var width = track.width;
|
||
var height = track.height;
|
||
var upperWordDuration = Math.floor(duration / (UINT32_MAX + 1));
|
||
var lowerWordDuration = Math.floor(duration % (UINT32_MAX + 1));
|
||
return MP4.box(MP4.types.tkhd, new Uint8Array([0x01, // version 1
|
||
0x00, 0x00, 0x07, // flags
|
||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, // creation_time
|
||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, // modification_time
|
||
id >> 24 & 0xff, id >> 16 & 0xff, id >> 8 & 0xff, id & 0xff, // track_ID
|
||
0x00, 0x00, 0x00, 0x00, // reserved
|
||
upperWordDuration >> 24, upperWordDuration >> 16 & 0xff, upperWordDuration >> 8 & 0xff, upperWordDuration & 0xff, lowerWordDuration >> 24, lowerWordDuration >> 16 & 0xff, lowerWordDuration >> 8 & 0xff, lowerWordDuration & 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
|
||
0x00, 0x00, // layer
|
||
0x00, 0x00, // alternate_group
|
||
0x00, 0x00, // non-audio track volume
|
||
0x00, 0x00, // reserved
|
||
0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
|
||
width >> 8 & 0xff, width & 0xff, 0x00, 0x00, // width
|
||
height >> 8 & 0xff, height & 0xff, 0x00, 0x00 // height
|
||
]));
|
||
};
|
||
|
||
MP4.traf = function traf(track, baseMediaDecodeTime) {
|
||
var sampleDependencyTable = MP4.sdtp(track);
|
||
var id = track.id;
|
||
var upperWordBaseMediaDecodeTime = Math.floor(baseMediaDecodeTime / (UINT32_MAX + 1));
|
||
var lowerWordBaseMediaDecodeTime = Math.floor(baseMediaDecodeTime % (UINT32_MAX + 1));
|
||
return MP4.box(MP4.types.traf, MP4.box(MP4.types.tfhd, new Uint8Array([0x00, // version 0
|
||
0x00, 0x00, 0x00, // flags
|
||
id >> 24, id >> 16 & 0xff, id >> 8 & 0xff, id & 0xff // track_ID
|
||
])), MP4.box(MP4.types.tfdt, new Uint8Array([0x01, // version 1
|
||
0x00, 0x00, 0x00, // flags
|
||
upperWordBaseMediaDecodeTime >> 24, upperWordBaseMediaDecodeTime >> 16 & 0xff, upperWordBaseMediaDecodeTime >> 8 & 0xff, upperWordBaseMediaDecodeTime & 0xff, lowerWordBaseMediaDecodeTime >> 24, lowerWordBaseMediaDecodeTime >> 16 & 0xff, lowerWordBaseMediaDecodeTime >> 8 & 0xff, lowerWordBaseMediaDecodeTime & 0xff])), MP4.trun(track, sampleDependencyTable.length + 16 + // tfhd
|
||
20 + // tfdt
|
||
8 + // traf header
|
||
16 + // mfhd
|
||
8 + // moof header
|
||
8), // mdat header
|
||
sampleDependencyTable);
|
||
}
|
||
/**
|
||
* Generate a track box.
|
||
* @param track {object} a track definition
|
||
* @return {Uint8Array} the track box
|
||
*/
|
||
;
|
||
|
||
MP4.trak = function trak(track) {
|
||
track.duration = track.duration || 0xffffffff;
|
||
return MP4.box(MP4.types.trak, MP4.tkhd(track), MP4.mdia(track));
|
||
};
|
||
|
||
MP4.trex = function trex(track) {
|
||
var id = track.id;
|
||
return MP4.box(MP4.types.trex, new Uint8Array([0x00, // version 0
|
||
0x00, 0x00, 0x00, // flags
|
||
id >> 24, id >> 16 & 0xff, id >> 8 & 0xff, id & 0xff, // track_ID
|
||
0x00, 0x00, 0x00, 0x01, // default_sample_description_index
|
||
0x00, 0x00, 0x00, 0x00, // default_sample_duration
|
||
0x00, 0x00, 0x00, 0x00, // default_sample_size
|
||
0x00, 0x01, 0x00, 0x01 // default_sample_flags
|
||
]));
|
||
};
|
||
|
||
MP4.trun = function trun(track, offset) {
|
||
var samples = track.samples || [];
|
||
var len = samples.length;
|
||
var arraylen = 12 + 16 * len;
|
||
var array = new Uint8Array(arraylen);
|
||
var i;
|
||
var sample;
|
||
var duration;
|
||
var size;
|
||
var flags;
|
||
var cts;
|
||
offset += 8 + arraylen;
|
||
array.set([0x00, // version 0
|
||
0x00, 0x0f, 0x01, // flags
|
||
len >>> 24 & 0xff, len >>> 16 & 0xff, len >>> 8 & 0xff, len & 0xff, // sample_count
|
||
offset >>> 24 & 0xff, offset >>> 16 & 0xff, offset >>> 8 & 0xff, offset & 0xff // data_offset
|
||
], 0);
|
||
|
||
for (i = 0; i < len; i++) {
|
||
sample = samples[i];
|
||
duration = sample.duration;
|
||
size = sample.size;
|
||
flags = sample.flags;
|
||
cts = sample.cts;
|
||
array.set([duration >>> 24 & 0xff, duration >>> 16 & 0xff, duration >>> 8 & 0xff, duration & 0xff, // sample_duration
|
||
size >>> 24 & 0xff, size >>> 16 & 0xff, size >>> 8 & 0xff, size & 0xff, // sample_size
|
||
flags.isLeading << 2 | flags.dependsOn, flags.isDependedOn << 6 | flags.hasRedundancy << 4 | flags.paddingValue << 1 | flags.isNonSync, flags.degradPrio & 0xf0 << 8, flags.degradPrio & 0x0f, // sample_flags
|
||
cts >>> 24 & 0xff, cts >>> 16 & 0xff, cts >>> 8 & 0xff, cts & 0xff // sample_composition_time_offset
|
||
], 12 + 16 * i);
|
||
}
|
||
|
||
return MP4.box(MP4.types.trun, array);
|
||
};
|
||
|
||
MP4.initSegment = function initSegment(tracks) {
|
||
if (!MP4.types) {
|
||
MP4.init();
|
||
}
|
||
|
||
var movie = MP4.moov(tracks);
|
||
var result = new Uint8Array(MP4.FTYP.byteLength + movie.byteLength);
|
||
result.set(MP4.FTYP);
|
||
result.set(movie, MP4.FTYP.byteLength);
|
||
return result;
|
||
};
|
||
|
||
return MP4;
|
||
}();
|
||
|
||
MP4.types = void 0;
|
||
MP4.HDLR_TYPES = void 0;
|
||
MP4.STTS = void 0;
|
||
MP4.STSC = void 0;
|
||
MP4.STCO = void 0;
|
||
MP4.STSZ = void 0;
|
||
MP4.VMHD = void 0;
|
||
MP4.SMHD = void 0;
|
||
MP4.STSD = void 0;
|
||
MP4.FTYP = void 0;
|
||
MP4.DINF = void 0;
|
||
/* harmony default export */ __webpack_exports__["default"] = (MP4);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/remux/mp4-remuxer.ts":
|
||
/*!**********************************!*\
|
||
!*** ./src/remux/mp4-remuxer.ts ***!
|
||
\**********************************/
|
||
/*! exports provided: default, normalizePts, flushTextTrackMetadataCueSamples, flushTextTrackUserdataCueSamples */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return MP4Remuxer; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "normalizePts", function() { return normalizePts; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "flushTextTrackMetadataCueSamples", function() { return flushTextTrackMetadataCueSamples; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "flushTextTrackUserdataCueSamples", function() { return flushTextTrackUserdataCueSamples; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _aac_helper__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./aac-helper */ "./src/remux/aac-helper.ts");
|
||
/* harmony import */ var _mp4_generator__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./mp4-generator */ "./src/remux/mp4-generator.ts");
|
||
/* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../events */ "./src/events.ts");
|
||
/* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../errors */ "./src/errors.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _types_loader__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../types/loader */ "./src/types/loader.ts");
|
||
/* harmony import */ var _utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../utils/timescale-conversion */ "./src/utils/timescale-conversion.ts");
|
||
|
||
|
||
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
var MAX_SILENT_FRAME_DURATION = 10 * 1000; // 10 seconds
|
||
|
||
var AAC_SAMPLES_PER_FRAME = 1024;
|
||
var MPEG_AUDIO_SAMPLE_PER_FRAME = 1152;
|
||
var chromeVersion = null;
|
||
var safariWebkitVersion = null;
|
||
var requiresPositiveDts = false;
|
||
|
||
var MP4Remuxer = /*#__PURE__*/function () {
|
||
function MP4Remuxer(observer, config, typeSupported, vendor) {
|
||
if (vendor === void 0) {
|
||
vendor = '';
|
||
}
|
||
|
||
this.observer = void 0;
|
||
this.config = void 0;
|
||
this.typeSupported = void 0;
|
||
this.ISGenerated = false;
|
||
this._initPTS = void 0;
|
||
this._initDTS = void 0;
|
||
this.nextAvcDts = null;
|
||
this.nextAudioPts = null;
|
||
this.isAudioContiguous = false;
|
||
this.isVideoContiguous = false;
|
||
this.observer = observer;
|
||
this.config = config;
|
||
this.typeSupported = typeSupported;
|
||
this.ISGenerated = false;
|
||
|
||
if (chromeVersion === null) {
|
||
var userAgent = navigator.userAgent || '';
|
||
var result = userAgent.match(/Chrome\/(\d+)/i);
|
||
chromeVersion = result ? parseInt(result[1]) : 0;
|
||
}
|
||
|
||
if (safariWebkitVersion === null) {
|
||
var _result = navigator.userAgent.match(/Safari\/(\d+)/i);
|
||
|
||
safariWebkitVersion = _result ? parseInt(_result[1]) : 0;
|
||
}
|
||
|
||
requiresPositiveDts = !!chromeVersion && chromeVersion < 75 || !!safariWebkitVersion && safariWebkitVersion < 600;
|
||
}
|
||
|
||
var _proto = MP4Remuxer.prototype;
|
||
|
||
_proto.destroy = function destroy() {};
|
||
|
||
_proto.resetTimeStamp = function resetTimeStamp(defaultTimeStamp) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].log('[mp4-remuxer]: initPTS & initDTS reset');
|
||
this._initPTS = this._initDTS = defaultTimeStamp;
|
||
};
|
||
|
||
_proto.resetNextTimestamp = function resetNextTimestamp() {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].log('[mp4-remuxer]: reset next timestamp');
|
||
this.isVideoContiguous = false;
|
||
this.isAudioContiguous = false;
|
||
};
|
||
|
||
_proto.resetInitSegment = function resetInitSegment() {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].log('[mp4-remuxer]: ISGenerated flag reset');
|
||
this.ISGenerated = false;
|
||
};
|
||
|
||
_proto.getVideoStartPts = function getVideoStartPts(videoSamples) {
|
||
var rolloverDetected = false;
|
||
var startPTS = videoSamples.reduce(function (minPTS, sample) {
|
||
var delta = sample.pts - minPTS;
|
||
|
||
if (delta < -4294967296) {
|
||
// 2^32, see normalizePts for reasoning; we're hitting a rollover here, and we don't want that to impact the timeOffset calculation
|
||
rolloverDetected = true;
|
||
return normalizePts(minPTS, sample.pts);
|
||
} else if (delta > 0) {
|
||
return minPTS;
|
||
} else {
|
||
return sample.pts;
|
||
}
|
||
}, videoSamples[0].pts);
|
||
|
||
if (rolloverDetected) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].debug('PTS rollover detected');
|
||
}
|
||
|
||
return startPTS;
|
||
};
|
||
|
||
_proto.remux = function remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, accurateTimeOffset, flush, playlistType) {
|
||
var video;
|
||
var audio;
|
||
var initSegment;
|
||
var text;
|
||
var id3;
|
||
var independent;
|
||
var audioTimeOffset = timeOffset;
|
||
var videoTimeOffset = timeOffset; // If we're remuxing audio and video progressively, wait until we've received enough samples for each track before proceeding.
|
||
// This is done to synchronize the audio and video streams. We know if the current segment will have samples if the "pid"
|
||
// parameter is greater than -1. The pid is set when the PMT is parsed, which contains the tracks list.
|
||
// However, if the initSegment has already been generated, or we've reached the end of a segment (flush),
|
||
// then we can remux one track without waiting for the other.
|
||
|
||
var hasAudio = audioTrack.pid > -1;
|
||
var hasVideo = videoTrack.pid > -1;
|
||
var length = videoTrack.samples.length;
|
||
var enoughAudioSamples = audioTrack.samples.length > 0;
|
||
var enoughVideoSamples = length > 1;
|
||
var canRemuxAvc = (!hasAudio || enoughAudioSamples) && (!hasVideo || enoughVideoSamples) || this.ISGenerated || flush;
|
||
|
||
if (canRemuxAvc) {
|
||
if (!this.ISGenerated) {
|
||
initSegment = this.generateIS(audioTrack, videoTrack, timeOffset);
|
||
}
|
||
|
||
var isVideoContiguous = this.isVideoContiguous;
|
||
var firstKeyFrameIndex = -1;
|
||
|
||
if (enoughVideoSamples) {
|
||
firstKeyFrameIndex = findKeyframeIndex(videoTrack.samples);
|
||
|
||
if (!isVideoContiguous && this.config.forceKeyFrameOnDiscontinuity) {
|
||
independent = true;
|
||
|
||
if (firstKeyFrameIndex > 0) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("[mp4-remuxer]: Dropped " + firstKeyFrameIndex + " out of " + length + " video samples due to a missing keyframe");
|
||
var startPTS = this.getVideoStartPts(videoTrack.samples);
|
||
videoTrack.samples = videoTrack.samples.slice(firstKeyFrameIndex);
|
||
videoTrack.dropped += firstKeyFrameIndex;
|
||
videoTimeOffset += (videoTrack.samples[0].pts - startPTS) / (videoTrack.timescale || 90000);
|
||
} else if (firstKeyFrameIndex === -1) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("[mp4-remuxer]: No keyframe found out of " + length + " video samples");
|
||
independent = false;
|
||
}
|
||
}
|
||
}
|
||
|
||
if (this.ISGenerated) {
|
||
if (enoughAudioSamples && enoughVideoSamples) {
|
||
// timeOffset is expected to be the offset of the first timestamp of this fragment (first DTS)
|
||
// if first audio DTS is not aligned with first video DTS then we need to take that into account
|
||
// when providing timeOffset to remuxAudio / remuxVideo. if we don't do that, there might be a permanent / small
|
||
// drift between audio and video streams
|
||
var _startPTS = this.getVideoStartPts(videoTrack.samples);
|
||
|
||
var tsDelta = normalizePts(audioTrack.samples[0].pts, _startPTS) - _startPTS;
|
||
|
||
var audiovideoTimestampDelta = tsDelta / videoTrack.inputTimeScale;
|
||
audioTimeOffset += Math.max(0, audiovideoTimestampDelta);
|
||
videoTimeOffset += Math.max(0, -audiovideoTimestampDelta);
|
||
} // Purposefully remuxing audio before video, so that remuxVideo can use nextAudioPts, which is calculated in remuxAudio.
|
||
|
||
|
||
if (enoughAudioSamples) {
|
||
// if initSegment was generated without audio samples, regenerate it again
|
||
if (!audioTrack.samplerate) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn('[mp4-remuxer]: regenerate InitSegment as audio detected');
|
||
initSegment = this.generateIS(audioTrack, videoTrack, timeOffset);
|
||
}
|
||
|
||
audio = this.remuxAudio(audioTrack, audioTimeOffset, this.isAudioContiguous, accurateTimeOffset, hasVideo || enoughVideoSamples || playlistType === _types_loader__WEBPACK_IMPORTED_MODULE_6__["PlaylistLevelType"].AUDIO ? videoTimeOffset : undefined);
|
||
|
||
if (enoughVideoSamples) {
|
||
var audioTrackLength = audio ? audio.endPTS - audio.startPTS : 0; // if initSegment was generated without video samples, regenerate it again
|
||
|
||
if (!videoTrack.inputTimeScale) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn('[mp4-remuxer]: regenerate InitSegment as video detected');
|
||
initSegment = this.generateIS(audioTrack, videoTrack, timeOffset);
|
||
}
|
||
|
||
video = this.remuxVideo(videoTrack, videoTimeOffset, isVideoContiguous, audioTrackLength);
|
||
}
|
||
} else if (enoughVideoSamples) {
|
||
video = this.remuxVideo(videoTrack, videoTimeOffset, isVideoContiguous, 0);
|
||
}
|
||
|
||
if (video) {
|
||
video.firstKeyFrame = firstKeyFrameIndex;
|
||
video.independent = firstKeyFrameIndex !== -1;
|
||
}
|
||
}
|
||
} // Allow ID3 and text to remux, even if more audio/video samples are required
|
||
|
||
|
||
if (this.ISGenerated) {
|
||
if (id3Track.samples.length) {
|
||
id3 = flushTextTrackMetadataCueSamples(id3Track, timeOffset, this._initPTS, this._initDTS);
|
||
}
|
||
|
||
if (textTrack.samples.length) {
|
||
text = flushTextTrackUserdataCueSamples(textTrack, timeOffset, this._initPTS);
|
||
}
|
||
}
|
||
|
||
return {
|
||
audio: audio,
|
||
video: video,
|
||
initSegment: initSegment,
|
||
independent: independent,
|
||
text: text,
|
||
id3: id3
|
||
};
|
||
};
|
||
|
||
_proto.generateIS = function generateIS(audioTrack, videoTrack, timeOffset) {
|
||
var audioSamples = audioTrack.samples;
|
||
var videoSamples = videoTrack.samples;
|
||
var typeSupported = this.typeSupported;
|
||
var tracks = {};
|
||
var computePTSDTS = !Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(this._initPTS);
|
||
var container = 'audio/mp4';
|
||
var initPTS;
|
||
var initDTS;
|
||
var timescale;
|
||
|
||
if (computePTSDTS) {
|
||
initPTS = initDTS = Infinity;
|
||
}
|
||
|
||
if (audioTrack.config && audioSamples.length) {
|
||
// let's use the audio sampling rate as the MP4 timescale.
// rationale is that there is an integer number of audio samples (1024 for AAC) per audio frame,
// so using the audio sampling rate here yields an integer MP4 frame duration.
// this avoids potential rounding issues and A/V sync issues.
|
||
audioTrack.timescale = audioTrack.samplerate;
|
||
|
||
if (!audioTrack.isAAC) {
|
||
if (typeSupported.mpeg) {
|
||
// Chrome and Safari
|
||
container = 'audio/mpeg';
|
||
audioTrack.codec = '';
|
||
} else if (typeSupported.mp3) {
|
||
// Firefox
|
||
audioTrack.codec = 'mp3';
|
||
}
|
||
}
|
||
|
||
tracks.audio = {
|
||
id: 'audio',
|
||
container: container,
|
||
codec: audioTrack.codec,
|
||
initSegment: !audioTrack.isAAC && typeSupported.mpeg ? new Uint8Array(0) : _mp4_generator__WEBPACK_IMPORTED_MODULE_2__["default"].initSegment([audioTrack]),
|
||
metadata: {
|
||
channelCount: audioTrack.channelCount
|
||
}
|
||
};
|
||
|
||
if (computePTSDTS) {
|
||
timescale = audioTrack.inputTimeScale; // remember first PTS of this demuxing context. for audio, PTS = DTS
|
||
|
||
initPTS = initDTS = audioSamples[0].pts - Math.round(timescale * timeOffset);
|
||
}
|
||
}
|
||
|
||
if (videoTrack.sps && videoTrack.pps && videoSamples.length) {
|
||
// let's use input time scale as MP4 video timescale
|
||
// we use input time scale straight away to avoid rounding issues on frame duration / cts computation
|
||
videoTrack.timescale = videoTrack.inputTimeScale;
|
||
tracks.video = {
|
||
id: 'main',
|
||
container: 'video/mp4',
|
||
codec: videoTrack.codec,
|
||
initSegment: _mp4_generator__WEBPACK_IMPORTED_MODULE_2__["default"].initSegment([videoTrack]),
|
||
metadata: {
|
||
width: videoTrack.width,
|
||
height: videoTrack.height
|
||
}
|
||
};
|
||
|
||
if (computePTSDTS) {
|
||
timescale = videoTrack.inputTimeScale;
|
||
var startPTS = this.getVideoStartPts(videoSamples);
|
||
var startOffset = Math.round(timescale * timeOffset);
|
||
initDTS = Math.min(initDTS, normalizePts(videoSamples[0].dts, startPTS) - startOffset);
|
||
initPTS = Math.min(initPTS, startPTS - startOffset);
|
||
}
|
||
}
|
||
|
||
if (Object.keys(tracks).length) {
|
||
this.ISGenerated = true;
|
||
|
||
if (computePTSDTS) {
|
||
this._initPTS = initPTS;
|
||
this._initDTS = initDTS;
|
||
}
|
||
|
||
return {
|
||
tracks: tracks,
|
||
initPTS: initPTS,
|
||
timescale: timescale
|
||
};
|
||
}
|
||
};
|
||
|
||
_proto.remuxVideo = function remuxVideo(track, timeOffset, contiguous, audioTrackLength) {
|
||
var timeScale = track.inputTimeScale;
|
||
var inputSamples = track.samples;
|
||
var outputSamples = [];
|
||
var nbSamples = inputSamples.length;
|
||
var initPTS = this._initPTS;
|
||
var nextAvcDts = this.nextAvcDts;
|
||
var offset = 8;
|
||
var mp4SampleDuration;
|
||
var firstDTS;
|
||
var lastDTS;
|
||
var minPTS = Number.POSITIVE_INFINITY;
|
||
var maxPTS = Number.NEGATIVE_INFINITY;
|
||
var ptsDtsShift = 0;
|
||
var sortSamples = false; // if parsed fragment is contiguous with last one, let's use last DTS value as reference
|
||
|
||
if (!contiguous || nextAvcDts === null) {
|
||
var pts = timeOffset * timeScale;
|
||
var cts = inputSamples[0].pts - normalizePts(inputSamples[0].dts, inputSamples[0].pts); // if not contiguous, let's use target timeOffset
|
||
|
||
nextAvcDts = pts - cts;
|
||
} // PTS is coded on 33 bits, and can loop from -2^32 to 2^32
// normalizePts will make PTS/DTS values monotonic; we use the last known DTS value as the reference value
|
||
|
||
|
||
for (var i = 0; i < nbSamples; i++) {
|
||
var sample = inputSamples[i];
|
||
sample.pts = normalizePts(sample.pts - initPTS, nextAvcDts);
|
||
sample.dts = normalizePts(sample.dts - initPTS, nextAvcDts);
|
||
|
||
if (sample.dts > sample.pts) {
|
||
var PTS_DTS_SHIFT_TOLERANCE_90KHZ = 90000 * 0.2;
|
||
ptsDtsShift = Math.max(Math.min(ptsDtsShift, sample.pts - sample.dts), -1 * PTS_DTS_SHIFT_TOLERANCE_90KHZ);
|
||
}
|
||
|
||
if (sample.dts < inputSamples[i > 0 ? i - 1 : i].dts) {
|
||
sortSamples = true;
|
||
}
|
||
} // sort video samples by DTS then PTS then demux id order
|
||
|
||
|
||
if (sortSamples) {
|
||
inputSamples.sort(function (a, b) {
|
||
var deltadts = a.dts - b.dts;
|
||
var deltapts = a.pts - b.pts;
|
||
return deltadts || deltapts;
|
||
});
|
||
} // Get first/last DTS
|
||
|
||
|
||
firstDTS = inputSamples[0].dts;
|
||
lastDTS = inputSamples[inputSamples.length - 1].dts; // on Safari let's signal the same sample duration for all samples
|
||
// sample duration (as expected by trun MP4 boxes), should be the delta between sample DTS
|
||
// set this constant duration as being the avg delta between consecutive DTS.
|
||
|
||
var averageSampleDuration = Math.round((lastDTS - firstDTS) / (nbSamples - 1)); // handle broken streams with PTS < DTS, tolerance up to 0.2 seconds
|
||
|
||
if (ptsDtsShift < 0) {
|
||
if (ptsDtsShift < averageSampleDuration * -2) {
|
||
// Fix for "CNN special report, with CC" in test-streams (including Safari browser)
|
||
// With large PTS < DTS errors such as this, we want to correct CTS while maintaining increasing DTS values
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("PTS < DTS detected in video samples, offsetting DTS from PTS by " + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(-averageSampleDuration, true) + " ms");
|
||
var lastDts = ptsDtsShift;
|
||
|
||
for (var _i = 0; _i < nbSamples; _i++) {
|
||
inputSamples[_i].dts = lastDts = Math.max(lastDts, inputSamples[_i].pts - averageSampleDuration);
|
||
inputSamples[_i].pts = Math.max(lastDts, inputSamples[_i].pts);
|
||
}
|
||
} else {
|
||
// Fix for "Custom IV with bad PTS DTS" in test-streams
|
||
// With smaller PTS < DTS errors we can simply move all DTS back. This increases CTS without causing buffer gaps or decode errors in Safari
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("PTS < DTS detected in video samples, shifting DTS by " + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(ptsDtsShift, true) + " ms to overcome this issue");
|
||
|
||
for (var _i2 = 0; _i2 < nbSamples; _i2++) {
|
||
inputSamples[_i2].dts = inputSamples[_i2].dts + ptsDtsShift;
|
||
}
|
||
}
|
||
|
||
firstDTS = inputSamples[0].dts;
|
||
} // if fragments are contiguous, detect holes/overlaps between fragments
|
||
|
||
|
||
if (contiguous) {
|
||
// check timestamp continuity across consecutive fragments (this is to remove inter-fragment gap/hole)
|
||
var delta = firstDTS - nextAvcDts;
|
||
var foundHole = delta > averageSampleDuration;
|
||
var foundOverlap = delta < -1;
|
||
|
||
if (foundHole || foundOverlap) {
|
||
if (foundHole) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("AVC: " + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(delta, true) + " ms (" + delta + "dts) hole between fragments detected, filling it");
|
||
} else {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("AVC: " + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(-delta, true) + " ms (" + delta + "dts) overlapping between fragments detected");
|
||
}
|
||
|
||
firstDTS = nextAvcDts;
|
||
var firstPTS = inputSamples[0].pts - delta;
|
||
inputSamples[0].dts = firstDTS;
|
||
inputSamples[0].pts = firstPTS;
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].log("Video: First PTS/DTS adjusted: " + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(firstPTS, true) + "/" + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(firstDTS, true) + ", delta: " + Object(_utils_timescale_conversion__WEBPACK_IMPORTED_MODULE_7__["toMsFromMpegTsClock"])(delta, true) + " ms");
|
||
}
|
||
}
|
||
|
||
if (requiresPositiveDts) {
|
||
firstDTS = Math.max(0, firstDTS);
|
||
}
|
||
|
||
var nbNalu = 0;
|
||
var naluLen = 0;
|
||
|
||
for (var _i3 = 0; _i3 < nbSamples; _i3++) {
|
||
// compute total/avc sample length and nb of NAL units
|
||
var _sample = inputSamples[_i3];
|
||
var units = _sample.units;
|
||
var nbUnits = units.length;
|
||
var sampleLen = 0;
|
||
|
||
for (var j = 0; j < nbUnits; j++) {
|
||
sampleLen += units[j].data.length;
|
||
}
|
||
|
||
naluLen += sampleLen;
|
||
nbNalu += nbUnits;
|
||
_sample.length = sampleLen; // normalize PTS/DTS
|
||
// ensure sample monotonic DTS
|
||
|
||
_sample.dts = Math.max(_sample.dts, firstDTS); // ensure that the computed value is greater than or equal to the sample DTS
|
||
|
||
_sample.pts = Math.max(_sample.pts, _sample.dts, 0);
|
||
minPTS = Math.min(_sample.pts, minPTS);
|
||
maxPTS = Math.max(_sample.pts, maxPTS);
|
||
}
|
||
|
||
lastDTS = inputSamples[nbSamples - 1].dts;
|
||
/* concatenate the video data and construct the mdat in place
|
||
(need 8 more bytes to fill length and mdat type) */
|
||
|
||
var mdatSize = naluLen + 4 * nbNalu + 8;
|
||
var mdat;
|
||
|
||
try {
|
||
mdat = new Uint8Array(mdatSize);
|
||
} catch (err) {
|
||
this.observer.emit(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].ERROR, _events__WEBPACK_IMPORTED_MODULE_3__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorTypes"].MUX_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorDetails"].REMUX_ALLOC_ERROR,
|
||
fatal: false,
|
||
bytes: mdatSize,
|
||
reason: "fail allocating video mdat " + mdatSize
|
||
});
|
||
return;
|
||
}
|
||
|
||
var view = new DataView(mdat.buffer);
|
||
view.setUint32(0, mdatSize);
|
||
mdat.set(_mp4_generator__WEBPACK_IMPORTED_MODULE_2__["default"].types.mdat, 4);
|
||
|
||
for (var _i4 = 0; _i4 < nbSamples; _i4++) {
|
||
var avcSample = inputSamples[_i4];
|
||
var avcSampleUnits = avcSample.units;
|
||
var mp4SampleLength = 0; // convert NALU bitstream to MP4 format (prepend NALU with size field)
|
||
|
||
for (var _j = 0, _nbUnits = avcSampleUnits.length; _j < _nbUnits; _j++) {
|
||
var unit = avcSampleUnits[_j];
|
||
var unitData = unit.data;
|
||
var unitDataLen = unit.data.byteLength;
|
||
view.setUint32(offset, unitDataLen);
|
||
offset += 4;
|
||
mdat.set(unitData, offset);
|
||
offset += unitDataLen;
|
||
mp4SampleLength += 4 + unitDataLen;
|
||
} // expected sample duration is the Decoding Timestamp diff of consecutive samples
|
||
|
||
|
||
if (_i4 < nbSamples - 1) {
|
||
mp4SampleDuration = inputSamples[_i4 + 1].dts - avcSample.dts;
|
||
} else {
|
||
var config = this.config;
|
||
var lastFrameDuration = avcSample.dts - inputSamples[_i4 > 0 ? _i4 - 1 : _i4].dts;
|
||
|
||
if (config.stretchShortVideoTrack && this.nextAudioPts !== null) {
|
||
// In some cases, a segment's audio track duration may exceed the video track duration.
|
||
// Since we've already remuxed audio, and we know how long the audio track is, we look to
|
||
// see if the delta to the next segment is longer than maxBufferHole.
|
||
// If so, playback would potentially get stuck, so we artificially inflate
|
||
// the duration of the last frame to minimize any potential gap between segments.
|
||
var gapTolerance = Math.floor(config.maxBufferHole * timeScale);
|
||
var deltaToFrameEnd = (audioTrackLength ? minPTS + audioTrackLength * timeScale : this.nextAudioPts) - avcSample.pts;
|
||
|
||
if (deltaToFrameEnd > gapTolerance) {
|
||
// We subtract lastFrameDuration from deltaToFrameEnd to try to prevent any video
|
||
// frame overlap. maxBufferHole should be >> lastFrameDuration anyway.
|
||
mp4SampleDuration = deltaToFrameEnd - lastFrameDuration;
|
||
|
||
if (mp4SampleDuration < 0) {
|
||
mp4SampleDuration = lastFrameDuration;
|
||
}
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].log("[mp4-remuxer]: It is approximately " + deltaToFrameEnd / 90 + " ms to the next segment; using duration " + mp4SampleDuration / 90 + " ms for the last video frame.");
|
||
} else {
|
||
mp4SampleDuration = lastFrameDuration;
|
||
}
|
||
} else {
|
||
mp4SampleDuration = lastFrameDuration;
|
||
}
|
||
}
|
||
|
||
var compositionTimeOffset = Math.round(avcSample.pts - avcSample.dts);
|
||
outputSamples.push(new Mp4Sample(avcSample.key, mp4SampleDuration, mp4SampleLength, compositionTimeOffset));
|
||
}
|
||
|
||
if (outputSamples.length && chromeVersion && chromeVersion < 70) {
|
||
// Chrome workaround, mark first sample as being a Random Access Point (keyframe) to avoid sourcebuffer append issue
|
||
// https://code.google.com/p/chromium/issues/detail?id=229412
|
||
var flags = outputSamples[0].flags;
|
||
flags.dependsOn = 2;
|
||
flags.isNonSync = 0;
|
||
}
|
||
|
||
console.assert(mp4SampleDuration !== undefined, 'mp4SampleDuration must be computed'); // next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
|
||
|
||
this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration;
|
||
this.isVideoContiguous = true;
|
||
var moof = _mp4_generator__WEBPACK_IMPORTED_MODULE_2__["default"].moof(track.sequenceNumber++, firstDTS, _extends({}, track, {
|
||
samples: outputSamples
|
||
}));
|
||
var type = 'video';
|
||
var data = {
|
||
data1: moof,
|
||
data2: mdat,
|
||
startPTS: minPTS / timeScale,
|
||
endPTS: (maxPTS + mp4SampleDuration) / timeScale,
|
||
startDTS: firstDTS / timeScale,
|
||
endDTS: nextAvcDts / timeScale,
|
||
type: type,
|
||
hasAudio: false,
|
||
hasVideo: true,
|
||
nb: outputSamples.length,
|
||
dropped: track.dropped
|
||
};
|
||
track.samples = [];
|
||
track.dropped = 0;
|
||
console.assert(mdat.length, 'MDAT length must not be zero');
|
||
return data;
|
||
};
|
||
|
||
_proto.remuxAudio = function remuxAudio(track, timeOffset, contiguous, accurateTimeOffset, videoTimeOffset) {
|
||
var inputTimeScale = track.inputTimeScale;
|
||
var mp4timeScale = track.samplerate ? track.samplerate : inputTimeScale;
|
||
var scaleFactor = inputTimeScale / mp4timeScale;
|
||
var mp4SampleDuration = track.isAAC ? AAC_SAMPLES_PER_FRAME : MPEG_AUDIO_SAMPLE_PER_FRAME;
|
||
var inputSampleDuration = mp4SampleDuration * scaleFactor;
|
||
var initPTS = this._initPTS;
|
||
var rawMPEG = !track.isAAC && this.typeSupported.mpeg;
|
||
var outputSamples = [];
|
||
var inputSamples = track.samples;
|
||
var offset = rawMPEG ? 0 : 8;
|
||
var nextAudioPts = this.nextAudioPts || -1; // window.audioSamples ? window.audioSamples.push(inputSamples.map(s => s.pts)) : (window.audioSamples = [inputSamples.map(s => s.pts)]);
|
||
// for audio samples, also consider consecutive fragments as being contiguous (even if a level switch occurs),
// for the sake of clarity:
// consecutive fragments are frags with
// - less than 100ms gap between the new time offset (if accurate) and the next expected PTS OR
// - less than 20 audio frames distance
// contiguous fragments are consecutive fragments from the same quality level (same level, new SN = old SN + 1)
// this helps ensure audio continuity
// and this also avoids audio glitches/cuts when switching quality, or reporting a wrong duration on the first audio frame
|
||
|
||
var timeOffsetMpegTS = timeOffset * inputTimeScale;
|
||
this.isAudioContiguous = contiguous = contiguous || inputSamples.length && nextAudioPts > 0 && (accurateTimeOffset && Math.abs(timeOffsetMpegTS - nextAudioPts) < 9000 || Math.abs(normalizePts(inputSamples[0].pts - initPTS, timeOffsetMpegTS) - nextAudioPts) < 20 * inputSampleDuration); // compute normalized PTS
|
||
|
||
inputSamples.forEach(function (sample) {
|
||
sample.pts = normalizePts(sample.pts - initPTS, timeOffsetMpegTS);
|
||
});
|
||
|
||
if (!contiguous || nextAudioPts < 0) {
|
||
// filter out samples with negative PTS that are not playable anyway
// if we don't remove these negative samples, they will shift all audio samples forward,
// leading to audio overlap between the current / next fragment
|
||
inputSamples = inputSamples.filter(function (sample) {
|
||
return sample.pts >= 0;
|
||
}); // in case all samples have negative PTS, and have been filtered out, return now
|
||
|
||
if (!inputSamples.length) {
|
||
return;
|
||
}
|
||
|
||
if (videoTimeOffset === 0) {
|
||
// Set the start to 0 to match video so that start gaps larger than inputSampleDuration are filled with silence
|
||
nextAudioPts = 0;
|
||
} else if (accurateTimeOffset) {
|
||
// When not seeking, not live, and LevelDetails.PTSKnown, use fragment start as predicted next audio PTS
|
||
nextAudioPts = Math.max(0, timeOffsetMpegTS);
|
||
} else {
|
||
// if frags are not contiguous and we can't trust the time offset, let's use the first sample PTS as the next audio PTS
|
||
nextAudioPts = inputSamples[0].pts;
|
||
}
|
||
} // If the audio track is missing samples, the frames seem to get "left-shifted" within the
|
||
// resulting mp4 segment, causing sync issues and leaving gaps at the end of the audio segment.
|
||
// In an effort to prevent this from happening, we inject frames here where there are gaps.
|
||
// When possible, we inject a silent frame; when that's not possible, we duplicate the last
|
||
// frame.
|
||
|
||
|
||
if (track.isAAC) {
|
||
var alignedWithVideo = videoTimeOffset !== undefined;
|
||
var maxAudioFramesDrift = this.config.maxAudioFramesDrift;
|
||
|
||
for (var i = 0, nextPts = nextAudioPts; i < inputSamples.length; i++) {
|
||
// First, let's see how far off this frame is from where we expect it to be
|
||
var sample = inputSamples[i];
|
||
var pts = sample.pts;
|
||
var delta = pts - nextPts;
|
||
var duration = Math.abs(1000 * delta / inputTimeScale); // When remuxing with video, if we're overlapping by more than a duration, drop this sample to stay in sync
|
||
|
||
if (delta <= -maxAudioFramesDrift * inputSampleDuration && alignedWithVideo) {
|
||
if (i === 0) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("Audio frame @ " + (pts / inputTimeScale).toFixed(3) + "s overlaps nextAudioPts by " + Math.round(1000 * delta / inputTimeScale) + " ms.");
|
||
this.nextAudioPts = nextAudioPts = nextPts = pts;
|
||
}
|
||
} // eslint-disable-line brace-style
|
||
// Insert missing frames if:
|
||
// 1: We're more than maxAudioFramesDrift frames away
|
||
// 2: Not more than MAX_SILENT_FRAME_DURATION away
|
||
// 3: currentTime (aka nextPtsNorm) is not 0
|
||
// 4: remuxing with video (videoTimeOffset !== undefined)
|
||
else if (delta >= maxAudioFramesDrift * inputSampleDuration && duration < MAX_SILENT_FRAME_DURATION && alignedWithVideo) {
|
||
var missing = Math.round(delta / inputSampleDuration); // Adjust nextPts so that silent samples are aligned with media pts. This will prevent media samples from
|
||
// later being shifted if nextPts is based on timeOffset and delta is not a multiple of inputSampleDuration.
|
||
|
||
nextPts = pts - missing * inputSampleDuration;
|
||
|
||
if (nextPts < 0) {
|
||
missing--;
|
||
nextPts += inputSampleDuration;
|
||
}
|
||
|
||
if (i === 0) {
|
||
this.nextAudioPts = nextAudioPts = nextPts;
|
||
}
|
||
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn("[mp4-remuxer]: Injecting " + missing + " audio frame @ " + (nextPts / inputTimeScale).toFixed(3) + "s due to " + Math.round(1000 * delta / inputTimeScale) + " ms gap.");
|
||
|
||
for (var j = 0; j < missing; j++) {
|
||
var newStamp = Math.max(nextPts, 0);
|
||
var fillFrame = _aac_helper__WEBPACK_IMPORTED_MODULE_1__["default"].getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
|
||
|
||
if (!fillFrame) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
|
||
fillFrame = sample.unit.subarray();
|
||
}
|
||
|
||
inputSamples.splice(i, 0, {
|
||
unit: fillFrame,
|
||
pts: newStamp
|
||
});
|
||
nextPts += inputSampleDuration;
|
||
i++;
|
||
}
|
||
}
|
||
|
||
sample.pts = nextPts;
|
||
nextPts += inputSampleDuration;
|
||
}
|
||
}
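// Illustrative arithmetic (added, not part of the original bundle): with AAC at 44.1 kHz and a
// 90 kHz MPEG-TS input timescale, inputSampleDuration = 1024 * (90000 / 44100) ~= 2089.8 ticks
// (~23.2 ms per frame). A 100 ms gap (9000 ticks) therefore yields
// missing = Math.round(9000 / 2089.8) = 4 injected silent frames, and nextPts is pulled back to
// pts - missing * inputSampleDuration so that the injected frames stay aligned with the media PTS.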
|
||
|
||
var firstPTS = null;
|
||
var lastPTS = null;
|
||
var mdat;
|
||
var mdatSize = 0;
|
||
var sampleLength = inputSamples.length;
|
||
|
||
while (sampleLength--) {
|
||
mdatSize += inputSamples[sampleLength].unit.byteLength;
|
||
}
|
||
|
||
for (var _j2 = 0, _nbSamples = inputSamples.length; _j2 < _nbSamples; _j2++) {
|
||
var audioSample = inputSamples[_j2];
|
||
var unit = audioSample.unit;
|
||
var _pts = audioSample.pts;
|
||
|
||
if (lastPTS !== null) {
|
||
// If we have more than one sample, set the duration of the sample to the "real" duration; the PTS diff with
|
||
// the previous sample
|
||
var prevSample = outputSamples[_j2 - 1];
|
||
prevSample.duration = Math.round((_pts - lastPTS) / scaleFactor);
|
||
} else {
|
||
if (contiguous && track.isAAC) {
|
||
// set PTS/DTS to expected PTS/DTS
|
||
_pts = nextAudioPts;
|
||
} // remember first PTS of our audioSamples
|
||
|
||
|
||
firstPTS = _pts;
|
||
|
||
if (mdatSize > 0) {
|
||
/* concatenate the audio data and construct the mdat in place
|
||
(need 8 more bytes to fill length and mdat type) */
|
||
mdatSize += offset;
|
||
|
||
try {
|
||
mdat = new Uint8Array(mdatSize);
|
||
} catch (err) {
|
||
this.observer.emit(_events__WEBPACK_IMPORTED_MODULE_3__["Events"].ERROR, _events__WEBPACK_IMPORTED_MODULE_3__["Events"].ERROR, {
|
||
type: _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorTypes"].MUX_ERROR,
|
||
details: _errors__WEBPACK_IMPORTED_MODULE_4__["ErrorDetails"].REMUX_ALLOC_ERROR,
|
||
fatal: false,
|
||
bytes: mdatSize,
|
||
reason: "fail allocating audio mdat " + mdatSize
|
||
});
|
||
return;
|
||
}
|
||
|
||
if (!rawMPEG) {
|
||
var view = new DataView(mdat.buffer);
|
||
view.setUint32(0, mdatSize);
|
||
mdat.set(_mp4_generator__WEBPACK_IMPORTED_MODULE_2__["default"].types.mdat, 4);
|
||
}
|
||
} else {
|
||
// no audio samples
|
||
return;
|
||
}
|
||
}
|
||
|
||
mdat.set(unit, offset);
|
||
var unitLen = unit.byteLength;
|
||
offset += unitLen; // Default the sample's duration to the computed mp4SampleDuration, which will either be 1024 for AAC or 1152 for MPEG
|
||
// In the case that we have 1 sample, this will be the duration. If we have more than one sample, the duration
|
||
// becomes the PTS diff with the previous sample
|
||
|
||
outputSamples.push(new Mp4Sample(true, mp4SampleDuration, unitLen, 0));
|
||
lastPTS = _pts;
|
||
} // We could end up with no audio samples if all input samples were overlapping with the previously remuxed ones
|
||
|
||
|
||
var nbSamples = outputSamples.length;
|
||
|
||
if (!nbSamples) {
|
||
return;
|
||
} // The next audio sample PTS should be equal to last sample PTS + duration
|
||
|
||
|
||
var lastSample = outputSamples[outputSamples.length - 1];
|
||
this.nextAudioPts = nextAudioPts = lastPTS + scaleFactor * lastSample.duration; // Set the track samples from inputSamples to outputSamples before remuxing
|
||
|
||
var moof = rawMPEG ? new Uint8Array(0) : _mp4_generator__WEBPACK_IMPORTED_MODULE_2__["default"].moof(track.sequenceNumber++, firstPTS / scaleFactor, _extends({}, track, {
|
||
samples: outputSamples
|
||
})); // Clear the track samples. This also clears the samples array in the demuxer, since the reference is shared
|
||
|
||
track.samples = [];
|
||
var start = firstPTS / inputTimeScale;
|
||
var end = nextAudioPts / inputTimeScale;
|
||
var type = 'audio';
|
||
var audioData = {
|
||
data1: moof,
|
||
data2: mdat,
|
||
startPTS: start,
|
||
endPTS: end,
|
||
startDTS: start,
|
||
endDTS: end,
|
||
type: type,
|
||
hasAudio: true,
|
||
hasVideo: false,
|
||
nb: nbSamples
|
||
};
|
||
this.isAudioContiguous = true;
|
||
console.assert(mdat.length, 'MDAT length must not be zero');
|
||
return audioData;
|
||
};
|
||
|
||
_proto.remuxEmptyAudio = function remuxEmptyAudio(track, timeOffset, contiguous, videoData) {
|
||
var inputTimeScale = track.inputTimeScale;
|
||
var mp4timeScale = track.samplerate ? track.samplerate : inputTimeScale;
|
||
var scaleFactor = inputTimeScale / mp4timeScale;
|
||
var nextAudioPts = this.nextAudioPts; // sync with video's timestamp
|
||
|
||
var startDTS = (nextAudioPts !== null ? nextAudioPts : videoData.startDTS * inputTimeScale) + this._initDTS;
|
||
var endDTS = videoData.endDTS * inputTimeScale + this._initDTS; // one sample's duration value
|
||
|
||
var frameDuration = scaleFactor * AAC_SAMPLES_PER_FRAME; // samples count of this segment's duration
|
||
|
||
var nbSamples = Math.ceil((endDTS - startDTS) / frameDuration); // silent frame
|
||
|
||
var silentFrame = _aac_helper__WEBPACK_IMPORTED_MODULE_1__["default"].getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].warn('[mp4-remuxer]: remux empty Audio'); // Can't remux if we can't generate a silent frame...
|
||
|
||
if (!silentFrame) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_5__["logger"].trace('[mp4-remuxer]: Unable to remuxEmptyAudio since we were unable to get a silent frame for given audio codec');
|
||
return;
|
||
}
|
||
|
||
var samples = [];
|
||
|
||
for (var i = 0; i < nbSamples; i++) {
|
||
var stamp = startDTS + i * frameDuration;
|
||
samples.push({
|
||
unit: silentFrame,
|
||
pts: stamp,
|
||
dts: stamp
|
||
});
|
||
}
|
||
|
||
track.samples = samples;
|
||
return this.remuxAudio(track, timeOffset, contiguous, false);
|
||
};
|
||
|
||
return MP4Remuxer;
|
||
}();
|
||
|
||
|
||
function normalizePts(value, reference) {
  var offset;

  if (reference === null) {
    return value;
  }

  if (reference < value) {
    // - 2^33
    offset = -8589934592;
  } else {
    // + 2^33
    offset = 8589934592;
  }
  /* PTS is 33-bit (from 0 to 2^33 - 1):
     if the diff between value and reference is bigger than half of the amplitude (2^32), it means that
     PTS looping occurred. fill the gap */

  while (Math.abs(value - reference) > 4294967296) {
    value += offset;
  }

  return value;
}
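// Worked example (added for illustration): a 33-bit PTS wraps at 2^33 = 8589934592.
// If reference = 8589900000 (just before the wrap) and a post-wrap sample arrives with
// value = 1000, then |value - reference| > 2^32, so the loop above adds 2^33 once and
// returns 8589935592, i.e. the sample lands ~35592 ticks after the reference instead of
// jumping back to the start of the timeline.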
function findKeyframeIndex(samples) {
  for (var i = 0; i < samples.length; i++) {
    if (samples[i].key) {
      return i;
    }
  }

  return -1;
}
|
||
|
||
function flushTextTrackMetadataCueSamples(track, timeOffset, initPTS, initDTS) {
|
||
var length = track.samples.length;
|
||
|
||
if (!length) {
|
||
return;
|
||
}
|
||
|
||
var inputTimeScale = track.inputTimeScale;
|
||
|
||
for (var index = 0; index < length; index++) {
|
||
var sample = track.samples[index]; // setting id3 pts, dts to relative time
|
||
// using initPTS and initDTS to calculate the relative time
|
||
|
||
sample.pts = normalizePts(sample.pts - initPTS, timeOffset * inputTimeScale) / inputTimeScale;
|
||
sample.dts = normalizePts(sample.dts - initDTS, timeOffset * inputTimeScale) / inputTimeScale;
|
||
}
|
||
|
||
var samples = track.samples;
|
||
track.samples = [];
|
||
return {
|
||
samples: samples
|
||
};
|
||
}
|
||
function flushTextTrackUserdataCueSamples(track, timeOffset, initPTS) {
|
||
var length = track.samples.length;
|
||
|
||
if (!length) {
|
||
return;
|
||
}
|
||
|
||
var inputTimeScale = track.inputTimeScale;
|
||
|
||
for (var index = 0; index < length; index++) {
|
||
var sample = track.samples[index]; // setting text pts, dts to relative time
|
||
// using initPTS to calculate the relative time
|
||
|
||
sample.pts = normalizePts(sample.pts - initPTS, timeOffset * inputTimeScale) / inputTimeScale;
|
||
}
|
||
|
||
track.samples.sort(function (a, b) {
|
||
return a.pts - b.pts;
|
||
});
|
||
var samples = track.samples;
|
||
track.samples = [];
|
||
return {
|
||
samples: samples
|
||
};
|
||
}
|
||
|
||
var Mp4Sample = function Mp4Sample(isKeyframe, duration, size, cts) {
|
||
this.size = void 0;
|
||
this.duration = void 0;
|
||
this.cts = void 0;
|
||
this.flags = void 0;
|
||
this.duration = duration;
|
||
this.size = size;
|
||
this.cts = cts;
|
||
this.flags = new Mp4SampleFlags(isKeyframe);
|
||
};
|
||
|
||
var Mp4SampleFlags = function Mp4SampleFlags(isKeyframe) {
|
||
this.isLeading = 0;
|
||
this.isDependedOn = 0;
|
||
this.hasRedundancy = 0;
|
||
this.degradPrio = 0;
|
||
this.dependsOn = 1;
|
||
this.isNonSync = 1;
|
||
this.dependsOn = isKeyframe ? 2 : 1;
|
||
this.isNonSync = isKeyframe ? 0 : 1;
|
||
};
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/remux/passthrough-remuxer.ts":
|
||
/*!******************************************!*\
|
||
!*** ./src/remux/passthrough-remuxer.ts ***!
|
||
\******************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _mp4_remuxer__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./mp4-remuxer */ "./src/remux/mp4-remuxer.ts");
|
||
/* harmony import */ var _utils_mp4_tools__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../utils/mp4-tools */ "./src/utils/mp4-tools.ts");
|
||
/* harmony import */ var _loader_fragment__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../loader/fragment */ "./src/loader/fragment.ts");
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
var PassThroughRemuxer = /*#__PURE__*/function () {
|
||
function PassThroughRemuxer() {
|
||
this.emitInitSegment = false;
|
||
this.audioCodec = void 0;
|
||
this.videoCodec = void 0;
|
||
this.initData = void 0;
|
||
this.initPTS = void 0;
|
||
this.initTracks = void 0;
|
||
this.lastEndDTS = null;
|
||
}
|
||
|
||
var _proto = PassThroughRemuxer.prototype;
|
||
|
||
_proto.destroy = function destroy() {};
|
||
|
||
_proto.resetTimeStamp = function resetTimeStamp(defaultInitPTS) {
|
||
this.initPTS = defaultInitPTS;
|
||
this.lastEndDTS = null;
|
||
};
|
||
|
||
_proto.resetNextTimestamp = function resetNextTimestamp() {
|
||
this.lastEndDTS = null;
|
||
};
|
||
|
||
_proto.resetInitSegment = function resetInitSegment(initSegment, audioCodec, videoCodec) {
|
||
this.audioCodec = audioCodec;
|
||
this.videoCodec = videoCodec;
|
||
this.generateInitSegment(initSegment);
|
||
this.emitInitSegment = true;
|
||
};
|
||
|
||
_proto.generateInitSegment = function generateInitSegment(initSegment) {
|
||
var audioCodec = this.audioCodec,
|
||
videoCodec = this.videoCodec;
|
||
|
||
if (!initSegment || !initSegment.byteLength) {
|
||
this.initTracks = undefined;
|
||
this.initData = undefined;
|
||
return;
|
||
}
|
||
|
||
var initData = this.initData = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_2__["parseInitSegment"])(initSegment); // Get codec from initSegment or fallback to default
|
||
|
||
if (!audioCodec) {
|
||
audioCodec = getParsedTrackCodec(initData.audio, _loader_fragment__WEBPACK_IMPORTED_MODULE_3__["ElementaryStreamTypes"].AUDIO);
|
||
}
|
||
|
||
if (!videoCodec) {
|
||
videoCodec = getParsedTrackCodec(initData.video, _loader_fragment__WEBPACK_IMPORTED_MODULE_3__["ElementaryStreamTypes"].VIDEO);
|
||
}
|
||
|
||
var tracks = {};
|
||
|
||
if (initData.audio && initData.video) {
|
||
tracks.audiovideo = {
|
||
container: 'video/mp4',
|
||
codec: audioCodec + ',' + videoCodec,
|
||
initSegment: initSegment,
|
||
id: 'main'
|
||
};
|
||
} else if (initData.audio) {
|
||
tracks.audio = {
|
||
container: 'audio/mp4',
|
||
codec: audioCodec,
|
||
initSegment: initSegment,
|
||
id: 'audio'
|
||
};
|
||
} else if (initData.video) {
|
||
tracks.video = {
|
||
container: 'video/mp4',
|
||
codec: videoCodec,
|
||
initSegment: initSegment,
|
||
id: 'main'
|
||
};
|
||
} else {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_4__["logger"].warn('[passthrough-remuxer.ts]: initSegment does not contain moov or trak boxes.');
|
||
}
|
||
|
||
this.initTracks = tracks;
|
||
};
|
||
|
||
_proto.remux = function remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset) {
|
||
var _this$initPTS;
|
||
|
||
var initPTS = this.initPTS,
|
||
lastEndDTS = this.lastEndDTS;
|
||
var result = {
|
||
audio: undefined,
|
||
video: undefined,
|
||
text: textTrack,
|
||
id3: id3Track,
|
||
initSegment: undefined
|
||
}; // If we haven't yet set a lastEndDTS, or it was reset, set it to the provided timeOffset. We want to use the
|
||
// lastEndDTS over timeOffset whenever possible; during progressive playback, the media source will not update
|
||
// the media duration (which is what timeOffset is provided as) before we need to process the next chunk.
|
||
|
||
if (!Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(lastEndDTS)) {
|
||
lastEndDTS = this.lastEndDTS = timeOffset || 0;
|
||
} // The binary segment data is added to the videoTrack in the mp4demuxer. We don't check to see if the data is only
|
||
// audio or video (or both); adding it to video was an arbitrary choice.
|
||
|
||
|
||
var data = videoTrack.samples;
|
||
|
||
if (!data || !data.length) {
|
||
return result;
|
||
}
|
||
|
||
var initSegment = {
|
||
initPTS: undefined,
|
||
timescale: 1
|
||
};
|
||
var initData = this.initData;
|
||
|
||
if (!initData || !initData.length) {
|
||
this.generateInitSegment(data);
|
||
initData = this.initData;
|
||
}
|
||
|
||
if (!initData || !initData.length) {
|
||
// We can't remux if the initSegment could not be generated
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_4__["logger"].warn('[passthrough-remuxer.ts]: Failed to generate initSegment.');
|
||
return result;
|
||
}
|
||
|
||
if (this.emitInitSegment) {
|
||
initSegment.tracks = this.initTracks;
|
||
this.emitInitSegment = false;
|
||
}
|
||
|
||
if (!Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(initPTS)) {
|
||
this.initPTS = initSegment.initPTS = initPTS = computeInitPTS(initData, data, lastEndDTS);
|
||
}
|
||
|
||
var duration = Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_2__["getDuration"])(data, initData);
|
||
var startDTS = lastEndDTS;
|
||
var endDTS = duration + startDTS;
|
||
Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_2__["offsetStartDTS"])(initData, data, initPTS);
|
||
|
||
if (duration > 0) {
|
||
this.lastEndDTS = endDTS;
|
||
} else {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_4__["logger"].warn('Duration parsed from mp4 should be greater than zero');
|
||
this.resetNextTimestamp();
|
||
}
|
||
|
||
var hasAudio = !!initData.audio;
|
||
var hasVideo = !!initData.video;
|
||
var type = '';
|
||
|
||
if (hasAudio) {
|
||
type += 'audio';
|
||
}
|
||
|
||
if (hasVideo) {
|
||
type += 'video';
|
||
}
|
||
|
||
var track = {
|
||
data1: data,
|
||
startPTS: startDTS,
|
||
startDTS: startDTS,
|
||
endPTS: endDTS,
|
||
endDTS: endDTS,
|
||
type: type,
|
||
hasAudio: hasAudio,
|
||
hasVideo: hasVideo,
|
||
nb: 1,
|
||
dropped: 0
|
||
};
|
||
result.audio = track.type === 'audio' ? track : undefined;
|
||
result.video = track.type !== 'audio' ? track : undefined;
|
||
result.initSegment = initSegment;
|
||
var initPtsNum = (_this$initPTS = this.initPTS) != null ? _this$initPTS : 0;
|
||
result.id3 = Object(_mp4_remuxer__WEBPACK_IMPORTED_MODULE_1__["flushTextTrackMetadataCueSamples"])(id3Track, timeOffset, initPtsNum, initPtsNum);
|
||
|
||
if (textTrack.samples.length) {
|
||
result.text = Object(_mp4_remuxer__WEBPACK_IMPORTED_MODULE_1__["flushTextTrackUserdataCueSamples"])(textTrack, timeOffset, initPtsNum);
|
||
}
|
||
|
||
return result;
|
||
};
|
||
|
||
return PassThroughRemuxer;
|
||
}();
|
||
|
||
var computeInitPTS = function computeInitPTS(initData, data, timeOffset) {
|
||
return Object(_utils_mp4_tools__WEBPACK_IMPORTED_MODULE_2__["getStartDTS"])(initData, data) - timeOffset;
|
||
};
|
||
|
||
function getParsedTrackCodec(track, type) {
|
||
var parsedCodec = track === null || track === void 0 ? void 0 : track.codec;
|
||
|
||
if (parsedCodec && parsedCodec.length > 4) {
|
||
return parsedCodec;
|
||
} // Since mp4-tools cannot parse full codec string (see 'TODO: Parse codec details'... in mp4-tools)
|
||
// Provide defaults based on codec type
|
||
// This allows for some playback of some fmp4 playlists without CODECS defined in manifest
|
||
|
||
|
||
if (parsedCodec === 'hvc1') {
|
||
return 'hvc1.1.c.L120.90';
|
||
}
|
||
|
||
if (parsedCodec === 'av01') {
|
||
return 'av01.0.04M.08';
|
||
}
|
||
|
||
if (parsedCodec === 'avc1' || type === _loader_fragment__WEBPACK_IMPORTED_MODULE_3__["ElementaryStreamTypes"].VIDEO) {
|
||
return 'avc1.42e01e';
|
||
}
|
||
|
||
return 'mp4a.40.5';
|
||
}
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (PassThroughRemuxer);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/task-loop.ts":
|
||
/*!**************************!*\
|
||
!*** ./src/task-loop.ts ***!
|
||
\**************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return TaskLoop; });
|
||
/**
 * Sub-class specialization of the EventHandler base class.
 *
 * TaskLoop allows a task function to be scheduled (optionally repeatedly) on the main loop,
 * asynchronously, avoiding recursive calls in the same tick.
 *
 * The task itself is implemented in `doTick`. It can be requested and called for single execution
 * using the `tick` method.
 *
 * It is guaranteed that the task execution method (`tick`) only gets called once per main loop "tick",
 * no matter how often it gets requested for execution. Execution in further ticks will be scheduled accordingly.
 *
 * If further execution requests have already been scheduled on the next tick, this can be checked with `hasNextTick`,
 * and cancelled with `clearNextTick`.
 *
 * The task can be scheduled as an interval repeatedly with a period as parameter (see `setInterval`, `clearInterval`).
 *
 * Sub-classes need to implement the `doTick` method, which effectively contains the task execution routine.
 *
 * Further explanations:
 *
 * The base class has a `tick` method that schedules the `doTick` call. It may be called synchronously
 * only for a stack depth of one. On re-entrant calls, subsequent calls are scheduled for the next main loop ticks.
 *
 * When the task execution (`tick` method) is called in a re-entrant way, this is detected and
 * the task execution per call stack is limited to exactly one, while further task processing is
 * scheduled/postponed to the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
 */
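// Minimal usage sketch (added for illustration; `MyPoller` is hypothetical and not part of hls.js):
//
//   class MyPoller extends TaskLoop {
//     doTick() {
//       // perform one unit of work per main-loop "tick"
//     }
//   }
//   var poller = new MyPoller();
//   poller.setInterval(250); // schedule doTick roughly every 250 ms
//   poller.tick();           // or request a single, de-duplicated execution
//   poller.destroy();        // clears any pending timer and interval when done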
var TaskLoop = /*#__PURE__*/function () {
|
||
function TaskLoop() {
|
||
this._boundTick = void 0;
|
||
this._tickTimer = null;
|
||
this._tickInterval = null;
|
||
this._tickCallCount = 0;
|
||
this._boundTick = this.tick.bind(this);
|
||
}
|
||
|
||
var _proto = TaskLoop.prototype;
|
||
|
||
_proto.destroy = function destroy() {
|
||
this.onHandlerDestroying();
|
||
this.onHandlerDestroyed();
|
||
};
|
||
|
||
_proto.onHandlerDestroying = function onHandlerDestroying() {
|
||
// clear all timers before unregistering from event bus
|
||
this.clearNextTick();
|
||
this.clearInterval();
|
||
};
|
||
|
||
_proto.onHandlerDestroyed = function onHandlerDestroyed() {}
|
||
/**
|
||
* @returns {boolean}
|
||
*/
|
||
;
|
||
|
||
_proto.hasInterval = function hasInterval() {
|
||
return !!this._tickInterval;
|
||
}
|
||
/**
|
||
* @returns {boolean}
|
||
*/
|
||
;
|
||
|
||
_proto.hasNextTick = function hasNextTick() {
|
||
return !!this._tickTimer;
|
||
}
|
||
/**
|
||
* @param {number} millis Interval time (ms)
|
||
* @returns {boolean} True when interval has been scheduled, false when already scheduled (no effect)
|
||
*/
|
||
;
|
||
|
||
_proto.setInterval = function setInterval(millis) {
|
||
if (!this._tickInterval) {
|
||
this._tickInterval = self.setInterval(this._boundTick, millis);
|
||
return true;
|
||
}
|
||
|
||
return false;
|
||
}
|
||
/**
|
||
* @returns {boolean} True when interval was cleared, false when none was set (no effect)
|
||
*/
|
||
;
|
||
|
||
_proto.clearInterval = function clearInterval() {
|
||
if (this._tickInterval) {
|
||
self.clearInterval(this._tickInterval);
|
||
this._tickInterval = null;
|
||
return true;
|
||
}
|
||
|
||
return false;
|
||
}
|
||
/**
|
||
* @returns {boolean} True when timeout was cleared, false when none was set (no effect)
|
||
*/
|
||
;
|
||
|
||
_proto.clearNextTick = function clearNextTick() {
|
||
if (this._tickTimer) {
|
||
self.clearTimeout(this._tickTimer);
|
||
this._tickTimer = null;
|
||
return true;
|
||
}
|
||
|
||
return false;
|
||
}
|
||
/**
|
||
* Will call the subclass doTick implementation in this main loop tick
|
||
* or in the next one (via setTimeout(,0)) in case it has already been called
|
||
* in this tick (in case this is a re-entrant call).
|
||
*/
|
||
;
|
||
|
||
_proto.tick = function tick() {
|
||
this._tickCallCount++;
|
||
|
||
if (this._tickCallCount === 1) {
|
||
this.doTick(); // re-entrant call to tick from previous doTick call stack
|
||
// -> schedule a call on the next main loop iteration to process this task processing request
|
||
|
||
if (this._tickCallCount > 1) {
|
||
// make sure only one timer exists at any time at max
|
||
this.tickImmediate();
|
||
}
|
||
|
||
this._tickCallCount = 0;
|
||
}
|
||
};
|
||
|
||
_proto.tickImmediate = function tickImmediate() {
|
||
this.clearNextTick();
|
||
this._tickTimer = self.setTimeout(this._boundTick, 0);
|
||
}
|
||
/**
|
||
* For subclass to implement task logic
|
||
* @abstract
|
||
*/
|
||
;
|
||
|
||
_proto.doTick = function doTick() {};
|
||
|
||
return TaskLoop;
|
||
}();
/***/ }),
|
||
|
||
/***/ "./src/types/cmcd.ts":
|
||
/*!***************************!*\
|
||
!*** ./src/types/cmcd.ts ***!
|
||
\***************************/
|
||
/*! exports provided: CMCDVersion, CMCDObjectType, CMCDStreamingFormat, CMCDStreamType */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "CMCDVersion", function() { return CMCDVersion; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "CMCDObjectType", function() { return CMCDObjectType; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "CMCDStreamingFormat", function() { return CMCDStreamingFormat; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "CMCDStreamType", function() { return CMCDStreamType; });
|
||
/**
|
||
* CMCD spec version
|
||
*/
|
||
var CMCDVersion = 1;
|
||
/**
|
||
* CMCD Object Type
|
||
*/
|
||
|
||
var CMCDObjectType;
|
||
/**
|
||
* CMCD Streaming Format
|
||
*/
|
||
|
||
(function (CMCDObjectType) {
|
||
CMCDObjectType["MANIFEST"] = "m";
|
||
CMCDObjectType["AUDIO"] = "a";
|
||
CMCDObjectType["VIDEO"] = "v";
|
||
CMCDObjectType["MUXED"] = "av";
|
||
CMCDObjectType["INIT"] = "i";
|
||
CMCDObjectType["CAPTION"] = "c";
|
||
CMCDObjectType["TIMED_TEXT"] = "tt";
|
||
CMCDObjectType["KEY"] = "k";
|
||
CMCDObjectType["OTHER"] = "o";
|
||
})(CMCDObjectType || (CMCDObjectType = {}));
|
||
|
||
var CMCDStreamingFormat;
|
||
/**
|
||
* CMCD Streaming Type
|
||
*/
|
||
|
||
(function (CMCDStreamingFormat) {
|
||
CMCDStreamingFormat["DASH"] = "d";
|
||
CMCDStreamingFormat["HLS"] = "h";
|
||
CMCDStreamingFormat["SMOOTH"] = "s";
|
||
CMCDStreamingFormat["OTHER"] = "o";
|
||
})(CMCDStreamingFormat || (CMCDStreamingFormat = {}));
|
||
|
||
var CMCDStreamType;
|
||
/**
|
||
* CMCD Headers
|
||
*/
|
||
|
||
(function (CMCDStreamType) {
|
||
CMCDStreamType["VOD"] = "v";
|
||
CMCDStreamType["LIVE"] = "l";
|
||
})(CMCDStreamType || (CMCDStreamType = {}));
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/types/level.ts":
|
||
/*!****************************!*\
|
||
!*** ./src/types/level.ts ***!
|
||
\****************************/
|
||
/*! exports provided: HlsSkip, getSkipValue, HlsUrlParameters, Level */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "HlsSkip", function() { return HlsSkip; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getSkipValue", function() { return getSkipValue; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "HlsUrlParameters", function() { return HlsUrlParameters; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "Level", function() { return Level; });
|
||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||
|
||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||
|
||
var HlsSkip;
|
||
|
||
(function (HlsSkip) {
|
||
HlsSkip["No"] = "";
|
||
HlsSkip["Yes"] = "YES";
|
||
HlsSkip["v2"] = "v2";
|
||
})(HlsSkip || (HlsSkip = {}));
|
||
|
||
function getSkipValue(details, msn) {
|
||
var canSkipUntil = details.canSkipUntil,
|
||
canSkipDateRanges = details.canSkipDateRanges,
|
||
endSN = details.endSN;
|
||
var snChangeGoal = msn !== undefined ? msn - endSN : 0;
|
||
|
||
if (canSkipUntil && snChangeGoal < canSkipUntil) {
|
||
if (canSkipDateRanges) {
|
||
return HlsSkip.v2;
|
||
}
|
||
|
||
return HlsSkip.Yes;
|
||
}
|
||
|
||
return HlsSkip.No;
|
||
}
|
||
var HlsUrlParameters = /*#__PURE__*/function () {
|
||
function HlsUrlParameters(msn, part, skip) {
|
||
this.msn = void 0;
|
||
this.part = void 0;
|
||
this.skip = void 0;
|
||
this.msn = msn;
|
||
this.part = part;
|
||
this.skip = skip;
|
||
}
|
||
|
||
var _proto = HlsUrlParameters.prototype;
|
||
|
||
_proto.addDirectives = function addDirectives(uri) {
|
||
var url = new self.URL(uri);
|
||
|
||
if (this.msn !== undefined) {
|
||
url.searchParams.set('_HLS_msn', this.msn.toString());
|
||
}
|
||
|
||
if (this.part !== undefined) {
|
||
url.searchParams.set('_HLS_part', this.part.toString());
|
||
}
|
||
|
||
if (this.skip) {
|
||
url.searchParams.set('_HLS_skip', this.skip);
|
||
}
|
||
|
||
return url.toString();
|
||
};
|
||
|
||
return HlsUrlParameters;
|
||
}();
|
||
var Level = /*#__PURE__*/function () {
  function Level(data) {
    this.attrs = void 0;
    this.audioCodec = void 0;
    this.bitrate = void 0;
    this.codecSet = void 0;
    this.height = void 0;
    this.id = void 0;
    this.name = void 0;
    this.videoCodec = void 0;
    this.width = void 0;
    this.unknownCodecs = void 0;
    this.audioGroupIds = void 0;
    this.details = void 0;
    this.fragmentError = 0;
    this.loadError = 0;
    this.loaded = void 0;
    this.realBitrate = 0;
    this.textGroupIds = void 0;
    this.url = void 0;
    this._urlId = 0;
    this.url = [data.url];
    this.attrs = data.attrs;
    this.bitrate = data.bitrate;

    if (data.details) {
      this.details = data.details;
    }

    this.id = data.id || 0;
    this.name = data.name;
    this.width = data.width || 0;
    this.height = data.height || 0;
    this.audioCodec = data.audioCodec;
    this.videoCodec = data.videoCodec;
    this.unknownCodecs = data.unknownCodecs;
    this.codecSet = [data.videoCodec, data.audioCodec].filter(function (c) {
      return c;
    }).join(',').replace(/\.[^.,]+/g, '');
  }

  _createClass(Level, [{
    key: "maxBitrate",
    get: function get() {
      return Math.max(this.realBitrate, this.bitrate);
    }
  }, {
    key: "uri",
    get: function get() {
      return this.url[this._urlId] || '';
    }
  }, {
    key: "urlId",
    get: function get() {
      return this._urlId;
    },
    set: function set(value) {
      var newValue = value % this.url.length;

      if (this._urlId !== newValue) {
        this.details = undefined;
        this._urlId = newValue;
      }
    }
  }]);

  return Level;
}();
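// Editor's illustrative sketch (not part of the bundled hls.js source): a small usage
// sketch of the Level class defined above. All field values are hypothetical placeholders;
// the function is never invoked by the bundle.
function exampleLevelUsage() {
  var level = new Level({
    attrs: {},
    bitrate: 500000,
    width: 1280,
    height: 720,
    url: 'https://example.com/variant-720p.m3u8',
    videoCodec: 'avc1.64001f',
    audioCodec: 'mp4a.40.2'
  });
  // codecSet strips codec profiles: 'avc1,mp4a'
  // maxBitrate is max(realBitrate, bitrate): 500000 until a higher realBitrate is measured
  return [level.codecSet, level.maxBitrate, level.uri];
}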
/***/ }),

/***/ "./src/types/loader.ts":
/*!*****************************!*\
  !*** ./src/types/loader.ts ***!
  \*****************************/
/*! exports provided: PlaylistContextType, PlaylistLevelType */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "PlaylistContextType", function() { return PlaylistContextType; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "PlaylistLevelType", function() { return PlaylistLevelType; });
var PlaylistContextType;

(function (PlaylistContextType) {
  PlaylistContextType["MANIFEST"] = "manifest";
  PlaylistContextType["LEVEL"] = "level";
  PlaylistContextType["AUDIO_TRACK"] = "audioTrack";
  PlaylistContextType["SUBTITLE_TRACK"] = "subtitleTrack";
})(PlaylistContextType || (PlaylistContextType = {}));

var PlaylistLevelType;

(function (PlaylistLevelType) {
  PlaylistLevelType["MAIN"] = "main";
  PlaylistLevelType["AUDIO"] = "audio";
  PlaylistLevelType["SUBTITLE"] = "subtitle";
})(PlaylistLevelType || (PlaylistLevelType = {}));

/***/ }),

/***/ "./src/types/transmuxer.ts":
/*!*********************************!*\
  !*** ./src/types/transmuxer.ts ***!
  \*********************************/
/*! exports provided: ChunkMetadata */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "ChunkMetadata", function() { return ChunkMetadata; });
var ChunkMetadata = function ChunkMetadata(level, sn, id, size, part, partial) {
  if (size === void 0) {
    size = 0;
  }

  if (part === void 0) {
    part = -1;
  }

  if (partial === void 0) {
    partial = false;
  }

  this.level = void 0;
  this.sn = void 0;
  this.part = void 0;
  this.id = void 0;
  this.size = void 0;
  this.partial = void 0;
  this.transmuxing = getNewPerformanceTiming();
  this.buffering = {
    audio: getNewPerformanceTiming(),
    video: getNewPerformanceTiming(),
    audiovideo: getNewPerformanceTiming()
  };
  this.level = level;
  this.sn = sn;
  this.id = id;
  this.size = size;
  this.part = part;
  this.partial = partial;
};

function getNewPerformanceTiming() {
  return {
    start: 0,
    executeStart: 0,
    executeEnd: 0,
    end: 0
  };
}
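// Editor's illustrative sketch (not part of the bundled hls.js source): ChunkMetadata simply
// tags a transmuxed chunk with its level/sn/part ids and zeroed timing slots that are filled
// in later. The argument values below are hypothetical; the function is never invoked.
function exampleChunkMetadataUsage() {
  // level 2, segment sn 15, transmuxer id 1, 188 * 512 bytes, part 0, not partial
  var chunkMeta = new ChunkMetadata(2, 15, 1, 188 * 512, 0, false);
  return chunkMeta.transmuxing.start === 0 && chunkMeta.buffering.video.end === 0;
}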
/***/ }),

/***/ "./src/utils/aes.js":
/*!**************************!*\
  !*** ./src/utils/aes.js ***!
  \**************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

/*! MIT License. Copyright 2015-2018 Richard Moore <me@ricmoo.com>. See LICENSE.txt. */
(function (root) {
  'use strict';

  function checkInt(value) {
    return parseInt(value) === value;
  }

  function checkInts(arrayish) {
    if (!checkInt(arrayish.length)) {
      return false;
    }

    for (var i = 0; i < arrayish.length; i++) {
      if (!checkInt(arrayish[i]) || arrayish[i] < 0 || arrayish[i] > 255) {
        return false;
      }
    }

    return true;
  }

  function coerceArray(arg, copy) {
    // ArrayBuffer view
    if (arg.buffer && arg.name === 'Uint8Array') {
      if (copy) {
        if (arg.slice) {
          arg = arg.slice();
        } else {
          arg = Array.prototype.slice.call(arg);
        }
      }

      return arg;
    } // It's an array; check it is a valid representation of a byte


    if (Array.isArray(arg)) {
      if (!checkInts(arg)) {
        throw new Error('Array contains invalid value: ' + arg);
      }

      return new Uint8Array(arg);
    } // Something else, but behaves like an array (maybe a Buffer? Arguments?)


    if (checkInt(arg.length) && checkInts(arg)) {
      return new Uint8Array(arg);
    }

    throw new Error('unsupported array-like object');
  }

  function createArray(length) {
    return new Uint8Array(length);
  }

  function copyArray(sourceArray, targetArray, targetStart, sourceStart, sourceEnd) {
    if (sourceStart != null || sourceEnd != null) {
      if (sourceArray.slice) {
        sourceArray = sourceArray.slice(sourceStart, sourceEnd);
      } else {
        sourceArray = Array.prototype.slice.call(sourceArray, sourceStart, sourceEnd);
      }
    }

    targetArray.set(sourceArray, targetStart);
  }

  var convertUtf8 = function () {
    function toBytes(text) {
      var result = [];
      var i = 0;
      text = encodeURI(text);

      while (i < text.length) {
        var c = text.charCodeAt(i++); // if it is a % sign, encode the following 2 bytes as a hex value

        if (c === 37) {
          result.push(parseInt(text.substr(i, 2), 16));
          i += 2; // otherwise, just the actual byte
        } else {
          result.push(c);
        }
      }

      return coerceArray(result);
    }

    function fromBytes(bytes) {
      var result = [];
      var i = 0;

      while (i < bytes.length) {
        var c = bytes[i];

        if (c < 128) {
          result.push(String.fromCharCode(c));
          i++;
        } else if (c > 191 && c < 224) {
          result.push(String.fromCharCode((c & 0x1f) << 6 | bytes[i + 1] & 0x3f));
          i += 2;
        } else {
          result.push(String.fromCharCode((c & 0x0f) << 12 | (bytes[i + 1] & 0x3f) << 6 | bytes[i + 2] & 0x3f));
          i += 3;
        }
      }

      return result.join('');
    }

    return {
      toBytes: toBytes,
      fromBytes: fromBytes
    };
  }();

  var convertHex = function () {
    function toBytes(text) {
      var result = [];

      for (var i = 0; i < text.length; i += 2) {
        result.push(parseInt(text.substr(i, 2), 16));
      }

      return result;
    } // http://ixti.net/development/javascript/2011/11/11/base64-encodedecode-of-utf8-in-browser-with-js.html


    var Hex = '0123456789abcdef';

    function fromBytes(bytes) {
      var result = [];

      for (var i = 0; i < bytes.length; i++) {
        var v = bytes[i];
        result.push(Hex[(v & 0xf0) >> 4] + Hex[v & 0x0f]);
      }

      return result.join('');
    }

    return {
      toBytes: toBytes,
      fromBytes: fromBytes
    };
  }(); // Number of rounds by keysize


  var numberOfRounds = {
    16: 10,
    24: 12,
    32: 14
  }; // Round constant words

  var rcon = [0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91]; // S-box and Inverse S-box (S is for Substitution)
var S = [0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76, 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0, 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15, 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75, 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84, 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf, 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8, 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2, 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73, 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb, 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79, 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08, 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a, 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e, 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf, 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16];
var Si = [0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb, 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb, 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e, 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25, 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92, 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84, 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06, 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b, 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73, 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e, 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b, 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4, 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f, 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef, 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61, 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d]; // Transformations for encryption
var T1 = [0xc66363a5, 0xf87c7c84, 0xee777799, 0xf67b7b8d, 0xfff2f20d, 0xd66b6bbd, 0xde6f6fb1, 0x91c5c554, 0x60303050, 0x02010103, 0xce6767a9, 0x562b2b7d, 0xe7fefe19, 0xb5d7d762, 0x4dababe6, 0xec76769a, 0x8fcaca45, 0x1f82829d, 0x89c9c940, 0xfa7d7d87, 0xeffafa15, 0xb25959eb, 0x8e4747c9, 0xfbf0f00b, 0x41adadec, 0xb3d4d467, 0x5fa2a2fd, 0x45afafea, 0x239c9cbf, 0x53a4a4f7, 0xe4727296, 0x9bc0c05b, 0x75b7b7c2, 0xe1fdfd1c, 0x3d9393ae, 0x4c26266a, 0x6c36365a, 0x7e3f3f41, 0xf5f7f702, 0x83cccc4f, 0x6834345c, 0x51a5a5f4, 0xd1e5e534, 0xf9f1f108, 0xe2717193, 0xabd8d873, 0x62313153, 0x2a15153f, 0x0804040c, 0x95c7c752, 0x46232365, 0x9dc3c35e, 0x30181828, 0x379696a1, 0x0a05050f, 0x2f9a9ab5, 0x0e070709, 0x24121236, 0x1b80809b, 0xdfe2e23d, 0xcdebeb26, 0x4e272769, 0x7fb2b2cd, 0xea75759f, 0x1209091b, 0x1d83839e, 0x582c2c74, 0x341a1a2e, 0x361b1b2d, 0xdc6e6eb2, 0xb45a5aee, 0x5ba0a0fb, 0xa45252f6, 0x763b3b4d, 0xb7d6d661, 0x7db3b3ce, 0x5229297b, 0xdde3e33e, 0x5e2f2f71, 0x13848497, 0xa65353f5, 0xb9d1d168, 0x00000000, 0xc1eded2c, 0x40202060, 0xe3fcfc1f, 0x79b1b1c8, 0xb65b5bed, 0xd46a6abe, 0x8dcbcb46, 0x67bebed9, 0x7239394b, 0x944a4ade, 0x984c4cd4, 0xb05858e8, 0x85cfcf4a, 0xbbd0d06b, 0xc5efef2a, 0x4faaaae5, 0xedfbfb16, 0x864343c5, 0x9a4d4dd7, 0x66333355, 0x11858594, 0x8a4545cf, 0xe9f9f910, 0x04020206, 0xfe7f7f81, 0xa05050f0, 0x783c3c44, 0x259f9fba, 0x4ba8a8e3, 0xa25151f3, 0x5da3a3fe, 0x804040c0, 0x058f8f8a, 0x3f9292ad, 0x219d9dbc, 0x70383848, 0xf1f5f504, 0x63bcbcdf, 0x77b6b6c1, 0xafdada75, 0x42212163, 0x20101030, 0xe5ffff1a, 0xfdf3f30e, 0xbfd2d26d, 0x81cdcd4c, 0x180c0c14, 0x26131335, 0xc3ecec2f, 0xbe5f5fe1, 0x359797a2, 0x884444cc, 0x2e171739, 0x93c4c457, 0x55a7a7f2, 0xfc7e7e82, 0x7a3d3d47, 0xc86464ac, 0xba5d5de7, 0x3219192b, 0xe6737395, 0xc06060a0, 0x19818198, 0x9e4f4fd1, 0xa3dcdc7f, 0x44222266, 0x542a2a7e, 0x3b9090ab, 0x0b888883, 0x8c4646ca, 0xc7eeee29, 0x6bb8b8d3, 0x2814143c, 0xa7dede79, 0xbc5e5ee2, 0x160b0b1d, 0xaddbdb76, 0xdbe0e03b, 0x64323256, 0x743a3a4e, 0x140a0a1e, 0x924949db, 0x0c06060a, 0x4824246c, 0xb85c5ce4, 0x9fc2c25d, 0xbdd3d36e, 0x43acacef, 0xc46262a6, 0x399191a8, 0x319595a4, 0xd3e4e437, 0xf279798b, 0xd5e7e732, 0x8bc8c843, 0x6e373759, 0xda6d6db7, 0x018d8d8c, 0xb1d5d564, 0x9c4e4ed2, 0x49a9a9e0, 0xd86c6cb4, 0xac5656fa, 0xf3f4f407, 0xcfeaea25, 0xca6565af, 0xf47a7a8e, 0x47aeaee9, 0x10080818, 0x6fbabad5, 0xf0787888, 0x4a25256f, 0x5c2e2e72, 0x381c1c24, 0x57a6a6f1, 0x73b4b4c7, 0x97c6c651, 0xcbe8e823, 0xa1dddd7c, 0xe874749c, 0x3e1f1f21, 0x964b4bdd, 0x61bdbddc, 0x0d8b8b86, 0x0f8a8a85, 0xe0707090, 0x7c3e3e42, 0x71b5b5c4, 0xcc6666aa, 0x904848d8, 0x06030305, 0xf7f6f601, 0x1c0e0e12, 0xc26161a3, 0x6a35355f, 0xae5757f9, 0x69b9b9d0, 0x17868691, 0x99c1c158, 0x3a1d1d27, 0x279e9eb9, 0xd9e1e138, 0xebf8f813, 0x2b9898b3, 0x22111133, 0xd26969bb, 0xa9d9d970, 0x078e8e89, 0x339494a7, 0x2d9b9bb6, 0x3c1e1e22, 0x15878792, 0xc9e9e920, 0x87cece49, 0xaa5555ff, 0x50282878, 0xa5dfdf7a, 0x038c8c8f, 0x59a1a1f8, 0x09898980, 0x1a0d0d17, 0x65bfbfda, 0xd7e6e631, 0x844242c6, 0xd06868b8, 0x824141c3, 0x299999b0, 0x5a2d2d77, 0x1e0f0f11, 0x7bb0b0cb, 0xa85454fc, 0x6dbbbbd6, 0x2c16163a];
var T2 = [0xa5c66363, 0x84f87c7c, 0x99ee7777, 0x8df67b7b, 0x0dfff2f2, 0xbdd66b6b, 0xb1de6f6f, 0x5491c5c5, 0x50603030, 0x03020101, 0xa9ce6767, 0x7d562b2b, 0x19e7fefe, 0x62b5d7d7, 0xe64dabab, 0x9aec7676, 0x458fcaca, 0x9d1f8282, 0x4089c9c9, 0x87fa7d7d, 0x15effafa, 0xebb25959, 0xc98e4747, 0x0bfbf0f0, 0xec41adad, 0x67b3d4d4, 0xfd5fa2a2, 0xea45afaf, 0xbf239c9c, 0xf753a4a4, 0x96e47272, 0x5b9bc0c0, 0xc275b7b7, 0x1ce1fdfd, 0xae3d9393, 0x6a4c2626, 0x5a6c3636, 0x417e3f3f, 0x02f5f7f7, 0x4f83cccc, 0x5c683434, 0xf451a5a5, 0x34d1e5e5, 0x08f9f1f1, 0x93e27171, 0x73abd8d8, 0x53623131, 0x3f2a1515, 0x0c080404, 0x5295c7c7, 0x65462323, 0x5e9dc3c3, 0x28301818, 0xa1379696, 0x0f0a0505, 0xb52f9a9a, 0x090e0707, 0x36241212, 0x9b1b8080, 0x3ddfe2e2, 0x26cdebeb, 0x694e2727, 0xcd7fb2b2, 0x9fea7575, 0x1b120909, 0x9e1d8383, 0x74582c2c, 0x2e341a1a, 0x2d361b1b, 0xb2dc6e6e, 0xeeb45a5a, 0xfb5ba0a0, 0xf6a45252, 0x4d763b3b, 0x61b7d6d6, 0xce7db3b3, 0x7b522929, 0x3edde3e3, 0x715e2f2f, 0x97138484, 0xf5a65353, 0x68b9d1d1, 0x00000000, 0x2cc1eded, 0x60402020, 0x1fe3fcfc, 0xc879b1b1, 0xedb65b5b, 0xbed46a6a, 0x468dcbcb, 0xd967bebe, 0x4b723939, 0xde944a4a, 0xd4984c4c, 0xe8b05858, 0x4a85cfcf, 0x6bbbd0d0, 0x2ac5efef, 0xe54faaaa, 0x16edfbfb, 0xc5864343, 0xd79a4d4d, 0x55663333, 0x94118585, 0xcf8a4545, 0x10e9f9f9, 0x06040202, 0x81fe7f7f, 0xf0a05050, 0x44783c3c, 0xba259f9f, 0xe34ba8a8, 0xf3a25151, 0xfe5da3a3, 0xc0804040, 0x8a058f8f, 0xad3f9292, 0xbc219d9d, 0x48703838, 0x04f1f5f5, 0xdf63bcbc, 0xc177b6b6, 0x75afdada, 0x63422121, 0x30201010, 0x1ae5ffff, 0x0efdf3f3, 0x6dbfd2d2, 0x4c81cdcd, 0x14180c0c, 0x35261313, 0x2fc3ecec, 0xe1be5f5f, 0xa2359797, 0xcc884444, 0x392e1717, 0x5793c4c4, 0xf255a7a7, 0x82fc7e7e, 0x477a3d3d, 0xacc86464, 0xe7ba5d5d, 0x2b321919, 0x95e67373, 0xa0c06060, 0x98198181, 0xd19e4f4f, 0x7fa3dcdc, 0x66442222, 0x7e542a2a, 0xab3b9090, 0x830b8888, 0xca8c4646, 0x29c7eeee, 0xd36bb8b8, 0x3c281414, 0x79a7dede, 0xe2bc5e5e, 0x1d160b0b, 0x76addbdb, 0x3bdbe0e0, 0x56643232, 0x4e743a3a, 0x1e140a0a, 0xdb924949, 0x0a0c0606, 0x6c482424, 0xe4b85c5c, 0x5d9fc2c2, 0x6ebdd3d3, 0xef43acac, 0xa6c46262, 0xa8399191, 0xa4319595, 0x37d3e4e4, 0x8bf27979, 0x32d5e7e7, 0x438bc8c8, 0x596e3737, 0xb7da6d6d, 0x8c018d8d, 0x64b1d5d5, 0xd29c4e4e, 0xe049a9a9, 0xb4d86c6c, 0xfaac5656, 0x07f3f4f4, 0x25cfeaea, 0xafca6565, 0x8ef47a7a, 0xe947aeae, 0x18100808, 0xd56fbaba, 0x88f07878, 0x6f4a2525, 0x725c2e2e, 0x24381c1c, 0xf157a6a6, 0xc773b4b4, 0x5197c6c6, 0x23cbe8e8, 0x7ca1dddd, 0x9ce87474, 0x213e1f1f, 0xdd964b4b, 0xdc61bdbd, 0x860d8b8b, 0x850f8a8a, 0x90e07070, 0x427c3e3e, 0xc471b5b5, 0xaacc6666, 0xd8904848, 0x05060303, 0x01f7f6f6, 0x121c0e0e, 0xa3c26161, 0x5f6a3535, 0xf9ae5757, 0xd069b9b9, 0x91178686, 0x5899c1c1, 0x273a1d1d, 0xb9279e9e, 0x38d9e1e1, 0x13ebf8f8, 0xb32b9898, 0x33221111, 0xbbd26969, 0x70a9d9d9, 0x89078e8e, 0xa7339494, 0xb62d9b9b, 0x223c1e1e, 0x92158787, 0x20c9e9e9, 0x4987cece, 0xffaa5555, 0x78502828, 0x7aa5dfdf, 0x8f038c8c, 0xf859a1a1, 0x80098989, 0x171a0d0d, 0xda65bfbf, 0x31d7e6e6, 0xc6844242, 0xb8d06868, 0xc3824141, 0xb0299999, 0x775a2d2d, 0x111e0f0f, 0xcb7bb0b0, 0xfca85454, 0xd66dbbbb, 0x3a2c1616];
var T3 = [0x63a5c663, 0x7c84f87c, 0x7799ee77, 0x7b8df67b, 0xf20dfff2, 0x6bbdd66b, 0x6fb1de6f, 0xc55491c5, 0x30506030, 0x01030201, 0x67a9ce67, 0x2b7d562b, 0xfe19e7fe, 0xd762b5d7, 0xabe64dab, 0x769aec76, 0xca458fca, 0x829d1f82, 0xc94089c9, 0x7d87fa7d, 0xfa15effa, 0x59ebb259, 0x47c98e47, 0xf00bfbf0, 0xadec41ad, 0xd467b3d4, 0xa2fd5fa2, 0xafea45af, 0x9cbf239c, 0xa4f753a4, 0x7296e472, 0xc05b9bc0, 0xb7c275b7, 0xfd1ce1fd, 0x93ae3d93, 0x266a4c26, 0x365a6c36, 0x3f417e3f, 0xf702f5f7, 0xcc4f83cc, 0x345c6834, 0xa5f451a5, 0xe534d1e5, 0xf108f9f1, 0x7193e271, 0xd873abd8, 0x31536231, 0x153f2a15, 0x040c0804, 0xc75295c7, 0x23654623, 0xc35e9dc3, 0x18283018, 0x96a13796, 0x050f0a05, 0x9ab52f9a, 0x07090e07, 0x12362412, 0x809b1b80, 0xe23ddfe2, 0xeb26cdeb, 0x27694e27, 0xb2cd7fb2, 0x759fea75, 0x091b1209, 0x839e1d83, 0x2c74582c, 0x1a2e341a, 0x1b2d361b, 0x6eb2dc6e, 0x5aeeb45a, 0xa0fb5ba0, 0x52f6a452, 0x3b4d763b, 0xd661b7d6, 0xb3ce7db3, 0x297b5229, 0xe33edde3, 0x2f715e2f, 0x84971384, 0x53f5a653, 0xd168b9d1, 0x00000000, 0xed2cc1ed, 0x20604020, 0xfc1fe3fc, 0xb1c879b1, 0x5bedb65b, 0x6abed46a, 0xcb468dcb, 0xbed967be, 0x394b7239, 0x4ade944a, 0x4cd4984c, 0x58e8b058, 0xcf4a85cf, 0xd06bbbd0, 0xef2ac5ef, 0xaae54faa, 0xfb16edfb, 0x43c58643, 0x4dd79a4d, 0x33556633, 0x85941185, 0x45cf8a45, 0xf910e9f9, 0x02060402, 0x7f81fe7f, 0x50f0a050, 0x3c44783c, 0x9fba259f, 0xa8e34ba8, 0x51f3a251, 0xa3fe5da3, 0x40c08040, 0x8f8a058f, 0x92ad3f92, 0x9dbc219d, 0x38487038, 0xf504f1f5, 0xbcdf63bc, 0xb6c177b6, 0xda75afda, 0x21634221, 0x10302010, 0xff1ae5ff, 0xf30efdf3, 0xd26dbfd2, 0xcd4c81cd, 0x0c14180c, 0x13352613, 0xec2fc3ec, 0x5fe1be5f, 0x97a23597, 0x44cc8844, 0x17392e17, 0xc45793c4, 0xa7f255a7, 0x7e82fc7e, 0x3d477a3d, 0x64acc864, 0x5de7ba5d, 0x192b3219, 0x7395e673, 0x60a0c060, 0x81981981, 0x4fd19e4f, 0xdc7fa3dc, 0x22664422, 0x2a7e542a, 0x90ab3b90, 0x88830b88, 0x46ca8c46, 0xee29c7ee, 0xb8d36bb8, 0x143c2814, 0xde79a7de, 0x5ee2bc5e, 0x0b1d160b, 0xdb76addb, 0xe03bdbe0, 0x32566432, 0x3a4e743a, 0x0a1e140a, 0x49db9249, 0x060a0c06, 0x246c4824, 0x5ce4b85c, 0xc25d9fc2, 0xd36ebdd3, 0xacef43ac, 0x62a6c462, 0x91a83991, 0x95a43195, 0xe437d3e4, 0x798bf279, 0xe732d5e7, 0xc8438bc8, 0x37596e37, 0x6db7da6d, 0x8d8c018d, 0xd564b1d5, 0x4ed29c4e, 0xa9e049a9, 0x6cb4d86c, 0x56faac56, 0xf407f3f4, 0xea25cfea, 0x65afca65, 0x7a8ef47a, 0xaee947ae, 0x08181008, 0xbad56fba, 0x7888f078, 0x256f4a25, 0x2e725c2e, 0x1c24381c, 0xa6f157a6, 0xb4c773b4, 0xc65197c6, 0xe823cbe8, 0xdd7ca1dd, 0x749ce874, 0x1f213e1f, 0x4bdd964b, 0xbddc61bd, 0x8b860d8b, 0x8a850f8a, 0x7090e070, 0x3e427c3e, 0xb5c471b5, 0x66aacc66, 0x48d89048, 0x03050603, 0xf601f7f6, 0x0e121c0e, 0x61a3c261, 0x355f6a35, 0x57f9ae57, 0xb9d069b9, 0x86911786, 0xc15899c1, 0x1d273a1d, 0x9eb9279e, 0xe138d9e1, 0xf813ebf8, 0x98b32b98, 0x11332211, 0x69bbd269, 0xd970a9d9, 0x8e89078e, 0x94a73394, 0x9bb62d9b, 0x1e223c1e, 0x87921587, 0xe920c9e9, 0xce4987ce, 0x55ffaa55, 0x28785028, 0xdf7aa5df, 0x8c8f038c, 0xa1f859a1, 0x89800989, 0x0d171a0d, 0xbfda65bf, 0xe631d7e6, 0x42c68442, 0x68b8d068, 0x41c38241, 0x99b02999, 0x2d775a2d, 0x0f111e0f, 0xb0cb7bb0, 0x54fca854, 0xbbd66dbb, 0x163a2c16];
var T4 = [0x6363a5c6, 0x7c7c84f8, 0x777799ee, 0x7b7b8df6, 0xf2f20dff, 0x6b6bbdd6, 0x6f6fb1de, 0xc5c55491, 0x30305060, 0x01010302, 0x6767a9ce, 0x2b2b7d56, 0xfefe19e7, 0xd7d762b5, 0xababe64d, 0x76769aec, 0xcaca458f, 0x82829d1f, 0xc9c94089, 0x7d7d87fa, 0xfafa15ef, 0x5959ebb2, 0x4747c98e, 0xf0f00bfb, 0xadadec41, 0xd4d467b3, 0xa2a2fd5f, 0xafafea45, 0x9c9cbf23, 0xa4a4f753, 0x727296e4, 0xc0c05b9b, 0xb7b7c275, 0xfdfd1ce1, 0x9393ae3d, 0x26266a4c, 0x36365a6c, 0x3f3f417e, 0xf7f702f5, 0xcccc4f83, 0x34345c68, 0xa5a5f451, 0xe5e534d1, 0xf1f108f9, 0x717193e2, 0xd8d873ab, 0x31315362, 0x15153f2a, 0x04040c08, 0xc7c75295, 0x23236546, 0xc3c35e9d, 0x18182830, 0x9696a137, 0x05050f0a, 0x9a9ab52f, 0x0707090e, 0x12123624, 0x80809b1b, 0xe2e23ddf, 0xebeb26cd, 0x2727694e, 0xb2b2cd7f, 0x75759fea, 0x09091b12, 0x83839e1d, 0x2c2c7458, 0x1a1a2e34, 0x1b1b2d36, 0x6e6eb2dc, 0x5a5aeeb4, 0xa0a0fb5b, 0x5252f6a4, 0x3b3b4d76, 0xd6d661b7, 0xb3b3ce7d, 0x29297b52, 0xe3e33edd, 0x2f2f715e, 0x84849713, 0x5353f5a6, 0xd1d168b9, 0x00000000, 0xeded2cc1, 0x20206040, 0xfcfc1fe3, 0xb1b1c879, 0x5b5bedb6, 0x6a6abed4, 0xcbcb468d, 0xbebed967, 0x39394b72, 0x4a4ade94, 0x4c4cd498, 0x5858e8b0, 0xcfcf4a85, 0xd0d06bbb, 0xefef2ac5, 0xaaaae54f, 0xfbfb16ed, 0x4343c586, 0x4d4dd79a, 0x33335566, 0x85859411, 0x4545cf8a, 0xf9f910e9, 0x02020604, 0x7f7f81fe, 0x5050f0a0, 0x3c3c4478, 0x9f9fba25, 0xa8a8e34b, 0x5151f3a2, 0xa3a3fe5d, 0x4040c080, 0x8f8f8a05, 0x9292ad3f, 0x9d9dbc21, 0x38384870, 0xf5f504f1, 0xbcbcdf63, 0xb6b6c177, 0xdada75af, 0x21216342, 0x10103020, 0xffff1ae5, 0xf3f30efd, 0xd2d26dbf, 0xcdcd4c81, 0x0c0c1418, 0x13133526, 0xecec2fc3, 0x5f5fe1be, 0x9797a235, 0x4444cc88, 0x1717392e, 0xc4c45793, 0xa7a7f255, 0x7e7e82fc, 0x3d3d477a, 0x6464acc8, 0x5d5de7ba, 0x19192b32, 0x737395e6, 0x6060a0c0, 0x81819819, 0x4f4fd19e, 0xdcdc7fa3, 0x22226644, 0x2a2a7e54, 0x9090ab3b, 0x8888830b, 0x4646ca8c, 0xeeee29c7, 0xb8b8d36b, 0x14143c28, 0xdede79a7, 0x5e5ee2bc, 0x0b0b1d16, 0xdbdb76ad, 0xe0e03bdb, 0x32325664, 0x3a3a4e74, 0x0a0a1e14, 0x4949db92, 0x06060a0c, 0x24246c48, 0x5c5ce4b8, 0xc2c25d9f, 0xd3d36ebd, 0xacacef43, 0x6262a6c4, 0x9191a839, 0x9595a431, 0xe4e437d3, 0x79798bf2, 0xe7e732d5, 0xc8c8438b, 0x3737596e, 0x6d6db7da, 0x8d8d8c01, 0xd5d564b1, 0x4e4ed29c, 0xa9a9e049, 0x6c6cb4d8, 0x5656faac, 0xf4f407f3, 0xeaea25cf, 0x6565afca, 0x7a7a8ef4, 0xaeaee947, 0x08081810, 0xbabad56f, 0x787888f0, 0x25256f4a, 0x2e2e725c, 0x1c1c2438, 0xa6a6f157, 0xb4b4c773, 0xc6c65197, 0xe8e823cb, 0xdddd7ca1, 0x74749ce8, 0x1f1f213e, 0x4b4bdd96, 0xbdbddc61, 0x8b8b860d, 0x8a8a850f, 0x707090e0, 0x3e3e427c, 0xb5b5c471, 0x6666aacc, 0x4848d890, 0x03030506, 0xf6f601f7, 0x0e0e121c, 0x6161a3c2, 0x35355f6a, 0x5757f9ae, 0xb9b9d069, 0x86869117, 0xc1c15899, 0x1d1d273a, 0x9e9eb927, 0xe1e138d9, 0xf8f813eb, 0x9898b32b, 0x11113322, 0x6969bbd2, 0xd9d970a9, 0x8e8e8907, 0x9494a733, 0x9b9bb62d, 0x1e1e223c, 0x87879215, 0xe9e920c9, 0xcece4987, 0x5555ffaa, 0x28287850, 0xdfdf7aa5, 0x8c8c8f03, 0xa1a1f859, 0x89898009, 0x0d0d171a, 0xbfbfda65, 0xe6e631d7, 0x4242c684, 0x6868b8d0, 0x4141c382, 0x9999b029, 0x2d2d775a, 0x0f0f111e, 0xb0b0cb7b, 0x5454fca8, 0xbbbbd66d, 0x16163a2c]; // Transformations for decryption
var T5 = [0x51f4a750, 0x7e416553, 0x1a17a4c3, 0x3a275e96, 0x3bab6bcb, 0x1f9d45f1, 0xacfa58ab, 0x4be30393, 0x2030fa55, 0xad766df6, 0x88cc7691, 0xf5024c25, 0x4fe5d7fc, 0xc52acbd7, 0x26354480, 0xb562a38f, 0xdeb15a49, 0x25ba1b67, 0x45ea0e98, 0x5dfec0e1, 0xc32f7502, 0x814cf012, 0x8d4697a3, 0x6bd3f9c6, 0x038f5fe7, 0x15929c95, 0xbf6d7aeb, 0x955259da, 0xd4be832d, 0x587421d3, 0x49e06929, 0x8ec9c844, 0x75c2896a, 0xf48e7978, 0x99583e6b, 0x27b971dd, 0xbee14fb6, 0xf088ad17, 0xc920ac66, 0x7dce3ab4, 0x63df4a18, 0xe51a3182, 0x97513360, 0x62537f45, 0xb16477e0, 0xbb6bae84, 0xfe81a01c, 0xf9082b94, 0x70486858, 0x8f45fd19, 0x94de6c87, 0x527bf8b7, 0xab73d323, 0x724b02e2, 0xe31f8f57, 0x6655ab2a, 0xb2eb2807, 0x2fb5c203, 0x86c57b9a, 0xd33708a5, 0x302887f2, 0x23bfa5b2, 0x02036aba, 0xed16825c, 0x8acf1c2b, 0xa779b492, 0xf307f2f0, 0x4e69e2a1, 0x65daf4cd, 0x0605bed5, 0xd134621f, 0xc4a6fe8a, 0x342e539d, 0xa2f355a0, 0x058ae132, 0xa4f6eb75, 0x0b83ec39, 0x4060efaa, 0x5e719f06, 0xbd6e1051, 0x3e218af9, 0x96dd063d, 0xdd3e05ae, 0x4de6bd46, 0x91548db5, 0x71c45d05, 0x0406d46f, 0x605015ff, 0x1998fb24, 0xd6bde997, 0x894043cc, 0x67d99e77, 0xb0e842bd, 0x07898b88, 0xe7195b38, 0x79c8eedb, 0xa17c0a47, 0x7c420fe9, 0xf8841ec9, 0x00000000, 0x09808683, 0x322bed48, 0x1e1170ac, 0x6c5a724e, 0xfd0efffb, 0x0f853856, 0x3daed51e, 0x362d3927, 0x0a0fd964, 0x685ca621, 0x9b5b54d1, 0x24362e3a, 0x0c0a67b1, 0x9357e70f, 0xb4ee96d2, 0x1b9b919e, 0x80c0c54f, 0x61dc20a2, 0x5a774b69, 0x1c121a16, 0xe293ba0a, 0xc0a02ae5, 0x3c22e043, 0x121b171d, 0x0e090d0b, 0xf28bc7ad, 0x2db6a8b9, 0x141ea9c8, 0x57f11985, 0xaf75074c, 0xee99ddbb, 0xa37f60fd, 0xf701269f, 0x5c72f5bc, 0x44663bc5, 0x5bfb7e34, 0x8b432976, 0xcb23c6dc, 0xb6edfc68, 0xb8e4f163, 0xd731dcca, 0x42638510, 0x13972240, 0x84c61120, 0x854a247d, 0xd2bb3df8, 0xaef93211, 0xc729a16d, 0x1d9e2f4b, 0xdcb230f3, 0x0d8652ec, 0x77c1e3d0, 0x2bb3166c, 0xa970b999, 0x119448fa, 0x47e96422, 0xa8fc8cc4, 0xa0f03f1a, 0x567d2cd8, 0x223390ef, 0x87494ec7, 0xd938d1c1, 0x8ccaa2fe, 0x98d40b36, 0xa6f581cf, 0xa57ade28, 0xdab78e26, 0x3fadbfa4, 0x2c3a9de4, 0x5078920d, 0x6a5fcc9b, 0x547e4662, 0xf68d13c2, 0x90d8b8e8, 0x2e39f75e, 0x82c3aff5, 0x9f5d80be, 0x69d0937c, 0x6fd52da9, 0xcf2512b3, 0xc8ac993b, 0x10187da7, 0xe89c636e, 0xdb3bbb7b, 0xcd267809, 0x6e5918f4, 0xec9ab701, 0x834f9aa8, 0xe6956e65, 0xaaffe67e, 0x21bccf08, 0xef15e8e6, 0xbae79bd9, 0x4a6f36ce, 0xea9f09d4, 0x29b07cd6, 0x31a4b2af, 0x2a3f2331, 0xc6a59430, 0x35a266c0, 0x744ebc37, 0xfc82caa6, 0xe090d0b0, 0x33a7d815, 0xf104984a, 0x41ecdaf7, 0x7fcd500e, 0x1791f62f, 0x764dd68d, 0x43efb04d, 0xccaa4d54, 0xe49604df, 0x9ed1b5e3, 0x4c6a881b, 0xc12c1fb8, 0x4665517f, 0x9d5eea04, 0x018c355d, 0xfa877473, 0xfb0b412e, 0xb3671d5a, 0x92dbd252, 0xe9105633, 0x6dd64713, 0x9ad7618c, 0x37a10c7a, 0x59f8148e, 0xeb133c89, 0xcea927ee, 0xb761c935, 0xe11ce5ed, 0x7a47b13c, 0x9cd2df59, 0x55f2733f, 0x1814ce79, 0x73c737bf, 0x53f7cdea, 0x5ffdaa5b, 0xdf3d6f14, 0x7844db86, 0xcaaff381, 0xb968c43e, 0x3824342c, 0xc2a3405f, 0x161dc372, 0xbce2250c, 0x283c498b, 0xff0d9541, 0x39a80171, 0x080cb3de, 0xd8b4e49c, 0x6456c190, 0x7bcb8461, 0xd532b670, 0x486c5c74, 0xd0b85742];
var T6 = [0x5051f4a7, 0x537e4165, 0xc31a17a4, 0x963a275e, 0xcb3bab6b, 0xf11f9d45, 0xabacfa58, 0x934be303, 0x552030fa, 0xf6ad766d, 0x9188cc76, 0x25f5024c, 0xfc4fe5d7, 0xd7c52acb, 0x80263544, 0x8fb562a3, 0x49deb15a, 0x6725ba1b, 0x9845ea0e, 0xe15dfec0, 0x02c32f75, 0x12814cf0, 0xa38d4697, 0xc66bd3f9, 0xe7038f5f, 0x9515929c, 0xebbf6d7a, 0xda955259, 0x2dd4be83, 0xd3587421, 0x2949e069, 0x448ec9c8, 0x6a75c289, 0x78f48e79, 0x6b99583e, 0xdd27b971, 0xb6bee14f, 0x17f088ad, 0x66c920ac, 0xb47dce3a, 0x1863df4a, 0x82e51a31, 0x60975133, 0x4562537f, 0xe0b16477, 0x84bb6bae, 0x1cfe81a0, 0x94f9082b, 0x58704868, 0x198f45fd, 0x8794de6c, 0xb7527bf8, 0x23ab73d3, 0xe2724b02, 0x57e31f8f, 0x2a6655ab, 0x07b2eb28, 0x032fb5c2, 0x9a86c57b, 0xa5d33708, 0xf2302887, 0xb223bfa5, 0xba02036a, 0x5ced1682, 0x2b8acf1c, 0x92a779b4, 0xf0f307f2, 0xa14e69e2, 0xcd65daf4, 0xd50605be, 0x1fd13462, 0x8ac4a6fe, 0x9d342e53, 0xa0a2f355, 0x32058ae1, 0x75a4f6eb, 0x390b83ec, 0xaa4060ef, 0x065e719f, 0x51bd6e10, 0xf93e218a, 0x3d96dd06, 0xaedd3e05, 0x464de6bd, 0xb591548d, 0x0571c45d, 0x6f0406d4, 0xff605015, 0x241998fb, 0x97d6bde9, 0xcc894043, 0x7767d99e, 0xbdb0e842, 0x8807898b, 0x38e7195b, 0xdb79c8ee, 0x47a17c0a, 0xe97c420f, 0xc9f8841e, 0x00000000, 0x83098086, 0x48322bed, 0xac1e1170, 0x4e6c5a72, 0xfbfd0eff, 0x560f8538, 0x1e3daed5, 0x27362d39, 0x640a0fd9, 0x21685ca6, 0xd19b5b54, 0x3a24362e, 0xb10c0a67, 0x0f9357e7, 0xd2b4ee96, 0x9e1b9b91, 0x4f80c0c5, 0xa261dc20, 0x695a774b, 0x161c121a, 0x0ae293ba, 0xe5c0a02a, 0x433c22e0, 0x1d121b17, 0x0b0e090d, 0xadf28bc7, 0xb92db6a8, 0xc8141ea9, 0x8557f119, 0x4caf7507, 0xbbee99dd, 0xfda37f60, 0x9ff70126, 0xbc5c72f5, 0xc544663b, 0x345bfb7e, 0x768b4329, 0xdccb23c6, 0x68b6edfc, 0x63b8e4f1, 0xcad731dc, 0x10426385, 0x40139722, 0x2084c611, 0x7d854a24, 0xf8d2bb3d, 0x11aef932, 0x6dc729a1, 0x4b1d9e2f, 0xf3dcb230, 0xec0d8652, 0xd077c1e3, 0x6c2bb316, 0x99a970b9, 0xfa119448, 0x2247e964, 0xc4a8fc8c, 0x1aa0f03f, 0xd8567d2c, 0xef223390, 0xc787494e, 0xc1d938d1, 0xfe8ccaa2, 0x3698d40b, 0xcfa6f581, 0x28a57ade, 0x26dab78e, 0xa43fadbf, 0xe42c3a9d, 0x0d507892, 0x9b6a5fcc, 0x62547e46, 0xc2f68d13, 0xe890d8b8, 0x5e2e39f7, 0xf582c3af, 0xbe9f5d80, 0x7c69d093, 0xa96fd52d, 0xb3cf2512, 0x3bc8ac99, 0xa710187d, 0x6ee89c63, 0x7bdb3bbb, 0x09cd2678, 0xf46e5918, 0x01ec9ab7, 0xa8834f9a, 0x65e6956e, 0x7eaaffe6, 0x0821bccf, 0xe6ef15e8, 0xd9bae79b, 0xce4a6f36, 0xd4ea9f09, 0xd629b07c, 0xaf31a4b2, 0x312a3f23, 0x30c6a594, 0xc035a266, 0x37744ebc, 0xa6fc82ca, 0xb0e090d0, 0x1533a7d8, 0x4af10498, 0xf741ecda, 0x0e7fcd50, 0x2f1791f6, 0x8d764dd6, 0x4d43efb0, 0x54ccaa4d, 0xdfe49604, 0xe39ed1b5, 0x1b4c6a88, 0xb8c12c1f, 0x7f466551, 0x049d5eea, 0x5d018c35, 0x73fa8774, 0x2efb0b41, 0x5ab3671d, 0x5292dbd2, 0x33e91056, 0x136dd647, 0x8c9ad761, 0x7a37a10c, 0x8e59f814, 0x89eb133c, 0xeecea927, 0x35b761c9, 0xede11ce5, 0x3c7a47b1, 0x599cd2df, 0x3f55f273, 0x791814ce, 0xbf73c737, 0xea53f7cd, 0x5b5ffdaa, 0x14df3d6f, 0x867844db, 0x81caaff3, 0x3eb968c4, 0x2c382434, 0x5fc2a340, 0x72161dc3, 0x0cbce225, 0x8b283c49, 0x41ff0d95, 0x7139a801, 0xde080cb3, 0x9cd8b4e4, 0x906456c1, 0x617bcb84, 0x70d532b6, 0x74486c5c, 0x42d0b857];
var T7 = [0xa75051f4, 0x65537e41, 0xa4c31a17, 0x5e963a27, 0x6bcb3bab, 0x45f11f9d, 0x58abacfa, 0x03934be3, 0xfa552030, 0x6df6ad76, 0x769188cc, 0x4c25f502, 0xd7fc4fe5, 0xcbd7c52a, 0x44802635, 0xa38fb562, 0x5a49deb1, 0x1b6725ba, 0x0e9845ea, 0xc0e15dfe, 0x7502c32f, 0xf012814c, 0x97a38d46, 0xf9c66bd3, 0x5fe7038f, 0x9c951592, 0x7aebbf6d, 0x59da9552, 0x832dd4be, 0x21d35874, 0x692949e0, 0xc8448ec9, 0x896a75c2, 0x7978f48e, 0x3e6b9958, 0x71dd27b9, 0x4fb6bee1, 0xad17f088, 0xac66c920, 0x3ab47dce, 0x4a1863df, 0x3182e51a, 0x33609751, 0x7f456253, 0x77e0b164, 0xae84bb6b, 0xa01cfe81, 0x2b94f908, 0x68587048, 0xfd198f45, 0x6c8794de, 0xf8b7527b, 0xd323ab73, 0x02e2724b, 0x8f57e31f, 0xab2a6655, 0x2807b2eb, 0xc2032fb5, 0x7b9a86c5, 0x08a5d337, 0x87f23028, 0xa5b223bf, 0x6aba0203, 0x825ced16, 0x1c2b8acf, 0xb492a779, 0xf2f0f307, 0xe2a14e69, 0xf4cd65da, 0xbed50605, 0x621fd134, 0xfe8ac4a6, 0x539d342e, 0x55a0a2f3, 0xe132058a, 0xeb75a4f6, 0xec390b83, 0xefaa4060, 0x9f065e71, 0x1051bd6e, 0x8af93e21, 0x063d96dd, 0x05aedd3e, 0xbd464de6, 0x8db59154, 0x5d0571c4, 0xd46f0406, 0x15ff6050, 0xfb241998, 0xe997d6bd, 0x43cc8940, 0x9e7767d9, 0x42bdb0e8, 0x8b880789, 0x5b38e719, 0xeedb79c8, 0x0a47a17c, 0x0fe97c42, 0x1ec9f884, 0x00000000, 0x86830980, 0xed48322b, 0x70ac1e11, 0x724e6c5a, 0xfffbfd0e, 0x38560f85, 0xd51e3dae, 0x3927362d, 0xd9640a0f, 0xa621685c, 0x54d19b5b, 0x2e3a2436, 0x67b10c0a, 0xe70f9357, 0x96d2b4ee, 0x919e1b9b, 0xc54f80c0, 0x20a261dc, 0x4b695a77, 0x1a161c12, 0xba0ae293, 0x2ae5c0a0, 0xe0433c22, 0x171d121b, 0x0d0b0e09, 0xc7adf28b, 0xa8b92db6, 0xa9c8141e, 0x198557f1, 0x074caf75, 0xddbbee99, 0x60fda37f, 0x269ff701, 0xf5bc5c72, 0x3bc54466, 0x7e345bfb, 0x29768b43, 0xc6dccb23, 0xfc68b6ed, 0xf163b8e4, 0xdccad731, 0x85104263, 0x22401397, 0x112084c6, 0x247d854a, 0x3df8d2bb, 0x3211aef9, 0xa16dc729, 0x2f4b1d9e, 0x30f3dcb2, 0x52ec0d86, 0xe3d077c1, 0x166c2bb3, 0xb999a970, 0x48fa1194, 0x642247e9, 0x8cc4a8fc, 0x3f1aa0f0, 0x2cd8567d, 0x90ef2233, 0x4ec78749, 0xd1c1d938, 0xa2fe8cca, 0x0b3698d4, 0x81cfa6f5, 0xde28a57a, 0x8e26dab7, 0xbfa43fad, 0x9de42c3a, 0x920d5078, 0xcc9b6a5f, 0x4662547e, 0x13c2f68d, 0xb8e890d8, 0xf75e2e39, 0xaff582c3, 0x80be9f5d, 0x937c69d0, 0x2da96fd5, 0x12b3cf25, 0x993bc8ac, 0x7da71018, 0x636ee89c, 0xbb7bdb3b, 0x7809cd26, 0x18f46e59, 0xb701ec9a, 0x9aa8834f, 0x6e65e695, 0xe67eaaff, 0xcf0821bc, 0xe8e6ef15, 0x9bd9bae7, 0x36ce4a6f, 0x09d4ea9f, 0x7cd629b0, 0xb2af31a4, 0x23312a3f, 0x9430c6a5, 0x66c035a2, 0xbc37744e, 0xcaa6fc82, 0xd0b0e090, 0xd81533a7, 0x984af104, 0xdaf741ec, 0x500e7fcd, 0xf62f1791, 0xd68d764d, 0xb04d43ef, 0x4d54ccaa, 0x04dfe496, 0xb5e39ed1, 0x881b4c6a, 0x1fb8c12c, 0x517f4665, 0xea049d5e, 0x355d018c, 0x7473fa87, 0x412efb0b, 0x1d5ab367, 0xd25292db, 0x5633e910, 0x47136dd6, 0x618c9ad7, 0x0c7a37a1, 0x148e59f8, 0x3c89eb13, 0x27eecea9, 0xc935b761, 0xe5ede11c, 0xb13c7a47, 0xdf599cd2, 0x733f55f2, 0xce791814, 0x37bf73c7, 0xcdea53f7, 0xaa5b5ffd, 0x6f14df3d, 0xdb867844, 0xf381caaf, 0xc43eb968, 0x342c3824, 0x405fc2a3, 0xc372161d, 0x250cbce2, 0x498b283c, 0x9541ff0d, 0x017139a8, 0xb3de080c, 0xe49cd8b4, 0xc1906456, 0x84617bcb, 0xb670d532, 0x5c74486c, 0x5742d0b8];
var T8 = [0xf4a75051, 0x4165537e, 0x17a4c31a, 0x275e963a, 0xab6bcb3b, 0x9d45f11f, 0xfa58abac, 0xe303934b, 0x30fa5520, 0x766df6ad, 0xcc769188, 0x024c25f5, 0xe5d7fc4f, 0x2acbd7c5, 0x35448026, 0x62a38fb5, 0xb15a49de, 0xba1b6725, 0xea0e9845, 0xfec0e15d, 0x2f7502c3, 0x4cf01281, 0x4697a38d, 0xd3f9c66b, 0x8f5fe703, 0x929c9515, 0x6d7aebbf, 0x5259da95, 0xbe832dd4, 0x7421d358, 0xe0692949, 0xc9c8448e, 0xc2896a75, 0x8e7978f4, 0x583e6b99, 0xb971dd27, 0xe14fb6be, 0x88ad17f0, 0x20ac66c9, 0xce3ab47d, 0xdf4a1863, 0x1a3182e5, 0x51336097, 0x537f4562, 0x6477e0b1, 0x6bae84bb, 0x81a01cfe, 0x082b94f9, 0x48685870, 0x45fd198f, 0xde6c8794, 0x7bf8b752, 0x73d323ab, 0x4b02e272, 0x1f8f57e3, 0x55ab2a66, 0xeb2807b2, 0xb5c2032f, 0xc57b9a86, 0x3708a5d3, 0x2887f230, 0xbfa5b223, 0x036aba02, 0x16825ced, 0xcf1c2b8a, 0x79b492a7, 0x07f2f0f3, 0x69e2a14e, 0xdaf4cd65, 0x05bed506, 0x34621fd1, 0xa6fe8ac4, 0x2e539d34, 0xf355a0a2, 0x8ae13205, 0xf6eb75a4, 0x83ec390b, 0x60efaa40, 0x719f065e, 0x6e1051bd, 0x218af93e, 0xdd063d96, 0x3e05aedd, 0xe6bd464d, 0x548db591, 0xc45d0571, 0x06d46f04, 0x5015ff60, 0x98fb2419, 0xbde997d6, 0x4043cc89, 0xd99e7767, 0xe842bdb0, 0x898b8807, 0x195b38e7, 0xc8eedb79, 0x7c0a47a1, 0x420fe97c, 0x841ec9f8, 0x00000000, 0x80868309, 0x2bed4832, 0x1170ac1e, 0x5a724e6c, 0x0efffbfd, 0x8538560f, 0xaed51e3d, 0x2d392736, 0x0fd9640a, 0x5ca62168, 0x5b54d19b, 0x362e3a24, 0x0a67b10c, 0x57e70f93, 0xee96d2b4, 0x9b919e1b, 0xc0c54f80, 0xdc20a261, 0x774b695a, 0x121a161c, 0x93ba0ae2, 0xa02ae5c0, 0x22e0433c, 0x1b171d12, 0x090d0b0e, 0x8bc7adf2, 0xb6a8b92d, 0x1ea9c814, 0xf1198557, 0x75074caf, 0x99ddbbee, 0x7f60fda3, 0x01269ff7, 0x72f5bc5c, 0x663bc544, 0xfb7e345b, 0x4329768b, 0x23c6dccb, 0xedfc68b6, 0xe4f163b8, 0x31dccad7, 0x63851042, 0x97224013, 0xc6112084, 0x4a247d85, 0xbb3df8d2, 0xf93211ae, 0x29a16dc7, 0x9e2f4b1d, 0xb230f3dc, 0x8652ec0d, 0xc1e3d077, 0xb3166c2b, 0x70b999a9, 0x9448fa11, 0xe9642247, 0xfc8cc4a8, 0xf03f1aa0, 0x7d2cd856, 0x3390ef22, 0x494ec787, 0x38d1c1d9, 0xcaa2fe8c, 0xd40b3698, 0xf581cfa6, 0x7ade28a5, 0xb78e26da, 0xadbfa43f, 0x3a9de42c, 0x78920d50, 0x5fcc9b6a, 0x7e466254, 0x8d13c2f6, 0xd8b8e890, 0x39f75e2e, 0xc3aff582, 0x5d80be9f, 0xd0937c69, 0xd52da96f, 0x2512b3cf, 0xac993bc8, 0x187da710, 0x9c636ee8, 0x3bbb7bdb, 0x267809cd, 0x5918f46e, 0x9ab701ec, 0x4f9aa883, 0x956e65e6, 0xffe67eaa, 0xbccf0821, 0x15e8e6ef, 0xe79bd9ba, 0x6f36ce4a, 0x9f09d4ea, 0xb07cd629, 0xa4b2af31, 0x3f23312a, 0xa59430c6, 0xa266c035, 0x4ebc3774, 0x82caa6fc, 0x90d0b0e0, 0xa7d81533, 0x04984af1, 0xecdaf741, 0xcd500e7f, 0x91f62f17, 0x4dd68d76, 0xefb04d43, 0xaa4d54cc, 0x9604dfe4, 0xd1b5e39e, 0x6a881b4c, 0x2c1fb8c1, 0x65517f46, 0x5eea049d, 0x8c355d01, 0x877473fa, 0x0b412efb, 0x671d5ab3, 0xdbd25292, 0x105633e9, 0xd647136d, 0xd7618c9a, 0xa10c7a37, 0xf8148e59, 0x133c89eb, 0xa927eece, 0x61c935b7, 0x1ce5ede1, 0x47b13c7a, 0xd2df599c, 0xf2733f55, 0x14ce7918, 0xc737bf73, 0xf7cdea53, 0xfdaa5b5f, 0x3d6f14df, 0x44db8678, 0xaff381ca, 0x68c43eb9, 0x24342c38, 0xa3405fc2, 0x1dc37216, 0xe2250cbc, 0x3c498b28, 0x0d9541ff, 0xa8017139, 0x0cb3de08, 0xb4e49cd8, 0x56c19064, 0xcb84617b, 0x32b670d5, 0x6c5c7448, 0xb85742d0]; // Transformations for decryption key expansion
var U1 = [0x00000000, 0x0e090d0b, 0x1c121a16, 0x121b171d, 0x3824342c, 0x362d3927, 0x24362e3a, 0x2a3f2331, 0x70486858, 0x7e416553, 0x6c5a724e, 0x62537f45, 0x486c5c74, 0x4665517f, 0x547e4662, 0x5a774b69, 0xe090d0b0, 0xee99ddbb, 0xfc82caa6, 0xf28bc7ad, 0xd8b4e49c, 0xd6bde997, 0xc4a6fe8a, 0xcaaff381, 0x90d8b8e8, 0x9ed1b5e3, 0x8ccaa2fe, 0x82c3aff5, 0xa8fc8cc4, 0xa6f581cf, 0xb4ee96d2, 0xbae79bd9, 0xdb3bbb7b, 0xd532b670, 0xc729a16d, 0xc920ac66, 0xe31f8f57, 0xed16825c, 0xff0d9541, 0xf104984a, 0xab73d323, 0xa57ade28, 0xb761c935, 0xb968c43e, 0x9357e70f, 0x9d5eea04, 0x8f45fd19, 0x814cf012, 0x3bab6bcb, 0x35a266c0, 0x27b971dd, 0x29b07cd6, 0x038f5fe7, 0x0d8652ec, 0x1f9d45f1, 0x119448fa, 0x4be30393, 0x45ea0e98, 0x57f11985, 0x59f8148e, 0x73c737bf, 0x7dce3ab4, 0x6fd52da9, 0x61dc20a2, 0xad766df6, 0xa37f60fd, 0xb16477e0, 0xbf6d7aeb, 0x955259da, 0x9b5b54d1, 0x894043cc, 0x87494ec7, 0xdd3e05ae, 0xd33708a5, 0xc12c1fb8, 0xcf2512b3, 0xe51a3182, 0xeb133c89, 0xf9082b94, 0xf701269f, 0x4de6bd46, 0x43efb04d, 0x51f4a750, 0x5ffdaa5b, 0x75c2896a, 0x7bcb8461, 0x69d0937c, 0x67d99e77, 0x3daed51e, 0x33a7d815, 0x21bccf08, 0x2fb5c203, 0x058ae132, 0x0b83ec39, 0x1998fb24, 0x1791f62f, 0x764dd68d, 0x7844db86, 0x6a5fcc9b, 0x6456c190, 0x4e69e2a1, 0x4060efaa, 0x527bf8b7, 0x5c72f5bc, 0x0605bed5, 0x080cb3de, 0x1a17a4c3, 0x141ea9c8, 0x3e218af9, 0x302887f2, 0x223390ef, 0x2c3a9de4, 0x96dd063d, 0x98d40b36, 0x8acf1c2b, 0x84c61120, 0xaef93211, 0xa0f03f1a, 0xb2eb2807, 0xbce2250c, 0xe6956e65, 0xe89c636e, 0xfa877473, 0xf48e7978, 0xdeb15a49, 0xd0b85742, 0xc2a3405f, 0xccaa4d54, 0x41ecdaf7, 0x4fe5d7fc, 0x5dfec0e1, 0x53f7cdea, 0x79c8eedb, 0x77c1e3d0, 0x65daf4cd, 0x6bd3f9c6, 0x31a4b2af, 0x3fadbfa4, 0x2db6a8b9, 0x23bfa5b2, 0x09808683, 0x07898b88, 0x15929c95, 0x1b9b919e, 0xa17c0a47, 0xaf75074c, 0xbd6e1051, 0xb3671d5a, 0x99583e6b, 0x97513360, 0x854a247d, 0x8b432976, 0xd134621f, 0xdf3d6f14, 0xcd267809, 0xc32f7502, 0xe9105633, 0xe7195b38, 0xf5024c25, 0xfb0b412e, 0x9ad7618c, 0x94de6c87, 0x86c57b9a, 0x88cc7691, 0xa2f355a0, 0xacfa58ab, 0xbee14fb6, 0xb0e842bd, 0xea9f09d4, 0xe49604df, 0xf68d13c2, 0xf8841ec9, 0xd2bb3df8, 0xdcb230f3, 0xcea927ee, 0xc0a02ae5, 0x7a47b13c, 0x744ebc37, 0x6655ab2a, 0x685ca621, 0x42638510, 0x4c6a881b, 0x5e719f06, 0x5078920d, 0x0a0fd964, 0x0406d46f, 0x161dc372, 0x1814ce79, 0x322bed48, 0x3c22e043, 0x2e39f75e, 0x2030fa55, 0xec9ab701, 0xe293ba0a, 0xf088ad17, 0xfe81a01c, 0xd4be832d, 0xdab78e26, 0xc8ac993b, 0xc6a59430, 0x9cd2df59, 0x92dbd252, 0x80c0c54f, 0x8ec9c844, 0xa4f6eb75, 0xaaffe67e, 0xb8e4f163, 0xb6edfc68, 0x0c0a67b1, 0x02036aba, 0x10187da7, 0x1e1170ac, 0x342e539d, 0x3a275e96, 0x283c498b, 0x26354480, 0x7c420fe9, 0x724b02e2, 0x605015ff, 0x6e5918f4, 0x44663bc5, 0x4a6f36ce, 0x587421d3, 0x567d2cd8, 0x37a10c7a, 0x39a80171, 0x2bb3166c, 0x25ba1b67, 0x0f853856, 0x018c355d, 0x13972240, 0x1d9e2f4b, 0x47e96422, 0x49e06929, 0x5bfb7e34, 0x55f2733f, 0x7fcd500e, 0x71c45d05, 0x63df4a18, 0x6dd64713, 0xd731dcca, 0xd938d1c1, 0xcb23c6dc, 0xc52acbd7, 0xef15e8e6, 0xe11ce5ed, 0xf307f2f0, 0xfd0efffb, 0xa779b492, 0xa970b999, 0xbb6bae84, 0xb562a38f, 0x9f5d80be, 0x91548db5, 0x834f9aa8, 0x8d4697a3];
var U2 = [0x00000000, 0x0b0e090d, 0x161c121a, 0x1d121b17, 0x2c382434, 0x27362d39, 0x3a24362e, 0x312a3f23, 0x58704868, 0x537e4165, 0x4e6c5a72, 0x4562537f, 0x74486c5c, 0x7f466551, 0x62547e46, 0x695a774b, 0xb0e090d0, 0xbbee99dd, 0xa6fc82ca, 0xadf28bc7, 0x9cd8b4e4, 0x97d6bde9, 0x8ac4a6fe, 0x81caaff3, 0xe890d8b8, 0xe39ed1b5, 0xfe8ccaa2, 0xf582c3af, 0xc4a8fc8c, 0xcfa6f581, 0xd2b4ee96, 0xd9bae79b, 0x7bdb3bbb, 0x70d532b6, 0x6dc729a1, 0x66c920ac, 0x57e31f8f, 0x5ced1682, 0x41ff0d95, 0x4af10498, 0x23ab73d3, 0x28a57ade, 0x35b761c9, 0x3eb968c4, 0x0f9357e7, 0x049d5eea, 0x198f45fd, 0x12814cf0, 0xcb3bab6b, 0xc035a266, 0xdd27b971, 0xd629b07c, 0xe7038f5f, 0xec0d8652, 0xf11f9d45, 0xfa119448, 0x934be303, 0x9845ea0e, 0x8557f119, 0x8e59f814, 0xbf73c737, 0xb47dce3a, 0xa96fd52d, 0xa261dc20, 0xf6ad766d, 0xfda37f60, 0xe0b16477, 0xebbf6d7a, 0xda955259, 0xd19b5b54, 0xcc894043, 0xc787494e, 0xaedd3e05, 0xa5d33708, 0xb8c12c1f, 0xb3cf2512, 0x82e51a31, 0x89eb133c, 0x94f9082b, 0x9ff70126, 0x464de6bd, 0x4d43efb0, 0x5051f4a7, 0x5b5ffdaa, 0x6a75c289, 0x617bcb84, 0x7c69d093, 0x7767d99e, 0x1e3daed5, 0x1533a7d8, 0x0821bccf, 0x032fb5c2, 0x32058ae1, 0x390b83ec, 0x241998fb, 0x2f1791f6, 0x8d764dd6, 0x867844db, 0x9b6a5fcc, 0x906456c1, 0xa14e69e2, 0xaa4060ef, 0xb7527bf8, 0xbc5c72f5, 0xd50605be, 0xde080cb3, 0xc31a17a4, 0xc8141ea9, 0xf93e218a, 0xf2302887, 0xef223390, 0xe42c3a9d, 0x3d96dd06, 0x3698d40b, 0x2b8acf1c, 0x2084c611, 0x11aef932, 0x1aa0f03f, 0x07b2eb28, 0x0cbce225, 0x65e6956e, 0x6ee89c63, 0x73fa8774, 0x78f48e79, 0x49deb15a, 0x42d0b857, 0x5fc2a340, 0x54ccaa4d, 0xf741ecda, 0xfc4fe5d7, 0xe15dfec0, 0xea53f7cd, 0xdb79c8ee, 0xd077c1e3, 0xcd65daf4, 0xc66bd3f9, 0xaf31a4b2, 0xa43fadbf, 0xb92db6a8, 0xb223bfa5, 0x83098086, 0x8807898b, 0x9515929c, 0x9e1b9b91, 0x47a17c0a, 0x4caf7507, 0x51bd6e10, 0x5ab3671d, 0x6b99583e, 0x60975133, 0x7d854a24, 0x768b4329, 0x1fd13462, 0x14df3d6f, 0x09cd2678, 0x02c32f75, 0x33e91056, 0x38e7195b, 0x25f5024c, 0x2efb0b41, 0x8c9ad761, 0x8794de6c, 0x9a86c57b, 0x9188cc76, 0xa0a2f355, 0xabacfa58, 0xb6bee14f, 0xbdb0e842, 0xd4ea9f09, 0xdfe49604, 0xc2f68d13, 0xc9f8841e, 0xf8d2bb3d, 0xf3dcb230, 0xeecea927, 0xe5c0a02a, 0x3c7a47b1, 0x37744ebc, 0x2a6655ab, 0x21685ca6, 0x10426385, 0x1b4c6a88, 0x065e719f, 0x0d507892, 0x640a0fd9, 0x6f0406d4, 0x72161dc3, 0x791814ce, 0x48322bed, 0x433c22e0, 0x5e2e39f7, 0x552030fa, 0x01ec9ab7, 0x0ae293ba, 0x17f088ad, 0x1cfe81a0, 0x2dd4be83, 0x26dab78e, 0x3bc8ac99, 0x30c6a594, 0x599cd2df, 0x5292dbd2, 0x4f80c0c5, 0x448ec9c8, 0x75a4f6eb, 0x7eaaffe6, 0x63b8e4f1, 0x68b6edfc, 0xb10c0a67, 0xba02036a, 0xa710187d, 0xac1e1170, 0x9d342e53, 0x963a275e, 0x8b283c49, 0x80263544, 0xe97c420f, 0xe2724b02, 0xff605015, 0xf46e5918, 0xc544663b, 0xce4a6f36, 0xd3587421, 0xd8567d2c, 0x7a37a10c, 0x7139a801, 0x6c2bb316, 0x6725ba1b, 0x560f8538, 0x5d018c35, 0x40139722, 0x4b1d9e2f, 0x2247e964, 0x2949e069, 0x345bfb7e, 0x3f55f273, 0x0e7fcd50, 0x0571c45d, 0x1863df4a, 0x136dd647, 0xcad731dc, 0xc1d938d1, 0xdccb23c6, 0xd7c52acb, 0xe6ef15e8, 0xede11ce5, 0xf0f307f2, 0xfbfd0eff, 0x92a779b4, 0x99a970b9, 0x84bb6bae, 0x8fb562a3, 0xbe9f5d80, 0xb591548d, 0xa8834f9a, 0xa38d4697];
var U3 = [0x00000000, 0x0d0b0e09, 0x1a161c12, 0x171d121b, 0x342c3824, 0x3927362d, 0x2e3a2436, 0x23312a3f, 0x68587048, 0x65537e41, 0x724e6c5a, 0x7f456253, 0x5c74486c, 0x517f4665, 0x4662547e, 0x4b695a77, 0xd0b0e090, 0xddbbee99, 0xcaa6fc82, 0xc7adf28b, 0xe49cd8b4, 0xe997d6bd, 0xfe8ac4a6, 0xf381caaf, 0xb8e890d8, 0xb5e39ed1, 0xa2fe8cca, 0xaff582c3, 0x8cc4a8fc, 0x81cfa6f5, 0x96d2b4ee, 0x9bd9bae7, 0xbb7bdb3b, 0xb670d532, 0xa16dc729, 0xac66c920, 0x8f57e31f, 0x825ced16, 0x9541ff0d, 0x984af104, 0xd323ab73, 0xde28a57a, 0xc935b761, 0xc43eb968, 0xe70f9357, 0xea049d5e, 0xfd198f45, 0xf012814c, 0x6bcb3bab, 0x66c035a2, 0x71dd27b9, 0x7cd629b0, 0x5fe7038f, 0x52ec0d86, 0x45f11f9d, 0x48fa1194, 0x03934be3, 0x0e9845ea, 0x198557f1, 0x148e59f8, 0x37bf73c7, 0x3ab47dce, 0x2da96fd5, 0x20a261dc, 0x6df6ad76, 0x60fda37f, 0x77e0b164, 0x7aebbf6d, 0x59da9552, 0x54d19b5b, 0x43cc8940, 0x4ec78749, 0x05aedd3e, 0x08a5d337, 0x1fb8c12c, 0x12b3cf25, 0x3182e51a, 0x3c89eb13, 0x2b94f908, 0x269ff701, 0xbd464de6, 0xb04d43ef, 0xa75051f4, 0xaa5b5ffd, 0x896a75c2, 0x84617bcb, 0x937c69d0, 0x9e7767d9, 0xd51e3dae, 0xd81533a7, 0xcf0821bc, 0xc2032fb5, 0xe132058a, 0xec390b83, 0xfb241998, 0xf62f1791, 0xd68d764d, 0xdb867844, 0xcc9b6a5f, 0xc1906456, 0xe2a14e69, 0xefaa4060, 0xf8b7527b, 0xf5bc5c72, 0xbed50605, 0xb3de080c, 0xa4c31a17, 0xa9c8141e, 0x8af93e21, 0x87f23028, 0x90ef2233, 0x9de42c3a, 0x063d96dd, 0x0b3698d4, 0x1c2b8acf, 0x112084c6, 0x3211aef9, 0x3f1aa0f0, 0x2807b2eb, 0x250cbce2, 0x6e65e695, 0x636ee89c, 0x7473fa87, 0x7978f48e, 0x5a49deb1, 0x5742d0b8, 0x405fc2a3, 0x4d54ccaa, 0xdaf741ec, 0xd7fc4fe5, 0xc0e15dfe, 0xcdea53f7, 0xeedb79c8, 0xe3d077c1, 0xf4cd65da, 0xf9c66bd3, 0xb2af31a4, 0xbfa43fad, 0xa8b92db6, 0xa5b223bf, 0x86830980, 0x8b880789, 0x9c951592, 0x919e1b9b, 0x0a47a17c, 0x074caf75, 0x1051bd6e, 0x1d5ab367, 0x3e6b9958, 0x33609751, 0x247d854a, 0x29768b43, 0x621fd134, 0x6f14df3d, 0x7809cd26, 0x7502c32f, 0x5633e910, 0x5b38e719, 0x4c25f502, 0x412efb0b, 0x618c9ad7, 0x6c8794de, 0x7b9a86c5, 0x769188cc, 0x55a0a2f3, 0x58abacfa, 0x4fb6bee1, 0x42bdb0e8, 0x09d4ea9f, 0x04dfe496, 0x13c2f68d, 0x1ec9f884, 0x3df8d2bb, 0x30f3dcb2, 0x27eecea9, 0x2ae5c0a0, 0xb13c7a47, 0xbc37744e, 0xab2a6655, 0xa621685c, 0x85104263, 0x881b4c6a, 0x9f065e71, 0x920d5078, 0xd9640a0f, 0xd46f0406, 0xc372161d, 0xce791814, 0xed48322b, 0xe0433c22, 0xf75e2e39, 0xfa552030, 0xb701ec9a, 0xba0ae293, 0xad17f088, 0xa01cfe81, 0x832dd4be, 0x8e26dab7, 0x993bc8ac, 0x9430c6a5, 0xdf599cd2, 0xd25292db, 0xc54f80c0, 0xc8448ec9, 0xeb75a4f6, 0xe67eaaff, 0xf163b8e4, 0xfc68b6ed, 0x67b10c0a, 0x6aba0203, 0x7da71018, 0x70ac1e11, 0x539d342e, 0x5e963a27, 0x498b283c, 0x44802635, 0x0fe97c42, 0x02e2724b, 0x15ff6050, 0x18f46e59, 0x3bc54466, 0x36ce4a6f, 0x21d35874, 0x2cd8567d, 0x0c7a37a1, 0x017139a8, 0x166c2bb3, 0x1b6725ba, 0x38560f85, 0x355d018c, 0x22401397, 0x2f4b1d9e, 0x642247e9, 0x692949e0, 0x7e345bfb, 0x733f55f2, 0x500e7fcd, 0x5d0571c4, 0x4a1863df, 0x47136dd6, 0xdccad731, 0xd1c1d938, 0xc6dccb23, 0xcbd7c52a, 0xe8e6ef15, 0xe5ede11c, 0xf2f0f307, 0xfffbfd0e, 0xb492a779, 0xb999a970, 0xae84bb6b, 0xa38fb562, 0x80be9f5d, 0x8db59154, 0x9aa8834f, 0x97a38d46];
var U4 = [0x00000000, 0x090d0b0e, 0x121a161c, 0x1b171d12, 0x24342c38, 0x2d392736, 0x362e3a24, 0x3f23312a, 0x48685870, 0x4165537e, 0x5a724e6c, 0x537f4562, 0x6c5c7448, 0x65517f46, 0x7e466254, 0x774b695a, 0x90d0b0e0, 0x99ddbbee, 0x82caa6fc, 0x8bc7adf2, 0xb4e49cd8, 0xbde997d6, 0xa6fe8ac4, 0xaff381ca, 0xd8b8e890, 0xd1b5e39e, 0xcaa2fe8c, 0xc3aff582, 0xfc8cc4a8, 0xf581cfa6, 0xee96d2b4, 0xe79bd9ba, 0x3bbb7bdb, 0x32b670d5, 0x29a16dc7, 0x20ac66c9, 0x1f8f57e3, 0x16825ced, 0x0d9541ff, 0x04984af1, 0x73d323ab, 0x7ade28a5, 0x61c935b7, 0x68c43eb9, 0x57e70f93, 0x5eea049d, 0x45fd198f, 0x4cf01281, 0xab6bcb3b, 0xa266c035, 0xb971dd27, 0xb07cd629, 0x8f5fe703, 0x8652ec0d, 0x9d45f11f, 0x9448fa11, 0xe303934b, 0xea0e9845, 0xf1198557, 0xf8148e59, 0xc737bf73, 0xce3ab47d, 0xd52da96f, 0xdc20a261, 0x766df6ad, 0x7f60fda3, 0x6477e0b1, 0x6d7aebbf, 0x5259da95, 0x5b54d19b, 0x4043cc89, 0x494ec787, 0x3e05aedd, 0x3708a5d3, 0x2c1fb8c1, 0x2512b3cf, 0x1a3182e5, 0x133c89eb, 0x082b94f9, 0x01269ff7, 0xe6bd464d, 0xefb04d43, 0xf4a75051, 0xfdaa5b5f, 0xc2896a75, 0xcb84617b, 0xd0937c69, 0xd99e7767, 0xaed51e3d, 0xa7d81533, 0xbccf0821, 0xb5c2032f, 0x8ae13205, 0x83ec390b, 0x98fb2419, 0x91f62f17, 0x4dd68d76, 0x44db8678, 0x5fcc9b6a, 0x56c19064, 0x69e2a14e, 0x60efaa40, 0x7bf8b752, 0x72f5bc5c, 0x05bed506, 0x0cb3de08, 0x17a4c31a, 0x1ea9c814, 0x218af93e, 0x2887f230, 0x3390ef22, 0x3a9de42c, 0xdd063d96, 0xd40b3698, 0xcf1c2b8a, 0xc6112084, 0xf93211ae, 0xf03f1aa0, 0xeb2807b2, 0xe2250cbc, 0x956e65e6, 0x9c636ee8, 0x877473fa, 0x8e7978f4, 0xb15a49de, 0xb85742d0, 0xa3405fc2, 0xaa4d54cc, 0xecdaf741, 0xe5d7fc4f, 0xfec0e15d, 0xf7cdea53, 0xc8eedb79, 0xc1e3d077, 0xdaf4cd65, 0xd3f9c66b, 0xa4b2af31, 0xadbfa43f, 0xb6a8b92d, 0xbfa5b223, 0x80868309, 0x898b8807, 0x929c9515, 0x9b919e1b, 0x7c0a47a1, 0x75074caf, 0x6e1051bd, 0x671d5ab3, 0x583e6b99, 0x51336097, 0x4a247d85, 0x4329768b, 0x34621fd1, 0x3d6f14df, 0x267809cd, 0x2f7502c3, 0x105633e9, 0x195b38e7, 0x024c25f5, 0x0b412efb, 0xd7618c9a, 0xde6c8794, 0xc57b9a86, 0xcc769188, 0xf355a0a2, 0xfa58abac, 0xe14fb6be, 0xe842bdb0, 0x9f09d4ea, 0x9604dfe4, 0x8d13c2f6, 0x841ec9f8, 0xbb3df8d2, 0xb230f3dc, 0xa927eece, 0xa02ae5c0, 0x47b13c7a, 0x4ebc3774, 0x55ab2a66, 0x5ca62168, 0x63851042, 0x6a881b4c, 0x719f065e, 0x78920d50, 0x0fd9640a, 0x06d46f04, 0x1dc37216, 0x14ce7918, 0x2bed4832, 0x22e0433c, 0x39f75e2e, 0x30fa5520, 0x9ab701ec, 0x93ba0ae2, 0x88ad17f0, 0x81a01cfe, 0xbe832dd4, 0xb78e26da, 0xac993bc8, 0xa59430c6, 0xd2df599c, 0xdbd25292, 0xc0c54f80, 0xc9c8448e, 0xf6eb75a4, 0xffe67eaa, 0xe4f163b8, 0xedfc68b6, 0x0a67b10c, 0x036aba02, 0x187da710, 0x1170ac1e, 0x2e539d34, 0x275e963a, 0x3c498b28, 0x35448026, 0x420fe97c, 0x4b02e272, 0x5015ff60, 0x5918f46e, 0x663bc544, 0x6f36ce4a, 0x7421d358, 0x7d2cd856, 0xa10c7a37, 0xa8017139, 0xb3166c2b, 0xba1b6725, 0x8538560f, 0x8c355d01, 0x97224013, 0x9e2f4b1d, 0xe9642247, 0xe0692949, 0xfb7e345b, 0xf2733f55, 0xcd500e7f, 0xc45d0571, 0xdf4a1863, 0xd647136d, 0x31dccad7, 0x38d1c1d9, 0x23c6dccb, 0x2acbd7c5, 0x15e8e6ef, 0x1ce5ede1, 0x07f2f0f3, 0x0efffbfd, 0x79b492a7, 0x70b999a9, 0x6bae84bb, 0x62a38fb5, 0x5d80be9f, 0x548db591, 0x4f9aa883, 0x4697a38d];
  function convertToInt32(bytes) {
    var result = [];

    for (var i = 0; i < bytes.length; i += 4) {
      result.push(bytes[i] << 24 | bytes[i + 1] << 16 | bytes[i + 2] << 8 | bytes[i + 3]);
    }

    return result;
  }

  var AES = function AES(key) {
    if (!(this instanceof AES)) {
      throw Error('AES must be instanitated with `new`');
    }

    Object.defineProperty(this, 'key', {
      value: coerceArray(key, true)
    });

    this._prepare();
  };

  AES.prototype._prepare = function () {
    var rounds = numberOfRounds[this.key.length];

    if (rounds == null) {
      throw new Error('invalid key size (must be 16, 24 or 32 bytes)');
    } // encryption round keys


    this._Ke = []; // decryption round keys

    this._Kd = [];

    for (var i = 0; i <= rounds; i++) {
      this._Ke.push([0, 0, 0, 0]);

      this._Kd.push([0, 0, 0, 0]);
    }

    var roundKeyCount = (rounds + 1) * 4;
    var KC = this.key.length / 4; // convert the key into ints

    var tk = convertToInt32(this.key); // copy values into round key arrays

    var index;

    for (var i = 0; i < KC; i++) {
      index = i >> 2;
      this._Ke[index][i % 4] = tk[i];
      this._Kd[rounds - index][i % 4] = tk[i];
    } // key expansion (fips-197 section 5.2)


    var rconpointer = 0;
    var t = KC;
    var tt;

    while (t < roundKeyCount) {
      tt = tk[KC - 1];
      tk[0] ^= S[tt >> 16 & 0xFF] << 24 ^ S[tt >> 8 & 0xFF] << 16 ^ S[tt & 0xFF] << 8 ^ S[tt >> 24 & 0xFF] ^ rcon[rconpointer] << 24;
      rconpointer += 1; // key expansion (for non-256 bit)

      if (KC != 8) {
        for (var i = 1; i < KC; i++) {
          tk[i] ^= tk[i - 1];
        } // key expansion for 256-bit keys is "slightly different" (fips-197)

      } else {
        for (var i = 1; i < KC / 2; i++) {
          tk[i] ^= tk[i - 1];
        }

        tt = tk[KC / 2 - 1];
        tk[KC / 2] ^= S[tt & 0xFF] ^ S[tt >> 8 & 0xFF] << 8 ^ S[tt >> 16 & 0xFF] << 16 ^ S[tt >> 24 & 0xFF] << 24;

        for (var i = KC / 2 + 1; i < KC; i++) {
          tk[i] ^= tk[i - 1];
        }
      } // copy values into round key arrays


      var i = 0;
      var r;
      var c;

      while (i < KC && t < roundKeyCount) {
        r = t >> 2;
        c = t % 4;
        this._Ke[r][c] = tk[i];
        this._Kd[rounds - r][c] = tk[i++];
        t++;
      }
    } // inverse-cipher-ify the decryption round key (fips-197 section 5.3)


    for (var r = 1; r < rounds; r++) {
      for (var c = 0; c < 4; c++) {
        tt = this._Kd[r][c];
        this._Kd[r][c] = U1[tt >> 24 & 0xFF] ^ U2[tt >> 16 & 0xFF] ^ U3[tt >> 8 & 0xFF] ^ U4[tt & 0xFF];
      }
    }
  };
  AES.prototype.encrypt = function (plaintext) {
    if (plaintext.length != 16) {
      throw new Error('invalid plaintext size (must be 16 bytes)');
    }

    var rounds = this._Ke.length - 1;
    var a = [0, 0, 0, 0]; // convert plaintext to (ints ^ key)

    var t = convertToInt32(plaintext);

    for (var i = 0; i < 4; i++) {
      t[i] ^= this._Ke[0][i];
    } // apply round transforms


    for (var r = 1; r < rounds; r++) {
      for (var i = 0; i < 4; i++) {
        a[i] = T1[t[i] >> 24 & 0xff] ^ T2[t[(i + 1) % 4] >> 16 & 0xff] ^ T3[t[(i + 2) % 4] >> 8 & 0xff] ^ T4[t[(i + 3) % 4] & 0xff] ^ this._Ke[r][i];
      }

      t = a.slice();
    } // the last round is special


    var result = createArray(16);
    var tt;

    for (var i = 0; i < 4; i++) {
      tt = this._Ke[rounds][i];
      result[4 * i] = (S[t[i] >> 24 & 0xff] ^ tt >> 24) & 0xff;
      result[4 * i + 1] = (S[t[(i + 1) % 4] >> 16 & 0xff] ^ tt >> 16) & 0xff;
      result[4 * i + 2] = (S[t[(i + 2) % 4] >> 8 & 0xff] ^ tt >> 8) & 0xff;
      result[4 * i + 3] = (S[t[(i + 3) % 4] & 0xff] ^ tt) & 0xff;
    }

    return result;
  };

  AES.prototype.decrypt = function (ciphertext) {
    if (ciphertext.length != 16) {
      throw new Error('invalid ciphertext size (must be 16 bytes)');
    }

    var rounds = this._Kd.length - 1;
    var a = [0, 0, 0, 0]; // convert plaintext to (ints ^ key)

    var t = convertToInt32(ciphertext);

    for (var i = 0; i < 4; i++) {
      t[i] ^= this._Kd[0][i];
    } // apply round transforms


    for (var r = 1; r < rounds; r++) {
      for (var i = 0; i < 4; i++) {
        a[i] = T5[t[i] >> 24 & 0xff] ^ T6[t[(i + 3) % 4] >> 16 & 0xff] ^ T7[t[(i + 2) % 4] >> 8 & 0xff] ^ T8[t[(i + 1) % 4] & 0xff] ^ this._Kd[r][i];
      }

      t = a.slice();
    } // the last round is special


    var result = createArray(16);
    var tt;

    for (var i = 0; i < 4; i++) {
      tt = this._Kd[rounds][i];
      result[4 * i] = (Si[t[i] >> 24 & 0xff] ^ tt >> 24) & 0xff;
      result[4 * i + 1] = (Si[t[(i + 3) % 4] >> 16 & 0xff] ^ tt >> 16) & 0xff;
      result[4 * i + 2] = (Si[t[(i + 2) % 4] >> 8 & 0xff] ^ tt >> 8) & 0xff;
      result[4 * i + 3] = (Si[t[(i + 1) % 4] & 0xff] ^ tt) & 0xff;
    }

    return result;
  };
  /**
   * Mode Of Operation - Cipher Block Chaining (CBC)
   */


  var ModeOfOperationCBC = function ModeOfOperationCBC(key, iv) {
    if (!(this instanceof ModeOfOperationCBC)) {
      throw Error('AES must be instanitated with `new`');
    }

    this.description = 'Cipher Block Chaining';
    this.name = 'cbc';

    if (!iv) {
      iv = createArray(16);
    } else if (iv.length != 16) {
      throw new Error('invalid initialation vector size (must be 16 bytes)');
    }

    this._lastCipherblock = coerceArray(iv, true);
    this._aes = new AES(key);
  };

  ModeOfOperationCBC.prototype.encrypt = function (plaintext) {
    plaintext = coerceArray(plaintext);

    if (plaintext.length % 16 !== 0) {
      throw new Error('invalid plaintext size (must be multiple of 16 bytes)');
    }

    var ciphertext = createArray(plaintext.length);
    var block = createArray(16);

    for (var i = 0; i < plaintext.length; i += 16) {
      copyArray(plaintext, block, 0, i, i + 16);

      for (var j = 0; j < 16; j++) {
        block[j] ^= this._lastCipherblock[j];
      }

      this._lastCipherblock = this._aes.encrypt(block);
      copyArray(this._lastCipherblock, ciphertext, i);
    }

    return ciphertext;
  };

  ModeOfOperationCBC.prototype.decrypt = function (ciphertext) {
    ciphertext = coerceArray(ciphertext);

    if (ciphertext.length % 16 !== 0) {
      throw new Error('invalid ciphertext size (must be multiple of 16 bytes)');
    }

    var plaintext = createArray(ciphertext.length);
    var block = createArray(16);

    for (var i = 0; i < ciphertext.length; i += 16) {
      copyArray(ciphertext, block, 0, i, i + 16);
      block = this._aes.decrypt(block);

      for (var j = 0; j < 16; j++) {
        plaintext[i + j] = block[j] ^ this._lastCipherblock[j];
      }

      copyArray(ciphertext, this._lastCipherblock, 0, i, i + 16);
    }

    return plaintext;
  };
/**
|
||
* Counter object for CTR common mode of operation
|
||
*/
|
||
|
||
|
||
var Counter = function Counter(initialValue) {
|
||
if (!(this instanceof Counter)) {
|
||
throw Error('Counter must be instanitated with `new`');
|
||
} // We allow 0, but anything false-ish uses the default 1
|
||
|
||
|
||
if (initialValue !== 0 && !initialValue) {
|
||
initialValue = 1;
|
||
}
|
||
|
||
if (typeof initialValue === 'number') {
|
||
this._counter = createArray(16);
|
||
this.setValue(initialValue);
|
||
} else {
|
||
this.setBytes(initialValue);
|
||
}
|
||
};
|
||
|
||
Counter.prototype.setValue = function (value) {
|
||
if (typeof value !== 'number' || parseInt(value) != value) {
|
||
throw new Error('invalid counter value (must be an integer)');
|
||
} // We cannot safely handle numbers beyond the safe range for integers
|
||
|
||
|
||
if (value > Number.MAX_SAFE_INTEGER) {
|
||
throw new Error('integer value out of safe range');
|
||
}
|
||
|
||
for (var index = 15; index >= 0; --index) {
|
||
this._counter[index] = value % 256;
|
||
value = parseInt(value / 256);
|
||
}
|
||
};
|
||
|
||
Counter.prototype.setBytes = function (bytes) {
|
||
bytes = coerceArray(bytes, true);
|
||
|
||
if (bytes.length != 16) {
|
||
throw new Error('invalid counter bytes size (must be 16 bytes)');
|
||
}
|
||
|
||
this._counter = bytes;
|
||
};
|
||
|
||
Counter.prototype.increment = function () {
|
||
for (var i = 15; i >= 0; i--) {
|
||
if (this._counter[i] === 255) {
|
||
this._counter[i] = 0;
|
||
} else {
|
||
this._counter[i]++;
|
||
break;
|
||
}
|
||
}
|
||
}; /// ////////////////////
|
||
// Padding
|
||
// See: https://tools.ietf.org/html/rfc2315
|
||
|
||
|
||
function pkcs7pad(data) {
|
||
data = coerceArray(data, true);
|
||
var padder = 16 - data.length % 16;
|
||
var result = createArray(data.length + padder);
|
||
copyArray(data, result);
|
||
|
||
for (var i = data.length; i < result.length; i++) {
|
||
result[i] = padder;
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
function pkcs7strip(data) {
|
||
data = coerceArray(data, true);
|
||
|
||
if (data.length < 16) {
|
||
throw new Error('PKCS#7 invalid length');
|
||
}
|
||
|
||
var padder = data[data.length - 1];
|
||
|
||
if (padder > 16) {
|
||
throw new Error('PKCS#7 padding byte out of range');
|
||
}
|
||
|
||
var length = data.length - padder;
|
||
|
||
for (var i = 0; i < padder; i++) {
|
||
if (data[length + i] !== padder) {
|
||
throw new Error('PKCS#7 invalid padding byte');
|
||
}
|
||
}
|
||
|
||
var result = createArray(length);
|
||
copyArray(data, result, 0, 0, length);
|
||
return result;
|
||
} /// ////////////////////
|
||
// Exporting
|
||
// The block cipher
|
||
|
||
|
||
var aesjs = {
|
||
AES: AES,
|
||
Counter: Counter,
|
||
modeOfOperation: {
|
||
cbc: ModeOfOperationCBC
|
||
},
|
||
utils: {
|
||
hex: convertHex,
|
||
utf8: convertUtf8
|
||
},
|
||
padding: {
|
||
pkcs7: {
|
||
pad: pkcs7pad,
|
||
strip: pkcs7strip
|
||
}
|
||
},
|
||
_arrayTest: {
|
||
coerceArray: coerceArray,
|
||
createArray: createArray,
|
||
copyArray: copyArray
|
||
}
|
||
}; // node.js
|
||
|
||
if (true) {
|
||
module.exports = aesjs; // RequireJS/AMD
|
||
// http://www.requirejs.org/docs/api.html
|
||
// https://github.com/amdjs/amdjs-api/wiki/AMD
|
||
// eslint-disable-next-line no-undef
|
||
} else {}
|
||
})(this);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/utils/attr-list.ts":
|
||
/*!********************************!*\
|
||
!*** ./src/utils/attr-list.ts ***!
|
||
\********************************/
|
||
/*! exports provided: AttrList */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "AttrList", function() { return AttrList; });
|
||
var DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/; // eslint-disable-line no-useless-escape
|
||
|
||
var ATTR_LIST_REGEX = /\s*(.+?)\s*=((?:\".*?\")|.*?)(?:,|$)/g; // eslint-disable-line no-useless-escape
|
||
// adapted from https://github.com/kanongil/node-m3u8parse/blob/master/attrlist.js
|
||
|
||
var AttrList = /*#__PURE__*/function () {
|
||
function AttrList(attrs) {
|
||
if (typeof attrs === 'string') {
|
||
attrs = AttrList.parseAttrList(attrs);
|
||
}
|
||
|
||
for (var attr in attrs) {
|
||
if (attrs.hasOwnProperty(attr)) {
|
||
this[attr] = attrs[attr];
|
||
}
|
||
}
|
||
}
|
||
|
||
var _proto = AttrList.prototype;
|
||
|
||
_proto.decimalInteger = function decimalInteger(attrName) {
|
||
var intValue = parseInt(this[attrName], 10);
|
||
|
||
if (intValue > Number.MAX_SAFE_INTEGER) {
|
||
return Infinity;
|
||
}
|
||
|
||
return intValue;
|
||
};
|
||
|
||
_proto.hexadecimalInteger = function hexadecimalInteger(attrName) {
|
||
if (this[attrName]) {
|
||
var stringValue = (this[attrName] || '0x').slice(2);
|
||
stringValue = (stringValue.length & 1 ? '0' : '') + stringValue;
|
||
var value = new Uint8Array(stringValue.length / 2);
|
||
|
||
for (var i = 0; i < stringValue.length / 2; i++) {
|
||
value[i] = parseInt(stringValue.slice(i * 2, i * 2 + 2), 16);
|
||
}
|
||
|
||
return value;
|
||
} else {
|
||
return null;
|
||
}
|
||
};
|
||
|
||
_proto.hexadecimalIntegerAsNumber = function hexadecimalIntegerAsNumber(attrName) {
|
||
var intValue = parseInt(this[attrName], 16);
|
||
|
||
if (intValue > Number.MAX_SAFE_INTEGER) {
|
||
return Infinity;
|
||
}
|
||
|
||
return intValue;
|
||
};
|
||
|
||
_proto.decimalFloatingPoint = function decimalFloatingPoint(attrName) {
|
||
return parseFloat(this[attrName]);
|
||
};
|
||
|
||
_proto.optionalFloat = function optionalFloat(attrName, defaultValue) {
|
||
var value = this[attrName];
|
||
return value ? parseFloat(value) : defaultValue;
|
||
};
|
||
|
||
_proto.enumeratedString = function enumeratedString(attrName) {
|
||
return this[attrName];
|
||
};
|
||
|
||
_proto.bool = function bool(attrName) {
|
||
return this[attrName] === 'YES';
|
||
};
|
||
|
||
_proto.decimalResolution = function decimalResolution(attrName) {
|
||
var res = DECIMAL_RESOLUTION_REGEX.exec(this[attrName]);
|
||
|
||
if (res === null) {
|
||
return undefined;
|
||
}
|
||
|
||
return {
|
||
width: parseInt(res[1], 10),
|
||
height: parseInt(res[2], 10)
|
||
};
|
||
};
|
||
|
||
AttrList.parseAttrList = function parseAttrList(input) {
|
||
var match;
|
||
var attrs = {};
|
||
var quote = '"';
|
||
ATTR_LIST_REGEX.lastIndex = 0;
|
||
|
||
while ((match = ATTR_LIST_REGEX.exec(input)) !== null) {
|
||
var value = match[2];
|
||
|
||
if (value.indexOf(quote) === 0 && value.lastIndexOf(quote) === value.length - 1) {
|
||
value = value.slice(1, -1);
|
||
}
|
||
|
||
attrs[match[1]] = value;
|
||
}
|
||
|
||
return attrs;
|
||
};
|
||
|
||
return AttrList;
|
||
}();
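
/*
 * Usage sketch (illustrative comment only): parsing a typical
 * EXT-X-STREAM-INF style attribute string with the AttrList class above.
 * The attribute values are made-up sample numbers.
 *
 *   var attrs = new AttrList('BANDWIDTH=5000000,RESOLUTION=1280x720,FRAME-RATE=29.97');
 *   attrs.decimalInteger('BANDWIDTH');        // 5000000
 *   attrs.decimalResolution('RESOLUTION');    // { width: 1280, height: 720 }
 *   attrs.decimalFloatingPoint('FRAME-RATE'); // 29.97
 */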
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/utils/binary-search.ts":
|
||
/*!************************************!*\
|
||
!*** ./src/utils/binary-search.ts ***!
|
||
\************************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
var BinarySearch = {
|
||
/**
|
||
* Searches for an item in an array which matches a certain condition.
|
||
* This requires the condition to only match one item in the array,
|
||
* and for the array to be ordered.
|
||
*
|
||
* @param {Array<T>} list The array to search.
|
||
* @param {BinarySearchComparison<T>} comparisonFn
|
||
* Called and provided a candidate item as the first argument.
|
||
* Should return:
|
||
* > -1 if the item should be located at a lower index than the provided item.
|
||
* > 1 if the item should be located at a higher index than the provided item.
|
||
* > 0 if the item is the item you're looking for.
|
||
*
|
||
* @return {T | null} The object if it is found or null otherwise.
|
||
*/
|
||
search: function search(list, comparisonFn) {
|
||
var minIndex = 0;
|
||
var maxIndex = list.length - 1;
|
||
var currentIndex = null;
|
||
var currentElement = null;
|
||
|
||
while (minIndex <= maxIndex) {
|
||
currentIndex = (minIndex + maxIndex) / 2 | 0;
|
||
currentElement = list[currentIndex];
|
||
var comparisonResult = comparisonFn(currentElement);
|
||
|
||
if (comparisonResult > 0) {
|
||
minIndex = currentIndex + 1;
|
||
} else if (comparisonResult < 0) {
|
||
maxIndex = currentIndex - 1;
|
||
} else {
|
||
return currentElement;
|
||
}
|
||
}
|
||
|
||
return null;
|
||
}
|
||
};
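
/*
 * Usage sketch (illustrative comment only): the comparison callback steers the
 * search, so the list must already be ordered by the same criterion. The
 * fragments array below is a made-up example.
 *
 *   var fragments = [{ start: 0, duration: 4 }, { start: 4, duration: 4 }, { start: 8, duration: 4 }];
 *   var pos = 5;
 *   var match = BinarySearch.search(fragments, function (frag) {
 *     if (pos < frag.start) return -1;                   // target is at a lower index
 *     if (pos >= frag.start + frag.duration) return 1;   // target is at a higher index
 *     return 0;                                          // this fragment contains pos
 *   });
 *   // match === fragments[1]
 */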
/* harmony default export */ __webpack_exports__["default"] = (BinarySearch);


/***/ }),
|
||
|
||
/***/ "./src/utils/buffer-helper.ts":
|
||
/*!************************************!*\
|
||
!*** ./src/utils/buffer-helper.ts ***!
|
||
\************************************/
|
||
/*! exports provided: BufferHelper */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "BufferHelper", function() { return BufferHelper; });
|
||
/* harmony import */ var _logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./logger */ "./src/utils/logger.ts");
|
||
/**
|
||
* @module BufferHelper
|
||
*
|
||
* Provides methods for dealing with buffer length retrieval, for example.
*
* In general, a helper around HTML5 MediaElement TimeRanges gathered from the `buffered` property.
|
||
*
|
||
* Also @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/buffered
|
||
*/
|
||
|
||
var noopBuffered = {
|
||
length: 0,
|
||
start: function start() {
|
||
return 0;
|
||
},
|
||
end: function end() {
|
||
return 0;
|
||
}
|
||
};
|
||
var BufferHelper = /*#__PURE__*/function () {
|
||
function BufferHelper() {}
|
||
|
||
/**
|
||
* Return true if `media`'s buffered include `position`
|
||
* @param {Bufferable} media
|
||
* @param {number} position
|
||
* @returns {boolean}
|
||
*/
|
||
BufferHelper.isBuffered = function isBuffered(media, position) {
|
||
try {
|
||
if (media) {
|
||
var buffered = BufferHelper.getBuffered(media);
|
||
|
||
for (var i = 0; i < buffered.length; i++) {
|
||
if (position >= buffered.start(i) && position <= buffered.end(i)) {
|
||
return true;
|
||
}
|
||
}
|
||
}
|
||
} catch (error) {// this is to catch
|
||
// InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
|
||
// This SourceBuffer has been removed from the parent media source
|
||
}
|
||
|
||
return false;
|
||
};
|
||
|
||
BufferHelper.bufferInfo = function bufferInfo(media, pos, maxHoleDuration) {
|
||
try {
|
||
if (media) {
|
||
var vbuffered = BufferHelper.getBuffered(media);
|
||
var buffered = [];
|
||
var i;
|
||
|
||
for (i = 0; i < vbuffered.length; i++) {
|
||
buffered.push({
|
||
start: vbuffered.start(i),
|
||
end: vbuffered.end(i)
|
||
});
|
||
}
|
||
|
||
return this.bufferedInfo(buffered, pos, maxHoleDuration);
|
||
}
|
||
} catch (error) {// this is to catch
|
||
// InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
|
||
// This SourceBuffer has been removed from the parent media source
|
||
}
|
||
|
||
return {
|
||
len: 0,
|
||
start: pos,
|
||
end: pos,
|
||
nextStart: undefined
|
||
};
|
||
};
|
||
|
||
BufferHelper.bufferedInfo = function bufferedInfo(buffered, pos, maxHoleDuration) {
|
||
pos = Math.max(0, pos); // sort on buffer.start/smaller end (IE does not always return sorted buffered range)
|
||
|
||
buffered.sort(function (a, b) {
|
||
var diff = a.start - b.start;
|
||
|
||
if (diff) {
|
||
return diff;
|
||
} else {
|
||
return b.end - a.end;
|
||
}
|
||
});
|
||
var buffered2 = [];
|
||
|
||
if (maxHoleDuration) {
|
||
// there might be some small holes between buffer time range
|
||
// consider that holes smaller than maxHoleDuration are irrelevant and build another
// buffer time range representation that discards those holes
|
||
for (var i = 0; i < buffered.length; i++) {
|
||
var buf2len = buffered2.length;
|
||
|
||
if (buf2len) {
|
||
var buf2end = buffered2[buf2len - 1].end; // if small hole (value between 0 and maxHoleDuration) or overlapping (negative)
|
||
|
||
if (buffered[i].start - buf2end < maxHoleDuration) {
|
||
// merge overlapping time ranges
|
||
// update lastRange.end only if smaller than item.end
|
||
// e.g. [ 1, 15] with [ 2,8] => [ 1,15] (no need to modify lastRange.end)
|
||
// whereas [ 1, 8] with [ 2,15] => [ 1,15] ( lastRange should switch from [1,8] to [1,15])
|
||
if (buffered[i].end > buf2end) {
|
||
buffered2[buf2len - 1].end = buffered[i].end;
|
||
}
|
||
} else {
|
||
// big hole
|
||
buffered2.push(buffered[i]);
|
||
}
|
||
} else {
|
||
// first value
|
||
buffered2.push(buffered[i]);
|
||
}
|
||
}
|
||
} else {
|
||
buffered2 = buffered;
|
||
}
|
||
|
||
var bufferLen = 0; // bufferStartNext can possibly be undefined based on the conditional logic below
|
||
|
||
var bufferStartNext; // bufferStart and bufferEnd are buffer boundaries around current video position
|
||
|
||
var bufferStart = pos;
|
||
var bufferEnd = pos;
|
||
|
||
for (var _i = 0; _i < buffered2.length; _i++) {
|
||
var start = buffered2[_i].start;
|
||
var end = buffered2[_i].end; // logger.log('buf start/end:' + buffered.start(i) + '/' + buffered.end(i));
|
||
|
||
if (pos + maxHoleDuration >= start && pos < end) {
|
||
// play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length
|
||
bufferStart = start;
|
||
bufferEnd = end;
|
||
bufferLen = bufferEnd - pos;
|
||
} else if (pos + maxHoleDuration < start) {
|
||
bufferStartNext = start;
|
||
break;
|
||
}
|
||
}
|
||
|
||
return {
|
||
len: bufferLen,
|
||
start: bufferStart || 0,
|
||
end: bufferEnd || 0,
|
||
nextStart: bufferStartNext
|
||
};
|
||
}
|
||
/**
|
||
* Safe method to get buffered property.
|
||
* SourceBuffer.buffered may throw if SourceBuffer is removed from its MediaSource
|
||
*/
|
||
;
|
||
|
||
BufferHelper.getBuffered = function getBuffered(media) {
|
||
try {
|
||
return media.buffered;
|
||
} catch (e) {
|
||
_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].log('failed to get media.buffered', e);
|
||
return noopBuffered;
|
||
}
|
||
};
|
||
|
||
return BufferHelper;
|
||
}();
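
/*
 * Usage sketch (illustrative comment only): querying the buffer state around
 * the current playback position. `video` is assumed to be an HTMLMediaElement
 * obtained elsewhere; 0.5 is an arbitrary max hole size in seconds.
 *
 *   var info = BufferHelper.bufferInfo(video, video.currentTime, 0.5);
 *   // info.len       -> seconds buffered ahead of currentTime
 *   // info.end       -> end of the buffered range containing currentTime
 *   // info.nextStart -> start of the next buffered range, if any
 *   var buffered = BufferHelper.isBuffered(video, video.currentTime + 10);
 */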
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/utils/cea-608-parser.ts":
|
||
/*!*************************************!*\
|
||
!*** ./src/utils/cea-608-parser.ts ***!
|
||
\*************************************/
|
||
/*! exports provided: Row, CaptionScreen, default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "Row", function() { return Row; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "CaptionScreen", function() { return CaptionScreen; });
|
||
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
|
||
|
||
/**
|
||
*
|
||
* This code was ported from the dash.js project at:
|
||
* https://github.com/Dash-Industry-Forum/dash.js/blob/development/externals/cea608-parser.js
|
||
* https://github.com/Dash-Industry-Forum/dash.js/commit/8269b26a761e0853bb21d78780ed945144ecdd4d#diff-71bc295a2d6b6b7093a1d3290d53a4b2
|
||
*
|
||
* The original copyright appears below:
|
||
*
|
||
* The copyright in this software is being made available under the BSD License,
|
||
* included below. This software may be subject to other third party and contributor
|
||
* rights, including patent rights, and no such rights are granted under this license.
|
||
*
|
||
* Copyright (c) 2015-2016, DASH Industry Forum.
|
||
* All rights reserved.
|
||
*
|
||
* Redistribution and use in source and binary forms, with or without modification,
|
||
* are permitted provided that the following conditions are met:
|
||
* 1. Redistributions of source code must retain the above copyright notice, this
|
||
* list of conditions and the following disclaimer.
|
||
* * Redistributions in binary form must reproduce the above copyright notice,
|
||
* this list of conditions and the following disclaimer in the documentation and/or
|
||
* other materials provided with the distribution.
|
||
* 2. Neither the name of Dash Industry Forum nor the names of its
|
||
* contributors may be used to endorse or promote products derived from this software
|
||
* without specific prior written permission.
|
||
*
|
||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY
|
||
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
|
||
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
|
||
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||
* POSSIBILITY OF SUCH DAMAGE.
|
||
*/
|
||
|
||
/**
|
||
* Exceptions from regular ASCII. CodePoints are mapped to UTF-16 codes
|
||
*/
|
||
|
||
var specialCea608CharsCodes = {
|
||
0x2a: 0xe1,
|
||
// lowercase a, acute accent
|
||
0x5c: 0xe9,
|
||
// lowercase e, acute accent
|
||
0x5e: 0xed,
|
||
// lowercase i, acute accent
|
||
0x5f: 0xf3,
|
||
// lowercase o, acute accent
|
||
0x60: 0xfa,
|
||
// lowercase u, acute accent
|
||
0x7b: 0xe7,
|
||
// lowercase c with cedilla
|
||
0x7c: 0xf7,
|
||
// division symbol
|
||
0x7d: 0xd1,
|
||
// uppercase N tilde
|
||
0x7e: 0xf1,
|
||
// lowercase n tilde
|
||
0x7f: 0x2588,
|
||
// Full block
|
||
// THIS BLOCK INCLUDES THE 16 EXTENDED (TWO-BYTE) LINE 21 CHARACTERS
|
||
// THAT COME FROM HI BYTE=0x11 AND LOW BETWEEN 0x30 AND 0x3F
|
||
// THIS MEANS THAT \x50 MUST BE ADDED TO THE VALUES
|
||
0x80: 0xae,
|
||
// Registered symbol (R)
|
||
0x81: 0xb0,
|
||
// degree sign
|
||
0x82: 0xbd,
|
||
// 1/2 symbol
|
||
0x83: 0xbf,
|
||
// Inverted (open) question mark
|
||
0x84: 0x2122,
|
||
// Trademark symbol (TM)
|
||
0x85: 0xa2,
|
||
// Cents symbol
|
||
0x86: 0xa3,
|
||
// Pounds sterling
|
||
0x87: 0x266a,
|
||
// Music 8'th note
|
||
0x88: 0xe0,
|
||
// lowercase a, grave accent
|
||
0x89: 0x20,
|
||
// transparent space (regular)
|
||
0x8a: 0xe8,
|
||
// lowercase e, grave accent
|
||
0x8b: 0xe2,
|
||
// lowercase a, circumflex accent
|
||
0x8c: 0xea,
|
||
// lowercase e, circumflex accent
|
||
0x8d: 0xee,
|
||
// lowercase i, circumflex accent
|
||
0x8e: 0xf4,
|
||
// lowercase o, circumflex accent
|
||
0x8f: 0xfb,
|
||
// lowercase u, circumflex accent
|
||
// THIS BLOCK INCLUDES THE 32 EXTENDED (TWO-BYTE) LINE 21 CHARACTERS
|
||
// THAT COME FROM HI BYTE=0x12 AND LOW BETWEEN 0x20 AND 0x3F
|
||
0x90: 0xc1,
|
||
// capital letter A with acute
|
||
0x91: 0xc9,
|
||
// capital letter E with acute
|
||
0x92: 0xd3,
|
||
// capital letter O with acute
|
||
0x93: 0xda,
|
||
// capital letter U with acute
|
||
0x94: 0xdc,
|
||
// capital letter U with diaeresis
|
||
0x95: 0xfc,
|
||
// lowercase letter U with diaeresis
|
||
0x96: 0x2018,
|
||
// opening single quote
|
||
0x97: 0xa1,
|
||
// inverted exclamation mark
|
||
0x98: 0x2a,
|
||
// asterisk
|
||
0x99: 0x2019,
|
||
// closing single quote
|
||
0x9a: 0x2501,
|
||
// box drawings heavy horizontal
|
||
0x9b: 0xa9,
|
||
// copyright sign
|
||
0x9c: 0x2120,
|
||
// Service mark
|
||
0x9d: 0x2022,
|
||
// (round) bullet
|
||
0x9e: 0x201c,
|
||
// Left double quotation mark
|
||
0x9f: 0x201d,
|
||
// Right double quotation mark
|
||
0xa0: 0xc0,
|
||
// uppercase A, grave accent
|
||
0xa1: 0xc2,
|
||
// uppercase A, circumflex
|
||
0xa2: 0xc7,
|
||
// uppercase C with cedilla
|
||
0xa3: 0xc8,
|
||
// uppercase E, grave accent
|
||
0xa4: 0xca,
|
||
// uppercase E, circumflex
|
||
0xa5: 0xcb,
|
||
// capital letter E with diaeresis
|
||
0xa6: 0xeb,
|
||
// lowercase letter e with diaeresis
|
||
0xa7: 0xce,
|
||
// uppercase I, circumflex
|
||
0xa8: 0xcf,
|
||
// uppercase I, with diaeresis
|
||
0xa9: 0xef,
|
||
// lowercase i, with diaeresis
|
||
0xaa: 0xd4,
|
||
// uppercase O, circumflex
|
||
0xab: 0xd9,
|
||
// uppercase U, grave accent
|
||
0xac: 0xf9,
|
||
// lowercase u, grave accent
|
||
0xad: 0xdb,
|
||
// uppercase U, circumflex
|
||
0xae: 0xab,
|
||
// left-pointing double angle quotation mark
|
||
0xaf: 0xbb,
|
||
// right-pointing double angle quotation mark
|
||
// THIS BLOCK INCLUDES THE 32 EXTENDED (TWO-BYTE) LINE 21 CHARACTERS
|
||
// THAT COME FROM HI BYTE=0x13 AND LOW BETWEEN 0x20 AND 0x3F
|
||
0xb0: 0xc3,
|
||
// Uppercase A, tilde
|
||
0xb1: 0xe3,
|
||
// Lowercase a, tilde
|
||
0xb2: 0xcd,
|
||
// Uppercase I, acute accent
|
||
0xb3: 0xcc,
|
||
// Uppercase I, grave accent
|
||
0xb4: 0xec,
|
||
// Lowercase i, grave accent
|
||
0xb5: 0xd2,
|
||
// Uppercase O, grave accent
|
||
0xb6: 0xf2,
|
||
// Lowercase o, grave accent
|
||
0xb7: 0xd5,
|
||
// Uppercase O, tilde
|
||
0xb8: 0xf5,
|
||
// Lowercase o, tilde
|
||
0xb9: 0x7b,
|
||
// Open curly brace
|
||
0xba: 0x7d,
|
||
// Closing curly brace
|
||
0xbb: 0x5c,
|
||
// Backslash
|
||
0xbc: 0x5e,
|
||
// Caret
|
||
0xbd: 0x5f,
|
||
// Underscore
|
||
0xbe: 0x7c,
|
||
// Pipe (vertical line)
|
||
0xbf: 0x223c,
|
||
// Tilde operator
|
||
0xc0: 0xc4,
|
||
// Uppercase A, umlaut
|
||
0xc1: 0xe4,
|
||
// Lowercase A, umlaut
|
||
0xc2: 0xd6,
|
||
// Uppercase O, umlaut
|
||
0xc3: 0xf6,
|
||
// Lowercase o, umlaut
|
||
0xc4: 0xdf,
|
||
// Esszett (sharp S)
|
||
0xc5: 0xa5,
|
||
// Yen symbol
|
||
0xc6: 0xa4,
|
||
// Generic currency sign
|
||
0xc7: 0x2503,
|
||
// Box drawings heavy vertical
|
||
0xc8: 0xc5,
|
||
// Uppercase A, ring
|
||
0xc9: 0xe5,
|
||
// Lowercase A, ring
|
||
0xca: 0xd8,
|
||
// Uppercase O, stroke
|
||
0xcb: 0xf8,
|
||
// Lowercase o, stroke
|
||
0xcc: 0x250f,
|
||
// Box drawings heavy down and right
|
||
0xcd: 0x2513,
|
||
// Box drawings heavy down and left
|
||
0xce: 0x2517,
|
||
// Box drawings heavy up and right
|
||
0xcf: 0x251b // Box drawings heavy up and left
|
||
|
||
};
|
||
/**
|
||
* Utils
|
||
*/
|
||
|
||
var getCharForByte = function getCharForByte(_byte) {
|
||
var charCode = _byte;
|
||
|
||
if (specialCea608CharsCodes.hasOwnProperty(_byte)) {
|
||
charCode = specialCea608CharsCodes[_byte];
|
||
}
|
||
|
||
return String.fromCharCode(charCode);
|
||
};
|
||
|
||
var NR_ROWS = 15;
|
||
var NR_COLS = 100; // Tables to look up row from PAC data
|
||
|
||
var rowsLowCh1 = {
|
||
0x11: 1,
|
||
0x12: 3,
|
||
0x15: 5,
|
||
0x16: 7,
|
||
0x17: 9,
|
||
0x10: 11,
|
||
0x13: 12,
|
||
0x14: 14
|
||
};
|
||
var rowsHighCh1 = {
|
||
0x11: 2,
|
||
0x12: 4,
|
||
0x15: 6,
|
||
0x16: 8,
|
||
0x17: 10,
|
||
0x13: 13,
|
||
0x14: 15
|
||
};
|
||
var rowsLowCh2 = {
|
||
0x19: 1,
|
||
0x1a: 3,
|
||
0x1d: 5,
|
||
0x1e: 7,
|
||
0x1f: 9,
|
||
0x18: 11,
|
||
0x1b: 12,
|
||
0x1c: 14
|
||
};
|
||
var rowsHighCh2 = {
|
||
0x19: 2,
|
||
0x1a: 4,
|
||
0x1d: 6,
|
||
0x1e: 8,
|
||
0x1f: 10,
|
||
0x1b: 13,
|
||
0x1c: 15
|
||
};
|
||
var backgroundColors = ['white', 'green', 'blue', 'cyan', 'red', 'yellow', 'magenta', 'black', 'transparent'];
|
||
var VerboseLevel;
|
||
|
||
(function (VerboseLevel) {
|
||
VerboseLevel[VerboseLevel["ERROR"] = 0] = "ERROR";
|
||
VerboseLevel[VerboseLevel["TEXT"] = 1] = "TEXT";
|
||
VerboseLevel[VerboseLevel["WARNING"] = 2] = "WARNING";
|
||
VerboseLevel[VerboseLevel["INFO"] = 2] = "INFO";
|
||
VerboseLevel[VerboseLevel["DEBUG"] = 3] = "DEBUG";
|
||
VerboseLevel[VerboseLevel["DATA"] = 3] = "DATA";
|
||
})(VerboseLevel || (VerboseLevel = {}));
|
||
|
||
var CaptionsLogger = /*#__PURE__*/function () {
|
||
function CaptionsLogger() {
|
||
this.time = null;
|
||
this.verboseLevel = VerboseLevel.ERROR;
|
||
}
|
||
|
||
var _proto = CaptionsLogger.prototype;
|
||
|
||
_proto.log = function log(severity, msg) {
|
||
if (this.verboseLevel >= severity) {
|
||
_utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].log(this.time + " [" + severity + "] " + msg);
|
||
}
|
||
};
|
||
|
||
return CaptionsLogger;
|
||
}();
|
||
|
||
var numArrayToHexArray = function numArrayToHexArray(numArray) {
|
||
var hexArray = [];
|
||
|
||
for (var j = 0; j < numArray.length; j++) {
|
||
hexArray.push(numArray[j].toString(16));
|
||
}
|
||
|
||
return hexArray;
|
||
};
|
||
|
||
var PenState = /*#__PURE__*/function () {
|
||
function PenState(foreground, underline, italics, background, flash) {
|
||
this.foreground = void 0;
|
||
this.underline = void 0;
|
||
this.italics = void 0;
|
||
this.background = void 0;
|
||
this.flash = void 0;
|
||
this.foreground = foreground || 'white';
|
||
this.underline = underline || false;
|
||
this.italics = italics || false;
|
||
this.background = background || 'black';
|
||
this.flash = flash || false;
|
||
}
|
||
|
||
var _proto2 = PenState.prototype;
|
||
|
||
_proto2.reset = function reset() {
|
||
this.foreground = 'white';
|
||
this.underline = false;
|
||
this.italics = false;
|
||
this.background = 'black';
|
||
this.flash = false;
|
||
};
|
||
|
||
_proto2.setStyles = function setStyles(styles) {
|
||
var attribs = ['foreground', 'underline', 'italics', 'background', 'flash'];
|
||
|
||
for (var i = 0; i < attribs.length; i++) {
|
||
var style = attribs[i];
|
||
|
||
if (styles.hasOwnProperty(style)) {
|
||
this[style] = styles[style];
|
||
}
|
||
}
|
||
};
|
||
|
||
_proto2.isDefault = function isDefault() {
|
||
return this.foreground === 'white' && !this.underline && !this.italics && this.background === 'black' && !this.flash;
|
||
};
|
||
|
||
_proto2.equals = function equals(other) {
|
||
return this.foreground === other.foreground && this.underline === other.underline && this.italics === other.italics && this.background === other.background && this.flash === other.flash;
|
||
};
|
||
|
||
_proto2.copy = function copy(newPenState) {
|
||
this.foreground = newPenState.foreground;
|
||
this.underline = newPenState.underline;
|
||
this.italics = newPenState.italics;
|
||
this.background = newPenState.background;
|
||
this.flash = newPenState.flash;
|
||
};
|
||
|
||
_proto2.toString = function toString() {
|
||
return 'color=' + this.foreground + ', underline=' + this.underline + ', italics=' + this.italics + ', background=' + this.background + ', flash=' + this.flash;
|
||
};
|
||
|
||
return PenState;
|
||
}();
|
||
/**
|
||
* Unicode character with styling and background.
|
||
* @constructor
|
||
*/
|
||
|
||
|
||
var StyledUnicodeChar = /*#__PURE__*/function () {
|
||
function StyledUnicodeChar(uchar, foreground, underline, italics, background, flash) {
|
||
this.uchar = void 0;
|
||
this.penState = void 0;
|
||
this.uchar = uchar || ' '; // unicode character
|
||
|
||
this.penState = new PenState(foreground, underline, italics, background, flash);
|
||
}
|
||
|
||
var _proto3 = StyledUnicodeChar.prototype;
|
||
|
||
_proto3.reset = function reset() {
|
||
this.uchar = ' ';
|
||
this.penState.reset();
|
||
};
|
||
|
||
_proto3.setChar = function setChar(uchar, newPenState) {
|
||
this.uchar = uchar;
|
||
this.penState.copy(newPenState);
|
||
};
|
||
|
||
_proto3.setPenState = function setPenState(newPenState) {
|
||
this.penState.copy(newPenState);
|
||
};
|
||
|
||
_proto3.equals = function equals(other) {
|
||
return this.uchar === other.uchar && this.penState.equals(other.penState);
|
||
};
|
||
|
||
_proto3.copy = function copy(newChar) {
|
||
this.uchar = newChar.uchar;
|
||
this.penState.copy(newChar.penState);
|
||
};
|
||
|
||
_proto3.isEmpty = function isEmpty() {
|
||
return this.uchar === ' ' && this.penState.isDefault();
|
||
};
|
||
|
||
return StyledUnicodeChar;
|
||
}();
|
||
/**
|
||
* CEA-608 row consisting of NR_COLS instances of StyledUnicodeChar.
|
||
* @constructor
|
||
*/
|
||
|
||
|
||
var Row = /*#__PURE__*/function () {
|
||
function Row(logger) {
|
||
this.chars = void 0;
|
||
this.pos = void 0;
|
||
this.currPenState = void 0;
|
||
this.cueStartTime = void 0;
|
||
this.logger = void 0;
|
||
this.chars = [];
|
||
|
||
for (var i = 0; i < NR_COLS; i++) {
|
||
this.chars.push(new StyledUnicodeChar());
|
||
}
|
||
|
||
this.logger = logger;
|
||
this.pos = 0;
|
||
this.currPenState = new PenState();
|
||
}
|
||
|
||
var _proto4 = Row.prototype;
|
||
|
||
_proto4.equals = function equals(other) {
|
||
var equal = true;
|
||
|
||
for (var i = 0; i < NR_COLS; i++) {
|
||
if (!this.chars[i].equals(other.chars[i])) {
|
||
equal = false;
|
||
break;
|
||
}
|
||
}
|
||
|
||
return equal;
|
||
};
|
||
|
||
_proto4.copy = function copy(other) {
|
||
for (var i = 0; i < NR_COLS; i++) {
|
||
this.chars[i].copy(other.chars[i]);
|
||
}
|
||
};
|
||
|
||
_proto4.isEmpty = function isEmpty() {
|
||
var empty = true;
|
||
|
||
for (var i = 0; i < NR_COLS; i++) {
|
||
if (!this.chars[i].isEmpty()) {
|
||
empty = false;
|
||
break;
|
||
}
|
||
}
|
||
|
||
return empty;
|
||
}
|
||
/**
|
||
* Set the cursor to a valid column.
|
||
*/
|
||
;
|
||
|
||
_proto4.setCursor = function setCursor(absPos) {
|
||
if (this.pos !== absPos) {
|
||
this.pos = absPos;
|
||
}
|
||
|
||
if (this.pos < 0) {
|
||
this.logger.log(VerboseLevel.DEBUG, 'Negative cursor position ' + this.pos);
|
||
this.pos = 0;
|
||
} else if (this.pos > NR_COLS) {
|
||
this.logger.log(VerboseLevel.DEBUG, 'Too large cursor position ' + this.pos);
|
||
this.pos = NR_COLS;
|
||
}
|
||
}
|
||
/**
|
||
* Move the cursor relative to current position.
|
||
*/
|
||
;
|
||
|
||
_proto4.moveCursor = function moveCursor(relPos) {
|
||
var newPos = this.pos + relPos;
|
||
|
||
if (relPos > 1) {
|
||
for (var i = this.pos + 1; i < newPos + 1; i++) {
|
||
this.chars[i].setPenState(this.currPenState);
|
||
}
|
||
}
|
||
|
||
this.setCursor(newPos);
|
||
}
|
||
/**
|
||
* Backspace, move one step back and clear character.
|
||
*/
|
||
;
|
||
|
||
_proto4.backSpace = function backSpace() {
|
||
this.moveCursor(-1);
|
||
this.chars[this.pos].setChar(' ', this.currPenState);
|
||
};
|
||
|
||
_proto4.insertChar = function insertChar(_byte2) {
|
||
if (_byte2 >= 0x90) {
|
||
// Extended char
|
||
this.backSpace();
|
||
}
|
||
|
||
var _char = getCharForByte(_byte2);
|
||
|
||
if (this.pos >= NR_COLS) {
|
||
this.logger.log(VerboseLevel.ERROR, 'Cannot insert ' + _byte2.toString(16) + ' (' + _char + ') at position ' + this.pos + '. Skipping it!');
|
||
return;
|
||
}
|
||
|
||
this.chars[this.pos].setChar(_char, this.currPenState);
|
||
this.moveCursor(1);
|
||
};
|
||
|
||
_proto4.clearFromPos = function clearFromPos(startPos) {
|
||
var i;
|
||
|
||
for (i = startPos; i < NR_COLS; i++) {
|
||
this.chars[i].reset();
|
||
}
|
||
};
|
||
|
||
_proto4.clear = function clear() {
|
||
this.clearFromPos(0);
|
||
this.pos = 0;
|
||
this.currPenState.reset();
|
||
};
|
||
|
||
_proto4.clearToEndOfRow = function clearToEndOfRow() {
|
||
this.clearFromPos(this.pos);
|
||
};
|
||
|
||
_proto4.getTextString = function getTextString() {
|
||
var chars = [];
|
||
var empty = true;
|
||
|
||
for (var i = 0; i < NR_COLS; i++) {
|
||
var _char2 = this.chars[i].uchar;
|
||
|
||
if (_char2 !== ' ') {
|
||
empty = false;
|
||
}
|
||
|
||
chars.push(_char2);
|
||
}
|
||
|
||
if (empty) {
|
||
return '';
|
||
} else {
|
||
return chars.join('');
|
||
}
|
||
};
|
||
|
||
_proto4.setPenStyles = function setPenStyles(styles) {
|
||
this.currPenState.setStyles(styles);
|
||
var currChar = this.chars[this.pos];
|
||
currChar.setPenState(this.currPenState);
|
||
};
|
||
|
||
return Row;
|
||
}();
|
||
/**
|
||
* Keep a CEA-608 screen of NR_COLS x NR_ROWS styled characters
|
||
* @constructor
|
||
*/
|
||
|
||
var CaptionScreen = /*#__PURE__*/function () {
|
||
function CaptionScreen(logger) {
|
||
this.rows = void 0;
|
||
this.currRow = void 0;
|
||
this.nrRollUpRows = void 0;
|
||
this.lastOutputScreen = void 0;
|
||
this.logger = void 0;
|
||
this.rows = [];
|
||
|
||
for (var i = 0; i < NR_ROWS; i++) {
|
||
this.rows.push(new Row(logger));
|
||
} // Note that we use zero-based numbering (0-14)
|
||
|
||
|
||
this.logger = logger;
|
||
this.currRow = NR_ROWS - 1;
|
||
this.nrRollUpRows = null;
|
||
this.lastOutputScreen = null;
|
||
this.reset();
|
||
}
|
||
|
||
var _proto5 = CaptionScreen.prototype;
|
||
|
||
_proto5.reset = function reset() {
|
||
for (var i = 0; i < NR_ROWS; i++) {
|
||
this.rows[i].clear();
|
||
}
|
||
|
||
this.currRow = NR_ROWS - 1;
|
||
};
|
||
|
||
_proto5.equals = function equals(other) {
|
||
var equal = true;
|
||
|
||
for (var i = 0; i < NR_ROWS; i++) {
|
||
if (!this.rows[i].equals(other.rows[i])) {
|
||
equal = false;
|
||
break;
|
||
}
|
||
}
|
||
|
||
return equal;
|
||
};
|
||
|
||
_proto5.copy = function copy(other) {
|
||
for (var i = 0; i < NR_ROWS; i++) {
|
||
this.rows[i].copy(other.rows[i]);
|
||
}
|
||
};
|
||
|
||
_proto5.isEmpty = function isEmpty() {
|
||
var empty = true;
|
||
|
||
for (var i = 0; i < NR_ROWS; i++) {
|
||
if (!this.rows[i].isEmpty()) {
|
||
empty = false;
|
||
break;
|
||
}
|
||
}
|
||
|
||
return empty;
|
||
};
|
||
|
||
_proto5.backSpace = function backSpace() {
|
||
var row = this.rows[this.currRow];
|
||
row.backSpace();
|
||
};
|
||
|
||
_proto5.clearToEndOfRow = function clearToEndOfRow() {
|
||
var row = this.rows[this.currRow];
|
||
row.clearToEndOfRow();
|
||
}
|
||
/**
|
||
* Insert a character (without styling) in the current row.
|
||
*/
|
||
;
|
||
|
||
_proto5.insertChar = function insertChar(_char3) {
|
||
var row = this.rows[this.currRow];
|
||
row.insertChar(_char3);
|
||
};
|
||
|
||
_proto5.setPen = function setPen(styles) {
|
||
var row = this.rows[this.currRow];
|
||
row.setPenStyles(styles);
|
||
};
|
||
|
||
_proto5.moveCursor = function moveCursor(relPos) {
|
||
var row = this.rows[this.currRow];
|
||
row.moveCursor(relPos);
|
||
};
|
||
|
||
_proto5.setCursor = function setCursor(absPos) {
|
||
this.logger.log(VerboseLevel.INFO, 'setCursor: ' + absPos);
|
||
var row = this.rows[this.currRow];
|
||
row.setCursor(absPos);
|
||
};
|
||
|
||
_proto5.setPAC = function setPAC(pacData) {
|
||
this.logger.log(VerboseLevel.INFO, 'pacData = ' + JSON.stringify(pacData));
|
||
var newRow = pacData.row - 1;
|
||
|
||
if (this.nrRollUpRows && newRow < this.nrRollUpRows - 1) {
|
||
newRow = this.nrRollUpRows - 1;
|
||
} // Make sure this only affects Roll-up Captions by checking this.nrRollUpRows
|
||
|
||
|
||
if (this.nrRollUpRows && this.currRow !== newRow) {
|
||
// clear all rows first
|
||
for (var i = 0; i < NR_ROWS; i++) {
|
||
this.rows[i].clear();
|
||
} // Copy this.nrRollUpRows rows from lastOutputScreen and place it in the newRow location
|
||
// topRowIndex - the start of rows to copy (inclusive index)
|
||
|
||
|
||
var topRowIndex = this.currRow + 1 - this.nrRollUpRows; // We only copy if the last position was already shown.
|
||
// We use the cueStartTime value to check this.
|
||
|
||
var lastOutputScreen = this.lastOutputScreen;
|
||
|
||
if (lastOutputScreen) {
|
||
var prevLineTime = lastOutputScreen.rows[topRowIndex].cueStartTime;
|
||
var time = this.logger.time;
|
||
|
||
if (prevLineTime && time !== null && prevLineTime < time) {
|
||
for (var _i = 0; _i < this.nrRollUpRows; _i++) {
|
||
this.rows[newRow - this.nrRollUpRows + _i + 1].copy(lastOutputScreen.rows[topRowIndex + _i]);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
this.currRow = newRow;
|
||
var row = this.rows[this.currRow];
|
||
|
||
if (pacData.indent !== null) {
|
||
var indent = pacData.indent;
|
||
var prevPos = Math.max(indent - 1, 0);
|
||
row.setCursor(pacData.indent);
|
||
pacData.color = row.chars[prevPos].penState.foreground;
|
||
}
|
||
|
||
var styles = {
|
||
foreground: pacData.color,
|
||
underline: pacData.underline,
|
||
italics: pacData.italics,
|
||
background: 'black',
|
||
flash: false
|
||
};
|
||
this.setPen(styles);
|
||
}
|
||
/**
|
||
* Set background/extra foreground, but first do back_space, and then insert space (backwards compatibility).
|
||
*/
|
||
;
|
||
|
||
_proto5.setBkgData = function setBkgData(bkgData) {
|
||
this.logger.log(VerboseLevel.INFO, 'bkgData = ' + JSON.stringify(bkgData));
|
||
this.backSpace();
|
||
this.setPen(bkgData);
|
||
this.insertChar(0x20); // Space
|
||
};
|
||
|
||
_proto5.setRollUpRows = function setRollUpRows(nrRows) {
|
||
this.nrRollUpRows = nrRows;
|
||
};
|
||
|
||
_proto5.rollUp = function rollUp() {
|
||
if (this.nrRollUpRows === null) {
|
||
this.logger.log(VerboseLevel.DEBUG, 'roll_up but nrRollUpRows not set yet');
|
||
return; // Not properly setup
|
||
}
|
||
|
||
this.logger.log(VerboseLevel.TEXT, this.getDisplayText());
|
||
var topRowIndex = this.currRow + 1 - this.nrRollUpRows;
|
||
var topRow = this.rows.splice(topRowIndex, 1)[0];
|
||
topRow.clear();
|
||
this.rows.splice(this.currRow, 0, topRow);
|
||
this.logger.log(VerboseLevel.INFO, 'Rolling up'); // this.logger.log(VerboseLevel.TEXT, this.get_display_text())
|
||
}
|
||
/**
|
||
* Get all non-empty rows as unicode text.
|
||
*/
|
||
;
|
||
|
||
_proto5.getDisplayText = function getDisplayText(asOneRow) {
|
||
asOneRow = asOneRow || false;
|
||
var displayText = [];
|
||
var text = '';
|
||
var rowNr = -1;
|
||
|
||
for (var i = 0; i < NR_ROWS; i++) {
|
||
var rowText = this.rows[i].getTextString();
|
||
|
||
if (rowText) {
|
||
rowNr = i + 1;
|
||
|
||
if (asOneRow) {
|
||
displayText.push('Row ' + rowNr + ": '" + rowText + "'");
|
||
} else {
|
||
displayText.push(rowText.trim());
|
||
}
|
||
}
|
||
}
|
||
|
||
if (displayText.length > 0) {
|
||
if (asOneRow) {
|
||
text = '[' + displayText.join(' | ') + ']';
|
||
} else {
|
||
text = displayText.join('\n');
|
||
}
|
||
}
|
||
|
||
return text;
|
||
};
|
||
|
||
_proto5.getTextAndFormat = function getTextAndFormat() {
|
||
return this.rows;
|
||
};
|
||
|
||
return CaptionScreen;
|
||
}(); // var modes = ['MODE_ROLL-UP', 'MODE_POP-ON', 'MODE_PAINT-ON', 'MODE_TEXT'];
|
||
|
||
var Cea608Channel = /*#__PURE__*/function () {
|
||
function Cea608Channel(channelNumber, outputFilter, logger) {
|
||
this.chNr = void 0;
|
||
this.outputFilter = void 0;
|
||
this.mode = void 0;
|
||
this.verbose = void 0;
|
||
this.displayedMemory = void 0;
|
||
this.nonDisplayedMemory = void 0;
|
||
this.lastOutputScreen = void 0;
|
||
this.currRollUpRow = void 0;
|
||
this.writeScreen = void 0;
|
||
this.cueStartTime = void 0;
|
||
this.logger = void 0;
|
||
this.chNr = channelNumber;
|
||
this.outputFilter = outputFilter;
|
||
this.mode = null;
|
||
this.verbose = 0;
|
||
this.displayedMemory = new CaptionScreen(logger);
|
||
this.nonDisplayedMemory = new CaptionScreen(logger);
|
||
this.lastOutputScreen = new CaptionScreen(logger);
|
||
this.currRollUpRow = this.displayedMemory.rows[NR_ROWS - 1];
|
||
this.writeScreen = this.displayedMemory;
|
||
this.mode = null;
|
||
this.cueStartTime = null; // Keeps track of where a cue started.
|
||
|
||
this.logger = logger;
|
||
}
|
||
|
||
var _proto6 = Cea608Channel.prototype;
|
||
|
||
_proto6.reset = function reset() {
|
||
this.mode = null;
|
||
this.displayedMemory.reset();
|
||
this.nonDisplayedMemory.reset();
|
||
this.lastOutputScreen.reset();
|
||
this.outputFilter.reset();
|
||
this.currRollUpRow = this.displayedMemory.rows[NR_ROWS - 1];
|
||
this.writeScreen = this.displayedMemory;
|
||
this.mode = null;
|
||
this.cueStartTime = null;
|
||
};
|
||
|
||
_proto6.getHandler = function getHandler() {
|
||
return this.outputFilter;
|
||
};
|
||
|
||
_proto6.setHandler = function setHandler(newHandler) {
|
||
this.outputFilter = newHandler;
|
||
};
|
||
|
||
_proto6.setPAC = function setPAC(pacData) {
|
||
this.writeScreen.setPAC(pacData);
|
||
};
|
||
|
||
_proto6.setBkgData = function setBkgData(bkgData) {
|
||
this.writeScreen.setBkgData(bkgData);
|
||
};
|
||
|
||
_proto6.setMode = function setMode(newMode) {
|
||
if (newMode === this.mode) {
|
||
return;
|
||
}
|
||
|
||
this.mode = newMode;
|
||
this.logger.log(VerboseLevel.INFO, 'MODE=' + newMode);
|
||
|
||
if (this.mode === 'MODE_POP-ON') {
|
||
this.writeScreen = this.nonDisplayedMemory;
|
||
} else {
|
||
this.writeScreen = this.displayedMemory;
|
||
this.writeScreen.reset();
|
||
}
|
||
|
||
if (this.mode !== 'MODE_ROLL-UP') {
|
||
this.displayedMemory.nrRollUpRows = null;
|
||
this.nonDisplayedMemory.nrRollUpRows = null;
|
||
}
|
||
|
||
this.mode = newMode;
|
||
};
|
||
|
||
_proto6.insertChars = function insertChars(chars) {
|
||
for (var i = 0; i < chars.length; i++) {
|
||
this.writeScreen.insertChar(chars[i]);
|
||
}
|
||
|
||
var screen = this.writeScreen === this.displayedMemory ? 'DISP' : 'NON_DISP';
|
||
this.logger.log(VerboseLevel.INFO, screen + ': ' + this.writeScreen.getDisplayText(true));
|
||
|
||
if (this.mode === 'MODE_PAINT-ON' || this.mode === 'MODE_ROLL-UP') {
|
||
this.logger.log(VerboseLevel.TEXT, 'DISPLAYED: ' + this.displayedMemory.getDisplayText(true));
|
||
this.outputDataUpdate();
|
||
}
|
||
};
|
||
|
||
_proto6.ccRCL = function ccRCL() {
|
||
// Resume Caption Loading (switch mode to Pop On)
|
||
this.logger.log(VerboseLevel.INFO, 'RCL - Resume Caption Loading');
|
||
this.setMode('MODE_POP-ON');
|
||
};
|
||
|
||
_proto6.ccBS = function ccBS() {
|
||
// BackSpace
|
||
this.logger.log(VerboseLevel.INFO, 'BS - BackSpace');
|
||
|
||
if (this.mode === 'MODE_TEXT') {
|
||
return;
|
||
}
|
||
|
||
this.writeScreen.backSpace();
|
||
|
||
if (this.writeScreen === this.displayedMemory) {
|
||
this.outputDataUpdate();
|
||
}
|
||
};
|
||
|
||
_proto6.ccAOF = function ccAOF() {// Reserved (formerly Alarm Off)
|
||
};
|
||
|
||
_proto6.ccAON = function ccAON() {// Reserved (formerly Alarm On)
|
||
};
|
||
|
||
_proto6.ccDER = function ccDER() {
|
||
// Delete to End of Row
|
||
this.logger.log(VerboseLevel.INFO, 'DER- Delete to End of Row');
|
||
this.writeScreen.clearToEndOfRow();
|
||
this.outputDataUpdate();
|
||
};
|
||
|
||
_proto6.ccRU = function ccRU(nrRows) {
|
||
// Roll-Up Captions-2,3,or 4 Rows
|
||
this.logger.log(VerboseLevel.INFO, 'RU(' + nrRows + ') - Roll Up');
|
||
this.writeScreen = this.displayedMemory;
|
||
this.setMode('MODE_ROLL-UP');
|
||
this.writeScreen.setRollUpRows(nrRows);
|
||
};
|
||
|
||
_proto6.ccFON = function ccFON() {
|
||
// Flash On
|
||
this.logger.log(VerboseLevel.INFO, 'FON - Flash On');
|
||
this.writeScreen.setPen({
|
||
flash: true
|
||
});
|
||
};
|
||
|
||
_proto6.ccRDC = function ccRDC() {
|
||
// Resume Direct Captioning (switch mode to PaintOn)
|
||
this.logger.log(VerboseLevel.INFO, 'RDC - Resume Direct Captioning');
|
||
this.setMode('MODE_PAINT-ON');
|
||
};
|
||
|
||
_proto6.ccTR = function ccTR() {
|
||
// Text Restart in text mode (not supported, however)
|
||
this.logger.log(VerboseLevel.INFO, 'TR');
|
||
this.setMode('MODE_TEXT');
|
||
};
|
||
|
||
_proto6.ccRTD = function ccRTD() {
|
||
// Resume Text Display in Text mode (not supported, however)
|
||
this.logger.log(VerboseLevel.INFO, 'RTD');
|
||
this.setMode('MODE_TEXT');
|
||
};
|
||
|
||
_proto6.ccEDM = function ccEDM() {
|
||
// Erase Displayed Memory
|
||
this.logger.log(VerboseLevel.INFO, 'EDM - Erase Displayed Memory');
|
||
this.displayedMemory.reset();
|
||
this.outputDataUpdate(true);
|
||
};
|
||
|
||
_proto6.ccCR = function ccCR() {
|
||
// Carriage Return
|
||
this.logger.log(VerboseLevel.INFO, 'CR - Carriage Return');
|
||
this.writeScreen.rollUp();
|
||
this.outputDataUpdate(true);
|
||
};
|
||
|
||
_proto6.ccENM = function ccENM() {
|
||
// Erase Non-Displayed Memory
|
||
this.logger.log(VerboseLevel.INFO, 'ENM - Erase Non-displayed Memory');
|
||
this.nonDisplayedMemory.reset();
|
||
};
|
||
|
||
_proto6.ccEOC = function ccEOC() {
|
||
// End of Caption (Flip Memories)
|
||
this.logger.log(VerboseLevel.INFO, 'EOC - End Of Caption');
|
||
|
||
if (this.mode === 'MODE_POP-ON') {
|
||
var tmp = this.displayedMemory;
|
||
this.displayedMemory = this.nonDisplayedMemory;
|
||
this.nonDisplayedMemory = tmp;
|
||
this.writeScreen = this.nonDisplayedMemory;
|
||
this.logger.log(VerboseLevel.TEXT, 'DISP: ' + this.displayedMemory.getDisplayText());
|
||
}
|
||
|
||
this.outputDataUpdate(true);
|
||
};
|
||
|
||
_proto6.ccTO = function ccTO(nrCols) {
|
||
// Tab Offset 1,2, or 3 columns
|
||
this.logger.log(VerboseLevel.INFO, 'TO(' + nrCols + ') - Tab Offset');
|
||
this.writeScreen.moveCursor(nrCols);
|
||
};
|
||
|
||
_proto6.ccMIDROW = function ccMIDROW(secondByte) {
|
||
// Parse MIDROW command
|
||
var styles = {
|
||
flash: false
|
||
};
|
||
styles.underline = secondByte % 2 === 1;
|
||
styles.italics = secondByte >= 0x2e;
|
||
|
||
if (!styles.italics) {
|
||
var colorIndex = Math.floor(secondByte / 2) - 0x10;
|
||
var colors = ['white', 'green', 'blue', 'cyan', 'red', 'yellow', 'magenta'];
|
||
styles.foreground = colors[colorIndex];
|
||
} else {
|
||
styles.foreground = 'white';
|
||
}
|
||
|
||
this.logger.log(VerboseLevel.INFO, 'MIDROW: ' + JSON.stringify(styles));
|
||
this.writeScreen.setPen(styles);
|
||
};
|
||
|
||
_proto6.outputDataUpdate = function outputDataUpdate(dispatch) {
|
||
if (dispatch === void 0) {
|
||
dispatch = false;
|
||
}
|
||
|
||
var time = this.logger.time;
|
||
|
||
if (time === null) {
|
||
return;
|
||
}
|
||
|
||
if (this.outputFilter) {
|
||
if (this.cueStartTime === null && !this.displayedMemory.isEmpty()) {
|
||
// Start of a new cue
|
||
this.cueStartTime = time;
|
||
} else {
|
||
if (!this.displayedMemory.equals(this.lastOutputScreen)) {
|
||
this.outputFilter.newCue(this.cueStartTime, time, this.lastOutputScreen);
|
||
|
||
if (dispatch && this.outputFilter.dispatchCue) {
|
||
this.outputFilter.dispatchCue();
|
||
}
|
||
|
||
this.cueStartTime = this.displayedMemory.isEmpty() ? null : time;
|
||
}
|
||
}
|
||
|
||
this.lastOutputScreen.copy(this.displayedMemory);
|
||
}
|
||
};
|
||
|
||
_proto6.cueSplitAtTime = function cueSplitAtTime(t) {
|
||
if (this.outputFilter) {
|
||
if (!this.displayedMemory.isEmpty()) {
|
||
if (this.outputFilter.newCue) {
|
||
this.outputFilter.newCue(this.cueStartTime, t, this.displayedMemory);
|
||
}
|
||
|
||
this.cueStartTime = t;
|
||
}
|
||
}
|
||
};
|
||
|
||
return Cea608Channel;
|
||
}();
|
||
|
||
var Cea608Parser = /*#__PURE__*/function () {
|
||
function Cea608Parser(field, out1, out2) {
|
||
this.channels = void 0;
|
||
this.currentChannel = 0;
|
||
this.cmdHistory = void 0;
|
||
this.logger = void 0;
|
||
var logger = new CaptionsLogger();
|
||
this.channels = [null, new Cea608Channel(field, out1, logger), new Cea608Channel(field + 1, out2, logger)];
|
||
this.cmdHistory = createCmdHistory();
|
||
this.logger = logger;
|
||
}
|
||
|
||
var _proto7 = Cea608Parser.prototype;
|
||
|
||
_proto7.getHandler = function getHandler(channel) {
|
||
return this.channels[channel].getHandler();
|
||
};
|
||
|
||
_proto7.setHandler = function setHandler(channel, newHandler) {
|
||
this.channels[channel].setHandler(newHandler);
|
||
}
|
||
/**
|
||
* Add data for time t in the form of a list of bytes (unsigned ints). The bytes are treated as pairs.
|
||
*/
|
||
;
|
||
|
||
_proto7.addData = function addData(time, byteList) {
|
||
var cmdFound;
|
||
var a;
|
||
var b;
|
||
var charsFound = false;
|
||
this.logger.time = time;
|
||
|
||
for (var i = 0; i < byteList.length; i += 2) {
|
||
a = byteList[i] & 0x7f;
|
||
b = byteList[i + 1] & 0x7f;
|
||
|
||
if (a === 0 && b === 0) {
|
||
continue;
|
||
} else {
|
||
this.logger.log(VerboseLevel.DATA, '[' + numArrayToHexArray([byteList[i], byteList[i + 1]]) + '] -> (' + numArrayToHexArray([a, b]) + ')');
|
||
}
|
||
|
||
cmdFound = this.parseCmd(a, b);
|
||
|
||
if (!cmdFound) {
|
||
cmdFound = this.parseMidrow(a, b);
|
||
}
|
||
|
||
if (!cmdFound) {
|
||
cmdFound = this.parsePAC(a, b);
|
||
}
|
||
|
||
if (!cmdFound) {
|
||
cmdFound = this.parseBackgroundAttributes(a, b);
|
||
}
|
||
|
||
if (!cmdFound) {
|
||
charsFound = this.parseChars(a, b);
|
||
|
||
if (charsFound) {
|
||
var currChNr = this.currentChannel;
|
||
|
||
if (currChNr && currChNr > 0) {
|
||
var channel = this.channels[currChNr];
|
||
channel.insertChars(charsFound);
|
||
} else {
|
||
this.logger.log(VerboseLevel.WARNING, 'No channel found yet. TEXT-MODE?');
|
||
}
|
||
}
|
||
}
|
||
|
||
if (!cmdFound && !charsFound) {
|
||
this.logger.log(VerboseLevel.WARNING, "Couldn't parse cleaned data " + numArrayToHexArray([a, b]) + ' orig: ' + numArrayToHexArray([byteList[i], byteList[i + 1]]));
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Parse Command.
|
||
* @returns {Boolean} Tells if a command was found
|
||
*/
|
||
;
|
||
|
||
_proto7.parseCmd = function parseCmd(a, b) {
|
||
var cmdHistory = this.cmdHistory;
|
||
var cond1 = (a === 0x14 || a === 0x1c || a === 0x15 || a === 0x1d) && b >= 0x20 && b <= 0x2f;
|
||
var cond2 = (a === 0x17 || a === 0x1f) && b >= 0x21 && b <= 0x23;
|
||
|
||
if (!(cond1 || cond2)) {
|
||
return false;
|
||
}
|
||
|
||
if (hasCmdRepeated(a, b, cmdHistory)) {
|
||
setLastCmd(null, null, cmdHistory);
|
||
this.logger.log(VerboseLevel.DEBUG, 'Repeated command (' + numArrayToHexArray([a, b]) + ') is dropped');
|
||
return true;
|
||
}
|
||
|
||
var chNr = a === 0x14 || a === 0x15 || a === 0x17 ? 1 : 2;
|
||
var channel = this.channels[chNr];
|
||
|
||
if (a === 0x14 || a === 0x15 || a === 0x1c || a === 0x1d) {
|
||
if (b === 0x20) {
|
||
channel.ccRCL();
|
||
} else if (b === 0x21) {
|
||
channel.ccBS();
|
||
} else if (b === 0x22) {
|
||
channel.ccAOF();
|
||
} else if (b === 0x23) {
|
||
channel.ccAON();
|
||
} else if (b === 0x24) {
|
||
channel.ccDER();
|
||
} else if (b === 0x25) {
|
||
channel.ccRU(2);
|
||
} else if (b === 0x26) {
|
||
channel.ccRU(3);
|
||
} else if (b === 0x27) {
|
||
channel.ccRU(4);
|
||
} else if (b === 0x28) {
|
||
channel.ccFON();
|
||
} else if (b === 0x29) {
|
||
channel.ccRDC();
|
||
} else if (b === 0x2a) {
|
||
channel.ccTR();
|
||
} else if (b === 0x2b) {
|
||
channel.ccRTD();
|
||
} else if (b === 0x2c) {
|
||
channel.ccEDM();
|
||
} else if (b === 0x2d) {
|
||
channel.ccCR();
|
||
} else if (b === 0x2e) {
|
||
channel.ccENM();
|
||
} else if (b === 0x2f) {
|
||
channel.ccEOC();
|
||
}
|
||
} else {
|
||
// a == 0x17 || a == 0x1F
|
||
channel.ccTO(b - 0x20);
|
||
}
|
||
|
||
setLastCmd(a, b, cmdHistory);
|
||
this.currentChannel = chNr;
|
||
return true;
|
||
}
|
||
/**
|
||
* Parse midrow styling command
|
||
* @returns {Boolean}
|
||
*/
|
||
;
|
||
|
||
_proto7.parseMidrow = function parseMidrow(a, b) {
|
||
var chNr = 0;
|
||
|
||
if ((a === 0x11 || a === 0x19) && b >= 0x20 && b <= 0x2f) {
|
||
if (a === 0x11) {
|
||
chNr = 1;
|
||
} else {
|
||
chNr = 2;
|
||
}
|
||
|
||
if (chNr !== this.currentChannel) {
|
||
this.logger.log(VerboseLevel.ERROR, 'Mismatch channel in midrow parsing');
|
||
return false;
|
||
}
|
||
|
||
var channel = this.channels[chNr];
|
||
|
||
if (!channel) {
|
||
return false;
|
||
}
|
||
|
||
channel.ccMIDROW(b);
|
||
this.logger.log(VerboseLevel.DEBUG, 'MIDROW (' + numArrayToHexArray([a, b]) + ')');
|
||
return true;
|
||
}
|
||
|
||
return false;
|
||
}
|
||
/**
|
||
* Parse Preamble Access Codes (Table 53).
|
||
* @returns {Boolean} Tells if PAC found
|
||
*/
|
||
;
|
||
|
||
_proto7.parsePAC = function parsePAC(a, b) {
|
||
var row;
|
||
var cmdHistory = this.cmdHistory;
|
||
var case1 = (a >= 0x11 && a <= 0x17 || a >= 0x19 && a <= 0x1f) && b >= 0x40 && b <= 0x7f;
|
||
var case2 = (a === 0x10 || a === 0x18) && b >= 0x40 && b <= 0x5f;
|
||
|
||
if (!(case1 || case2)) {
|
||
return false;
|
||
}
|
||
|
||
if (hasCmdRepeated(a, b, cmdHistory)) {
|
||
setLastCmd(null, null, cmdHistory);
|
||
return true; // Repeated commands are dropped (once)
|
||
}
|
||
|
||
var chNr = a <= 0x17 ? 1 : 2;
|
||
|
||
if (b >= 0x40 && b <= 0x5f) {
|
||
row = chNr === 1 ? rowsLowCh1[a] : rowsLowCh2[a];
|
||
} else {
|
||
// 0x60 <= b <= 0x7F
|
||
row = chNr === 1 ? rowsHighCh1[a] : rowsHighCh2[a];
|
||
}
|
||
|
||
var channel = this.channels[chNr];
|
||
|
||
if (!channel) {
|
||
return false;
|
||
}
|
||
|
||
channel.setPAC(this.interpretPAC(row, b));
|
||
setLastCmd(a, b, cmdHistory);
|
||
this.currentChannel = chNr;
|
||
return true;
|
||
}
|
||
/**
|
||
* Interpret the second byte of the pac, and return the information.
|
||
* @returns {Object} pacData with style parameters.
|
||
*/
|
||
;
|
||
|
||
_proto7.interpretPAC = function interpretPAC(row, _byte3) {
|
||
var pacIndex;
|
||
var pacData = {
|
||
color: null,
|
||
italics: false,
|
||
indent: null,
|
||
underline: false,
|
||
row: row
|
||
};
|
||
|
||
if (_byte3 > 0x5f) {
|
||
pacIndex = _byte3 - 0x60;
|
||
} else {
|
||
pacIndex = _byte3 - 0x40;
|
||
}
|
||
|
||
pacData.underline = (pacIndex & 1) === 1;
|
||
|
||
if (pacIndex <= 0xd) {
|
||
pacData.color = ['white', 'green', 'blue', 'cyan', 'red', 'yellow', 'magenta', 'white'][Math.floor(pacIndex / 2)];
|
||
} else if (pacIndex <= 0xf) {
|
||
pacData.italics = true;
|
||
pacData.color = 'white';
|
||
} else {
|
||
pacData.indent = Math.floor((pacIndex - 0x10) / 2) * 4;
|
||
}
|
||
|
||
return pacData; // Note that row has zero offset. The spec uses 1.
|
||
}
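
/*
 * Worked example (illustrative comment only) for interpretPAC above, using
 * made-up byte values on a parser instance:
 *
 *   cea608Parser.interpretPAC(2, 0x42)
 *     // -> { color: 'green', italics: false, indent: null, underline: false, row: 2 }
 *     // pacIndex = 0x42 - 0x40 = 2, so color = table[Math.floor(2 / 2)] = 'green'
 *   cea608Parser.interpretPAC(2, 0x54)
 *     // -> { color: null, italics: false, indent: 8, underline: false, row: 2 }
 *     // pacIndex = 0x54 - 0x40 = 20, so indent = Math.floor((20 - 16) / 2) * 4 = 8
 */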
|
||
/**
|
||
* Parse characters.
|
||
* @returns An array with 1 to 2 codes corresponding to chars, if found. null otherwise.
|
||
*/
|
||
;
|
||
|
||
_proto7.parseChars = function parseChars(a, b) {
|
||
var channelNr;
|
||
var charCodes = null;
|
||
var charCode1 = null;
|
||
|
||
if (a >= 0x19) {
|
||
channelNr = 2;
|
||
charCode1 = a - 8;
|
||
} else {
|
||
channelNr = 1;
|
||
charCode1 = a;
|
||
}
|
||
|
||
if (charCode1 >= 0x11 && charCode1 <= 0x13) {
|
||
// Special character
|
||
var oneCode;
|
||
|
||
if (charCode1 === 0x11) {
|
||
oneCode = b + 0x50;
|
||
} else if (charCode1 === 0x12) {
|
||
oneCode = b + 0x70;
|
||
} else {
|
||
oneCode = b + 0x90;
|
||
}
|
||
|
||
this.logger.log(VerboseLevel.INFO, "Special char '" + getCharForByte(oneCode) + "' in channel " + channelNr);
|
||
charCodes = [oneCode];
|
||
} else if (a >= 0x20 && a <= 0x7f) {
|
||
charCodes = b === 0 ? [a] : [a, b];
|
||
}
|
||
|
||
if (charCodes) {
|
||
var hexCodes = numArrayToHexArray(charCodes);
|
||
this.logger.log(VerboseLevel.DEBUG, 'Char codes = ' + hexCodes.join(','));
|
||
setLastCmd(a, b, this.cmdHistory);
|
||
}
|
||
|
||
return charCodes;
|
||
}
|
||
/**
|
||
* Parse extended background attributes as well as new foreground color black.
|
||
* @returns {Boolean} Tells if background attributes are found
|
||
*/
|
||
;
|
||
|
||
_proto7.parseBackgroundAttributes = function parseBackgroundAttributes(a, b) {
|
||
var case1 = (a === 0x10 || a === 0x18) && b >= 0x20 && b <= 0x2f;
|
||
var case2 = (a === 0x17 || a === 0x1f) && b >= 0x2d && b <= 0x2f;
|
||
|
||
if (!(case1 || case2)) {
|
||
return false;
|
||
}
|
||
|
||
var index;
|
||
var bkgData = {};
|
||
|
||
if (a === 0x10 || a === 0x18) {
|
||
index = Math.floor((b - 0x20) / 2);
|
||
bkgData.background = backgroundColors[index];
|
||
|
||
if (b % 2 === 1) {
|
||
bkgData.background = bkgData.background + '_semi';
|
||
}
|
||
} else if (b === 0x2d) {
|
||
bkgData.background = 'transparent';
|
||
} else {
|
||
bkgData.foreground = 'black';
|
||
|
||
if (b === 0x2f) {
|
||
bkgData.underline = true;
|
||
}
|
||
}
|
||
|
||
var chNr = a <= 0x17 ? 1 : 2;
|
||
var channel = this.channels[chNr];
|
||
channel.setBkgData(bkgData);
|
||
setLastCmd(a, b, this.cmdHistory);
|
||
return true;
|
||
}
|
||
/**
|
||
* Reset state of parser and its channels.
|
||
*/
|
||
;
|
||
|
||
_proto7.reset = function reset() {
|
||
for (var i = 0; i < Object.keys(this.channels).length; i++) {
|
||
var channel = this.channels[i];
|
||
|
||
if (channel) {
|
||
channel.reset();
|
||
}
|
||
}
|
||
|
||
this.cmdHistory = createCmdHistory();
|
||
}
|
||
/**
|
||
* Trigger the generation of a cue, and the start of a new one if displayScreens are not empty.
|
||
*/
|
||
;
|
||
|
||
_proto7.cueSplitAtTime = function cueSplitAtTime(t) {
|
||
for (var i = 0; i < this.channels.length; i++) {
|
||
var channel = this.channels[i];
|
||
|
||
if (channel) {
|
||
channel.cueSplitAtTime(t);
|
||
}
|
||
}
|
||
};
|
||
|
||
return Cea608Parser;
|
||
}();
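
/*
 * Usage sketch (illustrative comment only): feeding field-1 byte pairs into
 * the parser. The output filter below is a hypothetical stand-in for the
 * output handler the library wires in elsewhere; it only needs the newCue,
 * dispatchCue and reset methods that Cea608Channel calls.
 *
 *   var outputFilter = {
 *     newCue: function (startTime, endTime, screen) {
 *       // build a cue from the CaptionScreen snapshot
 *     },
 *     dispatchCue: function () {},
 *     reset: function () {}
 *   };
 *   var parser = new Cea608Parser(1, outputFilter, outputFilter);
 *   // placeholder byte pairs: an RDC command followed by the characters 'HI'
 *   parser.addData(12.5, [0x14, 0x29, 0x48, 0x49]);
 */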
|
||
|
||
function setLastCmd(a, b, cmdHistory) {
|
||
cmdHistory.a = a;
|
||
cmdHistory.b = b;
|
||
}
|
||
|
||
function hasCmdRepeated(a, b, cmdHistory) {
|
||
return cmdHistory.a === a && cmdHistory.b === b;
|
||
}
|
||
|
||
function createCmdHistory() {
|
||
return {
|
||
a: null,
|
||
b: null
|
||
};
|
||
}
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (Cea608Parser);

/***/ }),

/***/ "./src/utils/codecs.ts":
/*!*****************************!*\
  !*** ./src/utils/codecs.ts ***!
  \*****************************/
/*! exports provided: isCodecType, isCodecSupportedInMp4 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isCodecType", function() { return isCodecType; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isCodecSupportedInMp4", function() { return isCodecSupportedInMp4; });
// from http://mp4ra.org/codecs.html
var sampleEntryCodesISO = {
  audio: {
    a3ds: true,
    'ac-3': true,
    'ac-4': true,
    alac: true,
    alaw: true,
    dra1: true,
    'dts+': true,
    'dts-': true,
    dtsc: true,
    dtse: true,
    dtsh: true,
    'ec-3': true,
    enca: true,
    g719: true,
    g726: true,
    m4ae: true,
    mha1: true,
    mha2: true,
    mhm1: true,
    mhm2: true,
    mlpa: true,
    mp4a: true,
    'raw ': true,
    Opus: true,
    samr: true,
    sawb: true,
    sawp: true,
    sevc: true,
    sqcp: true,
    ssmv: true,
    twos: true,
    ulaw: true
  },
  video: {
    avc1: true,
    avc2: true,
    avc3: true,
    avc4: true,
    avcp: true,
    av01: true,
    drac: true,
    dvav: true,
    dvhe: true,
    encv: true,
    hev1: true,
    hvc1: true,
    mjp2: true,
    mp4v: true,
    mvc1: true,
    mvc2: true,
    mvc3: true,
    mvc4: true,
    resv: true,
    rv60: true,
    s263: true,
    svc1: true,
    svc2: true,
    'vc-1': true,
    vp08: true,
    vp09: true
  },
  text: {
    stpp: true,
    wvtt: true
  }
};
function isCodecType(codec, type) {
  var typeCodes = sampleEntryCodesISO[type];
  return !!typeCodes && typeCodes[codec.slice(0, 4)] === true;
}
function isCodecSupportedInMp4(codec, type) {
  return MediaSource.isTypeSupported((type || 'video') + "/mp4;codecs=\"" + codec + "\"");
}

/***/ }),
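// Illustrative usage sketch (not part of the original bundle): the two helpers exported by
// ./src/utils/codecs.ts key off the four-character sample entry code at the start of a codec
// string. The concrete codec strings below are only examples.
//
//   isCodecType('avc1.64001f', 'video');           // true  ('avc1' is in the video table)
//   isCodecType('mp4a.40.2', 'audio');             // true  ('mp4a' is in the audio table)
//   isCodecType('mp4a.40.2', 'video');             // false
//   isCodecSupportedInMp4('avc1.64001f', 'video'); // MediaSource.isTypeSupported('video/mp4;codecs="avc1.64001f"')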
|
||
|
||
/***/ "./src/utils/cues.ts":
|
||
/*!***************************!*\
|
||
!*** ./src/utils/cues.ts ***!
|
||
\***************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony import */ var _vttparser__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./vttparser */ "./src/utils/vttparser.ts");
|
||
/* harmony import */ var _webvtt_parser__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./webvtt-parser */ "./src/utils/webvtt-parser.ts");
|
||
/* harmony import */ var _texttrack_utils__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./texttrack-utils */ "./src/utils/texttrack-utils.ts");
|
||
|
||
|
||
|
||
var WHITESPACE_CHAR = /\s/;
|
||
var Cues = {
|
||
newCue: function newCue(track, startTime, endTime, captionScreen) {
|
||
var result = [];
|
||
var row; // the type data states this is VTTCue, but it can potentially be a TextTrackCue on old browsers
|
||
|
||
var cue;
|
||
var indenting;
|
||
var indent;
|
||
var text;
|
||
var Cue = self.VTTCue || self.TextTrackCue;
|
||
|
||
for (var r = 0; r < captionScreen.rows.length; r++) {
|
||
row = captionScreen.rows[r];
|
||
indenting = true;
|
||
indent = 0;
|
||
text = '';
|
||
|
||
if (!row.isEmpty()) {
|
||
for (var c = 0; c < row.chars.length; c++) {
|
||
if (WHITESPACE_CHAR.test(row.chars[c].uchar) && indenting) {
|
||
indent++;
|
||
} else {
|
||
text += row.chars[c].uchar;
|
||
indenting = false;
|
||
}
|
||
} // To be used for cleaning-up orphaned roll-up captions
|
||
|
||
|
||
row.cueStartTime = startTime; // Give a slight bump to the endTime if it's equal to startTime to avoid a SyntaxError in IE
|
||
|
||
if (startTime === endTime) {
|
||
endTime += 0.0001;
|
||
}
|
||
|
||
if (indent >= 16) {
|
||
indent--;
|
||
} else {
|
||
indent++;
|
||
}
|
||
|
||
var cueText = Object(_vttparser__WEBPACK_IMPORTED_MODULE_0__["fixLineBreaks"])(text.trim());
|
||
var id = Object(_webvtt_parser__WEBPACK_IMPORTED_MODULE_1__["generateCueId"])(startTime, endTime, cueText); // If this cue already exists in the track do not push it
|
||
|
||
if (!track || !track.cues || !track.cues.getCueById(id)) {
|
||
cue = new Cue(startTime, endTime, cueText);
|
||
cue.id = id;
|
||
cue.line = r + 1;
|
||
cue.align = 'left'; // Clamp the position between 10 and 80 percent (CEA-608 PAC indent code)
|
||
// https://dvcs.w3.org/hg/text-tracks/raw-file/default/608toVTT/608toVTT.html#positioning-in-cea-608
|
||
// Firefox throws an exception and captions break with out of bounds 0-100 values
|
||
|
||
cue.position = 10 + Math.min(80, Math.floor(indent * 8 / 32) * 10);
|
||
result.push(cue);
|
||
}
|
||
}
|
||
}
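// Worked example of the position clamp above (added note, not part of the original source):
// an un-indented row (indent 0, bumped to 1 by the branch above) yields
//   position = 10 + Math.min(80, Math.floor(1 * 8 / 32) * 10) = 10
// while a fully indented row (indent 31, reduced to 30) yields
//   position = 10 + Math.min(80, Math.floor(30 * 8 / 32) * 10) = 80
// so cue positions stay inside the 10-80 percent window described in the comment above.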
|
||
|
||
if (track && result.length) {
|
||
// Sort bottom cues in reverse order so that they render in line order when overlapping in Chrome
|
||
result.sort(function (cueA, cueB) {
|
||
if (cueA.line === 'auto' || cueB.line === 'auto') {
|
||
return 0;
|
||
}
|
||
|
||
if (cueA.line > 8 && cueB.line > 8) {
|
||
return cueB.line - cueA.line;
|
||
}
|
||
|
||
return cueA.line - cueB.line;
|
||
});
|
||
result.forEach(function (cue) {
|
||
return Object(_texttrack_utils__WEBPACK_IMPORTED_MODULE_2__["addCueToTrack"])(track, cue);
|
||
});
|
||
}
|
||
|
||
return result;
|
||
}
|
||
};
|
||
/* harmony default export */ __webpack_exports__["default"] = (Cues);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/utils/discontinuities.ts":
|
||
/*!**************************************!*\
|
||
!*** ./src/utils/discontinuities.ts ***!
|
||
\**************************************/
|
||
/*! exports provided: findFirstFragWithCC, shouldAlignOnDiscontinuities, findDiscontinuousReferenceFrag, adjustSlidingStart, alignStream, alignPDT, alignFragmentByPDTDelta, alignMediaPlaylistByPDT */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "findFirstFragWithCC", function() { return findFirstFragWithCC; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "shouldAlignOnDiscontinuities", function() { return shouldAlignOnDiscontinuities; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "findDiscontinuousReferenceFrag", function() { return findDiscontinuousReferenceFrag; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "adjustSlidingStart", function() { return adjustSlidingStart; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "alignStream", function() { return alignStream; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "alignPDT", function() { return alignPDT; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "alignFragmentByPDTDelta", function() { return alignFragmentByPDTDelta; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "alignMediaPlaylistByPDT", function() { return alignMediaPlaylistByPDT; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _logger__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./logger */ "./src/utils/logger.ts");
|
||
/* harmony import */ var _controller_level_helper__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../controller/level-helper */ "./src/controller/level-helper.ts");
|
||
|
||
|
||
|
||
|
||
function findFirstFragWithCC(fragments, cc) {
|
||
var firstFrag = null;
|
||
|
||
for (var i = 0, len = fragments.length; i < len; i++) {
|
||
var currentFrag = fragments[i];
|
||
|
||
if (currentFrag && currentFrag.cc === cc) {
|
||
firstFrag = currentFrag;
|
||
break;
|
||
}
|
||
}
|
||
|
||
return firstFrag;
|
||
}
|
||
function shouldAlignOnDiscontinuities(lastFrag, lastLevel, details) {
|
||
if (lastLevel.details) {
|
||
if (details.endCC > details.startCC || lastFrag && lastFrag.cc < details.startCC) {
|
||
return true;
|
||
}
|
||
}
|
||
|
||
return false;
|
||
} // Find the first frag in the previous level which matches the CC of the first frag of the new level
|
||
|
||
function findDiscontinuousReferenceFrag(prevDetails, curDetails) {
|
||
var prevFrags = prevDetails.fragments;
|
||
var curFrags = curDetails.fragments;
|
||
|
||
if (!curFrags.length || !prevFrags.length) {
|
||
_logger__WEBPACK_IMPORTED_MODULE_1__["logger"].log('No fragments to align');
|
||
return;
|
||
}
|
||
|
||
var prevStartFrag = findFirstFragWithCC(prevFrags, curFrags[0].cc);
|
||
|
||
if (!prevStartFrag || prevStartFrag && !prevStartFrag.startPTS) {
|
||
_logger__WEBPACK_IMPORTED_MODULE_1__["logger"].log('No frag in previous level to align on');
|
||
return;
|
||
}
|
||
|
||
return prevStartFrag;
|
||
}
|
||
|
||
function adjustFragmentStart(frag, sliding) {
|
||
if (frag) {
|
||
var start = frag.start + sliding;
|
||
frag.start = frag.startPTS = start;
|
||
frag.endPTS = start + frag.duration;
|
||
}
|
||
}
|
||
|
||
function adjustSlidingStart(sliding, details) {
|
||
// Update segments
|
||
var fragments = details.fragments;
|
||
|
||
for (var i = 0, len = fragments.length; i < len; i++) {
|
||
adjustFragmentStart(fragments[i], sliding);
|
||
} // Update LL-HLS parts at the end of the playlist
|
||
|
||
|
||
if (details.fragmentHint) {
|
||
adjustFragmentStart(details.fragmentHint, sliding);
|
||
}
|
||
|
||
details.alignedSliding = true;
|
||
}
|
||
/**
|
||
* Using the parameters of the last level, this function computes the PTS of the new fragments so that they form a
|
||
* contiguous stream with the last fragments.
|
||
* The PTS of a fragment lets Hls.js know where it fits into a stream - by knowing every PTS, we know which fragment to
|
||
* download at any given time. PTS is normally computed when the fragment is demuxed, so taking this step saves us time
|
||
* and an extra download.
|
||
* @param lastFrag
|
||
* @param lastLevel
|
||
* @param details
|
||
*/
|
||
|
||
function alignStream(lastFrag, lastLevel, details) {
|
||
if (!lastLevel) {
|
||
return;
|
||
}
|
||
|
||
alignDiscontinuities(lastFrag, details, lastLevel);
|
||
|
||
if (!details.alignedSliding && lastLevel.details) {
|
||
// If the PTS wasn't figured out via discontinuity sequence that means there was no CC increase within the level.
|
||
// Aligning via Program Date Time should therefore be reliable, since PDT should be the same within the same
|
||
// discontinuity sequence.
|
||
alignPDT(details, lastLevel.details);
|
||
}
|
||
|
||
if (!details.alignedSliding && lastLevel.details && !details.skippedSegments) {
|
||
// Try to align on sn so that we pick a better start fragment.
|
||
// Do not perform this on playlists with delta updates as this is only to align levels on switch
|
||
// and adjustSliding only adjusts fragments after skippedSegments.
|
||
Object(_controller_level_helper__WEBPACK_IMPORTED_MODULE_2__["adjustSliding"])(lastLevel.details, details);
|
||
}
|
||
}
|
||
/**
|
||
* Computes the PTS of a new level's fragments using the PTS of a fragment in the last level which shares the same
|
||
* discontinuity sequence.
|
||
* @param lastFrag - The last Fragment which shares the same discontinuity sequence
|
||
* @param lastLevel - The details of the last loaded level
|
||
* @param details - The details of the new level
|
||
*/
|
||
|
||
function alignDiscontinuities(lastFrag, details, lastLevel) {
|
||
if (shouldAlignOnDiscontinuities(lastFrag, lastLevel, details)) {
|
||
var referenceFrag = findDiscontinuousReferenceFrag(lastLevel.details, details);
|
||
|
||
if (referenceFrag && Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(referenceFrag.start)) {
|
||
_logger__WEBPACK_IMPORTED_MODULE_1__["logger"].log("Adjusting PTS using last level due to CC increase within current level " + details.url);
|
||
adjustSlidingStart(referenceFrag.start, details);
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Computes the PTS of a new level's fragments using the difference in Program Date Time from the last level.
|
||
* @param details - The details of the new level
|
||
* @param lastDetails - The details of the last loaded level
|
||
*/
|
||
|
||
|
||
function alignPDT(details, lastDetails) {
|
||
// This check protects the unsafe "!" usage below for null program date time access.
|
||
if (!lastDetails.fragments.length || !details.hasProgramDateTime || !lastDetails.hasProgramDateTime) {
|
||
return;
|
||
} // if last level sliding is 1000 and its first frag PROGRAM-DATE-TIME is 2017-08-20 1:10:00 AM
|
||
// and if new details first frag PROGRAM DATE-TIME is 2017-08-20 1:10:08 AM
|
||
// then we can deduce that playlist B sliding is 1000+8 = 1008s
|
||
|
||
|
||
var lastPDT = lastDetails.fragments[0].programDateTime; // hasProgramDateTime check above makes this safe.
|
||
|
||
var newPDT = details.fragments[0].programDateTime; // date diff is in ms. frag.start is in seconds
|
||
|
||
var sliding = (newPDT - lastPDT) / 1000 + lastDetails.fragments[0].start;
|
||
|
||
if (sliding && Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(sliding)) {
|
||
_logger__WEBPACK_IMPORTED_MODULE_1__["logger"].log("Adjusting PTS using programDateTime delta " + (newPDT - lastPDT) + "ms, sliding:" + sliding.toFixed(3) + " " + details.url + " ");
|
||
adjustSlidingStart(sliding, details);
|
||
}
|
||
}
|
||
function alignFragmentByPDTDelta(frag, delta) {
|
||
var programDateTime = frag.programDateTime;
|
||
if (!programDateTime) return;
|
||
var start = (programDateTime - delta) / 1000;
|
||
frag.start = frag.startPTS = start;
|
||
frag.endPTS = start + frag.duration;
|
||
}
|
||
/**
|
||
* Ensures appropriate time-alignment between renditions based on PDT. Unlike `alignPDT`, which adjusts
|
||
* the timeline based on the delta between PDTs of the 0th fragment of two playlists/`LevelDetails`,
|
||
* this function assumes the timelines represented in `refDetails` are accurate, including the PDTs,
|
||
* and uses the "wallclock"/PDT timeline as a cross-reference to `details`, adjusting the presentation
|
||
* times/timelines of `details` accordingly.
|
||
* Given the asynchronous nature of fetches and initial loads of live `main` and audio/subtitle tracks,
|
||
* the primary purpose of this function is to ensure the "local timelines" of audio/subtitle tracks
|
||
* are aligned to the main/video timeline, using PDT as the cross-reference/"anchor" that should
|
||
* be consistent across playlists, per the HLS spec.
|
||
* @param details - The details of the rendition you'd like to time-align (e.g. an audio rendition).
|
||
* @param refDetails - The details of the reference rendition with start and PDT times for alignment.
|
||
*/
|
||
|
||
function alignMediaPlaylistByPDT(details, refDetails) {
|
||
// This check protects the unsafe "!" usage below for null program date time access.
|
||
if (!refDetails.fragments.length || !details.hasProgramDateTime || !refDetails.hasProgramDateTime) {
|
||
return;
|
||
}
|
||
|
||
var refPDT = refDetails.fragments[0].programDateTime; // hasProgramDateTime check above makes this safe.
|
||
|
||
var refStart = refDetails.fragments[0].start; // Use the delta between the reference details' presentation timeline's start time and its PDT
|
||
// to align the other rendition's timeline.
|
||
|
||
var delta = refPDT - refStart * 1000; // Per spec: "If any Media Playlist in a Master Playlist contains an EXT-X-PROGRAM-DATE-TIME tag, then all
|
||
// Media Playlists in that Master Playlist MUST contain EXT-X-PROGRAM-DATE-TIME tags with consistent mappings
|
||
// of date and time to media timestamps."
|
||
// So we should be able to use each rendition's PDT as a reference time and use the delta to compute our relevant
|
||
// start and end times.
|
||
// NOTE: This code assumes each level/details timelines have already been made "internally consistent"
|
||
|
||
details.fragments.forEach(function (frag) {
|
||
alignFragmentByPDTDelta(frag, delta);
|
||
});
|
||
|
||
if (details.fragmentHint) {
|
||
alignFragmentByPDTDelta(details.fragmentHint, delta);
|
||
}
|
||
|
||
details.alignedSliding = true;
|
||
}
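// Worked example for the PDT delta above (illustrative note, not part of the original source;
// the concrete numbers are made up): if the reference rendition's first fragment has
//   refStart = 10 (s) and refPDT = 1629000000000 (ms since epoch)
// then delta = 1629000000000 - 10 * 1000 = 1628999990000. A fragment in `details` whose
// programDateTime is 1629000005000 is remapped by alignFragmentByPDTDelta to
//   start = (1629000005000 - 1628999990000) / 1000 = 15 (s)
// i.e. five seconds after the reference rendition's first fragment, as its PDT implies.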
|
||
|
||
/***/ }),

/***/ "./src/utils/ewma-bandwidth-estimator.ts":
/*!***********************************************!*\
  !*** ./src/utils/ewma-bandwidth-estimator.ts ***!
  \***********************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _utils_ewma__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/ewma */ "./src/utils/ewma.ts");
/*
 * EWMA Bandwidth Estimator
 * - heavily inspired by shaka-player
 * Tracks bandwidth samples and estimates available bandwidth.
 * Based on the minimum of two exponentially-weighted moving averages with
 * different half-lives.
 */


var EwmaBandWidthEstimator = /*#__PURE__*/function () {
  function EwmaBandWidthEstimator(slow, fast, defaultEstimate) {
    this.defaultEstimate_ = void 0;
    this.minWeight_ = void 0;
    this.minDelayMs_ = void 0;
    this.slow_ = void 0;
    this.fast_ = void 0;
    this.defaultEstimate_ = defaultEstimate;
    this.minWeight_ = 0.001;
    this.minDelayMs_ = 50;
    this.slow_ = new _utils_ewma__WEBPACK_IMPORTED_MODULE_0__["default"](slow);
    this.fast_ = new _utils_ewma__WEBPACK_IMPORTED_MODULE_0__["default"](fast);
  }

  var _proto = EwmaBandWidthEstimator.prototype;

  _proto.update = function update(slow, fast) {
    var slow_ = this.slow_,
        fast_ = this.fast_;

    if (this.slow_.halfLife !== slow) {
      this.slow_ = new _utils_ewma__WEBPACK_IMPORTED_MODULE_0__["default"](slow, slow_.getEstimate(), slow_.getTotalWeight());
    }

    if (this.fast_.halfLife !== fast) {
      this.fast_ = new _utils_ewma__WEBPACK_IMPORTED_MODULE_0__["default"](fast, fast_.getEstimate(), fast_.getTotalWeight());
    }
  };

  _proto.sample = function sample(durationMs, numBytes) {
    durationMs = Math.max(durationMs, this.minDelayMs_);
    var numBits = 8 * numBytes; // weight is duration in seconds

    var durationS = durationMs / 1000; // value is bandwidth in bits/s

    var bandwidthInBps = numBits / durationS;
    this.fast_.sample(durationS, bandwidthInBps);
    this.slow_.sample(durationS, bandwidthInBps);
  };

  _proto.canEstimate = function canEstimate() {
    var fast = this.fast_;
    return fast && fast.getTotalWeight() >= this.minWeight_;
  };

  _proto.getEstimate = function getEstimate() {
    if (this.canEstimate()) {
      // console.log('slow estimate:'+ Math.round(this.slow_.getEstimate()));
      // console.log('fast estimate:'+ Math.round(this.fast_.getEstimate()));
      // Take the minimum of these two estimates. This should have the effect of
      // adapting down quickly, but up more slowly.
      return Math.min(this.fast_.getEstimate(), this.slow_.getEstimate());
    } else {
      return this.defaultEstimate_;
    }
  };

  _proto.destroy = function destroy() {};

  return EwmaBandWidthEstimator;
}();

/* harmony default export */ __webpack_exports__["default"] = (EwmaBandWidthEstimator);

/***/ }),
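// Illustrative usage sketch for the estimator above (not part of the original bundle;
// the half-life and byte values are arbitrary):
//
//   var bwEstimator = new EwmaBandWidthEstimator(15, 3, 5e5); // slow/fast half-lives, 500 kbps default
//   bwEstimator.sample(1000, 250000); // a 250 kB chunk loaded in 1 s  -> 2 Mbps sample
//   bwEstimator.sample(500, 250000);  // 250 kB loaded in 0.5 s        -> 4 Mbps sample
//   bwEstimator.getEstimate();        // min(fast, slow) once total weight >= 0.001, else the default
//
// Inside hls.js the same estimator is fed from fragment load statistics by the ABR controller.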

/***/ "./src/utils/ewma.ts":
/*!***************************!*\
  !*** ./src/utils/ewma.ts ***!
  \***************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/*
 * Compute an exponentially weighted moving average
 * - https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
 * - heavily inspired by shaka-player
 */
var EWMA = /*#__PURE__*/function () {
  // About half of the estimated value will be from the last |halfLife| samples by weight.
  function EWMA(halfLife, estimate, weight) {
    if (estimate === void 0) {
      estimate = 0;
    }

    if (weight === void 0) {
      weight = 0;
    }

    this.halfLife = void 0;
    this.alpha_ = void 0;
    this.estimate_ = void 0;
    this.totalWeight_ = void 0;
    this.halfLife = halfLife; // Larger values of alpha expire historical data more slowly.

    this.alpha_ = halfLife ? Math.exp(Math.log(0.5) / halfLife) : 0;
    this.estimate_ = estimate;
    this.totalWeight_ = weight;
  }

  var _proto = EWMA.prototype;

  _proto.sample = function sample(weight, value) {
    var adjAlpha = Math.pow(this.alpha_, weight);
    this.estimate_ = value * (1 - adjAlpha) + adjAlpha * this.estimate_;
    this.totalWeight_ += weight;
  };

  _proto.getTotalWeight = function getTotalWeight() {
    return this.totalWeight_;
  };

  _proto.getEstimate = function getEstimate() {
    if (this.alpha_) {
      var zeroFactor = 1 - Math.pow(this.alpha_, this.totalWeight_);

      if (zeroFactor) {
        return this.estimate_ / zeroFactor;
      }
    }

    return this.estimate_;
  };

  return EWMA;
}();

/* harmony default export */ __webpack_exports__["default"] = (EWMA);

/***/ }),
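// Added note (not part of the original source): with the constructor above,
//   alpha = exp(ln(0.5) / halfLife), so alpha^halfLife = 0.5.
// For halfLife = 3 and the default initial estimate of 0, a single sample(3, v) leaves
//   estimate_ = v * (1 - 0.5) = 0.5 * v and totalWeight_ = 3,
// and getEstimate() divides by zeroFactor = 1 - alpha^3 = 0.5, returning v exactly.
// The zero-factor correction removes the bias introduced by starting the average at 0.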
|
||
|
||
/***/ "./src/utils/fetch-loader.ts":
|
||
/*!***********************************!*\
|
||
!*** ./src/utils/fetch-loader.ts ***!
|
||
\***********************************/
|
||
/*! exports provided: fetchSupported, default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "fetchSupported", function() { return fetchSupported; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _loader_load_stats__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../loader/load-stats */ "./src/loader/load-stats.ts");
|
||
/* harmony import */ var _demux_chunk_cache__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../demux/chunk-cache */ "./src/demux/chunk-cache.ts");
|
||
|
||
|
||
|
||
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
|
||
|
||
function _wrapNativeSuper(Class) { var _cache = typeof Map === "function" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== "function") { throw new TypeError("Super expression must either be null or a function"); } if (typeof _cache !== "undefined") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }
|
||
|
||
function _construct(Parent, args, Class) { if (_isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }
|
||
|
||
function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {})); return true; } catch (e) { return false; } }
|
||
|
||
function _isNativeFunction(fn) { return Function.toString.call(fn).indexOf("[native code]") !== -1; }
|
||
|
||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
|
||
|
||
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
|
||
|
||
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
|
||
|
||
|
||
|
||
function fetchSupported() {
|
||
if ( // @ts-ignore
|
||
self.fetch && self.AbortController && self.ReadableStream && self.Request) {
|
||
try {
|
||
new self.ReadableStream({}); // eslint-disable-line no-new
|
||
|
||
return true;
|
||
} catch (e) {
|
||
/* noop */
|
||
}
|
||
}
|
||
|
||
return false;
|
||
}
|
||
|
||
var FetchLoader = /*#__PURE__*/function () {
|
||
function FetchLoader(config
|
||
/* HlsConfig */
|
||
) {
|
||
this.fetchSetup = void 0;
|
||
this.requestTimeout = void 0;
|
||
this.request = void 0;
|
||
this.response = void 0;
|
||
this.controller = void 0;
|
||
this.context = void 0;
|
||
this.config = null;
|
||
this.callbacks = null;
|
||
this.stats = void 0;
|
||
this.loader = null;
|
||
this.fetchSetup = config.fetchSetup || getRequest;
|
||
this.controller = new self.AbortController();
|
||
this.stats = new _loader_load_stats__WEBPACK_IMPORTED_MODULE_1__["LoadStats"]();
|
||
}
|
||
|
||
var _proto = FetchLoader.prototype;
|
||
|
||
_proto.destroy = function destroy() {
|
||
this.loader = this.callbacks = null;
|
||
this.abortInternal();
|
||
};
|
||
|
||
_proto.abortInternal = function abortInternal() {
|
||
var response = this.response;
|
||
|
||
if (!response || !response.ok) {
|
||
this.stats.aborted = true;
|
||
this.controller.abort();
|
||
}
|
||
};
|
||
|
||
_proto.abort = function abort() {
|
||
var _this$callbacks;
|
||
|
||
this.abortInternal();
|
||
|
||
if ((_this$callbacks = this.callbacks) !== null && _this$callbacks !== void 0 && _this$callbacks.onAbort) {
|
||
this.callbacks.onAbort(this.stats, this.context, this.response);
|
||
}
|
||
};
|
||
|
||
_proto.load = function load(context, config, callbacks) {
|
||
var _this = this;
|
||
|
||
var stats = this.stats;
|
||
|
||
if (stats.loading.start) {
|
||
throw new Error('Loader can only be used once.');
|
||
}
|
||
|
||
stats.loading.start = self.performance.now();
|
||
var initParams = getRequestParameters(context, this.controller.signal);
|
||
var onProgress = callbacks.onProgress;
|
||
var isArrayBuffer = context.responseType === 'arraybuffer';
|
||
var LENGTH = isArrayBuffer ? 'byteLength' : 'length';
|
||
this.context = context;
|
||
this.config = config;
|
||
this.callbacks = callbacks;
|
||
this.request = this.fetchSetup(context, initParams);
|
||
self.clearTimeout(this.requestTimeout);
|
||
this.requestTimeout = self.setTimeout(function () {
|
||
_this.abortInternal();
|
||
|
||
callbacks.onTimeout(stats, context, _this.response);
|
||
}, config.timeout);
|
||
self.fetch(this.request).then(function (response) {
|
||
_this.response = _this.loader = response;
|
||
|
||
if (!response.ok) {
|
||
var status = response.status,
|
||
statusText = response.statusText;
|
||
throw new FetchError(statusText || 'fetch, bad network response', status, response);
|
||
}
|
||
|
||
stats.loading.first = Math.max(self.performance.now(), stats.loading.start);
|
||
stats.total = parseInt(response.headers.get('Content-Length') || '0');
|
||
|
||
if (onProgress && Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(config.highWaterMark)) {
|
||
return _this.loadProgressively(response, stats, context, config.highWaterMark, onProgress);
|
||
}
|
||
|
||
if (isArrayBuffer) {
|
||
return response.arrayBuffer();
|
||
}
|
||
|
||
return response.text();
|
||
}).then(function (responseData) {
|
||
var response = _this.response;
|
||
self.clearTimeout(_this.requestTimeout);
|
||
stats.loading.end = Math.max(self.performance.now(), stats.loading.first);
|
||
stats.loaded = stats.total = responseData[LENGTH];
|
||
var loaderResponse = {
|
||
url: response.url,
|
||
data: responseData
|
||
};
|
||
|
||
if (onProgress && !Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(config.highWaterMark)) {
|
||
onProgress(stats, context, responseData, response);
|
||
}
|
||
|
||
callbacks.onSuccess(loaderResponse, stats, context, response);
|
||
}).catch(function (error) {
|
||
self.clearTimeout(_this.requestTimeout);
|
||
|
||
if (stats.aborted) {
|
||
return;
|
||
} // CORS errors result in an undefined code. Set it to 0 here to align with XHR's behavior
|
||
|
||
|
||
var code = error.code || 0;
|
||
callbacks.onError({
|
||
code: code,
|
||
text: error.message
|
||
}, context, error.details);
|
||
});
|
||
};
|
||
|
||
_proto.getCacheAge = function getCacheAge() {
|
||
var result = null;
|
||
|
||
if (this.response) {
|
||
var ageHeader = this.response.headers.get('age');
|
||
result = ageHeader ? parseFloat(ageHeader) : null;
|
||
}
|
||
|
||
return result;
|
||
};
|
||
|
||
_proto.loadProgressively = function loadProgressively(response, stats, context, highWaterMark, onProgress) {
|
||
if (highWaterMark === void 0) {
|
||
highWaterMark = 0;
|
||
}
|
||
|
||
var chunkCache = new _demux_chunk_cache__WEBPACK_IMPORTED_MODULE_2__["default"]();
|
||
var reader = response.body.getReader();
|
||
|
||
var pump = function pump() {
|
||
return reader.read().then(function (data) {
|
||
if (data.done) {
|
||
if (chunkCache.dataLength) {
|
||
onProgress(stats, context, chunkCache.flush(), response);
|
||
}
|
||
|
||
return Promise.resolve(new ArrayBuffer(0));
|
||
}
|
||
|
||
var chunk = data.value;
|
||
var len = chunk.length;
|
||
stats.loaded += len;
|
||
|
||
if (len < highWaterMark || chunkCache.dataLength) {
|
||
// The current chunk is too small to be emitted or the cache already has data
|
||
// Push it to the cache
|
||
chunkCache.push(chunk);
|
||
|
||
if (chunkCache.dataLength >= highWaterMark) {
|
||
// flush in order to join the typed arrays
|
||
onProgress(stats, context, chunkCache.flush(), response);
|
||
}
|
||
} else {
|
||
// If there's nothing cached already, and the chunk is large enough
|
||
// just emit the progress event
|
||
onProgress(stats, context, chunk, response);
|
||
}
|
||
|
||
return pump();
|
||
}).catch(function () {
|
||
/* aborted */
|
||
return Promise.reject();
|
||
});
|
||
};
|
||
|
||
return pump();
|
||
};
|
||
|
||
return FetchLoader;
|
||
}();
|
||
|
||
function getRequestParameters(context, signal) {
|
||
var initParams = {
|
||
method: 'GET',
|
||
mode: 'cors',
|
||
credentials: 'same-origin',
|
||
signal: signal,
|
||
headers: new self.Headers(_extends({}, context.headers))
|
||
};
|
||
|
||
if (context.rangeEnd) {
|
||
initParams.headers.set('Range', 'bytes=' + context.rangeStart + '-' + String(context.rangeEnd - 1));
|
||
}
|
||
|
||
return initParams;
|
||
}
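// Example of the Range header produced above (added note, not part of the original source):
// for a context with rangeStart = 0 and rangeEnd = 100 the request is sent with
//   Range: bytes=0-99
// i.e. rangeEnd is exclusive in the loader context while the HTTP byte range is inclusive.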
|
||
|
||
function getRequest(context, initParams) {
|
||
return new self.Request(context.url, initParams);
|
||
}
|
||
|
||
var FetchError = /*#__PURE__*/function (_Error) {
|
||
_inheritsLoose(FetchError, _Error);
|
||
|
||
function FetchError(message, code, details) {
|
||
var _this2;
|
||
|
||
_this2 = _Error.call(this, message) || this;
|
||
_this2.code = void 0;
|
||
_this2.details = void 0;
|
||
_this2.code = code;
|
||
_this2.details = details;
|
||
return _this2;
|
||
}
|
||
|
||
return FetchError;
|
||
}( /*#__PURE__*/_wrapNativeSuper(Error));
|
||
|
||
/* harmony default export */ __webpack_exports__["default"] = (FetchLoader);
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/utils/imsc1-ttml-parser.ts":
|
||
/*!****************************************!*\
|
||
!*** ./src/utils/imsc1-ttml-parser.ts ***!
|
||
\****************************************/
|
||
/*! exports provided: IMSC1_CODEC, parseIMSC1 */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "IMSC1_CODEC", function() { return IMSC1_CODEC; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseIMSC1", function() { return parseIMSC1; });
|
||
/* harmony import */ var _mp4_tools__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./mp4-tools */ "./src/utils/mp4-tools.ts");
|
||
/* harmony import */ var _vttparser__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./vttparser */ "./src/utils/vttparser.ts");
|
||
/* harmony import */ var _vttcue__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./vttcue */ "./src/utils/vttcue.ts");
|
||
/* harmony import */ var _demux_id3__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../demux/id3 */ "./src/demux/id3.ts");
|
||
/* harmony import */ var _timescale_conversion__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./timescale-conversion */ "./src/utils/timescale-conversion.ts");
|
||
/* harmony import */ var _webvtt_parser__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./webvtt-parser */ "./src/utils/webvtt-parser.ts");
|
||
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
var IMSC1_CODEC = 'stpp.ttml.im1t'; // Time format: h:m:s:frames(.subframes)
|
||
|
||
var HMSF_REGEX = /^(\d{2,}):(\d{2}):(\d{2}):(\d{2})\.?(\d+)?$/; // Time format: hours, minutes, seconds, milliseconds, frames, ticks
|
||
|
||
var TIME_UNIT_REGEX = /^(\d*(?:\.\d*)?)(h|m|s|ms|f|t)$/;
|
||
var textAlignToLineAlign = {
|
||
left: 'start',
|
||
center: 'center',
|
||
right: 'end',
|
||
start: 'start',
|
||
end: 'end'
|
||
};
|
||
function parseIMSC1(payload, initPTS, timescale, callBack, errorCallBack) {
|
||
var results = Object(_mp4_tools__WEBPACK_IMPORTED_MODULE_0__["findBox"])(new Uint8Array(payload), ['mdat']);
|
||
|
||
if (results.length === 0) {
|
||
errorCallBack(new Error('Could not parse IMSC1 mdat'));
|
||
return;
|
||
}
|
||
|
||
var ttmlList = results.map(function (mdat) {
|
||
return Object(_demux_id3__WEBPACK_IMPORTED_MODULE_3__["utf8ArrayToStr"])(mdat);
|
||
});
|
||
var syncTime = Object(_timescale_conversion__WEBPACK_IMPORTED_MODULE_4__["toTimescaleFromScale"])(initPTS, 1, timescale);
|
||
|
||
try {
|
||
ttmlList.forEach(function (ttml) {
|
||
return callBack(parseTTML(ttml, syncTime));
|
||
});
|
||
} catch (error) {
|
||
errorCallBack(error);
|
||
}
|
||
}
|
||
|
||
function parseTTML(ttml, syncTime) {
|
||
var parser = new DOMParser();
|
||
var xmlDoc = parser.parseFromString(ttml, 'text/xml');
|
||
var tt = xmlDoc.getElementsByTagName('tt')[0];
|
||
|
||
if (!tt) {
|
||
throw new Error('Invalid ttml');
|
||
}
|
||
|
||
var defaultRateInfo = {
|
||
frameRate: 30,
|
||
subFrameRate: 1,
|
||
frameRateMultiplier: 0,
|
||
tickRate: 0
|
||
};
|
||
var rateInfo = Object.keys(defaultRateInfo).reduce(function (result, key) {
|
||
result[key] = tt.getAttribute("ttp:" + key) || defaultRateInfo[key];
|
||
return result;
|
||
}, {});
|
||
var trim = tt.getAttribute('xml:space') !== 'preserve';
|
||
var styleElements = collectionToDictionary(getElementCollection(tt, 'styling', 'style'));
|
||
var regionElements = collectionToDictionary(getElementCollection(tt, 'layout', 'region'));
|
||
var cueElements = getElementCollection(tt, 'body', '[begin]');
|
||
return [].map.call(cueElements, function (cueElement) {
|
||
var cueText = getTextContent(cueElement, trim);
|
||
|
||
if (!cueText || !cueElement.hasAttribute('begin')) {
|
||
return null;
|
||
}
|
||
|
||
var startTime = parseTtmlTime(cueElement.getAttribute('begin'), rateInfo);
|
||
var duration = parseTtmlTime(cueElement.getAttribute('dur'), rateInfo);
|
||
var endTime = parseTtmlTime(cueElement.getAttribute('end'), rateInfo);
|
||
|
||
if (startTime === null) {
|
||
throw timestampParsingError(cueElement);
|
||
}
|
||
|
||
if (endTime === null) {
|
||
if (duration === null) {
|
||
throw timestampParsingError(cueElement);
|
||
}
|
||
|
||
endTime = startTime + duration;
|
||
}
|
||
|
||
var cue = new _vttcue__WEBPACK_IMPORTED_MODULE_2__["default"](startTime - syncTime, endTime - syncTime, cueText);
|
||
cue.id = Object(_webvtt_parser__WEBPACK_IMPORTED_MODULE_5__["generateCueId"])(cue.startTime, cue.endTime, cue.text);
|
||
var region = regionElements[cueElement.getAttribute('region')];
|
||
var style = styleElements[cueElement.getAttribute('style')]; // TODO: Add regions to track and cue (origin and extend)
|
||
// These values are hard-coded (for now) to simulate region settings in the demo
|
||
|
||
cue.position = 10;
|
||
cue.size = 80; // Apply styles to cue
|
||
|
||
var styles = getTtmlStyles(region, style, styleElements);
|
||
var textAlign = styles.textAlign;
|
||
|
||
if (textAlign) {
|
||
// cue.positionAlign not settable in FF~2016
|
||
var lineAlign = textAlignToLineAlign[textAlign];
|
||
|
||
if (lineAlign) {
|
||
cue.lineAlign = lineAlign;
|
||
}
|
||
|
||
cue.align = textAlign;
|
||
}
|
||
|
||
_extends(cue, styles);
|
||
|
||
return cue;
|
||
}).filter(function (cue) {
|
||
return cue !== null;
|
||
});
|
||
}
|
||
|
||
function getElementCollection(fromElement, parentName, childName) {
|
||
var parent = fromElement.getElementsByTagName(parentName)[0];
|
||
|
||
if (parent) {
|
||
return [].slice.call(parent.querySelectorAll(childName));
|
||
}
|
||
|
||
return [];
|
||
}
|
||
|
||
function collectionToDictionary(elementsWithId) {
|
||
return elementsWithId.reduce(function (dict, element) {
|
||
var id = element.getAttribute('xml:id');
|
||
|
||
if (id) {
|
||
dict[id] = element;
|
||
}
|
||
|
||
return dict;
|
||
}, {});
|
||
}
|
||
|
||
function getTextContent(element, trim) {
|
||
return [].slice.call(element.childNodes).reduce(function (str, node, i) {
|
||
var _node$childNodes;
|
||
|
||
if (node.nodeName === 'br' && i) {
|
||
return str + '\n';
|
||
}
|
||
|
||
if ((_node$childNodes = node.childNodes) !== null && _node$childNodes !== void 0 && _node$childNodes.length) {
|
||
return getTextContent(node, trim);
|
||
} else if (trim) {
|
||
return str + node.textContent.trim().replace(/\s+/g, ' ');
|
||
}
|
||
|
||
return str + node.textContent;
|
||
}, '');
|
||
}
|
||
|
||
function getTtmlStyles(region, style, styleElements) {
|
||
var ttsNs = 'http://www.w3.org/ns/ttml#styling';
|
||
var regionStyle = null;
|
||
var styleAttributes = ['displayAlign', 'textAlign', 'color', 'backgroundColor', 'fontSize', 'fontFamily' // 'fontWeight',
|
||
// 'lineHeight',
|
||
// 'wrapOption',
|
||
// 'fontStyle',
|
||
// 'direction',
|
||
// 'writingMode'
|
||
];
|
||
var regionStyleName = region !== null && region !== void 0 && region.hasAttribute('style') ? region.getAttribute('style') : null;
|
||
|
||
if (regionStyleName && styleElements.hasOwnProperty(regionStyleName)) {
|
||
regionStyle = styleElements[regionStyleName];
|
||
}
|
||
|
||
return styleAttributes.reduce(function (styles, name) {
|
||
var value = getAttributeNS(style, ttsNs, name) || getAttributeNS(region, ttsNs, name) || getAttributeNS(regionStyle, ttsNs, name);
|
||
|
||
if (value) {
|
||
styles[name] = value;
|
||
}
|
||
|
||
return styles;
|
||
}, {});
|
||
}
|
||
|
||
function getAttributeNS(element, ns, name) {
|
||
if (!element) {
|
||
return null;
|
||
}
|
||
|
||
return element.hasAttributeNS(ns, name) ? element.getAttributeNS(ns, name) : null;
|
||
}
|
||
|
||
function timestampParsingError(node) {
|
||
return new Error("Could not parse ttml timestamp " + node);
|
||
}
|
||
|
||
function parseTtmlTime(timeAttributeValue, rateInfo) {
|
||
if (!timeAttributeValue) {
|
||
return null;
|
||
}
|
||
|
||
var seconds = Object(_vttparser__WEBPACK_IMPORTED_MODULE_1__["parseTimeStamp"])(timeAttributeValue);
|
||
|
||
if (seconds === null) {
|
||
if (HMSF_REGEX.test(timeAttributeValue)) {
|
||
seconds = parseHoursMinutesSecondsFrames(timeAttributeValue, rateInfo);
|
||
} else if (TIME_UNIT_REGEX.test(timeAttributeValue)) {
|
||
seconds = parseTimeUnits(timeAttributeValue, rateInfo);
|
||
}
|
||
}
|
||
|
||
return seconds;
|
||
}
|
||
|
||
function parseHoursMinutesSecondsFrames(timeAttributeValue, rateInfo) {
|
||
var m = HMSF_REGEX.exec(timeAttributeValue);
|
||
var frames = (m[4] | 0) + (m[5] | 0) / rateInfo.subFrameRate;
|
||
return (m[1] | 0) * 3600 + (m[2] | 0) * 60 + (m[3] | 0) + frames / rateInfo.frameRate;
|
||
}
|
||
|
||
function parseTimeUnits(timeAttributeValue, rateInfo) {
|
||
var m = TIME_UNIT_REGEX.exec(timeAttributeValue);
|
||
var value = Number(m[1]);
|
||
var unit = m[2];
|
||
|
||
switch (unit) {
|
||
case 'h':
|
||
return value * 3600;
|
||
|
||
case 'm':
|
||
return value * 60;
|
||
|
||
case 'ms':
|
||
return value * 1000;
|
||
|
||
case 'f':
|
||
return value / rateInfo.frameRate;
|
||
|
||
case 't':
|
||
return value / rateInfo.tickRate;
|
||
}
|
||
|
||
return value;
|
||
}
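// Worked examples for the two TTML time parsers above (illustrative note, not part of the
// original source), assuming rateInfo = { frameRate: 30, subFrameRate: 1, tickRate: 90000 }:
//
//   parseHoursMinutesSecondsFrames('00:01:30:15', rateInfo) // 0*3600 + 1*60 + 30 + 15/30 = 90.5 s
//   parseTimeUnits('1.5h', rateInfo)                        // 1.5 * 3600 = 5400 s
//   parseTimeUnits('30f', rateInfo)                         // 30 / 30 = 1 s
//   parseTimeUnits('90000t', rateInfo)                      // 90000 / 90000 = 1 s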
|
||
|
||
/***/ }),

/***/ "./src/utils/logger.ts":
/*!*****************************!*\
  !*** ./src/utils/logger.ts ***!
  \*****************************/
/*! exports provided: enableLogs, logger */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "enableLogs", function() { return enableLogs; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "logger", function() { return logger; });
var noop = function noop() {};

var fakeLogger = {
  trace: noop,
  debug: noop,
  log: noop,
  warn: noop,
  info: noop,
  error: noop
};
var exportedLogger = fakeLogger; // let lastCallTime;
// function formatMsgWithTimeInfo(type, msg) {
//   const now = Date.now();
//   const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
//   lastCallTime = now;
//   msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
//   return msg;
// }

function consolePrintFn(type) {
  var func = self.console[type];

  if (func) {
    return func.bind(self.console, "[" + type + "] >");
  }

  return noop;
}

function exportLoggerFunctions(debugConfig) {
  for (var _len = arguments.length, functions = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
    functions[_key - 1] = arguments[_key];
  }

  functions.forEach(function (type) {
    exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
  });
}

function enableLogs(debugConfig) {
  // check that console is available
  if (self.console && debugConfig === true || typeof debugConfig === 'object') {
    exportLoggerFunctions(debugConfig, // Remove a level from this list to hard-disable it
    // 'trace',
    'debug', 'log', 'info', 'warn', 'error'); // Some browsers don't allow using bind on the console object;
    // fall back to the default (no-op) logger if needed

    try {
      exportedLogger.log();
    } catch (e) {
      exportedLogger = fakeLogger;
    }
  } else {
    exportedLogger = fakeLogger;
  }
}
var logger = exportedLogger;

/***/ }),
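// Illustrative usage sketch for the logger module above (not part of the original bundle):
//
//   enableLogs(true);                              // route debug/log/info/warn/error to self.console
//   enableLogs({ warn: myWarn, error: myError });  // custom sinks; missing levels fall back to console
//   enableLogs(false);                             // reset to the no-op fakeLogger
//
// `myWarn` / `myError` are hypothetical placeholder functions, not part of hls.js.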

/***/ "./src/utils/mediakeys-helper.ts":
/*!***************************************!*\
  !*** ./src/utils/mediakeys-helper.ts ***!
  \***************************************/
/*! exports provided: KeySystems, requestMediaKeySystemAccess */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "KeySystems", function() { return KeySystems; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "requestMediaKeySystemAccess", function() { return requestMediaKeySystemAccess; });
/**
 * @see https://developer.mozilla.org/en-US/docs/Web/API/Navigator/requestMediaKeySystemAccess
 */
var KeySystems;

(function (KeySystems) {
  KeySystems["WIDEVINE"] = "com.widevine.alpha";
  KeySystems["PLAYREADY"] = "com.microsoft.playready";
})(KeySystems || (KeySystems = {}));

var requestMediaKeySystemAccess = function () {
  if (typeof self !== 'undefined' && self.navigator && self.navigator.requestMediaKeySystemAccess) {
    return self.navigator.requestMediaKeySystemAccess.bind(self.navigator);
  } else {
    return null;
  }
}();

/***/ }),
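// Illustrative sketch (not part of the original bundle): requestMediaKeySystemAccess is the
// bound native EME entry point, or null where EME is unavailable. A hypothetical caller would
// pass a key-system id from the KeySystems enum plus standard MediaKeySystemConfiguration objects:
//
//   if (requestMediaKeySystemAccess) {
//     requestMediaKeySystemAccess(KeySystems.WIDEVINE, [{
//       initDataTypes: ['cenc'],
//       videoCapabilities: [{ contentType: 'video/mp4; codecs="avc1.42E01E"' }]
//     }]).then(function (access) { return access.createMediaKeys(); });
//   }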

/***/ "./src/utils/mediasource-helper.ts":
/*!*****************************************!*\
  !*** ./src/utils/mediasource-helper.ts ***!
  \*****************************************/
/*! exports provided: getMediaSource */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getMediaSource", function() { return getMediaSource; });
/**
 * MediaSource helper
 */
function getMediaSource() {
  return self.MediaSource || self.WebKitMediaSource;
}

/***/ }),
|
||
|
||
/***/ "./src/utils/mp4-tools.ts":
|
||
/*!********************************!*\
|
||
!*** ./src/utils/mp4-tools.ts ***!
|
||
\********************************/
|
||
/*! exports provided: RemuxerTrackIdConfig, bin2str, readUint16, readUint32, readSint32, writeUint32, findBox, parseSegmentIndex, parseInitSegment, getStartDTS, getDuration, computeRawDurationFromSamples, offsetStartDTS, segmentValidRange, appendUint8Array, parseSamples, parseSEIMessageFromNALu, parseEmsg */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "RemuxerTrackIdConfig", function() { return RemuxerTrackIdConfig; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "bin2str", function() { return bin2str; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "readUint16", function() { return readUint16; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "readUint32", function() { return readUint32; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "readSint32", function() { return readSint32; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "writeUint32", function() { return writeUint32; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "findBox", function() { return findBox; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseSegmentIndex", function() { return parseSegmentIndex; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseInitSegment", function() { return parseInitSegment; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getStartDTS", function() { return getStartDTS; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getDuration", function() { return getDuration; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "computeRawDurationFromSamples", function() { return computeRawDurationFromSamples; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "offsetStartDTS", function() { return offsetStartDTS; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "segmentValidRange", function() { return segmentValidRange; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "appendUint8Array", function() { return appendUint8Array; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseSamples", function() { return parseSamples; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseSEIMessageFromNALu", function() { return parseSEIMessageFromNALu; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseEmsg", function() { return parseEmsg; });
|
||
/* harmony import */ var _typed_array__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./typed-array */ "./src/utils/typed-array.ts");
|
||
/* harmony import */ var _loader_fragment__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../loader/fragment */ "./src/loader/fragment.ts");
|
||
/* harmony import */ var _demux_id3__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../demux/id3 */ "./src/demux/id3.ts");
|
||
|
||
|
||
|
||
var UINT32_MAX = Math.pow(2, 32) - 1;
|
||
var push = [].push; // We are using fixed track IDs for driving the MP4 remuxer
|
||
// instead of following the TS PIDs.
|
||
// There is no reason not to do this and some browsers/SourceBuffer-demuxers
|
||
// may not like if there are TrackID "switches"
|
||
// See https://github.com/video-dev/hls.js/issues/1331
|
||
// Here we are mapping our internal track types to constant MP4 track IDs
|
||
// With MSE currently one can only have one track of each, and we are muxing
|
||
// whatever video/audio rendition in them.
|
||
|
||
var RemuxerTrackIdConfig = {
|
||
video: 1,
|
||
audio: 2,
|
||
id3: 3,
|
||
text: 4
|
||
};
|
||
function bin2str(data) {
|
||
return String.fromCharCode.apply(null, data);
|
||
}
|
||
function readUint16(buffer, offset) {
|
||
var val = buffer[offset] << 8 | buffer[offset + 1];
|
||
return val < 0 ? 65536 + val : val;
|
||
}
|
||
function readUint32(buffer, offset) {
|
||
var val = readSint32(buffer, offset);
|
||
return val < 0 ? 4294967296 + val : val;
|
||
}
|
||
function readSint32(buffer, offset) {
|
||
return buffer[offset] << 24 | buffer[offset + 1] << 16 | buffer[offset + 2] << 8 | buffer[offset + 3];
|
||
}
|
||
function writeUint32(buffer, offset, value) {
|
||
buffer[offset] = value >> 24;
|
||
buffer[offset + 1] = value >> 16 & 0xff;
|
||
buffer[offset + 2] = value >> 8 & 0xff;
|
||
buffer[offset + 3] = value & 0xff;
|
||
} // Find the data for a box specified by its path
|
||
|
||
function findBox(data, path) {
|
||
var results = [];
|
||
|
||
if (!path.length) {
|
||
// short-circuit the search for empty paths
|
||
return results;
|
||
}
|
||
|
||
var end = data.byteLength;
|
||
|
||
for (var i = 0; i < end;) {
|
||
var size = readUint32(data, i);
|
||
var type = bin2str(data.subarray(i + 4, i + 8));
|
||
var endbox = size > 1 ? i + size : end;
|
||
|
||
if (type === path[0]) {
|
||
if (path.length === 1) {
|
||
// this is the end of the path and we've found the box we were
|
||
// looking for
|
||
results.push(data.subarray(i + 8, endbox));
|
||
} else {
|
||
// recursively search for the next box along the path
|
||
var subresults = findBox(data.subarray(i + 8, endbox), path.slice(1));
|
||
|
||
if (subresults.length) {
|
||
push.apply(results, subresults);
|
||
}
|
||
}
|
||
}
|
||
|
||
i = endbox;
|
||
} // we've finished searching all of data
|
||
|
||
|
||
return results;
|
||
}
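// Illustrative note (not part of the original source): findBox returns the payloads
// (Uint8Array subarrays with the box header stripped) of every box matching the path, e.g.
//
//   findBox(initSegment, ['moov', 'trak'])          // one entry per track in the init segment
//   findBox(fragment, ['moof', 'traf', 'tfdt'])[0]  // first track-fragment decode-time box, if any
//
// which mirrors how parseInitSegment and getStartDTS below consume it.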
|
||
function parseSegmentIndex(initSegment) {
|
||
var moovBox = findBox(initSegment, ['moov']);
|
||
var moov = moovBox[0];
|
||
var moovEndOffset = moov ? moov.length : null; // we need this in case we need to chop off garbage at the end of the current data
|
||
|
||
var sidxBox = findBox(initSegment, ['sidx']);
|
||
|
||
if (!sidxBox || !sidxBox[0]) {
|
||
return null;
|
||
}
|
||
|
||
var references = [];
|
||
var sidx = sidxBox[0];
|
||
var version = sidx[0]; // set initial offset, we skip the reference ID (not needed)
|
||
|
||
var index = version === 0 ? 8 : 16;
|
||
var timescale = readUint32(sidx, index);
|
||
index += 4; // TODO: parse earliestPresentationTime and firstOffset
|
||
// usually zero in our case
|
||
|
||
var earliestPresentationTime = 0;
|
||
var firstOffset = 0;
|
||
|
||
if (version === 0) {
|
||
index += 8;
|
||
} else {
|
||
index += 16;
|
||
} // skip reserved
|
||
|
||
|
||
index += 2;
|
||
var startByte = sidx.length + firstOffset;
|
||
var referencesCount = readUint16(sidx, index);
|
||
index += 2;
|
||
|
||
for (var i = 0; i < referencesCount; i++) {
|
||
var referenceIndex = index;
|
||
var referenceInfo = readUint32(sidx, referenceIndex);
|
||
referenceIndex += 4;
|
||
var referenceSize = referenceInfo & 0x7fffffff;
|
||
var referenceType = (referenceInfo & 0x80000000) >>> 31;
|
||
|
||
if (referenceType === 1) {
|
||
// eslint-disable-next-line no-console
|
||
console.warn('SIDX has hierarchical references (not supported)');
|
||
return null;
|
||
}
|
||
|
||
var subsegmentDuration = readUint32(sidx, referenceIndex);
|
||
referenceIndex += 4;
|
||
references.push({
|
||
referenceSize: referenceSize,
|
||
subsegmentDuration: subsegmentDuration,
|
||
// unscaled
|
||
info: {
|
||
duration: subsegmentDuration / timescale,
|
||
start: startByte,
|
||
end: startByte + referenceSize - 1
|
||
}
|
||
});
|
||
startByte += referenceSize; // Skipping 1 bit for |startsWithSap|, 3 bits for |sapType|, and 28 bits
|
||
// for |sapDelta|.
|
||
|
||
referenceIndex += 4; // skip to next ref
|
||
|
||
index = referenceIndex;
|
||
}
|
||
|
||
return {
|
||
earliestPresentationTime: earliestPresentationTime,
|
||
timescale: timescale,
|
||
version: version,
|
||
referencesCount: referencesCount,
|
||
references: references,
|
||
moovEndOffset: moovEndOffset
|
||
};
|
||
}
|
||
/**
|
||
* Parses an MP4 initialization segment and extracts stream type and
|
||
* timescale values for any declared tracks. Timescale values indicate the
|
||
* number of clock ticks per second to assume for time-based values
|
||
* elsewhere in the MP4.
|
||
*
|
||
* To determine the start time of an MP4, you need two pieces of
|
||
* information: the timescale unit and the earliest base media decode
|
||
* time. Multiple timescales can be specified within an MP4 but the
|
||
* base media decode time is always expressed in the timescale from
|
||
* the media header box for the track:
|
||
* ```
|
||
* moov > trak > mdia > mdhd.timescale
|
||
* moov > trak > mdia > hdlr
|
||
* ```
|
||
* @param initSegment {Uint8Array} the bytes of the init segment
|
||
* @return {InitData} a hash of track type to timescale values or null if
|
||
* the init segment is malformed.
|
||
*/
|
||
|
||
function parseInitSegment(initSegment) {
|
||
var result = [];
|
||
var traks = findBox(initSegment, ['moov', 'trak']);
|
||
|
||
for (var i = 0; i < traks.length; i++) {
|
||
var trak = traks[i];
|
||
var tkhd = findBox(trak, ['tkhd'])[0];
|
||
|
||
if (tkhd) {
|
||
var version = tkhd[0];
|
||
|
||
var _index = version === 0 ? 12 : 20;
|
||
|
||
var trackId = readUint32(tkhd, _index);
|
||
var mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
|
||
|
||
if (mdhd) {
|
||
version = mdhd[0];
|
||
_index = version === 0 ? 12 : 20;
|
||
var timescale = readUint32(mdhd, _index);
|
||
var hdlr = findBox(trak, ['mdia', 'hdlr'])[0];
|
||
|
||
if (hdlr) {
|
||
var hdlrType = bin2str(hdlr.subarray(8, 12));
|
||
var type = {
|
||
soun: _loader_fragment__WEBPACK_IMPORTED_MODULE_1__["ElementaryStreamTypes"].AUDIO,
|
||
vide: _loader_fragment__WEBPACK_IMPORTED_MODULE_1__["ElementaryStreamTypes"].VIDEO
|
||
}[hdlrType];
|
||
|
||
if (type) {
|
||
// Parse codec details
|
||
var stsd = findBox(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
|
||
var codec = void 0;
|
||
|
||
if (stsd) {
|
||
codec = bin2str(stsd.subarray(12, 16)); // TODO: Parse codec details to be able to build MIME type.
|
||
// stsd.start += 8;
|
||
// const codecBox = findBox(stsd, [codec])[0];
|
||
// if (codecBox) {
|
||
// TODO: Codec parsing support for avc1, mp4a, hevc, av01...
|
||
// }
|
||
}
|
||
|
||
result[trackId] = {
|
||
timescale: timescale,
|
||
type: type
|
||
};
|
||
result[type] = {
|
||
timescale: timescale,
|
||
id: trackId,
|
||
codec: codec
|
||
};
|
||
}
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
var trex = findBox(initSegment, ['moov', 'mvex', 'trex']);
|
||
trex.forEach(function (trex) {
|
||
var trackId = readUint32(trex, 4);
|
||
var track = result[trackId];
|
||
|
||
if (track) {
|
||
track.default = {
|
||
duration: readUint32(trex, 12),
|
||
flags: readUint32(trex, 20)
|
||
};
|
||
}
|
||
});
|
||
return result;
|
||
}
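// Added note (not part of the original source): for a typical muxed init segment the result
// above is keyed both by numeric track id and by the corresponding ElementaryStreamTypes value.
// With example ids/timescales/codecs it would look like
//
//   result[1]    -> { timescale: 90000, type: ElementaryStreamTypes.VIDEO }
//   result.video -> { timescale: 90000, id: 1, codec: 'avc1' }
//   result[2]    -> { timescale: 44100, type: ElementaryStreamTypes.AUDIO }
//   result.audio -> { timescale: 44100, id: 2, codec: 'mp4a' }
//
// plus a `default` entry ({ duration, flags }) per track when a moov/mvex/trex box is present.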
|
||
/**
 * Determine the base media decode start time, in seconds, for an MP4
 * fragment. If multiple fragments are specified, the earliest time is
 * returned.
 *
 * The base media decode time can be parsed from track fragment
 * metadata:
 * ```
 * moof > traf > tfdt.baseMediaDecodeTime
 * ```
 * It requires the timescale value from the mdhd to interpret.
 *
 * @param initData {InitData} a hash of track type to timescale values
 * @param fmp4 {Uint8Array} the bytes of the mp4 fragment
 * @return {number} the earliest base media decode start time for the
 * fragment, in seconds
 */

function getStartDTS(initData, fmp4) {
// we need info from two children of each track fragment box
return findBox(fmp4, ['moof', 'traf']).reduce(function (result, traf) {
var tfdt = findBox(traf, ['tfdt'])[0];
var version = tfdt[0];
var start = findBox(traf, ['tfhd']).reduce(function (result, tfhd) {
// get the track id from the tfhd
var id = readUint32(tfhd, 4);
var track = initData[id];

if (track) {
var baseTime = readUint32(tfdt, 4);

if (version === 1) {
baseTime *= Math.pow(2, 32);
baseTime += readUint32(tfdt, 8);
} // assume a 90kHz clock if no timescale was specified


var scale = track.timescale || 90e3; // convert base time to seconds

var startTime = baseTime / scale;

if (isFinite(startTime) && (result === null || startTime < result)) {
return startTime;
}
}

return result;
}, null);

if (start !== null && isFinite(start) && (result === null || start < result)) {
return start;
}

return result;
}, null) || 0;
}
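
// Illustrative sketch: the conversion performed by getStartDTS above is simply
// baseMediaDecodeTime / timescale. For example, a tfdt baseMediaDecodeTime of
// 900000 with an mdhd timescale of 90000 yields a start time of 10 seconds.
// The unused helper below restates that arithmetic; its name is an assumption.
// eslint-disable-next-line no-unused-vars
function exampleBaseTimeToSeconds(baseMediaDecodeTime, timescale) {
// Fall back to a 90kHz clock when no timescale is known, mirroring getStartDTS.
var scale = timescale || 90e3;
return baseMediaDecodeTime / scale;
}
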
/*
 For Reference:
 aligned(8) class TrackFragmentHeaderBox
 extends FullBox(‘tfhd’, 0, tf_flags){
 unsigned int(32) track_ID;
 // all the following are optional fields
 unsigned int(64) base_data_offset;
 unsigned int(32) sample_description_index;
 unsigned int(32) default_sample_duration;
 unsigned int(32) default_sample_size;
 unsigned int(32) default_sample_flags
 }
 */

function getDuration(data, initData) {
var rawDuration = 0;
var videoDuration = 0;
var audioDuration = 0;
var trafs = findBox(data, ['moof', 'traf']);

for (var i = 0; i < trafs.length; i++) {
var traf = trafs[i]; // There is only one tfhd & trun per traf
// This is true for CMAF style content, and we should perhaps check the ftyp
// and only look for a single trun then, but for ISOBMFF we should check
// for multiple track runs.

var tfhd = findBox(traf, ['tfhd'])[0]; // get the track id from the tfhd

var id = readUint32(tfhd, 4);
var track = initData[id];

if (!track) {
continue;
}

var trackDefault = track.default;
var tfhdFlags = readUint32(tfhd, 0) | (trackDefault === null || trackDefault === void 0 ? void 0 : trackDefault.flags);
var sampleDuration = trackDefault === null || trackDefault === void 0 ? void 0 : trackDefault.duration;

if (tfhdFlags & 0x000008) {
// 0x000008 indicates the presence of the default_sample_duration field
if (tfhdFlags & 0x000002) {
// 0x000002 indicates the presence of the sample_description_index field, which precedes default_sample_duration
// If present, the default_sample_duration exists at byte offset 12
sampleDuration = readUint32(tfhd, 12);
} else {
// Otherwise, the duration is at byte offset 8
sampleDuration = readUint32(tfhd, 8);
}
} // assume a 90kHz clock if no timescale was specified


var timescale = track.timescale || 90e3;
var truns = findBox(traf, ['trun']);

for (var j = 0; j < truns.length; j++) {
rawDuration = computeRawDurationFromSamples(truns[j]);

if (!rawDuration && sampleDuration) {
var sampleCount = readUint32(truns[j], 4);
rawDuration = sampleDuration * sampleCount;
}

if (track.type === _loader_fragment__WEBPACK_IMPORTED_MODULE_1__["ElementaryStreamTypes"].VIDEO) {
videoDuration += rawDuration / timescale;
} else if (track.type === _loader_fragment__WEBPACK_IMPORTED_MODULE_1__["ElementaryStreamTypes"].AUDIO) {
audioDuration += rawDuration / timescale;
}
}
}

if (videoDuration === 0 && audioDuration === 0) {
// If duration samples are not available in the traf use sidx subsegment_duration
var sidx = parseSegmentIndex(data);

if (sidx !== null && sidx !== void 0 && sidx.references) {
return sidx.references.reduce(function (dur, ref) {
return dur + ref.info.duration || 0;
}, 0);
}
}

if (videoDuration) {
return videoDuration;
}

return audioDuration;
}
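
// Illustrative sketch: getDuration above reads default_sample_duration from the
// tfhd at byte offset 8 or 12, depending on whether the sample_description_index
// field (flag 0x000002) precedes it. The unused helper below isolates just that
// offset decision, mirroring the same simplification; `tfhd` is assumed to be the
// Uint8Array returned by findBox(traf, ['tfhd'])[0].
// eslint-disable-next-line no-unused-vars
function exampleDefaultSampleDuration(tfhd) {
var flags = readUint32(tfhd, 0) & 0xffffff;
if (!(flags & 0x000008)) {
return undefined; // default_sample_duration not present
}
return flags & 0x000002 ? readUint32(tfhd, 12) : readUint32(tfhd, 8);
}
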
/*
 For Reference:
 aligned(8) class TrackRunBox
 extends FullBox(‘trun’, version, tr_flags) {
 unsigned int(32) sample_count;
 // the following are optional fields
 signed int(32) data_offset;
 unsigned int(32) first_sample_flags;
 // all fields in the following array are optional
 {
 unsigned int(32) sample_duration;
 unsigned int(32) sample_size;
 unsigned int(32) sample_flags
 if (version == 0)
 { unsigned int(32)
 else
 { signed int(32)
 }[ sample_count ]
 }
 */

function computeRawDurationFromSamples(trun) {
var flags = readUint32(trun, 0); // Flags are at offset 0, non-optional sample_count is at offset 4. Therefore we start 8 bytes in.
// Each field is an int32, which is 4 bytes

var offset = 8; // data-offset-present flag

if (flags & 0x000001) {
offset += 4;
} // first-sample-flags-present flag


if (flags & 0x000004) {
offset += 4;
}

var duration = 0;
var sampleCount = readUint32(trun, 4);

for (var i = 0; i < sampleCount; i++) {
// sample-duration-present flag
if (flags & 0x000100) {
var sampleDuration = readUint32(trun, offset);
duration += sampleDuration;
offset += 4;
} // sample-size-present flag


if (flags & 0x000200) {
offset += 4;
} // sample-flags-present flag


if (flags & 0x000400) {
offset += 4;
} // sample-composition-time-offsets-present flag


if (flags & 0x000800) {
offset += 4;
}
}

return duration;
}
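
// Illustrative sketch: each optional per-sample field in a trun is 4 bytes, so
// the stride of one sample record depends on which of the four per-sample flags
// are set. The unused helper below computes that stride the same way
// computeRawDurationFromSamples walks it; the name is an assumption.
// eslint-disable-next-line no-unused-vars
function exampleTrunSampleStride(flags) {
var stride = 0;
if (flags & 0x000100) stride += 4; // sample_duration
if (flags & 0x000200) stride += 4; // sample_size
if (flags & 0x000400) stride += 4; // sample_flags
if (flags & 0x000800) stride += 4; // sample_composition_time_offset
return stride;
}
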
function offsetStartDTS(initData, fmp4, timeOffset) {
findBox(fmp4, ['moof', 'traf']).forEach(function (traf) {
findBox(traf, ['tfhd']).forEach(function (tfhd) {
// get the track id from the tfhd
var id = readUint32(tfhd, 4);
var track = initData[id];

if (!track) {
return;
} // assume a 90kHz clock if no timescale was specified


var timescale = track.timescale || 90e3; // get the base media decode time from the tfdt

findBox(traf, ['tfdt']).forEach(function (tfdt) {
var version = tfdt[0];
var baseMediaDecodeTime = readUint32(tfdt, 4);

if (version === 0) {
writeUint32(tfdt, 4, baseMediaDecodeTime - timeOffset * timescale);
} else {
baseMediaDecodeTime *= Math.pow(2, 32);
baseMediaDecodeTime += readUint32(tfdt, 8);
baseMediaDecodeTime -= timeOffset * timescale;
baseMediaDecodeTime = Math.max(baseMediaDecodeTime, 0);
var upper = Math.floor(baseMediaDecodeTime / (UINT32_MAX + 1));
var lower = Math.floor(baseMediaDecodeTime % (UINT32_MAX + 1));
writeUint32(tfdt, 4, upper);
writeUint32(tfdt, 8, lower);
}
});
});
});
} // TODO: Check if the last moof+mdat pair is part of the valid range

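// Illustrative sketch: for version 1 tfdt boxes, offsetStartDTS above rewrites
// the 64-bit baseMediaDecodeTime as two 32-bit words. The unused helper below
// shows that split in isolation, using the same UINT32_MAX constant from this
// module; the name is an assumption.
// eslint-disable-next-line no-unused-vars
function exampleSplitUint64(value) {
var upper = Math.floor(value / (UINT32_MAX + 1));
var lower = Math.floor(value % (UINT32_MAX + 1));
return [upper, lower];
}
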
function segmentValidRange(data) {
var segmentedRange = {
valid: null,
remainder: null
};
var moofs = findBox(data, ['moof']);

if (!moofs) {
return segmentedRange;
} else if (moofs.length < 2) {
segmentedRange.remainder = data;
return segmentedRange;
}

var last = moofs[moofs.length - 1]; // Offset by 8 bytes; findBox offsets the start by as much

segmentedRange.valid = Object(_typed_array__WEBPACK_IMPORTED_MODULE_0__["sliceUint8"])(data, 0, last.byteOffset - 8);
segmentedRange.remainder = Object(_typed_array__WEBPACK_IMPORTED_MODULE_0__["sliceUint8"])(data, last.byteOffset - 8);
return segmentedRange;
}
function appendUint8Array(data1, data2) {
var temp = new Uint8Array(data1.length + data2.length);
temp.set(data1);
temp.set(data2, data1.length);
return temp;
}
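
// Illustrative usage sketch: segmentValidRange splits a progressively loaded
// fmp4 buffer into a parseable prefix and a trailing remainder, and
// appendUint8Array can then prepend that remainder to the next chunk.
// `previousRemainder` and `newChunk` are assumed names for the example only.
// eslint-disable-next-line no-unused-vars
function exampleAccumulateFmp4(previousRemainder, newChunk) {
var combined = previousRemainder ? appendUint8Array(previousRemainder, newChunk) : newChunk;
var range = segmentValidRange(combined);
return {
parseNow: range.valid, // complete data up to (but not including) the last moof
keepForLater: range.remainder // bytes starting at the last moof
};
}
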
function parseSamples(timeOffset, track) {
var seiSamples = [];
var videoData = track.samples;
var timescale = track.timescale;
var trackId = track.id;
var isHEVCFlavor = false;
var moofs = findBox(videoData, ['moof']);
moofs.map(function (moof) {
var moofOffset = moof.byteOffset - 8;
var trafs = findBox(moof, ['traf']);
trafs.map(function (traf) {
// get the base media decode time from the tfdt
var baseTime = findBox(traf, ['tfdt']).map(function (tfdt) {
var version = tfdt[0];
var result = readUint32(tfdt, 4);

if (version === 1) {
result *= Math.pow(2, 32);
result += readUint32(tfdt, 8);
}

return result / timescale;
})[0];

if (baseTime !== undefined) {
timeOffset = baseTime;
}

return findBox(traf, ['tfhd']).map(function (tfhd) {
var id = readUint32(tfhd, 4);
var tfhdFlags = readUint32(tfhd, 0) & 0xffffff;
var baseDataOffsetPresent = (tfhdFlags & 0x000001) !== 0;
var sampleDescriptionIndexPresent = (tfhdFlags & 0x000002) !== 0;
var defaultSampleDurationPresent = (tfhdFlags & 0x000008) !== 0;
var defaultSampleDuration = 0;
var defaultSampleSizePresent = (tfhdFlags & 0x000010) !== 0;
var defaultSampleSize = 0;
var defaultSampleFlagsPresent = (tfhdFlags & 0x000020) !== 0;
var tfhdOffset = 8;

if (id === trackId) {
if (baseDataOffsetPresent) {
tfhdOffset += 8;
}

if (sampleDescriptionIndexPresent) {
tfhdOffset += 4;
}

if (defaultSampleDurationPresent) {
defaultSampleDuration = readUint32(tfhd, tfhdOffset);
tfhdOffset += 4;
}

if (defaultSampleSizePresent) {
defaultSampleSize = readUint32(tfhd, tfhdOffset);
tfhdOffset += 4;
}

if (defaultSampleFlagsPresent) {
tfhdOffset += 4;
}

if (track.type === 'video') {
isHEVCFlavor = isHEVC(track.codec);
}

findBox(traf, ['trun']).map(function (trun) {
var version = trun[0];
var flags = readUint32(trun, 0) & 0xffffff;
var dataOffsetPresent = (flags & 0x000001) !== 0;
var dataOffset = 0;
var firstSampleFlagsPresent = (flags & 0x000004) !== 0;
var sampleDurationPresent = (flags & 0x000100) !== 0;
var sampleDuration = 0;
var sampleSizePresent = (flags & 0x000200) !== 0;
var sampleSize = 0;
var sampleFlagsPresent = (flags & 0x000400) !== 0;
var sampleCompositionOffsetsPresent = (flags & 0x000800) !== 0;
var compositionOffset = 0;
var sampleCount = readUint32(trun, 4);
var trunOffset = 8; // past version, flags, and sample count

if (dataOffsetPresent) {
dataOffset = readUint32(trun, trunOffset);
trunOffset += 4;
}

if (firstSampleFlagsPresent) {
trunOffset += 4;
}

var sampleOffset = dataOffset + moofOffset;

for (var ix = 0; ix < sampleCount; ix++) {
if (sampleDurationPresent) {
sampleDuration = readUint32(trun, trunOffset);
trunOffset += 4;
} else {
sampleDuration = defaultSampleDuration;
}

if (sampleSizePresent) {
sampleSize = readUint32(trun, trunOffset);
trunOffset += 4;
} else {
sampleSize = defaultSampleSize;
}

if (sampleFlagsPresent) {
trunOffset += 4;
}

if (sampleCompositionOffsetsPresent) {
if (version === 0) {
compositionOffset = readUint32(trun, trunOffset);
} else {
compositionOffset = readSint32(trun, trunOffset);
}

trunOffset += 4;
}

if (track.type === _loader_fragment__WEBPACK_IMPORTED_MODULE_1__["ElementaryStreamTypes"].VIDEO) {
var naluTotalSize = 0;

while (naluTotalSize < sampleSize) {
var naluSize = readUint32(videoData, sampleOffset);
sampleOffset += 4;
var naluType = videoData[sampleOffset] & 0x1f;

if (isSEIMessage(isHEVCFlavor, naluType)) {
var data = videoData.subarray(sampleOffset, sampleOffset + naluSize);
parseSEIMessageFromNALu(data, timeOffset + compositionOffset / timescale, seiSamples);
}

sampleOffset += naluSize;
naluTotalSize += naluSize + 4;
}
}

timeOffset += sampleDuration / timescale;
}
});
}
});
});
});
return seiSamples;
}

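// Illustrative usage sketch: parseSamples scans the video samples of a CMAF
// fragment for SEI NAL units and returns any CEA-608/708 or unregistered
// user-data payloads it finds. `track` is assumed to carry
// { samples, timescale, id, type, codec } as populated elsewhere in this bundle;
// the zero timeOffset is only a placeholder for the example.
// eslint-disable-next-line no-unused-vars
function exampleExtractCaptionSamples(track) {
return parseSamples(0, track);
}
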
function isHEVC(codec) {
if (!codec) {
return false;
}

var delimit = codec.indexOf('.');
var baseCodec = delimit < 0 ? codec : codec.substring(0, delimit);
return baseCodec === 'hvc1' || baseCodec === 'hev1' || // Dolby Vision
baseCodec === 'dvh1' || baseCodec === 'dvhe';
}

function isSEIMessage(isHEVCFlavor, naluType) {
return isHEVCFlavor ? naluType === 39 || naluType === 40 : naluType === 6;
}

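// Illustrative sketch: isHEVC only looks at the base codec string, so
// 'hvc1.1.6.L93.B0' and 'hev1.1.6.L120.90' are treated as HEVC while
// 'avc1.64001f' is not; the SEI NAL unit types then differ (39/40 for HEVC,
// 6 for AVC), which is what isSEIMessage encodes. The unused helper below just
// restates that mapping; the name is an assumption.
// eslint-disable-next-line no-unused-vars
function exampleSeiNaluTypesFor(codec) {
return isHEVC(codec) ? [39, 40] : [6];
}
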
function parseSEIMessageFromNALu(unescapedData, pts, samples) {
var data = discardEPB(unescapedData);
var seiPtr = 0; // skip frameType

seiPtr++;
var payloadType = 0;
var payloadSize = 0;
var endOfCaptions = false;
var b = 0;

while (seiPtr < data.length) {
payloadType = 0;

do {
if (seiPtr >= data.length) {
break;
}

b = data[seiPtr++];
payloadType += b;
} while (b === 0xff); // Parse payload size.


payloadSize = 0;

do {
if (seiPtr >= data.length) {
break;
}

b = data[seiPtr++];
payloadSize += b;
} while (b === 0xff);

var leftOver = data.length - seiPtr;

if (!endOfCaptions && payloadType === 4 && seiPtr < data.length) {
endOfCaptions = true;
var countryCode = data[seiPtr++];

if (countryCode === 181) {
var providerCode = readUint16(data, seiPtr);
seiPtr += 2;

if (providerCode === 49) {
var userStructure = readUint32(data, seiPtr);
seiPtr += 4;

if (userStructure === 0x47413934) {
var userDataType = data[seiPtr++]; // Raw CEA-608 bytes wrapped in CEA-708 packet

if (userDataType === 3) {
var firstByte = data[seiPtr++];
var totalCCs = 0x1f & firstByte;
var enabled = 0x40 & firstByte;
var totalBytes = enabled ? 2 + totalCCs * 3 : 0;
var byteArray = new Uint8Array(totalBytes);

if (enabled) {
byteArray[0] = firstByte;

for (var i = 1; i < totalBytes; i++) {
byteArray[i] = data[seiPtr++];
}
}

samples.push({
type: userDataType,
payloadType: payloadType,
pts: pts,
bytes: byteArray
});
}
}
}
}
} else if (payloadType === 5 && payloadSize < leftOver) {
endOfCaptions = true;

if (payloadSize > 16) {
var uuidStrArray = [];

for (var _i = 0; _i < 16; _i++) {
var _b = data[seiPtr++].toString(16);

uuidStrArray.push(_b.length == 1 ? '0' + _b : _b);

if (_i === 3 || _i === 5 || _i === 7 || _i === 9) {
uuidStrArray.push('-');
}
}

var length = payloadSize - 16;
var userDataBytes = new Uint8Array(length);

for (var _i2 = 0; _i2 < length; _i2++) {
userDataBytes[_i2] = data[seiPtr++];
}

samples.push({
payloadType: payloadType,
pts: pts,
uuid: uuidStrArray.join(''),
userData: Object(_demux_id3__WEBPACK_IMPORTED_MODULE_2__["utf8ArrayToStr"])(userDataBytes),
userDataBytes: userDataBytes
});
}
} else if (payloadSize < leftOver) {
seiPtr += payloadSize;
} else if (payloadSize > leftOver) {
break;
}
}
}
/**
 * remove Emulation Prevention bytes from a RBSP
 */

function discardEPB(data) {
var length = data.byteLength;
var EPBPositions = [];
var i = 1; // Find all `Emulation Prevention Bytes`

while (i < length - 2) {
if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
EPBPositions.push(i + 2);
i += 2;
} else {
i++;
}
} // If no Emulation Prevention Bytes were found just return the original
// array


if (EPBPositions.length === 0) {
return data;
} // Create a new array to hold the NAL unit data


var newLength = length - EPBPositions.length;
var newData = new Uint8Array(newLength);
var sourceIndex = 0;

for (i = 0; i < newLength; sourceIndex++, i++) {
if (sourceIndex === EPBPositions[0]) {
// Skip this byte
sourceIndex++; // Remove this position index

EPBPositions.shift();
}

newData[i] = data[sourceIndex];
}

return newData;
}

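// Illustrative sketch: discardEPB strips the 0x03 emulation-prevention byte from
// every 0x00 0x00 0x03 run, e.g. [0x00, 0x00, 0x03, 0x01] becomes
// [0x00, 0x00, 0x01]. The unused helper below wraps that call on a small
// assumed input; the name is an assumption.
// eslint-disable-next-line no-unused-vars
function exampleDiscardEpb() {
var escaped = new Uint8Array([0x06, 0x00, 0x00, 0x03, 0x01, 0xff]);
return discardEPB(escaped); // -> Uint8Array [0x06, 0x00, 0x00, 0x01, 0xff]
}
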
function parseEmsg(data) {
var version = data[0];
var schemeIdUri = '';
var value = '';
var timeScale = 0;
var presentationTimeDelta = 0;
var presentationTime = 0;
var eventDuration = 0;
var id = 0;
var offset = 0;

if (version === 0) {
while (bin2str(data.subarray(offset, offset + 1)) !== '\0') {
schemeIdUri += bin2str(data.subarray(offset, offset + 1));
offset += 1;
}

schemeIdUri += bin2str(data.subarray(offset, offset + 1));
offset += 1;

while (bin2str(data.subarray(offset, offset + 1)) !== '\0') {
value += bin2str(data.subarray(offset, offset + 1));
offset += 1;
}

value += bin2str(data.subarray(offset, offset + 1));
offset += 1;
timeScale = readUint32(data, 12);
presentationTimeDelta = readUint32(data, 16);
eventDuration = readUint32(data, 20);
id = readUint32(data, 24);
offset = 28;
} else if (version === 1) {
offset += 4;
timeScale = readUint32(data, offset);
offset += 4;
var leftPresentationTime = readUint32(data, offset);
offset += 4;
var rightPresentationTime = readUint32(data, offset);
offset += 4;
presentationTime = Math.pow(2, 32) * leftPresentationTime + rightPresentationTime;

if (!Number.isSafeInteger(presentationTime)) {
presentationTime = Number.MAX_SAFE_INTEGER; // eslint-disable-next-line no-console

console.warn('Presentation time exceeds safe integer limit and wrapped to max safe integer in parsing emsg box');
}

eventDuration = readUint32(data, offset);
offset += 4;
id = readUint32(data, offset);
offset += 4;

while (bin2str(data.subarray(offset, offset + 1)) !== '\0') {
schemeIdUri += bin2str(data.subarray(offset, offset + 1));
offset += 1;
}

schemeIdUri += bin2str(data.subarray(offset, offset + 1));
offset += 1;

while (bin2str(data.subarray(offset, offset + 1)) !== '\0') {
value += bin2str(data.subarray(offset, offset + 1));
offset += 1;
}

value += bin2str(data.subarray(offset, offset + 1));
offset += 1;
}

var payload = data.subarray(offset, data.byteLength);
return {
schemeIdUri: schemeIdUri,
value: value,
timeScale: timeScale,
presentationTime: presentationTime,
presentationTimeDelta: presentationTimeDelta,
eventDuration: eventDuration,
id: id,
payload: payload
};
}

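// Illustrative usage sketch: parseEmsg decodes an 'emsg' box body into its
// scheme, timing and payload fields. Version 1 boxes carry an absolute
// presentationTime, while version 0 boxes carry a presentationTimeDelta
// relative to the fragment start; the unused helper below shows one way to
// derive a start time in seconds under an assumed `fragmentStart` anchor.
// eslint-disable-next-line no-unused-vars
function exampleEmsgStartTime(emsgBoxBody, fragmentStart) {
var emsg = parseEmsg(emsgBoxBody);
if (!emsg.timeScale) {
return fragmentStart;
}
return emsg.presentationTime
? emsg.presentationTime / emsg.timeScale
: fragmentStart + emsg.presentationTimeDelta / emsg.timeScale;
}
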
/***/ }),

/***/ "./src/utils/output-filter.ts":
/*!************************************!*\
  !*** ./src/utils/output-filter.ts ***!
  \************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return OutputFilter; });
var OutputFilter = /*#__PURE__*/function () {
function OutputFilter(timelineController, trackName) {
this.timelineController = void 0;
this.cueRanges = [];
this.trackName = void 0;
this.startTime = null;
this.endTime = null;
this.screen = null;
this.timelineController = timelineController;
this.trackName = trackName;
}

var _proto = OutputFilter.prototype;

_proto.dispatchCue = function dispatchCue() {
if (this.startTime === null) {
return;
}

this.timelineController.addCues(this.trackName, this.startTime, this.endTime, this.screen, this.cueRanges);
this.startTime = null;
};

_proto.newCue = function newCue(startTime, endTime, screen) {
if (this.startTime === null || this.startTime > startTime) {
this.startTime = startTime;
}

this.endTime = endTime;
this.screen = screen;
this.timelineController.createCaptionsTrack(this.trackName);
};

_proto.reset = function reset() {
this.cueRanges = [];
this.startTime = null;
};

return OutputFilter;
}();


/***/ }),

/***/ "./src/utils/texttrack-utils.ts":
/*!**************************************!*\
  !*** ./src/utils/texttrack-utils.ts ***!
  \**************************************/
/*! exports provided: sendAddTrackEvent, addCueToTrack, clearCurrentCues, removeCuesInRange, getCuesInRange */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "sendAddTrackEvent", function() { return sendAddTrackEvent; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "addCueToTrack", function() { return addCueToTrack; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "clearCurrentCues", function() { return clearCurrentCues; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "removeCuesInRange", function() { return removeCuesInRange; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getCuesInRange", function() { return getCuesInRange; });
/* harmony import */ var _logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./logger */ "./src/utils/logger.ts");

function sendAddTrackEvent(track, videoEl) {
var event;

try {
event = new Event('addtrack');
} catch (err) {
// for IE11
event = document.createEvent('Event');
event.initEvent('addtrack', false, false);
}

event.track = track;
videoEl.dispatchEvent(event);
}
function addCueToTrack(track, cue) {
// Sometimes there are cue overlaps on segmented vtts so the same
// cue can appear more than once in different vtt files.
// This avoids showing duplicated cues with same timecode and text.
var mode = track.mode;

if (mode === 'disabled') {
track.mode = 'hidden';
}

if (track.cues && !track.cues.getCueById(cue.id)) {
try {
track.addCue(cue);

if (!track.cues.getCueById(cue.id)) {
throw new Error("addCue is failed for: " + cue);
}
} catch (err) {
_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].debug("[texttrack-utils]: " + err);
var textTrackCue = new self.TextTrackCue(cue.startTime, cue.endTime, cue.text);
textTrackCue.id = cue.id;
track.addCue(textTrackCue);
}
}

if (mode === 'disabled') {
track.mode = mode;
}
}
function clearCurrentCues(track) {
// When track.mode is disabled, track.cues will be null.
// To guarantee the removal of cues, we need to temporarily
// change the mode to hidden
var mode = track.mode;

if (mode === 'disabled') {
track.mode = 'hidden';
}

if (track.cues) {
for (var i = track.cues.length; i--;) {
track.removeCue(track.cues[i]);
}
}

if (mode === 'disabled') {
track.mode = mode;
}
}
function removeCuesInRange(track, start, end) {
var mode = track.mode;

if (mode === 'disabled') {
track.mode = 'hidden';
}

if (track.cues && track.cues.length > 0) {
var cues = getCuesInRange(track.cues, start, end);

for (var i = 0; i < cues.length; i++) {
track.removeCue(cues[i]);
}
}

if (mode === 'disabled') {
track.mode = mode;
}
} // Find first cue starting after given time.
// Modified version of binary search O(log(n)).

function getFirstCueIndexAfterTime(cues, time) {
// If first cue starts after time, start there
if (time < cues[0].startTime) {
return 0;
} // If the last cue ends before time there is no overlap


var len = cues.length - 1;

if (time > cues[len].endTime) {
return -1;
}

var left = 0;
var right = len;

while (left <= right) {
var mid = Math.floor((right + left) / 2);

if (time < cues[mid].startTime) {
right = mid - 1;
} else if (time > cues[mid].startTime && left < len) {
left = mid + 1;
} else {
// If it's not lower or higher, it must be equal.
return mid;
}
} // At this point, left and right have swapped.
// No direct match was found, left or right element must be the closest. Check which one has the smallest diff.


return cues[left].startTime - time < time - cues[right].startTime ? left : right;
}

function getCuesInRange(cues, start, end) {
var cuesFound = [];
var firstCueInRange = getFirstCueIndexAfterTime(cues, start);

if (firstCueInRange > -1) {
for (var i = firstCueInRange, len = cues.length; i < len; i++) {
var cue = cues[i];

if (cue.startTime >= start && cue.endTime <= end) {
cuesFound.push(cue);
} else if (cue.startTime > end) {
return cuesFound;
}
}
}

return cuesFound;
}

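// Illustrative usage sketch: getCuesInRange uses a binary search over the
// time-ordered cue list, so callers can cheaply find cues overlapping a media
// time window; removeCuesInRange builds on it when flushing back-buffer cues.
// `textTrack` is an assumed TextTrack; the unused helper's name is an assumption.
// eslint-disable-next-line no-unused-vars
function exampleCountCuesInWindow(textTrack, start, end) {
if (!textTrack.cues || !textTrack.cues.length) {
return 0;
}
return getCuesInRange(textTrack.cues, start, end).length;
}
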
/***/ }),

/***/ "./src/utils/time-ranges.ts":
/*!**********************************!*\
  !*** ./src/utils/time-ranges.ts ***!
  \**********************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/**
 * TimeRanges to string helper
 */
var TimeRanges = {
toString: function toString(r) {
var log = '';
var len = r.length;

for (var i = 0; i < len; i++) {
log += '[' + r.start(i).toFixed(3) + ',' + r.end(i).toFixed(3) + ']';
}

return log;
}
};
/* harmony default export */ __webpack_exports__["default"] = (TimeRanges);

/***/ }),

/***/ "./src/utils/timescale-conversion.ts":
/*!*******************************************!*\
  !*** ./src/utils/timescale-conversion.ts ***!
  \*******************************************/
/*! exports provided: toTimescaleFromBase, toTimescaleFromScale, toMsFromMpegTsClock, toMpegTsClockFromTimescale */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "toTimescaleFromBase", function() { return toTimescaleFromBase; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "toTimescaleFromScale", function() { return toTimescaleFromScale; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "toMsFromMpegTsClock", function() { return toMsFromMpegTsClock; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "toMpegTsClockFromTimescale", function() { return toMpegTsClockFromTimescale; });
var MPEG_TS_CLOCK_FREQ_HZ = 90000;
function toTimescaleFromBase(value, destScale, srcBase, round) {
if (srcBase === void 0) {
srcBase = 1;
}

if (round === void 0) {
round = false;
}

var result = value * destScale * srcBase; // equivalent to `(value * scale) / (1 / base)`

return round ? Math.round(result) : result;
}
function toTimescaleFromScale(value, destScale, srcScale, round) {
if (srcScale === void 0) {
srcScale = 1;
}

if (round === void 0) {
round = false;
}

return toTimescaleFromBase(value, destScale, 1 / srcScale, round);
}
function toMsFromMpegTsClock(value, round) {
if (round === void 0) {
round = false;
}

return toTimescaleFromBase(value, 1000, 1 / MPEG_TS_CLOCK_FREQ_HZ, round);
}
function toMpegTsClockFromTimescale(value, srcScale) {
if (srcScale === void 0) {
srcScale = 1;
}

return toTimescaleFromBase(value, MPEG_TS_CLOCK_FREQ_HZ, 1 / srcScale);
}

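// Illustrative sketch: toMsFromMpegTsClock divides a 90kHz tick count down to
// milliseconds, and toMpegTsClockFromTimescale goes the other way from an
// arbitrary timescale; e.g. 900000 ticks at 90kHz is 10000 ms, and 10 seconds
// at timescale 1 is 900000 ticks. The unused helper below just evaluates those
// two conversions; the name is an assumption.
// eslint-disable-next-line no-unused-vars
function exampleClockConversions() {
return {
ms: toMsFromMpegTsClock(900000, true), // -> 10000
ticks: toMpegTsClockFromTimescale(10, 1) // -> 900000
};
}
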
/***/ }),

/***/ "./src/utils/typed-array.ts":
/*!**********************************!*\
  !*** ./src/utils/typed-array.ts ***!
  \**********************************/
/*! exports provided: sliceUint8 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "sliceUint8", function() { return sliceUint8; });
function sliceUint8(array, start, end) {
// @ts-expect-error This polyfills IE11 usage of Uint8Array slice.
// It always exists in the TypeScript definition so fails, but it fails at runtime on IE11.
return Uint8Array.prototype.slice ? array.slice(start, end) : new Uint8Array(Array.prototype.slice.call(array, start, end));
}

/***/ }),

/***/ "./src/utils/vttcue.ts":
|
||
/*!*****************************!*\
|
||
!*** ./src/utils/vttcue.ts ***!
|
||
\*****************************/
|
||
/*! exports provided: default */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/**
|
||
* Copyright 2013 vtt.js Contributors
|
||
*
|
||
* Licensed under the Apache License, Version 2.0 (the 'License');
|
||
* you may not use this file except in compliance with the License.
|
||
* You may obtain a copy of the License at
|
||
*
|
||
* http://www.apache.org/licenses/LICENSE-2.0
|
||
*
|
||
* Unless required by applicable law or agreed to in writing, software
|
||
* distributed under the License is distributed on an 'AS IS' BASIS,
|
||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||
* See the License for the specific language governing permissions and
|
||
* limitations under the License.
|
||
*/
|
||
/* harmony default export */ __webpack_exports__["default"] = ((function () {
|
||
if (typeof self !== 'undefined' && self.VTTCue) {
|
||
return self.VTTCue;
|
||
}
|
||
|
||
var AllowedDirections = ['', 'lr', 'rl'];
|
||
var AllowedAlignments = ['start', 'middle', 'end', 'left', 'right'];
|
||
|
||
function isAllowedValue(allowed, value) {
|
||
if (typeof value !== 'string') {
|
||
return false;
|
||
} // necessary for assuring the generic conforms to the Array interface
|
||
|
||
|
||
if (!Array.isArray(allowed)) {
|
||
return false;
|
||
} // reset the type so that the next narrowing works well
|
||
|
||
|
||
var lcValue = value.toLowerCase(); // use the allow list to narrow the type to a specific subset of strings
|
||
|
||
if (~allowed.indexOf(lcValue)) {
|
||
return lcValue;
|
||
}
|
||
|
||
return false;
|
||
}
|
||
|
||
function findDirectionSetting(value) {
|
||
return isAllowedValue(AllowedDirections, value);
|
||
}
|
||
|
||
function findAlignSetting(value) {
|
||
return isAllowedValue(AllowedAlignments, value);
|
||
}
|
||
|
||
function extend(obj) {
|
||
for (var _len = arguments.length, rest = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
|
||
rest[_key - 1] = arguments[_key];
|
||
}
|
||
|
||
var i = 1;
|
||
|
||
for (; i < arguments.length; i++) {
|
||
var cobj = arguments[i];
|
||
|
||
for (var p in cobj) {
|
||
obj[p] = cobj[p];
|
||
}
|
||
}
|
||
|
||
return obj;
|
||
}
|
||
|
||
function VTTCue(startTime, endTime, text) {
|
||
var cue = this;
|
||
var baseObj = {
|
||
enumerable: true
|
||
};
|
||
/**
|
||
* Shim implementation specific properties. These properties are not in
|
||
* the spec.
|
||
*/
|
||
// Lets us know when the VTTCue's data has changed in such a way that we need
|
||
// to recompute its display state. This lets us compute its display state
|
||
// lazily.
|
||
|
||
cue.hasBeenReset = false;
|
||
/**
|
||
* VTTCue and TextTrackCue properties
|
||
* http://dev.w3.org/html5/webvtt/#vttcue-interface
|
||
*/
|
||
|
||
var _id = '';
|
||
var _pauseOnExit = false;
|
||
var _startTime = startTime;
|
||
var _endTime = endTime;
|
||
var _text = text;
|
||
var _region = null;
|
||
var _vertical = '';
|
||
var _snapToLines = true;
|
||
var _line = 'auto';
|
||
var _lineAlign = 'start';
|
||
var _position = 50;
|
||
var _positionAlign = 'middle';
|
||
var _size = 50;
|
||
var _align = 'middle';
|
||
Object.defineProperty(cue, 'id', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _id;
|
||
},
|
||
set: function set(value) {
|
||
_id = '' + value;
|
||
}
|
||
}));
|
||
Object.defineProperty(cue, 'pauseOnExit', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _pauseOnExit;
|
||
},
|
||
set: function set(value) {
|
||
_pauseOnExit = !!value;
|
||
}
|
||
}));
|
||
Object.defineProperty(cue, 'startTime', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _startTime;
|
||
},
|
||
set: function set(value) {
|
||
if (typeof value !== 'number') {
|
||
throw new TypeError('Start time must be set to a number.');
|
||
}
|
||
|
||
_startTime = value;
|
||
this.hasBeenReset = true;
|
||
}
|
||
}));
|
||
Object.defineProperty(cue, 'endTime', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _endTime;
|
||
},
|
||
set: function set(value) {
|
||
if (typeof value !== 'number') {
|
||
throw new TypeError('End time must be set to a number.');
|
||
}
|
||
|
||
_endTime = value;
|
||
this.hasBeenReset = true;
|
||
}
|
||
}));
|
||
Object.defineProperty(cue, 'text', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _text;
|
||
},
|
||
set: function set(value) {
|
||
_text = '' + value;
|
||
this.hasBeenReset = true;
|
||
}
|
||
})); // todo: implement VTTRegion polyfill?
|
||
|
||
Object.defineProperty(cue, 'region', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _region;
|
||
},
|
||
set: function set(value) {
|
||
_region = value;
|
||
this.hasBeenReset = true;
|
||
}
|
||
}));
|
||
Object.defineProperty(cue, 'vertical', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _vertical;
|
||
},
|
||
set: function set(value) {
|
||
var setting = findDirectionSetting(value); // Have to check for false because the setting an be an empty string.
|
||
|
||
if (setting === false) {
|
||
throw new SyntaxError('An invalid or illegal string was specified.');
|
||
}
|
||
|
||
_vertical = setting;
|
||
this.hasBeenReset = true;
|
||
}
|
||
}));
|
||
Object.defineProperty(cue, 'snapToLines', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _snapToLines;
|
||
},
|
||
set: function set(value) {
|
||
_snapToLines = !!value;
|
||
this.hasBeenReset = true;
|
||
}
|
||
}));
|
||
Object.defineProperty(cue, 'line', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _line;
|
||
},
|
||
set: function set(value) {
|
||
if (typeof value !== 'number' && value !== 'auto') {
|
||
throw new SyntaxError('An invalid number or illegal string was specified.');
|
||
}
|
||
|
||
_line = value;
|
||
this.hasBeenReset = true;
|
||
}
|
||
}));
|
||
Object.defineProperty(cue, 'lineAlign', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _lineAlign;
|
||
},
|
||
set: function set(value) {
|
||
var setting = findAlignSetting(value);
|
||
|
||
if (!setting) {
|
||
throw new SyntaxError('An invalid or illegal string was specified.');
|
||
}
|
||
|
||
_lineAlign = setting;
|
||
this.hasBeenReset = true;
|
||
}
|
||
}));
|
||
Object.defineProperty(cue, 'position', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _position;
|
||
},
|
||
set: function set(value) {
|
||
if (value < 0 || value > 100) {
|
||
throw new Error('Position must be between 0 and 100.');
|
||
}
|
||
|
||
_position = value;
|
||
this.hasBeenReset = true;
|
||
}
|
||
}));
|
||
Object.defineProperty(cue, 'positionAlign', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _positionAlign;
|
||
},
|
||
set: function set(value) {
|
||
var setting = findAlignSetting(value);
|
||
|
||
if (!setting) {
|
||
throw new SyntaxError('An invalid or illegal string was specified.');
|
||
}
|
||
|
||
_positionAlign = setting;
|
||
this.hasBeenReset = true;
|
||
}
|
||
}));
|
||
Object.defineProperty(cue, 'size', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _size;
|
||
},
|
||
set: function set(value) {
|
||
if (value < 0 || value > 100) {
|
||
throw new Error('Size must be between 0 and 100.');
|
||
}
|
||
|
||
_size = value;
|
||
this.hasBeenReset = true;
|
||
}
|
||
}));
|
||
Object.defineProperty(cue, 'align', extend({}, baseObj, {
|
||
get: function get() {
|
||
return _align;
|
||
},
|
||
set: function set(value) {
|
||
var setting = findAlignSetting(value);
|
||
|
||
if (!setting) {
|
||
throw new SyntaxError('An invalid or illegal string was specified.');
|
||
}
|
||
|
||
_align = setting;
|
||
this.hasBeenReset = true;
|
||
}
|
||
}));
|
||
/**
|
||
* Other <track> spec defined properties
|
||
*/
|
||
// http://www.whatwg.org/specs/web-apps/current-work/multipage/the-video-element.html#text-track-cue-display-state
|
||
|
||
cue.displayState = undefined;
|
||
}
|
||
/**
|
||
* VTTCue methods
|
||
*/
|
||
|
||
|
||
VTTCue.prototype.getCueAsHTML = function () {
|
||
// Assume WebVTT.convertCueToDOMTree is on the global.
|
||
var WebVTT = self.WebVTT;
|
||
return WebVTT.convertCueToDOMTree(self, this.text);
|
||
}; // this is a polyfill hack
|
||
|
||
|
||
return VTTCue;
|
||
})());
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/utils/vttparser.ts":
|
||
/*!********************************!*\
|
||
!*** ./src/utils/vttparser.ts ***!
|
||
\********************************/
|
||
/*! exports provided: parseTimeStamp, fixLineBreaks, VTTParser */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseTimeStamp", function() { return parseTimeStamp; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "fixLineBreaks", function() { return fixLineBreaks; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "VTTParser", function() { return VTTParser; });
|
||
/* harmony import */ var _vttcue__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./vttcue */ "./src/utils/vttcue.ts");
|
||
/*
|
||
* Source: https://github.com/mozilla/vtt.js/blob/master/dist/vtt.js
|
||
*/
|
||
|
||
|
||
var StringDecoder = /*#__PURE__*/function () {
|
||
function StringDecoder() {}
|
||
|
||
var _proto = StringDecoder.prototype;
|
||
|
||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||
_proto.decode = function decode(data, options) {
|
||
if (!data) {
|
||
return '';
|
||
}
|
||
|
||
if (typeof data !== 'string') {
|
||
throw new Error('Error - expected string data.');
|
||
}
|
||
|
||
return decodeURIComponent(encodeURIComponent(data));
|
||
};
|
||
|
||
return StringDecoder;
|
||
}(); // Try to parse input as a time stamp.
|
||
|
||
|
||
function parseTimeStamp(input) {
|
||
function computeSeconds(h, m, s, f) {
|
||
return (h | 0) * 3600 + (m | 0) * 60 + (s | 0) + parseFloat(f || 0);
|
||
}
|
||
|
||
var m = input.match(/^(?:(\d+):)?(\d{2}):(\d{2})(\.\d+)?/);
|
||
|
||
if (!m) {
|
||
return null;
|
||
}
|
||
|
||
if (parseFloat(m[2]) > 59) {
|
||
// Timestamp takes the form of [hours]:[minutes].[milliseconds]
|
||
// First position is hours as it's over 59.
|
||
return computeSeconds(m[2], m[3], 0, m[4]);
|
||
} // Timestamp takes the form of [hours (optional)]:[minutes]:[seconds].[milliseconds]
|
||
|
||
|
||
return computeSeconds(m[1], m[2], m[3], m[4]);
|
||
} // A settings object holds key/value pairs and will ignore anything but the first
|
||
// assignment to a specific key.
|
||
|
||
var Settings = /*#__PURE__*/function () {
|
||
function Settings() {
|
||
this.values = Object.create(null);
|
||
}
|
||
|
||
var _proto2 = Settings.prototype;
|
||
|
||
// Only accept the first assignment to any key.
|
||
_proto2.set = function set(k, v) {
|
||
if (!this.get(k) && v !== '') {
|
||
this.values[k] = v;
|
||
}
|
||
} // Return the value for a key, or a default value.
|
||
// If 'defaultKey' is passed then 'dflt' is assumed to be an object with
|
||
// a number of possible default values as properties where 'defaultKey' is
|
||
// the key of the property that will be chosen; otherwise it's assumed to be
|
||
// a single value.
|
||
;
|
||
|
||
_proto2.get = function get(k, dflt, defaultKey) {
|
||
if (defaultKey) {
|
||
return this.has(k) ? this.values[k] : dflt[defaultKey];
|
||
}
|
||
|
||
return this.has(k) ? this.values[k] : dflt;
|
||
} // Check whether we have a value for a key.
|
||
;
|
||
|
||
_proto2.has = function has(k) {
|
||
return k in this.values;
|
||
} // Accept a setting if its one of the given alternatives.
|
||
;
|
||
|
||
_proto2.alt = function alt(k, v, a) {
|
||
for (var n = 0; n < a.length; ++n) {
|
||
if (v === a[n]) {
|
||
this.set(k, v);
|
||
break;
|
||
}
|
||
}
|
||
} // Accept a setting if its a valid (signed) integer.
|
||
;
|
||
|
||
_proto2.integer = function integer(k, v) {
|
||
if (/^-?\d+$/.test(v)) {
|
||
// integer
|
||
this.set(k, parseInt(v, 10));
|
||
}
|
||
} // Accept a setting if its a valid percentage.
|
||
;
|
||
|
||
_proto2.percent = function percent(k, v) {
|
||
if (/^([\d]{1,3})(\.[\d]*)?%$/.test(v)) {
|
||
var percent = parseFloat(v);
|
||
|
||
if (percent >= 0 && percent <= 100) {
|
||
this.set(k, percent);
|
||
return true;
|
||
}
|
||
}
|
||
|
||
return false;
|
||
};
|
||
|
||
return Settings;
|
||
}(); // Helper function to parse input into groups separated by 'groupDelim', and
|
||
// interpret each group as a key/value pair separated by 'keyValueDelim'.
|
||
|
||
|
||
function parseOptions(input, callback, keyValueDelim, groupDelim) {
|
||
var groups = groupDelim ? input.split(groupDelim) : [input];
|
||
|
||
for (var i in groups) {
|
||
if (typeof groups[i] !== 'string') {
|
||
continue;
|
||
}
|
||
|
||
var kv = groups[i].split(keyValueDelim);
|
||
|
||
if (kv.length !== 2) {
|
||
continue;
|
||
}
|
||
|
||
var _k = kv[0];
|
||
var _v = kv[1];
|
||
callback(_k, _v);
|
||
}
|
||
}
|
||
|
||
var defaults = new _vttcue__WEBPACK_IMPORTED_MODULE_0__["default"](0, 0, ''); // 'middle' was changed to 'center' in the spec: https://github.com/w3c/webvtt/pull/244
|
||
// Safari doesn't yet support this change, but FF and Chrome do.
|
||
|
||
var center = defaults.align === 'middle' ? 'middle' : 'center';
|
||
|
||
function parseCue(input, cue, regionList) {
|
||
// Remember the original input if we need to throw an error.
|
||
var oInput = input; // 4.1 WebVTT timestamp
|
||
|
||
function consumeTimeStamp() {
|
||
var ts = parseTimeStamp(input);
|
||
|
||
if (ts === null) {
|
||
throw new Error('Malformed timestamp: ' + oInput);
|
||
} // Remove time stamp from input.
|
||
|
||
|
||
input = input.replace(/^[^\sa-zA-Z-]+/, '');
|
||
return ts;
|
||
} // 4.4.2 WebVTT cue settings
|
||
|
||
|
||
function consumeCueSettings(input, cue) {
|
||
var settings = new Settings();
|
||
parseOptions(input, function (k, v) {
|
||
var vals;
|
||
|
||
switch (k) {
|
||
case 'region':
|
||
// Find the last region we parsed with the same region id.
|
||
for (var i = regionList.length - 1; i >= 0; i--) {
|
||
if (regionList[i].id === v) {
|
||
settings.set(k, regionList[i].region);
|
||
break;
|
||
}
|
||
}
|
||
|
||
break;
|
||
|
||
case 'vertical':
|
||
settings.alt(k, v, ['rl', 'lr']);
|
||
break;
|
||
|
||
case 'line':
|
||
vals = v.split(',');
|
||
settings.integer(k, vals[0]);
|
||
|
||
if (settings.percent(k, vals[0])) {
|
||
settings.set('snapToLines', false);
|
||
}
|
||
|
||
settings.alt(k, vals[0], ['auto']);
|
||
|
||
if (vals.length === 2) {
|
||
settings.alt('lineAlign', vals[1], ['start', center, 'end']);
|
||
}
|
||
|
||
break;
|
||
|
||
case 'position':
|
||
vals = v.split(',');
|
||
settings.percent(k, vals[0]);
|
||
|
||
if (vals.length === 2) {
|
||
settings.alt('positionAlign', vals[1], ['start', center, 'end', 'line-left', 'line-right', 'auto']);
|
||
}
|
||
|
||
break;
|
||
|
||
case 'size':
|
||
settings.percent(k, v);
|
||
break;
|
||
|
||
case 'align':
|
||
settings.alt(k, v, ['start', center, 'end', 'left', 'right']);
|
||
break;
|
||
}
|
||
}, /:/, /\s/); // Apply default values for any missing fields.
|
||
|
||
cue.region = settings.get('region', null);
|
||
cue.vertical = settings.get('vertical', '');
|
||
var line = settings.get('line', 'auto');
|
||
|
||
if (line === 'auto' && defaults.line === -1) {
|
||
// set numeric line number for Safari
|
||
line = -1;
|
||
}
|
||
|
||
cue.line = line;
|
||
cue.lineAlign = settings.get('lineAlign', 'start');
|
||
cue.snapToLines = settings.get('snapToLines', true);
|
||
cue.size = settings.get('size', 100);
|
||
cue.align = settings.get('align', center);
|
||
var position = settings.get('position', 'auto');
|
||
|
||
if (position === 'auto' && defaults.position === 50) {
|
||
// set numeric position for Safari
|
||
position = cue.align === 'start' || cue.align === 'left' ? 0 : cue.align === 'end' || cue.align === 'right' ? 100 : 50;
|
||
}
|
||
|
||
cue.position = position;
|
||
}
|
||
|
||
function skipWhitespace() {
|
||
input = input.replace(/^\s+/, '');
|
||
} // 4.1 WebVTT cue timings.
|
||
|
||
|
||
skipWhitespace();
|
||
cue.startTime = consumeTimeStamp(); // (1) collect cue start time
|
||
|
||
skipWhitespace();
|
||
|
||
if (input.substr(0, 3) !== '-->') {
|
||
// (3) next characters must match '-->'
|
||
throw new Error("Malformed time stamp (time stamps must be separated by '-->'): " + oInput);
|
||
}
|
||
|
||
input = input.substr(3);
|
||
skipWhitespace();
|
||
cue.endTime = consumeTimeStamp(); // (5) collect cue end time
|
||
// 4.1 WebVTT cue settings list.
|
||
|
||
skipWhitespace();
|
||
consumeCueSettings(input, cue);
|
||
}
|
||
|
||
function fixLineBreaks(input) {
|
||
return input.replace(/<br(?: \/)?>/gi, '\n');
|
||
}
|
||
var VTTParser = /*#__PURE__*/function () {
|
||
function VTTParser() {
|
||
this.state = 'INITIAL';
|
||
this.buffer = '';
|
||
this.decoder = new StringDecoder();
|
||
this.regionList = [];
|
||
this.cue = null;
|
||
this.oncue = void 0;
|
||
this.onparsingerror = void 0;
|
||
this.onflush = void 0;
|
||
}
|
||
|
||
var _proto3 = VTTParser.prototype;
|
||
|
||
_proto3.parse = function parse(data) {
|
||
var _this = this; // If there is no data then we won't decode it, but will just try to parse
|
||
// whatever is in buffer already. This may occur in circumstances, for
|
||
// example when flush() is called.
|
||
|
||
|
||
if (data) {
|
||
// Try to decode the data that we received.
|
||
_this.buffer += _this.decoder.decode(data, {
|
||
stream: true
|
||
});
|
||
}
|
||
|
||
function collectNextLine() {
|
||
var buffer = _this.buffer;
|
||
var pos = 0;
|
||
buffer = fixLineBreaks(buffer);
|
||
|
||
while (pos < buffer.length && buffer[pos] !== '\r' && buffer[pos] !== '\n') {
|
||
++pos;
|
||
}
|
||
|
||
var line = buffer.substr(0, pos); // Advance the buffer early in case we fail below.
|
||
|
||
if (buffer[pos] === '\r') {
|
||
++pos;
|
||
}
|
||
|
||
if (buffer[pos] === '\n') {
|
||
++pos;
|
||
}
|
||
|
||
_this.buffer = buffer.substr(pos);
|
||
return line;
|
||
} // 3.2 WebVTT metadata header syntax
|
||
|
||
|
||
function parseHeader(input) {
|
||
parseOptions(input, function (k, v) {// switch (k) {
|
||
// case 'region':
|
||
// 3.3 WebVTT region metadata header syntax
|
||
// console.log('parse region', v);
|
||
// parseRegion(v);
|
||
// break;
|
||
// }
|
||
}, /:/);
|
||
} // 5.1 WebVTT file parsing.
|
||
|
||
|
||
try {
|
||
var line = '';
|
||
|
||
if (_this.state === 'INITIAL') {
|
||
// We can't start parsing until we have the first line.
|
||
if (!/\r\n|\n/.test(_this.buffer)) {
|
||
return this;
|
||
}
|
||
|
||
line = collectNextLine(); // strip of UTF-8 BOM if any
|
||
// https://en.wikipedia.org/wiki/Byte_order_mark#UTF-8
|
||
|
||
var m = line.match(/^()?WEBVTT([ \t].*)?$/);
|
||
|
||
if (!m || !m[0]) {
|
||
throw new Error('Malformed WebVTT signature.');
|
||
}
|
||
|
||
_this.state = 'HEADER';
|
||
}
|
||
|
||
var alreadyCollectedLine = false;
|
||
|
||
while (_this.buffer) {
|
||
// We can't parse a line until we have the full line.
|
||
if (!/\r\n|\n/.test(_this.buffer)) {
|
||
return this;
|
||
}
|
||
|
||
if (!alreadyCollectedLine) {
|
||
line = collectNextLine();
|
||
} else {
|
||
alreadyCollectedLine = false;
|
||
}
|
||
|
||
switch (_this.state) {
|
||
case 'HEADER':
|
||
// 13-18 - Allow a header (metadata) under the WEBVTT line.
|
||
if (/:/.test(line)) {
|
||
parseHeader(line);
|
||
} else if (!line) {
|
||
// An empty line terminates the header and starts the body (cues).
|
||
_this.state = 'ID';
|
||
}
|
||
|
||
continue;
|
||
|
||
case 'NOTE':
|
||
// Ignore NOTE blocks.
|
||
if (!line) {
|
||
_this.state = 'ID';
|
||
}
|
||
|
||
continue;
|
||
|
||
case 'ID':
|
||
// Check for the start of NOTE blocks.
|
||
if (/^NOTE($|[ \t])/.test(line)) {
|
||
_this.state = 'NOTE';
|
||
break;
|
||
} // 19-29 - Allow any number of line terminators, then initialize new cue values.
|
||
|
||
|
||
if (!line) {
|
||
continue;
|
||
}
|
||
|
||
_this.cue = new _vttcue__WEBPACK_IMPORTED_MODULE_0__["default"](0, 0, '');
|
||
_this.state = 'CUE'; // 30-39 - Check if self line contains an optional identifier or timing data.
|
||
|
||
if (line.indexOf('-->') === -1) {
|
||
_this.cue.id = line;
|
||
continue;
|
||
}
|
||
|
||
// Process line as start of a cue.
|
||
|
||
/* falls through */
|
||
|
||
case 'CUE':
|
||
// 40 - Collect cue timings and settings.
|
||
if (!_this.cue) {
|
||
_this.state = 'BADCUE';
|
||
continue;
|
||
}
|
||
|
||
try {
|
||
parseCue(line, _this.cue, _this.regionList);
|
||
} catch (e) {
|
||
// In case of an error ignore rest of the cue.
|
||
_this.cue = null;
|
||
_this.state = 'BADCUE';
|
||
continue;
|
||
}
|
||
|
||
_this.state = 'CUETEXT';
|
||
continue;
|
||
|
||
case 'CUETEXT':
|
||
{
|
||
var hasSubstring = line.indexOf('-->') !== -1; // 34 - If we have an empty line then report the cue.
|
||
// 35 - If we have the special substring '-->' then report the cue,
|
||
// but do not collect the line as we need to process the current
|
||
// one as a new cue.
|
||
|
||
if (!line || hasSubstring && (alreadyCollectedLine = true)) {
|
||
// We are done parsing self cue.
|
||
if (_this.oncue && _this.cue) {
|
||
_this.oncue(_this.cue);
|
||
}
|
||
|
||
_this.cue = null;
|
||
_this.state = 'ID';
|
||
continue;
|
||
}
|
||
|
||
if (_this.cue === null) {
|
||
continue;
|
||
}
|
||
|
||
if (_this.cue.text) {
|
||
_this.cue.text += '\n';
|
||
}
|
||
|
||
_this.cue.text += line;
|
||
}
|
||
continue;
|
||
|
||
case 'BADCUE':
|
||
// 54-62 - Collect and discard the remaining cue.
|
||
if (!line) {
|
||
_this.state = 'ID';
|
||
}
|
||
|
||
}
|
||
}
|
||
} catch (e) {
|
||
// If we are currently parsing a cue, report what we have.
|
||
if (_this.state === 'CUETEXT' && _this.cue && _this.oncue) {
|
||
_this.oncue(_this.cue);
|
||
}
|
||
|
||
_this.cue = null; // Enter BADWEBVTT state if header was not parsed correctly otherwise
|
||
// another exception occurred so enter BADCUE state.
|
||
|
||
_this.state = _this.state === 'INITIAL' ? 'BADWEBVTT' : 'BADCUE';
|
||
}
|
||
|
||
return this;
|
||
};
|
||
|
||
_proto3.flush = function flush() {
|
||
var _this = this;
|
||
|
||
try {
|
||
// Finish decoding the stream.
|
||
// _this.buffer += _this.decoder.decode();
|
||
// Synthesize the end of the current cue or region.
|
||
if (_this.cue || _this.state === 'HEADER') {
|
||
_this.buffer += '\n\n';
|
||
|
||
_this.parse();
|
||
} // If we've flushed, parsed, and we're still on the INITIAL state then
|
||
// that means we don't have enough of the stream to parse the first
|
||
// line.
|
||
|
||
|
||
if (_this.state === 'INITIAL' || _this.state === 'BADWEBVTT') {
|
||
throw new Error('Malformed WebVTT signature.');
|
||
}
|
||
} catch (e) {
|
||
if (_this.onparsingerror) {
|
||
_this.onparsingerror(e);
|
||
}
|
||
}
|
||
|
||
if (_this.onflush) {
|
||
_this.onflush();
|
||
}
|
||
|
||
return this;
|
||
};
|
||
|
||
return VTTParser;
|
||
}();
|
||
|
||
/***/ }),
|
||
|
||
/***/ "./src/utils/webvtt-parser.ts":
|
||
/*!************************************!*\
|
||
!*** ./src/utils/webvtt-parser.ts ***!
|
||
\************************************/
|
||
/*! exports provided: generateCueId, parseWebVTT */
|
||
/***/ (function(module, __webpack_exports__, __webpack_require__) {
|
||
|
||
"use strict";
|
||
__webpack_require__.r(__webpack_exports__);
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "generateCueId", function() { return generateCueId; });
|
||
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseWebVTT", function() { return parseWebVTT; });
|
||
/* harmony import */ var _Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.ts");
|
||
/* harmony import */ var _vttparser__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./vttparser */ "./src/utils/vttparser.ts");
|
||
/* harmony import */ var _demux_id3__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../demux/id3 */ "./src/demux/id3.ts");
|
||
/* harmony import */ var _timescale_conversion__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./timescale-conversion */ "./src/utils/timescale-conversion.ts");
|
||
/* harmony import */ var _remux_mp4_remuxer__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../remux/mp4-remuxer */ "./src/remux/mp4-remuxer.ts");
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
var LINEBREAKS = /\r\n|\n\r|\n|\r/g; // String.prototype.startsWith is not supported in IE11
|
||
|
||
var startsWith = function startsWith(inputString, searchString, position) {
|
||
if (position === void 0) {
|
||
position = 0;
|
||
}
|
||
|
||
return inputString.substr(position, searchString.length) === searchString;
|
||
};
|
||
|
||
var cueString2millis = function cueString2millis(timeString) {
|
||
var ts = parseInt(timeString.substr(-3));
|
||
var secs = parseInt(timeString.substr(-6, 2));
|
||
var mins = parseInt(timeString.substr(-9, 2));
|
||
var hours = timeString.length > 9 ? parseInt(timeString.substr(0, timeString.indexOf(':'))) : 0;
|
||
|
||
if (!Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(ts) || !Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(secs) || !Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(mins) || !Object(_Users_vincent_Documents_lib_hls_js_src_polyfills_number__WEBPACK_IMPORTED_MODULE_0__["isFiniteNumber"])(hours)) {
|
||
throw Error("Malformed X-TIMESTAMP-MAP: Local:" + timeString);
|
||
}
|
||
|
||
ts += 1000 * secs;
|
||
ts += 60 * 1000 * mins;
|
||
ts += 60 * 60 * 1000 * hours;
|
||
return ts;
|
||
}; // From https://github.com/darkskyapp/string-hash
|
||
|
||
|
||
var hash = function hash(text) {
|
||
var hash = 5381;
|
||
var i = text.length;
|
||
|
||
while (i) {
|
||
hash = hash * 33 ^ text.charCodeAt(--i);
|
||
}
|
||
|
||
return (hash >>> 0).toString();
|
||
}; // Create a unique hash id for a cue based on start/end times and text.
|
||
// This helps timeline-controller to avoid showing repeated captions.
|
||
|
||
|
||
function generateCueId(startTime, endTime, text) {
|
||
return hash(startTime.toString()) + hash(endTime.toString()) + hash(text);
|
||
}

var calculateOffset = function calculateOffset(vttCCs, cc, presentationTime) {
  var currCC = vttCCs[cc];
  var prevCC = vttCCs[currCC.prevCC]; // This is the first discontinuity or cues have been processed since the last discontinuity
  // Offset = current discontinuity time

  if (!prevCC || !prevCC.new && currCC.new) {
    vttCCs.ccOffset = vttCCs.presentationOffset = currCC.start;
    currCC.new = false;
    return;
  } // There have been discontinuities since cues were last parsed.
  // Offset = time elapsed


  while ((_prevCC = prevCC) !== null && _prevCC !== void 0 && _prevCC.new) {
    var _prevCC;

    vttCCs.ccOffset += currCC.start - prevCC.start;
    currCC.new = false;
    currCC = prevCC;
    prevCC = vttCCs[currCC.prevCC];
  }

  vttCCs.presentationOffset = presentationTime;
};
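
// Sketch of the expected input (assumed shape, inferred from the usage above rather than
// taken from this module): vttCCs is the caller's per-track discontinuity bookkeeping,
// roughly
//   { ccOffset: 0, presentationOffset: 0, 0: { start: 0, prevCC: -1, new: true }, ... }
// where each numeric key is a discontinuity counter (cc) and `new` marks counters whose
// start time has not yet been folded into ccOffset.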

function parseWebVTT(vttByteArray, initPTS, timescale, vttCCs, cc, timeOffset, callBack, errorCallBack) {
  var parser = new _vttparser__WEBPACK_IMPORTED_MODULE_1__["VTTParser"](); // Convert byteArray into string, replacing any somewhat exotic linefeeds with "\n", then split on that character.
  // Uint8Array.prototype.reduce is not implemented in IE11

  var vttLines = Object(_demux_id3__WEBPACK_IMPORTED_MODULE_2__["utf8ArrayToStr"])(new Uint8Array(vttByteArray)).trim().replace(LINEBREAKS, '\n').split('\n');
  var cues = [];
  var initPTS90Hz = Object(_timescale_conversion__WEBPACK_IMPORTED_MODULE_3__["toMpegTsClockFromTimescale"])(initPTS, timescale);
  var cueTime = '00:00.000';
  var timestampMapMPEGTS = 0;
  var timestampMapLOCAL = 0;
  var parsingError;
  var inHeader = true;

  parser.oncue = function (cue) {
    // Adjust cue timing: clamp cues so they start no earlier than 0 on the timeline, and drop cues that do not end after 0.
    var currCC = vttCCs[cc];
    var cueOffset = vttCCs.ccOffset; // Calculate subtitle PTS offset

    var webVttMpegTsMapOffset = (timestampMapMPEGTS - initPTS90Hz) / 90000; // Update offsets for new discontinuities

    if (currCC !== null && currCC !== void 0 && currCC.new) {
      if (timestampMapLOCAL !== undefined) {
        // When local time is provided, offset = discontinuity start time - local time
        cueOffset = vttCCs.ccOffset = currCC.start;
      } else {
        calculateOffset(vttCCs, cc, webVttMpegTsMapOffset);
      }
    }

    if (webVttMpegTsMapOffset) {
      // If we have MPEGTS, offset = presentation time + discontinuity offset
      cueOffset = webVttMpegTsMapOffset - vttCCs.presentationOffset;
    }

    var duration = cue.endTime - cue.startTime;
    var startTime = Object(_remux_mp4_remuxer__WEBPACK_IMPORTED_MODULE_4__["normalizePts"])((cue.startTime + cueOffset - timestampMapLOCAL) * 90000, timeOffset * 90000) / 90000;
    cue.startTime = startTime;
    cue.endTime = startTime + duration; // trim trailing webvtt block whitespaces

    var text = cue.text.trim(); // Fix encoding of special characters

    cue.text = decodeURIComponent(encodeURIComponent(text)); // If the cue was not assigned an id from the VTT file (line above the content), create one.

    if (!cue.id) {
      cue.id = generateCueId(cue.startTime, cue.endTime, text);
    }

    if (cue.endTime > 0) {
      cues.push(cue);
    }
  };

  parser.onparsingerror = function (error) {
    parsingError = error;
  };

  parser.onflush = function () {
    if (parsingError) {
      errorCallBack(parsingError);
      return;
    }

    callBack(cues);
  }; // Go through contents line by line.
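
  // Illustrative example (hypothetical values): a WebVTT payload segmented for HLS
  // typically starts with a header line such as
  //   X-TIMESTAMP-MAP=LOCAL:00:00:00.000,MPEGTS:900000
  // LOCAL is converted to seconds with cueString2millis and MPEGTS is a 90 kHz clock value;
  // with initPTS90Hz = 0 the mapping above yields an offset of (900000 - 0) / 90000 = 10 seconds.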

  vttLines.forEach(function (line) {
    if (inHeader) {
      // Look for X-TIMESTAMP-MAP in header.
      if (startsWith(line, 'X-TIMESTAMP-MAP=')) {
        // Once found, no more are allowed anyway, so stop searching.
        inHeader = false; // Extract LOCAL and MPEGTS.

        line.substr(16).split(',').forEach(function (timestamp) {
          if (startsWith(timestamp, 'LOCAL:')) {
            cueTime = timestamp.substr(6);
          } else if (startsWith(timestamp, 'MPEGTS:')) {
            timestampMapMPEGTS = parseInt(timestamp.substr(7));
          }
        });

        try {
          // Convert cue time to seconds
          timestampMapLOCAL = cueString2millis(cueTime) / 1000;
        } catch (error) {
          parsingError = error;
        } // Return without parsing X-TIMESTAMP-MAP line.


        return;
      } else if (line === '') {
        inHeader = false;
      }
    } // Parse line by default.


    parser.parse(line + '\n');
  });
  parser.flush();
}
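
// Usage sketch (hypothetical variable names; the caller in this bundle is the
// timeline-controller):
//   parseWebVTT(fragPayload, initPTS, 90000, vttCCs, frag.cc, frag.start,
//     function (cues) { /* add cues to the subtitle TextTrack */ },
//     function (error) { /* report a subtitle parsing error */ });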

/***/ }),

/***/ "./src/utils/xhr-loader.ts":
/*!*********************************!*\
  !*** ./src/utils/xhr-loader.ts ***!
  \*********************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _utils_logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.ts");
/* harmony import */ var _loader_load_stats__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../loader/load-stats */ "./src/loader/load-stats.ts");

var AGE_HEADER_LINE_REGEX = /^age:\s*[\d.]+\s*$/m;

var XhrLoader = /*#__PURE__*/function () {
  function XhrLoader(config
  /* HlsConfig */
  ) {
    this.xhrSetup = void 0;
    this.requestTimeout = void 0;
    this.retryTimeout = void 0;
    this.retryDelay = void 0;
    this.config = null;
    this.callbacks = null;
    this.context = void 0;
    this.loader = null;
    this.stats = void 0;
    this.xhrSetup = config ? config.xhrSetup : null;
    this.stats = new _loader_load_stats__WEBPACK_IMPORTED_MODULE_1__["LoadStats"]();
    this.retryDelay = 0;
  }

  var _proto = XhrLoader.prototype;

  _proto.destroy = function destroy() {
    this.callbacks = null;
    this.abortInternal();
    this.loader = null;
    this.config = null;
  };

  _proto.abortInternal = function abortInternal() {
    var loader = this.loader;
    self.clearTimeout(this.requestTimeout);
    self.clearTimeout(this.retryTimeout);

    if (loader) {
      loader.onreadystatechange = null;
      loader.onprogress = null;

      if (loader.readyState !== 4) {
        this.stats.aborted = true;
        loader.abort();
      }
    }
  };

  _proto.abort = function abort() {
    var _this$callbacks;

    this.abortInternal();

    if ((_this$callbacks = this.callbacks) !== null && _this$callbacks !== void 0 && _this$callbacks.onAbort) {
      this.callbacks.onAbort(this.stats, this.context, this.loader);
    }
  };

  _proto.load = function load(context, config, callbacks) {
    if (this.stats.loading.start) {
      throw new Error('Loader can only be used once.');
    }

    this.stats.loading.start = self.performance.now();
    this.context = context;
    this.config = config;
    this.callbacks = callbacks;
    this.retryDelay = config.retryDelay;
    this.loadInternal();
  };
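
  // Usage sketch (illustrative values; the field names match what this class reads from
  // context, config and callbacks):
  //   loader.load(
  //     { url: 'https://example.com/playlist.m3u8', responseType: 'text' },
  //     { timeout: 10000, maxRetry: 3, retryDelay: 1000, maxRetryDelay: 64000 },
  //     { onSuccess: onSuccess, onError: onError, onTimeout: onTimeout, onProgress: onProgress });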

  _proto.loadInternal = function loadInternal() {
    var config = this.config,
        context = this.context;

    if (!config) {
      return;
    }

    var xhr = this.loader = new self.XMLHttpRequest();
    var stats = this.stats;
    stats.loading.first = 0;
    stats.loaded = 0;
    var xhrSetup = this.xhrSetup;

    try {
      if (xhrSetup) {
        try {
          xhrSetup(xhr, context.url);
        } catch (e) {
          // fix xhrSetup: (xhr, url) => {xhr.setRequestHeader("Content-Language", "test");}
          // not working, as xhr.setRequestHeader expects xhr.readyState === OPEN
          xhr.open('GET', context.url, true);
          xhrSetup(xhr, context.url);
        }
      }

      if (!xhr.readyState) {
        xhr.open('GET', context.url, true);
      }

      var headers = this.context.headers;

      if (headers) {
        for (var header in headers) {
          xhr.setRequestHeader(header, headers[header]);
        }
      }
    } catch (e) {
      // IE11 throws an exception on xhr.open if attempting to access an HTTP resource over HTTPS
      this.callbacks.onError({
        code: xhr.status,
        text: e.message
      }, context, xhr);
      return;
    }

    if (context.rangeEnd) {
      xhr.setRequestHeader('Range', 'bytes=' + context.rangeStart + '-' + (context.rangeEnd - 1));
    }
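
    // Illustrative example: rangeStart = 0 with rangeEnd = 500 sends "Range: bytes=0-499";
    // rangeEnd is exclusive while the HTTP byte range is inclusive, hence the - 1.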

    xhr.onreadystatechange = this.readystatechange.bind(this);
    xhr.onprogress = this.loadprogress.bind(this);
    xhr.responseType = context.responseType; // setup timeout before we perform request

    self.clearTimeout(this.requestTimeout);
    this.requestTimeout = self.setTimeout(this.loadtimeout.bind(this), config.timeout);
    xhr.send();
  };

  _proto.readystatechange = function readystatechange() {
    var context = this.context,
        xhr = this.loader,
        stats = this.stats;

    if (!context || !xhr) {
      return;
    }

    var readyState = xhr.readyState;
    var config = this.config; // don't proceed if xhr has been aborted

    if (stats.aborted) {
      return;
    } // >= HEADERS_RECEIVED


    if (readyState >= 2) {
      // clear the xhr timeout and rearm it if readyState is less than 4
      self.clearTimeout(this.requestTimeout);

      if (stats.loading.first === 0) {
        stats.loading.first = Math.max(self.performance.now(), stats.loading.start);
      }

      if (readyState === 4) {
        xhr.onreadystatechange = null;
        xhr.onprogress = null;
        var status = xhr.status; // HTTP statuses between 200 and 299 are all successful

        if (status >= 200 && status < 300) {
          stats.loading.end = Math.max(self.performance.now(), stats.loading.first);
          var data;
          var len;

          if (context.responseType === 'arraybuffer') {
            data = xhr.response;
            len = data.byteLength;
          } else {
            data = xhr.responseText;
            len = data.length;
          }

          stats.loaded = stats.total = len;

          if (!this.callbacks) {
            return;
          }

          var onProgress = this.callbacks.onProgress;

          if (onProgress) {
            onProgress(stats, context, data, xhr);
          }

          if (!this.callbacks) {
            return;
          }

          var response = {
            url: xhr.responseURL,
            data: data
          };
          this.callbacks.onSuccess(response, stats, context, xhr);
        } else {
          // If the max number of retries has been reached, or the HTTP status is between 400 and 499
          // (such errors cannot be recovered by retrying), report the error.
          if (stats.retry >= config.maxRetry || status >= 400 && status < 499) {
            _utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].error(status + " while loading " + context.url);
            this.callbacks.onError({
              code: status,
              text: xhr.statusText
            }, context, xhr);
          } else {
            // retry
            _utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].warn(status + " while loading " + context.url + ", retrying in " + this.retryDelay + "..."); // abort and reset internal state

            this.abortInternal();
            this.loader = null; // schedule retry

            self.clearTimeout(this.retryTimeout);
            this.retryTimeout = self.setTimeout(this.loadInternal.bind(this), this.retryDelay); // set exponential backoff

            this.retryDelay = Math.min(2 * this.retryDelay, config.maxRetryDelay);
            stats.retry++;
          }
        }
      } else {
        // readyState is HEADERS_RECEIVED or LOADING (>= 2 and !== 4): the request is not finished yet, so rearm the timeout
        self.clearTimeout(this.requestTimeout);
        this.requestTimeout = self.setTimeout(this.loadtimeout.bind(this), config.timeout);
      }
    }
  };
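
  // Illustrative retry progression (hypothetical config): with retryDelay = 1000 and
  // maxRetryDelay = 64000, failed attempts are retried after 1000, 2000, 4000, 8000 ... ms,
  // capped at 64000 ms, until stats.retry reaches config.maxRetry.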

  _proto.loadtimeout = function loadtimeout() {
    _utils_logger__WEBPACK_IMPORTED_MODULE_0__["logger"].warn("timeout while loading " + this.context.url);
    var callbacks = this.callbacks;

    if (callbacks) {
      this.abortInternal();
      callbacks.onTimeout(this.stats, this.context, this.loader);
    }
  };

  _proto.loadprogress = function loadprogress(event) {
    var stats = this.stats;
    stats.loaded = event.loaded;

    if (event.lengthComputable) {
      stats.total = event.total;
    }
  };

  _proto.getCacheAge = function getCacheAge() {
    var result = null;

    if (this.loader && AGE_HEADER_LINE_REGEX.test(this.loader.getAllResponseHeaders())) {
      var ageHeader = this.loader.getResponseHeader('age');
      result = ageHeader ? parseFloat(ageHeader) : null;
    }

    return result;
  };
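
  // Illustrative example: if the response headers contain a line matching "age: 23.2",
  // getCacheAge() returns 23.2; otherwise (or before any response) it returns null.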

  return XhrLoader;
}();

/* harmony default export */ __webpack_exports__["default"] = (XhrLoader);
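
// Usage sketch: XhrLoader serves as the bundle's default `loader` implementation; an
// application can swap in its own loader with the same load/abort/destroy surface via
// the config, e.g. (illustrative):
//   var hls = new Hls({ loader: CustomLoader });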

/***/ })

/******/ })["default"];
});
//# sourceMappingURL=hls.js.map