Source: lib/media/media_source_engine.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.media.MediaSourceEngine');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.log');
  9. goog.require('shaka.config.CodecSwitchingStrategy');
  10. goog.require('shaka.media.Capabilities');
  11. goog.require('shaka.media.ContentWorkarounds');
  12. goog.require('shaka.media.ClosedCaptionParser');
  13. goog.require('shaka.media.IClosedCaptionParser');
  14. goog.require('shaka.media.ManifestParser');
  15. goog.require('shaka.media.SegmentReference');
  16. goog.require('shaka.media.TimeRangesUtils');
  17. goog.require('shaka.text.TextEngine');
  18. goog.require('shaka.transmuxer.TransmuxerEngine');
  19. goog.require('shaka.util.BufferUtils');
  20. goog.require('shaka.util.Destroyer');
  21. goog.require('shaka.util.Error');
  22. goog.require('shaka.util.EventManager');
  23. goog.require('shaka.util.Functional');
  24. goog.require('shaka.util.IDestroyable');
  25. goog.require('shaka.util.Id3Utils');
  26. goog.require('shaka.util.ManifestParserUtils');
  27. goog.require('shaka.util.MimeUtils');
  28. goog.require('shaka.util.Mp4BoxParsers');
  29. goog.require('shaka.util.Mp4Parser');
  30. goog.require('shaka.util.Platform');
  31. goog.require('shaka.util.PublicPromise');
  32. goog.require('shaka.util.StreamUtils');
  33. goog.require('shaka.util.TsParser');
  34. goog.require('shaka.lcevc.Dec');
  35. /**
  36. * @summary
  37. * MediaSourceEngine wraps all operations on MediaSource and SourceBuffers.
  38. * All asynchronous operations return a Promise, and all operations are
  39. * internally synchronized and serialized as needed. Operations that can
  40. * be done in parallel will be done in parallel.
  41. *
  42. * @implements {shaka.util.IDestroyable}
  43. */
  44. shaka.media.MediaSourceEngine = class {
  45. /**
  46. * @param {HTMLMediaElement} video The video element, whose source is tied to
  47. * MediaSource during the lifetime of the MediaSourceEngine.
  48. * @param {!shaka.extern.TextDisplayer} textDisplayer
  49. * The text displayer that will be used with the text engine.
  50. * MediaSourceEngine takes ownership of the displayer. When
  51. * MediaSourceEngine is destroyed, it will destroy the displayer.
  52. * @param {!shaka.media.MediaSourceEngine.PlayerInterface} playerInterface
  53. * Interface for common player methods.
  54. * @param {?shaka.lcevc.Dec} [lcevcDec] Optional - LCEVC Decoder Object
  55. */
constructor(video, textDisplayer, playerInterface, lcevcDec) {
  /** @private {HTMLMediaElement} */
  this.video_ = video;
  /** @private {?shaka.media.MediaSourceEngine.PlayerInterface} */
  this.playerInterface_ = playerInterface;
  // Set via configure(), which must be called before init().
  /** @private {?shaka.extern.MediaSourceConfiguration} */
  this.config_ = null;
  // Owned by this object; destroyed in doDestroy_().
  /** @private {shaka.extern.TextDisplayer} */
  this.textDisplayer_ = textDisplayer;
  // One SourceBuffer per media content type.  Text does not get a
  // SourceBuffer; it is handled by textEngine_ below.
  /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
                         SourceBuffer>} */
  this.sourceBuffers_ = {};
  // The full MIME type each SourceBuffer was created with.
  /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
                         string>} */
  this.sourceBufferTypes_ = {};
  // True when the stream for that content type declared DRM info
  // (see initSourceBuffer_()).
  /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
                         boolean>} */
  this.expectedEncryption_ = {};
  /** @private {shaka.text.TextEngine} */
  this.textEngine_ = null;
  /** @private {boolean} */
  this.segmentRelativeVttTiming_ = false;
  // Owned by Player; only cleared, never destroyed, in doDestroy_().
  /** @private {?shaka.lcevc.Dec} */
  this.lcevcDec_ = lcevcDec || null;
  // Pending SourceBuffer operations per content type; index 0 of each
  // queue is the operation currently in progress (see doDestroy_()).
  /**
   * @private {!Object.<string,
   *                    !Array.<shaka.media.MediaSourceEngine.Operation>>}
   */
  this.queues_ = {};
  /** @private {shaka.util.EventManager} */
  this.eventManager_ = new shaka.util.EventManager();
  /** @private {!Object.<string, !shaka.extern.Transmuxer>} */
  this.transmuxers_ = {};
  /** @private {?shaka.media.IClosedCaptionParser} */
  this.captionParser_ = null;
  // Resolved by onSourceOpen_() when the 'sourceopen' event fires.
  /** @private {!shaka.util.PublicPromise} */
  this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  // Object URL tying the video element to the MediaSource; assigned in
  // createMediaSource() and revoked in onSourceOpen_().
  /** @private {string} */
  this.url_ = '';
  /** @private {boolean} */
  this.playbackHasBegun_ = false;
  // NOTE: createMediaSource() reads video_ and eventManager_ and assigns
  // url_ and streamingAllowed_, so this line must stay after those fields
  // are initialized.
  /** @private {MediaSource} */
  this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  /** @private {boolean} */
  this.reloadingMediaSource_ = false;
  /** @type {!shaka.util.Destroyer} */
  this.destroyer_ = new shaka.util.Destroyer(() => this.doDestroy_());
  /** @private {boolean} */
  this.sequenceMode_ = false;
  /** @private {string} */
  this.manifestType_ = shaka.media.ManifestParser.UNKNOWN;
  /** @private {boolean} */
  this.ignoreManifestTimestampsInSegmentsMode_ = false;
  /** @private {boolean} */
  this.attemptTimestampOffsetCalculation_ = false;
  // Text appends in sequence mode await this promise (see appendBuffer());
  // presumably resolved once the first media segment establishes the
  // offset — resolved elsewhere in this class.
  /** @private {!shaka.util.PublicPromise.<number>} */
  this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  // True when muxed A/V content is split into separate audio and video
  // SourceBuffers via a transmuxer (see initSourceBuffer_()).
  /** @private {boolean} */
  this.needSplitMuxedContent_ = false;
  // Toggled by ManagedMediaSource 'startstreaming'/'endstreaming' events;
  // stays true when a plain MediaSource is in use.
  /** @private {boolean} */
  this.streamingAllowed_ = true;
  /** @private {?number} */
  this.lastDuration_ = null;
  // Lazily created and reused across TS segment parses
  // (see getTimestampAndDispatchMetadata_()).
  /** @private {?shaka.util.TsParser} */
  this.tsParser_ = null;
  // First observed media timestamps and their reference start times, used
  // to compute the audio/video compensation below (see appendBuffer()).
  /** @private {?number} */
  this.firstVideoTimestamp_ = null;
  /** @private {?number} */
  this.firstVideoReferenceStartTime_ = null;
  /** @private {?number} */
  this.firstAudioTimestamp_ = null;
  /** @private {?number} */
  this.firstAudioReferenceStartTime_ = null;
  // Resolved with (firstVideoTimestamp - firstAudioTimestamp) once both
  // are known, or 0 if the two segment start times do not match.
  /** @private {!shaka.util.PublicPromise.<number>} */
  this.audioCompensation_ = new shaka.util.PublicPromise();
}
  132. /**
  133. * Create a MediaSource object, attach it to the video element, and return it.
  134. * Resolves the given promise when the MediaSource is ready.
  135. *
  136. * Replaced by unit tests.
  137. *
  138. * @param {!shaka.util.PublicPromise} p
  139. * @return {!MediaSource}
  140. */
createMediaSource(p) {
  // A freshly-created MediaSource is allowed to stream; a
  // ManagedMediaSource may later pause streaming via 'endstreaming' below.
  this.streamingAllowed_ = true;
  /** @type {!MediaSource} */
  let mediaSource;
  if (window.ManagedMediaSource) {
    // Prefer ManagedMediaSource when the platform provides it.  Remote
    // playback is disabled on the element while it is in use.
    this.video_.disableRemotePlayback = true;
    mediaSource = new ManagedMediaSource();
    // The browser signals when appending may resume and when it should
    // stop; isStreamingAllowed() exposes this state.
    this.eventManager_.listen(
        mediaSource, 'startstreaming', () => {
          shaka.log.info('MMS startstreaming');
          this.streamingAllowed_ = true;
        });
    this.eventManager_.listen(
        mediaSource, 'endstreaming', () => {
          shaka.log.info('MMS endstreaming');
          this.streamingAllowed_ = false;
        });
  } else {
    mediaSource = new MediaSource();
  }
  // Set up MediaSource on the video element.  Listen before assigning src
  // below so the 'sourceopen' event cannot be missed.
  this.eventManager_.listenOnce(
      mediaSource, 'sourceopen', () => this.onSourceOpen_(p));
  // Correctly set when playback has begun.
  this.eventManager_.listenOnce(this.video_, 'playing', () => {
    this.playbackHasBegun_ = true;
  });
  // Store the object URL for releasing it later (see onSourceOpen_()).
  this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource);
  this.video_.src = this.url_;
  return mediaSource;
}
  173. /**
  174. * @param {shaka.util.PublicPromise} p
  175. * @private
  176. */
  177. onSourceOpen_(p) {
  178. goog.asserts.assert(this.url_, 'Must have object URL');
  179. // Release the object URL that was previously created, to prevent memory
  180. // leak.
  181. // createObjectURL creates a strong reference to the MediaSource object
  182. // inside the browser. Setting the src of the video then creates another
  183. // reference within the video element. revokeObjectURL will remove the
  184. // strong reference to the MediaSource object, and allow it to be
  185. // garbage-collected later.
  186. URL.revokeObjectURL(this.url_);
  187. p.resolve();
  188. }
  189. /**
  190. * Checks if a certain type is supported.
  191. *
  192. * @param {shaka.extern.Stream} stream
  193. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  194. * @return {!Promise.<boolean>}
  195. */
  196. static async isStreamSupported(stream, contentType) {
  197. if (stream.createSegmentIndex) {
  198. await stream.createSegmentIndex();
  199. }
  200. if (!stream.segmentIndex) {
  201. return false;
  202. }
  203. if (stream.segmentIndex.isEmpty()) {
  204. return true;
  205. }
  206. const MimeUtils = shaka.util.MimeUtils;
  207. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  208. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  209. const StreamUtils = shaka.util.StreamUtils;
  210. const seenCombos = new Set();
  211. // Check each combination of mimeType and codecs within the segment index.
  212. // Unfortunately we cannot use fullMimeTypes, as we ALSO need to check the
  213. // getFullTypeWithAllCodecs (for the sake of the transmuxer) and we have no
  214. // way of going from a full mimeType to a full mimeType with all codecs.
  215. // As this function is only called in debug mode, a little inefficiency is
  216. // acceptable.
  217. for (const ref of stream.segmentIndex) {
  218. const mimeType = ref.mimeType || stream.mimeType || '';
  219. let codecs = ref.codecs || stream.codecs || '';
  220. // Don't check the same combination of mimetype + codecs twice.
  221. const combo = mimeType + ':' + codecs;
  222. if (seenCombos.has(combo)) {
  223. continue;
  224. }
  225. seenCombos.add(combo);
  226. if (contentType == ContentType.TEXT) {
  227. const fullMimeType = MimeUtils.getFullType(mimeType, codecs);
  228. if (!shaka.text.TextEngine.isTypeSupported(fullMimeType)) {
  229. return false;
  230. }
  231. } else {
  232. if (contentType == ContentType.VIDEO) {
  233. codecs = StreamUtils.getCorrectVideoCodecs(codecs);
  234. } else if (contentType == ContentType.AUDIO) {
  235. codecs = StreamUtils.getCorrectAudioCodecs(codecs, mimeType);
  236. }
  237. const extendedMimeType = MimeUtils.getExtendedType(
  238. stream, mimeType, codecs);
  239. const fullMimeType = MimeUtils.getFullTypeWithAllCodecs(
  240. mimeType, codecs);
  241. if (!shaka.media.Capabilities.isTypeSupported(extendedMimeType) &&
  242. !TransmuxerEngine.isSupported(fullMimeType, stream.type)) {
  243. return false;
  244. }
  245. }
  246. }
  247. return true;
  248. }
  249. /**
  250. * Returns a map of MediaSource support for well-known types.
  251. *
  252. * @return {!Object.<string, boolean>}
  253. */
  254. static probeSupport() {
  255. const testMimeTypes = [
  256. // MP4 types
  257. 'video/mp4; codecs="avc1.42E01E"',
  258. 'video/mp4; codecs="avc3.42E01E"',
  259. 'video/mp4; codecs="hev1.1.6.L93.90"',
  260. 'video/mp4; codecs="hvc1.1.6.L93.90"',
  261. 'video/mp4; codecs="hev1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  262. 'video/mp4; codecs="hvc1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  263. 'video/mp4; codecs="vp9"',
  264. 'video/mp4; codecs="vp09.00.10.08"',
  265. 'video/mp4; codecs="av01.0.01M.08"',
  266. 'video/mp4; codecs="dvh1.20.01"',
  267. 'audio/mp4; codecs="mp4a.40.2"',
  268. 'audio/mp4; codecs="ac-3"',
  269. 'audio/mp4; codecs="ec-3"',
  270. 'audio/mp4; codecs="ac-4.02.01.01"',
  271. 'audio/mp4; codecs="opus"',
  272. 'audio/mp4; codecs="flac"',
  273. 'audio/mp4; codecs="dtsc"', // DTS Digital Surround
  274. 'audio/mp4; codecs="dtse"', // DTS Express
  275. 'audio/mp4; codecs="dtsx"', // DTS:X
  276. // WebM types
  277. 'video/webm; codecs="vp8"',
  278. 'video/webm; codecs="vp9"',
  279. 'video/webm; codecs="vp09.00.10.08"',
  280. 'audio/webm; codecs="vorbis"',
  281. 'audio/webm; codecs="opus"',
  282. // MPEG2 TS types (video/ is also used for audio: https://bit.ly/TsMse)
  283. 'video/mp2t; codecs="avc1.42E01E"',
  284. 'video/mp2t; codecs="avc3.42E01E"',
  285. 'video/mp2t; codecs="hvc1.1.6.L93.90"',
  286. 'video/mp2t; codecs="mp4a.40.2"',
  287. 'video/mp2t; codecs="ac-3"',
  288. 'video/mp2t; codecs="ec-3"',
  289. // WebVTT types
  290. 'text/vtt',
  291. 'application/mp4; codecs="wvtt"',
  292. // TTML types
  293. 'application/ttml+xml',
  294. 'application/mp4; codecs="stpp"',
  295. // Containerless types
  296. ...shaka.util.MimeUtils.RAW_FORMATS,
  297. ];
  298. const support = {};
  299. for (const type of testMimeTypes) {
  300. if (shaka.text.TextEngine.isTypeSupported(type)) {
  301. support[type] = true;
  302. } else if (shaka.util.Platform.supportsMediaSource()) {
  303. support[type] = shaka.media.Capabilities.isTypeSupported(type) ||
  304. shaka.transmuxer.TransmuxerEngine.isSupported(type);
  305. } else {
  306. support[type] = shaka.util.Platform.supportsMediaType(type);
  307. }
  308. const basicType = type.split(';')[0];
  309. support[basicType] = support[basicType] || support[type];
  310. }
  311. return support;
  312. }
  313. /** @override */
destroy() {
  // Delegates to the Destroyer created in the constructor, which invokes
  // doDestroy_() to perform the actual teardown.
  return this.destroyer_.destroy();
}
  317. /** @private */
async doDestroy_() {
  const Functional = shaka.util.Functional;
  const cleanup = [];
  // Wind down each per-content-type operation queue: wait for the
  // in-flight operation (queue head), reject everything queued behind it.
  for (const contentType in this.queues_) {
    // Make a local copy of the queue and the first item.
    const q = this.queues_[contentType];
    const inProgress = q[0];
    // Drop everything else out of the original queue.
    this.queues_[contentType] = q.slice(0, 1);
    // We will wait for this item to complete/fail.
    if (inProgress) {
      cleanup.push(inProgress.p.catch(Functional.noop));
    }
    // The rest will be rejected silently if possible.
    for (const item of q.slice(1)) {
      item.p.reject(shaka.util.Destroyer.destroyedError());
    }
  }
  if (this.textEngine_) {
    cleanup.push(this.textEngine_.destroy());
  }
  // MediaSourceEngine owns the text displayer (see constructor docs).
  if (this.textDisplayer_) {
    cleanup.push(this.textDisplayer_.destroy());
  }
  for (const contentType in this.transmuxers_) {
    cleanup.push(this.transmuxers_[contentType].destroy());
  }
  // Only after all async teardown settles is it safe to release the
  // event manager and detach the media element.
  await Promise.all(cleanup);
  if (this.eventManager_) {
    this.eventManager_.release();
    this.eventManager_ = null;
  }
  if (this.video_) {
    // "unload" the video element.
    this.video_.removeAttribute('src');
    this.video_.load();
    this.video_ = null;
  }
  this.config_ = null;
  this.mediaSource_ = null;
  this.textEngine_ = null;
  this.textDisplayer_ = null;
  this.sourceBuffers_ = {};
  this.transmuxers_ = {};
  this.captionParser_ = null;
  if (goog.DEBUG) {
    // Nothing should have been queued while we were destroying.
    for (const contentType in this.queues_) {
      goog.asserts.assert(
          this.queues_[contentType].length == 0,
          contentType + ' queue should be empty after destroy!');
    }
  }
  this.queues_ = {};
  // This object is owned by Player
  this.lcevcDec_ = null;
  this.tsParser_ = null;
  this.playerInterface_ = null;
}
  376. /**
  377. * @return {!Promise} Resolved when MediaSource is open and attached to the
  378. * media element. This process is actually initiated by the constructor.
  379. */
open() {
  // Resolved by onSourceOpen_() once 'sourceopen' fires for the
  // MediaSource created in the constructor.
  return this.mediaSourceOpen_;
}
  383. /**
  384. * Initialize MediaSourceEngine.
  385. *
  386. * Note that it is not valid to call this multiple times, except to add or
  387. * reinitialize text streams.
  388. *
  389. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  390. * shaka.extern.Stream>} streamsByType
  391. * A map of content types to streams. All streams must be supported
  392. * according to MediaSourceEngine.isStreamSupported.
  393. * @param {boolean=} sequenceMode
  394. * If true, the media segments are appended to the SourceBuffer in strict
  395. * sequence.
  396. * @param {string=} manifestType
  397. * Indicates the type of the manifest.
  398. * @param {boolean=} ignoreManifestTimestampsInSegmentsMode
  399. * If true, don't adjust the timestamp offset to account for manifest
  400. * segment durations being out of sync with segment durations. In other
  401. * words, assume that there are no gaps in the segments when appending
  402. * to the SourceBuffer, even if the manifest and segment times disagree.
  403. * Indicates if the manifest has text streams.
  404. *
  405. * @return {!Promise}
  406. */
async init(streamsByType, sequenceMode=false,
    manifestType=shaka.media.ManifestParser.UNKNOWN,
    ignoreManifestTimestampsInSegmentsMode=false) {
  const ContentType = shaka.util.ManifestParserUtils.ContentType;
  // Nothing can be added to the MediaSource until it is open.
  await this.mediaSourceOpen_;
  this.sequenceMode_ = sequenceMode;
  this.manifestType_ = manifestType;
  this.ignoreManifestTimestampsInSegmentsMode_ =
      ignoreManifestTimestampsInSegmentsMode;
  // Timestamp-offset calculation is only attempted for HLS in segments
  // mode when manifest timestamps are being honored.
  this.attemptTimestampOffsetCalculation_ = !this.sequenceMode_ &&
      this.manifestType_ == shaka.media.ManifestParser.HLS &&
      !this.ignoreManifestTimestampsInSegmentsMode_;
  this.tsParser_ = null;
  for (const contentType of streamsByType.keys()) {
    const stream = streamsByType.get(contentType);
    // Intentionally sequential (hence the eslint-disable): each source
    // buffer is fully set up before the next, and initSourceBuffer_ may
    // set needSplitMuxedContent_, which is read right after.
    // eslint-disable-next-line no-await-in-loop
    await this.initSourceBuffer_(contentType, stream, stream.codecs);
    if (this.needSplitMuxedContent_) {
      // Muxed content was split into separate audio and video buffers,
      // so both operation queues are needed for this one input stream.
      this.queues_[ContentType.AUDIO] = [];
      this.queues_[ContentType.VIDEO] = [];
    } else {
      this.queues_[contentType] = [];
    }
  }
}
  432. /**
  433. * Initialize a specific SourceBuffer.
  434. *
  435. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  436. * @param {shaka.extern.Stream} stream
  437. * @param {string} codecs
  438. * @return {!Promise}
  439. * @private
  440. */
async initSourceBuffer_(contentType, stream, codecs) {
  const ContentType = shaka.util.ManifestParserUtils.ContentType;
  goog.asserts.assert(
      await shaka.media.MediaSourceEngine.isStreamSupported(
          stream, contentType),
      'Type negotiation should happen before MediaSourceEngine.init!');
  let mimeType = shaka.util.MimeUtils.getFullType(
      stream.mimeType, codecs);
  if (contentType == ContentType.TEXT) {
    // Text never gets a SourceBuffer; it goes through TextEngine.
    this.reinitText(mimeType, this.sequenceMode_, stream.external);
  } else {
    // Transmux when forced by configuration, when the platform cannot
    // play the type natively, or for raw (containerless) formats outside
    // sequence mode.
    let needTransmux = this.config_.forceTransmux;
    if (!shaka.media.Capabilities.isTypeSupported(mimeType) ||
        (!this.sequenceMode_ &&
        shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType))) {
      needTransmux = true;
    }
    const mimeTypeWithAllCodecs =
        shaka.util.MimeUtils.getFullTypeWithAllCodecs(
            stream.mimeType, codecs);
    if (needTransmux) {
      const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
          ContentType.AUDIO, (codecs || '').split(','));
      const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
          ContentType.VIDEO, (codecs || '').split(','));
      if (audioCodec && videoCodec) {
        // Muxed A/V content: recurse once per media type, each with only
        // its own codec, and set up separate buffers.
        this.needSplitMuxedContent_ = true;
        await this.initSourceBuffer_(ContentType.AUDIO, stream, audioCodec);
        await this.initSourceBuffer_(ContentType.VIDEO, stream, videoCodec);
        return;
      }
      const transmuxerPlugin = shaka.transmuxer.TransmuxerEngine
          .findTransmuxer(mimeTypeWithAllCodecs);
      if (transmuxerPlugin) {
        // The SourceBuffer is created with the transmuxer's OUTPUT type.
        const transmuxer = transmuxerPlugin();
        this.transmuxers_[contentType] = transmuxer;
        mimeType =
            transmuxer.convertCodecs(contentType, mimeTypeWithAllCodecs);
      }
    }
    const type = this.addExtraFeaturesToMimeType_(mimeType);
    this.destroyer_.ensureNotDestroyed();
    let sourceBuffer;
    try {
      sourceBuffer = this.mediaSource_.addSourceBuffer(type);
    } catch (exception) {
      // addSourceBuffer throws if the MediaSource is not 'open' or the
      // type is rejected; surface it as a critical shaka.util.Error.
      throw new shaka.util.Error(
          shaka.util.Error.Severity.CRITICAL,
          shaka.util.Error.Category.MEDIA,
          shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
          exception,
          'The mediaSource_ status was ' + this.mediaSource_.readyState +
          ' expected \'open\'',
          null);
    }
    if (this.sequenceMode_) {
      sourceBuffer.mode =
          shaka.media.MediaSourceEngine.SourceBufferMode_.SEQUENCE;
    }
    this.eventManager_.listen(
        sourceBuffer, 'error',
        () => this.onError_(contentType));
    this.eventManager_.listen(
        sourceBuffer, 'updateend',
        () => this.onUpdateEnd_(contentType));
    this.sourceBuffers_[contentType] = sourceBuffer;
    this.sourceBufferTypes_[contentType] = mimeType;
    this.expectedEncryption_[contentType] = !!stream.drmInfos.length;
  }
}
  511. /**
  512. * Called by the Player to provide an updated configuration any time it
  513. * changes. Must be called at least once before init().
  514. *
  515. * @param {shaka.extern.MediaSourceConfiguration} config
  516. */
  517. configure(config) {
  518. this.config_ = config;
  519. if (this.textEngine_) {
  520. this.textEngine_.setModifyCueCallback(config.modifyCueCallback);
  521. }
  522. }
  523. /**
  524. * Indicate if the streaming is allowed by MediaSourceEngine.
* If we are using MediaSource, this always returns true.
  526. *
  527. * @return {boolean}
  528. */
isStreamingAllowed() {
  // Toggled by the ManagedMediaSource 'startstreaming'/'endstreaming'
  // events (see createMediaSource); stays true for a plain MediaSource.
  return this.streamingAllowed_;
}
  532. /**
  533. * Reinitialize the TextEngine for a new text type.
  534. * @param {string} mimeType
  535. * @param {boolean} sequenceMode
  536. * @param {boolean} external
  537. */
  538. reinitText(mimeType, sequenceMode, external) {
  539. if (!this.textEngine_) {
  540. this.textEngine_ = new shaka.text.TextEngine(this.textDisplayer_);
  541. if (this.textEngine_) {
  542. this.textEngine_.setModifyCueCallback(this.config_.modifyCueCallback);
  543. }
  544. }
  545. this.textEngine_.initParser(mimeType, sequenceMode,
  546. external || this.segmentRelativeVttTiming_, this.manifestType_);
  547. }
  548. /**
  549. * @return {boolean} True if the MediaSource is in an "ended" state, or if the
  550. * object has been destroyed.
  551. */
  552. ended() {
  553. if (this.reloadingMediaSource_) {
  554. return false;
  555. }
  556. return this.mediaSource_ ? this.mediaSource_.readyState == 'ended' : true;
  557. }
  558. /**
  559. * Gets the first timestamp in buffer for the given content type.
  560. *
  561. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  562. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  563. */
  564. bufferStart(contentType) {
  565. if (!Object.keys(this.sourceBuffers_).length) {
  566. return null;
  567. }
  568. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  569. if (contentType == ContentType.TEXT) {
  570. return this.textEngine_.bufferStart();
  571. }
  572. return shaka.media.TimeRangesUtils.bufferStart(
  573. this.getBuffered_(contentType));
  574. }
  575. /**
  576. * Gets the last timestamp in buffer for the given content type.
  577. *
  578. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  579. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  580. */
  581. bufferEnd(contentType) {
  582. if (!Object.keys(this.sourceBuffers_).length) {
  583. return null;
  584. }
  585. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  586. if (contentType == ContentType.TEXT) {
  587. return this.textEngine_.bufferEnd();
  588. }
  589. return shaka.media.TimeRangesUtils.bufferEnd(
  590. this.getBuffered_(contentType));
  591. }
  592. /**
  593. * Determines if the given time is inside the buffered range of the given
  594. * content type.
  595. *
  596. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  597. * @param {number} time Playhead time
  598. * @return {boolean}
  599. */
  600. isBuffered(contentType, time) {
  601. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  602. if (contentType == ContentType.TEXT) {
  603. return this.textEngine_.isBuffered(time);
  604. } else {
  605. const buffered = this.getBuffered_(contentType);
  606. return shaka.media.TimeRangesUtils.isBuffered(buffered, time);
  607. }
  608. }
  609. /**
  610. * Computes how far ahead of the given timestamp is buffered for the given
  611. * content type.
  612. *
  613. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  614. * @param {number} time
  615. * @return {number} The amount of time buffered ahead in seconds.
  616. */
  617. bufferedAheadOf(contentType, time) {
  618. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  619. if (contentType == ContentType.TEXT) {
  620. return this.textEngine_.bufferedAheadOf(time);
  621. } else {
  622. const buffered = this.getBuffered_(contentType);
  623. return shaka.media.TimeRangesUtils.bufferedAheadOf(buffered, time);
  624. }
  625. }
  626. /**
  627. * Returns info about what is currently buffered.
  628. * @return {shaka.extern.BufferedInfo}
  629. */
  630. getBufferedInfo() {
  631. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  632. const TimeRangesUtils = shaka.media.TimeRangesUtils;
  633. const info = {
  634. total: this.reloadingMediaSource_ ? [] :
  635. TimeRangesUtils.getBufferedInfo(this.video_.buffered),
  636. audio:
  637. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.AUDIO)),
  638. video:
  639. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.VIDEO)),
  640. text: [],
  641. };
  642. if (this.textEngine_) {
  643. const start = this.textEngine_.bufferStart();
  644. const end = this.textEngine_.bufferEnd();
  645. if (start != null && end != null) {
  646. info.text.push({start: start, end: end});
  647. }
  648. }
  649. return info;
  650. }
  651. /**
  652. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  653. * @return {TimeRanges} The buffered ranges for the given content type, or
  654. * null if the buffered ranges could not be obtained.
  655. * @private
  656. */
  657. getBuffered_(contentType) {
  658. if (this.reloadingMediaSource_) {
  659. return null;
  660. }
  661. try {
  662. return this.sourceBuffers_[contentType].buffered;
  663. } catch (exception) {
  664. if (contentType in this.sourceBuffers_) {
  665. // Note: previous MediaSource errors may cause access to |buffered| to
  666. // throw.
  667. shaka.log.error('failed to get buffered range for ' + contentType,
  668. exception);
  669. }
  670. return null;
  671. }
  672. }
  673. /**
  674. * Create a new closed caption parser. This will ONLY be replaced by tests as
  675. * a way to inject fake closed caption parser instances.
  676. *
  677. * @param {string} mimeType
  678. * @return {!shaka.media.IClosedCaptionParser}
  679. */
getCaptionParser(mimeType) {
  // Production implementation; tests override this method to inject
  // fake parsers.
  return new shaka.media.ClosedCaptionParser(mimeType);
}
  683. /**
  684. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  685. * @param {!BufferSource} data
  686. * @param {?shaka.media.SegmentReference} reference The segment reference
  687. * we are appending, or null for init segments
  688. * @param {!string} mimeType
  689. * @return {{timestamp: ?number, metadata: !Array.<shaka.extern.ID3Metadata>}}
  690. * @private
  691. */
getTimestampAndDispatchMetadata_(contentType, data, reference, mimeType) {
  let timestamp = null;
  let metadata = [];
  const uint8ArrayData = shaka.util.BufferUtils.toUint8(data);
  if (shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType)) {
    // Raw (containerless) formats: look for ID3 frames.  The Apple HLS
    // transportStreamTimestamp frame, when present, carries the segment's
    // start time in milliseconds.
    const frames = shaka.util.Id3Utils.getID3Frames(uint8ArrayData);
    if (frames.length && reference) {
      const metadataTimestamp = frames.find((frame) => {
        return frame.description ===
            'com.apple.streaming.transportStreamTimestamp';
      });
      if (metadataTimestamp && metadataTimestamp.data) {
        timestamp = Math.round(metadataTimestamp.data) / 1000;
      }
      // All ID3 frames are reported to the player as one metadata event
      // timed to the reference's start time.
      /** @private {shaka.extern.ID3Metadata} */
      const id3Metadata = {
        cueTime: reference.startTime,
        data: uint8ArrayData,
        frames: frames,
        dts: reference.startTime,
        pts: reference.startTime,
      };
      this.playerInterface_.onMetadata(
          [id3Metadata], /* offset= */ 0, reference.endTime);
    }
  } else if (mimeType.includes('/mp4') &&
      reference && reference.timestampOffset == 0 &&
      reference.initSegmentReference &&
      reference.initSegmentReference.timescale) {
    // MP4 media segments: derive the start time from the first 'tfdt'
    // box (baseMediaDecodeTime / timescale).  Only attempted when no
    // timestamp offset has been applied and the init segment provided a
    // timescale.
    const timescale = reference.initSegmentReference.timescale;
    if (!isNaN(timescale)) {
      const Mp4Parser = shaka.util.Mp4Parser;
      let startTime = 0;
      let parsedMedia = false;
      new Mp4Parser()
          .box('moof', Mp4Parser.children)
          .box('traf', Mp4Parser.children)
          .fullBox('tfdt', (box) => {
            goog.asserts.assert(
                box.version == 0 || box.version == 1,
                'TFDT version can only be 0 or 1');
            const parsed = shaka.util.Mp4BoxParsers.parseTFDTInaccurate(
                box.reader, box.version);
            startTime = parsed.baseMediaDecodeTime / timescale;
            parsedMedia = true;
            // Only the first tfdt is needed; stop parsing early.
            box.parser.stop();
          }).parse(data, /* partialOkay= */ true);
      if (parsedMedia) {
        timestamp = startTime;
      }
    }
  } else if (!mimeType.includes('/mp4') && !mimeType.includes('/webm') &&
      shaka.util.TsParser.probe(uint8ArrayData)) {
    // MPEG-2 TS: reuse a single TsParser instance across segments,
    // clearing its previous data each time.
    if (!this.tsParser_) {
      this.tsParser_ = new shaka.util.TsParser();
    } else {
      this.tsParser_.clearData();
    }
    const tsParser = this.tsParser_.parse(uint8ArrayData);
    const startTime = tsParser.getStartTime(contentType);
    if (startTime != null) {
      timestamp = startTime;
    }
    metadata = tsParser.getMetadata();
  }
  // timestamp stays null when no format matched or no time was found.
  return {timestamp, metadata};
}
  759. /**
  760. * Enqueue an operation to append data to the SourceBuffer.
  761. * Start and end times are needed for TextEngine, but not for MediaSource.
  762. * Start and end times may be null for initialization segments; if present
  763. * they are relative to the presentation timeline.
  764. *
  765. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  766. * @param {!BufferSource} data
  767. * @param {?shaka.media.SegmentReference} reference The segment reference
  768. * we are appending, or null for init segments
  769. * @param {shaka.extern.Stream} stream
  770. * @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
  771. * captions
  772. * @param {boolean=} seeked True if we just seeked
  773. * @param {boolean=} adaptation True if we just automatically switched active
  774. * variant(s).
  775. * @param {boolean=} isChunkedData True if we add to the buffer from the
  776. * partial read of the segment.
  777. * @return {!Promise}
  778. */
  async appendBuffer(
      contentType, data, reference, stream, hasClosedCaptions, seeked = false,
      adaptation = false, isChunkedData = false, fromSplit = false) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    // Text content bypasses MediaSource entirely and is handled by TextEngine.
    if (contentType == ContentType.TEXT) {
      if (this.sequenceMode_) {
        // The sequence-mode offset won't be known until the first video
        // segment is appended (resolved further below for A/V content).
        const offset = await this.textSequenceModeOffset_;
        this.textEngine_.setTimestampOffset(offset);
      }
      await this.textEngine_.appendBuffer(
          data,
          reference ? reference.startTime : null,
          reference ? reference.endTime : null,
          reference ? reference.getUris()[0] : null);
      return;
    }
    // Muxed content feeding two source buffers is appended twice, once per
    // content type; fromSplit prevents infinite recursion.
    if (!fromSplit && this.needSplitMuxedContent_) {
      await this.appendBuffer(ContentType.AUDIO, data, reference, stream,
          hasClosedCaptions, seeked, adaptation, isChunkedData,
          /* fromSplit= */ true);
      await this.appendBuffer(ContentType.VIDEO, data, reference, stream,
          hasClosedCaptions, seeked, adaptation, isChunkedData,
          /* fromSplit= */ true);
      return;
    }
    if (!this.sourceBuffers_[contentType]) {
      shaka.log.warning('Attempted to restore a non-existent source buffer');
      return;
    }
    let timestampOffset = this.sourceBuffers_[contentType].timestampOffset;
    // Use the pre-transmux MIME type when a transmuxer is active, since the
    // segment bytes we inspect below are still in the original container.
    let mimeType = this.sourceBufferTypes_[contentType];
    if (this.transmuxers_[contentType]) {
      mimeType = this.transmuxers_[contentType].getOriginalMimeType();
    }
    if (reference) {
      // Extract the media timestamp from the segment itself, dispatching any
      // in-band metadata found along the way.
      const {timestamp, metadata} = this.getTimestampAndDispatchMetadata_(
          contentType, data, reference, mimeType);
      if (timestamp != null) {
        // Record the first video/audio timestamps; once both are known,
        // resolve audioCompensation_ so audio appends can align with video.
        if (this.firstVideoTimestamp_ == null &&
            contentType == ContentType.VIDEO) {
          this.firstVideoTimestamp_ = timestamp;
          this.firstVideoReferenceStartTime_ = reference.startTime;
          if (this.firstAudioTimestamp_ != null) {
            let compensation = 0;
            // Only apply compensation if video and audio segment startTime
            // match, to avoid introducing sync issues.
            if (this.firstVideoReferenceStartTime_ ==
                this.firstAudioReferenceStartTime_) {
              compensation =
                  this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
            }
            this.audioCompensation_.resolve(compensation);
          }
        }
        if (this.firstAudioTimestamp_ == null &&
            contentType == ContentType.AUDIO) {
          this.firstAudioTimestamp_ = timestamp;
          this.firstAudioReferenceStartTime_ = reference.startTime;
          if (this.firstVideoTimestamp_ != null) {
            let compensation = 0;
            // Only apply compensation if video and audio segment startTime
            // match, to avoid introducing sync issues.
            if (this.firstVideoReferenceStartTime_ ==
                this.firstAudioReferenceStartTime_) {
              compensation =
                  this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
            }
            this.audioCompensation_.resolve(compensation);
          }
        }
        // If the in-segment timestamp disagrees with the reference start time
        // (beyond 1ms), or we just seeked/adapted, recompute the offset.  For
        // chunked data, only do so when the first chunk establishes it.
        const calculatedTimestampOffset = reference.startTime - timestamp;
        const timestampOffsetDifference =
            Math.abs(timestampOffset - calculatedTimestampOffset);
        if ((timestampOffsetDifference >= 0.001 || seeked || adaptation) &&
            (!isChunkedData || calculatedTimestampOffset > 0 ||
            !timestampOffset)) {
          timestampOffset = calculatedTimestampOffset;
          if (this.attemptTimestampOffsetCalculation_) {
            // abort() first so a PARSING_MEDIA_SEGMENT state from a previous
            // append doesn't block changing the timestamp offset.
            this.enqueueOperation_(
                contentType,
                () => this.abort_(contentType),
                null);
            this.enqueueOperation_(
                contentType,
                () => this.setTimestampOffset_(contentType, timestampOffset),
                null);
          }
        }
        // Timestamps can only be reliably extracted from video, not audio.
        // Packed audio formats do not have internal timestamps at all.
        // Prefer video for this when available.
        const isBestSourceBufferForTimestamps =
            contentType == ContentType.VIDEO ||
            !(ContentType.VIDEO in this.sourceBuffers_);
        if (this.sequenceMode_ && isBestSourceBufferForTimestamps) {
          this.textSequenceModeOffset_.resolve(timestampOffset);
        }
      }
      if (metadata.length) {
        this.playerInterface_.onMetadata(metadata, timestampOffset,
            reference ? reference.endTime : null);
      }
    }
    // CEA closed captions embedded in video segments are extracted here and
    // routed to TextEngine.
    if (hasClosedCaptions && contentType == ContentType.VIDEO) {
      if (!this.textEngine_) {
        this.reinitText(shaka.util.MimeUtils.CEA608_CLOSED_CAPTION_MIMETYPE,
            this.sequenceMode_, /* external= */ false);
      }
      if (!this.captionParser_) {
        const basicType = mimeType.split(';', 1)[0];
        this.captionParser_ = this.getCaptionParser(basicType);
      }
      // If it is the init segment for closed captions, initialize the closed
      // caption parser.
      if (!reference) {
        this.captionParser_.init(data, adaptation);
      } else {
        const closedCaptions = this.captionParser_.parseFrom(data);
        if (closedCaptions.length) {
          this.textEngine_.storeAndAppendClosedCaptions(
              closedCaptions,
              reference.startTime,
              reference.endTime,
              timestampOffset);
        }
      }
    }
    // Transmux (e.g. TS -> MP4) before handing the data to MSE.
    if (this.transmuxers_[contentType]) {
      data = await this.transmuxers_[contentType].transmux(
          data, stream, reference, this.mediaSource_.duration, contentType);
    }
    data = this.workAroundBrokenPlatforms_(
        data, reference ? reference.startTime : null, contentType,
        reference ? reference.getUris()[0] : null);
    if (reference && this.sequenceMode_ && contentType != ContentType.TEXT) {
      // In sequence mode, for non-text streams, if we just cleared the buffer
      // and are either performing an unbuffered seek or handling an automatic
      // adaptation, we need to set a new timestampOffset on the sourceBuffer.
      if (seeked || adaptation) {
        let timestampOffset = reference.startTime;
        // Audio and video may not be aligned, so we will compensate for audio
        // if necessary.
        if (this.manifestType_ == shaka.media.ManifestParser.HLS &&
            !this.needSplitMuxedContent_ &&
            contentType == ContentType.AUDIO &&
            this.sourceBuffers_[ContentType.VIDEO]) {
          const compensation = await this.audioCompensation_;
          // Only apply compensation if the difference is greater than 100ms
          if (Math.abs(compensation) > 0.1) {
            timestampOffset -= compensation;
          }
        }
        // The logic to call abort() before setting the timestampOffset is
        // extended during unbuffered seeks or automatic adaptations; it is
        // possible for the append state to be PARSING_MEDIA_SEGMENT from the
        // previous SourceBuffer#appendBuffer() call.
        this.enqueueOperation_(
            contentType,
            () => this.abort_(contentType),
            null);
        this.enqueueOperation_(
            contentType,
            () => this.setTimestampOffset_(contentType, timestampOffset),
            null);
      }
    }
    // Perform the actual append, capturing the buffered ranges before and
    // after (debug builds only) to detect encoding problems.
    let bufferedBefore = null;
    await this.enqueueOperation_(contentType, () => {
      if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
        bufferedBefore = this.getBuffered_(contentType);
      }
      this.append_(contentType, data, timestampOffset);
    }, reference ? reference.getUris()[0] : null);
    if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
      const bufferedAfter = this.getBuffered_(contentType);
      const newBuffered = shaka.media.TimeRangesUtils.computeAddedRange(
          bufferedBefore, bufferedAfter);
      if (newBuffered) {
        const segmentDuration = reference.endTime - reference.startTime;
        const timeAdded = newBuffered.end - newBuffered.start;
        // Check end times instead of start times. We may be overwriting a
        // buffer and only the end changes, and that would be fine.
        // Also, exclude tiny segments. Sometimes alignment segments as small
        // as 33ms are seen in Google DAI content. For such tiny segments,
        // half a segment duration would be no issue.
        const offset = Math.abs(newBuffered.end - reference.endTime);
        if (segmentDuration > 0.100 && (offset > segmentDuration / 2 ||
            Math.abs(segmentDuration - timeAdded) > 0.030)) {
          shaka.log.error('Possible encoding problem detected!',
              'Unexpected buffered range for reference', reference,
              'from URIs', reference.getUris(),
              'should be', {start: reference.startTime, end: reference.endTime},
              'but got', newBuffered);
        }
      }
    }
  }
  977. /**
  978. * Set the selected closed captions Id and language.
  979. *
  980. * @param {string} id
  981. */
  982. setSelectedClosedCaptionId(id) {
  983. const VIDEO = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  984. const videoBufferEndTime = this.bufferEnd(VIDEO) || 0;
  985. this.textEngine_.setSelectedClosedCaptionId(id, videoBufferEndTime);
  986. }
  987. /** Disable embedded closed captions. */
  988. clearSelectedClosedCaptionId() {
  989. if (this.textEngine_) {
  990. this.textEngine_.setSelectedClosedCaptionId('', 0);
  991. }
  992. }
  993. /**
  994. * Enqueue an operation to remove data from the SourceBuffer.
  995. *
  996. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  997. * @param {number} startTime relative to the start of the presentation
  998. * @param {number} endTime relative to the start of the presentation
  999. * @return {!Promise}
  1000. */
  1001. async remove(contentType, startTime, endTime) {
  1002. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1003. if (contentType == ContentType.TEXT) {
  1004. await this.textEngine_.remove(startTime, endTime);
  1005. } else {
  1006. await this.enqueueOperation_(
  1007. contentType,
  1008. () => this.remove_(contentType, startTime, endTime),
  1009. null);
  1010. if (this.needSplitMuxedContent_) {
  1011. await this.enqueueOperation_(
  1012. ContentType.AUDIO,
  1013. () => this.remove_(ContentType.AUDIO, startTime, endTime),
  1014. null);
  1015. }
  1016. }
  1017. }
  1018. /**
  1019. * Enqueue an operation to clear the SourceBuffer.
  1020. *
  1021. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1022. * @return {!Promise}
  1023. */
  1024. async clear(contentType) {
  1025. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1026. if (contentType == ContentType.TEXT) {
  1027. if (!this.textEngine_) {
  1028. return;
  1029. }
  1030. await this.textEngine_.remove(0, Infinity);
  1031. } else {
  1032. // Note that not all platforms allow clearing to Infinity.
  1033. await this.enqueueOperation_(
  1034. contentType,
  1035. () => this.remove_(contentType, 0, this.mediaSource_.duration),
  1036. null);
  1037. if (this.needSplitMuxedContent_) {
  1038. await this.enqueueOperation_(
  1039. ContentType.AUDIO,
  1040. () => this.remove_(
  1041. ContentType.AUDIO, 0, this.mediaSource_.duration),
  1042. null);
  1043. }
  1044. }
  1045. }
  1046. /**
  1047. * Fully reset the state of the caption parser owned by MediaSourceEngine.
  1048. */
  1049. resetCaptionParser() {
  1050. if (this.captionParser_) {
  1051. this.captionParser_.reset();
  1052. }
  1053. }
  1054. /**
  1055. * Enqueue an operation to flush the SourceBuffer.
  1056. * This is a workaround for what we believe is a Chromecast bug.
  1057. *
  1058. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1059. * @return {!Promise}
  1060. */
  1061. async flush(contentType) {
  1062. // Flush the pipeline. Necessary on Chromecast, even though we have removed
  1063. // everything.
  1064. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1065. if (contentType == ContentType.TEXT) {
  1066. // Nothing to flush for text.
  1067. return;
  1068. }
  1069. await this.enqueueOperation_(
  1070. contentType,
  1071. () => this.flush_(contentType),
  1072. null);
  1073. if (this.needSplitMuxedContent_) {
  1074. await this.enqueueOperation_(
  1075. ContentType.AUDIO,
  1076. () => this.flush_(ContentType.AUDIO),
  1077. null);
  1078. }
  1079. }
  1080. /**
   * Sets the timestamp offset and append window start/end for the given
  1082. *
  1083. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1084. * @param {number} timestampOffset The timestamp offset. Segments which start
  1085. * at time t will be inserted at time t + timestampOffset instead. This
  1086. * value does not affect segments which have already been inserted.
  1087. * @param {number} appendWindowStart The timestamp to set the append window
  1088. * start to. For future appends, frames/samples with timestamps less than
  1089. * this value will be dropped.
  1090. * @param {number} appendWindowEnd The timestamp to set the append window end
  1091. * to. For future appends, frames/samples with timestamps greater than this
  1092. * value will be dropped.
  1093. * @param {boolean} ignoreTimestampOffset If true, the timestampOffset will
  1094. * not be applied in this step.
  1095. * @param {string} mimeType
  1096. * @param {string} codecs
  1097. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  1098. * shaka.extern.Stream>} streamsByType
  1099. * A map of content types to streams. All streams must be supported
  1100. * according to MediaSourceEngine.isStreamSupported.
  1101. *
  1102. * @return {!Promise}
  1103. */
  async setStreamProperties(
      contentType, timestampOffset, appendWindowStart, appendWindowEnd,
      ignoreTimestampOffset, mimeType, codecs, streamsByType) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    // Text properties go straight to TextEngine; there is no SourceBuffer.
    if (contentType == ContentType.TEXT) {
      if (!ignoreTimestampOffset) {
        this.textEngine_.setTimestampOffset(timestampOffset);
      }
      this.textEngine_.setAppendWindow(appendWindowStart, appendWindowEnd);
      return;
    }
    // The operations below are enqueued in order but awaited together at the
    // end, so they run back-to-back on each buffer's queue.
    const operations = [];
    const hasChangedCodecs = await this.codecSwitchIfNecessary_(
        contentType, mimeType, codecs, streamsByType);
    if (!hasChangedCodecs) {
      // Queue an abort() to help MSE splice together overlapping segments.
      // We set appendWindowEnd when we change periods in DASH content, and the
      // period transition may result in overlap.
      //
      // An abort() also helps with MPEG2-TS. When we append a TS segment, we
      // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
      // timestamp offset. By calling abort(), we reset the state so we can
      // set it.
      operations.push(this.enqueueOperation_(
          contentType,
          () => this.abort_(contentType),
          null));
      if (this.needSplitMuxedContent_) {
        operations.push(this.enqueueOperation_(
            ContentType.AUDIO,
            () => this.abort_(ContentType.AUDIO),
            null));
      }
    }
    if (!ignoreTimestampOffset) {
      operations.push(this.enqueueOperation_(
          contentType,
          () => this.setTimestampOffset_(contentType, timestampOffset),
          null));
      if (this.needSplitMuxedContent_) {
        operations.push(this.enqueueOperation_(
            ContentType.AUDIO,
            () => this.setTimestampOffset_(
                ContentType.AUDIO, timestampOffset),
            null));
      }
    }
    // The append window is always applied, after any abort/offset updates.
    operations.push(this.enqueueOperation_(
        contentType,
        () => this.setAppendWindow_(
            contentType, appendWindowStart, appendWindowEnd),
        null));
    if (this.needSplitMuxedContent_) {
      operations.push(this.enqueueOperation_(
          ContentType.AUDIO,
          () => this.setAppendWindow_(
              ContentType.AUDIO, appendWindowStart, appendWindowEnd),
          null));
    }
    await Promise.all(operations);
  }
  1165. /**
  1166. * Adjust timestamp offset to maintain AV sync across discontinuities.
  1167. *
  1168. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1169. * @param {number} timestampOffset
  1170. * @return {!Promise}
  1171. */
  1172. async resync(contentType, timestampOffset) {
  1173. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1174. if (contentType == ContentType.TEXT) {
  1175. // This operation is for audio and video only.
  1176. return;
  1177. }
  1178. // Reset the promise in case the timestamp offset changed during
  1179. // a period/discontinuity transition.
  1180. if (contentType == ContentType.VIDEO) {
  1181. this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  1182. }
  1183. // Queue an abort() to help MSE splice together overlapping segments.
  1184. // We set appendWindowEnd when we change periods in DASH content, and the
  1185. // period transition may result in overlap.
  1186. //
  1187. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1188. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1189. // timestamp offset. By calling abort(), we reset the state so we can
  1190. // set it.
  1191. this.enqueueOperation_(
  1192. contentType,
  1193. () => this.abort_(contentType),
  1194. null);
  1195. if (this.needSplitMuxedContent_) {
  1196. this.enqueueOperation_(
  1197. ContentType.AUDIO,
  1198. () => this.abort_(ContentType.AUDIO),
  1199. null);
  1200. }
  1201. await this.enqueueOperation_(
  1202. contentType,
  1203. () => this.setTimestampOffset_(contentType, timestampOffset),
  1204. null);
  1205. if (this.needSplitMuxedContent_) {
  1206. await this.enqueueOperation_(
  1207. ContentType.AUDIO,
  1208. () => this.setTimestampOffset_(ContentType.AUDIO, timestampOffset),
  1209. null);
  1210. }
  1211. }
  1212. /**
  1213. * @param {string=} reason Valid reasons are 'network' and 'decode'.
  1214. * @return {!Promise}
  1215. * @see http://w3c.github.io/media-source/#idl-def-EndOfStreamError
  1216. */
  1217. async endOfStream(reason) {
  1218. await this.enqueueBlockingOperation_(() => {
  1219. // If endOfStream() has already been called on the media source,
  1220. // don't call it again. Also do not call if readyState is
  1221. // 'closed' (not attached to video element) since it is not a
  1222. // valid operation.
  1223. if (this.ended() || this.mediaSource_.readyState === 'closed') {
  1224. return;
  1225. }
  1226. // Tizen won't let us pass undefined, but it will let us omit the
  1227. // argument.
  1228. if (reason) {
  1229. this.mediaSource_.endOfStream(reason);
  1230. } else {
  1231. this.mediaSource_.endOfStream();
  1232. }
  1233. });
  1234. }
  1235. /**
  1236. * @param {number} duration
  1237. * @return {!Promise}
  1238. */
  1239. async setDuration(duration) {
  1240. await this.enqueueBlockingOperation_(() => {
  1241. // Reducing the duration causes the MSE removal algorithm to run, which
  1242. // triggers an 'updateend' event to fire. To handle this scenario, we
  1243. // have to insert a dummy operation into the beginning of each queue,
  1244. // which the 'updateend' handler will remove.
  1245. if (duration < this.mediaSource_.duration) {
  1246. for (const contentType in this.sourceBuffers_) {
  1247. const dummyOperation = {
  1248. start: () => {},
  1249. p: new shaka.util.PublicPromise(),
  1250. uri: null,
  1251. };
  1252. this.queues_[contentType].unshift(dummyOperation);
  1253. }
  1254. }
  1255. this.mediaSource_.duration = duration;
  1256. this.lastDuration_ = duration;
  1257. });
  1258. }
  1259. /**
  1260. * Get the current MediaSource duration.
  1261. *
  1262. * @return {number}
  1263. */
  1264. getDuration() {
  1265. return this.mediaSource_.duration;
  1266. }
  1267. /**
  1268. * Append data to the SourceBuffer.
  1269. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1270. * @param {BufferSource} data
  1271. * @param {number} timestampOffset
  1272. * @private
  1273. */
  1274. append_(contentType, data, timestampOffset) {
  1275. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1276. // Append only video data to the LCEVC Dec.
  1277. if (contentType == ContentType.VIDEO && this.lcevcDec_) {
  1278. // Append video buffers to the LCEVC Dec for parsing and storing
  1279. // of LCEVC data.
  1280. this.lcevcDec_.appendBuffer(data, timestampOffset);
  1281. }
  1282. // This will trigger an 'updateend' event.
  1283. this.sourceBuffers_[contentType].appendBuffer(data);
  1284. }
  1285. /**
  1286. * Remove data from the SourceBuffer.
  1287. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1288. * @param {number} startTime relative to the start of the presentation
  1289. * @param {number} endTime relative to the start of the presentation
  1290. * @private
  1291. */
  1292. remove_(contentType, startTime, endTime) {
  1293. if (endTime <= startTime) {
  1294. // Ignore removal of inverted or empty ranges.
  1295. // Fake 'updateend' event to resolve the operation.
  1296. this.onUpdateEnd_(contentType);
  1297. return;
  1298. }
  1299. // This will trigger an 'updateend' event.
  1300. this.sourceBuffers_[contentType].remove(startTime, endTime);
  1301. }
  1302. /**
  1303. * Call abort() on the SourceBuffer.
  1304. * This resets MSE's last_decode_timestamp on all track buffers, which should
  1305. * trigger the splicing logic for overlapping segments.
  1306. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1307. * @private
  1308. */
  1309. abort_(contentType) {
  1310. // Save the append window, which is reset on abort().
  1311. const appendWindowStart =
  1312. this.sourceBuffers_[contentType].appendWindowStart;
  1313. const appendWindowEnd = this.sourceBuffers_[contentType].appendWindowEnd;
  1314. // This will not trigger an 'updateend' event, since nothing is happening.
  1315. // This is only to reset MSE internals, not to abort an actual operation.
  1316. this.sourceBuffers_[contentType].abort();
  1317. // Restore the append window.
  1318. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  1319. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  1320. // Fake an 'updateend' event to resolve the operation.
  1321. this.onUpdateEnd_(contentType);
  1322. }
  1323. /**
  1324. * Nudge the playhead to force the media pipeline to be flushed.
  1325. * This seems to be necessary on Chromecast to get new content to replace old
  1326. * content.
  1327. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1328. * @private
  1329. */
  1330. flush_(contentType) {
  1331. // Never use flush_ if there's data. It causes a hiccup in playback.
  1332. goog.asserts.assert(
  1333. this.video_.buffered.length == 0, 'MediaSourceEngine.flush_ should ' +
  1334. 'only be used after clearing all data!');
  1335. // Seeking forces the pipeline to be flushed.
  1336. this.video_.currentTime -= 0.001;
  1337. // Fake an 'updateend' event to resolve the operation.
  1338. this.onUpdateEnd_(contentType);
  1339. }
  1340. /**
  1341. * Set the SourceBuffer's timestamp offset.
  1342. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1343. * @param {number} timestampOffset
  1344. * @private
  1345. */
  1346. setTimestampOffset_(contentType, timestampOffset) {
  1347. // Work around for
  1348. // https://github.com/shaka-project/shaka-player/issues/1281:
  1349. // TODO(https://bit.ly/2ttKiBU): follow up when this is fixed in Edge
  1350. if (timestampOffset < 0) {
  1351. // Try to prevent rounding errors in Edge from removing the first
  1352. // keyframe.
  1353. timestampOffset += 0.001;
  1354. }
  1355. this.sourceBuffers_[contentType].timestampOffset = timestampOffset;
  1356. // Fake an 'updateend' event to resolve the operation.
  1357. this.onUpdateEnd_(contentType);
  1358. }
  1359. /**
   * Set the SourceBuffer's append window start and end.
  1361. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1362. * @param {number} appendWindowStart
  1363. * @param {number} appendWindowEnd
  1364. * @private
  1365. */
  1366. setAppendWindow_(contentType, appendWindowStart, appendWindowEnd) {
  1367. // You can't set start > end, so first set start to 0, then set the new
  1368. // end, then set the new start. That way, there are no intermediate
  1369. // states which are invalid.
  1370. this.sourceBuffers_[contentType].appendWindowStart = 0;
  1371. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  1372. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  1373. // Fake an 'updateend' event to resolve the operation.
  1374. this.onUpdateEnd_(contentType);
  1375. }
  1376. /**
  1377. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1378. * @private
  1379. */
  1380. onError_(contentType) {
  1381. const operation = this.queues_[contentType][0];
  1382. goog.asserts.assert(operation, 'Spurious error event!');
  1383. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  1384. 'SourceBuffer should not be updating on error!');
  1385. const code = this.video_.error ? this.video_.error.code : 0;
  1386. operation.p.reject(new shaka.util.Error(
  1387. shaka.util.Error.Severity.CRITICAL,
  1388. shaka.util.Error.Category.MEDIA,
  1389. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_FAILED,
  1390. code, operation.uri));
  1391. // Do not pop from queue. An 'updateend' event will fire next, and to
  1392. // avoid synchronizing these two event handlers, we will allow that one to
  1393. // pop from the queue as normal. Note that because the operation has
  1394. // already been rejected, the call to resolve() in the 'updateend' handler
  1395. // will have no effect.
  1396. }
  1397. /**
  1398. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1399. * @private
  1400. */
  1401. onUpdateEnd_(contentType) {
  1402. if (this.reloadingMediaSource_) {
  1403. return;
  1404. }
  1405. const operation = this.queues_[contentType][0];
  1406. goog.asserts.assert(operation, 'Spurious updateend event!');
  1407. if (!operation) {
  1408. return;
  1409. }
  1410. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  1411. 'SourceBuffer should not be updating on updateend!');
  1412. operation.p.resolve();
  1413. this.popFromQueue_(contentType);
  1414. }
  1415. /**
  1416. * Enqueue an operation and start it if appropriate.
  1417. *
  1418. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1419. * @param {function()} start
  1420. * @param {?string} uri
  1421. * @return {!Promise}
  1422. * @private
  1423. */
  1424. enqueueOperation_(contentType, start, uri) {
  1425. this.destroyer_.ensureNotDestroyed();
  1426. const operation = {
  1427. start: start,
  1428. p: new shaka.util.PublicPromise(),
  1429. uri,
  1430. };
  1431. this.queues_[contentType].push(operation);
  1432. if (this.queues_[contentType].length == 1) {
  1433. this.startOperation_(contentType);
  1434. }
  1435. return operation.p;
  1436. }
  1437. /**
  1438. * Enqueue an operation which must block all other operations on all
  1439. * SourceBuffers.
  1440. *
  1441. * @param {function():(Promise|undefined)} run
  1442. * @return {!Promise}
  1443. * @private
  1444. */
  async enqueueBlockingOperation_(run) {
    this.destroyer_.ensureNotDestroyed();
    /** @type {!Array.<!shaka.util.PublicPromise>} */
    const allWaiters = [];
    // Enqueue a 'wait' operation onto each queue.
    // This operation signals its readiness when it starts.
    // When all wait operations are ready, the real operation takes place.
    for (const contentType in this.sourceBuffers_) {
      const ready = new shaka.util.PublicPromise();
      const operation = {
        start: () => ready.resolve(),
        p: ready,
        uri: null,
      };
      this.queues_[contentType].push(operation);
      allWaiters.push(ready);
      // If this queue was idle, the waiter is ready right away.
      if (this.queues_[contentType].length == 1) {
        operation.start();
      }
    }
    // Return a Promise to the real operation, which waits to begin until
    // there are no other in-progress operations on any SourceBuffers.
    try {
      await Promise.all(allWaiters);
    } catch (error) {
      // One of the waiters failed, which means we've been destroyed.
      goog.asserts.assert(
          this.destroyer_.destroyed(), 'Should be destroyed by now');
      // We haven't popped from the queue. Canceled waiters have been removed
      // by destroy. What's left now should just be resolved waiters. In
      // uncompiled mode, we will maintain good hygiene and make sure the
      // assert at the end of destroy passes. In compiled mode, the queues
      // are wiped in destroy.
      if (goog.DEBUG) {
        for (const contentType in this.sourceBuffers_) {
          if (this.queues_[contentType].length) {
            goog.asserts.assert(
                this.queues_[contentType].length == 1,
                'Should be at most one item in queue!');
            goog.asserts.assert(
                allWaiters.includes(this.queues_[contentType][0].p),
                'The item in queue should be one of our waiters!');
            this.queues_[contentType].shift();
          }
        }
      }
      throw error;
    }
    if (goog.DEBUG) {
      // If we did it correctly, nothing is updating.
      for (const contentType in this.sourceBuffers_) {
        goog.asserts.assert(
            this.sourceBuffers_[contentType].updating == false,
            'SourceBuffers should not be updating after a blocking op!');
      }
    }
    // Run the real operation, which can be asynchronous.
    try {
      await run();
    } catch (exception) {
      throw new shaka.util.Error(
          shaka.util.Error.Severity.CRITICAL,
          shaka.util.Error.Category.MEDIA,
          shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
          exception,
          this.video_.error || 'No error in the media element',
          null);
    } finally {
      // Unblock the queues by popping the waiter operations, whether or not
      // the real operation succeeded.
      for (const contentType in this.sourceBuffers_) {
        this.popFromQueue_(contentType);
      }
    }
  }
  1519. /**
  1520. * Pop from the front of the queue and start a new operation.
  1521. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1522. * @private
  1523. */
  1524. popFromQueue_(contentType) {
  1525. // Remove the in-progress operation, which is now complete.
  1526. this.queues_[contentType].shift();
  1527. this.startOperation_(contentType);
  1528. }
  1529. /**
  1530. * Starts the next operation in the queue.
  1531. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1532. * @private
  1533. */
  1534. startOperation_(contentType) {
  1535. // Retrieve the next operation, if any, from the queue and start it.
  1536. const next = this.queues_[contentType][0];
  1537. if (next) {
  1538. try {
  1539. next.start();
  1540. } catch (exception) {
  1541. if (exception.name == 'QuotaExceededError') {
  1542. next.p.reject(new shaka.util.Error(
  1543. shaka.util.Error.Severity.CRITICAL,
  1544. shaka.util.Error.Category.MEDIA,
  1545. shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR,
  1546. contentType));
  1547. } else {
  1548. next.p.reject(new shaka.util.Error(
  1549. shaka.util.Error.Severity.CRITICAL,
  1550. shaka.util.Error.Category.MEDIA,
  1551. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1552. exception,
  1553. this.video_.error || 'No error in the media element',
  1554. next.uri));
  1555. }
  1556. this.popFromQueue_(contentType);
  1557. }
  1558. }
  1559. }
  1560. /**
  1561. * @return {!shaka.extern.TextDisplayer}
  1562. */
  1563. getTextDisplayer() {
  1564. goog.asserts.assert(
  1565. this.textDisplayer_,
  1566. 'TextDisplayer should only be null when this is destroyed');
  1567. return this.textDisplayer_;
  1568. }
  1569. /**
  1570. * @param {!shaka.extern.TextDisplayer} textDisplayer
  1571. */
  1572. setTextDisplayer(textDisplayer) {
  1573. const oldTextDisplayer = this.textDisplayer_;
  1574. this.textDisplayer_ = textDisplayer;
  1575. if (oldTextDisplayer) {
  1576. textDisplayer.setTextVisibility(oldTextDisplayer.isTextVisible());
  1577. oldTextDisplayer.destroy();
  1578. }
  1579. if (this.textEngine_) {
  1580. this.textEngine_.setDisplayer(textDisplayer);
  1581. }
  1582. }
  1583. /**
  1584. * @param {boolean} segmentRelativeVttTiming
  1585. */
  1586. setSegmentRelativeVttTiming(segmentRelativeVttTiming) {
  1587. this.segmentRelativeVttTiming_ = segmentRelativeVttTiming;
  1588. }
  1589. /**
  1590. * Apply platform-specific transformations to this segment to work around
  1591. * issues in the platform.
  1592. *
  1593. * @param {!BufferSource} segment
  1594. * @param {?number} startTime
  1595. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1596. * @param {?string} uri
  1597. * @return {!BufferSource}
  1598. * @private
  1599. */
  1600. workAroundBrokenPlatforms_(segment, startTime, contentType, uri) {
  1601. const Platform = shaka.util.Platform;
  1602. const isInitSegment = startTime == null;
  1603. const encryptionExpected = this.expectedEncryption_[contentType];
  1604. const keySystem = this.playerInterface_.getKeySystem();
  1605. // If:
  1606. // 1. the configuration tells to insert fake encryption,
  1607. // 2. and this is an init segment,
  1608. // 3. and encryption is expected,
  1609. // 4. and the platform requires encryption in all init segments,
  1610. // 5. and the content is MP4 (mimeType == "video/mp4" or "audio/mp4"),
  1611. // then insert fake encryption metadata for init segments that lack it.
  1612. // The MP4 requirement is because we can currently only do this
  1613. // transformation on MP4 containers.
  1614. // See: https://github.com/shaka-project/shaka-player/issues/2759
  1615. if (this.config_.insertFakeEncryptionInInit &&
  1616. isInitSegment &&
  1617. encryptionExpected &&
  1618. Platform.requiresEncryptionInfoInAllInitSegments(keySystem) &&
  1619. shaka.util.MimeUtils.getContainerType(
  1620. this.sourceBufferTypes_[contentType]) == 'mp4') {
  1621. shaka.log.debug('Forcing fake encryption information in init segment.');
  1622. segment = shaka.media.ContentWorkarounds.fakeEncryption(segment, uri);
  1623. }
  1624. return segment;
  1625. }
  1626. /**
  1627. * Prepare the SourceBuffer to parse a potentially new type or codec.
  1628. *
  1629. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1630. * @param {string} mimeType
  1631. * @param {?shaka.extern.Transmuxer} transmuxer
  1632. * @private
  1633. */
  1634. change_(contentType, mimeType, transmuxer) {
  1635. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1636. if (contentType === ContentType.TEXT) {
  1637. shaka.log.debug(`Change not supported for ${contentType}`);
  1638. return;
  1639. }
  1640. shaka.log.debug(
  1641. `Change Type: ${this.sourceBufferTypes_[contentType]} -> ${mimeType}`);
  1642. if (shaka.media.Capabilities.isChangeTypeSupported()) {
  1643. if (this.transmuxers_[contentType]) {
  1644. this.transmuxers_[contentType].destroy();
  1645. delete this.transmuxers_[contentType];
  1646. }
  1647. if (transmuxer) {
  1648. this.transmuxers_[contentType] = transmuxer;
  1649. }
  1650. const type = this.addExtraFeaturesToMimeType_(mimeType);
  1651. this.sourceBuffers_[contentType].changeType(type);
  1652. this.sourceBufferTypes_[contentType] = mimeType;
  1653. } else {
  1654. shaka.log.debug('Change Type not supported');
  1655. }
  1656. // Fake an 'updateend' event to resolve the operation.
  1657. this.onUpdateEnd_(contentType);
  1658. }
  1659. /**
  1660. * Enqueue an operation to prepare the SourceBuffer to parse a potentially new
  1661. * type or codec.
  1662. *
  1663. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1664. * @param {string} mimeType
  1665. * @param {?shaka.extern.Transmuxer} transmuxer
  1666. * @return {!Promise}
  1667. */
  1668. changeType(contentType, mimeType, transmuxer) {
  1669. return this.enqueueOperation_(
  1670. contentType,
  1671. () => this.change_(contentType, mimeType, transmuxer),
  1672. null);
  1673. }
  1674. /**
  1675. * Resets the MediaSource and re-adds source buffers due to codec mismatch
  1676. *
  1677. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  1678. * shaka.extern.Stream>} streamsByType
  1679. * @private
  1680. */
  1681. async reset_(streamsByType) {
  1682. const Functional = shaka.util.Functional;
  1683. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1684. this.reloadingMediaSource_ = true;
  1685. this.needSplitMuxedContent_ = false;
  1686. const currentTime = this.video_.currentTime;
  1687. // When codec switching if the user is currently paused we don't want
  1688. // to trigger a play when switching codec.
  1689. // Playing can also end up in a paused state after a codec switch
  1690. // so we need to remember the current states.
  1691. const previousAutoPlayState = this.video_.autoplay;
  1692. const previousPausedState = this.video_.paused;
  1693. if (this.playbackHasBegun_) {
  1694. // Only set autoplay to false if the video playback has already begun.
  1695. // When a codec switch happens before playback has begun this can cause
  1696. // autoplay not to work as expected.
  1697. this.video_.autoplay = false;
  1698. }
  1699. try {
  1700. this.eventManager_.removeAll();
  1701. const cleanup = [];
  1702. for (const contentType in this.transmuxers_) {
  1703. cleanup.push(this.transmuxers_[contentType].destroy());
  1704. }
  1705. for (const contentType in this.queues_) {
  1706. // Make a local copy of the queue and the first item.
  1707. const q = this.queues_[contentType];
  1708. const inProgress = q[0];
  1709. // Drop everything else out of the original queue.
  1710. this.queues_[contentType] = q.slice(0, 1);
  1711. // We will wait for this item to complete/fail.
  1712. if (inProgress) {
  1713. cleanup.push(inProgress.p.catch(Functional.noop));
  1714. }
  1715. // The rest will be rejected silently if possible.
  1716. for (const item of q.slice(1)) {
  1717. item.p.reject(shaka.util.Destroyer.destroyedError());
  1718. }
  1719. }
  1720. for (const contentType in this.sourceBuffers_) {
  1721. const sourceBuffer = this.sourceBuffers_[contentType];
  1722. try {
  1723. this.mediaSource_.removeSourceBuffer(sourceBuffer);
  1724. } catch (e) {}
  1725. }
  1726. await Promise.all(cleanup);
  1727. this.transmuxers_ = {};
  1728. this.sourceBuffers_ = {};
  1729. const previousDuration = this.mediaSource_.duration;
  1730. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  1731. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  1732. await this.mediaSourceOpen_;
  1733. if (!isNaN(previousDuration) && previousDuration) {
  1734. this.mediaSource_.duration = previousDuration;
  1735. } else if (!isNaN(this.lastDuration_) && this.lastDuration_) {
  1736. this.mediaSource_.duration = this.lastDuration_;
  1737. }
  1738. const sourceBufferAdded = new shaka.util.PublicPromise();
  1739. const sourceBuffers =
  1740. /** @type {EventTarget} */(this.mediaSource_.sourceBuffers);
  1741. const totalOfBuffers = streamsByType.size;
  1742. let numberOfSourceBufferAdded = 0;
  1743. const onSourceBufferAdded = () => {
  1744. numberOfSourceBufferAdded++;
  1745. if (numberOfSourceBufferAdded === totalOfBuffers) {
  1746. sourceBufferAdded.resolve();
  1747. this.eventManager_.unlisten(sourceBuffers, 'addsourcebuffer',
  1748. onSourceBufferAdded);
  1749. }
  1750. };
  1751. this.eventManager_.listen(sourceBuffers, 'addsourcebuffer',
  1752. onSourceBufferAdded);
  1753. for (const contentType of streamsByType.keys()) {
  1754. const stream = streamsByType.get(contentType);
  1755. // eslint-disable-next-line no-await-in-loop
  1756. await this.initSourceBuffer_(contentType, stream, stream.codecs);
  1757. if (this.needSplitMuxedContent_) {
  1758. this.queues_[ContentType.AUDIO] = [];
  1759. this.queues_[ContentType.VIDEO] = [];
  1760. } else {
  1761. this.queues_[contentType] = [];
  1762. }
  1763. }
  1764. // Fake a seek to catchup the playhead.
  1765. this.video_.currentTime = currentTime;
  1766. await sourceBufferAdded;
  1767. } finally {
  1768. this.reloadingMediaSource_ = false;
  1769. this.destroyer_.ensureNotDestroyed();
  1770. this.eventManager_.listenOnce(this.video_, 'canplaythrough', () => {
  1771. // Don't use ensureNotDestroyed() from this event listener, because
  1772. // that results in an uncaught exception. Instead, just check the
  1773. // flag.
  1774. if (this.destroyer_.destroyed()) {
  1775. return;
  1776. }
  1777. this.video_.autoplay = previousAutoPlayState;
  1778. if (!previousPausedState) {
  1779. this.video_.play();
  1780. }
  1781. });
  1782. }
  1783. }
  1784. /**
  1785. * Resets the Media Source
  1786. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  1787. * shaka.extern.Stream>} streamsByType
  1788. * @return {!Promise}
  1789. */
  1790. reset(streamsByType) {
  1791. return this.enqueueBlockingOperation_(
  1792. () => this.reset_(streamsByType));
  1793. }
  1794. /**
  1795. * Codec switch if necessary, this will not resolve until the codec
  1796. * switch is over.
  1797. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1798. * @param {string} mimeType
  1799. * @param {string} codecs
  1800. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  1801. * shaka.extern.Stream>} streamsByType
  1802. * @return {!Promise.<boolean>} true if there was a codec switch,
  1803. * false otherwise.
  1804. * @private
  1805. */
  1806. async codecSwitchIfNecessary_(contentType, mimeType, codecs, streamsByType) {
  1807. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1808. if (contentType == ContentType.TEXT) {
  1809. return false;
  1810. }
  1811. const MimeUtils = shaka.util.MimeUtils;
  1812. const currentCodec = MimeUtils.getNormalizedCodec(
  1813. MimeUtils.getCodecs(this.sourceBufferTypes_[contentType]));
  1814. const currentBasicType = MimeUtils.getBasicType(
  1815. this.sourceBufferTypes_[contentType]);
  1816. /** @type {?shaka.extern.Transmuxer} */
  1817. let transmuxer;
  1818. let transmuxerMuxed = false;
  1819. let newMimeType = shaka.util.MimeUtils.getFullType(mimeType, codecs);
  1820. let needTransmux = this.config_.forceTransmux;
  1821. if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
  1822. (!this.sequenceMode_ &&
  1823. shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
  1824. needTransmux = true;
  1825. }
  1826. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  1827. if (needTransmux) {
  1828. const newMimeTypeWithAllCodecs =
  1829. shaka.util.MimeUtils.getFullTypeWithAllCodecs(mimeType, codecs);
  1830. const transmuxerPlugin =
  1831. TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
  1832. if (transmuxerPlugin) {
  1833. transmuxer = transmuxerPlugin();
  1834. const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1835. ContentType.AUDIO, (codecs || '').split(','));
  1836. const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1837. ContentType.VIDEO, (codecs || '').split(','));
  1838. if (audioCodec && videoCodec) {
  1839. transmuxerMuxed = true;
  1840. let codec = videoCodec;
  1841. if (contentType == ContentType.AUDIO) {
  1842. codec = audioCodec;
  1843. }
  1844. newMimeType = transmuxer.convertCodecs(contentType,
  1845. shaka.util.MimeUtils.getFullTypeWithAllCodecs(mimeType, codec));
  1846. } else {
  1847. newMimeType =
  1848. transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
  1849. }
  1850. }
  1851. }
  1852. const newCodec = MimeUtils.getNormalizedCodec(
  1853. MimeUtils.getCodecs(newMimeType));
  1854. const newBasicType = MimeUtils.getBasicType(newMimeType);
  1855. // Current/new codecs base and basic type match then no need to switch
  1856. if (currentCodec === newCodec && currentBasicType === newBasicType) {
  1857. if (this.transmuxers_[contentType] && !transmuxer) {
  1858. this.transmuxers_[contentType].destroy();
  1859. delete this.transmuxers_[contentType];
  1860. } else if (!this.transmuxers_[contentType] && transmuxer) {
  1861. this.transmuxers_[contentType] = transmuxer;
  1862. } else if (transmuxer) {
  1863. // Compare if the transmuxer is different
  1864. if (this.transmuxers_[contentType] &&
  1865. this.transmuxers_[contentType].transmux != transmuxer.transmux) {
  1866. this.transmuxers_[contentType].destroy();
  1867. delete this.transmuxers_[contentType];
  1868. this.transmuxers_[contentType] = transmuxer;
  1869. } else {
  1870. transmuxer.destroy();
  1871. }
  1872. }
  1873. return false;
  1874. }
  1875. let allowChangeType = true;
  1876. if (this.needSplitMuxedContent_ || (transmuxerMuxed &&
  1877. transmuxer && !this.transmuxers_[contentType])) {
  1878. allowChangeType = false;
  1879. }
  1880. if (allowChangeType && this.config_.codecSwitchingStrategy ===
  1881. shaka.config.CodecSwitchingStrategy.SMOOTH &&
  1882. shaka.media.Capabilities.isChangeTypeSupported()) {
  1883. await this.changeType(contentType, newMimeType, transmuxer);
  1884. } else {
  1885. if (transmuxer) {
  1886. transmuxer.destroy();
  1887. }
  1888. await this.reset(streamsByType);
  1889. }
  1890. return true;
  1891. }
  1892. /**
  1893. * Returns true if it's necessary codec switch to load the new stream.
  1894. *
  1895. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1896. * @param {shaka.extern.Stream} stream
  1897. * @param {string} refMimeType
  1898. * @param {string} refCodecs
  1899. * @return {boolean}
  1900. * @private
  1901. */
  1902. isCodecSwitchNecessary_(contentType, stream, refMimeType, refCodecs) {
  1903. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  1904. return false;
  1905. }
  1906. const MimeUtils = shaka.util.MimeUtils;
  1907. const currentCodec = MimeUtils.getNormalizedCodec(
  1908. MimeUtils.getCodecs(this.sourceBufferTypes_[contentType]));
  1909. const currentBasicType = MimeUtils.getBasicType(
  1910. this.sourceBufferTypes_[contentType]);
  1911. let newMimeType = shaka.util.MimeUtils.getFullType(refMimeType, refCodecs);
  1912. let needTransmux = this.config_.forceTransmux;
  1913. if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
  1914. (!this.sequenceMode_ &&
  1915. shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
  1916. needTransmux = true;
  1917. }
  1918. const newMimeTypeWithAllCodecs =
  1919. shaka.util.MimeUtils.getFullTypeWithAllCodecs(
  1920. refMimeType, refCodecs);
  1921. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  1922. if (needTransmux) {
  1923. const transmuxerPlugin =
  1924. TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
  1925. if (transmuxerPlugin) {
  1926. const transmuxer = transmuxerPlugin();
  1927. newMimeType =
  1928. transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
  1929. transmuxer.destroy();
  1930. }
  1931. }
  1932. const newCodec = MimeUtils.getNormalizedCodec(
  1933. MimeUtils.getCodecs(newMimeType));
  1934. const newBasicType = MimeUtils.getBasicType(newMimeType);
  1935. return currentCodec !== newCodec || currentBasicType !== newBasicType;
  1936. }
  1937. /**
  1938. * Returns true if it's necessary reset the media source to load the
  1939. * new stream.
  1940. *
  1941. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1942. * @param {shaka.extern.Stream} stream
  1943. * @param {string} mimeType
  1944. * @param {string} codecs
  1945. * @return {boolean}
  1946. */
  1947. isResetMediaSourceNecessary(contentType, stream, mimeType, codecs) {
  1948. if (!this.isCodecSwitchNecessary_(contentType, stream, mimeType, codecs)) {
  1949. return false;
  1950. }
  1951. return this.config_.codecSwitchingStrategy !==
  1952. shaka.config.CodecSwitchingStrategy.SMOOTH ||
  1953. !shaka.media.Capabilities.isChangeTypeSupported() ||
  1954. this.needSplitMuxedContent_;
  1955. }
  1956. /**
  1957. * Update LCEVC Decoder object when ready for LCEVC Decode.
  1958. * @param {?shaka.lcevc.Dec} lcevcDec
  1959. */
  1960. updateLcevcDec(lcevcDec) {
  1961. this.lcevcDec_ = lcevcDec;
  1962. }
  1963. /**
  1964. * @param {string} mimeType
  1965. * @return {string}
  1966. * @private
  1967. */
  1968. addExtraFeaturesToMimeType_(mimeType) {
  1969. const extraFeatures = this.config_.addExtraFeaturesToSourceBuffer(mimeType);
  1970. const extendedType = mimeType + extraFeatures;
  1971. shaka.log.debug('Using full mime type', extendedType);
  1972. return extendedType;
  1973. }
  1974. };
  1975. /**
  1976. * Internal reference to window.URL.createObjectURL function to avoid
  1977. * compatibility issues with other libraries and frameworks such as React
  1978. * Native. For use in unit tests only, not meant for external use.
  1979. *
  1980. * @type {function(?):string}
  1981. */
  1982. shaka.media.MediaSourceEngine.createObjectURL = window.URL.createObjectURL;
  1983. /**
  1984. * @typedef {{
  1985. * start: function(),
  1986. * p: !shaka.util.PublicPromise,
  1987. * uri: ?string
  1988. * }}
  1989. *
  1990. * @summary An operation in queue.
  1991. * @property {function()} start
  1992. * The function which starts the operation.
  1993. * @property {!shaka.util.PublicPromise} p
  1994. * The PublicPromise which is associated with this operation.
  1995. * @property {?string} uri
  1996. * A segment URI (if any) associated with this operation.
  1997. */
  1998. shaka.media.MediaSourceEngine.Operation;
  1999. /**
  2000. * @enum {string}
  2001. * @private
  2002. */
  2003. shaka.media.MediaSourceEngine.SourceBufferMode_ = {
  2004. SEQUENCE: 'sequence',
  2005. SEGMENTS: 'segments',
  2006. };
  2007. /**
  2008. * @typedef {{
  2009. * getKeySystem: function():?string,
  2010. * onMetadata: function(!Array<shaka.extern.ID3Metadata>, number, ?number)
  2011. * }}
  2012. *
  2013. * @summary Player interface
  2014. * @property {function():?string} getKeySystem
  2015. * Gets currently used key system or null if not used.
  2016. * @property {function(
  2017. * !Array<shaka.extern.ID3Metadata>, number, ?number)} onMetadata
  2018. * Callback to use when metadata arrives.
  2019. */
  2020. shaka.media.MediaSourceEngine.PlayerInterface;