Source: lib/media/media_source_engine.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.media.MediaSourceEngine');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.log');
  9. goog.require('shaka.config.CodecSwitchingStrategy');
  10. goog.require('shaka.media.Capabilities');
  11. goog.require('shaka.media.ContentWorkarounds');
  12. goog.require('shaka.media.ClosedCaptionParser');
  13. goog.require('shaka.media.IClosedCaptionParser');
  14. goog.require('shaka.media.ManifestParser');
  15. goog.require('shaka.media.SegmentReference');
  16. goog.require('shaka.media.TimeRangesUtils');
  17. goog.require('shaka.text.TextEngine');
  18. goog.require('shaka.transmuxer.TransmuxerEngine');
  19. goog.require('shaka.util.BufferUtils');
  20. goog.require('shaka.util.Destroyer');
  21. goog.require('shaka.util.Error');
  22. goog.require('shaka.util.EventManager');
  23. goog.require('shaka.util.Functional');
  24. goog.require('shaka.util.IDestroyable');
  25. goog.require('shaka.util.Id3Utils');
  26. goog.require('shaka.util.ManifestParserUtils');
  27. goog.require('shaka.util.MimeUtils');
  28. goog.require('shaka.util.Mp4BoxParsers');
  29. goog.require('shaka.util.Mp4Parser');
  30. goog.require('shaka.util.Platform');
  31. goog.require('shaka.util.PublicPromise');
  32. goog.require('shaka.util.StreamUtils');
  33. goog.require('shaka.util.TsParser');
  34. goog.require('shaka.lcevc.Dec');
  35. /**
  36. * @summary
  37. * MediaSourceEngine wraps all operations on MediaSource and SourceBuffers.
  38. * All asynchronous operations return a Promise, and all operations are
  39. * internally synchronized and serialized as needed. Operations that can
  40. * be done in parallel will be done in parallel.
  41. *
  42. * @implements {shaka.util.IDestroyable}
  43. */
  44. shaka.media.MediaSourceEngine = class {
  /**
   * @param {HTMLMediaElement} video The video element, whose source is tied to
   *   MediaSource during the lifetime of the MediaSourceEngine.
   * @param {!shaka.extern.TextDisplayer} textDisplayer
   *   The text displayer that will be used with the text engine.
   *   MediaSourceEngine takes ownership of the displayer. When
   *   MediaSourceEngine is destroyed, it will destroy the displayer.
   * @param {!function(!Array.<shaka.extern.ID3Metadata>, number, ?number)=}
   *   onMetadata Called when ID3 metadata is found in appended media.
   * @param {?shaka.lcevc.Dec} [lcevcDec] Optional - LCEVC Decoder Object
   *
   */
  constructor(video, textDisplayer, onMetadata, lcevcDec) {
    /** @private {HTMLMediaElement} */
    this.video_ = video;

    // Provided later via configure(); must be set before init() is called.
    /** @private {?shaka.extern.MediaSourceConfiguration} */
    this.config_ = null;

    /** @private {shaka.extern.TextDisplayer} */
    this.textDisplayer_ = textDisplayer;

    // One SourceBuffer per content type (audio/video), keyed by ContentType.
    /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
                           SourceBuffer>} */
    this.sourceBuffers_ = {};

    // The full MIME type each SourceBuffer was created with.
    /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
                           string>} */
    this.sourceBufferTypes_ = {};

    // Whether each content type's stream declared DRM info.
    /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
                           boolean>} */
    this.expectedEncryption_ = {};

    /** @private {shaka.text.TextEngine} */
    this.textEngine_ = null;

    /** @private {boolean} */
    this.segmentRelativeVttTiming_ = false;

    // Fallback used when the caller does not supply an onMetadata callback.
    const onMetadataNoOp = (metadata, timestampOffset, segmentEnd) => {};

    /** @private {!function(!Array.<shaka.extern.ID3Metadata>,
                     number, ?number)} */
    this.onMetadata_ = onMetadata || onMetadataNoOp;

    /** @private {?shaka.lcevc.Dec} */
    this.lcevcDec_ = lcevcDec || null;

    // Pending operations per content type; index 0 is the one in progress.
    /**
     * @private {!Object.<string,
     *                    !Array.<shaka.media.MediaSourceEngine.Operation>>}
     */
    this.queues_ = {};

    /** @private {shaka.util.EventManager} */
    this.eventManager_ = new shaka.util.EventManager();

    /** @private {!Object.<string, !shaka.extern.Transmuxer>} */
    this.transmuxers_ = {};

    /** @private {?shaka.media.IClosedCaptionParser} */
    this.captionParser_ = null;

    // Resolved by onSourceOpen_ once the MediaSource is attached and open.
    /** @private {!shaka.util.PublicPromise} */
    this.mediaSourceOpen_ = new shaka.util.PublicPromise();

    // Object URL used to attach the MediaSource; revoked on 'sourceopen'.
    /** @private {string} */
    this.url_ = '';

    /** @private {boolean} */
    this.playbackHasBegun_ = false;

    /** @private {(MediaSource|ManagedMediaSource)} */
    this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);

    /** @private {boolean} */
    this.reloadingMediaSource_ = false;

    /** @type {!shaka.util.Destroyer} */
    this.destroyer_ = new shaka.util.Destroyer(() => this.doDestroy_());

    /** @private {boolean} */
    this.sequenceMode_ = false;

    /** @private {string} */
    this.manifestType_ = shaka.media.ManifestParser.UNKNOWN;

    /** @private {boolean} */
    this.ignoreManifestTimestampsInSegmentsMode_ = false;

    /** @private {boolean} */
    this.attemptTimestampOffsetCalculation_ = false;

    // Resolved with the offset once the first video segment is appended in
    // sequence mode; text appends wait on it.
    /** @private {!shaka.util.PublicPromise.<number>} */
    this.textSequenceModeOffset_ = new shaka.util.PublicPromise();

    // True when muxed content must be fed to separate audio/video buffers.
    /** @private {boolean} */
    this.needSplitMuxedContent_ = false;

    // Toggled by ManagedMediaSource start/endstreaming events; always true
    // for plain MediaSource.
    /** @private {boolean} */
    this.streamingAllowed_ = true;

    /** @private {?number} */
    this.lastDuration_ = null;

    // Lazily created and reused across TS segment appends.
    /** @private {?shaka.util.TsParser} */
    this.tsParser_ = null;
  }
  125. /**
  126. * Create a MediaSource object, attach it to the video element, and return it.
  127. * Resolves the given promise when the MediaSource is ready.
  128. *
  129. * Replaced by unit tests.
  130. *
  131. * @param {!shaka.util.PublicPromise} p
  132. * @return {!(MediaSource|ManagedMediaSource)}
  133. */
  134. createMediaSource(p) {
  135. if (window.ManagedMediaSource) {
  136. this.video_.disableRemotePlayback = true;
  137. const mediaSource = new ManagedMediaSource();
  138. this.eventManager_.listen(
  139. mediaSource, 'startstreaming', () => {
  140. this.streamingAllowed_ = true;
  141. });
  142. this.eventManager_.listen(
  143. mediaSource, 'endstreaming', () => {
  144. this.streamingAllowed_ = false;
  145. });
  146. this.eventManager_.listenOnce(
  147. mediaSource, 'sourceopen', () => this.onSourceOpen_(p));
  148. // Correctly set when playback has begun.
  149. this.eventManager_.listenOnce(this.video_, 'playing', () => {
  150. this.playbackHasBegun_ = true;
  151. });
  152. this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource);
  153. this.video_.src = this.url_;
  154. return mediaSource;
  155. } else {
  156. const mediaSource = new MediaSource();
  157. // Set up MediaSource on the video element.
  158. this.eventManager_.listenOnce(
  159. mediaSource, 'sourceopen', () => this.onSourceOpen_(p));
  160. // Correctly set when playback has begun.
  161. this.eventManager_.listenOnce(this.video_, 'playing', () => {
  162. this.playbackHasBegun_ = true;
  163. });
  164. // Store the object URL for releasing it later.
  165. this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource);
  166. this.video_.src = this.url_;
  167. return mediaSource;
  168. }
  169. }
  170. /**
  171. * @param {shaka.util.PublicPromise} p
  172. * @private
  173. */
  174. onSourceOpen_(p) {
  175. goog.asserts.assert(this.url_, 'Must have object URL');
  176. // Release the object URL that was previously created, to prevent memory
  177. // leak.
  178. // createObjectURL creates a strong reference to the MediaSource object
  179. // inside the browser. Setting the src of the video then creates another
  180. // reference within the video element. revokeObjectURL will remove the
  181. // strong reference to the MediaSource object, and allow it to be
  182. // garbage-collected later.
  183. URL.revokeObjectURL(this.url_);
  184. p.resolve();
  185. }
  186. /**
  187. * Checks if a certain type is supported.
  188. *
  189. * @param {shaka.extern.Stream} stream
  190. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  191. * @return {!Promise.<boolean>}
  192. */
  193. static async isStreamSupported(stream, contentType) {
  194. if (stream.createSegmentIndex) {
  195. await stream.createSegmentIndex();
  196. }
  197. if (!stream.segmentIndex) {
  198. return false;
  199. }
  200. if (stream.segmentIndex.isEmpty()) {
  201. return true;
  202. }
  203. const MimeUtils = shaka.util.MimeUtils;
  204. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  205. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  206. const StreamUtils = shaka.util.StreamUtils;
  207. const seenCombos = new Set();
  208. // Check each combination of mimeType and codecs within the segment index.
  209. // Unfortunately we cannot use fullMimeTypes, as we ALSO need to check the
  210. // getFullTypeWithAllCodecs (for the sake of the transmuxer) and we have no
  211. // way of going from a full mimeType to a full mimeType with all codecs.
  212. // As this function is only called in debug mode, a little inefficiency is
  213. // acceptable.
  214. for (const ref of stream.segmentIndex) {
  215. const mimeType = ref.mimeType || stream.mimeType || '';
  216. let codecs = ref.codecs || stream.codecs || '';
  217. // Don't check the same combination of mimetype + codecs twice.
  218. const combo = mimeType + ':' + codecs;
  219. if (seenCombos.has(combo)) {
  220. continue;
  221. }
  222. seenCombos.add(combo);
  223. if (contentType == ContentType.TEXT) {
  224. const fullMimeType = MimeUtils.getFullType(mimeType, codecs);
  225. if (!shaka.text.TextEngine.isTypeSupported(fullMimeType)) {
  226. return false;
  227. }
  228. } else {
  229. if (contentType == ContentType.VIDEO) {
  230. codecs = StreamUtils.getCorrectVideoCodecs(codecs);
  231. } else if (contentType == ContentType.AUDIO) {
  232. codecs = StreamUtils.getCorrectAudioCodecs(codecs, mimeType);
  233. }
  234. const extendedMimeType = MimeUtils.getExtendedType(
  235. stream, mimeType, codecs);
  236. const fullMimeType = MimeUtils.getFullTypeWithAllCodecs(
  237. mimeType, codecs);
  238. if (!shaka.media.Capabilities.isTypeSupported(extendedMimeType) &&
  239. !TransmuxerEngine.isSupported(fullMimeType, stream.type)) {
  240. return false;
  241. }
  242. }
  243. }
  244. return true;
  245. }
  246. /**
  247. * Returns a map of MediaSource support for well-known types.
  248. *
  249. * @return {!Object.<string, boolean>}
  250. */
  251. static probeSupport() {
  252. const testMimeTypes = [
  253. // MP4 types
  254. 'video/mp4; codecs="avc1.42E01E"',
  255. 'video/mp4; codecs="avc3.42E01E"',
  256. 'video/mp4; codecs="hev1.1.6.L93.90"',
  257. 'video/mp4; codecs="hvc1.1.6.L93.90"',
  258. 'video/mp4; codecs="hev1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  259. 'video/mp4; codecs="hvc1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  260. 'video/mp4; codecs="vp9"',
  261. 'video/mp4; codecs="vp09.00.10.08"',
  262. 'video/mp4; codecs="av01.0.01M.08"',
  263. 'video/mp4; codecs="dvh1.20.01"',
  264. 'audio/mp4; codecs="mp4a.40.2"',
  265. 'audio/mp4; codecs="ac-3"',
  266. 'audio/mp4; codecs="ec-3"',
  267. 'audio/mp4; codecs="ac-4"',
  268. 'audio/mp4; codecs="opus"',
  269. 'audio/mp4; codecs="flac"',
  270. 'audio/mp4; codecs="dtsc"', // DTS Digital Surround
  271. 'audio/mp4; codecs="dtse"', // DTS Express
  272. 'audio/mp4; codecs="dtsx"', // DTS:X
  273. // WebM types
  274. 'video/webm; codecs="vp8"',
  275. 'video/webm; codecs="vp9"',
  276. 'video/webm; codecs="vp09.00.10.08"',
  277. 'audio/webm; codecs="vorbis"',
  278. 'audio/webm; codecs="opus"',
  279. // MPEG2 TS types (video/ is also used for audio: https://bit.ly/TsMse)
  280. 'video/mp2t; codecs="avc1.42E01E"',
  281. 'video/mp2t; codecs="avc3.42E01E"',
  282. 'video/mp2t; codecs="hvc1.1.6.L93.90"',
  283. 'video/mp2t; codecs="mp4a.40.2"',
  284. 'video/mp2t; codecs="ac-3"',
  285. 'video/mp2t; codecs="ec-3"',
  286. // WebVTT types
  287. 'text/vtt',
  288. 'application/mp4; codecs="wvtt"',
  289. // TTML types
  290. 'application/ttml+xml',
  291. 'application/mp4; codecs="stpp"',
  292. // Containerless types
  293. ...shaka.util.MimeUtils.RAW_FORMATS,
  294. ];
  295. const support = {};
  296. for (const type of testMimeTypes) {
  297. if (shaka.text.TextEngine.isTypeSupported(type)) {
  298. support[type] = true;
  299. } else if (shaka.util.Platform.supportsMediaSource()) {
  300. support[type] = shaka.media.Capabilities.isTypeSupported(type) ||
  301. shaka.transmuxer.TransmuxerEngine.isSupported(type);
  302. } else {
  303. support[type] = shaka.util.Platform.supportsMediaType(type);
  304. }
  305. const basicType = type.split(';')[0];
  306. support[basicType] = support[basicType] || support[type];
  307. }
  308. return support;
  309. }
  /**
   * @override
   * Delegates to the Destroyer so that destruction runs at most once and
   * repeated calls return the same Promise.  The actual teardown happens in
   * doDestroy_().
   */
  destroy() {
    return this.destroyer_.destroy();
  }
  /**
   * Tears down the engine: drains per-type operation queues, destroys the
   * text engine/displayer and transmuxers, detaches the video element, and
   * nulls out all state.  Invoked exactly once via this.destroyer_.
   * @private
   */
  async doDestroy_() {
    const Functional = shaka.util.Functional;
    const cleanup = [];
    for (const contentType in this.queues_) {
      // Make a local copy of the queue and the first item.
      const q = this.queues_[contentType];
      const inProgress = q[0];
      // Drop everything else out of the original queue.
      this.queues_[contentType] = q.slice(0, 1);
      // We will wait for this item to complete/fail.
      if (inProgress) {
        // Ignore the outcome; we only need to know it settled.
        cleanup.push(inProgress.p.catch(Functional.noop));
      }
      // The rest will be rejected silently if possible.
      for (const item of q.slice(1)) {
        item.p.reject(shaka.util.Destroyer.destroyedError());
      }
    }
    if (this.textEngine_) {
      cleanup.push(this.textEngine_.destroy());
    }
    if (this.textDisplayer_) {
      // We own the displayer (see constructor doc), so we destroy it here.
      cleanup.push(this.textDisplayer_.destroy());
    }
    for (const contentType in this.transmuxers_) {
      cleanup.push(this.transmuxers_[contentType].destroy());
    }
    // Wait for all in-flight work before releasing the media element.
    await Promise.all(cleanup);
    if (this.eventManager_) {
      this.eventManager_.release();
      this.eventManager_ = null;
    }
    if (this.video_) {
      // "unload" the video element.
      this.video_.removeAttribute('src');
      this.video_.load();
      this.video_ = null;
    }
    this.config_ = null;
    this.mediaSource_ = null;
    this.textEngine_ = null;
    this.textDisplayer_ = null;
    this.sourceBuffers_ = {};
    this.transmuxers_ = {};
    this.captionParser_ = null;
    if (goog.DEBUG) {
      // Every queue was truncated to its in-progress item above, and that
      // item has settled by now, so the queues must be empty.
      for (const contentType in this.queues_) {
        goog.asserts.assert(
            this.queues_[contentType].length == 0,
            contentType + ' queue should be empty after destroy!');
      }
    }
    this.queues_ = {};
    // This object is owned by Player
    this.lcevcDec_ = null;
    this.tsParser_ = null;
  }
  /**
   * @return {!Promise} Resolved when MediaSource is open and attached to the
   *   media element.  This process is actually initiated by the constructor;
   *   this simply exposes the promise resolved by onSourceOpen_().
   */
  open() {
    return this.mediaSourceOpen_;
  }
  379. /**
  380. * Initialize MediaSourceEngine.
  381. *
  382. * Note that it is not valid to call this multiple times, except to add or
  383. * reinitialize text streams.
  384. *
  385. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  386. * shaka.extern.Stream>} streamsByType
  387. * A map of content types to streams. All streams must be supported
  388. * according to MediaSourceEngine.isStreamSupported.
  389. * @param {boolean=} sequenceMode
  390. * If true, the media segments are appended to the SourceBuffer in strict
  391. * sequence.
  392. * @param {string=} manifestType
  393. * Indicates the type of the manifest.
  394. * @param {boolean=} ignoreManifestTimestampsInSegmentsMode
  395. * If true, don't adjust the timestamp offset to account for manifest
  396. * segment durations being out of sync with segment durations. In other
  397. * words, assume that there are no gaps in the segments when appending
  398. * to the SourceBuffer, even if the manifest and segment times disagree.
  399. * Indicates if the manifest has text streams.
  400. *
  401. * @return {!Promise}
  402. */
  403. async init(streamsByType, sequenceMode=false,
  404. manifestType=shaka.media.ManifestParser.UNKNOWN,
  405. ignoreManifestTimestampsInSegmentsMode=false) {
  406. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  407. await this.mediaSourceOpen_;
  408. this.sequenceMode_ = sequenceMode;
  409. this.manifestType_ = manifestType;
  410. this.ignoreManifestTimestampsInSegmentsMode_ =
  411. ignoreManifestTimestampsInSegmentsMode;
  412. this.attemptTimestampOffsetCalculation_ = !this.sequenceMode_ &&
  413. this.manifestType_ == shaka.media.ManifestParser.HLS &&
  414. !this.ignoreManifestTimestampsInSegmentsMode_;
  415. this.tsParser_ = null;
  416. for (const contentType of streamsByType.keys()) {
  417. const stream = streamsByType.get(contentType);
  418. // eslint-disable-next-line no-await-in-loop
  419. await this.initSourceBuffer_(contentType, stream, stream.codecs);
  420. if (this.needSplitMuxedContent_) {
  421. this.queues_[ContentType.AUDIO] = [];
  422. this.queues_[ContentType.VIDEO] = [];
  423. } else {
  424. this.queues_[contentType] = [];
  425. }
  426. }
  427. }
  /**
   * Initialize a specific SourceBuffer.
   *
   * May recurse once for muxed content: when both an audio and a video codec
   * are present and transmuxing is needed, it creates one buffer per type and
   * sets needSplitMuxedContent_.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {shaka.extern.Stream} stream
   * @param {string} codecs
   * @return {!Promise}
   * @private
   */
  async initSourceBuffer_(contentType, stream, codecs) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    goog.asserts.assert(
        await shaka.media.MediaSourceEngine.isStreamSupported(
            stream, contentType),
        'Type negotiation should happen before MediaSourceEngine.init!');
    let mimeType = shaka.util.MimeUtils.getFullType(
        stream.mimeType, codecs);
    if (contentType == ContentType.TEXT) {
      // Text never gets a SourceBuffer; it goes through the TextEngine.
      this.reinitText(mimeType, this.sequenceMode_, stream.external);
    } else {
      let needTransmux = this.config_.forceTransmux;
      // Transmux when MSE can't play the type directly, or for raw
      // (containerless) formats outside sequence mode.
      if (!shaka.media.Capabilities.isTypeSupported(mimeType) ||
          (!this.sequenceMode_ &&
          shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType))) {
        needTransmux = true;
      }
      const mimeTypeWithAllCodecs =
          shaka.util.MimeUtils.getFullTypeWithAllCodecs(
              stream.mimeType, codecs);
      if (needTransmux) {
        const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
            ContentType.AUDIO, (codecs || '').split(','));
        const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
            ContentType.VIDEO, (codecs || '').split(','));
        if (audioCodec && videoCodec) {
          // Muxed A/V: split into two SourceBuffers, one per type, and let
          // the recursive calls finish the setup.
          this.needSplitMuxedContent_ = true;
          await this.initSourceBuffer_(ContentType.AUDIO, stream, audioCodec);
          await this.initSourceBuffer_(ContentType.VIDEO, stream, videoCodec);
          return;
        }
        const transmuxerPlugin = shaka.transmuxer.TransmuxerEngine
            .findTransmuxer(mimeTypeWithAllCodecs);
        if (transmuxerPlugin) {
          const transmuxer = transmuxerPlugin();
          this.transmuxers_[contentType] = transmuxer;
          // From here on, the SourceBuffer is created with the converted
          // (output) mime type, not the original.
          mimeType =
              transmuxer.convertCodecs(contentType, mimeTypeWithAllCodecs);
        }
      }
      const type = this.addExtraFeaturesToMimeType_(mimeType);
      this.destroyer_.ensureNotDestroyed();
      let sourceBuffer;
      try {
        sourceBuffer = this.mediaSource_.addSourceBuffer(type);
      } catch (exception) {
        // addSourceBuffer throws if the MediaSource is not 'open' or the
        // type is rejected; surface it as a structured player error.
        throw new shaka.util.Error(
            shaka.util.Error.Severity.CRITICAL,
            shaka.util.Error.Category.MEDIA,
            shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
            exception,
            'The mediaSource_ status was ' + this.mediaSource_.readyState +
            ' expected \'open\'',
            null);
      }
      if (this.sequenceMode_) {
        sourceBuffer.mode =
            shaka.media.MediaSourceEngine.SourceBufferMode_.SEQUENCE;
      }
      this.eventManager_.listen(
          sourceBuffer, 'error',
          () => this.onError_(contentType));
      this.eventManager_.listen(
          sourceBuffer, 'updateend',
          () => this.onUpdateEnd_(contentType));
      this.sourceBuffers_[contentType] = sourceBuffer;
      this.sourceBufferTypes_[contentType] = mimeType;
      this.expectedEncryption_[contentType] = !!stream.drmInfos.length;
    }
  }
  507. /**
  508. * Called by the Player to provide an updated configuration any time it
  509. * changes. Must be called at least once before init().
  510. *
  511. * @param {shaka.extern.MediaSourceConfiguration} config
  512. */
  513. configure(config) {
  514. this.config_ = config;
  515. if (this.textEngine_) {
  516. this.textEngine_.setModifyCueCallback(config.modifyCueCallback);
  517. }
  518. }
  /**
   * Indicate if streaming is allowed by MediaSourceEngine.
   * When using plain MediaSource this always returns true; with
   * ManagedMediaSource it reflects the UA's start/endstreaming events.
   *
   * @return {boolean}
   */
  isStreamingAllowed() {
    return this.streamingAllowed_;
  }
  528. /**
  529. * Reinitialize the TextEngine for a new text type.
  530. * @param {string} mimeType
  531. * @param {boolean} sequenceMode
  532. * @param {boolean} external
  533. */
  534. reinitText(mimeType, sequenceMode, external) {
  535. if (!this.textEngine_) {
  536. this.textEngine_ = new shaka.text.TextEngine(this.textDisplayer_);
  537. if (this.textEngine_) {
  538. this.textEngine_.setModifyCueCallback(this.config_.modifyCueCallback);
  539. }
  540. }
  541. this.textEngine_.initParser(mimeType, sequenceMode,
  542. external || this.segmentRelativeVttTiming_, this.manifestType_);
  543. }
  544. /**
  545. * @return {boolean} True if the MediaSource is in an "ended" state, or if the
  546. * object has been destroyed.
  547. */
  548. ended() {
  549. if (this.reloadingMediaSource_) {
  550. return false;
  551. }
  552. return this.mediaSource_ ? this.mediaSource_.readyState == 'ended' : true;
  553. }
  554. /**
  555. * Gets the first timestamp in buffer for the given content type.
  556. *
  557. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  558. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  559. */
  560. bufferStart(contentType) {
  561. if (this.reloadingMediaSource_ ||
  562. !Object.keys(this.sourceBuffers_).length) {
  563. return null;
  564. }
  565. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  566. if (contentType == ContentType.TEXT) {
  567. return this.textEngine_.bufferStart();
  568. }
  569. return shaka.media.TimeRangesUtils.bufferStart(
  570. this.getBuffered_(contentType));
  571. }
  572. /**
  573. * Gets the last timestamp in buffer for the given content type.
  574. *
  575. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  576. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  577. */
  578. bufferEnd(contentType) {
  579. if (this.reloadingMediaSource_) {
  580. return null;
  581. }
  582. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  583. if (contentType == ContentType.TEXT) {
  584. return this.textEngine_.bufferEnd();
  585. }
  586. return shaka.media.TimeRangesUtils.bufferEnd(
  587. this.getBuffered_(contentType));
  588. }
  589. /**
  590. * Determines if the given time is inside the buffered range of the given
  591. * content type.
  592. *
  593. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  594. * @param {number} time Playhead time
  595. * @return {boolean}
  596. */
  597. isBuffered(contentType, time) {
  598. if (this.reloadingMediaSource_) {
  599. return false;
  600. }
  601. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  602. if (contentType == ContentType.TEXT) {
  603. return this.textEngine_.isBuffered(time);
  604. } else {
  605. const buffered = this.getBuffered_(contentType);
  606. return shaka.media.TimeRangesUtils.isBuffered(buffered, time);
  607. }
  608. }
  609. /**
  610. * Computes how far ahead of the given timestamp is buffered for the given
  611. * content type.
  612. *
  613. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  614. * @param {number} time
  615. * @return {number} The amount of time buffered ahead in seconds.
  616. */
  617. bufferedAheadOf(contentType, time) {
  618. if (this.reloadingMediaSource_) {
  619. return 0;
  620. }
  621. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  622. if (contentType == ContentType.TEXT) {
  623. return this.textEngine_.bufferedAheadOf(time);
  624. } else {
  625. const buffered = this.getBuffered_(contentType);
  626. return shaka.media.TimeRangesUtils.bufferedAheadOf(buffered, time);
  627. }
  628. }
  629. /**
  630. * Returns info about what is currently buffered.
  631. * @return {shaka.extern.BufferedInfo}
  632. */
  633. getBufferedInfo() {
  634. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  635. const TimeRangesUtils = shaka.media.TimeRangesUtils;
  636. const info = {
  637. total: this.reloadingMediaSource_ ? [] :
  638. TimeRangesUtils.getBufferedInfo(this.video_.buffered),
  639. audio: this.reloadingMediaSource_ ? [] :
  640. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.AUDIO)),
  641. video: this.reloadingMediaSource_ ? [] :
  642. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.VIDEO)),
  643. text: [],
  644. };
  645. if (this.textEngine_) {
  646. const start = this.textEngine_.bufferStart();
  647. const end = this.textEngine_.bufferEnd();
  648. if (start != null && end != null) {
  649. info.text.push({start: start, end: end});
  650. }
  651. }
  652. return info;
  653. }
  654. /**
  655. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  656. * @return {TimeRanges} The buffered ranges for the given content type, or
  657. * null if the buffered ranges could not be obtained.
  658. * @private
  659. */
  660. getBuffered_(contentType) {
  661. try {
  662. return this.sourceBuffers_[contentType].buffered;
  663. } catch (exception) {
  664. if (contentType in this.sourceBuffers_) {
  665. // Note: previous MediaSource errors may cause access to |buffered| to
  666. // throw.
  667. shaka.log.error('failed to get buffered range for ' + contentType,
  668. exception);
  669. }
  670. return null;
  671. }
  672. }
  /**
   * Create a new closed caption parser. This will ONLY be replaced by tests
   * as a way to inject fake closed caption parser instances.
   *
   * @param {string} mimeType
   * @return {!shaka.media.IClosedCaptionParser}
   */
  getCaptionParser(mimeType) {
    return new shaka.media.ClosedCaptionParser(mimeType);
  }
  /**
   * Sniffs the segment format to extract its first media timestamp and
   * dispatch any embedded metadata to onMetadata_.
   *
   * Three formats are recognized: raw (containerless) formats via ID3
   * frames, MP4 via the tfdt box, and MPEG-TS via TsParser.  Other data
   * yields null.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {!BufferSource} data
   * @param {?shaka.media.SegmentReference} reference The segment reference
   *   we are appending, or null for init segments
   * @param {!string} mimeType
   * @param {!number} timestampOffset
   * @return {?number} Timestamp in seconds, or null if none could be found.
   * @private
   */
  getTimestampAndDispatchMetadata_(contentType, data, reference, mimeType,
      timestampOffset) {
    let timestamp = null;
    const uint8ArrayData = shaka.util.BufferUtils.toUint8(data);
    if (shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType)) {
      // Raw formats: look for an ID3 transport-stream timestamp frame.
      const frames = shaka.util.Id3Utils.getID3Frames(uint8ArrayData);
      if (frames.length && reference) {
        const metadataTimestamp = frames.find((frame) => {
          return frame.description ===
              'com.apple.streaming.transportStreamTimestamp';
        });
        if (metadataTimestamp && metadataTimestamp.data) {
          // Frame data is in milliseconds; convert to seconds.
          timestamp = Math.round(metadataTimestamp.data) / 1000;
        }
        /** @private {shaka.extern.ID3Metadata} */
        const metadata = {
          cueTime: reference.startTime,
          data: uint8ArrayData,
          frames: frames,
          dts: reference.startTime,
          pts: reference.startTime,
        };
        this.onMetadata_([metadata], /* offset= */ 0, reference.endTime);
      }
    } else if (mimeType.includes('/mp4') &&
        reference && reference.timestampOffset == 0 &&
        reference.initSegmentReference &&
        reference.initSegmentReference.timescale) {
      // MP4: derive the start time from the tfdt baseMediaDecodeTime,
      // scaled by the timescale from the init segment.
      const timescale = reference.initSegmentReference.timescale;
      if (!isNaN(timescale)) {
        const Mp4Parser = shaka.util.Mp4Parser;
        let startTime = 0;
        let parsedMedia = false;
        new Mp4Parser()
            .box('moof', Mp4Parser.children)
            .box('traf', Mp4Parser.children)
            .fullBox('tfdt', (box) => {
              goog.asserts.assert(
                  box.version == 0 || box.version == 1,
                  'TFDT version can only be 0 or 1');
              const parsed = shaka.util.Mp4BoxParsers.parseTFDTInaccurate(
                  box.reader, box.version);
              startTime = parsed.baseMediaDecodeTime / timescale;
              parsedMedia = true;
              // The first tfdt is enough; stop parsing.
              box.parser.stop();
            }).parse(data, /* partialOkay= */ true);
        if (parsedMedia) {
          timestamp = startTime;
        }
      }
    } else if (!mimeType.includes('/mp4') && !mimeType.includes('/webm') &&
        shaka.util.TsParser.probe(uint8ArrayData)) {
      // MPEG-TS: reuse a single TsParser instance, clearing stale data
      // between segments.
      if (!this.tsParser_) {
        this.tsParser_ = new shaka.util.TsParser();
      } else {
        this.tsParser_.clearData();
      }
      const tsParser = this.tsParser_.parse(uint8ArrayData);
      const startTime = tsParser.getStartTime(contentType);
      if (startTime != null) {
        timestamp = startTime;
      }
      // TS segments can also carry metadata; forward it if present.
      const metadata = tsParser.getMetadata();
      if (metadata.length) {
        this.onMetadata_(metadata, timestampOffset,
            reference ? reference.endTime : null);
      }
    }
    return timestamp;
  }
  /**
   * Enqueue an operation to append data to the SourceBuffer.
   * Start and end times are needed for TextEngine, but not for MediaSource.
   * Start and end times may be null for initialization segments; if present
   * they are relative to the presentation timeline.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {!BufferSource} data
   * @param {?shaka.media.SegmentReference} reference The segment reference
   *   we are appending, or null for init segments
   * @param {shaka.extern.Stream} stream
   * @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
   *   captions
   * @param {boolean=} seeked True if we just seeked
   * @param {boolean=} adaptation True if we just automatically switched active
   *   variant(s).
   * @param {boolean=} isChunkedData True if we add to the buffer from the
   *   partial read of the segment.
   * @param {boolean=} fromSplit True only on the internal recursive calls used
   *   to append the same muxed data to both the audio and video
   *   SourceBuffers; prevents infinite recursion below.
   * @return {!Promise}
   */
  async appendBuffer(
      contentType, data, reference, stream, hasClosedCaptions, seeked = false,
      adaptation = false, isChunkedData = false, fromSplit = false) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;

    // Text goes to TextEngine, not to a SourceBuffer.
    if (contentType == ContentType.TEXT) {
      if (this.sequenceMode_) {
        // This won't be known until the first video segment is appended.
        const offset = await this.textSequenceModeOffset_;
        this.textEngine_.setTimestampOffset(offset);
      }
      await this.textEngine_.appendBuffer(
          data,
          reference ? reference.startTime : null,
          reference ? reference.endTime : null,
          reference ? reference.getUris()[0] : null);
      return;
    }

    // Muxed content that must be split is fed to both the audio and video
    // SourceBuffers; each recursive call takes the non-split path below.
    if (!fromSplit && this.needSplitMuxedContent_) {
      await this.appendBuffer(ContentType.AUDIO, data, reference, stream,
          hasClosedCaptions, seeked, adaptation, isChunkedData,
          /* fromSplit= */ true);
      await this.appendBuffer(ContentType.VIDEO, data, reference, stream,
          hasClosedCaptions, seeked, adaptation, isChunkedData,
          /* fromSplit= */ true);
      return;
    }

    if (!this.sourceBuffers_[contentType]) {
      shaka.log.warning('Attempted to restore a non-existent source buffer');
      return;
    }

    let timestampOffset = this.sourceBuffers_[contentType].timestampOffset;

    // When transmuxing, timestamps must be parsed from the original
    // (pre-transmux) container format.
    let mimeType = this.sourceBufferTypes_[contentType];
    if (this.transmuxers_[contentType]) {
      mimeType = this.transmuxers_[contentType].getOriginalMimeType();
    }
    if (reference) {
      const timestamp = this.getTimestampAndDispatchMetadata_(
          contentType, data, reference, mimeType, timestampOffset);
      if (timestamp != null) {
        // Derive the offset that would place this segment at the reference's
        // start time, and apply it when it differs meaningfully from the
        // current offset or we just seeked/adapted.
        const calculatedTimestampOffset = reference.startTime - timestamp;
        const timestampOffsetDifference =
            Math.abs(timestampOffset - calculatedTimestampOffset);
        if ((timestampOffsetDifference >= 0.001 || seeked || adaptation) &&
            (!isChunkedData || calculatedTimestampOffset > 0 ||
            !timestampOffset)) {
          timestampOffset = calculatedTimestampOffset;
          if (this.attemptTimestampOffsetCalculation_) {
            // abort() first so the SourceBuffer is not left in the
            // PARSING_MEDIA_SEGMENT state, which forbids changing the offset.
            this.enqueueOperation_(
                contentType,
                () => this.abort_(contentType),
                null);
            this.enqueueOperation_(
                contentType,
                () => this.setTimestampOffset_(contentType, timestampOffset),
                null);
          }
        }
        // Timestamps can only be reliably extracted from video, not audio.
        // Packed audio formats do not have internal timestamps at all.
        // Prefer video for this when available.
        const isBestSourceBufferForTimestamps =
            contentType == ContentType.VIDEO ||
            !(ContentType.VIDEO in this.sourceBuffers_);
        if (this.sequenceMode_ && isBestSourceBufferForTimestamps) {
          // Unblocks any text append that is awaiting this offset (see top of
          // this method).
          this.textSequenceModeOffset_.resolve(timestampOffset);
        }
      }
    }
    if (hasClosedCaptions && contentType == ContentType.VIDEO) {
      // Lazily create the text engine and caption parser used to extract
      // embedded CEA captions from the video stream.
      if (!this.textEngine_) {
        this.reinitText(shaka.util.MimeUtils.CEA608_CLOSED_CAPTION_MIMETYPE,
            this.sequenceMode_, /* external= */ false);
      }
      if (!this.captionParser_) {
        const basicType = mimeType.split(';', 1)[0];
        this.captionParser_ = this.getCaptionParser(basicType);
      }
      // If it is the init segment for closed captions, initialize the closed
      // caption parser.
      if (!reference) {
        this.captionParser_.init(data, adaptation);
      } else {
        const closedCaptions = this.captionParser_.parseFrom(data);
        if (closedCaptions.length) {
          this.textEngine_.storeAndAppendClosedCaptions(
              closedCaptions,
              reference.startTime,
              reference.endTime,
              timestampOffset);
        }
      }
    }

    // Convert the container format if needed (e.g. TS -> MP4).
    if (this.transmuxers_[contentType]) {
      data = await this.transmuxers_[contentType].transmux(
          data, stream, reference, this.mediaSource_.duration, contentType);
    }

    data = this.workAroundBrokenPlatforms_(
        data, reference ? reference.startTime : null, contentType,
        reference ? reference.getUris()[0] : null);

    if (reference && this.sequenceMode_ && contentType != ContentType.TEXT) {
      // In sequence mode, for non-text streams, if we just cleared the buffer
      // and are either performing an unbuffered seek or handling an automatic
      // adaptation, we need to set a new timestampOffset on the sourceBuffer.
      if (seeked || adaptation) {
        const timestampOffset = reference.startTime;
        // The logic to call abort() before setting the timestampOffset is
        // extended during unbuffered seeks or automatic adaptations; it is
        // possible for the append state to be PARSING_MEDIA_SEGMENT from the
        // previous SourceBuffer#appendBuffer() call.
        this.enqueueOperation_(
            contentType,
            () => this.abort_(contentType),
            null);
        this.enqueueOperation_(
            contentType,
            () => this.setTimestampOffset_(contentType, timestampOffset),
            null);
      }
    }

    let bufferedBefore = null;

    await this.enqueueOperation_(contentType, () => {
      // Captured for the post-append sanity check below (debug builds only).
      if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
        bufferedBefore = this.getBuffered_(contentType);
      }
      this.append_(contentType, data, timestampOffset);
    }, reference ? reference.getUris()[0] : null);

    if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
      // Debug-only check: warn when the appended range does not line up with
      // the segment reference, which usually indicates a bad encode.
      const bufferedAfter = this.getBuffered_(contentType);
      const newBuffered = shaka.media.TimeRangesUtils.computeAddedRange(
          bufferedBefore, bufferedAfter);
      if (newBuffered) {
        const segmentDuration = reference.endTime - reference.startTime;
        // Check end times instead of start times.  We may be overwriting a
        // buffer and only the end changes, and that would be fine.
        // Also, exclude tiny segments.  Sometimes alignment segments as small
        // as 33ms are seen in Google DAI content.  For such tiny segments,
        // half a segment duration would be no issue.
        const offset = Math.abs(newBuffered.end - reference.endTime);
        if (segmentDuration > 0.100 && offset > segmentDuration / 2) {
          shaka.log.error('Possible encoding problem detected!',
              'Unexpected buffered range for reference', reference,
              'from URIs', reference.getUris(),
              'should be', {start: reference.startTime, end: reference.endTime},
              'but got', newBuffered);
        }
      }
    }
  }
  931. /**
  932. * Set the selected closed captions Id and language.
  933. *
  934. * @param {string} id
  935. */
  936. setSelectedClosedCaptionId(id) {
  937. const VIDEO = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  938. const videoBufferEndTime = this.bufferEnd(VIDEO) || 0;
  939. this.textEngine_.setSelectedClosedCaptionId(id, videoBufferEndTime);
  940. }
  941. /** Disable embedded closed captions. */
  942. clearSelectedClosedCaptionId() {
  943. if (this.textEngine_) {
  944. this.textEngine_.setSelectedClosedCaptionId('', 0);
  945. }
  946. }
  947. /**
  948. * Enqueue an operation to remove data from the SourceBuffer.
  949. *
  950. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  951. * @param {number} startTime relative to the start of the presentation
  952. * @param {number} endTime relative to the start of the presentation
  953. * @return {!Promise}
  954. */
  955. async remove(contentType, startTime, endTime) {
  956. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  957. if (contentType == ContentType.TEXT) {
  958. await this.textEngine_.remove(startTime, endTime);
  959. } else {
  960. await this.enqueueOperation_(
  961. contentType,
  962. () => this.remove_(contentType, startTime, endTime),
  963. null);
  964. if (this.needSplitMuxedContent_) {
  965. await this.enqueueOperation_(
  966. ContentType.AUDIO,
  967. () => this.remove_(ContentType.AUDIO, startTime, endTime),
  968. null);
  969. }
  970. }
  971. }
  972. /**
  973. * Enqueue an operation to clear the SourceBuffer.
  974. *
  975. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  976. * @return {!Promise}
  977. */
  978. async clear(contentType) {
  979. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  980. if (contentType == ContentType.TEXT) {
  981. if (!this.textEngine_) {
  982. return;
  983. }
  984. await this.textEngine_.remove(0, Infinity);
  985. } else {
  986. // Note that not all platforms allow clearing to Infinity.
  987. await this.enqueueOperation_(
  988. contentType,
  989. () => this.remove_(contentType, 0, this.mediaSource_.duration),
  990. null);
  991. if (this.needSplitMuxedContent_) {
  992. await this.enqueueOperation_(
  993. ContentType.AUDIO,
  994. () => this.remove_(
  995. ContentType.AUDIO, 0, this.mediaSource_.duration),
  996. null);
  997. }
  998. }
  999. }
  1000. /**
  1001. * Fully reset the state of the caption parser owned by MediaSourceEngine.
  1002. */
  1003. resetCaptionParser() {
  1004. if (this.captionParser_) {
  1005. this.captionParser_.reset();
  1006. }
  1007. }
  1008. /**
  1009. * Enqueue an operation to flush the SourceBuffer.
  1010. * This is a workaround for what we believe is a Chromecast bug.
  1011. *
  1012. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1013. * @return {!Promise}
  1014. */
  1015. async flush(contentType) {
  1016. // Flush the pipeline. Necessary on Chromecast, even though we have removed
  1017. // everything.
  1018. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1019. if (contentType == ContentType.TEXT) {
  1020. // Nothing to flush for text.
  1021. return;
  1022. }
  1023. await this.enqueueOperation_(
  1024. contentType,
  1025. () => this.flush_(contentType),
  1026. null);
  1027. if (this.needSplitMuxedContent_) {
  1028. await this.enqueueOperation_(
  1029. ContentType.AUDIO,
  1030. () => this.flush_(ContentType.AUDIO),
  1031. null);
  1032. }
  1033. }
  1034. /**
  1035. * Sets the timestamp offset and append window end for the given content type.
  1036. *
  1037. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1038. * @param {number} timestampOffset The timestamp offset. Segments which start
  1039. * at time t will be inserted at time t + timestampOffset instead. This
  1040. * value does not affect segments which have already been inserted.
  1041. * @param {number} appendWindowStart The timestamp to set the append window
  1042. * start to. For future appends, frames/samples with timestamps less than
  1043. * this value will be dropped.
  1044. * @param {number} appendWindowEnd The timestamp to set the append window end
  1045. * to. For future appends, frames/samples with timestamps greater than this
  1046. * value will be dropped.
  1047. * @param {boolean} ignoreTimestampOffset If true, the timestampOffset will
  1048. * not be applied in this step.
  1049. * @param {string} mimeType
  1050. * @param {string} codecs
  1051. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  1052. * shaka.extern.Stream>} streamsByType
  1053. * A map of content types to streams. All streams must be supported
  1054. * according to MediaSourceEngine.isStreamSupported.
  1055. *
  1056. * @return {!Promise}
  1057. */
  1058. async setStreamProperties(
  1059. contentType, timestampOffset, appendWindowStart, appendWindowEnd,
  1060. ignoreTimestampOffset, mimeType, codecs, streamsByType) {
  1061. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1062. if (contentType == ContentType.TEXT) {
  1063. if (!ignoreTimestampOffset) {
  1064. this.textEngine_.setTimestampOffset(timestampOffset);
  1065. }
  1066. this.textEngine_.setAppendWindow(appendWindowStart, appendWindowEnd);
  1067. return;
  1068. }
  1069. const operations = [];
  1070. const hasChangedCodecs = await this.codecSwitchIfNecessary_(
  1071. contentType, mimeType, codecs, streamsByType);
  1072. if (!hasChangedCodecs) {
  1073. // Queue an abort() to help MSE splice together overlapping segments.
  1074. // We set appendWindowEnd when we change periods in DASH content, and the
  1075. // period transition may result in overlap.
  1076. //
  1077. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1078. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1079. // timestamp offset. By calling abort(), we reset the state so we can
  1080. // set it.
  1081. operations.push(this.enqueueOperation_(
  1082. contentType,
  1083. () => this.abort_(contentType),
  1084. null));
  1085. if (this.needSplitMuxedContent_) {
  1086. operations.push(this.enqueueOperation_(
  1087. ContentType.AUDIO,
  1088. () => this.abort_(ContentType.AUDIO),
  1089. null));
  1090. }
  1091. }
  1092. if (!ignoreTimestampOffset) {
  1093. operations.push(this.enqueueOperation_(
  1094. contentType,
  1095. () => this.setTimestampOffset_(contentType, timestampOffset),
  1096. null));
  1097. if (this.needSplitMuxedContent_) {
  1098. operations.push(this.enqueueOperation_(
  1099. ContentType.AUDIO,
  1100. () => this.setTimestampOffset_(
  1101. ContentType.AUDIO, timestampOffset),
  1102. null));
  1103. }
  1104. }
  1105. operations.push(this.enqueueOperation_(
  1106. contentType,
  1107. () => this.setAppendWindow_(
  1108. contentType, appendWindowStart, appendWindowEnd),
  1109. null));
  1110. if (this.needSplitMuxedContent_) {
  1111. operations.push(this.enqueueOperation_(
  1112. ContentType.AUDIO,
  1113. () => this.setAppendWindow_(
  1114. ContentType.AUDIO, appendWindowStart, appendWindowEnd),
  1115. null));
  1116. }
  1117. await Promise.all(operations);
  1118. }
  1119. /**
  1120. * Adjust timestamp offset to maintain AV sync across discontinuities.
  1121. *
  1122. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1123. * @param {number} timestampOffset
  1124. * @return {!Promise}
  1125. */
  1126. async resync(contentType, timestampOffset) {
  1127. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1128. if (contentType == ContentType.TEXT) {
  1129. // This operation is for audio and video only.
  1130. return;
  1131. }
  1132. // Reset the promise in case the timestamp offset changed during
  1133. // a period/discontinuity transition.
  1134. if (contentType == ContentType.VIDEO) {
  1135. this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  1136. }
  1137. // Queue an abort() to help MSE splice together overlapping segments.
  1138. // We set appendWindowEnd when we change periods in DASH content, and the
  1139. // period transition may result in overlap.
  1140. //
  1141. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1142. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1143. // timestamp offset. By calling abort(), we reset the state so we can
  1144. // set it.
  1145. this.enqueueOperation_(
  1146. contentType,
  1147. () => this.abort_(contentType),
  1148. null);
  1149. if (this.needSplitMuxedContent_) {
  1150. this.enqueueOperation_(
  1151. ContentType.AUDIO,
  1152. () => this.abort_(ContentType.AUDIO),
  1153. null);
  1154. }
  1155. await this.enqueueOperation_(
  1156. contentType,
  1157. () => this.setTimestampOffset_(contentType, timestampOffset),
  1158. null);
  1159. if (this.needSplitMuxedContent_) {
  1160. await this.enqueueOperation_(
  1161. ContentType.AUDIO,
  1162. () => this.setTimestampOffset_(ContentType.AUDIO, timestampOffset),
  1163. null);
  1164. }
  1165. }
  1166. /**
  1167. * @param {string=} reason Valid reasons are 'network' and 'decode'.
  1168. * @return {!Promise}
  1169. * @see http://w3c.github.io/media-source/#idl-def-EndOfStreamError
  1170. */
  1171. async endOfStream(reason) {
  1172. await this.enqueueBlockingOperation_(() => {
  1173. // If endOfStream() has already been called on the media source,
  1174. // don't call it again. Also do not call if readyState is
  1175. // 'closed' (not attached to video element) since it is not a
  1176. // valid operation.
  1177. if (this.ended() || this.mediaSource_.readyState === 'closed') {
  1178. return;
  1179. }
  1180. // Tizen won't let us pass undefined, but it will let us omit the
  1181. // argument.
  1182. if (reason) {
  1183. this.mediaSource_.endOfStream(reason);
  1184. } else {
  1185. this.mediaSource_.endOfStream();
  1186. }
  1187. });
  1188. }
  1189. /**
  1190. * @param {number} duration
  1191. * @return {!Promise}
  1192. */
  1193. async setDuration(duration) {
  1194. await this.enqueueBlockingOperation_(() => {
  1195. // Reducing the duration causes the MSE removal algorithm to run, which
  1196. // triggers an 'updateend' event to fire. To handle this scenario, we
  1197. // have to insert a dummy operation into the beginning of each queue,
  1198. // which the 'updateend' handler will remove.
  1199. if (duration < this.mediaSource_.duration) {
  1200. for (const contentType in this.sourceBuffers_) {
  1201. const dummyOperation = {
  1202. start: () => {},
  1203. p: new shaka.util.PublicPromise(),
  1204. uri: null,
  1205. };
  1206. this.queues_[contentType].unshift(dummyOperation);
  1207. }
  1208. }
  1209. this.mediaSource_.duration = duration;
  1210. this.lastDuration_ = duration;
  1211. });
  1212. }
  1213. /**
  1214. * Get the current MediaSource duration.
  1215. *
  1216. * @return {number}
  1217. */
  1218. getDuration() {
  1219. return this.mediaSource_.duration;
  1220. }
  1221. /**
  1222. * Append data to the SourceBuffer.
  1223. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1224. * @param {BufferSource} data
  1225. * @param {number} timestampOffset
  1226. * @private
  1227. */
  1228. append_(contentType, data, timestampOffset) {
  1229. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1230. // Append only video data to the LCEVC Dec.
  1231. if (contentType == ContentType.VIDEO && this.lcevcDec_) {
  1232. // Append video buffers to the LCEVC Dec for parsing and storing
  1233. // of LCEVC data.
  1234. this.lcevcDec_.appendBuffer(data, timestampOffset);
  1235. }
  1236. // This will trigger an 'updateend' event.
  1237. this.sourceBuffers_[contentType].appendBuffer(data);
  1238. }
  1239. /**
  1240. * Remove data from the SourceBuffer.
  1241. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1242. * @param {number} startTime relative to the start of the presentation
  1243. * @param {number} endTime relative to the start of the presentation
  1244. * @private
  1245. */
  1246. remove_(contentType, startTime, endTime) {
  1247. if (endTime <= startTime) {
  1248. // Ignore removal of inverted or empty ranges.
  1249. // Fake 'updateend' event to resolve the operation.
  1250. this.onUpdateEnd_(contentType);
  1251. return;
  1252. }
  1253. // This will trigger an 'updateend' event.
  1254. this.sourceBuffers_[contentType].remove(startTime, endTime);
  1255. }
  1256. /**
  1257. * Call abort() on the SourceBuffer.
  1258. * This resets MSE's last_decode_timestamp on all track buffers, which should
  1259. * trigger the splicing logic for overlapping segments.
  1260. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1261. * @private
  1262. */
  1263. abort_(contentType) {
  1264. // Save the append window, which is reset on abort().
  1265. const appendWindowStart =
  1266. this.sourceBuffers_[contentType].appendWindowStart;
  1267. const appendWindowEnd = this.sourceBuffers_[contentType].appendWindowEnd;
  1268. // This will not trigger an 'updateend' event, since nothing is happening.
  1269. // This is only to reset MSE internals, not to abort an actual operation.
  1270. this.sourceBuffers_[contentType].abort();
  1271. // Restore the append window.
  1272. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  1273. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  1274. // Fake an 'updateend' event to resolve the operation.
  1275. this.onUpdateEnd_(contentType);
  1276. }
  1277. /**
  1278. * Nudge the playhead to force the media pipeline to be flushed.
  1279. * This seems to be necessary on Chromecast to get new content to replace old
  1280. * content.
  1281. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1282. * @private
  1283. */
  1284. flush_(contentType) {
  1285. // Never use flush_ if there's data. It causes a hiccup in playback.
  1286. goog.asserts.assert(
  1287. this.video_.buffered.length == 0, 'MediaSourceEngine.flush_ should ' +
  1288. 'only be used after clearing all data!');
  1289. // Seeking forces the pipeline to be flushed.
  1290. this.video_.currentTime -= 0.001;
  1291. // Fake an 'updateend' event to resolve the operation.
  1292. this.onUpdateEnd_(contentType);
  1293. }
  1294. /**
  1295. * Set the SourceBuffer's timestamp offset.
  1296. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1297. * @param {number} timestampOffset
  1298. * @private
  1299. */
  1300. setTimestampOffset_(contentType, timestampOffset) {
  1301. // Work around for
  1302. // https://github.com/shaka-project/shaka-player/issues/1281:
  1303. // TODO(https://bit.ly/2ttKiBU): follow up when this is fixed in Edge
  1304. if (timestampOffset < 0) {
  1305. // Try to prevent rounding errors in Edge from removing the first
  1306. // keyframe.
  1307. timestampOffset += 0.001;
  1308. }
  1309. this.sourceBuffers_[contentType].timestampOffset = timestampOffset;
  1310. // Fake an 'updateend' event to resolve the operation.
  1311. this.onUpdateEnd_(contentType);
  1312. }
  1313. /**
  1314. * Set the SourceBuffer's append window end.
  1315. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1316. * @param {number} appendWindowStart
  1317. * @param {number} appendWindowEnd
  1318. * @private
  1319. */
  1320. setAppendWindow_(contentType, appendWindowStart, appendWindowEnd) {
  1321. // You can't set start > end, so first set start to 0, then set the new
  1322. // end, then set the new start. That way, there are no intermediate
  1323. // states which are invalid.
  1324. this.sourceBuffers_[contentType].appendWindowStart = 0;
  1325. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  1326. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  1327. // Fake an 'updateend' event to resolve the operation.
  1328. this.onUpdateEnd_(contentType);
  1329. }
  1330. /**
  1331. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1332. * @private
  1333. */
  1334. onError_(contentType) {
  1335. const operation = this.queues_[contentType][0];
  1336. goog.asserts.assert(operation, 'Spurious error event!');
  1337. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  1338. 'SourceBuffer should not be updating on error!');
  1339. const code = this.video_.error ? this.video_.error.code : 0;
  1340. operation.p.reject(new shaka.util.Error(
  1341. shaka.util.Error.Severity.CRITICAL,
  1342. shaka.util.Error.Category.MEDIA,
  1343. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_FAILED,
  1344. code, operation.uri));
  1345. // Do not pop from queue. An 'updateend' event will fire next, and to
  1346. // avoid synchronizing these two event handlers, we will allow that one to
  1347. // pop from the queue as normal. Note that because the operation has
  1348. // already been rejected, the call to resolve() in the 'updateend' handler
  1349. // will have no effect.
  1350. }
  1351. /**
  1352. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1353. * @private
  1354. */
  1355. onUpdateEnd_(contentType) {
  1356. if (this.reloadingMediaSource_) {
  1357. return;
  1358. }
  1359. const operation = this.queues_[contentType][0];
  1360. goog.asserts.assert(operation, 'Spurious updateend event!');
  1361. if (!operation) {
  1362. return;
  1363. }
  1364. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  1365. 'SourceBuffer should not be updating on updateend!');
  1366. operation.p.resolve();
  1367. this.popFromQueue_(contentType);
  1368. }
  1369. /**
  1370. * Enqueue an operation and start it if appropriate.
  1371. *
  1372. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1373. * @param {function()} start
  1374. * @param {?string} uri
  1375. * @return {!Promise}
  1376. * @private
  1377. */
  1378. enqueueOperation_(contentType, start, uri) {
  1379. this.destroyer_.ensureNotDestroyed();
  1380. const operation = {
  1381. start: start,
  1382. p: new shaka.util.PublicPromise(),
  1383. uri,
  1384. };
  1385. this.queues_[contentType].push(operation);
  1386. if (this.queues_[contentType].length == 1) {
  1387. this.startOperation_(contentType);
  1388. }
  1389. return operation.p;
  1390. }
  /**
   * Enqueue an operation which must block all other operations on all
   * SourceBuffers.
   *
   * @param {function():(Promise|undefined)} run The blocking operation; runs
   *   only once every SourceBuffer queue has drained down to its waiter.
   * @return {!Promise}
   * @private
   */
  async enqueueBlockingOperation_(run) {
    this.destroyer_.ensureNotDestroyed();

    /** @type {!Array.<!shaka.util.PublicPromise>} */
    const allWaiters = [];

    // Enqueue a 'wait' operation onto each queue.
    // This operation signals its readiness when it starts.
    // When all wait operations are ready, the real operation takes place.
    for (const contentType in this.sourceBuffers_) {
      const ready = new shaka.util.PublicPromise();
      const operation = {
        start: () => ready.resolve(),
        p: ready,
        uri: null,
      };

      this.queues_[contentType].push(operation);
      allWaiters.push(ready);

      // If the waiter is the only thing queued, nothing else is running on
      // this SourceBuffer; mark it ready right away.
      if (this.queues_[contentType].length == 1) {
        operation.start();
      }
    }

    // Return a Promise to the real operation, which waits to begin until
    // there are no other in-progress operations on any SourceBuffers.
    try {
      await Promise.all(allWaiters);
    } catch (error) {
      // One of the waiters failed, which means we've been destroyed.
      goog.asserts.assert(
          this.destroyer_.destroyed(), 'Should be destroyed by now');
      // We haven't popped from the queue.  Canceled waiters have been removed
      // by destroy.  What's left now should just be resolved waiters.  In
      // uncompiled mode, we will maintain good hygiene and make sure the
      // assert at the end of destroy passes.  In compiled mode, the queues
      // are wiped in destroy.
      if (goog.DEBUG) {
        for (const contentType in this.sourceBuffers_) {
          if (this.queues_[contentType].length) {
            goog.asserts.assert(
                this.queues_[contentType].length == 1,
                'Should be at most one item in queue!');
            goog.asserts.assert(
                allWaiters.includes(this.queues_[contentType][0].p),
                'The item in queue should be one of our waiters!');
            this.queues_[contentType].shift();
          }
        }
      }
      throw error;
    }

    if (goog.DEBUG) {
      // If we did it correctly, nothing is updating.
      for (const contentType in this.sourceBuffers_) {
        goog.asserts.assert(
            this.sourceBuffers_[contentType].updating == false,
            'SourceBuffers should not be updating after a blocking op!');
      }
    }

    // Run the real operation, which can be asynchronous.
    try {
      await run();
    } catch (exception) {
      throw new shaka.util.Error(
          shaka.util.Error.Severity.CRITICAL,
          shaka.util.Error.Category.MEDIA,
          shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
          exception,
          this.video_.error || 'No error in the media element',
          null);
    } finally {
      // Unblock the queues.  Each pop removes this operation's waiter from
      // the head of its queue and starts whatever queued up behind it.
      for (const contentType in this.sourceBuffers_) {
        this.popFromQueue_(contentType);
      }
    }
  }
  1473. /**
  1474. * Pop from the front of the queue and start a new operation.
  1475. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1476. * @private
  1477. */
  1478. popFromQueue_(contentType) {
  1479. // Remove the in-progress operation, which is now complete.
  1480. this.queues_[contentType].shift();
  1481. this.startOperation_(contentType);
  1482. }
  1483. /**
  1484. * Starts the next operation in the queue.
  1485. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1486. * @private
  1487. */
  1488. startOperation_(contentType) {
  1489. // Retrieve the next operation, if any, from the queue and start it.
  1490. const next = this.queues_[contentType][0];
  1491. if (next) {
  1492. try {
  1493. next.start();
  1494. } catch (exception) {
  1495. if (exception.name == 'QuotaExceededError') {
  1496. next.p.reject(new shaka.util.Error(
  1497. shaka.util.Error.Severity.CRITICAL,
  1498. shaka.util.Error.Category.MEDIA,
  1499. shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR,
  1500. contentType));
  1501. } else {
  1502. next.p.reject(new shaka.util.Error(
  1503. shaka.util.Error.Severity.CRITICAL,
  1504. shaka.util.Error.Category.MEDIA,
  1505. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1506. exception,
  1507. this.video_.error || 'No error in the media element',
  1508. next.uri));
  1509. }
  1510. this.popFromQueue_(contentType);
  1511. }
  1512. }
  1513. }
  1514. /**
  1515. * @return {!shaka.extern.TextDisplayer}
  1516. */
  1517. getTextDisplayer() {
  1518. goog.asserts.assert(
  1519. this.textDisplayer_,
  1520. 'TextDisplayer should only be null when this is destroyed');
  1521. return this.textDisplayer_;
  1522. }
  1523. /**
  1524. * @param {!shaka.extern.TextDisplayer} textDisplayer
  1525. */
  1526. setTextDisplayer(textDisplayer) {
  1527. const oldTextDisplayer = this.textDisplayer_;
  1528. this.textDisplayer_ = textDisplayer;
  1529. if (oldTextDisplayer) {
  1530. textDisplayer.setTextVisibility(oldTextDisplayer.isTextVisible());
  1531. oldTextDisplayer.destroy();
  1532. }
  1533. if (this.textEngine_) {
  1534. this.textEngine_.setDisplayer(textDisplayer);
  1535. }
  1536. }
  /**
   * Store the segment-relative VTT timing flag.  The flag is only recorded
   * here; it is read elsewhere in this class when text is parsed — see the
   * usages of this.segmentRelativeVttTiming_.
   *
   * @param {boolean} segmentRelativeVttTiming
   */
  setSegmentRelativeVttTiming(segmentRelativeVttTiming) {
    this.segmentRelativeVttTiming_ = segmentRelativeVttTiming;
  }
  1543. /**
  1544. * Apply platform-specific transformations to this segment to work around
  1545. * issues in the platform.
  1546. *
  1547. * @param {!BufferSource} segment
  1548. * @param {?number} startTime
  1549. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1550. * @param {?string} uri
  1551. * @return {!BufferSource}
  1552. * @private
  1553. */
  1554. workAroundBrokenPlatforms_(segment, startTime, contentType, uri) {
  1555. const isInitSegment = startTime == null;
  1556. const encryptionExpected = this.expectedEncryption_[contentType];
  1557. // If:
  1558. // 1. the configuration tells to insert fake encryption,
  1559. // 2. and this is an init segment,
  1560. // 3. and encryption is expected,
  1561. // 4. and the platform requires encryption in all init segments,
  1562. // 5. and the content is MP4 (mimeType == "video/mp4" or "audio/mp4"),
  1563. // then insert fake encryption metadata for init segments that lack it.
  1564. // The MP4 requirement is because we can currently only do this
  1565. // transformation on MP4 containers.
  1566. // See: https://github.com/shaka-project/shaka-player/issues/2759
  1567. if (this.config_.insertFakeEncryptionInInit &&
  1568. isInitSegment &&
  1569. encryptionExpected &&
  1570. shaka.util.Platform.requiresEncryptionInfoInAllInitSegments() &&
  1571. shaka.util.MimeUtils.getContainerType(
  1572. this.sourceBufferTypes_[contentType]) == 'mp4') {
  1573. shaka.log.debug('Forcing fake encryption information in init segment.');
  1574. segment = shaka.media.ContentWorkarounds.fakeEncryption(segment, uri);
  1575. }
  1576. return segment;
  1577. }
  1578. /**
  1579. * Prepare the SourceBuffer to parse a potentially new type or codec.
  1580. *
  1581. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1582. * @param {string} mimeType
  1583. * @param {?shaka.extern.Transmuxer} transmuxer
  1584. * @private
  1585. */
  1586. change_(contentType, mimeType, transmuxer) {
  1587. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1588. if (contentType === ContentType.TEXT) {
  1589. shaka.log.debug(`Change not supported for ${contentType}`);
  1590. return;
  1591. }
  1592. shaka.log.debug(
  1593. `Change Type: ${this.sourceBufferTypes_[contentType]} -> ${mimeType}`);
  1594. if (shaka.media.Capabilities.isChangeTypeSupported()) {
  1595. if (this.transmuxers_[contentType]) {
  1596. this.transmuxers_[contentType].destroy();
  1597. delete this.transmuxers_[contentType];
  1598. }
  1599. if (transmuxer) {
  1600. this.transmuxers_[contentType] = transmuxer;
  1601. }
  1602. const type = this.addExtraFeaturesToMimeType_(mimeType);
  1603. this.sourceBuffers_[contentType].changeType(type);
  1604. this.sourceBufferTypes_[contentType] = mimeType;
  1605. } else {
  1606. shaka.log.debug('Change Type not supported');
  1607. }
  1608. // Fake an 'updateend' event to resolve the operation.
  1609. this.onUpdateEnd_(contentType);
  1610. }
  1611. /**
  1612. * Enqueue an operation to prepare the SourceBuffer to parse a potentially new
  1613. * type or codec.
  1614. *
  1615. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1616. * @param {string} mimeType
  1617. * @param {?shaka.extern.Transmuxer} transmuxer
  1618. * @return {!Promise}
  1619. */
  1620. changeType(contentType, mimeType, transmuxer) {
  1621. return this.enqueueOperation_(
  1622. contentType,
  1623. () => this.change_(contentType, mimeType, transmuxer),
  1624. null);
  1625. }
  1626. /**
  1627. * Resets the MediaSource and re-adds source buffers due to codec mismatch
  1628. *
  1629. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  1630. * shaka.extern.Stream>} streamsByType
  1631. * @private
  1632. */
  1633. async reset_(streamsByType) {
  1634. const Functional = shaka.util.Functional;
  1635. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1636. this.reloadingMediaSource_ = true;
  1637. this.needSplitMuxedContent_ = false;
  1638. const currentTime = this.video_.currentTime;
  1639. // When codec switching if the user is currently paused we don't want
  1640. // to trigger a play when switching codec.
  1641. // Playing can also end up in a paused state after a codec switch
  1642. // so we need to remember the current states.
  1643. const previousAutoPlayState = this.video_.autoplay;
  1644. const previousPausedState = this.video_.paused;
  1645. if (this.playbackHasBegun_) {
  1646. // Only set autoplay to false if the video playback has already begun.
  1647. // When a codec switch happens before playback has begun this can cause
  1648. // autoplay not to work as expected.
  1649. this.video_.autoplay = false;
  1650. }
  1651. try {
  1652. this.eventManager_.removeAll();
  1653. const cleanup = [];
  1654. for (const contentType in this.transmuxers_) {
  1655. cleanup.push(this.transmuxers_[contentType].destroy());
  1656. }
  1657. for (const contentType in this.queues_) {
  1658. // Make a local copy of the queue and the first item.
  1659. const q = this.queues_[contentType];
  1660. const inProgress = q[0];
  1661. // Drop everything else out of the original queue.
  1662. this.queues_[contentType] = q.slice(0, 1);
  1663. // We will wait for this item to complete/fail.
  1664. if (inProgress) {
  1665. cleanup.push(inProgress.p.catch(Functional.noop));
  1666. }
  1667. // The rest will be rejected silently if possible.
  1668. for (const item of q.slice(1)) {
  1669. item.p.reject(shaka.util.Destroyer.destroyedError());
  1670. }
  1671. }
  1672. for (const contentType in this.sourceBuffers_) {
  1673. const sourceBuffer = this.sourceBuffers_[contentType];
  1674. try {
  1675. this.mediaSource_.removeSourceBuffer(sourceBuffer);
  1676. } catch (e) {}
  1677. }
  1678. await Promise.all(cleanup);
  1679. this.transmuxers_ = {};
  1680. this.sourceBuffers_ = {};
  1681. const previousDuration = this.mediaSource_.duration;
  1682. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  1683. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  1684. await this.mediaSourceOpen_;
  1685. if (!isNaN(previousDuration) && previousDuration) {
  1686. this.mediaSource_.duration = previousDuration;
  1687. } else if (!isNaN(this.lastDuration_) && this.lastDuration_) {
  1688. this.mediaSource_.duration = this.lastDuration_;
  1689. }
  1690. const sourceBufferAdded = new shaka.util.PublicPromise();
  1691. const sourceBuffers =
  1692. /** @type {EventTarget} */(this.mediaSource_.sourceBuffers);
  1693. const totalOfBuffers = streamsByType.size;
  1694. let numberOfSourceBufferAdded = 0;
  1695. const onSourceBufferAdded = () => {
  1696. numberOfSourceBufferAdded++;
  1697. if (numberOfSourceBufferAdded === totalOfBuffers) {
  1698. sourceBufferAdded.resolve();
  1699. this.eventManager_.unlisten(sourceBuffers, 'addsourcebuffer',
  1700. onSourceBufferAdded);
  1701. }
  1702. };
  1703. this.eventManager_.listen(sourceBuffers, 'addsourcebuffer',
  1704. onSourceBufferAdded);
  1705. for (const contentType of streamsByType.keys()) {
  1706. const stream = streamsByType.get(contentType);
  1707. // eslint-disable-next-line no-await-in-loop
  1708. await this.initSourceBuffer_(contentType, stream, stream.codecs);
  1709. if (this.needSplitMuxedContent_) {
  1710. this.queues_[ContentType.AUDIO] = [];
  1711. this.queues_[ContentType.VIDEO] = [];
  1712. } else {
  1713. this.queues_[contentType] = [];
  1714. }
  1715. }
  1716. // Fake a seek to catchup the playhead.
  1717. this.video_.currentTime = currentTime;
  1718. await sourceBufferAdded;
  1719. } finally {
  1720. this.reloadingMediaSource_ = false;
  1721. this.destroyer_.ensureNotDestroyed();
  1722. this.eventManager_.listenOnce(this.video_, 'canplaythrough', () => {
  1723. // Don't use ensureNotDestroyed() from this event listener, because
  1724. // that results in an uncaught exception. Instead, just check the
  1725. // flag.
  1726. if (this.destroyer_.destroyed()) {
  1727. return;
  1728. }
  1729. this.video_.autoplay = previousAutoPlayState;
  1730. if (!previousPausedState) {
  1731. this.video_.play();
  1732. }
  1733. });
  1734. }
  1735. }
  1736. /**
  1737. * Resets the Media Source
  1738. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  1739. * shaka.extern.Stream>} streamsByType
  1740. * @return {!Promise}
  1741. */
  1742. reset(streamsByType) {
  1743. return this.enqueueBlockingOperation_(
  1744. () => this.reset_(streamsByType));
  1745. }
  1746. /**
  1747. * Codec switch if necessary, this will not resolve until the codec
  1748. * switch is over.
  1749. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1750. * @param {string} mimeType
  1751. * @param {string} codecs
  1752. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  1753. * shaka.extern.Stream>} streamsByType
  1754. * @return {!Promise.<boolean>} true if there was a codec switch,
  1755. * false otherwise.
  1756. * @private
  1757. */
  1758. async codecSwitchIfNecessary_(contentType, mimeType, codecs, streamsByType) {
  1759. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1760. if (contentType == ContentType.TEXT) {
  1761. return false;
  1762. }
  1763. const MimeUtils = shaka.util.MimeUtils;
  1764. const currentCodec = MimeUtils.getCodecBase(
  1765. MimeUtils.getCodecs(this.sourceBufferTypes_[contentType]));
  1766. const currentBasicType = MimeUtils.getBasicType(
  1767. this.sourceBufferTypes_[contentType]);
  1768. /** @type {?shaka.extern.Transmuxer} */
  1769. let transmuxer;
  1770. let transmuxerMuxed = false;
  1771. let newMimeType = shaka.util.MimeUtils.getFullType(mimeType, codecs);
  1772. let needTransmux = this.config_.forceTransmux;
  1773. if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
  1774. (!this.sequenceMode_ &&
  1775. shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
  1776. needTransmux = true;
  1777. }
  1778. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  1779. if (needTransmux) {
  1780. const newMimeTypeWithAllCodecs =
  1781. shaka.util.MimeUtils.getFullTypeWithAllCodecs(mimeType, codecs);
  1782. const transmuxerPlugin =
  1783. TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
  1784. if (transmuxerPlugin) {
  1785. transmuxer = transmuxerPlugin();
  1786. const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1787. ContentType.AUDIO, (codecs || '').split(','));
  1788. const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1789. ContentType.VIDEO, (codecs || '').split(','));
  1790. if (audioCodec && videoCodec) {
  1791. transmuxerMuxed = true;
  1792. let codec = videoCodec;
  1793. if (contentType == ContentType.AUDIO) {
  1794. codec = audioCodec;
  1795. }
  1796. newMimeType = transmuxer.convertCodecs(contentType,
  1797. shaka.util.MimeUtils.getFullTypeWithAllCodecs(mimeType, codec));
  1798. } else {
  1799. newMimeType =
  1800. transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
  1801. }
  1802. }
  1803. }
  1804. const newCodec = MimeUtils.getCodecBase(
  1805. MimeUtils.getCodecs(newMimeType));
  1806. const newBasicType = MimeUtils.getBasicType(newMimeType);
  1807. // Current/new codecs base and basic type match then no need to switch
  1808. if (currentCodec === newCodec && currentBasicType === newBasicType) {
  1809. return false;
  1810. }
  1811. let allowChangeType = true;
  1812. if (this.needSplitMuxedContent_ || (transmuxerMuxed &&
  1813. transmuxer && !this.transmuxers_[contentType])) {
  1814. allowChangeType = false;
  1815. }
  1816. if (allowChangeType && this.config_.codecSwitchingStrategy ===
  1817. shaka.config.CodecSwitchingStrategy.SMOOTH &&
  1818. shaka.media.Capabilities.isChangeTypeSupported()) {
  1819. await this.changeType(contentType, newMimeType, transmuxer);
  1820. } else {
  1821. if (transmuxer) {
  1822. transmuxer.destroy();
  1823. }
  1824. await this.reset(streamsByType);
  1825. }
  1826. return true;
  1827. }
  1828. /**
  1829. * Returns true if it's necessary codec switch to load the new stream.
  1830. *
  1831. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1832. * @param {shaka.extern.Stream} stream
  1833. * @param {string} refMimeType
  1834. * @param {string} refCodecs
  1835. * @return {boolean}
  1836. * @private
  1837. */
  1838. isCodecSwitchNecessary_(contentType, stream, refMimeType, refCodecs) {
  1839. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  1840. return false;
  1841. }
  1842. const MimeUtils = shaka.util.MimeUtils;
  1843. const currentCodec = MimeUtils.getCodecBase(
  1844. MimeUtils.getCodecs(this.sourceBufferTypes_[contentType]));
  1845. const currentBasicType = MimeUtils.getBasicType(
  1846. this.sourceBufferTypes_[contentType]);
  1847. let newMimeType = shaka.util.MimeUtils.getFullType(refMimeType, refCodecs);
  1848. let needTransmux = this.config_.forceTransmux;
  1849. if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
  1850. (!this.sequenceMode_ &&
  1851. shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
  1852. needTransmux = true;
  1853. }
  1854. const newMimeTypeWithAllCodecs =
  1855. shaka.util.MimeUtils.getFullTypeWithAllCodecs(
  1856. refMimeType, refCodecs);
  1857. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  1858. if (needTransmux) {
  1859. const transmuxerPlugin =
  1860. TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
  1861. if (transmuxerPlugin) {
  1862. const transmuxer = transmuxerPlugin();
  1863. newMimeType =
  1864. transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
  1865. transmuxer.destroy();
  1866. }
  1867. }
  1868. const newCodec = MimeUtils.getCodecBase(
  1869. MimeUtils.getCodecs(newMimeType));
  1870. const newBasicType = MimeUtils.getBasicType(newMimeType);
  1871. return currentCodec !== newCodec || currentBasicType !== newBasicType;
  1872. }
  1873. /**
  1874. * Returns true if it's necessary reset the media source to load the
  1875. * new stream.
  1876. *
  1877. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1878. * @param {shaka.extern.Stream} stream
  1879. * @param {string} mimeType
  1880. * @param {string} codecs
  1881. * @return {boolean}
  1882. */
  1883. isResetMediaSourceNecessary(contentType, stream, mimeType, codecs) {
  1884. if (!this.isCodecSwitchNecessary_(contentType, stream, mimeType, codecs)) {
  1885. return false;
  1886. }
  1887. return this.config_.codecSwitchingStrategy !==
  1888. shaka.config.CodecSwitchingStrategy.SMOOTH ||
  1889. !shaka.media.Capabilities.isChangeTypeSupported() ||
  1890. this.needSplitMuxedContent_;
  1891. }
  1892. /**
  1893. * Update LCEVC Decoder object when ready for LCEVC Decode.
  1894. * @param {?shaka.lcevc.Dec} lcevcDec
  1895. */
  1896. updateLcevcDec(lcevcDec) {
  1897. this.lcevcDec_ = lcevcDec;
  1898. }
  1899. /**
  1900. * @param {string} mimeType
  1901. * @return {string}
  1902. * @private
  1903. */
  1904. addExtraFeaturesToMimeType_(mimeType) {
  1905. const extraFeatures = this.config_.addExtraFeaturesToSourceBuffer(mimeType);
  1906. const extendedType = mimeType + extraFeatures;
  1907. shaka.log.debug('Using full mime type', extendedType);
  1908. return extendedType;
  1909. }
  1910. };
  1911. /**
  1912. * Internal reference to window.URL.createObjectURL function to avoid
  1913. * compatibility issues with other libraries and frameworks such as React
  1914. * Native. For use in unit tests only, not meant for external use.
  1915. *
  1916. * @type {function(?):string}
  1917. */
  1918. shaka.media.MediaSourceEngine.createObjectURL = window.URL.createObjectURL;
  1919. /**
  1920. * @typedef {{
  1921. * start: function(),
  1922. * p: !shaka.util.PublicPromise,
  1923. * uri: ?string
  1924. * }}
  1925. *
  1926. * @summary An operation in queue.
  1927. * @property {function()} start
  1928. * The function which starts the operation.
  1929. * @property {!shaka.util.PublicPromise} p
  1930. * The PublicPromise which is associated with this operation.
  1931. * @property {?string} uri
  1932. * A segment URI (if any) associated with this operation.
  1933. */
  1934. shaka.media.MediaSourceEngine.Operation;
  1935. /**
  1936. * @enum {string}
  1937. * @private
  1938. */
  1939. shaka.media.MediaSourceEngine.SourceBufferMode_ = {
  1940. SEQUENCE: 'sequence',
  1941. SEGMENTS: 'segments',
  1942. };