
src/controller/buffer-controller.ts

import { Events } from '../events';
import { logger } from '../utils/logger';
import { ErrorDetails, ErrorTypes } from '../errors';
import { BufferHelper } from '../utils/buffer-helper';
import { getMediaSource } from '../utils/mediasource-helper';
import { ElementaryStreamTypes } from '../loader/fragment';
import type { TrackSet } from '../types/track';
import BufferOperationQueue from './buffer-operation-queue';
import {
  BufferOperation,
  SourceBuffers,
  SourceBufferName,
  SourceBufferListeners,
} from '../types/buffer';
import type {
  LevelUpdatedData,
  BufferAppendingData,
  MediaAttachingData,
  ManifestParsedData,
  BufferCodecsData,
  BufferEOSData,
  BufferFlushingData,
  FragParsedData,
  FragChangedData,
} from '../types/events';
import type { ComponentAPI } from '../types/component-api';
import type { ChunkMetadata } from '../types/transmuxer';
import type Hls from '../hls';
import type { LevelDetails } from '../loader/level-details';

const MediaSource = getMediaSource();
const VIDEO_CODEC_PROFILE_REPACE = /([ha]vc.)(?:\.[^.,]+)+/;

export default class BufferController implements ComponentAPI {
  // The level details used to determine duration, target-duration and live
  private details: LevelDetails | null = null;
  // cache the self generated object url to detect hijack of video tag
  private _objectUrl: string | null = null;
  // A queue of buffer operations which require the SourceBuffer to not be updating upon execution
  private operationQueue!: BufferOperationQueue;
  // References to event listeners for each SourceBuffer, so that they can be referenced for event removal
  private listeners!: SourceBufferListeners;

  private hls: Hls;

  // The number of BUFFER_CODEC events received before any sourceBuffers are created
  public bufferCodecEventsExpected: number = 0;

  // The total number of BUFFER_CODEC events received
  private _bufferCodecEventsTotal: number = 0;

  // A reference to the attached media element
  public media: HTMLMediaElement | null = null;

  // A reference to the active media source
  public mediaSource: MediaSource | null = null;

  // Last MP3 audio chunk appended
  private lastMpegAudioChunk: ChunkMetadata | null = null;

  // counters
  public appendError: number = 0;

  public tracks: TrackSet = {};
  public pendingTracks: TrackSet = {};
  public sourceBuffer!: SourceBuffers;

  constructor(hls: Hls) {
    this.hls = hls;
    this._initSourceBuffer();
    this.registerListeners();
  }

  public hasSourceTypes(): boolean {
    return (
      this.getSourceBufferTypes().length > 0 ||
      Object.keys(this.pendingTracks).length > 0
    );
  }

  public destroy() {
    this.unregisterListeners();
    this.details = null;
    this.lastMpegAudioChunk = null;
  }

  protected registerListeners() {
    const { hls } = this;
    hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
    hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.on(Events.BUFFER_RESET, this.onBufferReset, this);
    hls.on(Events.BUFFER_APPENDING, this.onBufferAppending, this);
    hls.on(Events.BUFFER_CODECS, this.onBufferCodecs, this);
    hls.on(Events.BUFFER_EOS, this.onBufferEos, this);
    hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
    hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
    hls.on(Events.FRAG_PARSED, this.onFragParsed, this);
    hls.on(Events.FRAG_CHANGED, this.onFragChanged, this);
  }

  protected unregisterListeners() {
    const { hls } = this;
    hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
    hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.off(Events.BUFFER_RESET, this.onBufferReset, this);
    hls.off(Events.BUFFER_APPENDING, this.onBufferAppending, this);
    hls.off(Events.BUFFER_CODECS, this.onBufferCodecs, this);
    hls.off(Events.BUFFER_EOS, this.onBufferEos, this);
    hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
    hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
    hls.off(Events.FRAG_PARSED, this.onFragParsed, this);
    hls.off(Events.FRAG_CHANGED, this.onFragChanged, this);
  }

  private _initSourceBuffer() {
    this.sourceBuffer = {};
    this.operationQueue = new BufferOperationQueue(this.sourceBuffer);
    this.listeners = {
      audio: [],
      video: [],
      audiovideo: [],
    };
    this.lastMpegAudioChunk = null;
  }

  protected onManifestParsed(
    event: Events.MANIFEST_PARSED,
    data: ManifestParsedData
  ) {
    // In case of alt audio, two BUFFER_CODECS events will be triggered, one per stream controller.
    // SourceBuffers will be created all at once when the expected number of tracks is reached.
    // If alt audio is not used, only one BUFFER_CODECS event will be fired from the main stream controller;
    // it will contain the expected number of source buffers, so there is no need to compute it.
    let codecEvents: number = 2;
    if ((data.audio && !data.video) || !data.altAudio || !__USE_ALT_AUDIO__) {
      codecEvents = 1;
    }
    this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = codecEvents;
    this.details = null;
    logger.log(
      `${this.bufferCodecEventsExpected} bufferCodec event(s) expected`
    );
  }

  protected onMediaAttaching(
    event: Events.MEDIA_ATTACHING,
    data: MediaAttachingData
  ) {
    const media = (this.media = data.media);
    if (media && MediaSource) {
      const ms = (this.mediaSource = new MediaSource());
      // MediaSource listeners are arrow functions with a lexical scope, and do not need to be bound
      ms.addEventListener('sourceopen', this._onMediaSourceOpen);
      ms.addEventListener('sourceended', this._onMediaSourceEnded);
      ms.addEventListener('sourceclose', this._onMediaSourceClose);
      // link video and media Source
      media.src = self.URL.createObjectURL(ms);
      // cache the locally generated object url
      this._objectUrl = media.src;
    }
  }

  protected onMediaDetaching() {
    const { media, mediaSource, _objectUrl } = this;
    if (mediaSource) {
      logger.log('[buffer-controller]: media source detaching');
      if (mediaSource.readyState === 'open') {
        try {
          // endOfStream could trigger an exception if any sourcebuffer is in an updating state;
          // we don't really care about checking sourcebuffer state here,
          // as we are detaching the MediaSource anyway.
          // Just prevent this exception from propagating.
          mediaSource.endOfStream();
        } catch (err) {
          logger.warn(
            `[buffer-controller]: onMediaDetaching: ${err.message} while calling endOfStream`
          );
        }
      }
      // Clean up the SourceBuffers by invoking onBufferReset
      this.onBufferReset();
      mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen);
      mediaSource.removeEventListener('sourceended', this._onMediaSourceEnded);
      mediaSource.removeEventListener('sourceclose', this._onMediaSourceClose);

      // Properly detach the MediaSource from the HTMLMediaElement as
      // suggested in https://github.com/w3c/media-source/issues/53.
      if (media) {
        if (_objectUrl) {
          self.URL.revokeObjectURL(_objectUrl);
        }

        // clean up video tag src only if it's our own url. some external libraries might
        // hijack the video tag and change its 'src' without destroying the Hls instance first
        if (media.src === _objectUrl) {
          media.removeAttribute('src');
          media.load();
        } else {
          logger.warn(
            '[buffer-controller]: media.src was changed by a third party - skip cleanup'
          );
        }
      }

      this.mediaSource = null;
      this.media = null;
      this._objectUrl = null;
      this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
      this.pendingTracks = {};
      this.tracks = {};
    }

    this.hls.trigger(Events.MEDIA_DETACHED, undefined);
  }

  protected onBufferReset() {
    this.getSourceBufferTypes().forEach((type) => {
      const sb = this.sourceBuffer[type];
      try {
        if (sb) {
          this.removeBufferListeners(type);
          if (this.mediaSource) {
            this.mediaSource.removeSourceBuffer(sb);
          }
          // Synchronously remove the SB from the map before the next call in order to prevent an async function from
          // accessing it
          this.sourceBuffer[type] = undefined;
        }
      } catch (err) {
        logger.warn(
          `[buffer-controller]: Failed to reset the ${type} buffer`,
          err
        );
      }
    });
    this._initSourceBuffer();
  }

  protected onBufferCodecs(
    event: Events.BUFFER_CODECS,
    data: BufferCodecsData
  ) {
    const sourceBufferCount = this.getSourceBufferTypes().length;

    Object.keys(data).forEach((trackName) => {
      if (sourceBufferCount) {
        // check if SourceBuffer codec needs to change
        const track = this.tracks[trackName];
        if (track && typeof track.buffer.changeType === 'function') {
          const { id, codec, levelCodec, container, metadata } =
            data[trackName];
          const currentCodec = (track.levelCodec || track.codec).replace(
            VIDEO_CODEC_PROFILE_REPACE,
            '$1'
          );
          const nextCodec = (levelCodec || codec).replace(
            VIDEO_CODEC_PROFILE_REPACE,
            '$1'
          );
          if (currentCodec !== nextCodec) {
            const mimeType = `${container};codecs=${levelCodec || codec}`;
            this.appendChangeType(trackName, mimeType);
            logger.log(
              `[buffer-controller]: switching codec ${currentCodec} to ${nextCodec}`
            );
            this.tracks[trackName] = {
              buffer: track.buffer,
              codec,
              container,
              levelCodec,
              metadata,
              id,
            };
          }
        }
      } else {
        // if source buffer(s) are not created yet, append buffer tracks to this.pendingTracks
        this.pendingTracks[trackName] = data[trackName];
      }
    });

    // if sourcebuffers are already created, do nothing ...
    if (sourceBufferCount) {
      return;
    }

    this.bufferCodecEventsExpected = Math.max(
      this.bufferCodecEventsExpected - 1,
      0
    );
    if (this.mediaSource && this.mediaSource.readyState === 'open') {
      this.checkPendingTracks();
    }
  }

  protected appendChangeType(type, mimeType) {
    const { operationQueue } = this;
    const operation: BufferOperation = {
      execute: () => {
        const sb = this.sourceBuffer[type];
        if (sb) {
          logger.log(
            `[buffer-controller]: changing ${type} sourceBuffer type to ${mimeType}`
          );
          sb.changeType(mimeType);
        }
        operationQueue.shiftAndExecuteNext(type);
      },
      onStart: () => {},
      onComplete: () => {},
      onError: (e) => {
        logger.warn(
          `[buffer-controller]: Failed to change ${type} SourceBuffer type`,
          e
        );
      },
    };

    operationQueue.append(operation, type);
  }

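  // Queue an append operation for incoming fragment data. Buffering stats are recorded on the chunk,
  // fragment and part, and the operation's callbacks fire BUFFER_APPENDED on success or an ERROR event on failure.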
  protected onBufferAppending(
    event: Events.BUFFER_APPENDING,
    eventData: BufferAppendingData
  ) {
    const { hls, operationQueue, tracks } = this;
    const { data, type, frag, part, chunkMeta } = eventData;
    const chunkStats = chunkMeta.buffering[type];

    const bufferAppendingStart = self.performance.now();
    chunkStats.start = bufferAppendingStart;
    const fragBuffering = frag.stats.buffering;
    const partBuffering = part ? part.stats.buffering : null;
    if (fragBuffering.start === 0) {
      fragBuffering.start = bufferAppendingStart;
    }
    if (partBuffering && partBuffering.start === 0) {
      partBuffering.start = bufferAppendingStart;
    }

    // TODO: Only update timestampOffset when audio/mpeg fragment or part is not contiguous with previously appended
    // Adjust `SourceBuffer.timestampOffset` (desired point in the timeline where the next frames should be appended)
    // in Chrome when we detect an MPEG audio container and the time delta between level PTS and `SourceBuffer.timestampOffset`
    // is greater than 100ms (this is enough to handle seek for VOD or level change for LIVE videos).
    // More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486
    const audioTrack = tracks.audio;
    let checkTimestampOffset = false;
    if (type === 'audio' && audioTrack?.container === 'audio/mpeg') {
      checkTimestampOffset =
        !this.lastMpegAudioChunk ||
        chunkMeta.id === 1 ||
        this.lastMpegAudioChunk.sn !== chunkMeta.sn;
      this.lastMpegAudioChunk = chunkMeta;
    }

    const fragStart = frag.start;
    const operation: BufferOperation = {
      execute: () => {
        chunkStats.executeStart = self.performance.now();
        if (checkTimestampOffset) {
          const sb = this.sourceBuffer[type];
          if (sb) {
            const delta = fragStart - sb.timestampOffset;
            if (Math.abs(delta) >= 0.1) {
              logger.log(
                `[buffer-controller]: Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${frag.sn})`
              );
              sb.timestampOffset = fragStart;
            }
          }
        }
        this.appendExecutor(data, type);
      },
      onStart: () => {
        // logger.debug(`[buffer-controller]: ${type} SourceBuffer updatestart`);
      },
      onComplete: () => {
        // logger.debug(`[buffer-controller]: ${type} SourceBuffer updateend`);
        const end = self.performance.now();
        chunkStats.executeEnd = chunkStats.end = end;
        if (fragBuffering.first === 0) {
          fragBuffering.first = end;
        }
        if (partBuffering && partBuffering.first === 0) {
          partBuffering.first = end;
        }

        const { sourceBuffer } = this;
        const timeRanges = {};
        for (const type in sourceBuffer) {
          timeRanges[type] = BufferHelper.getBuffered(sourceBuffer[type]);
        }
        this.appendError = 0;
        this.hls.trigger(Events.BUFFER_APPENDED, {
          type,
          frag,
          part,
          chunkMeta,
          parent: frag.type,
          timeRanges,
        });
      },
      onError: (err) => {
        // in case any error occurred while appending, put the segment back in the segments table
        logger.error(
          `[buffer-controller]: Error encountered while trying to append to the ${type} SourceBuffer`,
          err
        );
        const event = {
          type: ErrorTypes.MEDIA_ERROR,
          parent: frag.type,
          details: ErrorDetails.BUFFER_APPEND_ERROR,
          err,
          fatal: false,
        };

        if (err.code === DOMException.QUOTA_EXCEEDED_ERR) {
          // QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
          // let's stop appending any segments, and report BUFFER_FULL_ERROR error
          event.details = ErrorDetails.BUFFER_FULL_ERROR;
        } else {
          this.appendError++;
          event.details = ErrorDetails.BUFFER_APPEND_ERROR;
          /* with UHD content, we could get a loop of quota exceeded errors until
             the browser is able to evict some data from the sourcebuffer. Retrying can help recover.
          */
          if (this.appendError > hls.config.appendErrorMaxRetry) {
            logger.error(
              `[buffer-controller]: Failed ${hls.config.appendErrorMaxRetry} times to append segment in sourceBuffer`
            );
            event.fatal = true;
            hls.stopLoad();
          }
        }
        hls.trigger(Events.ERROR, event);
      },
    };
    operationQueue.append(operation, type);
  }

  protected onBufferFlushing(
    event: Events.BUFFER_FLUSHING,
    data: BufferFlushingData
  ) {
    const { operationQueue } = this;
    const flushOperation = (type: SourceBufferName): BufferOperation => ({
      execute: this.removeExecutor.bind(
        this,
        type,
        data.startOffset,
        data.endOffset
      ),
      onStart: () => {
        // logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
      },
      onComplete: () => {
        // logger.debug(`[buffer-controller]: Finished flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
        this.hls.trigger(Events.BUFFER_FLUSHED, { type });
      },
      onError: (e) => {
        logger.warn(
          `[buffer-controller]: Failed to remove from ${type} SourceBuffer`,
          e
        );
      },
    });

    if (data.type) {
      operationQueue.append(flushOperation(data.type), data.type);
    } else {
      this.getSourceBufferTypes().forEach((type: SourceBufferName) => {
        operationQueue.append(flushOperation(type), type);
      });
    }
  }

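  // Once a fragment (or part) has been parsed, block the buffers it was appended to and fire FRAG_BUFFERED
  // when all of its pending append operations have completed.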
  protected onFragParsed(event: Events.FRAG_PARSED, data: FragParsedData) {
    const { frag, part } = data;
    const buffersAppendedTo: Array<SourceBufferName> = [];
    const elementaryStreams = part
      ? part.elementaryStreams
      : frag.elementaryStreams;
    if (elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO]) {
      buffersAppendedTo.push('audiovideo');
    } else {
      if (elementaryStreams[ElementaryStreamTypes.AUDIO]) {
        buffersAppendedTo.push('audio');
      }
      if (elementaryStreams[ElementaryStreamTypes.VIDEO]) {
        buffersAppendedTo.push('video');
      }
    }

    const onUnblocked = () => {
      const now = self.performance.now();
      frag.stats.buffering.end = now;
      if (part) {
        part.stats.buffering.end = now;
      }
      const stats = part ? part.stats : frag.stats;
      this.hls.trigger(Events.FRAG_BUFFERED, {
        frag,
        part,
        stats,
        id: frag.type,
      });
    };

    if (buffersAppendedTo.length === 0) {
      logger.warn(
        `Fragments must have at least one ElementaryStreamType set. type: ${frag.type} level: ${frag.level} sn: ${frag.sn}`
      );
    }

    this.blockBuffers(onUnblocked, buffersAppendedTo);
  }

  private onFragChanged(event: Events.FRAG_CHANGED, data: FragChangedData) {
    this.flushBackBuffer();
  }

  // on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
  // an undefined data.type will mark all buffers as EOS.
  protected onBufferEos(event: Events.BUFFER_EOS, data: BufferEOSData) {
    const ended = this.getSourceBufferTypes().reduce((acc, type) => {
      const sb = this.sourceBuffer[type];
      if (sb && (!data.type || data.type === type)) {
        sb.ending = true;
        if (!sb.ended) {
          sb.ended = true;
          logger.log(`[buffer-controller]: ${type} sourceBuffer now EOS`);
        }
      }
      return acc && !!(!sb || sb.ended);
    }, true);

    if (ended) {
      logger.log(`[buffer-controller]: Queueing mediaSource.endOfStream()`);
      this.blockBuffers(() => {
        this.getSourceBufferTypes().forEach((type) => {
          const sb = this.sourceBuffer[type];
          if (sb) {
            sb.ending = false;
          }
        });
        const { mediaSource } = this;
        if (!mediaSource || mediaSource.readyState !== 'open') {
          if (mediaSource) {
            logger.info(
              `[buffer-controller]: Could not call mediaSource.endOfStream(). mediaSource.readyState: ${mediaSource.readyState}`
            );
          }
          return;
        }
        logger.log(`[buffer-controller]: Calling mediaSource.endOfStream()`);
        // Allow this to throw and be caught by the enqueueing function
        mediaSource.endOfStream();
      });
    }
  }

  protected onLevelUpdated(
    event: Events.LEVEL_UPDATED,
    { details }: LevelUpdatedData
  ) {
    if (!details.fragments.length) {
      return;
    }
    this.details = details;

    if (this.getSourceBufferTypes().length) {
      this.blockBuffers(this.updateMediaElementDuration.bind(this));
    } else {
      this.updateMediaElementDuration();
    }
  }

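  // Flush buffered media that falls more than backBufferLength seconds (or the deprecated
  // liveBackBufferLength for live streams) behind the current playback position.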
  flushBackBuffer() {
    const { hls, details, media, sourceBuffer } = this;
    if (!media || details === null) {
      return;
    }

    const sourceBufferTypes = this.getSourceBufferTypes();
    if (!sourceBufferTypes.length) {
      return;
    }

    // Support for deprecated liveBackBufferLength
    const backBufferLength =
      details.live && hls.config.liveBackBufferLength !== null
        ? hls.config.liveBackBufferLength
        : hls.config.backBufferLength;

    if (!Number.isFinite(backBufferLength) || backBufferLength < 0) {
      return;
    }

    const currentTime = media.currentTime;
    const targetDuration = details.levelTargetDuration;
    const maxBackBufferLength = Math.max(backBufferLength, targetDuration);
    const targetBackBufferPosition =
      Math.floor(currentTime / targetDuration) * targetDuration -
      maxBackBufferLength;
    sourceBufferTypes.forEach((type: SourceBufferName) => {
      const sb = sourceBuffer[type];
      if (sb) {
        const buffered = BufferHelper.getBuffered(sb);
        // when target buffer start exceeds actual buffer start
        if (
          buffered.length > 0 &&
          targetBackBufferPosition > buffered.start(0)
        ) {
          hls.trigger(Events.BACK_BUFFER_REACHED, {
            bufferEnd: targetBackBufferPosition,
          });

          // Support for deprecated event:
          if (details.live) {
            hls.trigger(Events.LIVE_BACK_BUFFER_REACHED, {
              bufferEnd: targetBackBufferPosition,
            });
          } else if (
            sb.ended &&
            buffered.end(buffered.length - 1) - currentTime < targetDuration * 2
          ) {
            logger.info(
              `[buffer-controller]: Cannot flush ${type} back buffer while SourceBuffer is in ended state`
            );
            return;
          }

          hls.trigger(Events.BUFFER_FLUSHING, {
            startOffset: 0,
            endOffset: targetBackBufferPosition,
            type,
          });
        }
      }
    });
  }

  /**
   * Update Media Source duration to current level duration, or override to Infinity if the configuration parameter
   * `liveDurationInfinity` is set to `true`
   * More details: https://github.com/video-dev/hls.js/issues/355
   */
  private updateMediaElementDuration() {
    if (
      !this.details ||
      !this.media ||
      !this.mediaSource ||
      this.mediaSource.readyState !== 'open'
    ) {
      return;
    }
    const { details, hls, media, mediaSource } = this;
    const levelDuration = details.fragments[0].start + details.totalduration;
    const mediaDuration = media.duration;
    const msDuration = Number.isFinite(mediaSource.duration)
      ? mediaSource.duration
      : 0;

    if (details.live && hls.config.liveDurationInfinity) {
      // Override duration to Infinity
      logger.log(
        '[buffer-controller]: Media Source duration is set to Infinity'
      );
      mediaSource.duration = Infinity;
      this.updateSeekableRange(details);
    } else if (
      (levelDuration > msDuration && levelDuration > mediaDuration) ||
      !Number.isFinite(mediaDuration)
    ) {
      // levelDuration was the last value we set.
      // not using mediaSource.duration as the browser may tweak this value
      // only update Media Source duration if its value increases; this is to avoid
      // flushing the already buffered portion when switching between quality levels
      logger.log(
        `[buffer-controller]: Updating Media Source duration to ${levelDuration.toFixed(
          3
        )}`
      );
      mediaSource.duration = levelDuration;
    }
  }

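  // Advertise the seekable range of a live stream to the MediaSource, when setLiveSeekableRange is supported,
  // using the first fragment start and the playlist's total duration.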
  updateSeekableRange(levelDetails) {
    const mediaSource = this.mediaSource;
    const fragments = levelDetails.fragments;
    const len = fragments.length;
    if (len && levelDetails.live && mediaSource?.setLiveSeekableRange) {
      const start = Math.max(0, fragments[0].start);
      const end = Math.max(start, start + levelDetails.totalduration);
      mediaSource.setLiveSeekableRange(start, end);
    }
  }

  protected checkPendingTracks() {
    const { bufferCodecEventsExpected, operationQueue, pendingTracks } = this;

    // Check if we've received all of the expected bufferCodec events. When none remain, create all the sourceBuffers at once.
    // This is important because the MSE spec allows implementations to throw QuotaExceededErrors if creating new sourceBuffers after
    // data has been appended to existing ones.
    // 2 tracks is the max (one for audio, one for video). If we've reached this max, go ahead and create the buffers.
    const pendingTracksCount = Object.keys(pendingTracks).length;
    if (
      (pendingTracksCount && !bufferCodecEventsExpected) ||
      pendingTracksCount === 2
    ) {
      // ok, let's create them now!
      this.createSourceBuffers(pendingTracks);
      this.pendingTracks = {};
      // append any pending segments now!
      const buffers = this.getSourceBufferTypes();
      if (buffers.length === 0) {
        this.hls.trigger(Events.ERROR, {
          type: ErrorTypes.MEDIA_ERROR,
          details: ErrorDetails.BUFFER_INCOMPATIBLE_CODECS_ERROR,
          fatal: true,
          reason: 'could not create source buffer for media codec(s)',
        });
        return;
      }
      buffers.forEach((type: SourceBufferName) => {
        operationQueue.executeNext(type);
      });
    }
  }

  protected createSourceBuffers(tracks: TrackSet) {
    const { sourceBuffer, mediaSource } = this;
    if (!mediaSource) {
      throw Error('createSourceBuffers called when mediaSource was null');
    }
    let tracksCreated = 0;
    for (const trackName in tracks) {
      if (!sourceBuffer[trackName]) {
        const track = tracks[trackName as keyof TrackSet];
        if (!track) {
          throw Error(
            `source buffer exists for track ${trackName}, however track does not`
          );
        }
        // use levelCodec as first priority
        const codec = track.levelCodec || track.codec;
        const mimeType = `${track.container};codecs=${codec}`;
        logger.log(`[buffer-controller]: creating sourceBuffer(${mimeType})`);
        try {
          const sb = (sourceBuffer[trackName] =
            mediaSource.addSourceBuffer(mimeType));
          const sbName = trackName as SourceBufferName;
          this.addBufferListener(sbName, 'updatestart', this._onSBUpdateStart);
          this.addBufferListener(sbName, 'updateend', this._onSBUpdateEnd);
          this.addBufferListener(sbName, 'error', this._onSBUpdateError);
          this.tracks[trackName] = {
            buffer: sb,
            codec: codec,
            container: track.container,
            levelCodec: track.levelCodec,
            metadata: track.metadata,
            id: track.id,
          };
          tracksCreated++;
        } catch (err) {
          logger.error(
            `[buffer-controller]: error while trying to add sourceBuffer: ${err.message}`
          );
          this.hls.trigger(Events.ERROR, {
            type: ErrorTypes.MEDIA_ERROR,
            details: ErrorDetails.BUFFER_ADD_CODEC_ERROR,
            fatal: false,
            error: err,
            mimeType: mimeType,
          });
        }
      }
    }
    if (tracksCreated) {
      this.hls.trigger(Events.BUFFER_CREATED, { tracks: this.tracks });
    }
  }

  // Keep as arrow functions so that we can directly reference these functions as event listeners
  private _onMediaSourceOpen = () => {
    const { hls, media, mediaSource } = this;
    logger.log('[buffer-controller]: Media source opened');
    if (media) {
      this.updateMediaElementDuration();
      hls.trigger(Events.MEDIA_ATTACHED, { media });
    }

    if (mediaSource) {
      // once received, don't listen anymore to sourceopen event
      mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen);
    }
    this.checkPendingTracks();
  };

  private _onMediaSourceClose = () => {
    logger.log('[buffer-controller]: Media source closed');
  };

  private _onMediaSourceEnded = () => {
    logger.log('[buffer-controller]: Media source ended');
  };

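  // SourceBuffer 'updatestart' handler: notify the operation at the head of this type's queue that it has started.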
  private _onSBUpdateStart(type: SourceBufferName) {
    const { operationQueue } = this;
    const operation = operationQueue.current(type);
    operation.onStart();
  }

  private _onSBUpdateEnd(type: SourceBufferName) {
    const { operationQueue } = this;
    const operation = operationQueue.current(type);
    operation.onComplete();
    operationQueue.shiftAndExecuteNext(type);
  }

  private _onSBUpdateError(type: SourceBufferName, event: Event) {
    logger.error(`[buffer-controller]: ${type} SourceBuffer error`, event);
    // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
    // SourceBuffer errors are not necessarily fatal; if so, the HTMLMediaElement will fire an error event
    this.hls.trigger(Events.ERROR, {
      type: ErrorTypes.MEDIA_ERROR,
      details: ErrorDetails.BUFFER_APPENDING_ERROR,
      fatal: false,
    });
    // updateend is always fired after error, so we'll allow that to shift the current operation off of the queue
    const operation = this.operationQueue.current(type);
    if (operation) {
      operation.onError(event);
    }
  }

  // This method must result in an updateend event; if remove is not called, _onSBUpdateEnd must be called manually
  private removeExecutor(
    type: SourceBufferName,
    startOffset: number,
    endOffset: number
  ) {
    const { media, mediaSource, operationQueue, sourceBuffer } = this;
    const sb = sourceBuffer[type];
    if (!media || !mediaSource || !sb) {
      logger.warn(
        `[buffer-controller]: Attempting to remove from the ${type} SourceBuffer, but it does not exist`
      );
      operationQueue.shiftAndExecuteNext(type);
      return;
    }
    const mediaDuration = Number.isFinite(media.duration)
      ? media.duration
      : Infinity;
    const msDuration = Number.isFinite(mediaSource.duration)
      ? mediaSource.duration
      : Infinity;
    const removeStart = Math.max(0, startOffset);
    const removeEnd = Math.min(endOffset, mediaDuration, msDuration);
    if (removeEnd > removeStart && !sb.ending) {
      sb.ended = false;
      logger.log(
        `[buffer-controller]: Removing [${removeStart},${removeEnd}] from the ${type} SourceBuffer`
      );
      console.assert(!sb.updating, `${type} sourceBuffer must not be updating`);
      sb.remove(removeStart, removeEnd);
    } else {
      // Cycle the queue
      operationQueue.shiftAndExecuteNext(type);
    }
  }

  // This method must result in an updateend event; if append is not called, _onSBUpdateEnd must be called manually
  private appendExecutor(data: Uint8Array, type: SourceBufferName) {
    const { operationQueue, sourceBuffer } = this;
    const sb = sourceBuffer[type];
    if (!sb) {
      logger.warn(
        `[buffer-controller]: Attempting to append to the ${type} SourceBuffer, but it does not exist`
      );
      operationQueue.shiftAndExecuteNext(type);
      return;
    }

    sb.ended = false;
    console.assert(!sb.updating, `${type} sourceBuffer must not be updating`);
    sb.appendBuffer(data);
  }

  // Enqueues an operation to each SourceBuffer queue which, upon execution, resolves a promise. When all promises
  // resolve, the onUnblocked function is executed. Functions calling this method do not need to unblock the queue
  // upon completion, since we already do it here
  private blockBuffers(
    onUnblocked: () => void,
    buffers: Array<SourceBufferName> = this.getSourceBufferTypes()
  ) {
    if (!buffers.length) {
      logger.log(
        '[buffer-controller]: Blocking operation requested, but no SourceBuffers exist'
      );
      Promise.resolve().then(onUnblocked);
      return;
    }
    const { operationQueue } = this;

    // logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
    const blockingOperations = buffers.map((type) =>
      operationQueue.appendBlocker(type as SourceBufferName)
    );
    Promise.all(blockingOperations).then(() => {
      // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
      onUnblocked();
      buffers.forEach((type) => {
        const sb = this.sourceBuffer[type];
        // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
        // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
        // While this is a workaround, it's probably useful to have around
        if (!sb || !sb.updating) {
          operationQueue.shiftAndExecuteNext(type);
        }
      });
    });
  }

  private getSourceBufferTypes(): Array<SourceBufferName> {
    return Object.keys(this.sourceBuffer) as Array<SourceBufferName>;
  }

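  // Bind an event listener to this controller and SourceBuffer type, and track it so that
  // removeBufferListeners can detach it later.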
  private addBufferListener(
    type: SourceBufferName,
    event: string,
    fn: Function
  ) {
    const buffer = this.sourceBuffer[type];
    if (!buffer) {
      return;
    }
    const listener = fn.bind(this, type);
    this.listeners[type].push({ event, listener });
    buffer.addEventListener(event, listener);
  }

  private removeBufferListeners(type: SourceBufferName) {
    const buffer = this.sourceBuffer[type];
    if (!buffer) {
      return;
    }
    this.listeners[type].forEach((l) => {
      buffer.removeEventListener(l.event, l.listener);
    });
  }
}
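
Usage note (not part of the source above): a minimal sketch of how this controller is driven through the public hls.js API, assuming a browser with Media Source Extensions support and a placeholder stream URL. `attachMedia()` fires MEDIA_ATTACHING, which creates the MediaSource here; MEDIA_ATTACHED is triggered from `_onMediaSourceOpen` once the source opens, and each completed append surfaces as BUFFER_APPENDED with the per-type buffered time ranges built in `onComplete`.

import Hls from 'hls.js';

const video = document.querySelector('video') as HTMLVideoElement;

if (Hls.isSupported()) {
  const hls = new Hls({ backBufferLength: 90 });

  // Fired by BufferController._onMediaSourceOpen once the MediaSource is open.
  hls.on(Hls.Events.MEDIA_ATTACHED, (_event, data) => {
    console.log('media attached', data.media === video);
  });

  // Fired from the append operation's onComplete callback with per-type time ranges.
  hls.on(Hls.Events.BUFFER_APPENDED, (_event, data) => {
    console.log(`appended ${data.type}`, data.timeRanges[data.type]);
  });

  // BUFFER_FULL_ERROR / BUFFER_APPEND_ERROR raised by the controller arrive here.
  hls.on(Hls.Events.ERROR, (_event, data) => {
    if (data.fatal) {
      console.error('fatal buffer/media error', data.details);
    }
  });

  hls.attachMedia(video); // triggers MEDIA_ATTACHING -> onMediaAttaching above
  hls.loadSource('https://example.com/playlist.m3u8'); // placeholder URL
}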
  954. }