
src/controller/stream-controller.ts

  1. import BaseStreamController, { State } from './base-stream-controller';
  2. import { changeTypeSupported } from '../is-supported';
  3. import { Events } from '../events';
  4. import { BufferHelper, BufferInfo } from '../utils/buffer-helper';
  5. import { FragmentState } from './fragment-tracker';
  6. import { PlaylistLevelType } from '../types/loader';
  7. import { ElementaryStreamTypes, Fragment } from '../loader/fragment';
  8. import TransmuxerInterface from '../demux/transmuxer-interface';
  9. import { ChunkMetadata } from '../types/transmuxer';
  10. import GapController from './gap-controller';
  11. import { ErrorDetails, ErrorTypes } from '../errors';
  12. import type { NetworkComponentAPI } from '../types/component-api';
  13. import type Hls from '../hls';
  14. import type { Level } from '../types/level';
  15. import type { LevelDetails } from '../loader/level-details';
  16. import type { FragmentTracker } from './fragment-tracker';
  17. import type KeyLoader from '../loader/key-loader';
  18. import type { TransmuxerResult } from '../types/transmuxer';
  19. import type { TrackSet } from '../types/track';
  20. import type { SourceBufferName } from '../types/buffer';
  21. import type {
  22. AudioTrackSwitchedData,
  23. AudioTrackSwitchingData,
  24. BufferCreatedData,
  25. BufferEOSData,
  26. BufferFlushedData,
  27. ErrorData,
  28. FragBufferedData,
  29. FragLoadedData,
  30. FragParsingMetadataData,
  31. FragParsingUserdataData,
  32. LevelLoadedData,
  33. LevelLoadingData,
  34. LevelsUpdatedData,
  35. ManifestParsedData,
  36. MediaAttachedData,
  37. } from '../types/events';
  38.  
  39. const TICK_INTERVAL = 100; // how often to tick in ms
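// This interval drives the state machine below: startLoad() calls setInterval(TICK_INTERVAL),
// and each tick ends up in doTick()/doTickIdle(), which decide whether a new main-playlist
// fragment should be requested.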
  40.  
  41. export default class StreamController
  42. extends BaseStreamController
  43. implements NetworkComponentAPI
  44. {
  45. private audioCodecSwap: boolean = false;
  46. private gapController: GapController | null = null;
  47. private level: number = -1;
  48. private _forceStartLoad: boolean = false;
  49. private altAudio: boolean = false;
  50. private audioOnly: boolean = false;
  51. private fragPlaying: Fragment | null = null;
  52. private onvplaying: EventListener | null = null;
  53. private onvseeked: EventListener | null = null;
  54. private fragLastKbps: number = 0;
  55. private couldBacktrack: boolean = false;
  56. private backtrackFragment: Fragment | null = null;
  57. private audioCodecSwitch: boolean = false;
  58. private videoBuffer: any | null = null;
  59.  
  60. constructor(
  61. hls: Hls,
  62. fragmentTracker: FragmentTracker,
  63. keyLoader: KeyLoader
  64. ) {
  65. super(hls, fragmentTracker, keyLoader, '[stream-controller]');
  66. this._registerListeners();
  67. }
  68.  
  69. private _registerListeners() {
  70. const { hls } = this;
  71. hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  72. hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  73. hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  74. hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  75. hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
  76. hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  77. hls.on(
  78. Events.FRAG_LOAD_EMERGENCY_ABORTED,
  79. this.onFragLoadEmergencyAborted,
  80. this
  81. );
  82. hls.on(Events.ERROR, this.onError, this);
  83. hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  84. hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  85. hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
  86. hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
  87. hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  88. hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  89. }
  90.  
  91. protected _unregisterListeners() {
  92. const { hls } = this;
  93. hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  94. hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  95. hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  96. hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  hls.off(Events.LEVEL_LOADING, this.onLevelLoading, this); // detach the LEVEL_LOADING listener registered in _registerListeners
  97. hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  98. hls.off(
  99. Events.FRAG_LOAD_EMERGENCY_ABORTED,
  100. this.onFragLoadEmergencyAborted,
  101. this
  102. );
  103. hls.off(Events.ERROR, this.onError, this);
  104. hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  105. hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  106. hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
  107. hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
  108. hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  109. hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  110. }
  111.  
  112. protected onHandlerDestroying() {
  113. this._unregisterListeners();
  114. this.onMediaDetaching();
  115. }
  116.  
  117. public startLoad(startPosition: number): void {
  118. if (this.levels) {
  119. const { lastCurrentTime, hls } = this;
  120. this.stopLoad();
  121. this.setInterval(TICK_INTERVAL);
  122. this.level = -1;
  123. this.fragLoadError = 0;
  124. if (!this.startFragRequested) {
  125. // determine load level
  126. let startLevel = hls.startLevel;
  127. if (startLevel === -1) {
  128. if (hls.config.testBandwidth && this.levels.length > 1) {
  129. // -1 : guess the start level by running a bitrate test: load the first fragment of the lowest quality level
  130. startLevel = 0;
  131. this.bitrateTest = true;
  132. } else {
  133. startLevel = hls.nextAutoLevel;
  134. }
  135. }
  136. // set new level to playlist loader : this will trigger start level load
  137. // hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded
  138. this.level = hls.nextLoadLevel = startLevel;
  139. this.loadedmetadata = false;
  140. }
  141. // if startPosition was not provided (-1) but lastCurrentTime is set, resume from lastCurrentTime
  142. if (lastCurrentTime > 0 && startPosition === -1) {
  143. this.log(
  144. `Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(
  145. 3
  146. )}`
  147. );
  148. startPosition = lastCurrentTime;
  149. }
  150. this.state = State.IDLE;
  151. this.nextLoadPosition =
  152. this.startPosition =
  153. this.lastCurrentTime =
  154. startPosition;
  155. this.tick();
  156. } else {
  157. this._forceStartLoad = true;
  158. this.state = State.STOPPED;
  159. }
  160. }
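// Illustrative note (assumption about the typical embedding): applications normally reach this
// method through the Hls facade, e.g. hls.startLoad(-1); a startPosition of -1 means "resume
// from lastCurrentTime when one is known, otherwise use the configured start position".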
  161.  
  162. public stopLoad() {
  163. this._forceStartLoad = false;
  164. super.stopLoad();
  165. }
  166.  
  167. protected doTick() {
  168. switch (this.state) {
  169. case State.IDLE:
  170. this.doTickIdle();
  171. break;
  172. case State.WAITING_LEVEL: {
  173. const { levels, level } = this;
  174. const details = levels?.[level]?.details;
  175. if (details && (!details.live || this.levelLastLoaded === this.level)) {
  176. if (this.waitForCdnTuneIn(details)) {
  177. break;
  178. }
  179. this.state = State.IDLE;
  180. break;
  181. }
  182. break;
  183. }
  184. case State.FRAG_LOADING_WAITING_RETRY:
  185. {
  186. const now = self.performance.now();
  187. const retryDate = this.retryDate;
  188. // if current time is greater than retryDate, or if media is seeking, switch back to IDLE state to retry loading
  189. if (!retryDate || now >= retryDate || this.media?.seeking) {
  190. this.log('retryDate reached, switch back to IDLE state');
  191. this.resetStartWhenNotLoaded(this.level);
  192. this.state = State.IDLE;
  193. }
  194. }
  195. break;
  196. default:
  197. break;
  198. }
  199. // check buffer
  200. // check/update current fragment
  201. this.onTickEnd();
  202. }
  203.  
  204. protected onTickEnd() {
  205. super.onTickEnd();
  206. this.checkBuffer();
  207. this.checkFragmentChanged();
  208. }
  209.  
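// doTickIdle() walks through the checks below in order: bail out when no level has been loaded
// yet or media is not attached (unless start-fragment prefetch applies), skip main loading when
// the main level is audio-only and an alternate audio track is active, signal BUFFER_EOS when
// the stream has ended, wait for fresh level details on live streams, respect the max buffer
// length, then pick the next fragment (honouring backtracking) and hand it to loadFragment().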
  210. private doTickIdle() {
  211. const { hls, levelLastLoaded, levels, media } = this;
  212. const { config, nextLoadLevel: level } = hls;
  213.  
  214. // if start level not parsed yet OR
  215. // if video not attached AND start fragment already requested OR start frag prefetch not enabled
  216. // exit, as we either need more info (level not parsed) or we need media to be attached to load a new fragment
  217. if (
  218. levelLastLoaded === null ||
  219. (!media && (this.startFragRequested || !config.startFragPrefetch))
  220. ) {
  221. return;
  222. }
  223.  
  224. // If the "main" level is audio-only but we are loading an alternate track in the same group, do not load anything
  225. if (this.altAudio && this.audioOnly) {
  226. return;
  227. }
  228.  
  229. if (!levels || !levels[level]) {
  230. return;
  231. }
  232.  
  233. const levelInfo = levels[level];
  234.  
  235. // if buffer length is less than maxBufLen try to load a new fragment
  236.  
  237. const bufferInfo = this.getMainFwdBufferInfo();
  238. if (bufferInfo === null) {
  239. return;
  240. }
  241.  
  242. const lastDetails = this.getLevelDetails();
  243. if (lastDetails && this._streamEnded(bufferInfo, lastDetails)) {
  244. const data: BufferEOSData = {};
  245. if (this.altAudio) {
  246. data.type = 'video';
  247. }
  248.  
  249. this.hls.trigger(Events.BUFFER_EOS, data);
  250. this.state = State.ENDED;
  251. return;
  252. }
  253.  
  254. // set next load level : this will trigger a playlist load if needed
  255. this.level = hls.nextLoadLevel = level;
  256.  
  257. const levelDetails = levelInfo.details;
  258. // if level info not retrieved yet, switch state and wait for level retrieval
  259. // if live playlist, ensure that the new playlist has been refreshed to avoid loading/trying to load
  260. // a useless and outdated fragment (that might even introduce a load error if it is already out of the live playlist)
  261. if (
  262. !levelDetails ||
  263. this.state === State.WAITING_LEVEL ||
  264. (levelDetails.live && this.levelLastLoaded !== level)
  265. ) {
  266. this.level = level;
  267. this.state = State.WAITING_LEVEL;
  268. return;
  269. }
  270.  
  271. const bufferLen = bufferInfo.len;
  272.  
  273. // compute max Buffer Length that we could get from this load level, based on level bitrate. don't buffer more than 60 MB and more than 30s
  274. const maxBufLen = this.getMaxBufferLength(levelInfo.maxBitrate);
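// getMaxBufferLength() lives in BaseStreamController; broadly (an assumption about the base
// class, not shown in this file) it derives the cap from config.maxBufferLength / maxBufferSize /
// maxMaxBufferLength and the level bitrate passed in above.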
  275.  
  276. // Stay idle if we are still within buffer margins
  277. if (bufferLen >= maxBufLen) {
  278. return;
  279. }
  280.  
  281. if (
  282. this.backtrackFragment &&
  283. this.backtrackFragment.start > bufferInfo.end
  284. ) {
  285. this.backtrackFragment = null;
  286. }
  287. const targetBufferTime = this.backtrackFragment
  288. ? this.backtrackFragment.start
  289. : bufferInfo.end;
  290. let frag = this.getNextFragment(targetBufferTime, levelDetails);
  291. // Avoid backtracking by loading an earlier segment in streams with segments that do not start with a key frame (flagged by `couldBacktrack`)
  292. if (
  293. this.couldBacktrack &&
  294. !this.fragPrevious &&
  295. frag &&
  296. frag.sn !== 'initSegment' &&
  297. this.fragmentTracker.getState(frag) !== FragmentState.OK
  298. ) {
  299. const backtrackSn = (this.backtrackFragment ?? frag).sn as number;
  300. const fragIdx = backtrackSn - levelDetails.startSN;
  301. const backtrackFrag = levelDetails.fragments[fragIdx - 1];
  302. if (backtrackFrag && frag.cc === backtrackFrag.cc) {
  303. frag = backtrackFrag;
  304. this.fragmentTracker.removeFragment(backtrackFrag);
  305. }
  306. } else if (this.backtrackFragment && bufferInfo.len) {
  307. this.backtrackFragment = null;
  308. }
  309. // Avoid loop loading by using nextLoadPosition set for backtracking
  310. if (
  311. frag &&
  312. this.fragmentTracker.getState(frag) === FragmentState.OK &&
  313. this.nextLoadPosition > targetBufferTime
  314. ) {
  315. // Cleanup the fragment tracker before trying to find the next unbuffered fragment
  316. const type =
  317. this.audioOnly && !this.altAudio
  318. ? ElementaryStreamTypes.AUDIO
  319. : ElementaryStreamTypes.VIDEO;
  320. const mediaBuffer =
  321. (type === ElementaryStreamTypes.VIDEO
  322. ? this.videoBuffer
  323. : this.mediaBuffer) || this.media;
  324. if (mediaBuffer) {
  325. this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.MAIN);
  326. }
  327. frag = this.getNextFragment(this.nextLoadPosition, levelDetails);
  328. }
  329. if (!frag) {
  330. return;
  331. }
  332. if (frag.initSegment && !frag.initSegment.data && !this.bitrateTest) {
  333. frag = frag.initSegment;
  334. }
  335.  
  336. this.loadFragment(frag, levelInfo, targetBufferTime);
  337. }
  338.  
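// loadFragment() branches on the tracker state: NOT_LOADED fragments are loaded (init segments
// and bitrate-test fragments take dedicated paths), APPENDING fragments trigger a buffer-length
// reduction and retry, and anything else with an empty media buffer clears the tracker as a
// safety net.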
  339. protected loadFragment(
  340. frag: Fragment,
  341. level: Level,
  342. targetBufferTime: number
  343. ) {
  344. // Check if fragment is not loaded
  345. const fragState = this.fragmentTracker.getState(frag);
  346. this.fragCurrent = frag;
  347. if (fragState === FragmentState.NOT_LOADED) {
  348. if (frag.sn === 'initSegment') {
  349. this._loadInitSegment(frag, level);
  350. } else if (this.bitrateTest) {
  351. this.log(
  352. `Fragment ${frag.sn} of level ${frag.level} is being downloaded to test bitrate and will not be buffered`
  353. );
  354. this._loadBitrateTestFrag(frag, level);
  355. } else {
  356. this.startFragRequested = true;
  357. super.loadFragment(frag, level, targetBufferTime);
  358. }
  359. } else if (fragState === FragmentState.APPENDING) {
  360. // Lower the buffer size and try again
  361. if (this.reduceMaxBufferLength(frag.duration)) {
  362. this.fragmentTracker.removeFragment(frag);
  363. }
  364. } else if (this.media?.buffered.length === 0) {
  365. // Stopgap for bad tracker / buffer-flush behavior
  366. this.fragmentTracker.removeAllFragments();
  367. }
  368. }
  369.  
  370. private getAppendedFrag(position): Fragment | null {
  371. const fragOrPart = this.fragmentTracker.getAppendedFrag(
  372. position,
  373. PlaylistLevelType.MAIN
  374. );
  375. if (fragOrPart && 'fragment' in fragOrPart) {
  376. return fragOrPart.fragment;
  377. }
  378. return fragOrPart;
  379. }
  380.  
  381. private getBufferedFrag(position) {
  382. return this.fragmentTracker.getBufferedFrag(
  383. position,
  384. PlaylistLevelType.MAIN
  385. );
  386. }
  387.  
  388. private followingBufferedFrag(frag: Fragment | null) {
  389. if (frag) {
  390. // try to get range of next fragment (500ms after this range)
  391. return this.getBufferedFrag(frag.end + 0.5);
  392. }
  393. return null;
  394. }
  395.  
  396. /*
  397. on immediate level switch :
  398. - pause playback if playing
  399. - cancel any pending load request
  400. - and trigger a buffer flush
  401. */
  402. public immediateLevelSwitch() {
  403. this.abortCurrentFrag();
  404. this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
  405. }
  406.  
  407. /**
  408. * try to switch ASAP without breaking video playback:
  409. * in order to ensure smooth but quick level switching,
  410. * we need to find the next flushable buffer range
  411. * we should take into account new segment fetch time
  412. */
  413. public nextLevelSwitch() {
  414. const { levels, media } = this;
  415. // ensure that media is defined and that metadata is available (to retrieve currentTime)
  416. if (media?.readyState) {
  417. let fetchdelay;
  418. const fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
  419. if (fragPlayingCurrent && fragPlayingCurrent.start > 1) {
  420. // flush buffer preceding current fragment (flush until current fragment start offset)
  421. // minus 1s to avoid video freezing, which could happen if we flushed the keyframe of the currently playing fragment
  422. this.flushMainBuffer(0, fragPlayingCurrent.start - 1);
  423. }
  424. if (!media.paused && levels) {
  425. // add a safety delay of 1s
  426. const nextLevelId = this.hls.nextLoadLevel;
  427. const nextLevel = levels[nextLevelId];
  428. const fragLastKbps = this.fragLastKbps;
  429. if (fragLastKbps && this.fragCurrent) {
  430. fetchdelay =
  431. (this.fragCurrent.duration * nextLevel.maxBitrate) /
  432. (1000 * fragLastKbps) +
  433. 1;
  434. } else {
  435. fetchdelay = 0;
  436. }
  437. } else {
  438. fetchdelay = 0;
  439. }
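// Worked example of the fetchdelay estimate above (illustrative numbers): a 6 s fragment, a
// nextLevel.maxBitrate of 5_000_000 bps and a measured fragLastKbps of 10_000 kbit/s give
// (6 * 5_000_000) / (1000 * 10_000) + 1 = 4 s of playback to keep buffered while the
// next-level fragment is fetched.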
  440. // this.log('fetchdelay:'+fetchdelay);
  441. // find buffer range that will be reached once new fragment will be fetched
  442. const bufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay);
  443. if (bufferedFrag) {
  444. // we can flush buffer range following this one without stalling playback
  445. const nextBufferedFrag = this.followingBufferedFrag(bufferedFrag);
  446. if (nextBufferedFrag) {
  447. // if we are here, we can also cancel any loading/demuxing in progress, as they are useless
  448. this.abortCurrentFrag();
  449. // start flush position is in next buffered frag. Leave some padding for non-independent segments and smoother playback.
  450. const maxStart = nextBufferedFrag.maxStartPTS
  451. ? nextBufferedFrag.maxStartPTS
  452. : nextBufferedFrag.start;
  453. const fragDuration = nextBufferedFrag.duration;
  454. const startPts = Math.max(
  455. bufferedFrag.end,
  456. maxStart +
  457. Math.min(
  458. Math.max(
  459. fragDuration - this.config.maxFragLookUpTolerance,
  460. fragDuration * 0.5
  461. ),
  462. fragDuration * 0.75
  463. )
  464. );
  465. this.flushMainBuffer(startPts, Number.POSITIVE_INFINITY);
  466. }
  467. }
  468. }
  469. }
  470.  
  471. private abortCurrentFrag() {
  472. const fragCurrent = this.fragCurrent;
  473. this.fragCurrent = null;
  474. this.backtrackFragment = null;
  475. if (fragCurrent) {
  476. fragCurrent.abortRequests();
  477. }
  478. switch (this.state) {
  479. case State.KEY_LOADING:
  480. case State.FRAG_LOADING:
  481. case State.FRAG_LOADING_WAITING_RETRY:
  482. case State.PARSING:
  483. case State.PARSED:
  484. this.state = State.IDLE;
  485. break;
  486. }
  487. this.nextLoadPosition = this.getLoadPosition();
  488. }
  489.  
  490. protected flushMainBuffer(startOffset: number, endOffset: number) {
  491. super.flushMainBuffer(
  492. startOffset,
  493. endOffset,
  494. this.altAudio ? 'video' : null
  495. );
  496. }
  497.  
  498. protected onMediaAttached(
  499. event: Events.MEDIA_ATTACHED,
  500. data: MediaAttachedData
  501. ) {
  502. super.onMediaAttached(event, data);
  503. const media = data.media;
  504. this.onvplaying = this.onMediaPlaying.bind(this);
  505. this.onvseeked = this.onMediaSeeked.bind(this);
  506. media.addEventListener('playing', this.onvplaying as EventListener);
  507. media.addEventListener('seeked', this.onvseeked as EventListener);
  508. this.gapController = new GapController(
  509. this.config,
  510. media,
  511. this.fragmentTracker,
  512. this.hls
  513. );
  514. }
  515.  
  516. protected onMediaDetaching() {
  517. const { media } = this;
  518. if (media && this.onvplaying && this.onvseeked) {
  519. media.removeEventListener('playing', this.onvplaying);
  520. media.removeEventListener('seeked', this.onvseeked);
  521. this.onvplaying = this.onvseeked = null;
  522. this.videoBuffer = null;
  523. }
  524. this.fragPlaying = null;
  525. if (this.gapController) {
  526. this.gapController.destroy();
  527. this.gapController = null;
  528. }
  529. super.onMediaDetaching();
  530. }
  531.  
  532. private onMediaPlaying() {
  533. // tick to speed up FRAG_CHANGED triggering
  534. this.tick();
  535. }
  536.  
  537. private onMediaSeeked() {
  538. const media = this.media;
  539. const currentTime = media ? media.currentTime : null;
  540. if (Number.isFinite(currentTime)) {
  541. this.log(`Media seeked to ${(currentTime as number).toFixed(3)}`);
  542. }
  543.  
  544. // tick to speed up FRAG_CHANGED triggering
  545. this.tick();
  546. }
  547.  
  548. private onManifestLoading() {
  549. // reset buffer on manifest loading
  550. this.log('Trigger BUFFER_RESET');
  551. this.hls.trigger(Events.BUFFER_RESET, undefined);
  552. this.fragmentTracker.removeAllFragments();
  553. this.couldBacktrack = false;
  554. this.startPosition = this.lastCurrentTime = 0;
  555. this.fragPlaying = null;
  556. this.backtrackFragment = null;
  557. }
  558.  
  559. private onManifestParsed(
  560. event: Events.MANIFEST_PARSED,
  561. data: ManifestParsedData
  562. ) {
  563. let aac = false;
  564. let heaac = false;
  565. let codec;
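// 'mp4a.40.2' is AAC-LC and 'mp4a.40.5' is HE-AAC (AAC with SBR); when both appear across
// levels and SourceBuffer.changeType() is unavailable, audioCodecSwitch (set below) makes the
// controller declare HE-AAC for all levels so a level switch does not require changing the
// SourceBuffer codec.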
  566. data.levels.forEach((level) => {
  567. // detect whether different kinds of audio codecs are used amongst the playlists
  568. codec = level.audioCodec;
  569. if (codec) {
  570. if (codec.indexOf('mp4a.40.2') !== -1) {
  571. aac = true;
  572. }
  573.  
  574. if (codec.indexOf('mp4a.40.5') !== -1) {
  575. heaac = true;
  576. }
  577. }
  578. });
  579. this.audioCodecSwitch = aac && heaac && !changeTypeSupported();
  580. if (this.audioCodecSwitch) {
  581. this.log(
  582. 'Both AAC/HE-AAC audio found in levels; declaring level codec as HE-AAC'
  583. );
  584. }
  585.  
  586. this.levels = data.levels;
  587. this.startFragRequested = false;
  588. }
  589.  
  590. private onLevelLoading(event: Events.LEVEL_LOADING, data: LevelLoadingData) {
  591. const { levels } = this;
  592. if (!levels || this.state !== State.IDLE) {
  593. return;
  594. }
  595. const level = levels[data.level];
  596. if (
  597. !level.details ||
  598. (level.details.live && this.levelLastLoaded !== data.level) ||
  599. this.waitForCdnTuneIn(level.details)
  600. ) {
  601. this.state = State.WAITING_LEVEL;
  602. }
  603. }
  604.  
  605. private onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
  606. const { levels } = this;
  607. const newLevelId = data.level;
  608. const newDetails = data.details;
  609. const duration = newDetails.totalduration;
  610.  
  611. if (!levels) {
  612. this.warn(`Levels were reset while loading level ${newLevelId}`);
  613. return;
  614. }
  615. this.log(
  616. `Level ${newLevelId} loaded [${newDetails.startSN},${newDetails.endSN}], cc [${newDetails.startCC}, ${newDetails.endCC}] duration:${duration}`
  617. );
  618.  
  619. const fragCurrent = this.fragCurrent;
  620. if (
  621. fragCurrent &&
  622. (this.state === State.FRAG_LOADING ||
  623. this.state === State.FRAG_LOADING_WAITING_RETRY)
  624. ) {
  625. if (fragCurrent.level !== data.level && fragCurrent.loader) {
  626. this.state = State.IDLE;
  627. this.backtrackFragment = null;
  628. fragCurrent.abortRequests();
  629. }
  630. }
  631.  
  632. const curLevel = levels[newLevelId];
  633. let sliding = 0;
  634. if (newDetails.live || curLevel.details?.live) {
  635. if (!newDetails.fragments[0]) {
  636. newDetails.deltaUpdateFailed = true;
  637. }
  638. if (newDetails.deltaUpdateFailed) {
  639. return;
  640. }
  641. sliding = this.alignPlaylists(newDetails, curLevel.details);
  642. }
  643. // override level info
  644. curLevel.details = newDetails;
  645. this.levelLastLoaded = newLevelId;
  646.  
  647. this.hls.trigger(Events.LEVEL_UPDATED, {
  648. details: newDetails,
  649. level: newLevelId,
  650. });
  651.  
  652. // only switch back to IDLE state if we were waiting for level to start downloading a new fragment
  653. if (this.state === State.WAITING_LEVEL) {
  654. if (this.waitForCdnTuneIn(newDetails)) {
  655. // Wait for Low-Latency CDN Tune-in
  656. return;
  657. }
  658. this.state = State.IDLE;
  659. }
  660.  
  661. if (!this.startFragRequested) {
  662. this.setStartPosition(newDetails, sliding);
  663. } else if (newDetails.live) {
  664. this.synchronizeToLiveEdge(newDetails);
  665. }
  666.  
  667. // trigger handler right now
  668. this.tick();
  669. }
  670.  
  671. protected _handleFragmentLoadProgress(data: FragLoadedData) {
  672. const { frag, part, payload } = data;
  673. const { levels } = this;
  674. if (!levels) {
  675. this.warn(
  676. `Levels were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`
  677. );
  678. return;
  679. }
  680. const currentLevel = levels[frag.level];
  681. const details = currentLevel.details as LevelDetails;
  682. if (!details) {
  683. this.warn(
  684. `Dropping fragment ${frag.sn} of level ${frag.level} after level details were reset`
  685. );
  686. return;
  687. }
  688. const videoCodec = currentLevel.videoCodec;
  689.  
  690. // timeOffset is accurate if the level PTS is known, or if the playlist is not sliding (not live)
  691. const accurateTimeOffset = details.PTSKnown || !details.live;
  692. const initSegmentData = frag.initSegment?.data;
  693. const audioCodec = this._getAudioCodec(currentLevel);
  694.  
  695. // transmux the MPEG-TS data to ISO-BMFF segments
  696. // this.log(`Transmuxing ${frag.sn} of [${details.startSN} ,${details.endSN}],level ${frag.level}, cc ${frag.cc}`);
  697. const transmuxer = (this.transmuxer =
  698. this.transmuxer ||
  699. new TransmuxerInterface(
  700. this.hls,
  701. PlaylistLevelType.MAIN,
  702. this._handleTransmuxComplete.bind(this),
  703. this._handleTransmuxerFlush.bind(this)
  704. ));
  705. const partIndex = part ? part.index : -1;
  706. const partial = partIndex !== -1;
  707. const chunkMeta = new ChunkMetadata(
  708. frag.level,
  709. frag.sn as number,
  710. frag.stats.chunkCount,
  711. payload.byteLength,
  712. partIndex,
  713. partial
  714. );
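// chunkMeta ties each transmuxed chunk back to its fragment/part so that
// _handleTransmuxComplete() can re-resolve the loading context via getCurrentContext(),
// even if levels or fragments changed while the transmuxer was busy.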
  715. const initPTS = this.initPTS[frag.cc];
  716.  
  717. transmuxer.push(
  718. payload,
  719. initSegmentData,
  720. audioCodec,
  721. videoCodec,
  722. frag,
  723. part,
  724. details.totalduration,
  725. accurateTimeOffset,
  726. chunkMeta,
  727. initPTS
  728. );
  729. }
  730.  
  731. private onAudioTrackSwitching(
  732. event: Events.AUDIO_TRACK_SWITCHING,
  733. data: AudioTrackSwitchingData
  734. ) {
  735. // if any URL found on new audio track, it is an alternate audio track
  736. const fromAltAudio = this.altAudio;
  737. const altAudio = !!data.url;
  738. const trackId = data.id;
  739. // if we switch on main audio, ensure that main fragment scheduling is synced with media.buffered
  740. // don't do anything if we switch to alt audio: audio stream controller is handling it.
  741. // we will just have to change buffer scheduling on audioTrackSwitched
  742. if (!altAudio) {
  743. if (this.mediaBuffer !== this.media) {
  744. this.log(
  745. 'Switching on main audio, use media.buffered to schedule main fragment loading'
  746. );
  747. this.mediaBuffer = this.media;
  748. const fragCurrent = this.fragCurrent;
  749. // we need to refill audio buffer from main: cancel any frag loading to speed up audio switch
  750. if (fragCurrent) {
  751. this.log('Switching to main audio track, cancel main fragment load');
  752. fragCurrent.abortRequests();
  753. }
  754. // destroy transmuxer to force init segment generation (following audio switch)
  755. this.resetTransmuxer();
  756. // switch to IDLE state to load new fragment
  757. this.resetLoadingState();
  758. } else if (this.audioOnly) {
  759. // Reset audio transmuxer so when switching back to main audio we're not still appending where we left off
  760. this.resetTransmuxer();
  761. }
  762. const hls = this.hls;
  763. // If switching from alt to main audio, flush all audio and trigger track switched
  764. if (fromAltAudio) {
  765. hls.trigger(Events.BUFFER_FLUSHING, {
  766. startOffset: 0,
  767. endOffset: Number.POSITIVE_INFINITY,
  768. type: 'audio',
  769. });
  770. }
  771. hls.trigger(Events.AUDIO_TRACK_SWITCHED, {
  772. id: trackId,
  773. });
  774. }
  775. }
  776.  
  777. private onAudioTrackSwitched(
  778. event: Events.AUDIO_TRACK_SWITCHED,
  779. data: AudioTrackSwitchedData
  780. ) {
  781. const trackId = data.id;
  782. const altAudio = !!this.hls.audioTracks[trackId].url;
  783. if (altAudio) {
  784. const videoBuffer = this.videoBuffer;
  785. // if we switched on alternate audio, ensure that main fragment scheduling is synced with video sourcebuffer buffered
  786. if (videoBuffer && this.mediaBuffer !== videoBuffer) {
  787. this.log(
  788. 'Switching on alternate audio, use video.buffered to schedule main fragment loading'
  789. );
  790. this.mediaBuffer = videoBuffer;
  791. }
  792. }
  793. this.altAudio = altAudio;
  794. this.tick();
  795. }
  796.  
  797. private onBufferCreated(
  798. event: Events.BUFFER_CREATED,
  799. data: BufferCreatedData
  800. ) {
  801. const tracks = data.tracks;
  802. let mediaTrack;
  803. let name;
  804. let alternate = false;
  805. for (const type in tracks) {
  806. const track = tracks[type];
  807. if (track.id === 'main') {
  808. name = type;
  809. mediaTrack = track;
  810. // keep video source buffer reference
  811. if (type === 'video') {
  812. const videoTrack = tracks[type];
  813. if (videoTrack) {
  814. this.videoBuffer = videoTrack.buffer;
  815. }
  816. }
  817. } else {
  818. alternate = true;
  819. }
  820. }
  821. if (alternate && mediaTrack) {
  822. this.log(
  823. `Alternate track found, use ${name}.buffered to schedule main fragment loading`
  824. );
  825. this.mediaBuffer = mediaTrack.buffer;
  826. } else {
  827. this.mediaBuffer = this.media;
  828. }
  829. }
  830.  
  831. private onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
  832. const { frag, part } = data;
  833. if (frag && frag.type !== PlaylistLevelType.MAIN) {
  834. return;
  835. }
  836. if (this.fragContextChanged(frag)) {
  837. // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
  838. // Avoid setting state back to IDLE, since that will interfere with a level switch
  839. this.warn(
  840. `Fragment ${frag.sn}${part ? ' p: ' + part.index : ''} of level ${
  841. frag.level
  842. } finished buffering, but was aborted. state: ${this.state}`
  843. );
  844. if (this.state === State.PARSED) {
  845. this.state = State.IDLE;
  846. }
  847. return;
  848. }
  849. const stats = part ? part.stats : frag.stats;
  850. this.fragLastKbps = Math.round(
  851. (8 * stats.total) / (stats.buffering.end - stats.loading.first)
  852. );
  853. if (frag.sn !== 'initSegment') {
  854. this.fragPrevious = frag;
  855. }
  856. this.fragBufferedComplete(frag, part);
  857. }
  858.  
  859. private onError(event: Events.ERROR, data: ErrorData) {
  860. if (data.type === ErrorTypes.KEY_SYSTEM_ERROR) {
  861. this.onFragmentOrKeyLoadError(PlaylistLevelType.MAIN, data);
  862. return;
  863. }
  864. switch (data.details) {
  865. case ErrorDetails.FRAG_LOAD_ERROR:
  866. case ErrorDetails.FRAG_LOAD_TIMEOUT:
  867. case ErrorDetails.FRAG_PARSING_ERROR:
  868. case ErrorDetails.KEY_LOAD_ERROR:
  869. case ErrorDetails.KEY_LOAD_TIMEOUT:
  870. this.onFragmentOrKeyLoadError(PlaylistLevelType.MAIN, data);
  871. break;
  872. case ErrorDetails.LEVEL_LOAD_ERROR:
  873. case ErrorDetails.LEVEL_LOAD_TIMEOUT:
  874. if (this.state !== State.ERROR) {
  875. if (data.fatal) {
  876. // if fatal error, stop processing
  877. this.warn(`${data.details}`);
  878. this.state = State.ERROR;
  879. } else {
  880. // in case of a non-fatal error while loading a level, if the level controller is not retrying the load, switch back to IDLE
  881. if (!data.levelRetry && this.state === State.WAITING_LEVEL) {
  882. this.state = State.IDLE;
  883. }
  884. }
  885. }
  886. break;
  887. case ErrorDetails.BUFFER_FULL_ERROR:
  888. // if in appending state
  889. if (
  890. data.parent === 'main' &&
  891. (this.state === State.PARSING || this.state === State.PARSED)
  892. ) {
  893. let flushBuffer = true;
  894. const bufferedInfo = this.getFwdBufferInfo(
  895. this.media,
  896. PlaylistLevelType.MAIN
  897. );
  898. // 0.5 : tolerance needed as some browsers stall playback before reaching the buffered end
  899. // reduce max buf len if current position is buffered
  900. if (bufferedInfo && bufferedInfo.len > 0.5) {
  901. flushBuffer = !this.reduceMaxBufferLength(bufferedInfo.len);
  902. }
  903. if (flushBuffer) {
  904. // current position is not buffered, but browser is still complaining about buffer full error
  905. // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
  906. // in that case flush the whole buffer to recover
  907. this.warn(
  908. 'buffer full error also media.currentTime is not buffered, flush main'
  909. );
  910. // flush main buffer
  911. this.immediateLevelSwitch();
  912. }
  913. this.resetLoadingState();
  914. }
  915. break;
  916. default:
  917. break;
  918. }
  919. }
  920.  
  921. // Checks the health of the buffer and attempts to resolve playback stalls.
  922. private checkBuffer() {
  923. const { media, gapController } = this;
  924. if (!media || !gapController || !media.readyState) {
  925. // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
  926. return;
  927. }
  928.  
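// GapController (see gap-controller.ts) polls playback progress and nudges currentTime over
// small buffered gaps or reports stalls; it only needs the last observed time and the fragment
// currently being loaded, if any.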
  929. if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
  930. // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
  931. const activeFrag = this.state !== State.IDLE ? this.fragCurrent : null;
  932. gapController.poll(this.lastCurrentTime, activeFrag);
  933. }
  934.  
  935. this.lastCurrentTime = media.currentTime;
  936. }
  937.  
  938. private onFragLoadEmergencyAborted() {
  939. this.state = State.IDLE;
  940. // if loadedmetadata is not set, we are doing an emergency switch-down on the first frag
  941. // in that case, reset startFragRequested flag
  942. if (!this.loadedmetadata) {
  943. this.startFragRequested = false;
  944. this.nextLoadPosition = this.startPosition;
  945. }
  946. this.tickImmediate();
  947. }
  948.  
  949. private onBufferFlushed(
  950. event: Events.BUFFER_FLUSHED,
  951. { type }: BufferFlushedData
  952. ) {
  953. if (
  954. type !== ElementaryStreamTypes.AUDIO ||
  955. (this.audioOnly && !this.altAudio)
  956. ) {
  957. const mediaBuffer =
  958. (type === ElementaryStreamTypes.VIDEO
  959. ? this.videoBuffer
  960. : this.mediaBuffer) || this.media;
  961. this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.MAIN);
  962. }
  963. }
  964.  
  965. private onLevelsUpdated(
  966. event: Events.LEVELS_UPDATED,
  967. data: LevelsUpdatedData
  968. ) {
  969. this.levels = data.levels;
  970. }
  971.  
  972. public swapAudioCodec() {
  973. this.audioCodecSwap = !this.audioCodecSwap;
  974. }
  975.  
  976. /**
  977. * Seeks to the set startPosition if not equal to the mediaElement's current time.
  978. */
  979. protected seekToStartPos() {
  980. const { media } = this;
  981. if (!media) {
  982. return;
  983. }
  984. const currentTime = media.currentTime;
  985. let startPosition = this.startPosition;
  986. // only adjust currentTime if different from startPosition or if startPosition not buffered
  987. // at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered
  988. if (startPosition >= 0 && currentTime < startPosition) {
  989. if (media.seeking) {
  990. this.log(
  991. `could not seek to ${startPosition}, already seeking at ${currentTime}`
  992. );
  993. return;
  994. }
  995. const buffered = BufferHelper.getBuffered(media);
  996. const bufferStart = buffered.length ? buffered.start(0) : 0;
  997. const delta = bufferStart - startPosition;
  998. if (
  999. delta > 0 &&
  1000. (delta < this.config.maxBufferHole ||
  1001. delta < this.config.maxFragLookUpTolerance)
  1002. ) {
  1003. this.log(`adjusting start position by ${delta} to match buffer start`);
  1004. startPosition += delta;
  1005. this.startPosition = startPosition;
  1006. }
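// Example of the adjustment above (illustrative numbers): startPosition 10.00 with the first
// buffered range starting at 10.12 and a maxBufferHole of 0.5 gives delta 0.12, so the seek
// target becomes 10.12 instead of landing just before the buffer.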
  1007. this.log(
  1008. `seek to target start position ${startPosition} from current time ${currentTime}`
  1009. );
  1010. media.currentTime = startPosition;
  1011. }
  1012. }
  1013.  
  1014. private _getAudioCodec(currentLevel) {
  1015. let audioCodec = this.config.defaultAudioCodec || currentLevel.audioCodec;
  1016. if (this.audioCodecSwap && audioCodec) {
  1017. this.log('Swapping audio codec');
  1018. if (audioCodec.indexOf('mp4a.40.5') !== -1) {
  1019. audioCodec = 'mp4a.40.2';
  1020. } else {
  1021. audioCodec = 'mp4a.40.5';
  1022. }
  1023. }
  1024.  
  1025. return audioCodec;
  1026. }
  1027.  
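// A bitrate-test fragment is downloaded only to measure throughput: its parse/buffer
// timestamps are collapsed to a single point in time and FRAG_LOADED is re-triggered so
// bandwidth can be estimated before committing to a start level; the payload is never buffered.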
  1028. private _loadBitrateTestFrag(frag: Fragment, level: Level) {
  1029. frag.bitrateTest = true;
  1030. this._doFragLoad(frag, level).then((data) => {
  1031. const { hls } = this;
  1032. if (!data || hls.nextLoadLevel || this.fragContextChanged(frag)) {
  1033. return;
  1034. }
  1035. this.fragLoadError = 0;
  1036. this.state = State.IDLE;
  1037. this.startFragRequested = false;
  1038. this.bitrateTest = false;
  1039. const stats = frag.stats;
  1040. // Bitrate-test fragments are neither parsed nor buffered
  1041. stats.parsing.start =
  1042. stats.parsing.end =
  1043. stats.buffering.start =
  1044. stats.buffering.end =
  1045. self.performance.now();
  1046. hls.trigger(Events.FRAG_LOADED, data as FragLoadedData);
  1047. frag.bitrateTest = false;
  1048. });
  1049. }
  1050.  
  1051. private _handleTransmuxComplete(transmuxResult: TransmuxerResult) {
  1052. const id = 'main';
  1053. const { hls } = this;
  1054. const { remuxResult, chunkMeta } = transmuxResult;
  1055.  
  1056. const context = this.getCurrentContext(chunkMeta);
  1057. if (!context) {
  1058. this.warn(
  1059. `The loading context changed while buffering fragment ${chunkMeta.sn} of level ${chunkMeta.level}. This chunk will not be buffered.`
  1060. );
  1061. this.resetStartWhenNotLoaded(chunkMeta.level);
  1062. return;
  1063. }
  1064. const { frag, part, level } = context;
  1065. const { video, text, id3, initSegment } = remuxResult;
  1066. const { details } = level;
  1067. // The audio-stream-controller handles audio buffering if Hls.js is playing an alternate audio track
  1068. const audio = this.altAudio ? undefined : remuxResult.audio;
  1069.  
  1070. // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
  1071. // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
  1072. if (this.fragContextChanged(frag)) {
  1073. return;
  1074. }
  1075.  
  1076. this.state = State.PARSING;
  1077.  
  1078. if (initSegment) {
  1079. if (initSegment.tracks) {
  1080. this._bufferInitSegment(level, initSegment.tracks, frag, chunkMeta);
  1081. hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT, {
  1082. frag,
  1083. id,
  1084. tracks: initSegment.tracks,
  1085. });
  1086. }
  1087.  
  1088. // This would be nice if Number.isFinite acted as a typeguard, but it doesn't. See: https://github.com/Microsoft/TypeScript/issues/10038
  1089. const initPTS = initSegment.initPTS as number;
  1090. const timescale = initSegment.timescale as number;
  1091. if (Number.isFinite(initPTS)) {
  1092. this.initPTS[frag.cc] = initPTS;
  1093. hls.trigger(Events.INIT_PTS_FOUND, { frag, id, initPTS, timescale });
  1094. }
  1095. }
  1096.  
  1097. // Avoid buffering if backtracking this fragment
  1098. if (video && remuxResult.independent !== false) {
  1099. if (details) {
  1100. const { startPTS, endPTS, startDTS, endDTS } = video;
  1101. if (part) {
  1102. part.elementaryStreams[video.type] = {
  1103. startPTS,
  1104. endPTS,
  1105. startDTS,
  1106. endDTS,
  1107. };
  1108. } else {
  1109. if (video.firstKeyFrame && video.independent && chunkMeta.id === 1) {
  1110. this.couldBacktrack = true;
  1111. }
  1112. if (video.dropped && video.independent) {
  1113. // Backtrack if dropped frames create a gap after currentTime
  1114.  
  1115. const bufferInfo = this.getMainFwdBufferInfo();
  1116. const targetBufferTime =
  1117. (bufferInfo ? bufferInfo.end : this.getLoadPosition()) +
  1118. this.config.maxBufferHole;
  1119. const startTime = video.firstKeyFramePTS
  1120. ? video.firstKeyFramePTS
  1121. : startPTS;
  1122. if (targetBufferTime < startTime - this.config.maxBufferHole) {
  1123. this.backtrack(frag);
  1124. return;
  1125. }
  1126. // Set video stream start to fragment start so that truncated samples do not distort the timeline, and mark it partial
  1127. frag.setElementaryStreamInfo(
  1128. video.type as ElementaryStreamTypes,
  1129. frag.start,
  1130. endPTS,
  1131. frag.start,
  1132. endDTS,
  1133. true
  1134. );
  1135. }
  1136. }
  1137. frag.setElementaryStreamInfo(
  1138. video.type as ElementaryStreamTypes,
  1139. startPTS,
  1140. endPTS,
  1141. startDTS,
  1142. endDTS
  1143. );
  1144. if (this.backtrackFragment) {
  1145. this.backtrackFragment = frag;
  1146. }
  1147. this.bufferFragmentData(video, frag, part, chunkMeta);
  1148. }
  1149. } else if (remuxResult.independent === false) {
  1150. this.backtrack(frag);
  1151. return;
  1152. }
  1153.  
  1154. if (audio) {
  1155. const { startPTS, endPTS, startDTS, endDTS } = audio;
  1156. if (part) {
  1157. part.elementaryStreams[ElementaryStreamTypes.AUDIO] = {
  1158. startPTS,
  1159. endPTS,
  1160. startDTS,
  1161. endDTS,
  1162. };
  1163. }
  1164. frag.setElementaryStreamInfo(
  1165. ElementaryStreamTypes.AUDIO,
  1166. startPTS,
  1167. endPTS,
  1168. startDTS,
  1169. endDTS
  1170. );
  1171. this.bufferFragmentData(audio, frag, part, chunkMeta);
  1172. }
  1173.  
  1174. if (details && id3?.samples?.length) {
  1175. const emittedID3: FragParsingMetadataData = {
  1176. id,
  1177. frag,
  1178. details,
  1179. samples: id3.samples,
  1180. };
  1181. hls.trigger(Events.FRAG_PARSING_METADATA, emittedID3);
  1182. }
  1183. if (details && text) {
  1184. const emittedText: FragParsingUserdataData = {
  1185. id,
  1186. frag,
  1187. details,
  1188. samples: text.samples,
  1189. };
  1190. hls.trigger(Events.FRAG_PARSING_USERDATA, emittedText);
  1191. }
  1192. }
  1193.  
  1194. private _bufferInitSegment(
  1195. currentLevel: Level,
  1196. tracks: TrackSet,
  1197. frag: Fragment,
  1198. chunkMeta: ChunkMetadata
  1199. ) {
  1200. if (this.state !== State.PARSING) {
  1201. return;
  1202. }
  1203.  
  1204. this.audioOnly = !!tracks.audio && !tracks.video;
  1205.  
  1206. // if audio track is expected to come from audio stream controller, discard any coming from main
  1207. if (this.altAudio && !this.audioOnly) {
  1208. delete tracks.audio;
  1209. }
  1210. // include levelCodec in audio and video tracks
  1211. const { audio, video, audiovideo } = tracks;
  1212. if (audio) {
  1213. let audioCodec = currentLevel.audioCodec;
  1214. const ua = navigator.userAgent.toLowerCase();
  1215. if (this.audioCodecSwitch) {
  1216. if (audioCodec) {
  1217. if (audioCodec.indexOf('mp4a.40.5') !== -1) {
  1218. audioCodec = 'mp4a.40.2';
  1219. } else {
  1220. audioCodec = 'mp4a.40.5';
  1221. }
  1222. }
  1223. // In the case that AAC and HE-AAC audio codecs are signalled in manifest,
  1224. // force HE-AAC, as it seems that most browsers prefer it.
  1225. // don't force HE-AAC if mono stream, or in Firefox
  1226. if (audio.metadata.channelCount !== 1 && ua.indexOf('firefox') === -1) {
  1227. audioCodec = 'mp4a.40.5';
  1228. }
  1229. }
  1230. // HE-AAC is broken on Android, always signal audio codec as AAC even if variant manifest states otherwise
  1231. if (ua.indexOf('android') !== -1 && audio.container !== 'audio/mpeg') {
  1232. // Exclude mpeg audio
  1233. audioCodec = 'mp4a.40.2';
  1234. this.log(`Android: force audio codec to ${audioCodec}`);
  1235. }
  1236. if (currentLevel.audioCodec && currentLevel.audioCodec !== audioCodec) {
  1237. this.log(
  1238. `Swapping manifest audio codec "${currentLevel.audioCodec}" for "${audioCodec}"`
  1239. );
  1240. }
  1241. audio.levelCodec = audioCodec;
  1242. audio.id = 'main';
  1243. this.log(
  1244. `Init audio buffer, container:${
  1245. audio.container
  1246. }, codecs[selected/level/parsed]=[${audioCodec || ''}/${
  1247. currentLevel.audioCodec || ''
  1248. }/${audio.codec}]`
  1249. );
  1250. }
  1251. if (video) {
  1252. video.levelCodec = currentLevel.videoCodec;
  1253. video.id = 'main';
  1254. this.log(
  1255. `Init video buffer, container:${
  1256. video.container
  1257. }, codecs[level/parsed]=[${currentLevel.videoCodec || ''}/${
  1258. video.codec
  1259. }]`
  1260. );
  1261. }
  1262. if (audiovideo) {
  1263. this.log(
  1264. `Init audiovideo buffer, container:${
  1265. audiovideo.container
  1266. }, codecs[level/parsed]=[${currentLevel.attrs.CODECS || ''}/${
  1267. audiovideo.codec
  1268. }]`
  1269. );
  1270. }
  1271. this.hls.trigger(Events.BUFFER_CODECS, tracks);
  1272. // loop through tracks that are going to be provided to bufferController
  1273. Object.keys(tracks).forEach((trackName) => {
  1274. const track = tracks[trackName];
  1275. const initSegment = track.initSegment;
  1276. if (initSegment?.byteLength) {
  1277. this.hls.trigger(Events.BUFFER_APPENDING, {
  1278. type: trackName as SourceBufferName,
  1279. data: initSegment,
  1280. frag,
  1281. part: null,
  1282. chunkMeta,
  1283. parent: frag.type,
  1284. });
  1285. }
  1286. });
  1287. // trigger handler right now
  1288. this.tick();
  1289. }
  1290.  
  1291. public getMainFwdBufferInfo(): BufferInfo | null {
  1292. return this.getFwdBufferInfo(
  1293. this.mediaBuffer ? this.mediaBuffer : this.media,
  1294. PlaylistLevelType.MAIN
  1295. );
  1296. }
  1297.  
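// Backtracking: when a fragment's leading samples were dropped because it does not start with
// a keyframe, the controller steps back so the preceding fragment (which carries the needed
// keyframe) is loaded first; couldBacktrack/backtrackFragment coordinate this with
// doTickIdle() above.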
  1298. private backtrack(frag: Fragment) {
  1299. this.couldBacktrack = true;
  1300. // Causes findFragments to backtrack through fragments to find the keyframe
  1301. this.backtrackFragment = frag;
  1302. this.resetTransmuxer();
  1303. this.flushBufferGap(frag);
  1304. this.fragmentTracker.removeFragment(frag);
  1305. this.fragPrevious = null;
  1306. this.nextLoadPosition = frag.start;
  1307. this.state = State.IDLE;
  1308. }
  1309.  
  1310. private checkFragmentChanged() {
  1311. const video = this.media;
  1312. let fragPlayingCurrent: Fragment | null = null;
  1313. if (video && video.readyState > 1 && video.seeking === false) {
  1314. const currentTime = video.currentTime;
  1315. /* if the video element is in a seeked state, currentTime can only increase
  1316. (assuming that the playback rate is positive).
  1317. Since currentTime sometimes jumps back to zero after a
  1318. media decode error, check that the position is buffered to avoid
  1319. seeking back to a wrong position after a media decode error
  1320. */
  1321.  
  1322. if (BufferHelper.isBuffered(video, currentTime)) {
  1323. fragPlayingCurrent = this.getAppendedFrag(currentTime);
  1324. } else if (BufferHelper.isBuffered(video, currentTime + 0.1)) {
  1325. /* ensure that FRAG_CHANGED event is triggered at startup,
  1326. when first video frame is displayed and playback is paused.
  1327. add a tolerance of 100ms, in case current position is not buffered,
  1328. check if current pos+100ms is buffered and use that buffer range
  1329. for FRAG_CHANGED event reporting */
  1330. fragPlayingCurrent = this.getAppendedFrag(currentTime + 0.1);
  1331. }
  1332. if (fragPlayingCurrent) {
  1333. this.backtrackFragment = null;
  1334. const fragPlaying = this.fragPlaying;
  1335. const fragCurrentLevel = fragPlayingCurrent.level;
  1336. if (
  1337. !fragPlaying ||
  1338. fragPlayingCurrent.sn !== fragPlaying.sn ||
  1339. fragPlaying.level !== fragCurrentLevel ||
  1340. fragPlayingCurrent.urlId !== fragPlaying.urlId
  1341. ) {
  1342. this.fragPlaying = fragPlayingCurrent;
  1343. this.hls.trigger(Events.FRAG_CHANGED, { frag: fragPlayingCurrent });
  1344. if (!fragPlaying || fragPlaying.level !== fragCurrentLevel) {
  1345. this.hls.trigger(Events.LEVEL_SWITCHED, {
  1346. level: fragCurrentLevel,
  1347. });
  1348. }
  1349. }
  1350. }
  1351. }
  1352. }
  1353.  
  1354. get nextLevel(): number {
  1355. const frag = this.nextBufferedFrag;
  1356. if (frag) {
  1357. return frag.level;
  1358. }
  1359. return -1;
  1360. }
  1361.  
  1362. get currentFrag(): Fragment | null {
  1363. const media = this.media;
  1364. if (media) {
  1365. return this.fragPlaying || this.getAppendedFrag(media.currentTime);
  1366. }
  1367. return null;
  1368. }
  1369.  
  1370. get currentProgramDateTime(): Date | null {
  1371. const media = this.media;
  1372. if (media) {
  1373. const currentTime = media.currentTime;
  1374. const frag = this.currentFrag;
  1375. if (
  1376. frag &&
  1377. Number.isFinite(currentTime) &&
  1378. Number.isFinite(frag.programDateTime)
  1379. ) {
  1380. const epocMs =
  1381. (frag.programDateTime as number) + (currentTime - frag.start) * 1000;
  1382. return new Date(epocMs);
  1383. }
  1384. }
  1385. return null;
  1386. }
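// Example (illustrative numbers): with frag.programDateTime = 1_600_000_000_000 ms,
// frag.start = 120 s and media.currentTime = 125.5 s, epocMs is
// 1_600_000_000_000 + 5.5 * 1000 = 1_600_000_005_500, i.e. 5.5 s past the fragment's
// program date time.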
  1387.  
  1388. get currentLevel(): number {
  1389. const frag = this.currentFrag;
  1390. if (frag) {
  1391. return frag.level;
  1392. }
  1393. return -1;
  1394. }
  1395.  
  1396. get nextBufferedFrag() {
  1397. const frag = this.currentFrag;
  1398. if (frag) {
  1399. return this.followingBufferedFrag(frag);
  1400. }
  1401. return null;
  1402. }
  1403.  
  1404. get forceStartLoad() {
  1405. return this._forceStartLoad;
  1406. }
  1407. }
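/*
 * Usage sketch (assumption: typical hls.js wiring, shown only as commented-out pseudocode;
 * StreamController is normally constructed by the Hls facade rather than by applications):
 *
 *   const streamController = new StreamController(hls, fragmentTracker, keyLoader);
 *   streamController.startLoad(-1);      // begin loading from lastCurrentTime / start position
 *   // ... later, on a quality change:
 *   streamController.nextLevelSwitch();  // flush ahead of the playhead and switch smoothly
 *   streamController.stopLoad();         // cancel loading, e.g. before detaching media
 */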