Home Reference Source

src/controller/stream-controller.ts

  1. import BaseStreamController, { State } from './base-stream-controller';
  2. import { changeTypeSupported } from '../is-supported';
  3. import type { NetworkComponentAPI } from '../types/component-api';
  4. import { Events } from '../events';
  5. import { BufferHelper } from '../utils/buffer-helper';
  6. import type { FragmentTracker } from './fragment-tracker';
  7. import { FragmentState } from './fragment-tracker';
  8. import type { Level } from '../types/level';
  9. import { PlaylistLevelType } from '../types/loader';
  10. import { ElementaryStreamTypes, Fragment } from '../loader/fragment';
  11. import TransmuxerInterface from '../demux/transmuxer-interface';
  12. import type { TransmuxerResult } from '../types/transmuxer';
  13. import { ChunkMetadata } from '../types/transmuxer';
  14. import GapController from './gap-controller';
  15. import { ErrorDetails } from '../errors';
  16. import { logger } from '../utils/logger';
  17. import type Hls from '../hls';
  18. import type { LevelDetails } from '../loader/level-details';
  19. import type { TrackSet } from '../types/track';
  20. import type { SourceBufferName } from '../types/buffer';
  21. import type {
  22. AudioTrackSwitchedData,
  23. AudioTrackSwitchingData,
  24. BufferCreatedData,
  25. BufferEOSData,
  26. BufferFlushedData,
  27. ErrorData,
  28. FragBufferedData,
  29. FragLoadedData,
  30. FragParsingMetadataData,
  31. FragParsingUserdataData,
  32. LevelLoadedData,
  33. LevelLoadingData,
  34. LevelsUpdatedData,
  35. ManifestParsedData,
  36. MediaAttachedData,
  37. } from '../types/events';
  38.  
// Period of the controller's polling loop (see setInterval in startLoad).
const TICK_INTERVAL = 100; // how often to tick in ms
  40.  
/**
 * StreamController drives loading and buffering of the main (video/muxed)
 * playlist: it reacts to hls.js events, schedules fragment loads via the
 * BaseStreamController state machine, and coordinates buffer flushes.
 */
export default class StreamController
  extends BaseStreamController
  implements NetworkComponentAPI
{
  // Toggled by swapAudioCodec() to flip the advertised AAC/HE-AAC codec.
  private audioCodecSwap: boolean = false;
  // Created on MEDIA_ATTACHED, destroyed on MEDIA_DETACHING.
  private gapController: GapController | null = null;
  // Index of the level currently being loaded; -1 when unset.
  private level: number = -1;
  // Set when startLoad() is called before levels are known, so loading can
  // start as soon as they are.
  private _forceStartLoad: boolean = false;
  // True while an alternate (separate-URL) audio track is active.
  private altAudio: boolean = false;
  // True when the main level carries audio only — NOTE(review): not assigned
  // anywhere in this chunk; presumably set elsewhere in the class.
  private audioOnly: boolean = false;
  // Fragment currently at the playhead (used for FRAG_CHANGED detection).
  private fragPlaying: Fragment | null = null;
  // Bound media-element listeners, kept so they can be removed on detach.
  private onvplaying: EventListener | null = null;
  private onvseeked: EventListener | null = null;
  // Measured throughput of the last buffered fragment, in kbit/s.
  private fragLastKbps: number = 0;
  // Stall flag reset on manifest load — NOTE(review): not read in this chunk.
  private stalled: boolean = false;
  // Whether this stream has produced backtracked fragments (see doTickIdle).
  private couldBacktrack: boolean = false;
  // True when both AAC and HE-AAC levels exist and changeType is unsupported.
  private audioCodecSwitch: boolean = false;
  // Reference to the video SourceBuffer captured in onBufferCreated.
  private videoBuffer: any | null = null;
  59.  
  /**
   * @param hls - owning Hls instance whose events this controller observes
   * @param fragmentTracker - shared tracker used to query fragment/buffer state
   */
  constructor(hls: Hls, fragmentTracker: FragmentTracker) {
    super(hls, fragmentTracker, '[stream-controller]');
    // Listeners are removed again in onHandlerDestroying().
    this._registerListeners();
  }
  64.  
  // Subscribe to all hls.js events this controller reacts to. Must mirror
  // _unregisterListeners() exactly.
  private _registerListeners() {
    const { hls } = this;
    hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
    hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
    hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
    hls.on(
      Events.FRAG_LOAD_EMERGENCY_ABORTED,
      this.onFragLoadEmergencyAborted,
      this
    );
    hls.on(Events.ERROR, this.onError, this);
    hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
    hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
    hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
    hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
    hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
    hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }
  86.  
  87. protected _unregisterListeners() {
  88. const { hls } = this;
  89. hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  90. hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  91. hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  92. hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  93. hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  94. hls.off(
  95. Events.FRAG_LOAD_EMERGENCY_ABORTED,
  96. this.onFragLoadEmergencyAborted,
  97. this
  98. );
  99. hls.off(Events.ERROR, this.onError, this);
  100. hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  101. hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  102. hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
  103. hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
  104. hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  105. hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  106. }
  107.  
  /**
   * Teardown hook: drop all event subscriptions and detach from the media
   * element before the controller is destroyed.
   */
  protected onHandlerDestroying() {
    this._unregisterListeners();
    this.onMediaDetaching();
  }
  112.  
  /**
   * Begin (or resume) loading at `startPosition`.
   *
   * If levels are known, restarts the tick loop, picks a start level (bitrate
   * test, configured startLevel, or auto level) and enters IDLE so the next
   * tick schedules a fragment. If levels are not yet known, records the intent
   * via _forceStartLoad and stays STOPPED.
   *
   * @param startPosition - position in seconds; -1 means "resume from
   *   lastCurrentTime when available"
   */
  public startLoad(startPosition: number): void {
    if (this.levels) {
      const { lastCurrentTime, hls } = this;
      this.stopLoad();
      this.setInterval(TICK_INTERVAL);
      this.level = -1;
      this.fragLoadError = 0;
      if (!this.startFragRequested) {
        // determine load level
        let startLevel = hls.startLevel;
        if (startLevel === -1) {
          if (hls.config.testBandwidth) {
            // -1 : guess start Level by doing a bitrate test by loading first fragment of lowest quality level
            startLevel = 0;
            this.bitrateTest = true;
          } else {
            startLevel = hls.nextAutoLevel;
          }
        }
        // set new level to playlist loader : this will trigger start level load
        // hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded
        this.level = hls.nextLoadLevel = startLevel;
        this.loadedmetadata = false;
      }
      // if startPosition undefined but lastCurrentTime set, set startPosition to last currentTime
      if (lastCurrentTime > 0 && startPosition === -1) {
        this.log(
          `Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(
            3
          )}`
        );
        startPosition = lastCurrentTime;
      }
      this.state = State.IDLE;
      this.nextLoadPosition =
        this.startPosition =
        this.lastCurrentTime =
          startPosition;
      this.tick();
    } else {
      this._forceStartLoad = true;
      this.state = State.STOPPED;
    }
  }
  157.  
  /**
   * Stop loading and clear the deferred-start flag set by startLoad() when
   * levels were not yet available.
   */
  public stopLoad() {
    this._forceStartLoad = false;
    super.stopLoad();
  }
  162.  
  /**
   * Advance the controller's state machine by one step, then run the
   * buffer/fragment checks that happen on every tick.
   */
  protected doTick() {
    const { state } = this;
    if (state === State.IDLE) {
      this.doTickIdle();
    } else if (state === State.WAITING_LEVEL) {
      const details = this.levels?.[this.level]?.details;
      // Leave WAITING_LEVEL once details for the awaited level are usable
      // (non-live, or refreshed for this level) and any low-latency CDN
      // tune-in delay has elapsed.
      if (
        details &&
        (!details.live || this.levelLastLoaded === this.level) &&
        !this.waitForCdnTuneIn(details)
      ) {
        this.state = State.IDLE;
      }
    } else if (state === State.FRAG_LOADING_WAITING_RETRY) {
      const now = self.performance.now();
      const retryDate = this.retryDate;
      // Retry once the retry deadline passes, or immediately while seeking.
      if (!retryDate || now >= retryDate || this.media?.seeking) {
        this.log('retryDate reached, switch back to IDLE state');
        this.state = State.IDLE;
      }
    }
    // check buffer
    // check/update current fragment
    this.onTickEnd();
  }
  198.  
  /**
   * Runs at the end of every tick: checks buffer health and whether the
   * fragment at the playhead has changed.
   */
  protected onTickEnd() {
    super.onTickEnd();
    this.checkBuffer();
    this.checkFragmentChanged();
  }
  204.  
  /**
   * IDLE-state work: decide whether a new fragment should be loaded and, if
   * so, pick it and hand it to loadFragment()/loadKey().
   *
   * Bails out early when: levels aren't parsed yet, media isn't attached (and
   * prefetch doesn't apply), an alternate audio track covers an audio-only
   * main level, level details are missing/stale (switches to WAITING_LEVEL),
   * the forward buffer is already full, or the stream has ended (fires
   * BUFFER_EOS and enters ENDED).
   */
  private doTickIdle() {
    const { hls, levelLastLoaded, levels, media } = this;
    const { config, nextLoadLevel: level } = hls;

    // if start level not parsed yet OR
    // if video not attached AND start fragment already requested OR start frag prefetch not enabled
    // exit loop, as we either need more info (level not parsed) or we need media to be attached to load new fragment
    if (
      levelLastLoaded === null ||
      (!media && (this.startFragRequested || !config.startFragPrefetch))
    ) {
      return;
    }

    // If the "main" level is audio-only but we are loading an alternate track in the same group, do not load anything
    if (this.altAudio && this.audioOnly) {
      return;
    }

    if (!levels || !levels[level]) {
      return;
    }

    const levelInfo = levels[level];

    // if buffer length is less than maxBufLen try to load a new fragment
    // set next load level : this will trigger a playlist load if needed
    this.level = hls.nextLoadLevel = level;

    const levelDetails = levelInfo.details;
    // if level info not retrieved yet, switch state and wait for level retrieval
    // if live playlist, ensure that new playlist has been refreshed to avoid loading/try to load
    // a useless and outdated fragment (that might even introduce load error if it is already out of the live playlist)
    if (
      !levelDetails ||
      this.state === State.WAITING_LEVEL ||
      (levelDetails.live && this.levelLastLoaded !== level)
    ) {
      this.state = State.WAITING_LEVEL;
      return;
    }

    // Forward buffer is measured on mediaBuffer when an alternate track remapped
    // scheduling (see onBufferCreated), otherwise on the media element itself.
    const bufferInfo = this.getFwdBufferInfo(
      this.mediaBuffer ? this.mediaBuffer : media,
      PlaylistLevelType.MAIN
    );
    if (bufferInfo === null) {
      return;
    }
    const bufferLen = bufferInfo.len;

    // compute max Buffer Length that we could get from this load level, based on level bitrate. don't buffer more than 60 MB and more than 30s
    const maxBufLen = this.getMaxBufferLength(levelInfo.maxBitrate);

    // Stay idle if we are still with buffer margins
    if (bufferLen >= maxBufLen) {
      return;
    }

    if (this._streamEnded(bufferInfo, levelDetails)) {
      const data: BufferEOSData = {};
      if (this.altAudio) {
        // only end the video buffer; audio is owned by the alt-audio controller
        data.type = 'video';
      }

      this.hls.trigger(Events.BUFFER_EOS, data);
      this.state = State.ENDED;
      return;
    }

    const targetBufferTime = bufferInfo.end;
    let frag = this.getNextFragment(targetBufferTime, levelDetails);
    // Avoid backtracking after seeking or switching by loading an earlier segment in streams that could backtrack
    if (
      this.couldBacktrack &&
      !this.fragPrevious &&
      frag &&
      frag.sn !== 'initSegment'
    ) {
      const fragIdx = frag.sn - levelDetails.startSN;
      if (fragIdx > 1) {
        // step back one fragment and forget any tracker state it had
        frag = levelDetails.fragments[fragIdx - 1];
        this.fragmentTracker.removeFragment(frag);
      }
    }
    // Avoid loop loading by using nextLoadPosition set for backtracking
    if (
      frag &&
      this.fragmentTracker.getState(frag) === FragmentState.OK &&
      this.nextLoadPosition > targetBufferTime
    ) {
      // Cleanup the fragment tracker before trying to find the next unbuffered fragment
      const type =
        this.audioOnly && !this.altAudio
          ? ElementaryStreamTypes.AUDIO
          : ElementaryStreamTypes.VIDEO;
      this.afterBufferFlushed(media, type, PlaylistLevelType.MAIN);
      frag = this.getNextFragment(this.nextLoadPosition, levelDetails);
    }
    if (!frag) {
      return;
    }
    if (frag.initSegment && !frag.initSegment.data && !this.bitrateTest) {
      // load the init segment first when its data isn't cached yet
      frag = frag.initSegment;
    }

    // We want to load the key if we're dealing with an identity key, because we will decrypt
    // this content using the key we fetch. Other keys will be handled by the DRM CDM via EME.
    if (frag.decryptdata?.keyFormat === 'identity' && !frag.decryptdata?.key) {
      this.loadKey(frag, levelDetails);
    } else {
      this.loadFragment(frag, levelDetails, targetBufferTime);
    }
  }
  319.  
  /**
   * Load `frag` according to its tracker state:
   *  - BACKTRACKED: replay previously loaded data instead of re-downloading
   *  - NOT_LOADED / PARTIAL: load it (init segment, bitrate-test frag, or a
   *    regular load via the base class)
   *  - APPENDING: shrink the max buffer length and drop the frag to retry
   *  - otherwise, if nothing at all is buffered, reset the tracker
   *
   * @param frag - fragment selected by doTickIdle()
   * @param levelDetails - details of the level the fragment belongs to
   * @param targetBufferTime - buffer-end position the load should extend
   */
  protected loadFragment(
    frag: Fragment,
    levelDetails: LevelDetails,
    targetBufferTime: number
  ) {
    // Check if fragment is not loaded
    let fragState = this.fragmentTracker.getState(frag);
    this.fragCurrent = frag;
    // Use data from loaded backtracked fragment if available
    if (fragState === FragmentState.BACKTRACKED) {
      const data = this.fragmentTracker.getBacktrackData(frag);
      if (data) {
        this._handleFragmentLoadProgress(data);
        this._handleFragmentLoadComplete(data);
        return;
      } else {
        // backtrack data is gone — fall through to a fresh load
        fragState = FragmentState.NOT_LOADED;
      }
    }
    if (
      fragState === FragmentState.NOT_LOADED ||
      fragState === FragmentState.PARTIAL
    ) {
      if (frag.sn === 'initSegment') {
        this._loadInitSegment(frag);
      } else if (this.bitrateTest) {
        frag.bitrateTest = true;
        this.log(
          `Fragment ${frag.sn} of level ${frag.level} is being downloaded to test bitrate and will not be buffered`
        );
        this._loadBitrateTestFrag(frag);
      } else {
        this.startFragRequested = true;
        super.loadFragment(frag, levelDetails, targetBufferTime);
      }
    } else if (fragState === FragmentState.APPENDING) {
      // Lower the buffer size and try again
      if (this.reduceMaxBufferLength(frag.duration)) {
        this.fragmentTracker.removeFragment(frag);
      }
    } else if (this.media?.buffered.length === 0) {
      // Stop gap for bad tracker / buffer flush behavior
      this.fragmentTracker.removeAllFragments();
    }
  }
  365.  
  366. private getAppendedFrag(position): Fragment | null {
  367. const fragOrPart = this.fragmentTracker.getAppendedFrag(
  368. position,
  369. PlaylistLevelType.MAIN
  370. );
  371. if (fragOrPart && 'fragment' in fragOrPart) {
  372. return fragOrPart.fragment;
  373. }
  374. return fragOrPart;
  375. }
  376.  
  // Return the buffered main-playlist fragment covering `position`, if any.
  private getBufferedFrag(position) {
    return this.fragmentTracker.getBufferedFrag(
      position,
      PlaylistLevelType.MAIN
    );
  }
  383.  
  384. private followingBufferedFrag(frag: Fragment | null) {
  385. if (frag) {
  386. // try to get range of next fragment (500ms after this range)
  387. return this.getBufferedFrag(frag.end + 0.5);
  388. }
  389. return null;
  390. }
  391.  
  /**
   * On immediate level switch:
   *  - pause playback if playing
   *  - cancel any pending load request
   *  - and trigger a buffer flush
   */
  public immediateLevelSwitch() {
    this.abortCurrentFrag();
    this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
  }
  402.  
  /**
   * try to switch ASAP without breaking video playback:
   * in order to ensure smooth but quick level switching,
   * we need to find the next flushable buffer range
   * we should take into account new segment fetch time
   */
  public nextLevelSwitch() {
    const { levels, media } = this;
    // ensure that media is defined and that metadata are available (to retrieve currentTime)
    if (media?.readyState) {
      let fetchdelay;
      const fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
      if (fragPlayingCurrent && fragPlayingCurrent.start > 1) {
        // flush buffer preceding current fragment (flush until current fragment start offset)
        // minus 1s to avoid video freezing, that could happen if we flush keyframe of current video ...
        this.flushMainBuffer(0, fragPlayingCurrent.start - 1);
      }
      if (!media.paused && levels) {
        // add a safety delay of 1s
        const nextLevelId = this.hls.nextLoadLevel;
        const nextLevel = levels[nextLevelId];
        const fragLastKbps = this.fragLastKbps;
        // estimate the next fragment's fetch time from the last measured throughput
        if (fragLastKbps && this.fragCurrent) {
          fetchdelay =
            (this.fragCurrent.duration * nextLevel.maxBitrate) /
            (1000 * fragLastKbps) +
            1;
        } else {
          fetchdelay = 0;
        }
      } else {
        fetchdelay = 0;
      }
      // this.log('fetchdelay:'+fetchdelay);
      // find buffer range that will be reached once new fragment will be fetched
      const bufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay);
      if (bufferedFrag) {
        // we can flush buffer range following this one without stalling playback
        const nextBufferedFrag = this.followingBufferedFrag(bufferedFrag);
        if (nextBufferedFrag) {
          // if we are here, we can also cancel any loading/demuxing in progress, as they are useless
          this.abortCurrentFrag();
          // start flush position is in next buffered frag. Leave some padding for non-independent segments and smoother playback.
          const maxStart = nextBufferedFrag.maxStartPTS
            ? nextBufferedFrag.maxStartPTS
            : nextBufferedFrag.start;
          const fragDuration = nextBufferedFrag.duration;
          // keep 50-75% of the next fragment (clamped by maxFragLookUpTolerance)
          // buffered before the flush point
          const startPts = Math.max(
            bufferedFrag.end,
            maxStart +
              Math.min(
                Math.max(
                  fragDuration - this.config.maxFragLookUpTolerance,
                  fragDuration * 0.5
                ),
                fragDuration * 0.75
              )
          );
          this.flushMainBuffer(startPts, Number.POSITIVE_INFINITY);
        }
      }
    }
  }
  466.  
  467. private abortCurrentFrag() {
  468. const fragCurrent = this.fragCurrent;
  469. this.fragCurrent = null;
  470. if (fragCurrent?.loader) {
  471. fragCurrent.loader.abort();
  472. }
  473. if (this.state === State.KEY_LOADING) {
  474. this.state = State.IDLE;
  475. }
  476. this.nextLoadPosition = this.getLoadPosition();
  477. }
  478.  
  479. protected flushMainBuffer(startOffset: number, endOffset: number) {
  480. super.flushMainBuffer(
  481. startOffset,
  482. endOffset,
  483. this.altAudio ? 'video' : null
  484. );
  485. }
  486.  
  /**
   * MEDIA_ATTACHED handler: hook 'playing'/'seeked' listeners on the media
   * element and create the GapController that resolves playback stalls.
   */
  protected onMediaAttached(
    event: Events.MEDIA_ATTACHED,
    data: MediaAttachedData
  ) {
    super.onMediaAttached(event, data);
    const media = data.media;
    // keep bound references so the listeners can be removed in onMediaDetaching
    this.onvplaying = this.onMediaPlaying.bind(this);
    this.onvseeked = this.onMediaSeeked.bind(this);
    media.addEventListener('playing', this.onvplaying as EventListener);
    media.addEventListener('seeked', this.onvseeked as EventListener);
    this.gapController = new GapController(
      this.config,
      media,
      this.fragmentTracker,
      this.hls
    );
  }
  504.  
  /**
   * MEDIA_DETACHING handler: undo everything onMediaAttached set up (media
   * listeners, gap controller) and clear playhead-tracking state.
   */
  protected onMediaDetaching() {
    const { media } = this;
    if (media) {
      // remove the listeners installed in onMediaAttached
      media.removeEventListener('playing', this.onvplaying);
      media.removeEventListener('seeked', this.onvseeked);
      this.onvplaying = this.onvseeked = null;
      this.videoBuffer = null;
    }
    this.fragPlaying = null;
    if (this.gapController) {
      this.gapController.destroy();
      this.gapController = null;
    }
    super.onMediaDetaching();
  }
  520.  
  // 'playing' media event handler.
  private onMediaPlaying() {
    // tick to speed up FRAG_CHANGED triggering
    this.tick();
  }
  525.  
  526. private onMediaSeeked() {
  527. const media = this.media;
  528. const currentTime = media ? media.currentTime : null;
  529. if (Number.isFinite(currentTime)) {
  530. this.log(`Media seeked to ${currentTime.toFixed(3)}`);
  531. }
  532.  
  533. // tick to speed up FRAG_CHANGED triggering
  534. this.tick();
  535. }
  536.  
  /**
   * MANIFEST_LOADING handler: reset buffers, the fragment tracker and all
   * positional state so the new manifest starts from a clean slate.
   */
  private onManifestLoading() {
    // reset buffer on manifest loading
    this.log('Trigger BUFFER_RESET');
    this.hls.trigger(Events.BUFFER_RESET, undefined);
    this.fragmentTracker.removeAllFragments();
    this.couldBacktrack = this.stalled = false;
    this.startPosition = this.lastCurrentTime = 0;
    this.fragPlaying = null;
  }
  546.  
  547. private onManifestParsed(
  548. event: Events.MANIFEST_PARSED,
  549. data: ManifestParsedData
  550. ) {
  551. let aac = false;
  552. let heaac = false;
  553. let codec;
  554. data.levels.forEach((level) => {
  555. // detect if we have different kind of audio codecs used amongst playlists
  556. codec = level.audioCodec;
  557. if (codec) {
  558. if (codec.indexOf('mp4a.40.2') !== -1) {
  559. aac = true;
  560. }
  561.  
  562. if (codec.indexOf('mp4a.40.5') !== -1) {
  563. heaac = true;
  564. }
  565. }
  566. });
  567. this.audioCodecSwitch = aac && heaac && !changeTypeSupported();
  568. if (this.audioCodecSwitch) {
  569. this.log(
  570. 'Both AAC/HE-AAC audio found in levels; declaring level codec as HE-AAC'
  571. );
  572. }
  573.  
  574. this.levels = data.levels;
  575. this.startFragRequested = false;
  576. }
  577.  
  578. private onLevelLoading(event: Events.LEVEL_LOADING, data: LevelLoadingData) {
  579. const { levels } = this;
  580. if (!levels || this.state !== State.IDLE) {
  581. return;
  582. }
  583. const level = levels[data.level];
  584. if (
  585. !level.details ||
  586. (level.details.live && this.levelLastLoaded !== data.level) ||
  587. this.waitForCdnTuneIn(level.details)
  588. ) {
  589. this.state = State.WAITING_LEVEL;
  590. }
  591. }
  592.  
  /**
   * LEVEL_LOADED handler: adopt the freshly loaded level details, abort any
   * fragment load that belongs to a different level, align live playlists,
   * fire LEVEL_UPDATED, and resume the state machine (leaving WAITING_LEVEL
   * unless CDN tune-in still applies). Also sets the start position on first
   * load, or syncs to the live edge on refreshes.
   */
  private onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
    const { levels } = this;
    const newLevelId = data.level;
    const newDetails = data.details;
    const duration = newDetails.totalduration;

    if (!levels) {
      this.warn(`Levels were reset while loading level ${newLevelId}`);
      return;
    }
    this.log(
      `Level ${newLevelId} loaded [${newDetails.startSN},${newDetails.endSN}], cc [${newDetails.startCC}, ${newDetails.endCC}] duration:${duration}`
    );

    const fragCurrent = this.fragCurrent;
    if (
      fragCurrent &&
      (this.state === State.FRAG_LOADING ||
        this.state === State.FRAG_LOADING_WAITING_RETRY)
    ) {
      // a level switch happened mid-load: drop the now-mismatched fragment
      if (fragCurrent.level !== data.level && fragCurrent.loader) {
        this.state = State.IDLE;
        fragCurrent.loader.abort();
      }
    }

    const curLevel = levels[newLevelId];
    let sliding = 0;
    if (newDetails.live || curLevel.details?.live) {
      // an empty delta playlist update is treated as a failed update
      if (!newDetails.fragments[0]) {
        newDetails.deltaUpdateFailed = true;
      }
      if (newDetails.deltaUpdateFailed) {
        return;
      }
      sliding = this.alignPlaylists(newDetails, curLevel.details);
    }
    // override level info
    curLevel.details = newDetails;
    this.levelLastLoaded = newLevelId;

    this.hls.trigger(Events.LEVEL_UPDATED, {
      details: newDetails,
      level: newLevelId,
    });

    // only switch back to IDLE state if we were waiting for level to start downloading a new fragment
    if (this.state === State.WAITING_LEVEL) {
      if (this.waitForCdnTuneIn(newDetails)) {
        // Wait for Low-Latency CDN Tune-in
        return;
      }
      this.state = State.IDLE;
    }

    if (!this.startFragRequested) {
      this.setStartPosition(newDetails, sliding);
    } else if (newDetails.live) {
      this.synchronizeToLiveEdge(newDetails);
    }

    // trigger handler right now
    this.tick();
  }
  657.  
  /**
   * Feed a loaded fragment chunk into the transmuxer (created lazily on the
   * first chunk). Bails out when levels or level details were reset while the
   * load was in flight.
   *
   * @param data - progress payload carrying the fragment, optional part, and
   *   the raw bytes to transmux
   */
  protected _handleFragmentLoadProgress(data: FragLoadedData) {
    const { frag, part, payload } = data;
    const { levels } = this;
    if (!levels) {
      this.warn(
        `Levels were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`
      );
      return;
    }
    const currentLevel = levels[frag.level];
    const details = currentLevel.details as LevelDetails;
    if (!details) {
      this.warn(
        `Dropping fragment ${frag.sn} of level ${frag.level} after level details were reset`
      );
      return;
    }
    const videoCodec = currentLevel.videoCodec;

    // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
    const accurateTimeOffset = details.PTSKnown || !details.live;
    const initSegmentData = frag.initSegment?.data;
    const audioCodec = this._getAudioCodec(currentLevel);

    // transmux the MPEG-TS data to ISO-BMFF segments
    // this.log(`Transmuxing ${frag.sn} of [${details.startSN} ,${details.endSN}],level ${frag.level}, cc ${frag.cc}`);
    const transmuxer = (this.transmuxer =
      this.transmuxer ||
      new TransmuxerInterface(
        this.hls,
        PlaylistLevelType.MAIN,
        this._handleTransmuxComplete.bind(this),
        this._handleTransmuxerFlush.bind(this)
      ));
    // partIndex of -1 marks a whole-fragment (non low-latency part) chunk
    const partIndex = part ? part.index : -1;
    const partial = partIndex !== -1;
    const chunkMeta = new ChunkMetadata(
      frag.level,
      frag.sn as number,
      frag.stats.chunkCount,
      payload.byteLength,
      partIndex,
      partial
    );
    const initPTS = this.initPTS[frag.cc];

    transmuxer.push(
      payload,
      initSegmentData,
      audioCodec,
      videoCodec,
      frag,
      part,
      details.totalduration,
      accurateTimeOffset,
      chunkMeta,
      initPTS
    );
  }
  717.  
  /**
   * AUDIO_TRACK_SWITCHING handler. Only acts when switching TO main audio
   * (no alternate URL): re-syncs scheduling to media.buffered, cancels any
   * in-flight main fragment, resets the transmuxer, flushes audio when coming
   * from an alt track, and fires AUDIO_TRACK_SWITCHED itself. Switches to an
   * alternate track are handled by the audio stream controller.
   */
  private onAudioTrackSwitching(
    event: Events.AUDIO_TRACK_SWITCHING,
    data: AudioTrackSwitchingData
  ) {
    // if any URL found on new audio track, it is an alternate audio track
    const fromAltAudio = this.altAudio;
    const altAudio = !!data.url;
    const trackId = data.id;
    // if we switch on main audio, ensure that main fragment scheduling is synced with media.buffered
    // don't do anything if we switch to alt audio: audio stream controller is handling it.
    // we will just have to change buffer scheduling on audioTrackSwitched
    if (!altAudio) {
      if (this.mediaBuffer !== this.media) {
        this.log(
          'Switching on main audio, use media.buffered to schedule main fragment loading'
        );
        this.mediaBuffer = this.media;
        const fragCurrent = this.fragCurrent;
        // we need to refill audio buffer from main: cancel any frag loading to speed up audio switch
        if (fragCurrent?.loader) {
          this.log('Switching to main audio track, cancel main fragment load');
          fragCurrent.loader.abort();
        }
        // destroy transmuxer to force init segment generation (following audio switch)
        this.resetTransmuxer();
        // switch to IDLE state to load new fragment
        this.resetLoadingState();
      } else if (this.audioOnly) {
        // Reset audio transmuxer so when switching back to main audio we're not still appending where we left off
        this.resetTransmuxer();
      }
      const hls = this.hls;
      // If switching from alt to main audio, flush all audio and trigger track switched
      if (fromAltAudio) {
        hls.trigger(Events.BUFFER_FLUSHING, {
          startOffset: 0,
          endOffset: Number.POSITIVE_INFINITY,
          type: 'audio',
        });
      }
      hls.trigger(Events.AUDIO_TRACK_SWITCHED, {
        id: trackId,
      });
    }
  }
  763.  
  764. private onAudioTrackSwitched(
  765. event: Events.AUDIO_TRACK_SWITCHED,
  766. data: AudioTrackSwitchedData
  767. ) {
  768. const trackId = data.id;
  769. const altAudio = !!this.hls.audioTracks[trackId].url;
  770. if (altAudio) {
  771. const videoBuffer = this.videoBuffer;
  772. // if we switched on alternate audio, ensure that main fragment scheduling is synced with video sourcebuffer buffered
  773. if (videoBuffer && this.mediaBuffer !== videoBuffer) {
  774. this.log(
  775. 'Switching on alternate audio, use video.buffered to schedule main fragment loading'
  776. );
  777. this.mediaBuffer = videoBuffer;
  778. }
  779. }
  780. this.altAudio = altAudio;
  781. this.tick();
  782. }
  783.  
  784. private onBufferCreated(
  785. event: Events.BUFFER_CREATED,
  786. data: BufferCreatedData
  787. ) {
  788. const tracks = data.tracks;
  789. let mediaTrack;
  790. let name;
  791. let alternate = false;
  792. for (const type in tracks) {
  793. const track = tracks[type];
  794. if (track.id === 'main') {
  795. name = type;
  796. mediaTrack = track;
  797. // keep video source buffer reference
  798. if (type === 'video') {
  799. const videoTrack = tracks[type];
  800. if (videoTrack) {
  801. this.videoBuffer = videoTrack.buffer;
  802. }
  803. }
  804. } else {
  805. alternate = true;
  806. }
  807. }
  808. if (alternate && mediaTrack) {
  809. this.log(
  810. `Alternate track found, use ${name}.buffered to schedule main fragment loading`
  811. );
  812. this.mediaBuffer = mediaTrack.buffer;
  813. } else {
  814. this.mediaBuffer = this.media;
  815. }
  816. }
  817.  
  /**
   * FRAG_BUFFERED handler for main fragments: records load throughput and the
   * previous fragment, then completes the buffering cycle. Aborted/stale
   * fragments (context changed mid-buffer) only reset PARSED back to IDLE.
   */
  private onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
    const { frag, part } = data;
    if (frag && frag.type !== PlaylistLevelType.MAIN) {
      return;
    }
    if (this.fragContextChanged(frag)) {
      // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
      // Avoid setting state back to IDLE, since that will interfere with a level switch
      this.warn(
        `Fragment ${frag.sn}${part ? ' p: ' + part.index : ''} of level ${
          frag.level
        } finished buffering, but was aborted. state: ${this.state}`
      );
      if (this.state === State.PARSED) {
        this.state = State.IDLE;
      }
      return;
    }
    const stats = part ? part.stats : frag.stats;
    // bits transferred over elapsed ms from first byte to buffered → kbit/s
    this.fragLastKbps = Math.round(
      (8 * stats.total) / (stats.buffering.end - stats.loading.first)
    );
    if (frag.sn !== 'initSegment') {
      this.fragPrevious = frag;
    }
    this.fragBufferedComplete(frag, part);
  }
  845.  
  /**
   * ERROR handler. Routes fragment/key load errors to the shared retry logic,
   * handles level load errors (fatal → ERROR state, non-fatal → possibly back
   * to IDLE), and recovers from BUFFER_FULL_ERROR by shrinking the max buffer
   * length or flushing the main buffer.
   */
  private onError(event: Events.ERROR, data: ErrorData) {
    switch (data.details) {
      case ErrorDetails.FRAG_LOAD_ERROR:
      case ErrorDetails.FRAG_LOAD_TIMEOUT:
      case ErrorDetails.KEY_LOAD_ERROR:
      case ErrorDetails.KEY_LOAD_TIMEOUT:
        this.onFragmentOrKeyLoadError(PlaylistLevelType.MAIN, data);
        break;
      case ErrorDetails.LEVEL_LOAD_ERROR:
      case ErrorDetails.LEVEL_LOAD_TIMEOUT:
        if (this.state !== State.ERROR) {
          if (data.fatal) {
            // if fatal error, stop processing
            this.warn(`${data.details}`);
            this.state = State.ERROR;
          } else {
            // in case of non fatal error while loading level, if level controller is not retrying to load level , switch back to IDLE
            if (!data.levelRetry && this.state === State.WAITING_LEVEL) {
              this.state = State.IDLE;
            }
          }
        }
        break;
      case ErrorDetails.BUFFER_FULL_ERROR:
        // if in appending state
        if (
          data.parent === 'main' &&
          (this.state === State.PARSING || this.state === State.PARSED)
        ) {
          let flushBuffer = true;
          const bufferedInfo = this.getFwdBufferInfo(
            this.media,
            PlaylistLevelType.MAIN
          );
          // 0.5 : tolerance needed as some browsers stalls playback before reaching buffered end
          // reduce max buf len if current position is buffered
          if (bufferedInfo && bufferedInfo.len > 0.5) {
            flushBuffer = !this.reduceMaxBufferLength(bufferedInfo.len);
          }
          if (flushBuffer) {
            // current position is not buffered, but browser is still complaining about buffer full error
            // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
            // in that case flush the whole buffer to recover
            this.warn(
              'buffer full error also media.currentTime is not buffered, flush main'
            );
            // flush main buffer
            this.immediateLevelSwitch();
          }
          this.resetLoadingState();
        }
        break;
      default:
        break;
    }
  }
  902.  
  903. // Checks the health of the buffer and attempts to resolve playback stalls.
  904. private checkBuffer() {
  905. const { media, gapController } = this;
  906. if (!media || !gapController || !media.readyState) {
  907. // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
  908. return;
  909. }
  910.  
  911. // Check combined buffer
  912. const buffered = BufferHelper.getBuffered(media);
  913.  
  914. if (!this.loadedmetadata && buffered.length) {
  915. this.loadedmetadata = true;
  916. this.seekToStartPos();
  917. } else {
  918. // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
  919. gapController.poll(this.lastCurrentTime);
  920. }
  921.  
  922. this.lastCurrentTime = media.currentTime;
  923. }
  924.  
  925. private onFragLoadEmergencyAborted() {
  926. this.state = State.IDLE;
  927. // if loadedmetadata is not set, it means that we are emergency switch down on first frag
  928. // in that case, reset startFragRequested flag
  929. if (!this.loadedmetadata) {
  930. this.startFragRequested = false;
  931. this.nextLoadPosition = this.startPosition;
  932. }
  933. this.tickImmediate();
  934. }
  935.  
  936. private onBufferFlushed(
  937. event: Events.BUFFER_FLUSHED,
  938. { type }: BufferFlushedData
  939. ) {
  940. if (
  941. type !== ElementaryStreamTypes.AUDIO ||
  942. (this.audioOnly && !this.altAudio)
  943. ) {
  944. const media =
  945. (type === ElementaryStreamTypes.VIDEO
  946. ? this.videoBuffer
  947. : this.mediaBuffer) || this.media;
  948. this.afterBufferFlushed(media, type, PlaylistLevelType.MAIN);
  949. }
  950. }
  951.  
  952. private onLevelsUpdated(
  953. event: Events.LEVELS_UPDATED,
  954. data: LevelsUpdatedData
  955. ) {
  956. this.levels = data.levels;
  957. }
  958.  
  959. public swapAudioCodec() {
  960. this.audioCodecSwap = !this.audioCodecSwap;
  961. }
  962.  
  963. /**
  964. * Seeks to the set startPosition if not equal to the mediaElement's current time.
  965. * @private
  966. */
  967. private seekToStartPos() {
  968. const { media } = this;
  969. const currentTime = media.currentTime;
  970. let startPosition = this.startPosition;
  971. // only adjust currentTime if different from startPosition or if startPosition not buffered
  972. // at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered
  973. if (startPosition >= 0 && currentTime < startPosition) {
  974. if (media.seeking) {
  975. logger.log(
  976. `could not seek to ${startPosition}, already seeking at ${currentTime}`
  977. );
  978. return;
  979. }
  980. const buffered = BufferHelper.getBuffered(media);
  981. const bufferStart = buffered.length ? buffered.start(0) : 0;
  982. const delta = bufferStart - startPosition;
  983. if (delta > 0 && delta < this.config.maxBufferHole) {
  984. logger.log(
  985. `adjusting start position by ${delta} to match buffer start`
  986. );
  987. startPosition += delta;
  988. this.startPosition = startPosition;
  989. }
  990. this.log(
  991. `seek to target start position ${startPosition} from current time ${currentTime}`
  992. );
  993. media.currentTime = startPosition;
  994. }
  995. }
  996.  
  997. private _getAudioCodec(currentLevel) {
  998. let audioCodec = this.config.defaultAudioCodec || currentLevel.audioCodec;
  999. if (this.audioCodecSwap && audioCodec) {
  1000. this.log('Swapping audio codec');
  1001. if (audioCodec.indexOf('mp4a.40.5') !== -1) {
  1002. audioCodec = 'mp4a.40.2';
  1003. } else {
  1004. audioCodec = 'mp4a.40.5';
  1005. }
  1006. }
  1007.  
  1008. return audioCodec;
  1009. }
  1010.  
  1011. private _loadBitrateTestFrag(frag: Fragment) {
  1012. this._doFragLoad(frag).then((data) => {
  1013. const { hls } = this;
  1014. if (!data || hls.nextLoadLevel || this.fragContextChanged(frag)) {
  1015. return;
  1016. }
  1017. this.fragLoadError = 0;
  1018. this.state = State.IDLE;
  1019. this.startFragRequested = false;
  1020. this.bitrateTest = false;
  1021. const stats = frag.stats;
  1022. // Bitrate tests fragments are neither parsed nor buffered
  1023. stats.parsing.start =
  1024. stats.parsing.end =
  1025. stats.buffering.start =
  1026. stats.buffering.end =
  1027. self.performance.now();
  1028. hls.trigger(Events.FRAG_LOADED, data as FragLoadedData);
  1029. });
  1030. }
  1031.  
// Receives a remuxed chunk from the transmuxer and routes its parts:
// buffers init segments, video and audio data, handles keyframe backtracking,
// and emits ID3/user-data parsing events.
private _handleTransmuxComplete(transmuxResult: TransmuxerResult) {
  const id = 'main';
  const { hls } = this;
  const { remuxResult, chunkMeta } = transmuxResult;

  // Resolve the frag/part/level this chunk belongs to; it may have been
  // evicted if the load context changed while transmuxing
  const context = this.getCurrentContext(chunkMeta);
  if (!context) {
    this.warn(
      `The loading context changed while buffering fragment ${chunkMeta.sn} of level ${chunkMeta.level}. This chunk will not be buffered.`
    );
    this.resetLiveStartWhenNotLoaded(chunkMeta.level);
    return;
  }
  const { frag, part, level } = context;
  const { video, text, id3, initSegment } = remuxResult;
  // The audio-stream-controller handles audio buffering if Hls.js is playing an alternate audio track
  const audio = this.altAudio ? undefined : remuxResult.audio;

  // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
  // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
  if (this.fragContextChanged(frag)) {
    return;
  }

  this.state = State.PARSING;

  if (initSegment) {
    if (initSegment.tracks) {
      // Configure source buffers and append the init segment before any media data
      this._bufferInitSegment(level, initSegment.tracks, frag, chunkMeta);
      hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT, {
        frag,
        id,
        tracks: initSegment.tracks,
      });
    }

    // This would be nice if Number.isFinite acted as a typeguard, but it doesn't. See: https://github.com/Microsoft/TypeScript/issues/10038
    const initPTS = initSegment.initPTS as number;
    const timescale = initSegment.timescale as number;
    if (Number.isFinite(initPTS)) {
      // Record the initial PTS for this discontinuity counter and notify listeners
      this.initPTS[frag.cc] = initPTS;
      hls.trigger(Events.INIT_PTS_FOUND, { frag, id, initPTS, timescale });
    }
  }

  // Avoid buffering if backtracking this fragment
  if (video && remuxResult.independent !== false) {
    if (level.details) {
      const { startPTS, endPTS, startDTS, endDTS } = video;
      if (part) {
        // Low-latency part: record the elementary stream range on the part itself
        part.elementaryStreams[video.type] = {
          startPTS,
          endPTS,
          startDTS,
          endDTS,
        };
      } else {
        if (video.firstKeyFrame && video.independent) {
          this.couldBacktrack = true;
        }
        if (video.dropped && video.independent) {
          // Backtrack if dropped frames create a gap after currentTime
          const pos = this.getLoadPosition() + this.config.maxBufferHole;
          if (pos < startPTS) {
            this.backtrack(frag);
            return;
          }
          // Set video stream start to fragment start so that truncated samples do not distort the timeline, and mark it partial
          frag.setElementaryStreamInfo(
            video.type as ElementaryStreamTypes,
            frag.start,
            endPTS,
            frag.start,
            endDTS,
            true
          );
        }
      }
      frag.setElementaryStreamInfo(
        video.type as ElementaryStreamTypes,
        startPTS,
        endPTS,
        startDTS,
        endDTS
      );
      this.bufferFragmentData(video, frag, part, chunkMeta);
    }
  } else if (remuxResult.independent === false) {
    // Chunk has no independent (key) frame: backtrack to find one
    this.backtrack(frag);
    return;
  }

  if (audio) {
    const { startPTS, endPTS, startDTS, endDTS } = audio;
    if (part) {
      part.elementaryStreams[ElementaryStreamTypes.AUDIO] = {
        startPTS,
        endPTS,
        startDTS,
        endDTS,
      };
    }
    frag.setElementaryStreamInfo(
      ElementaryStreamTypes.AUDIO,
      startPTS,
      endPTS,
      startDTS,
      endDTS
    );
    this.bufferFragmentData(audio, frag, part, chunkMeta);
  }

  // Forward timed ID3 metadata samples to listeners
  if (id3?.samples?.length) {
    const emittedID3: FragParsingMetadataData = {
      frag,
      id,
      samples: id3.samples,
    };
    hls.trigger(Events.FRAG_PARSING_METADATA, emittedID3);
  }
  // Forward user-data (e.g. captions) samples to listeners
  if (text) {
    const emittedText: FragParsingUserdataData = {
      frag,
      id,
      samples: text.samples,
    };
    hls.trigger(Events.FRAG_PARSING_USERDATA, emittedText);
  }
}
  1161.  
// Selects final codec strings for the parsed tracks (applying browser-specific
// workarounds), announces them via BUFFER_CODECS, and appends each track's
// init segment to its source buffer.
private _bufferInitSegment(
  currentLevel: Level,
  tracks: TrackSet,
  frag: Fragment,
  chunkMeta: ChunkMetadata
) {
  // Only act while actively parsing; stale calls are ignored
  if (this.state !== State.PARSING) {
    return;
  }

  this.audioOnly = !!tracks.audio && !tracks.video;

  // if audio track is expected to come from audio stream controller, discard any coming from main
  if (this.altAudio && !this.audioOnly) {
    delete tracks.audio;
  }
  // include levelCodec in audio and video tracks
  const { audio, video, audiovideo } = tracks;
  if (audio) {
    let audioCodec = currentLevel.audioCodec;
    const ua = navigator.userAgent.toLowerCase();
    if (this.audioCodecSwitch) {
      // Swap between AAC-LC (mp4a.40.2) and HE-AAC (mp4a.40.5)
      if (audioCodec) {
        if (audioCodec.indexOf('mp4a.40.5') !== -1) {
          audioCodec = 'mp4a.40.2';
        } else {
          audioCodec = 'mp4a.40.5';
        }
      }
      // In the case that AAC and HE-AAC audio codecs are signalled in manifest,
      // force HE-AAC, as it seems that most browsers prefers it.
      // don't force HE-AAC if mono stream, or in Firefox
      if (audio.metadata.channelCount !== 1 && ua.indexOf('firefox') === -1) {
        audioCodec = 'mp4a.40.5';
      }
    }
    // HE-AAC is broken on Android, always signal audio codec as AAC even if variant manifest states otherwise
    if (ua.indexOf('android') !== -1 && audio.container !== 'audio/mpeg') {
      // Exclude mpeg audio
      audioCodec = 'mp4a.40.2';
      this.log(`Android: force audio codec to ${audioCodec}`);
    }
    if (currentLevel.audioCodec && currentLevel.audioCodec !== audioCodec) {
      this.log(
        `Swapping manifest audio codec "${currentLevel.audioCodec}" for "${audioCodec}"`
      );
    }
    audio.levelCodec = audioCodec;
    audio.id = 'main';
    this.log(
      `Init audio buffer, container:${
        audio.container
      }, codecs[selected/level/parsed]=[${audioCodec || ''}/${
        currentLevel.audioCodec || ''
      }/${audio.codec}]`
    );
  }
  if (video) {
    video.levelCodec = currentLevel.videoCodec;
    video.id = 'main';
    this.log(
      `Init video buffer, container:${
        video.container
      }, codecs[level/parsed]=[${currentLevel.videoCodec || ''}/${
        video.codec
      }]`
    );
  }
  // Muxed audio+video handled as a single combined track
  if (audiovideo) {
    this.log(
      `Init audiovideo buffer, container:${
        audiovideo.container
      }, codecs[level/parsed]=[${currentLevel.attrs.CODECS || ''}/${
        audiovideo.codec
      }]`
    );
  }
  this.hls.trigger(Events.BUFFER_CODECS, tracks);
  // loop through tracks that are going to be provided to bufferController
  Object.keys(tracks).forEach((trackName) => {
    const track = tracks[trackName];
    const initSegment = track.initSegment;
    if (initSegment?.byteLength) {
      this.hls.trigger(Events.BUFFER_APPENDING, {
        type: trackName as SourceBufferName,
        data: initSegment,
        frag,
        part: null,
        chunkMeta,
        parent: frag.type,
      });
    }
  });
  // trigger handler right now
  this.tick();
}
  1258.  
  1259. private backtrack(frag: Fragment) {
  1260. this.couldBacktrack = true;
  1261. // Causes findFragments to backtrack through fragments to find the keyframe
  1262. this.resetTransmuxer();
  1263. this.flushBufferGap(frag);
  1264. const data = this.fragmentTracker.backtrack(frag);
  1265. this.fragPrevious = null;
  1266. this.nextLoadPosition = frag.start;
  1267. if (data) {
  1268. this.resetFragmentLoading(frag);
  1269. } else {
  1270. // Change state to BACKTRACKING so that fragmentEntity.backtrack data can be added after _doFragLoad
  1271. this.state = State.BACKTRACKING;
  1272. }
  1273. }
  1274.  
  1275. private checkFragmentChanged() {
  1276. const video = this.media;
  1277. let fragPlayingCurrent: Fragment | null = null;
  1278. if (video && video.readyState > 1 && video.seeking === false) {
  1279. const currentTime = video.currentTime;
  1280. /* if video element is in seeked state, currentTime can only increase.
  1281. (assuming that playback rate is positive ...)
  1282. As sometimes currentTime jumps back to zero after a
  1283. media decode error, check this, to avoid seeking back to
  1284. wrong position after a media decode error
  1285. */
  1286.  
  1287. if (BufferHelper.isBuffered(video, currentTime)) {
  1288. fragPlayingCurrent = this.getAppendedFrag(currentTime);
  1289. } else if (BufferHelper.isBuffered(video, currentTime + 0.1)) {
  1290. /* ensure that FRAG_CHANGED event is triggered at startup,
  1291. when first video frame is displayed and playback is paused.
  1292. add a tolerance of 100ms, in case current position is not buffered,
  1293. check if current pos+100ms is buffered and use that buffer range
  1294. for FRAG_CHANGED event reporting */
  1295. fragPlayingCurrent = this.getAppendedFrag(currentTime + 0.1);
  1296. }
  1297. if (fragPlayingCurrent) {
  1298. const fragPlaying = this.fragPlaying;
  1299. const fragCurrentLevel = fragPlayingCurrent.level;
  1300. if (
  1301. !fragPlaying ||
  1302. fragPlayingCurrent.sn !== fragPlaying.sn ||
  1303. fragPlaying.level !== fragCurrentLevel ||
  1304. fragPlayingCurrent.urlId !== fragPlaying.urlId
  1305. ) {
  1306. this.hls.trigger(Events.FRAG_CHANGED, { frag: fragPlayingCurrent });
  1307. if (!fragPlaying || fragPlaying.level !== fragCurrentLevel) {
  1308. this.hls.trigger(Events.LEVEL_SWITCHED, {
  1309. level: fragCurrentLevel,
  1310. });
  1311. }
  1312. this.fragPlaying = fragPlayingCurrent;
  1313. }
  1314. }
  1315. }
  1316. }
  1317.  
  1318. get nextLevel() {
  1319. const frag = this.nextBufferedFrag;
  1320. if (frag) {
  1321. return frag.level;
  1322. } else {
  1323. return -1;
  1324. }
  1325. }
  1326.  
  1327. get currentLevel() {
  1328. const media = this.media;
  1329. if (media) {
  1330. const fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
  1331. if (fragPlayingCurrent) {
  1332. return fragPlayingCurrent.level;
  1333. }
  1334. }
  1335. return -1;
  1336. }
  1337.  
  1338. get nextBufferedFrag() {
  1339. const media = this.media;
  1340. if (media) {
  1341. // first get end range of current fragment
  1342. const fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
  1343. return this.followingBufferedFrag(fragPlayingCurrent);
  1344. } else {
  1345. return null;
  1346. }
  1347. }
  1348.  
// Read-only accessor for the private _forceStartLoad flag
// (NOTE(review): set elsewhere in the class — presumably when startLoad is
// requested before media attach; confirm against base controller)
get forceStartLoad() {
  return this._forceStartLoad;
}
  1352. }