| Language / Name: |
diff --git a/demo/main.js b/demo/main.js
index 4ae20f18d6a..b63fca983a8 100644
--- a/demo/main.js
+++ b/demo/main.js
@@ -1393,9 +1393,12 @@ function updateAudioTrackInfo() {
const buttonEnabled = 'btn-primary" ';
const buttonDisabled = 'btn-success" ';
let html1 = '';
+ let html2 = '';
const audioTrackId = hls.audioTrack;
+ const nextAudioTrackId = hls.nextAudioTrack;
const len = hls.audioTracks.length;
const track = hls.audioTracks[audioTrackId];
+ const nextTrack = hls.audioTracks[nextAudioTrackId];
for (let i = 0; i < len; i++) {
html1 += buttonTemplate;
@@ -1413,10 +1416,30 @@ function updateAudioTrackInfo() {
'';
}
+ for (let i = 0; i < len; i++) {
+ html2 += buttonTemplate;
+ if (nextAudioTrackId === i) {
+ html2 += buttonEnabled;
+ } else {
+ html2 += buttonDisabled;
+ }
+
+ html2 +=
+ 'onclick="hls.nextAudioTrack=' +
+ i +
+ '">' +
+ hls.audioTracks[i].name +
+ '';
+ }
+
$('#audioTrackLabel').text(
track ? track.lang || track.name : 'None selected'
);
$('#audioTrackControl').html(html1);
+ $('#nextAudioTrackLabel').text(
+ nextTrack ? nextTrack.lang || nextTrack.name : 'None selected'
+ );
+ $('#nextAudioTrackControl').html(html2);
}
function codecs2label(levelCodecs) {
diff --git a/docs/API.md b/docs/API.md
index e376be741c5..73002f31361 100644
--- a/docs/API.md
+++ b/docs/API.md
@@ -182,6 +182,7 @@ See [API Reference](https://hlsjs-dev.video-dev.org/api-docs/) for a complete li
- [`hls.allAudioTracks`](#hlsallaudiotracks)
- [`hls.audioTracks`](#hlsaudiotracks)
- [`hls.audioTrack`](#hlsaudiotrack)
+ - [`hls.nextAudioTrack`](#hlsnextaudiotrack)
- [Subtitle Tracks Control API](#subtitle-tracks-control-api)
- [`hls.setSubtitleOption(subtitleOption)`](#hlssetsubtitleoptionsubtitleoption)
- [`hls.allSubtitleTracks`](#hlsallsubtitletracks)
@@ -2101,6 +2102,10 @@ get : array of supported audio tracks in the active audio group ID
get/set : index of selected audio track in `hls.audioTracks`
+### `hls.nextAudioTrack`
+
+get/set : index of the next audio track that will be selected, allowing for seamless audio track switching
+
## Subtitle Tracks Control API
### `hls.setSubtitleOption(subtitleOption)`
@@ -2437,7 +2442,7 @@ Full list of Events is available below:
- `Hls.Events.AUDIO_TRACKS_UPDATED` - fired to notify that audio track lists has been updated
- data: { audioTracks : audioTracks }
- `Hls.Events.AUDIO_TRACK_SWITCHING` - fired when an audio track switching is requested
- - data: { id : audio track id, type : playlist type ('AUDIO' | 'main'), url : audio track URL }
+ - data: { id : audio track id, type : playlist type ('AUDIO' | 'main'), url : audio track URL, flushImmediate: boolean indicating whether audio buffer should be flushed immediately when switching }
- `Hls.Events.AUDIO_TRACK_SWITCHED` - fired when an audio track switch actually occurs
- data: { id : audio track id }
- `Hls.Events.AUDIO_TRACK_LOADING` - fired when an audio track loading starts
diff --git a/src/config.ts b/src/config.ts
index 24316382c05..3c35f143e39 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -224,6 +224,7 @@ export type StreamControllerConfig = {
testBandwidth: boolean;
liveSyncMode?: 'edge' | 'buffered';
startOnSegmentBoundary: boolean;
+ nextAudioTrackBufferFlushForwardOffset: number;
};
export type GapControllerConfig = {
@@ -380,6 +381,7 @@ export const hlsDefaultConfig: HlsConfig = {
backBufferLength: Infinity, // used by buffer-controller
frontBufferFlushThreshold: Infinity,
startOnSegmentBoundary: false, // used by stream-controller
+ nextAudioTrackBufferFlushForwardOffset: 0.25, // used by stream-controller
maxBufferSize: 60 * 1000 * 1000, // used by stream-controller
maxFragLookUpTolerance: 0.25, // used by stream-controller
maxBufferHole: 0.1, // used by stream-controller and gap-controller
diff --git a/src/controller/audio-stream-controller.ts b/src/controller/audio-stream-controller.ts
index 11aa46f877f..8ea97f8e4b6 100644
--- a/src/controller/audio-stream-controller.ts
+++ b/src/controller/audio-stream-controller.ts
@@ -19,6 +19,7 @@ import {
useAlternateAudio,
} from '../utils/rendition-helper';
import type { FragmentTracker } from './fragment-tracker';
+import type { Bufferable } from '../hls';
import type Hls from '../hls';
import type { Fragment, MediaFragment, Part } from '../loader/fragment';
import type KeyLoader from '../loader/key-loader';
@@ -64,8 +65,9 @@ class AudioStreamController
private mainFragLoading: FragLoadingData | null = null;
private audioOnly: boolean = false;
private bufferedTrack: MediaPlaylist | null = null;
- private switchingTrack: MediaPlaylist | null = null;
+ private switchingTrack: AudioTrackSwitchingData | null = null;
private trackId: number = -1;
+ private nextTrackId: number = -1;
private waitingData: WaitingForPTSData | null = null;
private mainDetails: LevelDetails | null = null;
private flushing: boolean = false;
@@ -177,7 +179,11 @@ class AudioStreamController
}
protected getLoadPosition(): number {
- if (!this.startFragRequested && this.nextLoadPosition >= 0) {
+ if (
+ !this.startFragRequested &&
+ this.nextLoadPosition >= 0 &&
+ this.switchingTrack?.flushImmediate !== false
+ ) {
return this.nextLoadPosition;
}
return super.getLoadPosition();
@@ -307,6 +313,7 @@ class AudioStreamController
}
this.lastCurrentTime = media.currentTime;
+ this.checkFragmentChanged();
}
private doTickIdle() {
@@ -345,11 +352,7 @@ class AudioStreamController
const bufferable = this.mediaBuffer ? this.mediaBuffer : this.media;
if (this.bufferFlushed && bufferable) {
this.bufferFlushed = false;
- this.afterBufferFlushed(
- bufferable,
- ElementaryStreamTypes.AUDIO,
- PlaylistLevelType.AUDIO,
- );
+ this.afterBufferFlushed(bufferable, ElementaryStreamTypes.AUDIO);
}
const bufferInfo = this.getFwdBufferInfo(
@@ -372,7 +375,11 @@ class AudioStreamController
const fragments = trackDetails.fragments;
const start = fragments[0].start;
const loadPosition = this.getLoadPosition();
- const targetBufferTime = this.flushing ? loadPosition : bufferInfo.end;
+ const targetBufferTime =
+ this.flushing ||
+ (this.switchingTrack && !this.switchingTrack.flushImmediate)
+ ? loadPosition
+ : bufferInfo.end;
if (this.switchingTrack && media) {
const pos = loadPosition;
@@ -488,6 +495,10 @@ class AudioStreamController
if (altAudio) {
this.switchingTrack = data;
// main audio track are handled by stream-controller, just do something if switching to alt audio track
+ if (!data.flushImmediate) {
+ this.nextTrackId = data.id;
+ this.nextLevelSwitch();
+ }
this.flushAudioIfNeeded(data);
if (this.state !== State.STOPPED) {
// switching to audio track, start timer if not already started
@@ -758,8 +769,6 @@ class AudioStreamController
const track = this.switchingTrack;
if (track) {
this.bufferedTrack = track;
- this.switchingTrack = null;
- this.hls.trigger(Events.AUDIO_TRACK_SWITCHED, { ...track });
}
}
this.fragBufferedComplete(frag, part);
@@ -768,6 +777,28 @@ class AudioStreamController
}
}
+ protected getBufferOutput(): Bufferable | null {
+ return this.mediaBuffer ? this.mediaBuffer : this.media;
+ }
+
+ protected checkFragmentChanged() {
+ const previousFrag = this.fragPlaying;
+ const fragChanged = super.checkFragmentChanged();
+ if (!fragChanged) {
+ return false;
+ }
+
+ const fragPlaying = this.fragPlaying;
+ const fragPreviousLevel = previousFrag?.level;
+ if (!fragPlaying || fragPlaying.level !== fragPreviousLevel) {
+ this.cleanupBackBuffer();
+ if (this.switchingTrack) {
+ this.completeAudioSwitch(this.switchingTrack);
+ }
+ }
+ return true;
+ }
+
protected onError(event: Events.ERROR, data: ErrorData) {
if (data.fatal) {
this.state = State.ERROR;
@@ -842,7 +873,7 @@ class AudioStreamController
}
const mediaBuffer = this.mediaBuffer || this.media;
if (mediaBuffer) {
- this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.AUDIO);
+ this.afterBufferFlushed(mediaBuffer, type);
this.tick();
}
}
@@ -870,8 +901,18 @@ class AudioStreamController
}
this.state = State.PARSING;
- if (this.switchingTrack && audio) {
- this.completeAudioSwitch(this.switchingTrack);
+ if (audio && this.switchingTrack && !this.switchingTrack.flushImmediate) {
+ const { config } = this;
+ const bufferFlushDelay = config.nextAudioTrackBufferFlushForwardOffset;
+ const startOffset = Math.max(
+ this.getLoadPosition() + bufferFlushDelay,
+ frag.start,
+ );
+ super.flushMainBuffer(
+ startOffset,
+ Number.POSITIVE_INFINITY,
+ PlaylistLevelType.AUDIO,
+ );
}
if (initSegment?.tracks) {
@@ -1022,8 +1063,8 @@ class AudioStreamController
}
}
- private flushAudioIfNeeded(switchingTrack: MediaPlaylist) {
- if (this.media && this.bufferedTrack) {
+ private flushAudioIfNeeded(switchingTrack: AudioTrackSwitchingData) {
+ if (this.media && this.bufferedTrack && switchingTrack.flushImmediate) {
const { name, lang, assocLang, characteristics, audioCodec, channels } =
this.bufferedTrack;
if (
@@ -1045,12 +1086,18 @@ class AudioStreamController
}
}
- private completeAudioSwitch(switchingTrack: MediaPlaylist) {
+ private completeAudioSwitch(switchingTrack: AudioTrackSwitchingData) {
const { hls } = this;
- this.flushAudioIfNeeded(switchingTrack);
this.bufferedTrack = switchingTrack;
this.switchingTrack = null;
hls.trigger(Events.AUDIO_TRACK_SWITCHED, { ...switchingTrack });
}
+
+ /**
+ * Index of next audio track loaded as scheduled by audio stream controller.
+ */
+ get nextAudioTrack(): number {
+ return this.nextTrackId;
+ }
}
export default AudioStreamController;
diff --git a/src/controller/audio-track-controller.ts b/src/controller/audio-track-controller.ts
index 381dea97268..6e4473ea178 100644
--- a/src/controller/audio-track-controller.ts
+++ b/src/controller/audio-track-controller.ts
@@ -247,6 +247,16 @@ class AudioTrackController extends BasePlaylistController {
this.setAudioTrack(newId);
}
+ get nextAudioTrack(): number {
+ return this.trackId;
+ }
+
+ set nextAudioTrack(newId: number) {
+ // If audio track is selected from API then don't choose from the manifest default track
+ this.selectDefaultTrack = false;
+ this.setAudioTrack(newId, false);
+ }
+
public setAudioOption(
audioOption: MediaPlaylist | AudioSelectionOption | undefined,
): MediaPlaylist | null {
@@ -310,7 +320,7 @@ class AudioTrackController extends BasePlaylistController {
return null;
}
- private setAudioTrack(newId: number): void {
+ private setAudioTrack(newId: number, flushImmediate: boolean = true): void {
const tracks = this.tracksInGroup;
// check if level idx is valid
@@ -331,7 +341,10 @@ class AudioTrackController extends BasePlaylistController {
);
this.trackId = newId;
this.currentTrack = track;
- this.hls.trigger(Events.AUDIO_TRACK_SWITCHING, { ...track });
+ this.hls.trigger(Events.AUDIO_TRACK_SWITCHING, {
+ ...track,
+ flushImmediate,
+ });
// Do not reload track unless live
if (trackLoaded) {
return;
diff --git a/src/controller/base-stream-controller.ts b/src/controller/base-stream-controller.ts
index c7c32f2771d..e6c2d7bac97 100644
--- a/src/controller/base-stream-controller.ts
+++ b/src/controller/base-stream-controller.ts
@@ -39,6 +39,7 @@ import {
getPartWith,
updateFragPTSDTS,
} from '../utils/level-helper';
+import { estimatedAudioBitrate } from '../utils/mediacapabilities-helper';
import { appendUint8Array } from '../utils/mp4-tools';
import TimeRanges from '../utils/time-ranges';
import type { FragmentTracker } from './fragment-tracker';
@@ -100,6 +101,7 @@ export default class BaseStreamController
protected fragPrevious: MediaFragment | null = null;
protected fragCurrent: Fragment | null = null;
+ protected fragPlaying: Fragment | null = null;
protected fragmentTracker: FragmentTracker;
protected transmuxer: TransmuxerInterface | null = null;
protected _state: (typeof State)[keyof typeof State] = State.STOPPED;
@@ -198,6 +200,34 @@ export default class BaseStreamController
return this.buffering;
}
+ /**
+ * Get backtrack fragment. Returns null in base class.
+ * Override in stream-controller to return actual backtrack fragment.
+ */
+ protected get backtrackFragment(): Fragment | undefined {
+ return undefined;
+ }
+
+ /**
+ * Set backtrack fragment. No-op in base class.
+ * Override in stream-controller to set actual backtrack fragment.
+ */
+ protected set backtrackFragment(_value: Fragment | undefined) {}
+
+ /**
+ * Get could backtrack flag. Returns false in base class.
+ * Override in stream-controller to return actual value.
+ */
+ protected get couldBacktrack(): boolean {
+ return false;
+ }
+
+ /**
+ * Set could backtrack flag. No-op in base class.
+ * Override in stream-controller to set actual value.
+ */
+ protected set couldBacktrack(_value: boolean) {}
+
public pauseBuffering() {
this.buffering = false;
}
@@ -304,6 +334,7 @@ export default class BaseStreamController
event: Events.MEDIA_DETACHING,
data: MediaDetachingData,
) {
+ this.fragPlaying = null;
const transferringMedia = !!data.transferMedia;
const media = this.media;
if (media === null) {
@@ -335,7 +366,11 @@ export default class BaseStreamController
protected onManifestLoading() {
this.initPTS = [];
- this.levels = this.levelLastLoaded = this.fragCurrent = null;
+ this.fragPlaying =
+ this.levels =
+ this.levelLastLoaded =
+ this.fragCurrent =
+ null;
this.lastCurrentTime = this.startPosition = 0;
this.startFragRequested = false;
}
@@ -573,9 +608,7 @@ export default class BaseStreamController
bufferedInfo ? bufferedInfo.len : this.config.maxBufferLength,
);
// If backtracking, always remove from the tracker without reducing max buffer length
- const backtrackFragment = (this as any).backtrackFragment as
- | Fragment
- | undefined;
+ const backtrackFragment = this.backtrackFragment as Fragment | undefined;
const backtracked = backtrackFragment
? (frag.sn as number) - (backtrackFragment.sn as number)
: 0;
@@ -1321,12 +1354,9 @@ export default class BaseStreamController
return false;
}
- protected getAppendedFrag(
- position: number,
- playlistType: PlaylistLevelType = PlaylistLevelType.MAIN,
- ): Fragment | null {
+ protected getAppendedFrag(position: number): Fragment | null {
const fragOrPart = (this.fragmentTracker as any)
- ? this.fragmentTracker.getAppendedFrag(position, playlistType)
+ ? this.fragmentTracker.getAppendedFrag(position, this.playlistType)
: null;
if (fragOrPart && 'fragment' in fragOrPart) {
return fragOrPart.fragment;
@@ -2039,7 +2069,6 @@ export default class BaseStreamController
protected afterBufferFlushed(
media: Bufferable,
bufferType: SourceBufferName,
- playlistType: PlaylistLevelType,
) {
if (!media) {
return;
@@ -2050,7 +2079,7 @@ export default class BaseStreamController
this.fragmentTracker.detectEvictedFragments(
bufferType,
bufferedTimeRanges,
- playlistType,
+ this.playlistType,
);
if (this.state === State.ENDED) {
this.resetLoadingState();
@@ -2261,6 +2290,235 @@ export default class BaseStreamController
get state(): (typeof State)[keyof typeof State] {
return this._state;
}
+
+ /**
+ * Calculate optimal switch point by considering fetch delays and buffer info
+ * to avoid causing playback interruption
+ */
+ protected calculateOptimalSwitchPoint(
+ nextLevel: Level,
+ bufferInfo: BufferInfo,
+ ): { fetchdelay: number; okToFlushForwardBuffer: boolean } {
+ let fetchdelay = 0;
+ const { hls, media, config, levels, playlistType } = this;
+ const levelDetails = this.getLevelDetails();
+ if (media && !media.paused && levels) {
+ const maxBitrate =
+ playlistType === PlaylistLevelType.AUDIO
+ ? estimatedAudioBitrate(nextLevel.audioCodec, 128000)
+ : nextLevel.maxBitrate;
+ // add a safety delay of 1s
+ const ttfbSec = 1 + hls.ttfbEstimate / 1000;
+ const bandwidth = hls.bandwidthEstimate * config.abrBandWidthUpFactor;
+ const fragDuration =
+ (levelDetails &&
+ (this.loadingParts
+ ? levelDetails.partTarget
+ : levelDetails.averagetargetduration)) ||
+ this.fragCurrent?.duration ||
+ 6;
+ fetchdelay = ttfbSec + (maxBitrate * fragDuration) / bandwidth;
+ if (!nextLevel.details) {
+ fetchdelay += ttfbSec;
+ }
+ }
+
+ const currentTime = this.media?.currentTime || this.getLoadPosition();
+ // Do not flush in live stream with low buffer
+ const okToFlushForwardBuffer =
+ !levelDetails?.live || bufferInfo.end - currentTime > fetchdelay * 1.5;
+
+ return { fetchdelay, okToFlushForwardBuffer };
+ }
+
+ /**
+ * Generic track switching scheduler that prevents buffering interruptions
+ * by finding optimal flush points in the buffer
+ * This method can be overridden by subclasses with specific implementation details
+ */
+ protected scheduleTrackSwitch(
+ bufferInfo: BufferInfo,
+ fetchdelay: number,
+ okToFlushForwardBuffer: boolean,
+ ): void {
+ const { media, playlistType } = this;
+ if (!media || !bufferInfo) {
+ return;
+ }
+
+ // find buffer range that will be reached once new fragment will be fetched
+ const bufferedFrag = okToFlushForwardBuffer
+ ? this.getBufferedFrag(this.getLoadPosition() + fetchdelay)
+ : null;
+
+ if (bufferedFrag) {
+ // we can flush buffer range following this one without stalling playback
+ const nextBufferedFrag = this.followingBufferedFrag(bufferedFrag);
+ if (nextBufferedFrag) {
+ // if we are here, we can also cancel any loading/demuxing in progress, as they are useless
+ this.abortCurrentFrag();
+ // start flush position is in next buffered frag. Leave some padding for non-independent segments and smoother playback.
+ const maxStart = nextBufferedFrag.maxStartPTS
+ ? nextBufferedFrag.maxStartPTS
+ : nextBufferedFrag.start;
+ const fragDuration = nextBufferedFrag.duration;
+ const startPts = Math.max(
+ bufferedFrag.end,
+ maxStart +
+ Math.min(
+ Math.max(
+ fragDuration - this.config.maxFragLookUpTolerance,
+ fragDuration * (this.couldBacktrack ? 0.5 : 0.125),
+ ),
+ fragDuration * (this.couldBacktrack ? 0.75 : 0.25),
+ ),
+ );
+ const bufferType =
+ playlistType === PlaylistLevelType.MAIN ? null : 'audio';
+ // Flush forward buffer from next buffered frag start to infinity
+ this.flushMainBuffer(startPts, Number.POSITIVE_INFINITY, bufferType);
+ // Flush back buffer (excluding current fragment)
+ this.cleanupBackBuffer();
+ }
+ }
+ }
+
+ /**
+ * Handle back-buffer cleanup during track switching
+ */
+ protected cleanupBackBuffer(): void {
+ const { media, playlistType } = this;
+ if (!media) {
+ return;
+ }
+
+ // remove back-buffer
+ const fragPlayingCurrent = this.getAppendedFrag(this.getLoadPosition());
+ if (fragPlayingCurrent && fragPlayingCurrent.start > 1) {
+ const isAudio = playlistType === PlaylistLevelType.AUDIO;
+ // flush buffer preceding current fragment (flush until current fragment start offset)
+ // minus 1s to avoid video freezing, that could happen if we flush keyframe of current video ...
+ this.flushMainBuffer(
+ 0,
+ fragPlayingCurrent.start - (isAudio ? 0 : 1),
+ isAudio ? 'audio' : null,
+ );
+ }
+ }
+
+ /**
+ * Gets buffered fragment at the specified position
+ */
+ protected getBufferedFrag(position: number): Fragment | null {
+ return this.fragmentTracker.getBufferedFrag(position, this.playlistType);
+ }
+
+ /**
+ * Gets the next buffered fragment following the given fragment
+ */
+ protected followingBufferedFrag(frag: Fragment | null): Fragment | null {
+ if (frag) {
+ // try to get range of next fragment (500ms after this range)
+ return this.getBufferedFrag(frag.end + 0.5);
+ }
+ return null;
+ }
+
+ /**
+ * Aborts the current fragment loading and resets state
+ * Can be overridden by subclasses for specific behavior
+ */
+ protected abortCurrentFrag(): void {
+ const fragCurrent = this.fragCurrent;
+ this.fragCurrent = null;
+ if (fragCurrent) {
+ fragCurrent.abortRequests();
+ this.fragmentTracker.removeFragment(fragCurrent);
+ }
+ switch (this.state) {
+ case State.KEY_LOADING:
+ case State.FRAG_LOADING:
+ case State.FRAG_LOADING_WAITING_RETRY:
+ case State.PARSING:
+ case State.PARSED:
+ this.state = State.IDLE;
+ break;
+ }
+ this.nextLoadPosition = this.getLoadPosition();
+ }
+
+ protected checkFragmentChanged(): boolean {
+ const video = this.media;
+ let fragPlayingCurrent: Fragment | null = null;
+ if (video && video.readyState > 1 && video.seeking === false) {
+ const currentTime = video.currentTime;
+ /* if video element is in seeked state, currentTime can only increase.
+ (assuming that playback rate is positive ...)
+ As sometimes currentTime jumps back to zero after a
+ media decode error, check this, to avoid seeking back to
+ wrong position after a media decode error
+ */
+
+ if (BufferHelper.isBuffered(video, currentTime)) {
+ fragPlayingCurrent = this.getAppendedFrag(currentTime);
+ } else if (BufferHelper.isBuffered(video, currentTime + 0.1)) {
+ /* ensure that FRAG_CHANGED event is triggered at startup,
+ when first video frame is displayed and playback is paused.
+ add a tolerance of 100ms, in case current position is not buffered,
+ check if current pos+100ms is buffered and use that buffer range
+ for FRAG_CHANGED event reporting */
+ fragPlayingCurrent = this.getAppendedFrag(currentTime + 0.1);
+ }
+ if (fragPlayingCurrent) {
+ this.backtrackFragment = undefined;
+ const fragPlaying = this.fragPlaying;
+ const fragCurrentLevel = fragPlayingCurrent.level;
+ if (
+ !fragPlaying ||
+ fragPlayingCurrent.sn !== fragPlaying.sn ||
+ fragPlaying.level !== fragCurrentLevel
+ ) {
+ this.fragPlaying = fragPlayingCurrent;
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ protected getBufferOutput(): Bufferable | null {
+ return null;
+ }
+
+ /**
+ * try to switch ASAP without breaking video playback:
+ * in order to ensure smooth but quick level switching,
+ * we need to find the next flushable buffer range
+ * we should take into account new segment fetch time
+ */
+ public nextLevelSwitch() {
+ const { levels, media, hls, config, playlistType } = this;
+ // ensure that media is defined and that metadata are available (to retrieve currentTime)
+ if (media?.readyState && levels && hls && config) {
+ const bufferOutput = this.getBufferOutput();
+ const bufferInfo = this.getFwdBufferInfo(bufferOutput, playlistType);
+ if (!bufferInfo) {
+ return;
+ }
+
+ const nextLevelId =
+ playlistType === PlaylistLevelType.AUDIO
+ ? hls.nextAudioTrack
+ : hls.nextLoadLevel;
+ const nextLevel = levels[nextLevelId];
+
+ const { fetchdelay, okToFlushForwardBuffer } =
+ this.calculateOptimalSwitchPoint(nextLevel, bufferInfo);
+
+ this.scheduleTrackSwitch(bufferInfo, fetchdelay, okToFlushForwardBuffer);
+ }
+ this.tickImmediate();
+ }
}
function interstitialsEnabled(config: HlsConfig): boolean {
diff --git a/src/controller/stream-controller.ts b/src/controller/stream-controller.ts
index e01c232ca7e..68b063943b1 100644
--- a/src/controller/stream-controller.ts
+++ b/src/controller/stream-controller.ts
@@ -49,16 +49,16 @@ import type {
import type { Level } from '../types/level';
import type { Track, TrackSet } from '../types/track';
import type { TransmuxerResult } from '../types/transmuxer';
-import type { BufferInfo } from '../utils/buffer-helper';
+import type { Bufferable, BufferInfo } from '../utils/buffer-helper';
-const TICK_INTERVAL = 100; // how often to tick in ms
-
-const enum AlternateAudio {
+export const enum AlternateAudio {
DISABLED = 0,
SWITCHING,
SWITCHED,
}
+const TICK_INTERVAL = 100; // how often to tick in ms
+
export default class StreamController
extends BaseStreamController
implements NetworkComponentAPI
@@ -69,9 +69,8 @@ export default class StreamController
private _hasEnoughToStart: boolean = false;
private altAudio: AlternateAudio = AlternateAudio.DISABLED;
private audioOnly: boolean = false;
- private fragPlaying: Fragment | null = null;
- private couldBacktrack: boolean = false;
- private backtrackFragment: Fragment | null = null;
+ private _couldBacktrack: boolean = false;
+ private _backtrackFragment: Fragment | undefined = undefined;
private audioCodecSwitch: boolean = false;
private videoBuffer: ExtendedSourceBuffer | null = null;
@@ -317,7 +316,7 @@ export default class StreamController
this.backtrackFragment &&
this.backtrackFragment.start > bufferInfo.end
) {
- this.backtrackFragment = null;
+ this.backtrackFragment = undefined;
}
const targetBufferTime = this.backtrackFragment
? this.backtrackFragment.start
@@ -339,7 +338,7 @@ export default class StreamController
this.fragmentTracker.removeFragment(backtrackFrag);
}
} else if (this.backtrackFragment && bufferInfo.len) {
- this.backtrackFragment = null;
+ this.backtrackFragment = undefined;
}
// Avoid loop loading by using nextLoadPosition set for backtracking and skipping consecutive GAP tags
if (frag && this.isLoopLoading(frag, targetBufferTime)) {
@@ -355,7 +354,7 @@ export default class StreamController
? this.videoBuffer
: this.mediaBuffer) || this.media;
if (mediaBuffer) {
- this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.MAIN);
+ this.afterBufferFlushed(mediaBuffer, type);
}
}
frag = this.getNextFragmentLoopLoading(
@@ -402,21 +401,6 @@ export default class StreamController
}
}
- private getBufferedFrag(position: number) {
- return this.fragmentTracker.getBufferedFrag(
- position,
- PlaylistLevelType.MAIN,
- );
- }
-
- private followingBufferedFrag(frag: Fragment | null) {
- if (frag) {
- // try to get range of next fragment (500ms after this range)
- return this.getBufferedFrag(frag.end + 0.5);
- }
- return null;
- }
-
/*
on immediate level switch :
- pause playback if playing
@@ -429,106 +413,67 @@ export default class StreamController
}
/**
- * try to switch ASAP without breaking video playback:
- * in order to ensure smooth but quick level switching,
- * we need to find the next flushable buffer range
- * we should take into account new segment fetch time
+ * Get the buffer output to use for buffer calculations.
+ * Override to use altAudio logic in stream-controller.
*/
- public nextLevelSwitch() {
- const { levels, media, hls, config } = this;
- // ensure that media is defined and that metadata are available (to retrieve currentTime)
- if (media?.readyState && levels && hls && config) {
- const bufferInfo = this.getMainFwdBufferInfo();
- if (!bufferInfo) {
- return;
- }
- const levelDetails = this.getLevelDetails();
-
- let fetchdelay = 0;
- if (!media.paused) {
- // add a safety delay of 1s
- const ttfbSec = 1 + hls.ttfbEstimate / 1000;
- const bandwidth = hls.bandwidthEstimate * config.abrBandWidthUpFactor;
- const nextLevelId = hls.nextLoadLevel;
- const nextLevel = levels[nextLevelId];
- const fragDuration =
- (levelDetails &&
- (this.loadingParts
- ? levelDetails.partTarget
- : levelDetails.averagetargetduration)) ||
- this.fragCurrent?.duration ||
- 6;
- fetchdelay =
- ttfbSec + (nextLevel.maxBitrate * fragDuration) / bandwidth;
- if (!nextLevel.details) {
- fetchdelay += ttfbSec;
- }
- }
-
- // Do not flush in live stream with low buffer
-
- const okToFlushForwardBuffer =
- !levelDetails?.live ||
- (bufferInfo.len || 0) > levelDetails.targetduration * 2;
-
- // find buffer range that will be reached once new fragment will be fetched
- const bufferedFrag = okToFlushForwardBuffer
- ? this.getBufferedFrag(media.currentTime + fetchdelay)
- : null;
- if (bufferedFrag) {
- // we can flush buffer range following this one without stalling playback
- const nextBufferedFrag = this.followingBufferedFrag(bufferedFrag);
- if (nextBufferedFrag) {
- // if we are here, we can also cancel any loading/demuxing in progress, as they are useless
- this.abortCurrentFrag();
- // start flush position is in next buffered frag. Leave some padding for non-independent segments and smoother playback.
- const maxStart = nextBufferedFrag.maxStartPTS
- ? nextBufferedFrag.maxStartPTS
- : nextBufferedFrag.start;
- const fragDuration = nextBufferedFrag.duration;
- const startPts = Math.max(
- bufferedFrag.end,
- maxStart +
- Math.min(
- Math.max(
- fragDuration - this.config.maxFragLookUpTolerance,
- fragDuration * (this.couldBacktrack ? 0.5 : 0.125),
- ),
- fragDuration * (this.couldBacktrack ? 0.75 : 0.25),
- ),
- );
- this.flushMainBuffer(startPts, Number.POSITIVE_INFINITY);
- }
- }
- // remove back-buffer
- const fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
- if (fragPlayingCurrent && fragPlayingCurrent.start > 1) {
- // flush buffer preceding current fragment (flush until current fragment start offset)
- // minus 1s to avoid video freezing, that could happen if we flush keyframe of current video ...
- this.flushMainBuffer(0, fragPlayingCurrent.start - 1);
- }
+ protected getBufferOutput(): Bufferable | null {
+ if (this.mediaBuffer && this.altAudio === AlternateAudio.SWITCHED) {
+ return this.mediaBuffer;
}
- this.tickImmediate();
+ return this.media;
}
- private abortCurrentFrag() {
- const fragCurrent = this.fragCurrent;
- this.fragCurrent = null;
- this.backtrackFragment = null;
- if (fragCurrent) {
- fragCurrent.abortRequests();
- this.fragmentTracker.removeFragment(fragCurrent);
+ protected checkFragmentChanged(): boolean {
+ const previousFrag = this.fragPlaying;
+ const fragChanged = super.checkFragmentChanged();
+ if (!fragChanged) {
+ return false;
}
- switch (this.state) {
- case State.KEY_LOADING:
- case State.FRAG_LOADING:
- case State.FRAG_LOADING_WAITING_RETRY:
- case State.PARSING:
- case State.PARSED:
- this.state = State.IDLE;
- break;
+
+ const fragPlaying = this.fragPlaying;
+ if (fragPlaying && previousFrag) {
+ const fragCurrentLevel = fragPlaying.level;
+ this.hls.trigger(Events.FRAG_CHANGED, { frag: fragPlaying });
+ if (previousFrag.level !== fragCurrentLevel) {
+ this.hls.trigger(Events.LEVEL_SWITCHED, {
+ level: fragCurrentLevel,
+ });
+ }
}
- this.nextLoadPosition = this.getLoadPosition();
+ return true;
+ }
+
+ /**
+ * Get backtrack fragment. Override to return actual backtrack fragment.
+ */
+ protected get backtrackFragment(): Fragment | undefined {
+ return this._backtrackFragment;
+ }
+
+ /**
+ * Set backtrack fragment. Override to set actual backtrack fragment.
+ */
+ protected set backtrackFragment(value: Fragment | undefined) {
+ this._backtrackFragment = value;
+ }
+
+ /**
+ * Get could backtrack flag. Override to return actual value.
+ */
+ protected get couldBacktrack(): boolean {
+ return this._couldBacktrack;
+ }
+
+ /**
+ * Set could backtrack flag. Override to set actual value.
+ */
+ protected set couldBacktrack(value: boolean) {
+ this._couldBacktrack = value;
+ }
+
+ protected abortCurrentFrag(): void {
+ this.backtrackFragment = undefined;
+ super.abortCurrentFrag();
}
protected flushMainBuffer(startOffset: number, endOffset: number) {
@@ -559,7 +504,6 @@ export default class StreamController
removeEventListener(media, 'seeked', this.onMediaSeeked);
}
this.videoBuffer = null;
- this.fragPlaying = null;
super.onMediaDetaching(event, data);
const transferringMedia = !!data.transferMedia;
if (transferringMedia) {
@@ -611,7 +555,7 @@ export default class StreamController
this.log('Trigger BUFFER_RESET');
this.hls.trigger(Events.BUFFER_RESET, undefined);
this.couldBacktrack = false;
- this.fragPlaying = this.backtrackFragment = null;
+ this.backtrackFragment = undefined;
this.altAudio = AlternateAudio.DISABLED;
this.audioOnly = false;
}
@@ -1100,7 +1044,7 @@ export default class StreamController
? this.videoBuffer
: this.mediaBuffer) || this.media;
if (mediaBuffer) {
- this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.MAIN);
+ this.afterBufferFlushed(mediaBuffer, type);
this.tick();
}
}
@@ -1567,10 +1511,7 @@ export default class StreamController
public getMainFwdBufferInfo(): BufferInfo | null {
// Observe video SourceBuffer (this.mediaBuffer) only when alt-audio is used, otherwise observe combined media buffer
- const bufferOutput =
- this.mediaBuffer && this.altAudio === AlternateAudio.SWITCHED
- ? this.mediaBuffer
- : this.media;
+ const bufferOutput = this.getBufferOutput();
return this.getFwdBufferInfo(bufferOutput, PlaylistLevelType.MAIN);
}
@@ -1595,52 +1536,6 @@ export default class StreamController
this.state = State.IDLE;
}
- private checkFragmentChanged() {
- const video = this.media;
- let fragPlayingCurrent: Fragment | null = null;
- if (video && video.readyState > 1 && video.seeking === false) {
- const currentTime = video.currentTime;
- /* if video element is in seeked state, currentTime can only increase.
- (assuming that playback rate is positive ...)
- As sometimes currentTime jumps back to zero after a
- media decode error, check this, to avoid seeking back to
- wrong position after a media decode error
- */
-
- if (BufferHelper.isBuffered(video, currentTime)) {
- fragPlayingCurrent = this.getAppendedFrag(currentTime);
- } else if (BufferHelper.isBuffered(video, currentTime + 0.1)) {
- /* ensure that FRAG_CHANGED event is triggered at startup,
- when first video frame is displayed and playback is paused.
- add a tolerance of 100ms, in case current position is not buffered,
- check if current pos+100ms is buffered and use that buffer range
- for FRAG_CHANGED event reporting */
- fragPlayingCurrent = this.getAppendedFrag(currentTime + 0.1);
- }
- if (fragPlayingCurrent) {
- this.backtrackFragment = null;
- const fragPlaying = this.fragPlaying;
- const fragCurrentLevel = fragPlayingCurrent.level;
- if (
- !fragPlaying ||
- fragPlayingCurrent.sn !== fragPlaying.sn ||
- fragPlaying.level !== fragCurrentLevel
- ) {
- this.fragPlaying = fragPlayingCurrent;
- this.hls.trigger(Events.FRAG_CHANGED, { frag: fragPlayingCurrent });
- if (
- !fragPlaying ||
- fragPlaying.level !== (fragCurrentLevel as number | undefined)
- ) {
- this.hls.trigger(Events.LEVEL_SWITCHED, {
- level: fragCurrentLevel,
- });
- }
- }
- }
- }
- }
-
get nextLevel(): number {
const frag = this.nextBufferedFrag;
if (frag) {
diff --git a/src/hls.ts b/src/hls.ts
index cf61a67b01a..8bbec17580c 100644
--- a/src/hls.ts
+++ b/src/hls.ts
@@ -1068,6 +1068,28 @@ export default class Hls implements HlsEventEmitter {
}
}
+ /**
+ * Index of next audio track as scheduled by audio stream controller.
+ */
+ get nextAudioTrack(): number {
+ return this.audioStreamController?.nextAudioTrack ?? -1;
+ }
+
+ /**
+ * Set audio track index for next loaded data.
+ * This will switch the audio track asap, without interrupting playback.
+ * May abort current loading of data, and flush parts of buffer (outside
+ * currently played fragment region). Audio Track Switched event will be
+ * delayed until the currently playing fragment is of the next audio track.
+ * @param audioTrackId - Pass -1 for automatic audio track selection
+ */
+ set nextAudioTrack(audioTrackId: number) {
+ const { audioTrackController } = this;
+ if (audioTrackController) {
+ audioTrackController.nextAudioTrack = audioTrackId;
+ }
+ }
+
/**
* get the complete list of subtitle tracks across all media groups
*/
diff --git a/src/types/events.ts b/src/types/events.ts
index 186da9ae0f0..7cc5d00980f 100644
--- a/src/types/events.ts
+++ b/src/types/events.ts
@@ -253,7 +253,9 @@ export interface LevelPTSUpdatedData {
end: number;
}
-export interface AudioTrackSwitchingData extends MediaPlaylist {}
+export interface AudioTrackSwitchingData extends MediaPlaylist {
+ flushImmediate?: boolean;
+}
export interface AudioTrackSwitchedData extends MediaPlaylist {}
diff --git a/src/utils/mediacapabilities-helper.ts b/src/utils/mediacapabilities-helper.ts
index 280a30c9561..b3806ef75cf 100644
--- a/src/utils/mediacapabilities-helper.ts
+++ b/src/utils/mediacapabilities-helper.ts
@@ -261,8 +261,8 @@ function makeAudioConfigurations(
return [];
}
-function estimatedAudioBitrate(
- audioCodec: string,
+export function estimatedAudioBitrate(
+ audioCodec: string | undefined,
levelBitrate: number,
): number {
if (levelBitrate <= 1) {
diff --git a/tests/unit/controller/audio-stream-controller.ts b/tests/unit/controller/audio-stream-controller.ts
index aae6d9f79f1..ae51b25345b 100644
--- a/tests/unit/controller/audio-stream-controller.ts
+++ b/tests/unit/controller/audio-stream-controller.ts
@@ -3,7 +3,9 @@ import sinon from 'sinon';
import sinonChai from 'sinon-chai';
import { hlsDefaultConfig } from '../../../src/config';
import AudioStreamController from '../../../src/controller/audio-stream-controller';
-import { State } from '../../../src/controller/base-stream-controller';
+import BaseStreamController, {
+ State,
+} from '../../../src/controller/base-stream-controller';
import { FragmentTracker } from '../../../src/controller/fragment-tracker';
import { Events } from '../../../src/events';
import Hls from '../../../src/hls';
@@ -438,4 +440,131 @@ describe('AudioStreamController', function () {
expect(audioStreamController.tick).to.have.been.calledOnce;
});
});
+
+ describe('checkFragmentChanged', function () {
+ beforeEach(function () {
+ sandbox.stub(
+ Object.getPrototypeOf(audioStreamController),
+ 'cleanupBackBuffer',
+ );
+ });
+
+ it('should return false when super.checkFragmentChanged returns false', function () {
+ sandbox
+ .stub(BaseStreamController.prototype as any, 'checkFragmentChanged')
+ .returns(false);
+ expect((audioStreamController as any).checkFragmentChanged()).to.be.false;
+ });
+
+ it('should cleanup back buffer and complete audio switch when level changes', function () {
+ const mockFrag = { level: 1, sn: 1 } as any;
+ const previousFrag = { level: 0, sn: 0 } as any;
+ const mockSwitchingTrack = { id: 1, name: 'Test' } as any;
+
+ (audioStreamController as any).fragPlaying = previousFrag;
+ (audioStreamController as any).switchingTrack = mockSwitchingTrack;
+
+ sandbox
+ .stub(BaseStreamController.prototype as any, 'checkFragmentChanged')
+ .callsFake(function (this: any) {
+ this.fragPlaying = mockFrag;
+ return true;
+ });
+
+ sandbox.stub(audioStreamController.hls, 'trigger');
+
+ const result = (audioStreamController as any).checkFragmentChanged();
+
+ expect(result).to.be.true;
+ expect((audioStreamController as any).cleanupBackBuffer).to.have.been
+ .called;
+ expect(audioStreamController.hls.trigger).to.have.been.calledWith(
+ Events.AUDIO_TRACK_SWITCHED,
+ );
+ });
+
+ it('should not complete audio switch when level has not changed', function () {
+ const mockFrag = { level: 0, sn: 1 } as any;
+ const previousFrag = { level: 0, sn: 0 } as any;
+ const mockSwitchingTrack = { id: 0, name: 'Test' } as any;
+
+ (audioStreamController as any).fragPlaying = previousFrag;
+ (audioStreamController as any).switchingTrack = mockSwitchingTrack;
+
+ sandbox
+ .stub(BaseStreamController.prototype as any, 'checkFragmentChanged')
+ .callsFake(function (this: any) {
+ this.fragPlaying = mockFrag;
+ return true;
+ });
+
+ sandbox.stub(audioStreamController.hls, 'trigger');
+
+ const result = (audioStreamController as any).checkFragmentChanged();
+
+ expect(result).to.be.true;
+ expect(audioStreamController.hls.trigger).to.not.have.been.called;
+ });
+ });
+
+ describe('audio track switching with flushImmediate flag', function () {
+ beforeEach(function () {
+ audioStreamController.levels = tracks;
+ sandbox.stub(audioStreamController, 'nextLevelSwitch');
+ });
+
+ it('should call nextLevelSwitch when flushImmediate is false', function () {
+ const trackData = {
+ ...audioTracks[0],
+ flushImmediate: false,
+ };
+ (audioStreamController as any).nextTrackId = -1;
+
+ audioStreamController.onAudioTrackSwitching(
+ Events.AUDIO_TRACK_SWITCHING,
+ trackData,
+ );
+
+ expect((audioStreamController as any).nextTrackId).to.equal(0);
+ expect(audioStreamController.nextLevelSwitch).to.have.been.called;
+ });
+
+ it('should not call nextLevelSwitch when flushImmediate is true', function () {
+ const trackData = {
+ ...audioTracks[0],
+ flushImmediate: true,
+ };
+
+ audioStreamController.onAudioTrackSwitching(
+ Events.AUDIO_TRACK_SWITCHING,
+ trackData,
+ );
+
+ expect(audioStreamController.nextLevelSwitch).to.not.have.been.called;
+ });
+ });
+
+ describe('getLoadPosition with flushImmediate', function () {
+ beforeEach(function () {
+ (audioStreamController as any).startFragRequested = false;
+ (audioStreamController as any).nextLoadPosition = 10;
+ });
+
+ it('should return nextLoadPosition when flushImmediate is not false', function () {
+ (audioStreamController as any).switchingTrack = {
+ flushImmediate: true,
+ };
+ expect((audioStreamController as any).getLoadPosition()).to.equal(10);
+ });
+
+ it('should call super.getLoadPosition when flushImmediate is false', function () {
+ (audioStreamController as any).switchingTrack = {
+ flushImmediate: false,
+ };
+ sandbox
+ .stub(Object.getPrototypeOf(audioStreamController), 'getLoadPosition')
+ .returns(5);
+ expect((audioStreamController as any).getLoadPosition()).to.equal(5);
+ });
+ });
});
diff --git a/tests/unit/controller/audio-track-controller.ts b/tests/unit/controller/audio-track-controller.ts
index cccbed0ce91..37b3e4d158f 100644
--- a/tests/unit/controller/audio-track-controller.ts
+++ b/tests/unit/controller/audio-track-controller.ts
@@ -487,4 +487,70 @@ describe('AudioTrackController', function () {
expect(checkRetry).to.have.been.calledOnce;
});
});
+
+ describe('nextAudioTrack', function () {
+ beforeEach(function () {
+ hls.levelController = {
+ levels: [
+ {
+ audioGroups: ['1'],
+ },
+ ],
+ };
+ audioTrackController.tracks = tracks;
+ audioTrackController.onLevelLoading(Events.LEVEL_LOADING, {
+ level: 0,
+ });
+ });
+
+ it('should set next audio track with flushImmediate=false', function () {
+ const triggerSpy = sinon.spy(hls, 'trigger');
+ audioTrackController.nextAudioTrack = 1;
+
+ expect(audioTrackController.trackId).to.equal(1);
+ expect(triggerSpy).to.have.been.calledWith(
+ Events.AUDIO_TRACK_SWITCHING,
+ sinon.match({
+ id: 1,
+ flushImmediate: false,
+ }),
+ );
+ });
+
+ it('should set selectDefaultTrack to false when nextAudioTrack is set from API', function () {
+ (audioTrackController as any).selectDefaultTrack = true;
+ audioTrackController.nextAudioTrack = 1;
+ expect((audioTrackController as any).selectDefaultTrack).to.be.false;
+ });
+ });
+
+ describe('audioTrack with flushImmediate', function () {
+ beforeEach(function () {
+ hls.levelController = {
+ levels: [
+ {
+ audioGroups: ['1'],
+ },
+ ],
+ };
+ audioTrackController.tracks = tracks;
+ audioTrackController.onLevelLoading(Events.LEVEL_LOADING, {
+ level: 0,
+ });
+ });
+
+ it('should set audio track with flushImmediate=true by default', function () {
+ const triggerSpy = sinon.spy(hls, 'trigger');
+ audioTrackController.audioTrack = 1;
+
+ expect(audioTrackController.trackId).to.equal(1);
+ expect(triggerSpy).to.have.been.calledWith(
+ Events.AUDIO_TRACK_SWITCHING,
+ sinon.match({
+ id: 1,
+ flushImmediate: true,
+ }),
+ );
+ });
+ });
});
diff --git a/tests/unit/controller/base-stream-controller.ts b/tests/unit/controller/base-stream-controller.ts
index 538dd75b995..5dcae3f33c7 100644
--- a/tests/unit/controller/base-stream-controller.ts
+++ b/tests/unit/controller/base-stream-controller.ts
@@ -876,4 +876,456 @@ describe('BaseStreamController', function () {
});
});
});
+
+ describe('backtrackFragment and couldBacktrack properties', function () {
+ it('should return undefined for backtrackFragment by default', function () {
+ expect((baseStreamController as any).backtrackFragment).to.be.undefined;
+ });
+
+ it('should return false for couldBacktrack by default', function () {
+ expect((baseStreamController as any).couldBacktrack).to.be.false;
+ });
+ });
+
+ describe('calculateOptimalSwitchPoint', function () {
+ let mockLevel;
+ let mockBufferInfo;
+ let bandwidthStub;
+ let ttfbStub;
+
+ beforeEach(function () {
+ mockLevel = {
+ maxBitrate: 1000000,
+ audioCodec: 'mp4a.40.2',
+ details: null,
+ };
+ mockBufferInfo = {
+ len: 5,
+ end: 10,
+ start: 5,
+ };
+
+ bandwidthStub = sinon
+ .stub(baseStreamController.hls, 'bandwidthEstimate')
+ .get(() => 2000000);
+ ttfbStub = sinon
+ .stub(baseStreamController.hls, 'ttfbEstimate')
+ .get(() => 100);
+ baseStreamController.config.abrBandWidthUpFactor = 0.7;
+
+ (baseStreamController as any).levels = [mockLevel];
+ media.paused = false;
+ media.currentTime = 8;
+ });
+
+ afterEach(function () {
+ bandwidthStub?.restore();
+ ttfbStub?.restore();
+ });
+
+ it('should calculate fetchdelay when media is playing', function () {
+ const result = (baseStreamController as any).calculateOptimalSwitchPoint(
+ mockLevel,
+ mockBufferInfo,
+ );
+ expect(result.fetchdelay).to.be.greaterThan(0);
+ expect(result.okToFlushForwardBuffer).to.be.a('boolean');
+ });
+
+ it('should return fetchdelay=0 when media is paused', function () {
+ media.paused = true;
+ const result = (baseStreamController as any).calculateOptimalSwitchPoint(
+ mockLevel,
+ mockBufferInfo,
+ );
+ expect(result.fetchdelay).to.equal(0);
+ });
+
+ it('should add extra delay when level details are not available', function () {
+ const result1 = (baseStreamController as any).calculateOptimalSwitchPoint(
+ mockLevel,
+ mockBufferInfo,
+ );
+
+ mockLevel.details = {};
+ const result2 = (baseStreamController as any).calculateOptimalSwitchPoint(
+ mockLevel,
+ mockBufferInfo,
+ );
+
+ expect(result2.fetchdelay).to.be.lessThan(result1.fetchdelay);
+ });
+
+ it('should set okToFlushForwardBuffer to true for VOD with enough buffer', function () {
+ (baseStreamController as any).getLevelDetails = () => ({ live: false });
+ mockBufferInfo.end = 20;
+ media.currentTime = 5;
+
+ const result = (baseStreamController as any).calculateOptimalSwitchPoint(
+ mockLevel,
+ mockBufferInfo,
+ );
+
+ expect(result.okToFlushForwardBuffer).to.be.true;
+ });
+
+ it('should set okToFlushForwardBuffer to false for live with low buffer', function () {
+ (baseStreamController as any).getLevelDetails = () => ({ live: true });
+ mockBufferInfo.end = 10;
+ media.currentTime = 9.5;
+
+ const result = (baseStreamController as any).calculateOptimalSwitchPoint(
+ mockLevel,
+ mockBufferInfo,
+ );
+
+ expect(result.okToFlushForwardBuffer).to.be.false;
+ });
+ });
+
+ describe('getBufferedFrag', function () {
+ it('should call fragmentTracker.getBufferedFrag with correct parameters', function () {
+ const mockFrag = new Fragment(PlaylistLevelType.MAIN, 'test.ts');
+ fragmentTracker.getBufferedFrag = sinon.stub().returns(mockFrag);
+
+ const result = (baseStreamController as any).getBufferedFrag(10);
+
+ expect(fragmentTracker.getBufferedFrag).to.have.been.calledWith(
+ 10,
+ PlaylistLevelType.MAIN,
+ );
+ expect(result).to.equal(mockFrag);
+ });
+ });
+
+ describe('followingBufferedFrag', function () {
+ beforeEach(function () {
+ fragmentTracker.getBufferedFrag = sinon.stub();
+ });
+
+ it('should return next buffered fragment', function () {
+ const frag1 = new Fragment(PlaylistLevelType.MAIN, 'test1.ts');
+ frag1.setStart(0);
+ frag1.duration = 10;
+ const frag2 = new Fragment(PlaylistLevelType.MAIN, 'test2.ts');
+
+ fragmentTracker.getBufferedFrag.returns(frag2);
+
+ const result = (baseStreamController as any).followingBufferedFrag(frag1);
+
+ expect(fragmentTracker.getBufferedFrag).to.have.been.calledWith(
+ 10.5,
+ PlaylistLevelType.MAIN,
+ );
+ expect(result).to.equal(frag2);
+ });
+
+ it('should return null when frag is null', function () {
+ const result = (baseStreamController as any).followingBufferedFrag(null);
+ expect(result).to.be.null;
+ expect(fragmentTracker.getBufferedFrag).to.not.have.been.called;
+ });
+ });
+
+ describe('abortCurrentFrag', function () {
+ let mockFrag;
+
+ beforeEach(function () {
+ mockFrag = new Fragment(
+ PlaylistLevelType.MAIN,
+ 'test.ts',
+ ) as MediaFragment;
+ mockFrag.abortRequests = sinon.stub();
+ fragmentTracker.removeFragment = sinon.spy();
+ });
+
+ it('should abort current fragment and reset state to IDLE', function () {
+ baseStreamController.fragCurrent = mockFrag;
+ baseStreamController.state = State.FRAG_LOADING;
+
+ (baseStreamController as any).abortCurrentFrag();
+
+ expect(mockFrag.abortRequests).to.have.been.called;
+ expect(fragmentTracker.removeFragment).to.have.been.calledWith(mockFrag);
+ expect(baseStreamController.fragCurrent).to.be.null;
+ expect(baseStreamController.state).to.equal(State.IDLE);
+ });
+
+ it('should handle PARSING state', function () {
+ baseStreamController.fragCurrent = mockFrag;
+ baseStreamController.state = State.PARSING;
+
+ (baseStreamController as any).abortCurrentFrag();
+
+ expect(baseStreamController.state).to.equal(State.IDLE);
+ });
+
+ it('should not change state when already STOPPED', function () {
+ baseStreamController.fragCurrent = mockFrag;
+ baseStreamController.state = State.STOPPED;
+
+ (baseStreamController as any).abortCurrentFrag();
+
+ expect(baseStreamController.state).to.equal(State.STOPPED);
+ });
+
+ it('should update nextLoadPosition to current load position', function () {
+ baseStreamController.fragCurrent = mockFrag;
+ media.currentTime = 15;
+ baseStreamController.nextLoadPosition = 10;
+
+ (baseStreamController as any).abortCurrentFrag();
+
+ expect(baseStreamController.nextLoadPosition).to.equal(10);
+ });
+ });
+
+ describe('checkFragmentChanged', function () {
+ let mockFrag;
+
+ beforeEach(function () {
+ mockFrag = new Fragment(
+ PlaylistLevelType.MAIN,
+ 'test.ts',
+ ) as MediaFragment;
+ mockFrag.sn = 1;
+ mockFrag.level = 0;
+ mockFrag.setStart(5);
+ mockFrag.duration = 10;
+ fragmentTracker.getAppendedFrag = sinon.stub().returns(mockFrag);
+ media.readyState = 4;
+ media.seeking = false;
+ media.currentTime = 7;
+ media.buffered = new TimeRangesMock([5, 15]);
+ });
+
+ it('should return true when fragment changes', function () {
+ (baseStreamController as any).fragPlaying = null;
+
+ const result = (baseStreamController as any).checkFragmentChanged();
+
+ expect(result).to.be.true;
+ expect((baseStreamController as any).fragPlaying).to.equal(mockFrag);
+ });
+
+ it('should return false when fragment has not changed', function () {
+ (baseStreamController as any).fragPlaying = mockFrag;
+
+ const result = (baseStreamController as any).checkFragmentChanged();
+
+ expect(result).to.be.false;
+ });
+
+ it('should return false when media readyState is low', function () {
+ media.readyState = 1;
+
+ const result = (baseStreamController as any).checkFragmentChanged();
+
+ expect(result).to.be.false;
+ });
+
+ it('should clear backtrackFragment when fragment changes', function () {
+ (baseStreamController as any).backtrackFragment = new Fragment(
+ PlaylistLevelType.MAIN,
+ 'old.ts',
+ );
+ (baseStreamController as any).fragPlaying = null;
+
+ (baseStreamController as any).checkFragmentChanged();
+
+ expect((baseStreamController as any).backtrackFragment).to.be.undefined;
+ });
+ });
+
+ describe('cleanupBackBuffer', function () {
+ let mockFrag;
+
+ beforeEach(function () {
+ mockFrag = new Fragment(PlaylistLevelType.MAIN, 'test.ts');
+ mockFrag.setStart(10);
+ mockFrag.duration = 10;
+ fragmentTracker.getAppendedFrag = sinon.stub().returns(mockFrag);
+ media.currentTime = 15;
+ sinon.stub(baseStreamController as any, 'flushMainBuffer');
+ });
+
+ it('should flush back buffer', function () {
+ (baseStreamController as any).cleanupBackBuffer();
+
+ expect(
+ (baseStreamController as any).flushMainBuffer,
+ ).to.have.been.calledWith(0, sinon.match.number);
+ });
+
+ it('should not flush when fragment start is less than 1 second', function () {
+ mockFrag.setStart(0.5);
+
+ (baseStreamController as any).cleanupBackBuffer();
+
+ expect((baseStreamController as any).flushMainBuffer).to.not.have.been
+ .called;
+ });
+
+ it('should not flush when media is not available', function () {
+ baseStreamController.media = null;
+
+ (baseStreamController as any).cleanupBackBuffer();
+
+ expect((baseStreamController as any).flushMainBuffer).to.not.have.been
+ .called;
+ });
+ });
+
+ describe('scheduleTrackSwitch', function () {
+ let mockBufferInfo;
+ let mockBufferedFrag;
+ let mockNextFrag;
+
+ beforeEach(function () {
+ mockBufferInfo = {
+ len: 5,
+ end: 10,
+ start: 5,
+ };
+ mockBufferedFrag = new Fragment(PlaylistLevelType.MAIN, 'test1.ts');
+ mockBufferedFrag.setStart(5);
+ mockBufferedFrag.duration = 5;
+ mockNextFrag = new Fragment(PlaylistLevelType.MAIN, 'test2.ts');
+ mockNextFrag.setStart(10);
+ mockNextFrag.duration = 5;
+
+ fragmentTracker.getBufferedFrag = sinon.stub();
+ fragmentTracker.getBufferedFrag.onCall(0).returns(mockBufferedFrag);
+ fragmentTracker.getBufferedFrag.onCall(1).returns(mockNextFrag);
+
+ sinon.stub(baseStreamController as any, 'abortCurrentFrag');
+ sinon.stub(baseStreamController as any, 'flushMainBuffer');
+ sinon.stub(baseStreamController as any, 'cleanupBackBuffer');
+
+ media.currentTime = 8;
+ });
+
+ it('should schedule track switch when next fragment is available', function () {
+ (baseStreamController as any).scheduleTrackSwitch(
+ mockBufferInfo,
+ 2,
+ true,
+ );
+
+ expect((baseStreamController as any).abortCurrentFrag).to.have.been
+ .called;
+ expect((baseStreamController as any).flushMainBuffer).to.have.been.called;
+ expect((baseStreamController as any).cleanupBackBuffer).to.have.been
+ .called;
+ });
+
+ it('should not flush when okToFlushForwardBuffer is false', function () {
+ (baseStreamController as any).scheduleTrackSwitch(
+ mockBufferInfo,
+ 2,
+ false,
+ );
+
+ expect((baseStreamController as any).abortCurrentFrag).to.not.have.been
+ .called;
+ expect((baseStreamController as any).flushMainBuffer).to.not.have.been
+ .called;
+ });
+
+ it('should not flush when next fragment is not available', function () {
+ fragmentTracker.getBufferedFrag.onCall(1).returns(null);
+
+ (baseStreamController as any).scheduleTrackSwitch(
+ mockBufferInfo,
+ 2,
+ true,
+ );
+
+ expect((baseStreamController as any).flushMainBuffer).to.not.have.been
+ .called;
+ });
+
+ it('should not flush when media is not available', function () {
+ baseStreamController.media = null;
+
+ (baseStreamController as any).scheduleTrackSwitch(
+ mockBufferInfo,
+ 2,
+ true,
+ );
+
+ expect((baseStreamController as any).flushMainBuffer).to.not.have.been
+ .called;
+ });
+ });
+
+ describe('getBufferOutput', function () {
+ it('should return media from StreamController override', function () {
+ const result = (baseStreamController as any).getBufferOutput();
+ expect(result).to.not.be.null;
+ });
+ });
+
+ describe('nextLevelSwitch', function () {
+ let mockLevel;
+ let mockBufferInfo;
+
+ beforeEach(function () {
+ mockLevel = {
+ maxBitrate: 1000000,
+ audioCodec: 'mp4a.40.2',
+ details: null,
+ };
+ mockBufferInfo = {
+ len: 5,
+ end: 10,
+ start: 5,
+ };
+
+ (baseStreamController as any).levels = [mockLevel];
+ media.readyState = 4;
+ media.currentTime = 8;
+
+ sinon.stub(baseStreamController as any, 'getBufferOutput').returns(media);
+ sinon
+ .stub(baseStreamController as any, 'getFwdBufferInfo')
+ .returns(mockBufferInfo);
+ sinon
+ .stub(baseStreamController as any, 'calculateOptimalSwitchPoint')
+ .returns({
+ fetchdelay: 2,
+ okToFlushForwardBuffer: true,
+ });
+ sinon.stub(baseStreamController as any, 'scheduleTrackSwitch');
+ sinon.stub(baseStreamController as any, 'tickImmediate');
+
+ baseStreamController.hls.nextLoadLevel = 0;
+ });
+
+ it('should call scheduleTrackSwitch for MAIN playlist type', function () {
+ (baseStreamController as any).nextLevelSwitch();
+
+ expect((baseStreamController as any).scheduleTrackSwitch).to.have.been
+ .called;
+ expect(baseStreamController.tickImmediate).to.have.been.called;
+ });
+
+ it('should not call scheduleTrackSwitch when media is not ready', function () {
+ media.readyState = 0;
+
+ (baseStreamController as any).nextLevelSwitch();
+
+ expect((baseStreamController as any).scheduleTrackSwitch).to.not.have.been
+ .called;
+ });
+
+ it('should not call scheduleTrackSwitch when bufferInfo is null', function () {
+ (baseStreamController as any).getFwdBufferInfo.returns(null);
+
+ (baseStreamController as any).nextLevelSwitch();
+
+ expect((baseStreamController as any).scheduleTrackSwitch).to.not.have.been
+ .called;
+ });
+ });
});
diff --git a/tests/unit/controller/stream-controller.ts b/tests/unit/controller/stream-controller.ts
index 714cfdebdce..410b50f47a0 100644
--- a/tests/unit/controller/stream-controller.ts
+++ b/tests/unit/controller/stream-controller.ts
@@ -15,6 +15,7 @@ import { Level } from '../../../src/types/level';
import { PlaylistLevelType } from '../../../src/types/loader';
import { AttrList } from '../../../src/utils/attr-list';
import { mockFragments as mockFragmentArray } from '../../mocks/data';
+import { TimeRangesMock } from '../../mocks/time-ranges.mock';
import type { FragmentTracker } from '../../../src/controller/fragment-tracker';
import type StreamController from '../../../src/controller/stream-controller';
import type { MediaFragment } from '../../../src/loader/fragment';
@@ -654,4 +655,134 @@ describe('StreamController', function () {
});
});
});
+
+ describe('getBufferOutput', function () {
+ let media: any;
+ let mediaBuffer: any;
+
+ beforeEach(function () {
+ media = { buffered: new TimeRangesMock() };
+ mediaBuffer = { buffered: new TimeRangesMock() };
+ streamController['media'] = media;
+ });
+
+ it('should return media when altAudio is DISABLED', function () {
+ streamController['altAudio'] = 0; // AlternateAudio.DISABLED
+ streamController['mediaBuffer'] = mediaBuffer;
+ expect(streamController['getBufferOutput']()).to.equal(media);
+ });
+
+ it('should return media when altAudio is SWITCHING', function () {
+ streamController['altAudio'] = 1; // AlternateAudio.SWITCHING
+ streamController['mediaBuffer'] = mediaBuffer;
+ expect(streamController['getBufferOutput']()).to.equal(media);
+ });
+
+ it('should return mediaBuffer when altAudio is SWITCHED', function () {
+ streamController['altAudio'] = 2; // AlternateAudio.SWITCHED
+ streamController['mediaBuffer'] = mediaBuffer;
+ expect(streamController['getBufferOutput']()).to.equal(mediaBuffer);
+ });
+
+ it('should return media when mediaBuffer is null even if altAudio is SWITCHED', function () {
+ streamController['altAudio'] = 2; // AlternateAudio.SWITCHED
+ streamController['mediaBuffer'] = null;
+ expect(streamController['getBufferOutput']()).to.equal(media);
+ });
+ });
+
+ describe('checkFragmentChanged override', function () {
+ let media: any;
+ let mockFrag: Fragment;
+
+ beforeEach(function () {
+ mockFrag = new Fragment(PlaylistLevelType.MAIN, 'test.ts');
+ mockFrag.sn = 1;
+ mockFrag.level = 0;
+ mockFrag.setStart(5);
+ mockFrag.duration = 10;
+
+ media = {
+ readyState: 4,
+ seeking: false,
+ currentTime: 7,
+ buffered: new TimeRangesMock([5, 15]),
+ };
+ streamController['media'] = media;
+ streamController['fragmentTracker'].getAppendedFrag = sinon
+ .stub()
+ .returns(mockFrag);
+ });
+
+ it('should trigger FRAG_CHANGED event when fragment changes', function () {
+ const triggerSpy = sinon.spy(hls, 'trigger');
+ const oldFrag = new Fragment(PlaylistLevelType.MAIN, 'old.ts');
+ oldFrag.sn = 0;
+ oldFrag.level = 0;
+ streamController['fragPlaying'] = oldFrag;
+
+ const result = streamController['checkFragmentChanged']();
+
+ expect(result).to.be.true;
+ const calls = triggerSpy.getCalls();
+ let fragChangedCall;
+ for (let i = 0; i < calls.length; i++) {
+ if (calls[i].args[0] === Events.FRAG_CHANGED) {
+ fragChangedCall = calls[i];
+ break;
+ }
+ }
+ expect(fragChangedCall).to.exist;
+ expect(fragChangedCall?.args[1]).to.have.property('frag', mockFrag);
+ });
+
+ it('should trigger LEVEL_SWITCHED event when level changes', function () {
+ const triggerSpy = sinon.spy(hls, 'trigger');
+ const oldFrag = new Fragment(PlaylistLevelType.MAIN, 'old.ts');
+ oldFrag.sn = 0;
+ oldFrag.level = 1;
+
+ streamController['fragPlaying'] = oldFrag;
+
+ streamController['checkFragmentChanged']();
+
+ const calls = triggerSpy.getCalls();
+ let levelSwitchedCall;
+ for (let i = 0; i < calls.length; i++) {
+ if (calls[i].args[0] === Events.LEVEL_SWITCHED) {
+ levelSwitchedCall = calls[i];
+ break;
+ }
+ }
+ expect(levelSwitchedCall).to.exist;
+ expect(levelSwitchedCall?.args[1]).to.have.property('level', 0);
+ });
+
+ it('should return false when fragment has not changed', function () {
+ streamController['fragPlaying'] = mockFrag;
+
+ const result = streamController['checkFragmentChanged']();
+
+ expect(result).to.be.false;
+ });
+ });
+
+ describe('abortCurrentFrag override', function () {
+ it('should clear backtrackFragment and call super', function () {
+ const mockFrag = new Fragment(PlaylistLevelType.MAIN, 'test.ts');
+ mockFrag.abortRequests = sinon.stub();
+ streamController['fragCurrent'] = mockFrag;
+ streamController['backtrackFragment'] = new Fragment(
+ PlaylistLevelType.MAIN,
+ 'backtrack.ts',
+ );
+ streamController['state'] = State.FRAG_LOADING;
+
+ streamController['abortCurrentFrag']();
+
+ expect(streamController['backtrackFragment']).to.be.undefined;
+ expect(streamController['fragCurrent']).to.be.null;
+ expect(streamController['state']).to.equal(State.IDLE);
+ });
+ });
});
diff --git a/tests/unit/hls.ts b/tests/unit/hls.ts
index cf9779e5091..aa36faef13f 100644
--- a/tests/unit/hls.ts
+++ b/tests/unit/hls.ts
@@ -158,4 +158,44 @@ describe('Hls', function () {
expect(() => JSON.stringify(hls)).to.not.throw();
});
});
+
+ describe('nextAudioTrack', function () {
+ it('should return -1 when audioStreamController is not available', function () {
+ const hls = new Hls();
+ (hls as any).audioStreamController = null;
+ expect(hls.nextAudioTrack).to.equal(-1);
+ hls.destroy();
+ });
+
+ it('should not crash when audioTrackController is not available', function () {
+ const hls = new Hls();
+ (hls as any).audioTrackController = null;
+
+ expect(() => {
+ hls.nextAudioTrack = 2;
+ }).to.not.throw();
+
+ hls.destroy();
+ });
+
+ it('should set nextAudioTrack on audioTrackController', function () {
+ const hls = new Hls();
+ const mockAudioTrackController = {
+ nextAudioTrack: 0,
+ };
+ (hls as any).audioTrackController = mockAudioTrackController;
+
+ hls.nextAudioTrack = 2;
+
+ expect(mockAudioTrackController.nextAudioTrack).to.equal(2);
+ hls.destroy();
+ });
+
+ it('should return -1 when audioStreamController is undefined', function () {
+ const hls = new Hls();
+ (hls as any).audioStreamController = undefined;
+ expect(hls.nextAudioTrack).to.equal(-1);
+ hls.destroy();
+ });
+ });
});
diff --git a/tests/unit/utils/mediacapabilities-helper.ts b/tests/unit/utils/mediacapabilities-helper.ts
index c7518b24201..1dc94ef0172 100644
--- a/tests/unit/utils/mediacapabilities-helper.ts
+++ b/tests/unit/utils/mediacapabilities-helper.ts
@@ -1,7 +1,10 @@
import { expect } from 'chai';
import { Level } from '../../../src/types/level';
import { AttrList } from '../../../src/utils/attr-list';
-import { getMediaDecodingInfoPromise } from '../../../src/utils/mediacapabilities-helper';
+import {
+ estimatedAudioBitrate,
+ getMediaDecodingInfoPromise,
+} from '../../../src/utils/mediacapabilities-helper';
import type {
MediaAttributes,
MediaPlaylist,
@@ -144,3 +147,55 @@ describe('getMediaDecodingInfoPromise', function () {
});
});
});
+
+describe('estimatedAudioBitrate', function () {
+ it('should return min of levelBitrate/2 and 128000 when audioCodec is undefined', function () {
+ const result = estimatedAudioBitrate(undefined, 1000000);
+ expect(result).to.equal(128000); // min(500000, 128000)
+ });
+
+ it('should return min of levelBitrate/2 and 128000 when audioCodec is empty string', function () {
+ const result = estimatedAudioBitrate('', 1000000);
+ expect(result).to.equal(128000); // min(500000, 128000)
+ });
+
+ it('should estimate AAC bitrate as min of levelBitrate/2 and 128000', function () {
+ const result = estimatedAudioBitrate('mp4a.40.2', 1000000);
+ expect(result).to.equal(128000); // min(500000, 128000)
+ });
+
+ it('should estimate HE-AAC bitrate as min of levelBitrate/2 and 128000', function () {
+ const result = estimatedAudioBitrate('mp4a.40.5', 1000000);
+ expect(result).to.equal(128000); // min(500000, 128000)
+ });
+
+ it('should estimate AC-3 bitrate as min of levelBitrate/2 and 640000', function () {
+ const result = estimatedAudioBitrate('ac-3', 1000000);
+ expect(result).to.equal(500000); // min(500000, 640000)
+ });
+
+ it('should estimate E-AC-3 bitrate as min of levelBitrate/2 and 768000', function () {
+ const result = estimatedAudioBitrate('ec-3', 1000000);
+ expect(result).to.equal(500000); // min(500000, 768000)
+ });
+
+ it('should handle very low bitrates', function () {
+ const result = estimatedAudioBitrate('mp4a.40.2', 1);
+ expect(result).to.equal(1); // NOTE(review): min(1/2, 128000) would be 0.5, not 1 — confirm the implementation clamps/floors to the level bitrate
+ });
+
+ it('should handle very high bitrates with default codec', function () {
+ const result = estimatedAudioBitrate('mp4a.40.2', 10000000);
+ expect(result).to.equal(128000); // min(5000000, 128000)
+ });
+
+ it('should handle very high bitrates with ec-3 codec', function () {
+ const result = estimatedAudioBitrate('ec-3', 10000000);
+ expect(result).to.equal(768000); // min(5000000, 768000)
+ });
+
+ it('should use default estimate (128000) for unknown codecs', function () {
+ const result = estimatedAudioBitrate('unknown-codec', 1000000);
+ expect(result).to.equal(128000); // min(500000, 128000)
+ });
+});
|