Skip to content

Commit

Permalink
Refactor fetch loader
Browse files Browse the repository at this point in the history
  • Loading branch information
dsilhavy committed Nov 29, 2024
1 parent eb76383 commit 3a6d351
Showing 1 changed file with 39 additions and 31 deletions.
70 changes: 39 additions & 31 deletions src/streaming/net/FetchLoader.js
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,8 @@ function FetchLoader() {
function _getDownloadValuesMoofParsing() {
const calculatedThroughput = _calculateThroughputByMoofMdatTimes(moofStartTimeData, mdatEndTimeData);

console.log(`---dsi: ${calculatedThroughput}`);

if (calculatedThroughput) {
return totalBytesReceived * 8 / calculatedThroughput;
}
Expand Down Expand Up @@ -199,19 +201,17 @@ function FetchLoader() {

function _handleTopIsoBoxCompleted(boxesInfo) {
const endPositionOfLastTargetBox = boxesInfo.startOffsetOfLastFoundTargetBox + boxesInfo.sizeOfLastFoundTargetBox;
const data = _getDataForMediaSourceBufferAndAdjustReceivedData(endPositionOfLastTargetBox);

// Store the end time of each chunk download with its size in array EndTimeData
if (calculationMode === Constants.LOW_LATENCY_DOWNLOAD_TIME_CALCULATION_MODE.MOOF_PARSING && !lastChunkWasFinished) {
lastChunkWasFinished = true;
mdatEndTimeData.push({
timestamp: _getCurrentTimestamp(),
bytes: receivedData.length
bytes: data.length
});
}

const data = _getDataForMediaSourceBufferAndAdjustReceivedData(endPositionOfLastTargetBox);
console.log(`data length: ${data.length}`);

// Announce progress but don't track traces. Throughput measures are quite unstable
// when they are based in small amount of data
commonMediaRequest.customData.onprogress({
Expand Down Expand Up @@ -431,39 +431,43 @@ function FetchLoader() {
logger.warn(`[FetchLoader] Moof and Mdat data arrays have different lengths. Moof: ${filteredMoofStartTimeData.length}, Mdat: ${filteredMdatEndTimeData.length}`);
}

let chunkThroughputs = [];
// Compute the average throughput of the filtered chunk data
if (filteredMoofStartTimeData.length > 1) {
let shortDurationBytesReceived = 0;
let shortDurationStartTime = 0;
for (let i = 0; i < filteredMoofStartTimeData.length; i++) {
if (filteredMoofStartTimeData[i] && filteredMdatEndTimeData[i]) {
let chunkDownloadTime = filteredMdatEndTimeData[i].timestamp - filteredMoofStartTimeData[i].timestamp;
if (chunkDownloadTime > 1) {
chunkThroughputs.push((8 * filteredMdatEndTimeData[i].bytes) / chunkDownloadTime);
if (filteredMoofStartTimeData.length <= 1) {
return null;
}

let chunkThroughputValues = [];
let shortDurationBytesReceived = 0;
let shortDurationStartTime = 0;

for (let i = 0; i < filteredMoofStartTimeData.length; i++) {
if (filteredMoofStartTimeData[i] && filteredMdatEndTimeData[i]) {
let chunkDownloadTime = filteredMdatEndTimeData[i].timestamp - filteredMoofStartTimeData[i].timestamp;
if (chunkDownloadTime > 1) {
const throughput = _getThroughputInBitPerMs(filteredMdatEndTimeData[i].bytes, chunkDownloadTime);
chunkThroughputValues.push(throughput);
shortDurationStartTime = 0;
} else {
if (shortDurationStartTime === 0) {
shortDurationStartTime = filteredMoofStartTimeData[i].timestamp;
shortDurationBytesReceived = 0;
}
let cumulatedChunkDownloadTime = filteredMdatEndTimeData[i].timestamp - shortDurationStartTime;
if (cumulatedChunkDownloadTime > 1) {
shortDurationBytesReceived += filteredMdatEndTimeData[i].bytes;
const throughput = _getThroughputInBitPerMs(shortDurationBytesReceived, cumulatedChunkDownloadTime);
chunkThroughputValues.push(throughput);
shortDurationStartTime = 0;
} else {
if (shortDurationStartTime === 0) {
shortDurationStartTime = filteredMoofStartTimeData[i].timestamp;
shortDurationBytesReceived = 0;
}
let cumulatedChunkDownloadTime = filteredMdatEndTimeData[i].timestamp - shortDurationStartTime;
if (cumulatedChunkDownloadTime > 1) {
shortDurationBytesReceived += filteredMdatEndTimeData[i].bytes;
chunkThroughputs.push((8 * shortDurationBytesReceived) / cumulatedChunkDownloadTime);
shortDurationStartTime = 0;
} else {
// continue cumulating short duration data
shortDurationBytesReceived += filteredMdatEndTimeData[i].bytes;
}
// continue cumulating short duration data
shortDurationBytesReceived += filteredMdatEndTimeData[i].bytes;
}
}
}
}

if (chunkThroughputs.length > 0) {
const sumOfChunkThroughputs = chunkThroughputs.reduce((a, b) => a + b, 0);
return sumOfChunkThroughputs / chunkThroughputs.length;
}
if (chunkThroughputValues.length > 0) {
const sumOfChunkThroughputValues = chunkThroughputValues.reduce((a, b) => a + b, 0);
return sumOfChunkThroughputValues / chunkThroughputValues.length;
}

return null;
Expand All @@ -472,6 +476,10 @@ function FetchLoader() {
}
}

/**
 * Converts a received byte count and a download duration into a throughput value.
 * The result is expressed in bit per millisecond, which is numerically equal to kbit/s.
 *
 * NOTE: A timeInMs of 0 would yield Infinity; callers in this file only invoke this
 * helper after guarding that the measured download time is > 1 ms.
 *
 * @param {number} bytes - Number of bytes received during the measured interval
 * @param {number} timeInMs - Duration of the measured interval in milliseconds
 * @returns {number} Throughput in bit per millisecond (equivalent to kbit/s)
 * @private
 */
function _getThroughputInBitPerMs(bytes, timeInMs) {
    return (8 * bytes) / timeInMs;
}

setup();

instance = {
Expand Down

0 comments on commit 3a6d351

Please sign in to comment.