Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 9 additions & 2 deletions lib/media/media_source_engine.js
Original file line number Diff line number Diff line change
Expand Up @@ -1254,8 +1254,6 @@ shaka.media.MediaSourceEngine = class {
if (closedCaptions.length) {
this.textEngine_.storeAndAppendClosedCaptions(
closedCaptions,
reference.startTime,
reference.endTime,
timestampOffset);
}
}
Expand Down Expand Up @@ -1382,6 +1380,10 @@ shaka.media.MediaSourceEngine = class {
const ContentType = shaka.util.ManifestParserUtils.ContentType;
if (contentType == ContentType.VIDEO && this.captionParser_) {
this.captionParser_.remove(continuityTimelines);
// Get actual TextEngine buffer start, as it's not the same as video
// buffer and TextEngine does not support multiple buffered ranges.
const textStart = this.textEngine_.bufferStart() || 0;
this.textEngine_.remove(textStart, endTime);
}
if (contentType == ContentType.TEXT) {
await this.textEngine_.remove(startTime, endTime);
Expand Down Expand Up @@ -1413,6 +1415,11 @@ shaka.media.MediaSourceEngine = class {
}
await this.textEngine_.remove(0, Infinity);
} else {
// if we have CEA captions, we should clear those too.
if (contentType === ContentType.VIDEO && this.captionParser_ &&
this.textEngine_) {
await this.textEngine_.remove(0, Infinity);
}
// Note that not all platforms allow clearing to Infinity.
await this.enqueueOperation_(
contentType,
Expand Down
81 changes: 52 additions & 29 deletions lib/text/text_engine.js
Original file line number Diff line number Diff line change
Expand Up @@ -61,9 +61,7 @@ shaka.text.TextEngine = class {
* when we start displaying captions or switch caption tracks, we need to be
* able to get the cues for the other language and display them without
* re-fetching the video segments they were embedded in.
* Structure of closed caption map:
* closed caption id -> {start and end time -> cues}
* @private {!Map<string, !Map<string, !Array<shaka.text.Cue>>>}
* @private {!Map<string, !Array<shaka.text.Cue>>}
*/
this.closedCaptionsMap_ = new Map();
}
Expand Down Expand Up @@ -249,7 +247,10 @@ shaka.text.TextEngine = class {
async remove(startTime, endTime) {
// Start the operation asynchronously to avoid blocking the caller.
await Promise.resolve();

if (startTime >= endTime) {
return;
}
this.removeClosedCaptions_(startTime, endTime);
if (this.displayer_ && this.displayer_.remove(startTime, endTime)) {
if (this.bufferStart_ == null) {
goog.asserts.assert(
Expand Down Expand Up @@ -279,6 +280,8 @@ shaka.text.TextEngine = class {
goog.asserts.assert(
false, 'removal from the middle is not supported by TextEngine');
}

this.updateRangesWithClosedCaptions_();
}
}
}
Expand Down Expand Up @@ -353,15 +356,11 @@ shaka.text.TextEngine = class {
setSelectedClosedCaptionId(id, bufferEndTime) {
this.selectedClosedCaptionId_ = id;

const captionsMap = this.closedCaptionsMap_.get(id);
if (captionsMap) {
for (const startAndEndTime of captionsMap.keys()) {
/** @type {Array<!shaka.text.Cue>} */
const cues = captionsMap.get(startAndEndTime)
.filter((c) => c.endTime <= bufferEndTime);
if (cues) {
this.displayer_.append(cues);
}
const captions = this.closedCaptionsMap_.get(id);
if (captions) {
const cues = captions.filter((c) => c.endTime <= bufferEndTime);
if (cues.length) {
this.displayer_.append(cues);
}
}
}
Expand All @@ -384,25 +383,18 @@ shaka.text.TextEngine = class {
* text displayer. This is a side-channel used for embedded text only.
*
* @param {!Array<!shaka.extern.ICaptionDecoder.ClosedCaption>} closedCaptions
* @param {?number} startTime relative to the start of the presentation
* @param {?number} endTime relative to the start of the presentation
* @param {number} videoTimestampOffset the timestamp offset of the video
* stream in which these captions were embedded
*/
storeAndAppendClosedCaptions(
closedCaptions, startTime, endTime, videoTimestampOffset) {
const startAndEndTime = startTime + ' ' + endTime;
/** @type {!Map<string, !Map<string, !Array<!shaka.text.Cue>>>} */
storeAndAppendClosedCaptions(closedCaptions, videoTimestampOffset) {
/** @type {!Map<string, !Array<!shaka.text.Cue>>} */
const captionsMap = new Map();

for (const caption of closedCaptions) {
const id = caption.stream;
const cue = caption.cue;
if (!captionsMap.has(id)) {
captionsMap.set(id, new Map());
}
if (!captionsMap.get(id).has(startAndEndTime)) {
captionsMap.get(id).set(startAndEndTime, []);
captionsMap.set(id, []);
}

// Adjust CEA captions with respect to the timestamp offset of the video
Expand All @@ -416,22 +408,53 @@ shaka.text.TextEngine = class {
continue;
}

captionsMap.get(id).get(startAndEndTime).push(cue);
captionsMap.get(id).push(cue);
if (id == this.selectedClosedCaptionId_) {
this.displayer_.append([cue]);
}
}

for (const id of captionsMap.keys()) {
if (!this.closedCaptionsMap_.has(id)) {
this.closedCaptionsMap_.set(id, new Map());
this.closedCaptionsMap_.set(id, []);
}
for (const startAndEndTime of captionsMap.get(id).keys()) {
const cues = captionsMap.get(id).get(startAndEndTime);
this.closedCaptionsMap_.get(id).set(startAndEndTime, cues);
for (const cue of captionsMap.get(id)) {
this.closedCaptionsMap_.get(id).push(cue);
}
}

this.updateRangesWithClosedCaptions_();
}

/**
* Removes stored closed-caption cues that fall entirely inside the given
* time window: a cue is dropped when cue.startTime >= startTime AND
* cue.endTime < endTime; cues overlapping either boundary are kept.
* NOTE(review): the end comparison is exclusive, so a cue ending exactly
* at endTime survives removal — presumably intentional; confirm against
* the remove() semantics of the displayer.
* @param {number} startTime Start of the removal window (seconds).
* @param {number} endTime End of the removal window (seconds).
* @private
*/
removeClosedCaptions_(startTime, endTime) {
for (const id of this.closedCaptionsMap_.keys()) {
let captions = this.closedCaptionsMap_.get(id);
// Keep only cues that extend outside [startTime, endTime).
captions = captions.filter(
(cue) => cue.startTime < startTime || cue.endTime >= endTime);
this.closedCaptionsMap_.set(id, captions);
}
}

/**
* @private
*/
updateRangesWithClosedCaptions_() {
let startTime = Infinity;
let endTime = -Infinity;
for (const captions of this.closedCaptionsMap_.values()) {
for (const cue of captions) {
startTime = Math.min(startTime, cue.startTime);
endTime = Math.max(endTime, cue.endTime);
}
}
if (startTime === Infinity || endTime === -Infinity) {
return;
}
if (this.bufferStart_ == null) {
this.bufferStart_ = Math.max(startTime, this.appendWindowStart_);
} else {
Expand Down Expand Up @@ -465,7 +488,7 @@ shaka.text.TextEngine = class {
*/
getNumberOfClosedCaptionsInChannel(channelId) {
const channel = this.closedCaptionsMap_.get(channelId);
return channel ? channel.size : 0;
return channel ? channel.length : 0;
}
};

Expand Down
18 changes: 6 additions & 12 deletions test/text/text_engine_unit.js
Original file line number Diff line number Diff line change
Expand Up @@ -164,8 +164,7 @@ describe('TextEngine', () => {
};

textEngine.setSelectedClosedCaptionId('CC1', 0);
textEngine.storeAndAppendClosedCaptions(
[caption], /* startTime= */ 0, /* endTime= */ 2, /* offset= */ 0);
textEngine.storeAndAppendClosedCaptions([caption], /* offset= */ 0);
expect(mockDisplayer.appendSpy).toHaveBeenCalled();
});

Expand All @@ -179,8 +178,7 @@ describe('TextEngine', () => {
};

textEngine.setSelectedClosedCaptionId('CC3', 0);
textEngine.storeAndAppendClosedCaptions(
[caption], /* startTime= */ 0, /* endTime= */ 2, /* offset= */ 0);
textEngine.storeAndAppendClosedCaptions([caption], /* offset= */ 0);
expect(mockDisplayer.appendSpy).not.toHaveBeenCalled();
});

Expand All @@ -201,21 +199,18 @@ describe('TextEngine', () => {
textEngine.setSelectedClosedCaptionId('CC1', 0);
// Text Engine stores all the closed captions as a single-layer map:
// {closed caption id -> array of cues}
textEngine.storeAndAppendClosedCaptions(
[caption0], /* startTime= */ 0, /* endTime= */ 1, /* offset= */ 0);
textEngine.storeAndAppendClosedCaptions([caption0], /* offset= */ 0);
expect(textEngine.getNumberOfClosedCaptionChannels()).toBe(1);
expect(textEngine.getNumberOfClosedCaptionsInChannel('CC1')).toBe(1);

textEngine.storeAndAppendClosedCaptions(
[caption1], /* startTime= */ 1, /* endTime= */ 2, /* offset= */ 0);
textEngine.storeAndAppendClosedCaptions([caption1], /* offset= */ 0);
// Caption1 has the same stream id as caption0, but a different start and
// end time. The closed captions map should still have the single key CC1,
// whose cue array now contains two cues.
expect(textEngine.getNumberOfClosedCaptionChannels()).toBe(1);
expect(textEngine.getNumberOfClosedCaptionsInChannel('CC1')).toBe(2);

textEngine.storeAndAppendClosedCaptions(
[caption2], /* startTime= */ 1, /* endTime= */ 2, /* offset= */ 0);
textEngine.storeAndAppendClosedCaptions([caption2], /* offset= */ 0);
// Caption2 has a different stream id CC3, so the closed captions map
// should have two different keys, CC1 and CC3.
expect(textEngine.getNumberOfClosedCaptionChannels()).toBe(2);
Expand All @@ -231,8 +226,7 @@ describe('TextEngine', () => {
};

textEngine.setSelectedClosedCaptionId('CC1', 0);
textEngine.storeAndAppendClosedCaptions(
[caption], /* startTime= */ 0, /* endTime= */ 2, /* offset= */ 1000);
textEngine.storeAndAppendClosedCaptions([caption], /* offset= */ 1000);
expect(mockDisplayer.appendSpy).toHaveBeenCalledWith([
jasmine.objectContaining({
startTime: 1000,
Expand Down