Source: lib/util/stream_utils.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.util.StreamUtils');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.config.AutoShowText');
  9. goog.require('shaka.lcevc.Dec');
  10. goog.require('shaka.log');
  11. goog.require('shaka.media.Capabilities');
  12. goog.require('shaka.text.TextEngine');
  13. goog.require('shaka.util.Functional');
  14. goog.require('shaka.util.LanguageUtils');
  15. goog.require('shaka.util.ManifestParserUtils');
  16. goog.require('shaka.util.MimeUtils');
  17. goog.require('shaka.util.MultiMap');
  18. goog.require('shaka.util.ObjectUtils');
  19. goog.require('shaka.util.Platform');
  20. goog.requireType('shaka.drm.DrmEngine');
  21. /**
  22. * @summary A set of utility functions for dealing with Streams and Manifests.
  23. * @export
  24. */
  25. shaka.util.StreamUtils = class {
  26. /**
  27. * In case of multiple usable codecs, choose one based on lowest average
  28. * bandwidth and filter out the rest.
  29. * Also filters out variants that have too many audio channels.
  30. * @param {!shaka.extern.Manifest} manifest
  31. * @param {!Array<string>} preferredVideoCodecs
  32. * @param {!Array<string>} preferredAudioCodecs
  33. * @param {!Array<string>} preferredDecodingAttributes
  34. * @param {!Array<string>} preferredTextFormats
  35. */
  36. static chooseCodecsAndFilterManifest(manifest, preferredVideoCodecs,
  37. preferredAudioCodecs, preferredDecodingAttributes, preferredTextFormats) {
  38. const StreamUtils = shaka.util.StreamUtils;
  39. const MimeUtils = shaka.util.MimeUtils;
  40. if (preferredTextFormats.length) {
  41. let subset = manifest.textStreams;
  42. for (const textFormat of preferredTextFormats) {
  43. const filtered = subset.filter((textStream) => {
  44. if (textStream.codecs.startsWith(textFormat) ||
  45. textStream.mimeType.startsWith(textFormat)) {
  46. return true;
  47. }
  48. return false;
  49. });
  50. if (filtered.length) {
  51. subset = filtered;
  52. break;
  53. }
  54. }
  55. manifest.textStreams = subset;
  56. }
  57. let variants = manifest.variants;
  58. // To start, choose the codecs based on configured preferences if available.
  59. if (preferredVideoCodecs.length || preferredAudioCodecs.length) {
  60. variants = StreamUtils.choosePreferredCodecs(variants,
  61. preferredVideoCodecs, preferredAudioCodecs);
  62. }
  63. if (preferredDecodingAttributes.length) {
64. // Group variants by resolution and choose preferred variants only.
  65. /** @type {!shaka.util.MultiMap<shaka.extern.Variant>} */
  66. const variantsByResolutionMap = new shaka.util.MultiMap();
  67. for (const variant of variants) {
  68. variantsByResolutionMap
  69. .push(String(variant.video.width || 0), variant);
  70. }
  71. const bestVariants = [];
  72. variantsByResolutionMap.forEach((width, variantsByResolution) => {
  73. let highestMatch = 0;
  74. let matchingVariants = [];
  75. for (const variant of variantsByResolution) {
  76. const matchCount = preferredDecodingAttributes.filter(
  77. (attribute) => variant.decodingInfos[0][attribute],
  78. ).length;
  79. if (matchCount > highestMatch) {
  80. highestMatch = matchCount;
  81. matchingVariants = [variant];
  82. } else if (matchCount == highestMatch) {
  83. matchingVariants.push(variant);
  84. }
  85. }
  86. bestVariants.push(...matchingVariants);
  87. });
  88. variants = bestVariants;
  89. }
  90. const audioStreamsSet = new Set();
  91. const videoStreamsSet = new Set();
  92. for (const variant of variants) {
  93. if (variant.audio) {
  94. audioStreamsSet.add(variant.audio);
  95. }
  96. if (variant.video) {
  97. videoStreamsSet.add(variant.video);
  98. }
  99. }
  100. const audioStreams = Array.from(audioStreamsSet).sort((v1, v2) => {
  101. return v1.bandwidth - v2.bandwidth;
  102. });
  103. const validAudioIds = [];
  104. const validAudioStreamsMap = new Map();
  105. const getAudioId = (stream) => {
  106. let id = stream.language + (stream.channelsCount || 0) +
  107. (stream.audioSamplingRate || 0) + stream.roles.join(',') +
  108. stream.label + stream.groupId + stream.fastSwitching;
  109. if (stream.dependencyStream) {
  110. id += stream.dependencyStream.baseOriginalId || '';
  111. }
  112. return id;
  113. };
  114. for (const stream of audioStreams) {
  115. const groupId = getAudioId(stream);
  116. const validAudioStreams = validAudioStreamsMap.get(groupId) || [];
  117. if (!validAudioStreams.length) {
  118. validAudioStreams.push(stream);
  119. validAudioIds.push(stream.id);
  120. } else {
  121. const previousStream = validAudioStreams[validAudioStreams.length - 1];
  122. const previousCodec =
  123. MimeUtils.getNormalizedCodec(previousStream.codecs);
  124. const currentCodec =
  125. MimeUtils.getNormalizedCodec(stream.codecs);
  126. if (previousCodec == currentCodec) {
  127. if (!stream.bandwidth || !previousStream.bandwidth ||
  128. stream.bandwidth > previousStream.bandwidth) {
  129. validAudioStreams.push(stream);
  130. validAudioIds.push(stream.id);
  131. }
  132. }
  133. }
  134. validAudioStreamsMap.set(groupId, validAudioStreams);
  135. }
136. // Keys are based on MimeUtils.getNormalizedCodec. Lower is better.
  137. const videoCodecPreference = {
  138. 'vp8': 1,
  139. 'avc': 1,
  140. 'dovi-avc': 0.95,
  141. 'vp9': 0.9,
  142. 'vp09': 0.9,
  143. 'hevc': 0.85,
  144. 'dovi-hevc': 0.8,
  145. 'dovi-p5': 0.75,
  146. 'av01': 0.7,
  147. 'dovi-av1': 0.65,
  148. 'vvc': 0.6,
  149. };
  150. const videoStreams = Array.from(videoStreamsSet)
  151. .sort((v1, v2) => {
  152. if (!v1.bandwidth || !v2.bandwidth || v1.bandwidth == v2.bandwidth) {
  153. if (v1.codecs && v2.codecs && v1.codecs != v2.codecs &&
  154. v1.width == v2.width) {
  155. const v1Codecs = MimeUtils.getNormalizedCodec(v1.codecs);
  156. const v2Codecs = MimeUtils.getNormalizedCodec(v2.codecs);
  157. if (v1Codecs != v2Codecs) {
  158. const indexV1 = videoCodecPreference[v1Codecs] || 1;
  159. const indexV2 = videoCodecPreference[v2Codecs] || 1;
  160. return indexV1 - indexV2;
  161. }
  162. }
  163. return v1.width - v2.width;
  164. }
  165. return v1.bandwidth - v2.bandwidth;
  166. });
  167. const isChangeTypeSupported =
  168. shaka.media.Capabilities.isChangeTypeSupported();
  169. const validVideoIds = [];
  170. const validVideoStreamsMap = new Map();
  171. const getVideoGroupId = (stream) => {
  172. let id = String(stream.width || '') + String(stream.height || '') +
  173. String(Math.round(stream.frameRate || 0)) + (stream.hdr || '') +
  174. stream.fastSwitching;
  175. if (stream.dependencyStream) {
  176. id += stream.dependencyStream.baseOriginalId || '';
  177. }
  178. return id;
  179. };
  180. for (const stream of videoStreams) {
  181. const groupId = getVideoGroupId(stream);
  182. const validVideoStreams = validVideoStreamsMap.get(groupId) || [];
  183. if (!validVideoStreams.length) {
  184. validVideoStreams.push(stream);
  185. validVideoIds.push(stream.id);
  186. } else {
  187. const previousStream = validVideoStreams[validVideoStreams.length - 1];
  188. if (!isChangeTypeSupported) {
  189. const previousCodec =
  190. MimeUtils.getNormalizedCodec(previousStream.codecs);
  191. const currentCodec =
  192. MimeUtils.getNormalizedCodec(stream.codecs);
  193. if (previousCodec !== currentCodec) {
  194. continue;
  195. }
  196. }
  197. const previousCodec =
  198. MimeUtils.getNormalizedCodec(previousStream.codecs);
  199. const currentCodec =
  200. MimeUtils.getNormalizedCodec(stream.codecs);
  201. if (previousCodec == currentCodec) {
  202. if (!stream.bandwidth || !previousStream.bandwidth ||
  203. stream.bandwidth > previousStream.bandwidth) {
  204. validVideoStreams.push(stream);
  205. validVideoIds.push(stream.id);
  206. }
  207. }
  208. }
  209. validVideoStreamsMap.set(groupId, validVideoStreams);
  210. }
211. // Filter out any variants that don't match, forcing AbrManager to choose
212. // from a single video codec and a single audio codec where possible.
  213. manifest.variants = manifest.variants.filter((variant) => {
  214. const audio = variant.audio;
  215. const video = variant.video;
  216. if (audio) {
  217. if (!validAudioIds.includes(audio.id)) {
  218. shaka.log.debug('Dropping Variant (better codec available)', variant);
  219. return false;
  220. }
  221. }
  222. if (video) {
  223. if (!validVideoIds.includes(video.id)) {
  224. shaka.log.debug('Dropping Variant (better codec available)', variant);
  225. return false;
  226. }
  227. }
  228. return true;
  229. });
  230. }
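// Illustrative usage sketch (not part of this file; `manifest` is assumed to
// be a parsed shaka.extern.Manifest). Preferring HEVC video and AC-3 audio,
// with fallbacks, might look like:
//
//   shaka.util.StreamUtils.chooseCodecsAndFilterManifest(
//       manifest,
//       /* preferredVideoCodecs= */ ['hvc1', 'avc1'],
//       /* preferredAudioCodecs= */ ['ac-3', 'mp4a'],
//       /* preferredDecodingAttributes= */ ['smooth', 'powerEfficient'],
//       /* preferredTextFormats= */ ['text/vtt']);
//
// After the call, manifest.variants retains only the surviving codec family,
// so AbrManager switches within a single video codec and a single audio codec.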
  231. /**
  232. * Choose the codecs by configured preferred audio and video codecs.
  233. *
  234. * @param {!Array<shaka.extern.Variant>} variants
  235. * @param {!Array<string>} preferredVideoCodecs
  236. * @param {!Array<string>} preferredAudioCodecs
  237. * @return {!Array<shaka.extern.Variant>}
  238. */
  239. static choosePreferredCodecs(variants, preferredVideoCodecs,
  240. preferredAudioCodecs) {
  241. let subset = variants;
  242. for (const videoCodec of preferredVideoCodecs) {
  243. const filtered = subset.filter((variant) => {
  244. return variant.video && variant.video.codecs.startsWith(videoCodec);
  245. });
  246. if (filtered.length) {
  247. subset = filtered;
  248. break;
  249. }
  250. }
  251. for (const audioCodec of preferredAudioCodecs) {
  252. const filtered = subset.filter((variant) => {
  253. return variant.audio && variant.audio.codecs.startsWith(audioCodec);
  254. });
  255. if (filtered.length) {
  256. subset = filtered;
  257. break;
  258. }
  259. }
  260. return subset;
  261. }
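// Illustrative sketch (assumed inputs): with variants whose video codecs are
// 'avc1.4d401f' and 'hvc1.1.6.L93.B0', preferring ['hvc1', 'avc1'] keeps only
// the HEVC variants, because the first preference that yields a non-empty
// subset wins:
//
//   const subset = shaka.util.StreamUtils.choosePreferredCodecs(
//       manifest.variants, ['hvc1', 'avc1'], []);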
  262. /**
  263. * Filter the variants in |manifest| to only include the variants that meet
  264. * the given restrictions.
  265. *
  266. * @param {!shaka.extern.Manifest} manifest
  267. * @param {shaka.extern.Restrictions} restrictions
  268. * @param {shaka.extern.Resolution} maxHwResolution
  269. */
  270. static filterByRestrictions(manifest, restrictions, maxHwResolution) {
  271. manifest.variants = manifest.variants.filter((variant) => {
  272. return shaka.util.StreamUtils.meetsRestrictions(
  273. variant, restrictions, maxHwResolution);
  274. });
  275. }
  276. /**
  277. * @param {shaka.extern.Variant} variant
  278. * @param {shaka.extern.Restrictions} restrictions
  279. * Configured restrictions from the user.
  280. * @param {shaka.extern.Resolution} maxHwRes
  281. * The maximum resolution the hardware can handle.
  282. * This is applied separately from user restrictions because the setting
  283. * should not be easily replaced by the user's configuration.
  284. * @return {boolean}
  285. * @export
  286. */
  287. static meetsRestrictions(variant, restrictions, maxHwRes) {
  288. /** @type {function(number, number, number):boolean} */
  289. const inRange = (x, min, max) => {
  290. return x >= min && x <= max;
  291. };
  292. const video = variant.video;
  293. // |video.width| and |video.height| can be undefined, which breaks
  294. // the math, so make sure they are there first.
  295. if (video && video.width && video.height) {
  296. let videoWidth = video.width;
  297. let videoHeight = video.height;
  298. if (videoHeight > videoWidth) {
  299. // Vertical video.
  300. [videoWidth, videoHeight] = [videoHeight, videoWidth];
  301. }
  302. if (!inRange(videoWidth,
  303. restrictions.minWidth,
  304. Math.min(restrictions.maxWidth, maxHwRes.width))) {
  305. return false;
  306. }
  307. if (!inRange(videoHeight,
  308. restrictions.minHeight,
  309. Math.min(restrictions.maxHeight, maxHwRes.height))) {
  310. return false;
  311. }
  312. if (!inRange(video.width * video.height,
  313. restrictions.minPixels,
  314. restrictions.maxPixels)) {
  315. return false;
  316. }
  317. }
  318. // |variant.video.frameRate| can be undefined, which breaks
319. // the math, so make sure it is there first.
  320. if (variant && variant.video && variant.video.frameRate) {
  321. if (!inRange(variant.video.frameRate,
  322. restrictions.minFrameRate,
  323. restrictions.maxFrameRate)) {
  324. return false;
  325. }
  326. }
  327. // |variant.audio.channelsCount| can be undefined, which breaks
328. // the math, so make sure it is there first.
  329. if (variant && variant.audio && variant.audio.channelsCount) {
  330. if (!inRange(variant.audio.channelsCount,
  331. restrictions.minChannelsCount,
  332. restrictions.maxChannelsCount)) {
  333. return false;
  334. }
  335. }
  336. if (!inRange(variant.bandwidth,
  337. restrictions.minBandwidth,
  338. restrictions.maxBandwidth)) {
  339. return false;
  340. }
  341. return true;
  342. }
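// Illustrative sketch (hypothetical values; `variant` and `player` are
// assumed placeholders): checking a single variant against the player's
// configured restrictions and a 1080p hardware cap:
//
//   const ok = shaka.util.StreamUtils.meetsRestrictions(
//       variant,
//       player.getConfiguration().restrictions,
//       /* maxHwRes= */ {width: 1920, height: 1080});
//
// A 3840x2160 variant would fail here even if the user-configured
// maxWidth/maxHeight allow it, since the hardware cap is applied with
// Math.min.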
  343. /**
  344. * @param {!Array<shaka.extern.Variant>} variants
  345. * @param {shaka.extern.Restrictions} restrictions
  346. * @param {shaka.extern.Resolution} maxHwRes
  347. * @return {boolean} Whether the tracks changed.
  348. */
  349. static applyRestrictions(variants, restrictions, maxHwRes) {
  350. let tracksChanged = false;
  351. for (const variant of variants) {
  352. const originalAllowed = variant.allowedByApplication;
  353. variant.allowedByApplication = shaka.util.StreamUtils.meetsRestrictions(
  354. variant, restrictions, maxHwRes);
  355. if (originalAllowed != variant.allowedByApplication) {
  356. tracksChanged = true;
  357. }
  358. }
  359. return tracksChanged;
  360. }
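// Illustrative sketch (assumed inputs): re-applying restrictions after a
// configuration change and reacting if any variant's allowed state flipped:
//
//   const changed = shaka.util.StreamUtils.applyRestrictions(
//       manifest.variants, config.restrictions, maxHwRes);
//   if (changed) {
//     // e.g. re-run ABR selection or notify listeners that tracks changed.
//   }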
  361. /**
  362. * Alters the given Manifest to filter out any unplayable streams.
  363. *
  364. * @param {shaka.drm.DrmEngine} drmEngine
  365. * @param {shaka.extern.Manifest} manifest
  366. * @param {!Array<string>=} preferredKeySystems
  367. * @param {!Object<string, string>=} keySystemsMapping
  368. */
  369. static async filterManifest(drmEngine, manifest, preferredKeySystems = [],
  370. keySystemsMapping = {}) {
  371. await shaka.util.StreamUtils.filterManifestByMediaCapabilities(
  372. drmEngine, manifest, manifest.offlineSessionIds.length > 0,
  373. preferredKeySystems, keySystemsMapping);
  374. shaka.util.StreamUtils.filterTextStreams_(manifest);
  375. await shaka.util.StreamUtils.filterImageStreams_(manifest);
  376. }
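// Illustrative sketch (assumed context): after DrmEngine is created and
// configured, filter out everything the platform cannot play:
//
//   await shaka.util.StreamUtils.filterManifest(
//       drmEngine, manifest,
//       /* preferredKeySystems= */ ['com.widevine.alpha']);
//
// This runs the MediaCapabilities-based variant filtering plus the text and
// image stream filtering in one call.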
  377. /**
  378. * Alters the given Manifest to filter out any streams unsupported by the
  379. * platform via MediaCapabilities.decodingInfo() API.
  380. *
  381. * @param {shaka.drm.DrmEngine} drmEngine
  382. * @param {shaka.extern.Manifest} manifest
  383. * @param {boolean} usePersistentLicenses
  384. * @param {!Array<string>} preferredKeySystems
  385. * @param {!Object<string, string>} keySystemsMapping
  386. */
  387. static async filterManifestByMediaCapabilities(
  388. drmEngine, manifest, usePersistentLicenses, preferredKeySystems,
  389. keySystemsMapping) {
  390. goog.asserts.assert(navigator.mediaCapabilities,
  391. 'MediaCapabilities should be valid.');
  392. if (shaka.util.Platform.isXboxOne()) {
  393. shaka.util.StreamUtils.overrideDolbyVisionCodecs(manifest.variants);
  394. }
  395. await shaka.util.StreamUtils.getDecodingInfosForVariants(
  396. manifest.variants, usePersistentLicenses, /* srcEquals= */ false,
  397. preferredKeySystems);
  398. let keySystem = null;
  399. if (drmEngine) {
  400. const drmInfo = drmEngine.getDrmInfo();
  401. if (drmInfo) {
  402. keySystem = drmInfo.keySystem;
  403. }
  404. }
  405. const StreamUtils = shaka.util.StreamUtils;
  406. manifest.variants = manifest.variants.filter((variant) => {
  407. const supported = StreamUtils.checkVariantSupported_(
  408. variant, keySystem, keySystemsMapping);
  409. // Filter out all unsupported variants.
  410. if (!supported) {
  411. shaka.log.debug('Dropping variant - not compatible with platform',
  412. StreamUtils.getVariantSummaryString_(variant));
  413. }
  414. return supported;
  415. });
  416. }
  417. /**
  418. * Maps Dolby Vision codecs to H.264 and H.265 equivalents as a workaround
  419. * to make Dolby Vision playback work on some platforms.
  420. *
  421. * Mapping is done according to the relevant Dolby documentation found here:
  422. * https://professionalsupport.dolby.com/s/article/How-to-signal-Dolby-Vision-in-MPEG-DASH?language=en_US
  423. * @param {!Array<!shaka.extern.Variant>} variants
  424. */
  425. static overrideDolbyVisionCodecs(variants) {
  426. /** @type {!Map<string, string>} */
  427. const codecMap = new Map()
  428. .set('dvav', 'avc3')
  429. .set('dva1', 'avc1')
  430. .set('dvhe', 'hev1')
  431. .set('dvh1', 'hvc1')
  432. .set('dvc1', 'vvc1')
  433. .set('dvi1', 'vvi1');
  434. /** @type {!Set<!shaka.extern.Stream>} */
  435. const streams = new Set();
  436. for (const variant of variants) {
  437. if (variant.video) {
  438. streams.add(variant.video);
  439. }
  440. }
  441. for (const video of streams) {
  442. for (const [dvCodec, replacement] of codecMap) {
  443. if (video.codecs.includes(dvCodec)) {
  444. video.codecs = video.codecs.replace(dvCodec, replacement);
  445. break;
  446. }
  447. }
  448. }
  449. }
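// Illustrative sketch: on Xbox One, a video stream with codecs 'dvhe.05.06'
// would be rewritten to 'hev1.05.06' by the mapping above, so
// MediaCapabilities is queried with the HEVC equivalent instead of the
// Dolby Vision identifier.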
  450. /**
  451. * @param {!shaka.extern.Variant} variant
  452. * @param {?string} keySystem
  453. * @param {!Object<string, string>} keySystemsMapping
  454. * @return {boolean}
  455. * @private
  456. */
  457. static checkVariantSupported_(variant, keySystem, keySystemsMapping) {
  458. const variantSupported = variant.decodingInfos.some((decodingInfo) => {
  459. if (!decodingInfo.supported) {
  460. return false;
  461. }
  462. if (keySystem) {
  463. const keySystemAccess = decodingInfo.keySystemAccess;
  464. if (keySystemAccess) {
  465. const currentKeySystem =
  466. keySystemsMapping[keySystemAccess.keySystem] ||
  467. keySystemAccess.keySystem;
  468. if (currentKeySystem != keySystem) {
  469. return false;
  470. }
  471. }
  472. }
  473. return true;
  474. });
  475. if (!variantSupported) {
  476. return false;
  477. }
  478. const isXboxOne = shaka.util.Platform.isXboxOne();
  479. const isFirefoxAndroid = shaka.util.Platform.isFirefox() &&
  480. shaka.util.Platform.isAndroid();
  481. // See: https://github.com/shaka-project/shaka-player/issues/3860
  482. const video = variant.video;
  483. const videoWidth = (video && video.width) || 0;
  484. const videoHeight = (video && video.height) || 0;
  485. // See: https://github.com/shaka-project/shaka-player/issues/3380
  486. // Note: it makes sense to drop early
  487. if (isXboxOne && video && (videoWidth > 1920 || videoHeight > 1080) &&
  488. (video.codecs.includes('avc1.') || video.codecs.includes('avc3.'))) {
  489. return false;
  490. }
  491. const videoDependencyStream = video && video.dependencyStream;
  492. if (videoDependencyStream &&
  493. !shaka.lcevc.Dec.isStreamSupported(videoDependencyStream)) {
  494. return false;
  495. }
  496. const audio = variant.audio;
  497. // See: https://github.com/shaka-project/shaka-player/issues/6111
498. // It seems that Firefox on Android reports support for Opus + Widevine,
499. // but it is not actually supported.
  500. // It makes sense to drop early.
  501. if (isFirefoxAndroid && audio && audio.encrypted &&
  502. audio.codecs.toLowerCase().includes('opus')) {
  503. return false;
  504. }
  505. const audioDependencyStream = audio && audio.dependencyStream;
  506. if (audioDependencyStream) {
  507. return false;
  508. }
  509. return true;
  510. }
  511. /**
  512. * Queries mediaCapabilities for the decoding info for that decoding config,
  513. * and assigns it to the given variant.
  514. * If that query has been done before, instead return a cached result.
  515. * @param {!shaka.extern.Variant} variant
  516. * @param {!Array<!MediaDecodingConfiguration>} decodingConfigs
  517. * @private
  518. */
  519. static async getDecodingInfosForVariant_(variant, decodingConfigs) {
  520. /**
  521. * @param {?MediaCapabilitiesDecodingInfo} a
  522. * @param {!MediaCapabilitiesDecodingInfo} b
  523. * @return {!MediaCapabilitiesDecodingInfo}
  524. */
  525. const merge = (a, b) => {
  526. if (!a) {
  527. return b;
  528. } else {
  529. const res = shaka.util.ObjectUtils.shallowCloneObject(a);
  530. res.supported = a.supported && b.supported;
  531. res.powerEfficient = a.powerEfficient && b.powerEfficient;
  532. res.smooth = a.smooth && b.smooth;
  533. if (b.keySystemAccess && !res.keySystemAccess) {
  534. res.keySystemAccess = b.keySystemAccess;
  535. }
  536. return res;
  537. }
  538. };
  539. const StreamUtils = shaka.util.StreamUtils;
  540. /** @type {?MediaCapabilitiesDecodingInfo} */
  541. let finalResult = null;
  542. const promises = [];
  543. for (const decodingConfig of decodingConfigs) {
  544. const cacheKey =
  545. shaka.util.ObjectUtils.alphabeticalKeyOrderStringify(decodingConfig);
  546. const cache = StreamUtils.decodingConfigCache_;
  547. if (cache.has(cacheKey)) {
  548. shaka.log.v2('Using cached results of mediaCapabilities.decodingInfo',
  549. 'for key', cacheKey);
  550. finalResult = merge(finalResult, cache.get(cacheKey));
  551. } else {
552. // Do a final pass over the decoding config: if a given stream lists
553. // multiple codecs, that suggests it switches between those codecs at
554. // some point during playback.
555. // mediaCapabilities reports "not supported" when given multiple
556. // different codecs at once, so each codec has to be checked
557. // individually. Check each one and take the worst result to determine
558. // overall variant compatibility.
  559. promises.push(StreamUtils
  560. .checkEachDecodingConfigCombination_(decodingConfig).then((res) => {
  561. /** @type {?MediaCapabilitiesDecodingInfo} */
  562. let acc = null;
  563. for (const result of (res || [])) {
  564. acc = merge(acc, result);
  565. }
  566. if (acc) {
  567. cache.set(cacheKey, acc);
  568. finalResult = merge(finalResult, acc);
  569. }
  570. }));
  571. }
  572. }
  573. await Promise.all(promises);
  574. if (finalResult) {
  575. variant.decodingInfos.push(finalResult);
  576. }
  577. }
  578. /**
  579. * @param {!MediaDecodingConfiguration} decodingConfig
  580. * @return {!Promise<?Array<!MediaCapabilitiesDecodingInfo>>}
  581. * @private
  582. */
  583. static checkEachDecodingConfigCombination_(decodingConfig) {
  584. let videoCodecs = [''];
  585. if (decodingConfig.video) {
  586. videoCodecs = shaka.util.MimeUtils.getCodecs(
  587. decodingConfig.video.contentType).split(',');
  588. }
  589. let audioCodecs = [''];
  590. if (decodingConfig.audio) {
  591. audioCodecs = shaka.util.MimeUtils.getCodecs(
  592. decodingConfig.audio.contentType).split(',');
  593. }
  594. const promises = [];
  595. for (const videoCodec of videoCodecs) {
  596. for (const audioCodec of audioCodecs) {
  597. const copy = shaka.util.ObjectUtils.cloneObject(decodingConfig);
  598. if (decodingConfig.video) {
  599. const mimeType = shaka.util.MimeUtils.getBasicType(
  600. copy.video.contentType);
  601. copy.video.contentType = shaka.util.MimeUtils.getFullType(
  602. mimeType, videoCodec);
  603. }
  604. if (decodingConfig.audio) {
  605. const mimeType = shaka.util.MimeUtils.getBasicType(
  606. copy.audio.contentType);
  607. copy.audio.contentType = shaka.util.MimeUtils.getFullType(
  608. mimeType, audioCodec);
  609. }
  610. promises.push(new Promise((resolve, reject) => {
  611. // On some (Android) WebView environments, decodingInfo will
  612. // not resolve or reject, at least if RESOURCE_PROTECTED_MEDIA_ID
  613. // is not set. This is a workaround for that issue.
  614. const TIMEOUT_FOR_DECODING_INFO_IN_SECONDS = 5;
  615. let promise;
  616. if (shaka.util.Platform.isAndroid()) {
  617. promise = shaka.util.Functional.promiseWithTimeout(
  618. TIMEOUT_FOR_DECODING_INFO_IN_SECONDS,
  619. navigator.mediaCapabilities.decodingInfo(copy),
  620. );
  621. } else {
  622. promise = navigator.mediaCapabilities.decodingInfo(copy);
  623. }
  624. promise.then((res) => {
  625. resolve(res);
  626. }).catch(reject);
  627. }));
  628. }
  629. }
  630. return Promise.all(promises).catch((e) => {
  631. shaka.log.info('MediaCapabilities.decodingInfo() failed.',
  632. JSON.stringify(decodingConfig), e);
  633. return null;
  634. });
  635. }
  636. /**
  637. * Get the decodingInfo results of the variants via MediaCapabilities.
  638. * This should be called after the DrmEngine is created and configured, and
  639. * before DrmEngine sets the mediaKeys.
  640. *
  641. * @param {!Array<shaka.extern.Variant>} variants
  642. * @param {boolean} usePersistentLicenses
  643. * @param {boolean} srcEquals
  644. * @param {!Array<string>} preferredKeySystems
  645. * @exportDoc
  646. */
  647. static async getDecodingInfosForVariants(variants, usePersistentLicenses,
  648. srcEquals, preferredKeySystems) {
  649. const gotDecodingInfo = variants.some((variant) =>
  650. variant.decodingInfos.length);
  651. if (gotDecodingInfo) {
  652. shaka.log.debug('Already got the variants\' decodingInfo.');
  653. return;
  654. }
  655. // Try to get preferred key systems first to avoid unneeded calls to CDM.
  656. for (const preferredKeySystem of preferredKeySystems) {
  657. let keySystemSatisfied = false;
  658. for (const variant of variants) {
  659. /** @type {!Array<!Array<!MediaDecodingConfiguration>>} */
  660. const decodingConfigs = shaka.util.StreamUtils.getDecodingConfigs_(
  661. variant, usePersistentLicenses, srcEquals)
  662. .filter((configs) => {
  663. // All configs in a batch will have the same keySystem.
  664. const config = configs[0];
  665. const keySystem = config.keySystemConfiguration &&
  666. config.keySystemConfiguration.keySystem;
  667. return keySystem === preferredKeySystem;
  668. });
  669. // The reason we are performing this await in a loop rather than
670. // batching into a `Promise.all` is performance related.
  671. // https://github.com/shaka-project/shaka-player/pull/4708#discussion_r1022581178
  672. for (const configs of decodingConfigs) {
  673. // eslint-disable-next-line no-await-in-loop
  674. await shaka.util.StreamUtils.getDecodingInfosForVariant_(
  675. variant, configs);
  676. }
  677. if (variant.decodingInfos.length) {
  678. keySystemSatisfied = true;
  679. }
  680. } // for (const variant of variants)
  681. if (keySystemSatisfied) {
  682. // Return if any preferred key system is already satisfied.
  683. return;
  684. }
  685. } // for (const preferredKeySystem of preferredKeySystems)
  686. for (const variant of variants) {
  687. /** @type {!Array<!Array<!MediaDecodingConfiguration>>} */
  688. const decodingConfigs = shaka.util.StreamUtils.getDecodingConfigs_(
  689. variant, usePersistentLicenses, srcEquals)
  690. .filter((configs) => {
  691. // All configs in a batch will have the same keySystem.
  692. const config = configs[0];
  693. const keySystem = config.keySystemConfiguration &&
  694. config.keySystemConfiguration.keySystem;
  695. // Avoid checking preferred systems twice.
  696. return !keySystem || !preferredKeySystems.includes(keySystem);
  697. });
  698. // The reason we are performing this await in a loop rather than
699. // batching into a `Promise.all` is performance related.
  700. // https://github.com/shaka-project/shaka-player/pull/4708#discussion_r1022581178
  701. for (const configs of decodingConfigs) {
  702. // eslint-disable-next-line no-await-in-loop
  703. await shaka.util.StreamUtils.getDecodingInfosForVariant_(
  704. variant, configs);
  705. }
  706. }
  707. }
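// Illustrative sketch (assumed context): populating decodingInfos before DRM
// setup, trying PlayReady first so other key systems are only queried when
// needed:
//
//   await shaka.util.StreamUtils.getDecodingInfosForVariants(
//       manifest.variants,
//       /* usePersistentLicenses= */ false,
//       /* srcEquals= */ false,
//       /* preferredKeySystems= */ ['com.microsoft.playready']);
//
// Each variant ends up with merged MediaCapabilities results in
// variant.decodingInfos.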
  708. /**
  709. * Generate a batch of MediaDecodingConfiguration objects to get the
  710. * decodingInfo results for each variant.
  711. * Each batch shares the same DRM information, and represents the various
  712. * fullMimeType combinations of the streams.
  713. * @param {!shaka.extern.Variant} variant
  714. * @param {boolean} usePersistentLicenses
  715. * @param {boolean} srcEquals
  716. * @return {!Array<!Array<!MediaDecodingConfiguration>>}
  717. * @private
  718. */
  719. static getDecodingConfigs_(variant, usePersistentLicenses, srcEquals) {
  720. const audio = variant.audio;
  721. const video = variant.video;
  722. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  723. const ManifestParserUtils = shaka.util.ManifestParserUtils;
  724. const MimeUtils = shaka.util.MimeUtils;
  725. const StreamUtils = shaka.util.StreamUtils;
  726. const videoConfigs = [];
  727. const audioConfigs = [];
  728. if (video) {
  729. for (const fullMimeType of video.fullMimeTypes) {
  730. let videoCodecs = MimeUtils.getCodecs(fullMimeType);
  731. // For multiplexed streams with audio+video codecs, the config should
  732. // have AudioConfiguration and VideoConfiguration.
  733. // We ignore the multiplexed audio when there is normal audio also.
  734. if (videoCodecs.includes(',') && !audio) {
  735. const allCodecs = videoCodecs.split(',');
  736. const baseMimeType = MimeUtils.getBasicType(fullMimeType);
  737. videoCodecs = ManifestParserUtils.guessCodecs(
  738. ContentType.VIDEO, allCodecs);
  739. let audioCodecs = ManifestParserUtils.guessCodecs(
  740. ContentType.AUDIO, allCodecs);
  741. audioCodecs = StreamUtils.getCorrectAudioCodecs(
  742. audioCodecs, baseMimeType);
  743. const audioFullType = MimeUtils.getFullOrConvertedType(
  744. baseMimeType, audioCodecs, ContentType.AUDIO);
  745. audioConfigs.push({
  746. contentType: audioFullType,
  747. channels: 2,
  748. bitrate: variant.bandwidth || 1,
  749. samplerate: 1,
  750. spatialRendering: false,
  751. });
  752. }
  753. videoCodecs = StreamUtils.getCorrectVideoCodecs(videoCodecs);
  754. const fullType = MimeUtils.getFullOrConvertedType(
  755. MimeUtils.getBasicType(fullMimeType), videoCodecs,
  756. ContentType.VIDEO);
  757. // VideoConfiguration
  758. const videoConfig = {
  759. contentType: fullType,
  760. // NOTE: Some decoders strictly check the width and height fields and
  761. // won't decode smaller than 64x64. So if we don't have this info (as
  762. // is the case in some of our simpler tests), assume a 64x64
  763. // resolution to fill in this required field for MediaCapabilities.
  764. //
  765. // This became an issue specifically on Firefox on M1 Macs.
  766. width: video.width || 64,
  767. height: video.height || 64,
  768. bitrate: video.bandwidth || variant.bandwidth || 1,
  769. // framerate must be greater than 0, otherwise the config is invalid.
  770. framerate: video.frameRate || 1,
  771. };
  772. if (video.hdr) {
  773. switch (video.hdr) {
  774. case 'SDR':
  775. videoConfig.transferFunction = 'srgb';
  776. break;
  777. case 'PQ':
  778. videoConfig.transferFunction = 'pq';
  779. break;
  780. case 'HLG':
  781. videoConfig.transferFunction = 'hlg';
  782. break;
  783. }
  784. }
  785. if (video.colorGamut) {
  786. videoConfig.colorGamut = video.colorGamut;
  787. }
  788. videoConfigs.push(videoConfig);
  789. }
  790. }
  791. if (audio) {
  792. for (const fullMimeType of audio.fullMimeTypes) {
  793. const baseMimeType = MimeUtils.getBasicType(fullMimeType);
  794. const codecs = StreamUtils.getCorrectAudioCodecs(
  795. MimeUtils.getCodecs(fullMimeType), baseMimeType);
  796. const fullType = MimeUtils.getFullOrConvertedType(
  797. baseMimeType, codecs, ContentType.AUDIO);
  798. // AudioConfiguration
  799. audioConfigs.push({
  800. contentType: fullType,
  801. channels: audio.channelsCount || 2,
  802. bitrate: audio.bandwidth || variant.bandwidth || 1,
  803. samplerate: audio.audioSamplingRate || 1,
  804. spatialRendering: audio.spatialAudio,
  805. });
  806. }
  807. }
  808. // Generate each combination of video and audio config as a separate
  809. // MediaDecodingConfiguration, inside the main "batch".
  810. /** @type {!Array<!MediaDecodingConfiguration>} */
  811. const mediaDecodingConfigBatch = [];
  812. if (videoConfigs.length == 0) {
  813. videoConfigs.push(null);
  814. }
  815. if (audioConfigs.length == 0) {
  816. audioConfigs.push(null);
  817. }
  818. for (const videoConfig of videoConfigs) {
  819. for (const audioConfig of audioConfigs) {
  820. /** @type {!MediaDecodingConfiguration} */
  821. const mediaDecodingConfig = {
  822. type: srcEquals ? 'file' : 'media-source',
  823. };
  824. if (videoConfig) {
  825. mediaDecodingConfig.video = videoConfig;
  826. }
  827. if (audioConfig) {
  828. mediaDecodingConfig.audio = audioConfig;
  829. }
  830. mediaDecodingConfigBatch.push(mediaDecodingConfig);
  831. }
  832. }
  833. const videoDrmInfos = variant.video ? variant.video.drmInfos : [];
  834. const audioDrmInfos = variant.audio ? variant.audio.drmInfos : [];
  835. const allDrmInfos = videoDrmInfos.concat(audioDrmInfos);
836. // Return a list containing the mediaDecodingConfig for an unencrypted variant.
  837. if (!allDrmInfos.length) {
  838. return [mediaDecodingConfigBatch];
  839. }
  840. // A list of MediaDecodingConfiguration objects created for the variant.
  841. const configs = [];
  842. // Get all the drm info so that we can avoid using nested loops when we
  843. // just need the drm info.
  844. const drmInfoByKeySystems = new Map();
  845. for (const info of allDrmInfos) {
  846. if (!drmInfoByKeySystems.get(info.keySystem)) {
  847. drmInfoByKeySystems.set(info.keySystem, []);
  848. }
  849. drmInfoByKeySystems.get(info.keySystem).push(info);
  850. }
  851. const persistentState =
  852. usePersistentLicenses ? 'required' : 'optional';
  853. const sessionTypes =
  854. usePersistentLicenses ? ['persistent-license'] : ['temporary'];
  855. for (const keySystem of drmInfoByKeySystems.keys()) {
  856. const drmInfos = drmInfoByKeySystems.get(keySystem);
  857. // Get all the robustness info so that we can avoid using nested
  858. // loops when we just need the robustness.
  859. const drmInfosByRobustness = new Map();
  860. for (const info of drmInfos) {
  861. const keyName = `${info.videoRobustness},${info.audioRobustness}`;
  862. if (!drmInfosByRobustness.get(keyName)) {
  863. drmInfosByRobustness.set(keyName, []);
  864. }
  865. drmInfosByRobustness.get(keyName).push(info);
  866. }
  867. for (const drmInfosRobustness of drmInfosByRobustness.values()) {
  868. const modifiedMediaDecodingConfigBatch = [];
  869. for (const base of mediaDecodingConfigBatch) {
  870. // Create a copy of the mediaDecodingConfig.
  871. const config = /** @type {!MediaDecodingConfiguration} */
  872. (Object.assign({}, base));
  873. /** @type {!MediaCapabilitiesKeySystemConfiguration} */
  874. const keySystemConfig = {
  875. keySystem: keySystem,
  876. initDataType: 'cenc',
  877. persistentState: persistentState,
  878. distinctiveIdentifier: 'optional',
  879. sessionTypes: sessionTypes,
  880. };
  881. for (const info of drmInfosRobustness) {
  882. if (info.initData && info.initData.length) {
  883. const initDataTypes = new Set();
  884. for (const initData of info.initData) {
  885. initDataTypes.add(initData.initDataType);
  886. }
  887. if (initDataTypes.size > 1) {
  888. shaka.log.v2('DrmInfo contains more than one initDataType,',
  889. 'and we use the initDataType of the first initData.',
  890. info);
  891. }
  892. keySystemConfig.initDataType = info.initData[0].initDataType;
  893. }
  894. if (info.distinctiveIdentifierRequired) {
  895. keySystemConfig.distinctiveIdentifier = 'required';
  896. }
  897. if (info.persistentStateRequired) {
  898. keySystemConfig.persistentState = 'required';
  899. }
  900. if (info.sessionType) {
  901. keySystemConfig.sessionTypes = [info.sessionType];
  902. }
  903. if (audio) {
  904. if (!keySystemConfig.audio) {
  905. // KeySystemTrackConfiguration
  906. keySystemConfig.audio = {
  907. robustness: info.audioRobustness,
  908. };
  909. if (info.encryptionScheme) {
  910. keySystemConfig.audio.encryptionScheme =
  911. info.encryptionScheme;
  912. }
  913. } else {
  914. if (info.encryptionScheme) {
  915. keySystemConfig.audio.encryptionScheme =
  916. keySystemConfig.audio.encryptionScheme ||
  917. info.encryptionScheme;
  918. }
  919. keySystemConfig.audio.robustness =
  920. keySystemConfig.audio.robustness ||
  921. info.audioRobustness;
  922. }
  923. // See: https://github.com/shaka-project/shaka-player/issues/4659
  924. if (keySystemConfig.audio.robustness == '') {
  925. delete keySystemConfig.audio.robustness;
  926. }
  927. }
  928. if (video) {
  929. if (!keySystemConfig.video) {
  930. // KeySystemTrackConfiguration
  931. keySystemConfig.video = {
  932. robustness: info.videoRobustness,
  933. };
  934. if (info.encryptionScheme) {
  935. keySystemConfig.video.encryptionScheme =
  936. info.encryptionScheme;
  937. }
  938. } else {
  939. if (info.encryptionScheme) {
  940. keySystemConfig.video.encryptionScheme =
  941. keySystemConfig.video.encryptionScheme ||
  942. info.encryptionScheme;
  943. }
  944. keySystemConfig.video.robustness =
  945. keySystemConfig.video.robustness ||
  946. info.videoRobustness;
  947. }
  948. // See: https://github.com/shaka-project/shaka-player/issues/4659
  949. if (keySystemConfig.video.robustness == '') {
  950. delete keySystemConfig.video.robustness;
  951. }
  952. }
  953. }
  954. config.keySystemConfiguration = keySystemConfig;
  955. modifiedMediaDecodingConfigBatch.push(config);
  956. }
  957. configs.push(modifiedMediaDecodingConfigBatch);
  958. }
  959. }
  960. return configs;
  961. }
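// Illustrative sketch of the return shape (values assumed, not taken from a
// real manifest): for an encrypted variant with one Widevine DrmInfo, one
// video fullMimeType and one audio fullMimeType, the result is one batch
// containing a single config along these lines:
//
//   [[{
//     type: 'media-source',
//     video: {contentType: 'video/mp4; codecs="avc1.42001e"', width: 1280,
//             height: 720, bitrate: 2000000, framerate: 30},
//     audio: {contentType: 'audio/mp4; codecs="mp4a.40.2"', channels: 2,
//             bitrate: 2000000, samplerate: 48000, spatialRendering: false},
//     keySystemConfiguration: {
//       keySystem: 'com.widevine.alpha',
//       initDataType: 'cenc',
//       persistentState: 'optional',
//       distinctiveIdentifier: 'optional',
//       sessionTypes: ['temporary'],
//       video: {robustness: 'SW_SECURE_DECODE'},
//       audio: {robustness: 'SW_SECURE_CRYPTO'},
//     },
//   }]]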
  962. /**
  963. * Generates the correct audio codec for MediaDecodingConfiguration and
  964. * for MediaSource.isTypeSupported.
  965. * @param {string} codecs
  966. * @param {string} mimeType
  967. * @return {string}
  968. */
  969. static getCorrectAudioCodecs(codecs, mimeType) {
  970. // According to RFC 6381 section 3.3, 'fLaC' is actually the correct
  971. // codec string. We still need to map it to 'flac', as some browsers
  972. // currently don't support 'fLaC', while 'flac' is supported by most
  973. // major browsers.
  974. // See https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
  975. if (codecs.toLowerCase() == 'flac') {
  976. if (!shaka.util.Platform.isApple()) {
  977. return 'flac';
  978. } else {
  979. return 'fLaC';
  980. }
  981. }
  982. // The same is true for 'Opus'.
  983. if (codecs.toLowerCase() === 'opus') {
  984. if (!shaka.util.Platform.isApple()) {
  985. return 'opus';
  986. } else {
  987. if (shaka.util.MimeUtils.getContainerType(mimeType) == 'mp4') {
  988. return 'Opus';
  989. } else {
  990. return 'opus';
  991. }
  992. }
  993. }
  994. if (codecs.toLowerCase() == 'ac-3' &&
  995. shaka.util.Platform.requiresEC3InitSegments()) {
  996. return 'ec-3';
  997. }
  998. return codecs;
  999. }
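// Illustrative examples of the mapping above:
//
//   getCorrectAudioCodecs('fLaC', 'audio/mp4')  // 'flac' on most browsers,
//                                               // 'fLaC' on Apple platforms
//   getCorrectAudioCodecs('opus', 'audio/mp4')  // 'Opus' on Apple (MP4 only)
//   getCorrectAudioCodecs('ac-3', 'audio/mp4')  // 'ec-3' where EC-3 init
//                                               // segments are required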
  1000. /**
  1001. * Generates the correct video codec for MediaDecodingConfiguration and
  1002. * for MediaSource.isTypeSupported.
  1003. * @param {string} codec
  1004. * @return {string}
  1005. */
  1006. static getCorrectVideoCodecs(codec) {
  1007. if (codec.includes('avc1')) {
  1008. // Convert avc1 codec string from RFC-4281 to RFC-6381 for
  1009. // MediaSource.isTypeSupported
1010. // Example: convert avc1.66.30 to avc1.42001e (0x42 == 66 and 0x1e == 30)
  1011. const avcData = codec.split('.');
  1012. if (avcData.length == 3) {
  1013. let result = avcData.shift() + '.';
  1014. result += parseInt(avcData.shift(), 10).toString(16);
  1015. result +=
  1016. ('000' + parseInt(avcData.shift(), 10).toString(16)).slice(-4);
  1017. return result;
  1018. }
  1019. } else if (codec == 'vp9') {
  1020. // MediaCapabilities supports 'vp09...' codecs, but not 'vp9'. Translate
  1021. // vp9 codec strings into 'vp09...', to allow such content to play with
  1022. // mediaCapabilities enabled.
  1023. // This means profile 0, level 4.1, 8-bit color. This supports 1080p @
  1024. // 60Hz. See https://en.wikipedia.org/wiki/VP9#Levels
  1025. //
  1026. // If we don't have more detailed codec info, assume this profile and
  1027. // level because it's high enough to likely accommodate the parameters we
  1028. // do have, such as width and height. If an implementation is checking
  1029. // the profile and level very strictly, we want older VP9 content to
  1030. // still work to some degree. But we don't want to set a level so high
  1031. // that it is rejected by a hardware decoder that can't handle the
  1032. // maximum requirements of the level.
  1033. //
  1034. // This became an issue specifically on Firefox on M1 Macs.
  1035. return 'vp09.00.41.08';
  1036. }
  1037. return codec;
  1038. }
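// Illustrative examples of the conversions above:
//
//   getCorrectVideoCodecs('avc1.66.30')       // -> 'avc1.42001e'
//   getCorrectVideoCodecs('vp9')              // -> 'vp09.00.41.08'
//   getCorrectVideoCodecs('hvc1.1.6.L93.B0')  // unchanged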
  1039. /**
  1040. * Alters the given Manifest to filter out any streams incompatible with the
  1041. * current variant.
  1042. *
  1043. * @param {?shaka.extern.Variant} currentVariant
  1044. * @param {shaka.extern.Manifest} manifest
  1045. */
  1046. static filterManifestByCurrentVariant(currentVariant, manifest) {
  1047. const StreamUtils = shaka.util.StreamUtils;
  1048. manifest.variants = manifest.variants.filter((variant) => {
  1049. const audio = variant.audio;
  1050. const video = variant.video;
  1051. if (audio && currentVariant && currentVariant.audio) {
  1052. if (!StreamUtils.areStreamsCompatible_(audio, currentVariant.audio)) {
  1053. shaka.log.debug('Dropping variant - not compatible with active audio',
  1054. 'active audio',
  1055. StreamUtils.getStreamSummaryString_(currentVariant.audio),
  1056. 'variant.audio',
  1057. StreamUtils.getStreamSummaryString_(audio));
  1058. return false;
  1059. }
  1060. }
  1061. if (video && currentVariant && currentVariant.video) {
  1062. if (!StreamUtils.areStreamsCompatible_(video, currentVariant.video)) {
  1063. shaka.log.debug('Dropping variant - not compatible with active video',
  1064. 'active video',
  1065. StreamUtils.getStreamSummaryString_(currentVariant.video),
  1066. 'variant.video',
  1067. StreamUtils.getStreamSummaryString_(video));
  1068. return false;
  1069. }
  1070. }
  1071. return true;
  1072. });
  1073. }
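// Illustrative sketch (assumed context): after a variant has been chosen for
// playback, drop everything we could not seamlessly switch to:
//
//   shaka.util.StreamUtils.filterManifestByCurrentVariant(
//       currentVariant, manifest);
//
// Variants whose audio or video mime type / base codec differ from the
// active streams are removed.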
  1074. /**
  1075. * Alters the given Manifest to filter out any unsupported text streams.
  1076. *
  1077. * @param {shaka.extern.Manifest} manifest
  1078. * @private
  1079. */
  1080. static filterTextStreams_(manifest) {
  1081. // Filter text streams.
  1082. manifest.textStreams = manifest.textStreams.filter((stream) => {
  1083. const fullMimeType = shaka.util.MimeUtils.getFullType(
  1084. stream.mimeType, stream.codecs);
  1085. const keep = shaka.text.TextEngine.isTypeSupported(fullMimeType);
  1086. if (!keep) {
  1087. shaka.log.debug('Dropping text stream. Is not supported by the ' +
  1088. 'platform.', stream);
  1089. }
  1090. return keep;
  1091. });
  1092. }
  1093. /**
  1094. * Alters the given Manifest to filter out any unsupported image streams.
  1095. *
  1096. * @param {shaka.extern.Manifest} manifest
  1097. * @private
  1098. */
  1099. static async filterImageStreams_(manifest) {
  1100. const imageStreams = [];
  1101. for (const stream of manifest.imageStreams) {
  1102. let mimeType = stream.mimeType;
  1103. if (mimeType == 'application/mp4' && stream.codecs == 'mjpg') {
  1104. mimeType = 'image/jpg';
  1105. }
  1106. if (!shaka.util.StreamUtils.supportedImageMimeTypes_.has(mimeType)) {
  1107. const minImage = shaka.util.StreamUtils.minImage_.get(mimeType);
  1108. if (minImage) {
  1109. // eslint-disable-next-line no-await-in-loop
  1110. const res = await shaka.util.StreamUtils.isImageSupported_(minImage);
  1111. shaka.util.StreamUtils.supportedImageMimeTypes_.set(mimeType, res);
  1112. } else {
  1113. shaka.util.StreamUtils.supportedImageMimeTypes_.set(mimeType, false);
  1114. }
  1115. }
  1116. const keep =
  1117. shaka.util.StreamUtils.supportedImageMimeTypes_.get(mimeType);
  1118. if (!keep) {
  1119. shaka.log.debug('Dropping image stream. Is not supported by the ' +
  1120. 'platform.', stream);
  1121. } else {
  1122. imageStreams.push(stream);
  1123. }
  1124. }
  1125. manifest.imageStreams = imageStreams;
  1126. }
  1127. /**
  1128. * @param {string} minImage
  1129. * @return {!Promise<boolean>}
  1130. * @private
  1131. */
  1132. static isImageSupported_(minImage) {
  1133. return new Promise((resolve) => {
  1134. const imageElement = /** @type {HTMLImageElement} */(new Image());
  1135. imageElement.src = minImage;
  1136. if ('decode' in imageElement) {
  1137. imageElement.decode().then(() => {
  1138. resolve(true);
  1139. }).catch(() => {
  1140. resolve(false);
  1141. });
  1142. } else {
  1143. imageElement.onload = imageElement.onerror = () => {
  1144. resolve(imageElement.height === 2);
  1145. };
  1146. }
  1147. });
  1148. }
  1149. /**
  1150. * @param {shaka.extern.Stream} s0
  1151. * @param {shaka.extern.Stream} s1
  1152. * @return {boolean}
  1153. * @private
  1154. */
  1155. static areStreamsCompatible_(s0, s1) {
  1156. // Basic mime types and basic codecs need to match.
  1157. // For example, we can't adapt between WebM and MP4,
1158. // nor can we adapt between mp4a.* and ec-3.
  1159. // We can switch between text types on the fly,
  1160. // so don't run this check on text.
  1161. if (s0.mimeType != s1.mimeType) {
  1162. return false;
  1163. }
  1164. if (s0.codecs.split('.')[0] != s1.codecs.split('.')[0]) {
  1165. return false;
  1166. }
  1167. return true;
  1168. }
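// Illustrative sketch: streams sharing 'video/mp4' with codecs 'avc1.4d401f'
// and 'avc1.640028' are compatible (same base codec 'avc1'), while
// 'avc1.4d401f' vs 'hev1.1.6.L93.B0', or 'video/mp4' vs 'video/webm',
// are not.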
  1169. /**
  1170. * @param {shaka.extern.Variant} variant
  1171. * @return {shaka.extern.Track}
  1172. */
  1173. static variantToTrack(variant) {
  1174. const ManifestParserUtils = shaka.util.ManifestParserUtils;
  1175. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1176. /** @type {?shaka.extern.Stream} */
  1177. const audio = variant.audio;
  1178. /** @type {?shaka.extern.Stream} */
  1179. const video = variant.video;
  1180. /** @type {?string} */
  1181. const audioMimeType = audio ? audio.mimeType : null;
  1182. /** @type {?string} */
  1183. const videoMimeType = video ? video.mimeType : null;
  1184. /** @type {?string} */
  1185. const audioCodec = audio ? audio.codecs : null;
  1186. /** @type {?string} */
  1187. const videoCodec = video ? video.codecs : null;
  1188. /** @type {?string} */
  1189. const audioGroupId = audio ? audio.groupId : null;
  1190. /** @type {!Array<string>} */
  1191. const mimeTypes = [];
  1192. if (video) {
  1193. mimeTypes.push(video.mimeType);
  1194. }
  1195. if (audio) {
  1196. mimeTypes.push(audio.mimeType);
  1197. }
  1198. /** @type {?string} */
  1199. const mimeType = mimeTypes[0] || null;
  1200. /** @type {!Array<string>} */
  1201. const kinds = [];
  1202. if (audio) {
  1203. kinds.push(audio.kind);
  1204. }
  1205. if (video) {
  1206. kinds.push(video.kind);
  1207. }
  1208. /** @type {?string} */
  1209. const kind = kinds[0] || null;
  1210. /** @type {!Set<string>} */
  1211. const roles = new Set();
  1212. if (audio) {
  1213. for (const role of audio.roles) {
  1214. roles.add(role);
  1215. }
  1216. }
  1217. if (video) {
  1218. for (const role of video.roles) {
  1219. roles.add(role);
  1220. }
  1221. }
  1222. /** @type {shaka.extern.Track} */
  1223. const track = {
  1224. id: variant.id,
  1225. active: false,
  1226. type: 'variant',
  1227. bandwidth: variant.bandwidth,
  1228. language: variant.language,
  1229. label: null,
  1230. kind: kind,
  1231. width: null,
  1232. height: null,
  1233. frameRate: null,
  1234. pixelAspectRatio: null,
  1235. hdr: null,
  1236. colorGamut: null,
  1237. videoLayout: null,
  1238. mimeType: mimeType,
  1239. audioMimeType: audioMimeType,
  1240. videoMimeType: videoMimeType,
  1241. codecs: '',
  1242. audioCodec: audioCodec,
  1243. videoCodec: videoCodec,
  1244. primary: variant.primary,
  1245. roles: Array.from(roles),
  1246. audioRoles: null,
  1247. forced: false,
  1248. videoId: null,
  1249. audioId: null,
  1250. audioGroupId: audioGroupId,
  1251. channelsCount: null,
  1252. audioSamplingRate: null,
  1253. spatialAudio: false,
  1254. tilesLayout: null,
  1255. audioBandwidth: null,
  1256. videoBandwidth: null,
  1257. originalVideoId: null,
  1258. originalAudioId: null,
  1259. originalTextId: null,
  1260. originalImageId: null,
  1261. accessibilityPurpose: null,
  1262. originalLanguage: null,
  1263. };
  1264. if (video) {
  1265. track.videoId = video.id;
  1266. track.originalVideoId = video.originalId;
  1267. track.width = video.width || null;
  1268. track.height = video.height || null;
  1269. track.frameRate = video.frameRate || null;
  1270. track.pixelAspectRatio = video.pixelAspectRatio || null;
  1271. track.videoBandwidth = video.bandwidth || null;
  1272. track.hdr = video.hdr || null;
  1273. track.colorGamut = video.colorGamut || null;
  1274. track.videoLayout = video.videoLayout || null;
  1275. const dependencyStream = video.dependencyStream;
  1276. if (dependencyStream) {
  1277. track.width = dependencyStream.width || track.width;
  1278. track.height = dependencyStream.height || track.height;
  1279. track.videoCodec = dependencyStream.codecs || track.videoCodec;
  1280. }
  1281. if (videoCodec.includes(',')) {
  1282. track.channelsCount = video.channelsCount;
  1283. track.audioSamplingRate = video.audioSamplingRate;
  1284. track.spatialAudio = video.spatialAudio;
  1285. track.originalLanguage = video.originalLanguage;
  1286. track.audioMimeType = videoMimeType;
  1287. const allCodecs = videoCodec.split(',');
  1288. try {
  1289. track.videoCodec = ManifestParserUtils.guessCodecs(
  1290. ContentType.VIDEO, allCodecs);
  1291. track.audioCodec = ManifestParserUtils.guessCodecs(
  1292. ContentType.AUDIO, allCodecs);
  1293. } catch (e) {
  1294. // Ignore this error.
  1295. }
  1296. }
  1297. }
  1298. if (audio) {
  1299. track.audioId = audio.id;
  1300. track.originalAudioId = audio.originalId;
  1301. track.channelsCount = audio.channelsCount;
  1302. track.audioSamplingRate = audio.audioSamplingRate;
  1303. track.audioBandwidth = audio.bandwidth || null;
  1304. track.spatialAudio = audio.spatialAudio;
  1305. track.label = audio.label;
  1306. track.audioRoles = audio.roles;
  1307. track.accessibilityPurpose = audio.accessibilityPurpose;
  1308. track.originalLanguage = audio.originalLanguage;
  1309. const dependencyStream = audio.dependencyStream;
  1310. if (dependencyStream) {
  1311. track.audioCodec = dependencyStream.codecs || track.audioCodec;
  1312. }
  1313. }
  1314. /** @type {!Array<string>} */
  1315. const codecs = [];
  1316. if (track.videoCodec) {
  1317. codecs.push(track.videoCodec);
  1318. }
  1319. if (track.audioCodec) {
  1320. codecs.push(track.audioCodec);
  1321. }
  1322. track.codecs = codecs.join(', ');
  1323. return track;
  1324. }
  1325. /**
  1326. * @param {shaka.extern.Stream} stream
  1327. * @return {shaka.extern.Track}
  1328. */
  1329. static textStreamToTrack(stream) {
  1330. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1331. /** @type {shaka.extern.Track} */
  1332. const track = {
  1333. id: stream.id,
  1334. active: false,
  1335. type: ContentType.TEXT,
  1336. bandwidth: 0,
  1337. language: stream.language,
  1338. label: stream.label,
  1339. kind: stream.kind || null,
  1340. width: null,
  1341. height: null,
  1342. frameRate: null,
  1343. pixelAspectRatio: null,
  1344. hdr: null,
  1345. colorGamut: null,
  1346. videoLayout: null,
  1347. mimeType: stream.mimeType,
  1348. audioMimeType: null,
  1349. videoMimeType: null,
  1350. codecs: stream.codecs || null,
  1351. audioCodec: null,
  1352. videoCodec: null,
  1353. primary: stream.primary,
  1354. roles: stream.roles,
  1355. audioRoles: null,
  1356. forced: stream.forced,
  1357. videoId: null,
  1358. audioId: null,
  1359. audioGroupId: null,
  1360. channelsCount: null,
  1361. audioSamplingRate: null,
  1362. spatialAudio: false,
  1363. tilesLayout: null,
  1364. audioBandwidth: null,
  1365. videoBandwidth: null,
  1366. originalVideoId: null,
  1367. originalAudioId: null,
  1368. originalTextId: stream.originalId,
  1369. originalImageId: null,
  1370. accessibilityPurpose: stream.accessibilityPurpose,
  1371. originalLanguage: stream.originalLanguage,
  1372. };
  1373. return track;
  1374. }
  1375. /**
  1376. * @param {shaka.extern.Stream} stream
  1377. * @return {shaka.extern.Track}
  1378. */
  1379. static imageStreamToTrack(stream) {
  1380. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1381. let width = stream.width || null;
  1382. let height = stream.height || null;
  1383. // The stream width and height represent the size of the entire thumbnail
  1384. // sheet, so divide by the layout.
  1385. let reference = null;
1386. // Note: segmentIndex is built by default for HLS but not for DASH;
1387. // in DASH this information comes at the stream level rather than the
1388. // segment level.
  1389. if (stream.segmentIndex) {
  1390. reference = stream.segmentIndex.earliestReference();
  1391. }
  1392. let layout = stream.tilesLayout;
  1393. if (reference) {
  1394. layout = reference.getTilesLayout() || layout;
  1395. }
  1396. if (layout && width != null) {
  1397. width /= Number(layout.split('x')[0]);
  1398. }
  1399. if (layout && height != null) {
  1400. height /= Number(layout.split('x')[1]);
  1401. }
  1402. // TODO: What happens if there are multiple grids, with different
  1403. // layout sizes, inside this image stream?
  1404. /** @type {shaka.extern.Track} */
  1405. const track = {
  1406. id: stream.id,
  1407. active: false,
  1408. type: ContentType.IMAGE,
  1409. bandwidth: stream.bandwidth || 0,
  1410. language: '',
  1411. label: null,
  1412. kind: null,
  1413. width,
  1414. height,
  1415. frameRate: null,
  1416. pixelAspectRatio: null,
  1417. hdr: null,
  1418. colorGamut: null,
  1419. videoLayout: null,
  1420. mimeType: stream.mimeType,
  1421. audioMimeType: null,
  1422. videoMimeType: null,
  1423. codecs: stream.codecs || null,
  1424. audioCodec: null,
  1425. videoCodec: null,
  1426. primary: false,
  1427. roles: [],
  1428. audioRoles: null,
  1429. forced: false,
  1430. videoId: null,
  1431. audioId: null,
  1432. audioGroupId: null,
  1433. channelsCount: null,
  1434. audioSamplingRate: null,
  1435. spatialAudio: false,
  1436. tilesLayout: layout || null,
  1437. audioBandwidth: null,
  1438. videoBandwidth: null,
  1439. originalVideoId: null,
  1440. originalAudioId: null,
  1441. originalTextId: null,
  1442. originalImageId: stream.originalId,
  1443. accessibilityPurpose: null,
  1444. originalLanguage: null,
  1445. };
  1446. return track;
  1447. }
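// Illustrative arithmetic for the tiles layout above (assumed values): a
// thumbnail sheet stream of 3200x1800 with tilesLayout '10x10' yields
// track.width == 320 and track.height == 180 for each individual thumbnail.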
  1448. /**
  1449. * Generate and return an ID for this track, since the ID field is optional.
  1450. *
  1451. * @param {TextTrack|AudioTrack} html5Track
  1452. * @return {number} The generated ID.
  1453. */
  1454. static html5TrackId(html5Track) {
  1455. if (!html5Track['__shaka_id']) {
  1456. html5Track['__shaka_id'] = shaka.util.StreamUtils.nextTrackId_++;
  1457. }
  1458. return html5Track['__shaka_id'];
  1459. }
  1460. /**
  1461. * @param {TextTrack} textTrack
  1462. * @return {shaka.extern.Track}
  1463. */
  1464. static html5TextTrackToTrack(textTrack) {
  1465. const StreamUtils = shaka.util.StreamUtils;
  1466. /** @type {shaka.extern.Track} */
  1467. const track = StreamUtils.html5TrackToGenericShakaTrack_(textTrack);
  1468. track.active = textTrack.mode != 'disabled';
  1469. track.type = 'text';
  1470. track.originalTextId = textTrack.id;
  1471. if (textTrack.kind == 'captions') {
  1472. // See: https://github.com/shaka-project/shaka-player/issues/6233
  1473. track.mimeType = 'unknown';
  1474. }
  1475. if (textTrack.kind == 'subtitles') {
  1476. track.mimeType = 'text/vtt';
  1477. }
  1478. if (textTrack.kind) {
  1479. track.roles = [textTrack.kind];
  1480. }
  1481. if (textTrack.kind == 'forced') {
  1482. track.forced = true;
  1483. }
  1484. return track;
  1485. }
  1486. /**
  1487. * @param {AudioTrack} audioTrack
  1488. * @return {shaka.extern.Track}
  1489. */
  1490. static html5AudioTrackToTrack(audioTrack) {
  1491. const StreamUtils = shaka.util.StreamUtils;
  1492. /** @type {shaka.extern.Track} */
  1493. const track = StreamUtils.html5TrackToGenericShakaTrack_(audioTrack);
  1494. track.active = audioTrack.enabled;
  1495. track.type = 'variant';
  1496. track.originalAudioId = audioTrack.id;
  1497. if (audioTrack.kind == 'main') {
  1498. track.primary = true;
  1499. }
  1500. if (audioTrack.kind) {
  1501. track.roles = [audioTrack.kind];
  1502. track.audioRoles = [audioTrack.kind];
  1503. track.label = audioTrack.label;
  1504. }
  1505. return track;
  1506. }

  /**
   * Creates a Track object with non-type specific fields filled out. The
   * caller is responsible for completing the Track object with any
   * type-specific information (audio or text).
   *
   * @param {TextTrack|AudioTrack} html5Track
   * @return {shaka.extern.Track}
   * @private
   */
  static html5TrackToGenericShakaTrack_(html5Track) {
    const language = html5Track.language;
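    // An empty or missing language falls back to 'und' (undetermined) before
    // normalization.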
    /** @type {shaka.extern.Track} */
    const track = {
      id: shaka.util.StreamUtils.html5TrackId(html5Track),
      active: false,
      type: '',
      bandwidth: 0,
      language: shaka.util.LanguageUtils.normalize(language || 'und'),
      label: html5Track.label,
      kind: html5Track.kind,
      width: null,
      height: null,
      frameRate: null,
      pixelAspectRatio: null,
      hdr: null,
      colorGamut: null,
      videoLayout: null,
      mimeType: null,
      audioMimeType: null,
      videoMimeType: null,
      codecs: null,
      audioCodec: null,
      videoCodec: null,
      primary: false,
      roles: [],
      forced: false,
      audioRoles: null,
      videoId: null,
      audioId: null,
      audioGroupId: null,
      channelsCount: null,
      audioSamplingRate: null,
      spatialAudio: false,
      tilesLayout: null,
      audioBandwidth: null,
      videoBandwidth: null,
      originalVideoId: null,
      originalAudioId: null,
      originalTextId: null,
      originalImageId: null,
      accessibilityPurpose: null,
      originalLanguage: language,
    };
    return track;
  }

  /**
   * Determines if the given variant is playable.
   * @param {!shaka.extern.Variant} variant
   * @return {boolean}
   */
  static isPlayable(variant) {
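    // A variant is playable only if neither the application nor the key
    // system has restricted it, and it is not temporarily disabled
    // (disabledUntilTime == 0 means "not disabled").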
    return variant.allowedByApplication &&
        variant.allowedByKeySystem &&
        variant.disabledUntilTime == 0;
  }

  /**
   * Filters out unplayable variants.
   * @param {!Array<!shaka.extern.Variant>} variants
   * @return {!Array<!shaka.extern.Variant>}
   */
  static getPlayableVariants(variants) {
    return variants.filter((variant) => {
      return shaka.util.StreamUtils.isPlayable(variant);
    });
  }

  /**
   * Chooses streams according to the given config.
   * Works both for Stream and Track types due to their similarities.
   *
   * @param {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>} streams
   * @param {string} preferredLanguage
   * @param {string} preferredRole
   * @param {boolean} preferredForced
   * @return {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>}
   */
  static filterStreamsByLanguageAndRole(
      streams, preferredLanguage, preferredRole, preferredForced) {
    const LanguageUtils = shaka.util.LanguageUtils;

    /** @type {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>} */
    let chosen = streams;

    // Start with the set of primary streams.
    /** @type {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>} */
    const primary = streams.filter((stream) => {
      return stream.primary;
    });

    if (primary.length) {
      chosen = primary;
    }

    // Now reduce the set to one language. This covers both arbitrary language
    // choice and the reduction of the "primary" stream set to one language.
    const firstLanguage = chosen.length ? chosen[0].language : '';
    chosen = chosen.filter((stream) => {
      return stream.language == firstLanguage;
    });

    // Find the streams that best match our language preference. This will
    // override previous selections.
    if (preferredLanguage) {
      const closestLocale = LanguageUtils.findClosestLocale(
          LanguageUtils.normalize(preferredLanguage),
          streams.map((stream) => stream.language));
      // Only replace |chosen| if we found a locale that is close to our
      // preference.
      if (closestLocale) {
        chosen = streams.filter((stream) => {
          const locale = LanguageUtils.normalize(stream.language);
          return locale == closestLocale;
        });
      }
    }

    // Filter by forced preference.
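    // Note that this is an exact match: when forced streams are not
    // preferred, forced streams are filtered out rather than merely
    // deprioritized, and vice versa.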
    chosen = chosen.filter((stream) => {
      return stream.forced == preferredForced;
    });

    // Now refine the choice based on role preference.
    if (preferredRole) {
      const roleMatches = shaka.util.StreamUtils.filterStreamsByRole_(
          chosen, preferredRole);
      if (roleMatches.length) {
        return roleMatches;
      } else {
        shaka.log.warning('No exact match for the text role could be found.');
      }
    } else {
      // Prefer text streams with no roles, if they exist.
      const noRoleMatches = chosen.filter((stream) => {
        return stream.roles.length == 0;
      });
      if (noRoleMatches.length) {
        return noRoleMatches;
      }
    }

    // Either there was no role preference, or it could not be satisfied.
    // Choose an arbitrary role, if there are any, and filter out any other
    // roles. This ensures we never adapt between roles.
    const allRoles = chosen.map((stream) => {
      return stream.roles;
    }).reduce(shaka.util.Functional.collapseArrays, []);

    if (!allRoles.length) {
      return chosen;
    }
    return shaka.util.StreamUtils.filterStreamsByRole_(chosen, allRoles[0]);
  }

  /**
   * Filter Streams by role.
   * Works both for Stream and Track types due to their similarities.
   *
   * @param {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>} streams
   * @param {string} preferredRole
   * @return {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>}
   * @private
   */
  static filterStreamsByRole_(streams, preferredRole) {
    return streams.filter((stream) => {
      return stream.roles.includes(preferredRole);
    });
  }

  /**
   * Checks if the given stream is an audio stream.
   *
   * @param {shaka.extern.Stream} stream
   * @return {boolean}
   */
  static isAudio(stream) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    return stream.type == ContentType.AUDIO;
  }

  /**
   * Checks if the given stream is a video stream.
   *
   * @param {shaka.extern.Stream} stream
   * @return {boolean}
   */
  static isVideo(stream) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    return stream.type == ContentType.VIDEO;
  }

  /**
   * Get all non-null streams in the variant as an array.
   *
   * @param {shaka.extern.Variant} variant
   * @return {!Array<shaka.extern.Stream>}
   */
  static getVariantStreams(variant) {
    const streams = [];

    if (variant.audio) {
      streams.push(variant.audio);
    }
    if (variant.video) {
      streams.push(variant.video);
    }

    return streams;
  }

  /**
   * Indicates whether any of the variant's streams are marked as
   * fastSwitching.
   *
   * @param {shaka.extern.Variant} variant
   * @return {boolean}
   */
  static isFastSwitching(variant) {
    if (variant.audio && variant.audio.fastSwitching) {
      return true;
    }
    if (variant.video && variant.video.fastSwitching) {
      return true;
    }
    return false;
  }

  /**
   * Chooses the best-matching I-frame stream from the given list and attaches
   * it to the given stream as its trick-mode video.
   *
   * @param {!shaka.extern.Stream} stream
   * @param {!Array<!shaka.extern.Stream>} iFrameStreams
   */
  static setBetterIFrameStream(stream, iFrameStreams) {
    if (!iFrameStreams.length) {
      return;
    }
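    // Keep only I-frame streams whose normalized codec matches the main
    // stream, then sort them by ascending bandwidth, falling back to width
    // when bandwidth is missing or equal.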
    const validStreams = iFrameStreams.filter((iFrameStream) =>
      shaka.util.MimeUtils.getNormalizedCodec(stream.codecs) ==
      shaka.util.MimeUtils.getNormalizedCodec(iFrameStream.codecs))
        .sort((a, b) => {
          if (!a.bandwidth || !b.bandwidth || a.bandwidth == b.bandwidth) {
            return (a.width || 0) - (b.width || 0);
          }
          return a.bandwidth - b.bandwidth;
        });
    stream.trickModeVideo = validStreams[0];
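    // If any I-frame stream has the same resolution as the main stream,
    // prefer it over the lowest-bandwidth choice above.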
    if (validStreams.length > 1) {
      const sameResolutionStream = validStreams.find((iFrameStream) =>
        stream.width == iFrameStream.width &&
        stream.height == iFrameStream.height);
      if (sameResolutionStream) {
        stream.trickModeVideo = sameResolutionStream;
      }
    }
  }

  /**
   * Returns a summary string for a variant, listing the attribute values of
   * its audio and/or video streams, for log printing.
   * @param {shaka.extern.Variant} variant
   * @return {string}
   * @private
   */
  static getVariantSummaryString_(variant) {
    const summaries = [];
    if (variant.audio) {
      summaries.push(shaka.util.StreamUtils.getStreamSummaryString_(
          variant.audio));
    }
    if (variant.video) {
      summaries.push(shaka.util.StreamUtils.getStreamSummaryString_(
          variant.video));
    }
    return summaries.join(', ');
  }

  /**
   * Returns a summary string for an audio or video stream, for log printing.
   * @param {shaka.extern.Stream} stream
   * @return {string}
   * @private
   */
  static getStreamSummaryString_(stream) {
    // Accepted parameters for Chromecast can be found (internally) at
    // go/cast-mime-params
    if (shaka.util.StreamUtils.isAudio(stream)) {
      return 'type=audio' +
             ' codecs=' + stream.codecs +
             ' bandwidth=' + stream.bandwidth +
             ' channelsCount=' + stream.channelsCount +
             ' audioSamplingRate=' + stream.audioSamplingRate;
    }
    if (shaka.util.StreamUtils.isVideo(stream)) {
      return 'type=video' +
             ' codecs=' + stream.codecs +
             ' bandwidth=' + stream.bandwidth +
             ' frameRate=' + stream.frameRate +
             ' width=' + stream.width +
             ' height=' + stream.height;
    }
    return 'unexpected stream type';
  }

  /**
   * Clears the underlying decoding config cache.
   */
  static clearDecodingConfigCache() {
    shaka.util.StreamUtils.decodingConfigCache_.clear();
  }

  /**
   * Check if we should show text on screen automatically.
   *
   * @param {?shaka.extern.Stream} audioStream
   * @param {shaka.extern.Stream} textStream
   * @param {!shaka.extern.PlayerConfiguration} config
   * @return {boolean}
   */
  static shouldInitiallyShowText(audioStream, textStream, config) {
    const AutoShowText = shaka.config.AutoShowText;

    if (config.autoShowText == AutoShowText.NEVER) {
      return false;
    }
    if (config.autoShowText == AutoShowText.ALWAYS) {
      return true;
    }

    const LanguageUtils = shaka.util.LanguageUtils;

    /** @type {string} */
    const preferredTextLocale =
        LanguageUtils.normalize(config.preferredTextLanguage);
    /** @type {string} */
    const textLocale = LanguageUtils.normalize(textStream.language);

    if (config.autoShowText == AutoShowText.IF_PREFERRED_TEXT_LANGUAGE) {
      // Only the text language match matters.
      return LanguageUtils.areLanguageCompatible(
          textLocale,
          preferredTextLocale);
    }

    if (config.autoShowText == AutoShowText.IF_SUBTITLES_MAY_BE_NEEDED) {
      if (!audioStream) {
        return false;
      }
      /* The text should automatically be shown if the text is
       * language-compatible with the user's text language preference, but not
       * compatible with the audio. These are cases where we deduce that
       * subtitles may be needed.
       *
       * For example:
       *   preferred | chosen | chosen |
       *   text      | text   | audio  | show
       *   ------------------------------------
       *   en-CA     | en     | jp     | true
       *   en        | en-US  | fr     | true
       *   fr-CA     | en-US  | jp     | false
       *   en-CA     | en-US  | en-US  | false
       */
      /** @type {string} */
      const audioLocale = LanguageUtils.normalize(audioStream.language);

      return (
        LanguageUtils.areLanguageCompatible(textLocale, preferredTextLocale) &&
        !LanguageUtils.areLanguageCompatible(audioLocale, textLocale));
    }

    shaka.log.alwaysWarn('Invalid autoShowText setting!');
    return false;
  }
};

/**
 * A cache of results from mediaCapabilities.decodingInfo, indexed by the
 * (stringified) decodingConfig.
 *
 * @type {Map<string, !MediaCapabilitiesDecodingInfo>}
 * @private
 */
shaka.util.StreamUtils.decodingConfigCache_ = new Map();

/** @private {number} */
shaka.util.StreamUtils.nextTrackId_ = 0;
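
// These values mirror boolean fields of MediaCapabilitiesDecodingInfo, which
// is where they are read from when ranking variants by decoding attributes.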
/**
 * @enum {string}
 */
shaka.util.StreamUtils.DecodingAttributes = {
  SMOOTH: 'smooth',
  POWER: 'powerEfficient',
};
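
// The formats listed here are treated as universally decodable; WebP and
// AVIF support varies by platform and is determined separately (see the
// minimal probe images below).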
/**
 * @private {!Map<string, boolean>}
 */
shaka.util.StreamUtils.supportedImageMimeTypes_ = new Map()
    .set('image/svg+xml', true)
    .set('image/png', true)
    .set('image/jpeg', true)
    .set('image/jpg', true);
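
// The two data URIs below are minimal valid WebP and AVIF images. They are
// keyed by MIME type in minImage_ so callers can fetch a tiny known-good
// image for each format, e.g. to probe whether the platform can decode it.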
/**
 * @const {string}
 * @private
 */
// cspell: disable-next-line
shaka.util.StreamUtils.minWebPImage_ = 'data:image/webp;base64,UklGRjoAAABXRU' +
    'JQVlA4IC4AAACyAgCdASoCAAIALmk0mk0iIiIiIgBoSygABc6WWgAA/veff/0PP8bA//LwY' +
    'AAA';

/**
 * @const {string}
 * @private
 */
// cspell: disable-next-line
shaka.util.StreamUtils.minAvifImage_ = 'data:image/avif;base64,AAAAIGZ0eXBhdm' +
    'lmAAAAAGF2aWZtaWYxbWlhZk1BMUIAAADybWV0YQAAAAAAAAAoaGRscgAAAAAAAAAAcGljd' +
    'AAAAAAAAAAAAAAAAGxpYmF2aWYAAAAADnBpdG0AAAAAAAEAAAAeaWxvYwAAAABEAAABAAEA' +
    'AAABAAABGgAAAB0AAAAoaWluZgAAAAAAAQAAABppbmZlAgAAAAABAABhdjAxQ29sb3IAAAA' +
    'AamlwcnAAAABLaXBjbwAAABRpc3BlAAAAAAAAAAIAAAACAAAAEHBpeGkAAAAAAwgICAAAAA' +
    'xhdjFDgQ0MAAAAABNjb2xybmNseAACAAIAAYAAAAAXaXBtYQAAAAAAAAABAAEEAQKDBAAAA' +
    'CVtZGF0EgAKCBgANogQEAwgMg8f8D///8WfhwB8+ErK42A=';

/**
 * @const {!Map<string, string>}
 * @private
 */
shaka.util.StreamUtils.minImage_ = new Map()
    .set('image/webp', shaka.util.StreamUtils.minWebPImage_)
    .set('image/avif', shaka.util.StreamUtils.minAvifImage_);