From 7981e83dbc3cda818fa5dea20812dec51f4cc9a3 Mon Sep 17 00:00:00 2001 From: Adam Stankiewicz Date: Tue, 30 Jul 2024 12:29:04 -0400 Subject: [PATCH] fix: ensure transcript button appears in video player (#1135) * fix: ensure transcript button appears in video player * chore: unmute video by default * chore: fix * chore: lint * chore: lint * fix: only include vjstranscribe plugin if showTranscripts is true * chore: tests * fix: added test coverage and some nits --------- Co-authored-by: Maham Akif --- .../microlearning/VideoDetailPage.jsx | 12 ++- src/components/video/VideoJS.jsx | 79 +++++++++------ src/components/video/data/hooks.js | 58 +++++++++++ src/components/video/data/index.js | 4 + src/components/video/data/service.js | 34 ++----- src/components/video/data/tests/hooks.test.js | 70 +++++++++++++ .../video/data/tests/service.test.js | 98 +++++-------------- src/components/video/tests/VideoJS.test.jsx | 70 ++++++++++++- .../video/tests/VideoPlayer.test.jsx | 2 +- 9 files changed, 289 insertions(+), 138 deletions(-) create mode 100644 src/components/video/data/hooks.js create mode 100644 src/components/video/data/index.js create mode 100644 src/components/video/data/tests/hooks.test.js diff --git a/src/components/microlearning/VideoDetailPage.jsx b/src/components/microlearning/VideoDetailPage.jsx index b12d54dc84..81a28152c8 100644 --- a/src/components/microlearning/VideoDetailPage.jsx +++ b/src/components/microlearning/VideoDetailPage.jsx @@ -91,11 +91,11 @@ const VideoDetailPage = () => {

             {videoData?.courseTitle}
 
-
+
             {videoData?.videoDuration && `(${videoData?.videoDuration} minutes)`}
-
+
 
             {videoData?.videoSummary}
 
           {videoData?.videoSkills?.length > 0 && (
@@ -123,9 +123,11 @@ const VideoDetailPage = () => {
           )}
-
-
-
+          { videoData?.videoUrl && (
+
+
+          )}
           {isDefinedAndNotNull(courseMetadata.activeCourseRun) && (
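Note: the JSX markup inside the two hunks above is not visible in this excerpt, so the following is only a rough sketch of the guard the second hunk introduces, based on the commit message and the surviving `videoData?.videoUrl && (...)` expression. The component name and prop below are illustrative assumptions, not copied from the patch.

// Illustrative sketch only: "VideoPlayer" and its "videoURL" prop are assumptions
// for this example; the actual element names and props live in VideoDetailPage.jsx.
import React from 'react';
import VideoPlayer from '../video/VideoPlayer';

const VideoSection = ({ videoData }) => {
  // Same guard as the hunk above: skip rendering the player when no URL exists.
  if (!videoData?.videoUrl) {
    return null;
  }
  return <VideoPlayer videoURL={videoData.videoUrl} />;
};

export default VideoSection;
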
diff --git a/src/components/video/VideoJS.jsx b/src/components/video/VideoJS.jsx index 6ac66da59a..3cb25a6e81 100644 --- a/src/components/video/VideoJS.jsx +++ b/src/components/video/VideoJS.jsx @@ -1,11 +1,11 @@ -import React, { useEffect, useRef } from 'react'; +import { useCallback, useEffect, useRef } from 'react'; import PropTypes from 'prop-types'; import 'videojs-youtube'; import videojs from 'video.js'; import 'video.js/dist/video-js.css'; import { PLAYBACK_RATES } from './data/constants'; -import { fetchAndAddTranscripts } from './data/service'; +import { usePlayerOptions, useTranscripts } from './data'; window.videojs = videojs; // eslint-disable-next-line import/no-extraneous-dependencies @@ -15,7 +15,46 @@ const VideoJS = ({ options, onReady, customOptions }) => { const videoRef = useRef(null); const playerRef = useRef(null); + const transcripts = useTranscripts({ + player: playerRef.current, + customOptions, + }); + + const playerOptions = usePlayerOptions({ + transcripts, + options, + customOptions, + }); + + const handlePlayerReady = useCallback(() => { + // Add remote text tracks + const textTracks = Object.entries(transcripts.textTracks); + textTracks.forEach(([lang, webVttFileUrl]) => { + playerRef.current.addRemoteTextTrack({ + kind: 'subtitles', + src: webVttFileUrl, + srclang: lang, + label: lang, + }, false); + }); + + // Set playback rates + if (customOptions?.showPlaybackMenu) { + playerRef.current.playbackRates(PLAYBACK_RATES); + } + + // Callback to parent component, if provided + if (onReady) { + onReady(playerRef.current); + } + }, [customOptions?.showPlaybackMenu, onReady, transcripts.textTracks]); + useEffect(() => { + if (transcripts.isLoading) { + // While async transcripts data is loading, don't initialize the player + return; + } + // Make sure Video.js player is only initialized once if (!playerRef.current) { // The Video.js player needs to be _inside_ the component el for React 18 Strict Mode. @@ -24,46 +63,30 @@ const VideoJS = ({ options, onReady, customOptions }) => { videoElement.classList.add('vjs-big-play-centered'); videoRef.current.appendChild(videoElement); - // eslint-disable-next-line no-multi-assign - const player = playerRef.current = videojs(videoElement, options, () => { - if (onReady) { - onReady(player); - } - }); - - if (customOptions?.showPlaybackMenu) { - player.playbackRates(PLAYBACK_RATES); - } - - if (customOptions?.showTranscripts && customOptions?.transcriptUrls) { - fetchAndAddTranscripts(customOptions?.transcriptUrls, player); - } + playerRef.current = videojs(videoElement, playerOptions, handlePlayerReady); } else { - const player = playerRef.current; - - player.autoplay(options.autoplay); - player.src(options.sources); + playerRef.current.autoplay(playerOptions.autoplay); + playerRef.current.src(playerOptions.sources); } - }, [onReady, options, videoRef, customOptions]); + }, [transcripts.isLoading, playerOptions, handlePlayerReady]); // Dispose the Video.js player when the functional component unmounts useEffect(() => { - const player = playerRef.current; - - return () => { - if (player && !player.isDisposed()) { - player.dispose(); + const cleanup = () => { + if (playerRef.current && !playerRef.current.isDisposed()) { + playerRef.current.dispose(); playerRef.current = null; } }; - }, [playerRef]); + return cleanup; + }, []); return ( <>
-      { customOptions?.showTranscripts && <div id="vjs-transcribe" /> }
+      {customOptions?.showTranscripts && <div id="vjs-transcribe" />
} ); }; diff --git a/src/components/video/data/hooks.js b/src/components/video/data/hooks.js new file mode 100644 index 0000000000..cf5100a165 --- /dev/null +++ b/src/components/video/data/hooks.js @@ -0,0 +1,58 @@ +import { useEffect, useState } from 'react'; +import { logError } from '@edx/frontend-platform/logging'; + +import { fetchAndAddTranscripts } from './service'; + +export function useTranscripts({ player, customOptions }) { + const shouldUseTranscripts = !!(customOptions?.showTranscripts && customOptions?.transcriptUrls); + const [isLoading, setIsLoading] = useState(shouldUseTranscripts); + const [textTracks, setTextTracks] = useState([]); + const [transcriptUrl, setTranscriptUrl] = useState(null); + + useEffect(() => { + const fetchFn = async () => { + setIsLoading(true); + if (shouldUseTranscripts) { + try { + const result = await fetchAndAddTranscripts(customOptions.transcriptUrls, player); + setTextTracks(result); + // We are only catering to English transcripts for now as we don't have the option to change + // the transcript language yet. + if (result.en) { + setTranscriptUrl(result.en); + } + } catch (error) { + logError(`Error fetching transcripts for player: ${error}`); + } finally { + setIsLoading(false); + } + } + }; + fetchFn(); + }, [customOptions?.transcriptUrls, player, shouldUseTranscripts]); + + return { + textTracks, + transcriptUrl, + isLoading, + }; +} + +export function usePlayerOptions({ + transcripts, + options, + customOptions, +}) { + const plugins = { ...options.plugins }; + if (customOptions?.showTranscripts && transcripts.transcriptUrl) { + const existingTranscribeUrls = plugins.vjstranscribe?.urls || []; + plugins.vjstranscribe = { + ...plugins.vjstranscribe, + urls: [transcripts.transcriptUrl, ...existingTranscribeUrls], + }; + } + return { + ...options, + plugins, + }; +} diff --git a/src/components/video/data/index.js b/src/components/video/data/index.js new file mode 100644 index 0000000000..dc268ff6c1 --- /dev/null +++ b/src/components/video/data/index.js @@ -0,0 +1,4 @@ +export * from './constants'; +export * from './hooks'; +export * from './service'; +export * from './utils'; diff --git a/src/components/video/data/service.js b/src/components/video/data/service.js index 2d8acff75d..000913dcd6 100644 --- a/src/components/video/data/service.js +++ b/src/components/video/data/service.js @@ -1,41 +1,25 @@ import { logError } from '@edx/frontend-platform/logging'; import { convertToWebVtt, createWebVttFile } from './utils'; -const fetchAndAddTranscripts = async (transcriptUrls, player) => { +const fetchAndAddTranscripts = async (transcriptUrls) => { + const data = {}; const transcriptPromises = Object.entries(transcriptUrls).map(([lang, url]) => fetch(url) - .then(response => { - if (!response.ok) { - logError(`Failed to fetch transcript for ${lang}`); - } - return response.json(); - }) - .then(transcriptData => { + .then(response => response.json()) + .then((transcriptData) => { const webVttData = convertToWebVtt(transcriptData); const webVttFileUrl = createWebVttFile(webVttData); - - player.addRemoteTextTrack({ - kind: 'subtitles', - src: webVttFileUrl, - srclang: lang, - label: lang, - }, false); - - // We are only catering to English transcripts for now as we don't have the option to change - // the transcript language yet. 
- if (lang === 'en') { - player.vjstranscribe({ - urls: [webVttFileUrl], - }); - } + data[lang] = webVttFileUrl; }) .catch(error => { - logError(`Error fetching or processing transcript for ${lang}:`, error); + logError(`Error fetching or processing transcript for ${lang}: ${error}`); })); try { await Promise.all(transcriptPromises); + return data; } catch (error) { - logError('Error fetching or processing transcripts:', error); + logError(`Error fetching or processing transcripts: ${error}`); + return data; } }; diff --git a/src/components/video/data/tests/hooks.test.js b/src/components/video/data/tests/hooks.test.js new file mode 100644 index 0000000000..1c0b24a2fb --- /dev/null +++ b/src/components/video/data/tests/hooks.test.js @@ -0,0 +1,70 @@ +import { renderHook } from '@testing-library/react-hooks'; +import { logError } from '@edx/frontend-platform/logging'; +import { useTranscripts } from '../hooks'; +import { fetchAndAddTranscripts } from '../service'; + +// Mocking dependencies +jest.mock('../service', () => ({ + fetchAndAddTranscripts: jest.fn(), +})); + +jest.mock('@edx/frontend-platform/logging', () => ({ + logError: jest.fn(), +})); + +describe('useTranscripts', () => { + const customOptions = { + showTranscripts: true, + transcriptUrls: { + en: 'https://example.com/transcript-en.txt', + }, + }; + const mockPlayer = {}; + + it('should set isLoading to true initially if showTranscripts is true', () => { + const { result } = renderHook(() => useTranscripts({ player: mockPlayer, customOptions })); + expect(result.current.isLoading).toBe(true); + }); + + it('should fetch and set textTracks and transcriptUrl correctly', async () => { + const textTracks = { en: 'https://example.com/transcript-en.txt' }; + fetchAndAddTranscripts.mockResolvedValueOnce(textTracks); + + const { result, waitForNextUpdate } = renderHook(() => useTranscripts({ player: mockPlayer, customOptions })); + + await waitForNextUpdate(); + + expect(result.current.isLoading).toBe(false); + expect(result.current.textTracks).toEqual(textTracks); + expect(result.current.transcriptUrl).toBe(textTracks.en); + }); + + it('should log error and set isLoading to false if fetching transcripts fails', async () => { + const errorMessage = 'Error fetching transcripts'; + fetchAndAddTranscripts.mockRejectedValueOnce(new Error(errorMessage)); + + const { result, waitForNextUpdate } = renderHook(() => useTranscripts({ player: mockPlayer, customOptions })); + + await waitForNextUpdate(); + + expect(logError).toHaveBeenCalledWith(`Error fetching transcripts for player: Error: ${errorMessage}`); + expect(result.current.isLoading).toBe(false); + expect(result.current.textTracks).toEqual([]); + expect(result.current.transcriptUrl).toBeNull(); + }); + + it('should not fetch transcripts if showTranscripts is false', async () => { + const customOptionsWithoutTranscripts = { + showTranscripts: false, + transcriptUrls: undefined, + }; + + const { result } = renderHook(() => useTranscripts({ + player: mockPlayer, + customOptions: customOptionsWithoutTranscripts, + })); + + expect(result.current.textTracks).toEqual([]); + expect(result.current.transcriptUrl).toBeNull(); + }); +}); diff --git a/src/components/video/data/tests/service.test.js b/src/components/video/data/tests/service.test.js index bb66cdeae7..7e166828f8 100644 --- a/src/components/video/data/tests/service.test.js +++ b/src/components/video/data/tests/service.test.js @@ -20,115 +20,61 @@ describe('fetchAndAddTranscripts', () => { const mockTranscriptUrls = { en: 
'https://example.com/en-transcript.json', }; - const mockTranscriptData = { items: ['example'], }; - const mockWebVttData = 'WEBVTT\n\n1\n00:00:00.000 --> 00:00:05.000\nExample subtitle'; const mockWebVttFileUrl = 'https://example.com/en-transcript.vtt'; - - global.fetch = jest.fn().mockResolvedValue({ - ok: true, - json: () => Promise.resolve(mockTranscriptData), - }); - + fetch.mockResponseOnce(JSON.stringify(mockTranscriptData)); convertToWebVtt.mockReturnValue(mockWebVttData); createWebVttFile.mockReturnValue(mockWebVttFileUrl); - const player = { - addRemoteTextTrack: jest.fn(), - vjstranscribe: jest.fn(), - }; - - await fetchAndAddTranscripts(mockTranscriptUrls, player); + const result = await fetchAndAddTranscripts(mockTranscriptUrls); - expect(global.fetch).toHaveBeenCalledWith(mockTranscriptUrls.en); + expect(fetch).toHaveBeenCalledWith(mockTranscriptUrls.en); expect(convertToWebVtt).toHaveBeenCalledWith(mockTranscriptData); expect(createWebVttFile).toHaveBeenCalledWith(mockWebVttData); - expect(player.addRemoteTextTrack).toHaveBeenCalledWith( - { - kind: 'subtitles', - src: mockWebVttFileUrl, - srclang: 'en', - label: 'en', - }, - false, - ); - expect(player.vjstranscribe).toHaveBeenCalledWith({ - urls: [mockWebVttFileUrl], - }); - }); - it('should log an error if the transcript fetch fails', async () => { - const mockTranscriptUrls = { - en: 'https://example.com/en-transcript.json', - }; - - global.fetch = jest.fn().mockResolvedValue({ - ok: false, + expect(result).toEqual({ + en: mockWebVttFileUrl, }); - - const player = { - addRemoteTextTrack: jest.fn(), - vjstranscribe: jest.fn(), - }; - - await fetchAndAddTranscripts(mockTranscriptUrls, player); - - expect(global.fetch).toHaveBeenCalledWith(mockTranscriptUrls.en); - expect(logError).toHaveBeenCalledWith('Failed to fetch transcript for en'); }); - it('should log an error if JSON parsing or file creation fails', async () => { + it('should log an error if the transcript fetch, JSON parsing, or file creation fails', async () => { const mockTranscriptUrls = { en: 'https://example.com/en-transcript.json', }; + const error = new Error('failed to fetch!'); + fetch.mockRejectOnce(error); - global.fetch = jest.fn().mockResolvedValue({ - ok: true, - json: () => Promise.reject(new Error('Parsing error')), - }); - - const player = { - addRemoteTextTrack: jest.fn(), - vjstranscribe: jest.fn(), - }; + const result = await fetchAndAddTranscripts(mockTranscriptUrls); - await fetchAndAddTranscripts(mockTranscriptUrls, player); + expect(fetch).toHaveBeenCalledWith(mockTranscriptUrls.en); + expect(logError).toHaveBeenCalledWith(`Error fetching or processing transcript for en: ${error}`); - expect(global.fetch).toHaveBeenCalledWith(mockTranscriptUrls.en); - expect(logError).toHaveBeenCalledWith( - 'Error fetching or processing transcript for en:', - expect.any(Error), - ); + expect(result).toEqual({}); }); it('should log an error if there is an error during Promise.all', async () => { const mockTranscriptUrls = { en: 'https://example.com/en-transcript.json', }; - - global.fetch = jest.fn().mockResolvedValue({ - ok: true, - json: () => Promise.resolve({ items: [] }), - }); - - const player = { - addRemoteTextTrack: jest.fn(), - vjstranscribe: jest.fn(), + const mockTranscriptData = { + items: ['example'], }; - + fetch.mockResponseOnce(JSON.stringify(mockTranscriptData)); + const error = new Error('File creation error'); createWebVttFile.mockImplementation(() => { - throw new Error('File creation error'); + throw error; }); - await 
fetchAndAddTranscripts(mockTranscriptUrls, player); + const result = await fetchAndAddTranscripts(mockTranscriptUrls); - expect(global.fetch).toHaveBeenCalledWith(mockTranscriptUrls.en); + expect(fetch).toHaveBeenCalledWith(mockTranscriptUrls.en); expect(logError).toHaveBeenCalledWith( - 'Error fetching or processing transcript for en:', - expect.any(Error), + `Error fetching or processing transcript for en: ${error}`, ); + + expect(result).toEqual({}); }); }); diff --git a/src/components/video/tests/VideoJS.test.jsx b/src/components/video/tests/VideoJS.test.jsx index 8368cccfe7..c21aaf0be0 100644 --- a/src/components/video/tests/VideoJS.test.jsx +++ b/src/components/video/tests/VideoJS.test.jsx @@ -1,8 +1,15 @@ import React from 'react'; -import { VideoJS } from '..'; +import { waitFor } from '@testing-library/react'; import { renderWithRouter } from '../../../utils/tests'; +import VideoJS from '../VideoJS'; +import { useTranscripts } from '../data'; +// Mocking the 'videojs-vjstranscribe' and 'useTranscripts' hook jest.mock('videojs-vjstranscribe'); +jest.mock('../data', () => ({ + useTranscripts: jest.fn(), + usePlayerOptions: jest.fn(), +})); const hlsUrl = 'https://test-domain.com/test-prefix/id.m3u8'; const ytUrl = 'https://www.youtube.com/watch?v=oHg5SJYRHA0'; @@ -25,8 +32,16 @@ const YoutubeVideoOptions = { sources: [{ src: ytUrl, type: 'video/youtube' }], }; -describe('Videon JS component', () => { - it('Renders VideoJS components correctly.', () => { +describe('VideoJS', () => { + beforeEach(() => { + useTranscripts.mockReturnValue({ + isLoading: false, + textTracks: {}, + transcriptUrl: null, + }); + }); + + it('Renders VideoJS components correctly for HLS videos.', () => { const { container } = renderWithRouter(); expect(container.querySelector('.video-js-wrapper')).toBeTruthy(); expect(container.querySelector('.vjs-big-play-centered')).toBeTruthy(); @@ -39,4 +54,53 @@ describe('Videon JS component', () => { expect(container.querySelector('.vjs-big-play-centered')).toBeTruthy(); expect(container.querySelector('video-js')).toBeTruthy(); }); + + it('Renders VideoJS components correctly with transcripts.', async () => { + const customOptions = { + showTranscripts: true, + transcriptUrls: { + en: 'https://example.com/transcript-en.txt', + }, + }; + + useTranscripts.mockReturnValue({ + isLoading: false, + textTracks: { + en: 'https://example.com/transcript-en.txt', + }, + transcriptUrl: 'https://example.com/transcript-en.txt', + }); + + const { container } = renderWithRouter(); + + await waitFor(() => { + expect(container.querySelector('.video-js-wrapper')).toBeTruthy(); + expect(container.querySelector('.vjs-big-play-centered')).toBeTruthy(); + expect(container.querySelector('video-js')).toBeTruthy(); + expect(container.querySelector('#vjs-transcribe')).toBeTruthy(); + }); + }); + + it('Does not initialize VideoJS player while transcripts are loading.', async () => { + const customOptions = { + showTranscripts: true, + transcriptUrls: { + en: 'https://example.com/transcript-en.txt', + }, + }; + + useTranscripts.mockReturnValue({ + isLoading: true, + textTracks: {}, + transcriptUrl: null, + }); + + const { container } = renderWithRouter(); + + await waitFor(() => { + expect(container.querySelector('.video-js-wrapper')).toBeTruthy(); + expect(container.querySelector('.vjs-big-play-centered')).toBeFalsy(); + expect(container.querySelector('video-js')).toBeFalsy(); + }); + }); }); diff --git a/src/components/video/tests/VideoPlayer.test.jsx 
b/src/components/video/tests/VideoPlayer.test.jsx index 83d0cd4a38..61711c6108 100644 --- a/src/components/video/tests/VideoPlayer.test.jsx +++ b/src/components/video/tests/VideoPlayer.test.jsx @@ -1,6 +1,6 @@ import React from 'react'; import { waitFor } from '@testing-library/react'; -import { VideoPlayer } from '..'; // Assuming VideoPlayer is exported as named export +import VideoPlayer from '../VideoPlayer'; import { renderWithRouter } from '../../../utils/tests'; const hlsUrl = 'https://test-domain.com/test-prefix/id.m3u8';
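
For anyone trying the patch locally, a minimal consumer of the reworked VideoJS component might look like the sketch below. The option shapes (`sources`, `transcriptUrls`, `showTranscripts`, `showPlaybackMenu`) mirror the ones used in the tests above; the URLs and the import path are placeholders, not taken from the repository.

import React from 'react';
import VideoJS from './src/components/video/VideoJS';

// Standard video.js options; the HLS source shape matches VideoJS.test.jsx.
const options = {
  autoplay: false,
  controls: true,
  responsive: true,
  fluid: true,
  sources: [{ src: 'https://example.com/video.m3u8', type: 'application/x-mpegURL' }],
};

// customOptions drive the new useTranscripts/usePlayerOptions hooks: transcripts
// are fetched before the player initializes, and the English WebVTT file is passed
// to the vjstranscribe plugin so the transcript button appears.
const customOptions = {
  showTranscripts: true,
  showPlaybackMenu: true,
  transcriptUrls: {
    en: 'https://example.com/transcript-en.json',
  },
};

const ExamplePage = () => (
  <VideoJS
    options={options}
    customOptions={customOptions}
    onReady={(player) => {
      // Called once the player is initialized (after transcripts finish loading).
      player.on('ended', () => { /* e.g., mark the video as complete */ });
    }}
  />
);

export default ExamplePage;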