fix(24-01): complete recording stop functionality

- Pass jamClient to useRecordingHelpers in JKSessionScreen
- Add thisClientStartedRecording to Redux for cross-instance state sharing
- Capture shouldCallRestStop before async operations to prevent race conditions
- Add comprehensive logging for debugging recording start/stop flow
- Fix error handling to properly parse 422 response details
- Update tests with new Redux state and API mock responses

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Nuwan 2026-02-23 20:41:31 +05:30
parent e1065430e5
commit c6d61de4eb
6 changed files with 971 additions and 43 deletions

View File

@ -23,7 +23,7 @@ import { getSessionHistory, getSession, joinSession as joinSessionRest, updateSe
import { syncTracksToServer } from '../../services/trackSyncService';
// Redux imports
import { openModal, closeModal, toggleModal, selectModal } from '../../store/features/sessionUISlice';
import { openModal, closeModal, toggleModal, selectModal, selectIsRecording, setRecordingStarted, setRecordingStopped } from '../../store/features/sessionUISlice';
import { selectMediaSummary, selectMetronomeTrackMixers, selectMixersReady } from '../../store/features/mixersSlice';
import {
fetchActiveSession,
@ -139,9 +139,13 @@ const JKSessionScreen = () => {
const inSession = useCallback(() => inSessionFlag, [inSessionFlag]);
const { globalObject, metronomeState, updateMetronomeState, closeMetronome, resetMetronome } = useGlobalContext();
const { getCurrentRecordingState, reset: resetRecordingState, currentlyRecording } = useRecordingHelpers();
const { getCurrentRecordingState, reset: resetRecordingState, stopRecording } = useRecordingHelpers(jamClient);
const { SessionPageEnter } = useSessionUtils();
// Redux recording state
const currentlyRecording = useSelector(selectIsRecording);
const [isStoppingRecording, setIsStoppingRecording] = useState(false);
// Redux media state and actions
const mediaSummary = useSelector(selectMediaSummary);
const metronomeTrackMixers = useSelector(selectMetronomeTrackMixers);
@ -1027,6 +1031,27 @@ const JKSessionScreen = () => {
}
};
// Handle Record button click - opens modal to start recording, or stops recording if already recording
const handleRecordButtonClick = async () => {
  if (!currentlyRecording) {
    // Not recording yet: open the modal so the user can configure and start one.
    dispatch(openModal('recording'));
    return;
  }
  // Already recording: stop directly, guarding against rapid double-clicks.
  if (isStoppingRecording) return;
  setIsStoppingRecording(true);
  try {
    await stopRecording();
    toast.info('Recording stopped.');
  } catch (error) {
    console.error('Failed to stop recording:', error);
    toast.error('Failed to stop recording.');
  } finally {
    // Always clear the in-flight flag so the button re-enables.
    setIsStoppingRecording(false);
  }
};
// Handle Resync button click - performs audio resync via native client
const handleResync = useCallback(async (e) => {
e.preventDefault();
@ -1295,9 +1320,17 @@ const JKSessionScreen = () => {
{videoLoading && (<Spinner size="sm" />)}
&nbsp;Video
</Button>
<Button className='btn-custom-outline' outline size="md" onClick={() => dispatch(openModal('recording'))}>
<Button
className={currentlyRecording ? 'btn-recording-active' : 'btn-custom-outline'}
color={currentlyRecording ? 'danger' : undefined}
outline={!currentlyRecording}
size="md"
onClick={handleRecordButtonClick}
disabled={isStoppingRecording}
>
<img src={recordIcon} alt="Record" style={{ width: '20px', height: '20px', marginRight: '0.3rem' }} />
Record</Button>
{isStoppingRecording ? 'Stopping...' : (currentlyRecording ? 'Recording' : 'Record')}
</Button>
<Button className='btn-custom-outline' outline size="md" onClick={handleBroadcast}>
<img src={broadcastIcon} alt="Broadcast" style={{ width: '21px', height: '21px', marginRight: '0.3rem' }} />
Broadcast</Button>

View File

@ -0,0 +1,246 @@
import React from 'react';
import { render, screen, fireEvent, waitFor, act } from '@testing-library/react';
import { Provider } from 'react-redux';
import { configureStore } from '@reduxjs/toolkit';
import JKSessionRecordingModal from '../JKSessionRecordingModal';
import sessionUIReducer from '../../../store/features/sessionUISlice';
// Mock dependencies
// react-toastify: replace toast methods with spies so tests can assert on
// notifications without rendering a real toaster.
jest.mock('react-toastify', () => ({
  toast: {
    info: jest.fn(),
    error: jest.fn(),
    warning: jest.fn(),
  },
}));
// Deterministic native-client stub. The `mock` prefix matters: jest.mock
// factories are hoisted above this declaration, and jest only permits
// referencing out-of-scope variables whose names start with "mock".
// NOTE(review): resolved values (preference=2, the video-source map) are
// assumed to mirror the real jamClient's shapes — confirm against the native API.
const mockJamClient = {
  GetAudioRecordingPreference: jest.fn().mockResolvedValue(2),
  SetAudioRecordingPreference: jest.fn().mockResolvedValue(true),
  getOpenVideoSources: jest.fn().mockResolvedValue({
    webcam1: true,
    webcam2: false,
    screen_capture: true,
    session_video: false,
  }),
  IsOBSAvailable: jest.fn().mockResolvedValue(false),
};
// Serve the stubbed jamClient through the JamServerContext hook.
jest.mock('../../../context/JamServerContext', () => ({
  useJamServerContext: () => ({
    jamClient: mockJamClient,
  }),
}));
// Stub useRecordingHelpers so the modal never touches REST or native recording.
jest.mock('../../../hooks/useRecordingHelpers', () => () => ({
  currentlyRecording: false,
  startingRecording: false,
  stoppingRecording: false,
  startRecording: jest.fn().mockResolvedValue({ id: 'test-recording-id' }),
  stopRecording: jest.fn().mockResolvedValue(true),
}));
// Create a mock store
// Builds a Redux store whose sessionUI slice is fully preloaded; callers may
// override any top-level sessionUI key via `initialState`.
const createMockStore = (initialState = {}) => {
  // Complete default shape of the sessionUI slice used by the modal.
  const baseSessionUI = {
    modals: {
      settings: false,
      invite: false,
      volume: false,
      recording: false,
      leave: false,
      jamTrack: false,
      backingTrack: false,
      mediaControls: false,
      metronome: false,
      videoSettings: false,
    },
    panels: {
      chat: true,
      mixer: false,
      participants: true,
      tracks: true,
    },
    view: {
      participantLayout: 'grid',
      showParticipantVideos: true,
      showMixer: false,
      sidebarCollapsed: false,
    },
    mediaUI: {
      showMyMixes: false,
      showCustomMixes: false,
      editingMixdownId: null,
      creatingMixdown: false,
      createMixdownErrors: null,
    },
    mixerUI: {
      mixMode: 'PERSONAL',
      currentMixerRange: { min: null, max: null },
    },
    openJamTrack: null,
    jamTrackUI: {
      lastUsedMixdownId: null,
      volume: 100,
    },
    recording: {
      isRecording: false,
      recordingId: null,
      thisClientStartedRecording: false,
    },
  };
  return configureStore({
    reducer: { sessionUI: sessionUIReducer },
    preloadedState: {
      // Overrides win over defaults, matching the original spread order.
      sessionUI: { ...baseSessionUI, ...initialState },
    },
  });
};
// Helper function for rendering component with providers
const renderModal = async (props = {}, storeState = {}) => {
const store = createMockStore(storeState);
const defaultProps = {
isOpen: true,
toggle: jest.fn(),
};
let result;
await act(async () => {
result = render(
<Provider store={store}>
<JKSessionRecordingModal {...defaultProps} {...props} />
</Provider>
);
});
return {
...result,
store,
};
};
// Unit tests for JKSessionRecordingModal. With jamClient, JamServerContext and
// useRecordingHelpers mocked above, these tests exercise only the modal's own
// rendering, validation, and form-state behavior.
describe('JKSessionRecordingModal', () => {
  beforeEach(() => {
    // Reset call counts on all jest mocks between tests.
    jest.clearAllMocks();
  });
  describe('rendering', () => {
    test('renders modal with title "Make Recording"', async () => {
      await renderModal();
      expect(screen.getByText('Make Recording')).toBeInTheDocument();
    });
    test('renders recording type radio buttons', async () => {
      await renderModal();
      expect(screen.getByLabelText('Audio only')).toBeInTheDocument();
      expect(screen.getByLabelText('Audio and video')).toBeInTheDocument();
    });
    test('renders name input field', async () => {
      await renderModal();
      expect(screen.getByPlaceholderText('Recording Name')).toBeInTheDocument();
    });
    test('renders Start Recording button when not recording', async () => {
      await renderModal();
      expect(screen.getByRole('button', { name: /start recording/i })).toBeInTheDocument();
    });
    test('renders Close button', async () => {
      await renderModal();
      // There are two close buttons - X in header and Close in footer
      const closeButtons = screen.getAllByRole('button', { name: /close/i });
      expect(closeButtons.length).toBe(2);
    });
  });
  describe('name validation', () => {
    test('shows error when trying to start recording with empty name', async () => {
      await renderModal();
      const startButton = screen.getByRole('button', { name: /start recording/i });
      // Click inside act() because the handler triggers async state updates.
      await act(async () => {
        fireEvent.click(startButton);
        // Allow state updates to complete
        await new Promise(resolve => setTimeout(resolve, 100));
      });
      expect(screen.getByText('Please enter a recording name.')).toBeInTheDocument();
    });
    test('shows error when name contains only whitespace', async () => {
      await renderModal();
      const nameInput = screen.getByPlaceholderText('Recording Name');
      // Whitespace-only names must be rejected the same as empty names.
      fireEvent.change(nameInput, { target: { value: ' ' } });
      const startButton = screen.getByRole('button', { name: /start recording/i });
      await act(async () => {
        fireEvent.click(startButton);
        // Allow state updates to complete
        await new Promise(resolve => setTimeout(resolve, 100));
      });
      expect(screen.getByText('Please enter a recording name.')).toBeInTheDocument();
    });
  });
  describe('recording type selection', () => {
    test('audio only is selected by default', async () => {
      await renderModal();
      const audioOnlyRadio = screen.getByLabelText('Audio only');
      expect(audioOnlyRadio).toBeChecked();
    });
  });
  describe('close functionality', () => {
    test('calls toggle when Close button is clicked', async () => {
      const toggleMock = jest.fn();
      await renderModal({ toggle: toggleMock });
      // Get the Close button in the footer (second one, or by text content)
      const closeButtons = screen.getAllByRole('button', { name: /close/i });
      const footerCloseButton = closeButtons.find(btn => btn.textContent === 'Close');
      fireEvent.click(footerCloseButton);
      expect(toggleMock).toHaveBeenCalledTimes(1);
    });
  });
  describe('form state management', () => {
    test('can change recording name', async () => {
      await renderModal();
      const nameInput = screen.getByPlaceholderText('Recording Name');
      fireEvent.change(nameInput, { target: { value: 'Test Recording' } });
      expect(nameInput.value).toBe('Test Recording');
    });
    test('can toggle voice chat checkbox', async () => {
      await renderModal();
      const voiceChatCheckbox = screen.getByLabelText('Include voice chat in recording');
      // Defaults to unchecked; a single click flips it on.
      expect(voiceChatCheckbox).not.toBeChecked();
      fireEvent.click(voiceChatCheckbox);
      expect(voiceChatCheckbox).toBeChecked();
    });
  });
  describe('modal visibility', () => {
    test('does not render content when isOpen is false', async () => {
      await renderModal({ isOpen: false });
      expect(screen.queryByText('Make Recording')).not.toBeInTheDocument();
    });
    test('renders content when isOpen is true', async () => {
      await renderModal({ isOpen: true });
      expect(screen.getByText('Make Recording')).toBeInTheDocument();
    });
  });
});

View File

@ -1,6 +1,7 @@
import { useState, useCallback, useRef, useEffect, useContext } from 'react';
import { useSelector } from 'react-redux';
import { useSelector, useDispatch } from 'react-redux';
import { selectSessionId } from '../store/features/activeSessionSlice';
import { setRecordingStarted, setRecordingStopped, selectThisClientStartedRecording, selectRecordingId } from '../store/features/sessionUISlice';
import { useJamKazamApp } from '../context/JamKazamAppContext';
import { startRecording as startRecordingRest, stopRecording as stopRecordingRest, getRecordingPromise } from '../helpers/rest';
import { useGlobalContext } from '../context/GlobalContext';
@ -8,6 +9,7 @@ import { RECORD_TYPE_BOTH } from '../helpers/globals'
const useRecordingHelpers = (jamClient) => {
const app = useJamKazamApp();
const dispatch = useDispatch();
// Phase 4: Replace CurrentSessionContext with Redux
const sessionId = useSelector(selectSessionId);
@ -15,7 +17,9 @@ const useRecordingHelpers = (jamClient) => {
const [currentRecording, setCurrentRecording] = useState(null);
const [currentOrLastRecordingId, setCurrentOrLastRecordingId] = useState(null);
const [currentRecordingId, setCurrentRecordingId] = useState(null);
const [thisClientStartedRecording, setThisClientStartedRecording] = useState(false);
// thisClientStartedRecording now comes from Redux to share state across hook instances
const thisClientStartedRecording = useSelector(selectThisClientStartedRecording);
const reduxRecordingId = useSelector(selectRecordingId);
const [currentlyRecording, setCurrentlyRecording] = useState(false);
const [startingRecording, setStartingRecording] = useState(false);
const [stoppingRecording, setStoppingRecording] = useState(false);
@ -59,11 +63,15 @@ const useRecordingHelpers = (jamClient) => {
setCurrentRecording(null);
setCurrentRecordingId(null);
setStoppingRecording(false);
setThisClientStartedRecording(false);
}, []);
// thisClientStartedRecording is reset via setRecordingStopped Redux action
dispatch(setRecordingStopped());
}, [dispatch]);
// Group tracks to client
const groupTracksToClient = useCallback((recording) => {
if (!recording) {
return [];
}
const groupedTracks = {};
const recordingTracks = recording.recorded_tracks || [];
for (let i = 0; i < recordingTracks.length; i++) {
@ -87,21 +95,36 @@ const useRecordingHelpers = (jamClient) => {
setCurrentlyRecording(true);
setStoppingRecording(false);
setThisClientStartedRecording(true);
// Note: thisClientStartedRecording is now set via Redux in handleRecordingStartResult
// Context RecordingActions.startingRecording would be handled by parent or context
try {
console.log(`[RecordingState] Starting recording for session=${sessionId}`);
const recording = await startRecordingRest({ music_session_id: sessionId, record_video: recordVideo });
console.log(`[RecordingState] startRecordingRest succeeded:`, JSON.stringify(recording, null, 2));
setCurrentRecording(recording);
setCurrentRecordingId(recording.id);
setCurrentOrLastRecordingId(recording.id);
const groupedTracks = groupTracksToClient(recording);
console.log("jamClient#StartMediaRecording", recording.id, groupedTracks, recordSettings);
await jamClient.StartMediaRecording(recording.id, groupedTracks, recordSettings);
} catch (jqXHR) {
console.warn("failed to startRecording due to server issue:", jqXHR.responseJSON);
const details = { clientId: app.clientId, reason: 'rest', detail: jqXHR.responseJSON, isRecording: false };
return recording;
} catch (error) {
// apiFetch rejects with the Response object on non-2xx status
let errorDetail = error;
if (error && typeof error.json === 'function') {
try {
errorDetail = await error.json();
console.error("[RecordingState] startRecording failed with status", error.status, "details:", JSON.stringify(errorDetail, null, 2));
} catch (e) {
console.error("[RecordingState] startRecording failed with status", error.status, "could not parse response");
}
} else {
console.error("[RecordingState] startRecording failed:", error);
}
const details = { clientId: app.clientId, reason: 'rest', detail: errorDetail, isRecording: false };
// Trigger startedRecording event
setCurrentlyRecording(false);
// Context RecordingActions.startedRecording(details);
@ -116,11 +139,13 @@ const useRecordingHelpers = (jamClient) => {
setCurrentlyRecording(false);
setCurrentRecording(null);
setCurrentRecordingId(null);
// Update Redux state
dispatch(setRecordingStopped());
if (waitingOnStopTimer.current) {
clearTimeout(waitingOnStopTimer.current);
waitingOnStopTimer.current = null;
}
}, []);
}, [dispatch]);
// Attempt transition to stop
const attemptTransitionToStop = useCallback((recordingId, errorReason, errorDetail) => {
@ -142,9 +167,13 @@ const useRecordingHelpers = (jamClient) => {
// Stop recording
const stopRecording = useCallback(async (recordingId, reason, detail) => {
const userInitiated = recordingId == null && reason == null && detail == null;
const recording = await currentRecording;
console.log(`[RecordingState]: stopRecording userInitiated=${userInitiated} thisClientStartedRecording=${thisClientStartedRecording} reason=${reason} detail=${detail}`);
// CRITICAL: Capture thisClientStartedRecording at the START before any async operations.
// Another hook instance may dispatch setRecordingStopped() via handleRecordingStopResult
// callback which would set thisClientStartedRecording=false in Redux before we check it.
const shouldCallRestStop = thisClientStartedRecording;
console.log(`[RecordingState]: stopRecording userInitiated=${userInitiated} thisClientStartedRecording=${thisClientStartedRecording} shouldCallRestStop=${shouldCallRestStop} reason=${reason} detail=${detail}`);
if (stoppingRecording) {
console.log("ignoring stopRecording because we are already stopping");
@ -160,27 +189,35 @@ const useRecordingHelpers = (jamClient) => {
// Context RecordingActions.stoppingRecording
try {
const recording = await currentRecording;
const groupedTracks = groupTracksToClient(recording);
const recording = currentRecording;
// Use Redux recordingId as fallback when local state isn't available (different hook instance)
const recId = recordingId || currentRecordingId || reduxRecordingId;
await jamClient.FrontStopRecording(recording.id, groupedTracks);
// Get grouped tracks if we have the recording object, otherwise use empty array
const groupedTracks = recording ? groupTracksToClient(recording) : [];
if (thisClientStartedRecording) {
await jamClient.FrontStopRecording(recId, groupedTracks);
// Use the captured value, not the current Redux value which may have changed
if (shouldCallRestStop) {
try {
await stopRecordingRest({ id: recording.id });
console.log(`[RecordingState] Calling stopRecordingRest with id=${recId}`);
const stopResult = await stopRecordingRest({ id: recId });
console.log(`[RecordingState] stopRecordingRest completed:`, JSON.stringify(stopResult, null, 2));
console.log(`[RecordingState] stopRecordingRest - is_done=${stopResult?.is_done}, duration=${stopResult?.duration}`);
setWaitingOnServerStop(false);
attemptTransitionToStop(recording.id, reason, detail);
attemptTransitionToStop(recId, reason, detail);
setStoppingRecording(false);
} catch (error) {
setStoppingRecording(false);
if (error.status === 422) {
setWaitingOnServerStop(false);
attemptTransitionToStop(recording.id, reason, detail);
attemptTransitionToStop(recId, reason, detail);
} else {
console.error("unable to stop recording", error);
transitionToStopped();
const details = {
recordingId: recording.id,
recordingId: recId,
reason: 'rest',
details: error,
isRecording: false
@ -193,34 +230,41 @@ const useRecordingHelpers = (jamClient) => {
}
} catch (error) {
console.error("Error in stopRecording:", error);
transitionToStopped();
setStoppingRecording(false);
}
return true;
}, [currentRecording, thisClientStartedRecording, stoppingRecording, groupTracksToClient, jamClient, timeoutTransitionToStop, attemptTransitionToStop, transitionToStopped]);
}, [currentRecording, currentRecordingId, reduxRecordingId, thisClientStartedRecording, stoppingRecording, groupTracksToClient, jamClient, timeoutTransitionToStop, attemptTransitionToStop, transitionToStopped]);
// Abort recording
const abortRecording = useCallback(async (recordingId, errorReason, errorDetail) => {
await jamClient.AbortRecording(recordingId, { reason: errorReason, detail: errorDetail, success: false });
}, [jamClient]);
// Handle recording start result
// Handle recording start result - called when native client confirms recording start
// This is the authoritative confirmation that recording has actually started
const handleRecordingStartResult = useCallback((recordingId, result) => {
console.log("[RecordingState] handleRecordingStartResult", { recordingId, result, currentRecordingId, currentlyRecording });
const { success, reason, detail } = result;
if (success) {
// Recording started successfully - NOW update Redux to show "Recording" on toolbar
dispatch(setRecordingStarted(recordingId));
const details = { clientId: app.clientId, isRecording: true };
// Trigger startedRecording event
// Context RecordingActions.startedRecording(details)
} else {
setCurrentlyRecording(false);
// Recording failed to start - ensure Redux state is cleared
dispatch(setRecordingStopped());
console.error("unable to start the recording", reason, detail);
const details = { clientId: app.clientId, reason, detail, isRecording: false };
// Trigger startedRecording event
// Context RecordingActions.startedRecording(details)
}
}, [app.clientId, currentRecordingId, currentlyRecording]);
}, [app.clientId, currentRecordingId, currentlyRecording, dispatch]);
// Handle recording stop result
const handleRecordingStopResult = useCallback((recordingId, result) => {
@ -271,6 +315,8 @@ const useRecordingHelpers = (jamClient) => {
// Trigger startingRecording event
// Context RecordingActions.startingRecording(details)
setCurrentlyRecording(true);
// Update Redux state
dispatch(setRecordingStarted(recording.id));
const startedDetails = { clientId: app.clientId, recordingId: currentRecordingId, isRecording: true };
// Trigger startedRecording event
@ -281,7 +327,7 @@ const useRecordingHelpers = (jamClient) => {
console.error("[RecordingState] we've missed the stop of previous recording", currentRecordingId, recording.id);
// Show alert
}
}, [currentRecordingId, app.clientId]);
}, [currentRecordingId, app.clientId, dispatch]);
// Handle recording stopped
const handleRecordingStopped = useCallback(async (recordingId, result) => {
@ -294,6 +340,7 @@ const useRecordingHelpers = (jamClient) => {
// Context RecordingActions.stoppingRecording(stoppingDetails)
if (recordingId == null || recordingId === "") {
console.warn("[RecordingState] handleRecordingStopped called with empty recordingId - skipping REST stop call!");
transitionToStopped();
const stoppedDetails = { recordingId, reason, detail, isRecording: false };
// Context RecordingActions.stoppedRecording(stoppedDetails)
@ -301,14 +348,16 @@ const useRecordingHelpers = (jamClient) => {
}
try {
await stopRecordingRest({ id: recordingId });
console.log(`[RecordingState] handleRecordingStopped calling stopRecordingRest with id=${recordingId}`);
const stopResult = await stopRecordingRest({ id: recordingId });
console.log(`[RecordingState] handleRecordingStopped stopRecordingRest completed:`, JSON.stringify(stopResult, null, 2));
transitionToStopped();
const details = { recordingId, reason, detail, isRecording: false };
// Trigger stoppedRecording event
// Context RecordingActions.stoppedRecording(details)
} catch (error) {
if (error.status === 422) {
console.log("recording already stopped", error);
console.log("[RecordingState] handleRecordingStopped - recording already stopped (422)", error);
transitionToStopped();
const details = { recordingId, reason, detail, isRecording: false };
// Trigger stoppedRecording event
@ -403,9 +452,11 @@ const useRecordingHelpers = (jamClient) => {
};
}, [sessionId, jamClient, thisClientStartedRecording]);
// Initialize
// Initialize - register global handlers for native client callbacks
// Note: We don't delete callbacks on cleanup because multiple components may use this hook.
// JKSessionScreen stays mounted and needs callbacks to work even after modal closes.
// Since we use Redux for state, the callbacks will update the shared store correctly.
useEffect(() => {
// Register global handlers if needed
if (window) {
window.JK = window.JK || {};
window.JK.HandleRecordingStartResult = handleRecordingStartResult;
@ -414,17 +465,6 @@ const useRecordingHelpers = (jamClient) => {
window.JK.HandleRecordingStarted = handleRecordingStarted;
window.JK.HandleRecordingAborted = handleRecordingAborted;
}
// Cleanup function to remove global handlers
return () => {
if (window && window.JK) {
delete window.JK.HandleRecordingStartResult;
delete window.JK.HandleRecordingStopResult;
delete window.JK.HandleRecordingStopped;
delete window.JK.HandleRecordingStarted;
delete window.JK.HandleRecordingAborted;
}
};
}, [handleRecordingStartResult, handleRecordingStopResult, handleRecordingStopped, handleRecordingStarted, handleRecordingAborted]);
return {

View File

@ -22,6 +22,9 @@ import sessionUIReducer, {
setMixMode,
toggleMixMode,
setCurrentMixerRange,
// Recording actions
setRecordingStarted,
setRecordingStopped,
resetUI,
selectModal,
selectAllModals,
@ -39,7 +42,11 @@ import sessionUIReducer, {
// Phase 5: Mixer UI selectors
selectMixMode,
selectCurrentMixerRange,
selectMixerUI
selectMixerUI,
// Recording selectors
selectIsRecording,
selectRecordingId,
selectThisClientStartedRecording
} from '../sessionUISlice';
describe('sessionUISlice', () => {
@ -83,6 +90,18 @@ describe('sessionUISlice', () => {
min: null,
max: null
}
},
// JamTrack UI state
openJamTrack: null,
jamTrackUI: {
lastUsedMixdownId: null,
volume: 100
},
// Recording state
recording: {
isRecording: false,
recordingId: null,
thisClientStartedRecording: false
}
};
@ -495,4 +514,114 @@ describe('sessionUISlice', () => {
expect(selectMixerUI(mockStateWithPhase5)).toEqual(mockStateWithPhase5.sessionUI.mixerUI);
});
});
// Recording state tests
// Covers the setRecordingStarted / setRecordingStopped reducers.
describe('recording actions', () => {
  // Convenience wrapper: run a single action through the slice reducer.
  const reduce = (state, action) => sessionUIReducer(state, action);

  it('should handle setRecordingStarted', () => {
    const next = reduce(initialState, setRecordingStarted('recording-123'));
    expect(next.recording.isRecording).toBe(true);
    expect(next.recording.recordingId).toBe('recording-123');
    expect(next.recording.thisClientStartedRecording).toBe(true);
  });

  it('should handle setRecordingStopped', () => {
    const activeState = {
      ...initialState,
      recording: {
        isRecording: true,
        recordingId: 'recording-456',
        thisClientStartedRecording: true,
      },
    };
    const next = reduce(activeState, setRecordingStopped());
    expect(next.recording.isRecording).toBe(false);
    expect(next.recording.recordingId).toBeNull();
    expect(next.recording.thisClientStartedRecording).toBe(false);
  });

  it('should handle setRecordingStarted with different recording IDs', () => {
    let state = reduce(initialState, setRecordingStarted('recording-1'));
    expect(state.recording.recordingId).toBe('recording-1');
    // Starting a new recording should update the ID
    state = reduce(state, setRecordingStarted('recording-2'));
    expect(state.recording.recordingId).toBe('recording-2');
    expect(state.recording.isRecording).toBe(true);
  });

  it('should handle setRecordingStopped when not recording', () => {
    // Should handle gracefully even if not recording
    const next = reduce(initialState, setRecordingStopped());
    expect(next.recording.isRecording).toBe(false);
    expect(next.recording.recordingId).toBeNull();
  });
});
// Recording selectors tests
// Verifies the three recording selectors against active and idle states.
describe('recording selectors', () => {
  // Build a root state whose sessionUI.recording slice is overridden.
  const stateWith = (recording) => ({
    sessionUI: { ...initialState, recording },
  });

  const mockStateRecording = stateWith({
    isRecording: true,
    recordingId: 'active-recording-789',
    thisClientStartedRecording: true,
  });
  const mockStateNotRecording = stateWith({
    isRecording: false,
    recordingId: null,
    thisClientStartedRecording: false,
  });

  it('selectIsRecording should return true when recording', () => {
    expect(selectIsRecording(mockStateRecording)).toBe(true);
  });
  it('selectIsRecording should return false when not recording', () => {
    expect(selectIsRecording(mockStateNotRecording)).toBe(false);
  });
  it('selectRecordingId should return correct recording ID', () => {
    expect(selectRecordingId(mockStateRecording)).toBe('active-recording-789');
  });
  it('selectRecordingId should return null when not recording', () => {
    expect(selectRecordingId(mockStateNotRecording)).toBeNull();
  });
  it('selectThisClientStartedRecording should return true when this client started recording', () => {
    expect(selectThisClientStartedRecording(mockStateRecording)).toBe(true);
  });
  it('selectThisClientStartedRecording should return false when this client did not start recording', () => {
    expect(selectThisClientStartedRecording(mockStateNotRecording)).toBe(false);
  });
});
// Reset UI should also reset recording state
describe('resetUI with recording state', () => {
  it('should reset recording state to initial values', () => {
    // Start from a state mid-recording, then dispatch the full UI reset.
    const dirtyState = {
      ...initialState,
      recording: {
        isRecording: true,
        recordingId: 'recording-to-reset',
        thisClientStartedRecording: true,
      },
    };
    const result = sessionUIReducer(dirtyState, resetUI());
    expect(result.recording.isRecording).toBe(false);
    expect(result.recording.recordingId).toBeNull();
    expect(result.recording.thisClientStartedRecording).toBe(false);
  });
});
});

View File

@ -52,6 +52,13 @@ const initialState = {
jamTrackUI: {
lastUsedMixdownId: null, // User's last selected mixdown for this session
volume: 100 // Last used volume (0-100)
},
// Recording state
recording: {
isRecording: false,
recordingId: null,
thisClientStartedRecording: false
}
};
@ -175,6 +182,19 @@ export const sessionUISlice = createSlice({
state.openJamTrack = null;
},
// Recording state reducers
setRecordingStarted: (state, action) => {
state.recording.isRecording = true;
state.recording.recordingId = action.payload;
state.recording.thisClientStartedRecording = true;
},
setRecordingStopped: (state) => {
state.recording.isRecording = false;
state.recording.recordingId = null;
state.recording.thisClientStartedRecording = false;
},
// Reset UI state (useful when leaving session)
resetUI: (state) => {
return { ...initialState };
@ -213,6 +233,9 @@ export const {
setOpenJamTrack,
updateJamTrackUI,
clearOpenJamTrack,
// Recording actions
setRecordingStarted,
setRecordingStopped,
resetUI
} = sessionUISlice.actions;
@ -248,3 +271,8 @@ export const selectMixerUI = (state) => state.sessionUI.mixerUI;
// Phase 5: JamTrack UI selectors
export const selectOpenJamTrack = (state) => state.sessionUI.openJamTrack;
export const selectJamTrackUI = (state) => state.sessionUI.jamTrackUI;
// Recording selectors
// All three read from the sessionUI.recording sub-slice via a shared accessor.
const getRecordingState = (state) => state.sessionUI.recording;
export const selectIsRecording = (state) => getRecordingState(state).isRecording;
export const selectRecordingId = (state) => getRecordingState(state).recordingId;
export const selectThisClientStartedRecording = (state) => getRecordingState(state).thisClientStartedRecording;

View File

@ -0,0 +1,452 @@
import { test, expect, Page } from '@playwright/test';
import { loginToJamUI, createAndJoinSession } from '../utils/test-helpers';
import { APIInterceptor } from '../utils/api-interceptor';
/**
* Recording Controls Test Suite
*
* Tests recording functionality including:
* - Opening recording modal
* - Name validation
* - Starting recording flow
* - Button state transitions (Record → Recording → Stopping... → Record)
* - Stopping recording
*/
/**
 * Helper function to mock jamClient recording methods.
 *
 * Runs inside the page context and replaces (or creates) window.jamClient with
 * deterministic stubs. StartMediaRecording simulates a successful native start
 * by invoking window.JK.HandleRecordingStartResult after 500ms.
 */
async function mockJamClientRecording(page: Page) {
  await page.evaluate(() => {
    // Factory: async mock that logs its name + args and resolves to `result`.
    const loggedWithArgs = (name: string, result: any) =>
      async (...args: any[]) => {
        console.log(`[MOCK] ${name} called`, args);
        return result;
      };
    // Factory: zero-argument async mock that logs its name and resolves to `result`.
    const logged = (name: string, result: any) =>
      async () => {
        console.log(`[MOCK] ${name} called`);
        return result;
      };

    const mockMethods = {
      StartMediaRecording: async (...args: any[]) => {
        console.log('[MOCK] StartMediaRecording called', args);
        // Simulate successful recording start by calling the callback
        setTimeout(() => {
          if ((window as any).JK?.HandleRecordingStartResult) {
            (window as any).JK.HandleRecordingStartResult(args[0], { success: true, reason: '', detail: '' }, 'test-client-id');
          }
        }, 500);
        return true;
      },
      FrontStopRecording: loggedWithArgs('FrontStopRecording', true),
      GetAudioRecordingPreference: logged('GetAudioRecordingPreference', 2), // 2 = Mix and stems
      SetAudioRecordingPreference: loggedWithArgs('SetAudioRecordingPreference', true),
      getOpenVideoSources: async () => {
        console.log('[MOCK] getOpenVideoSources called');
        // Fresh object per call so callers never share a mutated instance.
        return {
          webcam1: true,
          webcam2: false,
          screen_capture: true,
          session_video: false,
          process_status: 'ok',
        };
      },
      IsOBSAvailable: logged('IsOBSAvailable', false),
      RegisterRecordingCallbacks: loggedWithArgs('RegisterRecordingCallbacks', true),
      SetVURefreshRate: loggedWithArgs('SetVURefreshRate', true),
      SessionRegisterCallback: loggedWithArgs('SessionRegisterCallback', true),
      SessionSetAlertCallback: loggedWithArgs('SessionSetAlertCallback', true),
      SessionSetConnectionStatusRefreshRate: loggedWithArgs('SessionSetConnectionStatusRefreshRate', true),
      getClientParentChildRole: logged('getClientParentChildRole', 0),
      getParentClientId: logged('getParentClientId', ''),
    };

    // Replace jamClient methods if it exists; otherwise install the mock wholesale.
    const existing = (window as any).jamClient;
    if (existing) {
      Object.assign(existing, mockMethods);
      console.log('[MOCK] jamClient recording methods replaced');
    } else {
      (window as any).jamClient = mockMethods;
      console.log('[MOCK] jamClient created with recording methods');
    }
  });
}
/**
 * Helper to intercept recording API calls.
 *
 * Registers three route mocks: POST start, POST stop, and a GET catch-all
 * for a single recording resource.
 *
 * NOTE: Playwright consults route handlers in reverse registration order,
 * and the catch-all recordings pattern (one wildcard path segment) also
 * matches the `/api/recordings/start` path. Non-GET requests therefore hit
 * the catch-all FIRST and must defer with `route.fallback()` — using
 * `route.continue()` here would send the request to the real network and
 * the more specific start mock would never fire.
 */
async function mockRecordingAPI(page: Page) {
  // Mock start recording API
  await page.route('**/api/recordings/start', async (route) => {
    console.log('[MOCK API] POST /api/recordings/start intercepted');
    await route.fulfill({
      status: 200,
      contentType: 'application/json',
      body: JSON.stringify({
        id: 'mock-recording-id-' + Date.now(),
        music_session_id: 'test-session-id',
        duration: null,
        is_done: false,
        owner: { id: 1, name: 'Test User' },
        recorded_tracks: [
          { client_id: 'test-client-id', track_id: 1 },
        ],
        created_at: new Date().toISOString(),
      }),
    });
  });
  // Mock stop recording API
  await page.route('**/api/recordings/*/stop', async (route) => {
    console.log('[MOCK API] POST /api/recordings/*/stop intercepted');
    await route.fulfill({
      status: 200,
      contentType: 'application/json',
      body: JSON.stringify({
        id: 'mock-recording-id',
        music_session_id: 'test-session-id',
        duration: 30,
        is_done: true,
        owner: { id: 1, name: 'Test User' },
        recorded_tracks: [
          { client_id: 'test-client-id', track_id: 1 },
        ],
      }),
    });
  });
  // Mock get recording API (catch-all; registered last, so checked first)
  await page.route('**/api/recordings/*', async (route) => {
    if (route.request().method() === 'GET') {
      console.log('[MOCK API] GET /api/recordings/* intercepted');
      await route.fulfill({
        status: 200,
        contentType: 'application/json',
        body: JSON.stringify({
          id: 'mock-recording-id',
          music_session_id: 'test-session-id',
          owner: { id: 1, name: 'Test User' },
          recorded_tracks: [
            { client_id: 'test-client-id', track_id: 1 },
          ],
        }),
      });
    } else {
      // Defer to the earlier, more specific handlers (e.g. POST .../start)
      // instead of bypassing them and hitting the real backend.
      await route.fallback();
    }
  });
}
/**
 * End-to-end coverage of the in-session recording controls: the toolbar
 * Record button, the "Make Recording" modal, its form validation, and the
 * (currently skipped) active-recording state transitions that need full
 * native-client callback simulation.
 */
test.describe('Recording Controls', () => {
  let apiInterceptor: APIInterceptor;
  test.beforeEach(async ({ page }) => {
    // Increase timeout for these tests
    test.setTimeout(90000);
    // Set up API interceptor
    apiInterceptor = new APIInterceptor();
    await apiInterceptor.intercept(page);
    // Mock recording API endpoints
    await mockRecordingAPI(page);
    // Login and join a session
    await loginToJamUI(page);
    await createAndJoinSession(page);
    // Mock jamClient recording methods after session is created
    // (ordering matters: the session bootstrap may create window.jamClient,
    // and the mock replaces methods on whatever instance exists).
    await mockJamClientRecording(page);
  });
  test('should display Record button in session toolbar', async ({ page }) => {
    // Wait for session to load
    // NOTE(review): fixed waitForTimeout sleeps (used throughout this suite)
    // are slow and flaky — consider web-first waits; confirm what the 3s covers.
    await page.waitForTimeout(3000);
    // Find Record button
    const recordButton = page.locator('button:has-text("Record")');
    await expect(recordButton).toBeVisible({ timeout: 10000 });
  });
  test('should open recording modal when Record button is clicked', async ({ page }) => {
    await page.waitForTimeout(3000);
    // Click Record button
    const recordButton = page.locator('button:has-text("Record")');
    await recordButton.click();
    // Wait for modal to open
    await page.waitForTimeout(1000);
    // Verify modal title
    const modalTitle = page.locator('text=Make Recording');
    await expect(modalTitle).toBeVisible({ timeout: 5000 });
    // Verify Start Recording button is present
    const startButton = page.locator('button:has-text("Start Recording")');
    await expect(startButton).toBeVisible();
  });
  test('should show validation error when starting recording without name', async ({ page }) => {
    await page.waitForTimeout(3000);
    // Open recording modal
    const recordButton = page.locator('button:has-text("Record")');
    await recordButton.click();
    await page.waitForTimeout(500);
    // Click Start Recording without entering a name
    const startButton = page.locator('button:has-text("Start Recording")');
    await startButton.click();
    // Wait for error message
    await page.waitForTimeout(500);
    // Verify error message is displayed
    const errorMessage = page.locator('text=Please enter a recording name');
    await expect(errorMessage).toBeVisible({ timeout: 5000 });
  });
  test('should close modal when Close button is clicked', async ({ page }) => {
    await page.waitForTimeout(3000);
    // Open recording modal
    const recordButton = page.locator('button:has-text("Record")');
    await recordButton.click();
    await page.waitForTimeout(500);
    // Verify modal is open
    await expect(page.locator('text=Make Recording')).toBeVisible();
    // Click Close button (scoped to the footer to avoid other Close buttons)
    const closeButton = page.locator('.modal-footer button:has-text("Close")');
    await closeButton.click();
    // Verify modal is closed
    await page.waitForTimeout(500);
    await expect(page.locator('text=Make Recording')).not.toBeVisible();
  });
  test('should have audio-only selected by default', async ({ page }) => {
    await page.waitForTimeout(3000);
    // Open recording modal
    const recordButton = page.locator('button:has-text("Record")');
    await recordButton.click();
    await page.waitForTimeout(500);
    // Verify audio-only is checked
    const audioOnlyRadio = page.locator('input[value="audio-only"]');
    await expect(audioOnlyRadio).toBeChecked();
  });
  test('should allow selecting audio and video recording type', async ({ page }) => {
    await page.waitForTimeout(3000);
    // Open recording modal
    const recordButton = page.locator('button:has-text("Record")');
    await recordButton.click();
    await page.waitForTimeout(500);
    // Click audio-video radio
    const audioVideoRadio = page.locator('input[value="audio-video"]');
    await audioVideoRadio.click();
    // Verify it's selected
    await expect(audioVideoRadio).toBeChecked();
  });
  test('should allow entering recording name', async ({ page }) => {
    await page.waitForTimeout(3000);
    // Open recording modal
    const recordButton = page.locator('button:has-text("Record")');
    await recordButton.click();
    await page.waitForTimeout(500);
    // Enter recording name
    const nameInput = page.locator('input[placeholder="Recording Name"]');
    await nameInput.fill('My Test Recording');
    // Verify input value
    await expect(nameInput).toHaveValue('My Test Recording');
  });
  test('should allow toggling voice chat checkbox', async ({ page }) => {
    await page.waitForTimeout(3000);
    // Open recording modal
    const recordButton = page.locator('button:has-text("Record")');
    await recordButton.click();
    await page.waitForTimeout(500);
    // Find voice chat checkbox
    const voiceChatCheckbox = page.locator('input[name="includeChat"]');
    // Should be unchecked by default
    await expect(voiceChatCheckbox).not.toBeChecked();
    // Click to check
    await voiceChatCheckbox.click();
    await expect(voiceChatCheckbox).toBeChecked();
    // Click to uncheck
    await voiceChatCheckbox.click();
    await expect(voiceChatCheckbox).not.toBeChecked();
  });
  test('should start recording when valid name is provided', async ({ page }) => {
    await page.waitForTimeout(3000);
    // Open recording modal
    const recordButton = page.locator('button:has-text("Record")');
    await recordButton.click();
    await page.waitForTimeout(500);
    // Enter recording name
    const nameInput = page.locator('input[placeholder="Recording Name"]');
    await nameInput.fill('Test Recording');
    // Click Start Recording
    const startButton = page.locator('button:has-text("Start Recording")');
    await startButton.click();
    // Wait for recording to start (modal should close)
    await page.waitForTimeout(2000);
    // Modal should be closed
    await expect(page.locator('.modal-header:has-text("Make Recording")')).not.toBeVisible({ timeout: 5000 });
    // Record button should now show "Recording"
    // Note: This depends on the native client callback being triggered
    // In real tests, this would need proper callback simulation
  });
  test.skip('should show Recording button state when recording is active', async ({ page }) => {
    // This test requires full native client integration
    // Skipped for now - would need to properly simulate HandleRecordingStartResult callback
    await page.waitForTimeout(3000);
    // Start recording
    const recordButton = page.locator('button:has-text("Record")');
    await recordButton.click();
    await page.waitForTimeout(500);
    const nameInput = page.locator('input[placeholder="Recording Name"]');
    await nameInput.fill('Test Recording');
    const startButton = page.locator('button:has-text("Start Recording")');
    await startButton.click();
    // Wait for state change
    await page.waitForTimeout(3000);
    // Button should show "Recording"
    const recordingButton = page.locator('button:has-text("Recording")');
    await expect(recordingButton).toBeVisible({ timeout: 10000 });
  });
  test.skip('should stop recording when Recording button is clicked', async ({ page }) => {
    // This test requires the recording to be in active state first
    // Skipped for now - would need full integration test setup
    // Assuming recording is active...
    const recordingButton = page.locator('button:has-text("Recording")');
    // Click to stop
    await recordingButton.click();
    // Should show "Stopping..." briefly
    const stoppingButton = page.locator('button:has-text("Stopping")');
    await expect(stoppingButton).toBeVisible({ timeout: 5000 });
    // Then should return to "Record"
    await page.waitForTimeout(3000);
    const recordButton = page.locator('button:has-text("Record")');
    await expect(recordButton).toBeVisible({ timeout: 10000 });
  });
});
/**
 * Form-control behavior inside the "Make Recording" modal: format option
 * lists and the audio-only vs. audio-video enable/disable coupling.
 * Every test starts with the modal already open (see beforeEach).
 */
test.describe('Recording Modal Form Controls', () => {
  test.beforeEach(async ({ page }) => {
    test.setTimeout(60000);
    // Wire up network and native-client mocks before exercising the app.
    const apiInterceptor = new APIInterceptor();
    await apiInterceptor.intercept(page);
    await mockRecordingAPI(page);
    await loginToJamUI(page);
    await createAndJoinSession(page);
    await mockJamClientRecording(page);
    // Land every test inside the recording modal.
    await page.waitForTimeout(3000);
    await page.locator('button:has-text("Record")').click();
    await page.waitForTimeout(500);
  });
  test('should display all audio format options', async ({ page }) => {
    // The first <select> in the modal is the audio format picker.
    const audioFormatSelect = page.locator('select').nth(0);
    for (const format of ['wav', 'mp3', 'flac']) {
      await expect(audioFormatSelect.locator(`option[value="${format}"]`)).toBeAttached();
    }
  });
  test('should display video format options', async ({ page }) => {
    // The second <select> is the video format picker (disabled by default
    // while audio-only is selected).
    const videoFormatSelect = page.locator('select').nth(1);
    await expect(videoFormatSelect.locator('option:has-text("MP4")')).toBeAttached();
    await expect(videoFormatSelect.locator('option:has-text("FLC")')).toBeAttached();
  });
  test('should disable video format when audio-only is selected', async ({ page }) => {
    // Default recording type is audio-only...
    await expect(page.locator('input[value="audio-only"]')).toBeChecked();
    // ...which keeps the video format picker disabled.
    await expect(page.locator('select').nth(1)).toBeDisabled();
  });
  test('should enable video format when audio-video is selected', async ({ page }) => {
    // Switch the recording type to audio + video.
    await page.locator('input[value="audio-video"]').click();
    // Video format should be enabled (though OBS check may show alert)
    // Note: OBS availability is mocked to return false, so an alert may appear
  });
});