3339 lines
140 KiB
JavaScript
3339 lines
140 KiB
JavaScript
(function(context,$) {
|
|
|
|
"use strict";
|
|
|
|
context.JK = context.JK || {};
|
|
context.JK.SessionScreen = function(app) {
|
|
var TEMPOS = context.JK.TEMPOS;
|
|
var EVENTS = context.JK.EVENTS;
|
|
var MIX_MODES = context.JK.MIX_MODES;
|
|
var NAMED_MESSAGES = context.JK.NAMED_MESSAGES;
|
|
var gearUtils = context.JK.GearUtils;
|
|
var sessionUtils = context.JK.SessionUtils;
|
|
var modUtils = context.JK.ModUtils;
|
|
var logger = context.JK.logger;
|
|
var self = this;
|
|
var webcamViewer = new context.JK.WebcamViewer()
|
|
|
|
var defaultParticipant = {
|
|
tracks: [{
|
|
instrument_id: "unknown"
|
|
}],
|
|
user: {
|
|
first_name: 'Unknown',
|
|
last_name: 'User',
|
|
photo_url: null
|
|
}
|
|
};
|
|
|
|
// Be sure to copy/extend these instead of modifying in place
|
|
var trackVuOpts = {
|
|
vuType: "vertical",
|
|
lightCount: 13,
|
|
lightWidth: 3,
|
|
lightHeight: 17
|
|
};
|
|
// Must add faderId key to this
|
|
var trackFaderOpts = {
|
|
faderType: "vertical",
|
|
height: 83
|
|
};
|
|
|
|
// Recreate ChannelGroupIDs ENUM from C++
|
|
var ChannelGroupIds = {
|
|
"MasterGroup": 0,
|
|
"MonitorGroup": 1,
|
|
"AudioInputMusicGroup": 2,
|
|
"AudioInputChatGroup": 3,
|
|
"MediaTrackGroup": 4,
|
|
"StreamOutMusicGroup": 5,
|
|
"StreamOutChatGroup": 6,
|
|
"UserMusicInputGroup": 7,
|
|
"UserChatInputGroup": 8,
|
|
"PeerAudioInputMusicGroup": 9,
|
|
"PeerMediaTrackGroup": 10,
|
|
"JamTrackGroup": 11,
|
|
"MetronomeGroup": 12
|
|
};
|
|
|
|
var METRO_SOUND_LOOKUP = {
|
|
0 : "BuiltIn",
|
|
1 : "SineWave",
|
|
2 : "Beep",
|
|
3 : "Click",
|
|
4 : "Kick",
|
|
5 : "Snare",
|
|
6 : "MetroFile"
|
|
}
|
|
|
|
var sessionModel = null;
|
|
var sessionId;
|
|
var tracks = {};
|
|
var myTracks = [];
|
|
var masterMixers = [];
|
|
var personalMixers = [];
|
|
var allMixers = {};
|
|
var mixersByResourceId = {};
|
|
var mixersByTrackId = {};
|
|
var configureTrackDialog;
|
|
var addNewGearDialog;
|
|
var localRecordingsDialog = null;
|
|
var recordingFinishedDialog = null;
|
|
var friendSelectorDialog = null;
|
|
var inviteMusiciansUtil = null;
|
|
var screenActive = false;
|
|
var currentMixerRangeMin = null;
|
|
var currentMixerRangeMax = null;
|
|
var lookingForMixersCount = 0;
|
|
var lookingForMixersTimer = null;
|
|
var lookingForMixers = [];
|
|
var $recordingTimer = null;
|
|
var recordingTimerInterval = null;
|
|
var startTimeDate = null;
|
|
var startingRecording = false; // double-click guard
|
|
var claimedRecording = null;
|
|
var backing_track_path = null;
|
|
var jamTrack = null;
|
|
var musicianAccessOnJoin; // was this a private or public session when the user tried to joined?
|
|
|
|
var metronomeMixer = null;
|
|
var playbackControls = null;
|
|
var promptLeave = false;
|
|
var rateSessionDialog = null;
|
|
var friendInput = null;
|
|
var sessionPageDone = null;
|
|
var metroTempo = 120;
|
|
var metroCricket = false;
|
|
var metroSound = "Beep";
|
|
var $recordingManagerViewer = null;
|
|
var $screen = null;
|
|
var $mixModeDropdown = null;
|
|
var $templateMixerModeChange = null;
|
|
|
|
var $myTracksNoTracks = null;
|
|
var $otherAudioContainer = null;
|
|
var $myTracksContainer = null;
|
|
var $liveTracksContainer = null;
|
|
var downloadJamTrack = null;
|
|
var $closePlaybackRecording = null;
|
|
var $openBackingTrack = null;
|
|
var $metronomePlaybackSelect = null;
|
|
var $metronomePlaybackHelp = null;
|
|
var $templatePendingMetronome = null;
|
|
var $myTracks = null;
|
|
var $liveTracks = null;
|
|
var $audioTracks = null;
|
|
var $fluidTracks = null;
|
|
var $voiceChat = null;
|
|
var $openFtue = null;
|
|
var $tracksHolder = null;
|
|
|
|
var mediaTrackGroups = [ChannelGroupIds.MediaTrackGroup, ChannelGroupIds.JamTrackGroup, ChannelGroupIds.MetronomeGroup];
|
|
var muteBothMasterAndPersonalGroups = [ChannelGroupIds.AudioInputMusicGroup, ChannelGroupIds.MediaTrackGroup, ChannelGroupIds.JamTrackGroup, ChannelGroupIds.MetronomeGroup];
|
|
|
|
var rest = context.JK.Rest();
|
|
var RENDER_SESSION_DELAY = 750; // When I need to render a session, I have to wait a bit for the mixers to be there.
|
|
|
|
function beforeShow(data) {
|
|
sessionId = data.id;
|
|
if(!sessionId) {
|
|
window.location = '/client#/home';
|
|
}
|
|
promptLeave = true;
|
|
$myTracksContainer.empty();
|
|
displayDoneRecording(); // assumption is that you can't join a recording session, so this should be safe
|
|
|
|
var shareDialog = new JK.ShareDialog(context.JK.app, sessionId, "session");
|
|
shareDialog.initialize(context.JK.FacebookHelperInstance);
|
|
if(gon.global.video_available && gon.global.video_available!="none") {
|
|
webcamViewer.beforeShow()
|
|
}
|
|
}
|
|
|
|
function beforeDisconnect() {
|
|
return { freezeInteraction: true };
|
|
}
|
|
|
|
function initializeSession() {
|
|
|
|
// Subscribe for callbacks on audio events
|
|
context.jamClient.SessionRegisterCallback("JK.HandleBridgeCallback");
|
|
context.jamClient.RegisterRecordingCallbacks("JK.HandleRecordingStartResult", "JK.HandleRecordingStopResult", "JK.HandleRecordingStarted", "JK.HandleRecordingStopped", "JK.HandleRecordingAborted");
|
|
context.jamClient.SessionSetConnectionStatusRefreshRate(1000);
|
|
|
|
// If you load this page directly, the loading of the current user
|
|
// is happening in parallel. We can't join the session until the
|
|
// current user has been completely loaded. Poll for the current user
|
|
// before proceeding with session joining.
|
|
function checkForCurrentUser() {
|
|
if (context.JK.userMe) {
|
|
afterCurrentUserLoaded();
|
|
} else {
|
|
context.setTimeout(checkForCurrentUser, 100);
|
|
}
|
|
}
|
|
checkForCurrentUser();
|
|
|
|
context.JK.HelpBubbleHelper.jamtrackGuideSession($screen.find('li.open-a-jamtrack'), $screen)
|
|
}
|
|
|
|
function afterShow(data) {
|
|
|
|
$fluidTracks.addClass('showing');
|
|
$openBackingTrack.removeClass('disabled');
|
|
|
|
if(!context.JK.JamServer.connected) {
|
|
promptLeave = false;
|
|
app.notifyAlert("Not Connected", 'To create or join a session, you must be connected to the server.');
|
|
window.location = '/client#/home'
|
|
return;
|
|
}
|
|
|
|
// The SessionModel is a singleton.
|
|
// a client can only be in one session at a time,
|
|
// and other parts of the code want to know at any certain times
|
|
// about the current session, if any (for example, reconnect logic)
|
|
if(context.JK.CurrentSessionModel) {
|
|
context.JK.CurrentSessionModel.ensureEnded();
|
|
}
|
|
|
|
context.JK.CurrentSessionModel = sessionModel = new context.JK.SessionModel(
|
|
context.JK.app,
|
|
context.JK.JamServer,
|
|
context.jamClient,
|
|
self
|
|
);
|
|
|
|
sessionModel.start(sessionId);
|
|
// indicate that the screen is active, so that
|
|
// body-scoped drag handlers can go active
|
|
screenActive = true;
|
|
|
|
rest.getSessionHistory(data.id)
|
|
.done(function(musicSession) {
|
|
|
|
musicianAccessOnJoin = musicSession.musician_access;;
|
|
|
|
var shouldVerifyNetwork = musicSession.musician_access;
|
|
gearUtils.guardAgainstInvalidConfiguration(app, shouldVerifyNetwork)
|
|
.fail(function() {
|
|
promptLeave = false;
|
|
window.location = '/client#/home'
|
|
})
|
|
.done(function(){
|
|
var result = sessionUtils.SessionPageEnter();
|
|
|
|
gearUtils.guardAgainstActiveProfileMissing(app, result)
|
|
.fail(function(data) {
|
|
promptLeave = false;
|
|
if(data && data.reason == 'handled') {
|
|
if(data.nav == 'BACK') {
|
|
window.history.go(-1);
|
|
}
|
|
else {
|
|
window.location = data.nav;
|
|
}
|
|
}
|
|
else {
|
|
window.location = '/client#/home';
|
|
}
|
|
})
|
|
.done(function(){
|
|
|
|
sessionModel.waitForSessionPageEnterDone()
|
|
.done(function(userTracks) {
|
|
|
|
context.JK.CurrentSessionModel.setUserTracks(userTracks);
|
|
|
|
initializeSession();
|
|
})
|
|
.fail(function(data) {
|
|
if(data == "timeout") {
|
|
context.JK.alertSupportedNeeded('The audio system has not reported your configured tracks in a timely fashion.')
|
|
}
|
|
else if(data == 'session_over') {
|
|
// do nothing; session ended before we got the user track info. just bail
|
|
}
|
|
else {
|
|
context.JK.alertSupportedNeeded('Unable to determine configured tracks due to reason: ' + data)
|
|
}
|
|
promptLeave = false;
|
|
window.location = '/client#/home'
|
|
});
|
|
})
|
|
})
|
|
})
|
|
.fail(function() {
|
|
|
|
})
|
|
|
|
}
|
|
|
|
function notifyWithUserInfo(title , text, clientId) {
|
|
sessionModel.findUserBy({clientId: clientId})
|
|
.done(function(user) {
|
|
app.notify({
|
|
"title": title,
|
|
"text": user.name + " " + text,
|
|
"icon_url": context.JK.resolveAvatarUrl(user.photo_url)
|
|
});
|
|
})
|
|
.fail(function() {
|
|
app.notify({
|
|
"title": title,
|
|
"text": 'Someone ' + text,
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
});
|
|
}
|
|
|
|
|
|
function afterCurrentUserLoaded() {
|
|
|
|
// now check if the user can play in a session with others
|
|
var deferred = new $.Deferred();
|
|
if(musicianAccessOnJoin) {
|
|
deferred = context.JK.guardAgainstSinglePlayerProfile(app, function () {
|
|
promptLeave = false;
|
|
});
|
|
}
|
|
else {
|
|
deferred.resolve();
|
|
}
|
|
deferred.fail(function(result) {
|
|
if(!result.controlled_location) {
|
|
window.location="/client#/home"
|
|
}
|
|
})
|
|
.done(function() {logger.debug("user has passed all session guards")
|
|
promptLeave = true;
|
|
var sessionModel = context.JK.CurrentSessionModel;
|
|
|
|
$(sessionModel.recordingModel)
|
|
.on('startingRecording', function(e, data) {
|
|
displayStartingRecording();
|
|
lockControlsforJamTrackRecording();
|
|
})
|
|
.on('startedRecording', function(e, data) {
|
|
if(data.reason) {
|
|
var reason = data.reason;
|
|
var detail = data.detail;
|
|
|
|
var title = "Could Not Start Recording";
|
|
|
|
if(data.reason == 'client-no-response') {
|
|
notifyWithUserInfo(title, 'did not respond to the start signal.', detail);
|
|
}
|
|
else if(data.reason == 'empty-recording-id') {
|
|
app.notifyAlert(title, "No recording ID specified.");
|
|
}
|
|
else if(data.reason == 'missing-client') {
|
|
notifyWithUserInfo(title, 'could not be signalled to start recording.', detail);
|
|
}
|
|
else if(data.reason == 'already-recording') {
|
|
app.notifyAlert(title, 'Already recording. If this appears incorrect, try restarting JamKazam.');
|
|
}
|
|
else if(data.reason == 'recording-engine-unspecified') {
|
|
notifyWithUserInfo(title, 'had a problem writing recording data to disk.', detail);
|
|
}
|
|
else if(data.reason == 'recording-engine-create-directory') {
|
|
notifyWithUserInfo(title, 'had a problem creating a recording folder.', detail);
|
|
}
|
|
else if(data.reason == 'recording-engine-create-file') {
|
|
notifyWithUserInfo(title, 'had a problem creating a recording file.', detail);
|
|
}
|
|
else if(data.reason == 'recording-engine-sample-rate') {
|
|
notifyWithUserInfo(title, 'had a problem recording at the specified sample rate.', detail);
|
|
}
|
|
else if(data.reason == 'rest') {
|
|
var jqXHR = detail[0];
|
|
app.notifyServerError(jqXHR);
|
|
}
|
|
else {
|
|
notifyWithUserInfo(title, 'Error Reason: ' + reason);
|
|
}
|
|
displayDoneRecording();
|
|
}
|
|
else
|
|
{
|
|
displayStartedRecording();
|
|
displayWhoCreated(data.clientId);
|
|
lockControlsforJamTrackRecording();
|
|
}
|
|
})
|
|
.on('stoppingRecording', function(e, data) {
|
|
displayStoppingRecording(data);
|
|
unlockControlsforJamTrackRecording();
|
|
})
|
|
.on('stoppedRecording', function(e, data) {
|
|
|
|
unlockControlsforJamTrackRecording();
|
|
if(sessionModel.selfOpenedJamTracks()) {
|
|
|
|
var timeline = context.jamClient.GetJamTrackTimeline();
|
|
|
|
rest.addRecordingTimeline(data.recordingId, timeline)
|
|
.fail(function(){
|
|
app.notify(
|
|
{ title: "Unable to Add JamTrack Volume Data",
|
|
text: "The volume of the JamTrack will not be correct in the recorded mix." },
|
|
null,
|
|
true);
|
|
})
|
|
}
|
|
|
|
if(data.reason) {
|
|
logger.warn("Recording Discarded: ", data);
|
|
var reason = data.reason;
|
|
var detail = data.detail;
|
|
|
|
var title = "Recording Discarded";
|
|
|
|
if(data.reason == 'client-no-response') {
|
|
notifyWithUserInfo(title, 'did not respond to the stop signal.', detail);
|
|
}
|
|
else if(data.reason == 'missing-client') {
|
|
notifyWithUserInfo(title, 'could not be signalled to stop recording.', detail);
|
|
}
|
|
else if(data.reason == 'empty-recording-id') {
|
|
app.notifyAlert(title, "No recording ID specified.");
|
|
}
|
|
else if(data.reason == 'wrong-recording-id') {
|
|
app.notifyAlert(title, "Wrong recording ID specified.");
|
|
}
|
|
else if(data.reason == 'not-recording') {
|
|
app.notifyAlert(title, "Not currently recording.");
|
|
}
|
|
else if(data.reason == 'already-stopping') {
|
|
app.notifyAlert(title, "Already stopping the current recording.");
|
|
}
|
|
else if(data.reason == 'start-before-stop') {
|
|
notifyWithUserInfo(title, 'asked that we start a new recording; cancelling the current one.', detail);
|
|
}
|
|
else {
|
|
app.notifyAlert(title, "Error reason: " + reason);
|
|
}
|
|
|
|
displayDoneRecording();
|
|
}
|
|
else {
|
|
displayDoneRecording();
|
|
promptUserToSave(data.recordingId, timeline);
|
|
}
|
|
|
|
})
|
|
.on('abortedRecording', function(e, data) {
|
|
var reason = data.reason;
|
|
var detail = data.detail;
|
|
|
|
var title = "Recording Cancelled";
|
|
|
|
if(data.reason == 'client-no-response') {
|
|
notifyWithUserInfo(title, 'did not respond to the start signal.', detail);
|
|
}
|
|
else if(data.reason == 'missing-client') {
|
|
notifyWithUserInfo(title, 'could not be signalled to start recording.', detail);
|
|
}
|
|
else if(data.reason == 'populate-recording-info') {
|
|
notifyWithUserInfo(title, 'could not synchronize with the server.', detail);
|
|
}
|
|
else if(data.reason == 'recording-engine-unspecified') {
|
|
notifyWithUserInfo(title, 'had a problem writing recording data to disk.', detail);
|
|
}
|
|
else if(data.reason == 'recording-engine-create-directory') {
|
|
notifyWithUserInfo(title, 'had a problem creating a recording folder.', detail);
|
|
}
|
|
else if(data.reason == 'recording-engine-create-file') {
|
|
notifyWithUserInfo(title, 'had a problem creating a recording file.', detail);
|
|
}
|
|
else if(data.reason == 'recording-engine-sample-rate') {
|
|
notifyWithUserInfo(title, 'had a problem recording at the specified sample rate.', detail);
|
|
}
|
|
else {
|
|
app.notifyAlert(title, "Error reason: " + reason);
|
|
}
|
|
|
|
displayDoneRecording();
|
|
|
|
})
|
|
|
|
sessionModel.subscribe('sessionScreen', sessionChanged);
|
|
|
|
sessionModel.joinSession(sessionId)
|
|
.fail(function(xhr, textStatus, errorMessage) {
|
|
if(xhr.status == 404) {
|
|
// we tried to join the session, but it's already gone. kick user back to join session screen
|
|
promptLeave = false;
|
|
context.window.location = "/client#/findSession";
|
|
app.notify(
|
|
{ title: "Unable to Join Session",
|
|
text: "The session you attempted to join is over."
|
|
},
|
|
null,
|
|
true);
|
|
}
|
|
else if(xhr.status == 422) {
|
|
var response = JSON.parse(xhr.responseText);
|
|
if(response["errors"] && response["errors"]["tracks"] && response["errors"]["tracks"][0] == "Please select at least one track") {
|
|
app.notifyAlert("No Inputs Configured", $('<span>You will need to reconfigure your audio device.</span>'));
|
|
}
|
|
else if(response["errors"] && response["errors"]["music_session"] && response["errors"]["music_session"][0] == ["is currently recording"]) {
|
|
promptLeave = false;
|
|
context.window.location = "/client#/findSession";
|
|
app.notify( { title: "Unable to Join Session", text: "The session is currently recording." }, null, true);
|
|
}
|
|
else {
|
|
app.notifyServerError(xhr, 'Unable to Join Session');
|
|
}
|
|
}
|
|
else {
|
|
app.notifyServerError(xhr, 'Unable to Join Session');
|
|
}
|
|
});
|
|
})
|
|
|
|
}
|
|
|
|
// not leave session but leave screen
|
|
function beforeLeave(data) {
|
|
if(promptLeave) {
|
|
var leaveSessionWarningDialog = new context.JK.LeaveSessionWarningDialog(context.JK.app,
|
|
function() { promptLeave = false; context.location.hash = data.hash });
|
|
|
|
leaveSessionWarningDialog.initialize();
|
|
app.layout.showDialog('leave-session-warning');
|
|
return false;
|
|
}
|
|
return true;
|
|
}
|
|
|
|
function beforeHide(data) {
|
|
|
|
context.JK.HelpBubbleHelper.clearJamTrackGuide();
|
|
|
|
if(gon.global.video_available && gon.global.video_available!="none") {
|
|
webcamViewer.setVideoOff()
|
|
}
|
|
|
|
$fluidTracks.removeClass('showing');
|
|
if(screenActive) {
|
|
// this path is possible if FTUE is invoked on session page, and they cancel
|
|
sessionModel.leaveCurrentSession()
|
|
.fail(function(jqXHR) {
|
|
if(jqXHR.status != 404) {
|
|
logger.debug("leave session failed");
|
|
app.ajaxError(arguments)
|
|
}
|
|
});
|
|
}
|
|
screenActive = false;
|
|
|
|
sessionUtils.SessionPageLeave();
|
|
}
|
|
|
|
function getMetronomeMasterMixers() {
|
|
return _mixersForGroupId(ChannelGroupIds.MetronomeGroup, MIX_MODES.MASTER);
|
|
}
|
|
|
|
function checkMetronomeTransition() {
|
|
// trust backend over server
|
|
|
|
if(sessionModel.jamTracks() !== null || sessionModel.recordedJamTracks() !== null) {
|
|
// ignore all metronome events when jamtracks are open, because backend opens metronome mixer to play jamtrack tap-ins
|
|
logger.debug("ignore checkMetronomeTransition because JamTrack is open")
|
|
return;
|
|
}
|
|
|
|
var metronomeMasterMixers = getMetronomeMasterMixers();
|
|
|
|
if (metronomeMixer == null && metronomeMasterMixers.length > 0) {
|
|
logger.debug("monitoring metronome")
|
|
playbackControls.startMonitor(context.JK.PLAYBACK_MONITOR_MODE.METRONOME)
|
|
}
|
|
else if (metronomeMixer != null && metronomeMasterMixers.length == 0) {
|
|
playbackControls.stopMonitor();
|
|
}
|
|
metronomeMixer = metronomeMasterMixers.length > 0 ? metronomeMasterMixers : null;
|
|
}
|
|
|
|
function checkJamTrackTransition(currentSession) {
|
|
// handle jam tracks
|
|
|
|
// if we have a recording open, then don't go into JamTrack monitor mode even if a JamTrack is open
|
|
if (jamTrack == null && (currentSession && currentSession.jam_track != null && currentSession.claimed_recording == null)) {
|
|
logger.debug("monitoring jamtrack")
|
|
playbackControls.startMonitor(context.JK.PLAYBACK_MONITOR_MODE.JAMTRACK);
|
|
}
|
|
else if (jamTrack && (currentSession == null || (currentSession.jam_track == null && currentSession.claimed_recording == null))) {
|
|
logger.debug("stop monitoring jamtrack")
|
|
playbackControls.stopMonitor();
|
|
}
|
|
jamTrack = currentSession == null ? null : currentSession.jam_track;
|
|
}
|
|
|
|
function checkBackingTrackTransition(currentSession) {
|
|
// handle backing tracks
|
|
if (backing_track_path == null && (currentSession && currentSession.backing_track_path != null)) {
|
|
logger.debug("monitoring backing track")
|
|
playbackControls.startMonitor();
|
|
}
|
|
else if (backing_track_path && (currentSession == null || currentSession.backing_track_path == null)) {
|
|
logger.debug("stop monitoring backing track")
|
|
playbackControls.stopMonitor();
|
|
}
|
|
backing_track_path = currentSession == null ? null : currentSession.backing_track_path;
|
|
}
|
|
|
|
|
|
function checkRecordingTransition(currentSession) {
|
|
// handle claimed recordings
|
|
if (claimedRecording == null && (currentSession && currentSession.claimed_recording != null)) {
|
|
// this is a 'started with a claimed_recording' transition.
|
|
// we need to start a timer to watch for the state of the play session
|
|
logger.debug("monitoring recording")
|
|
playbackControls.startMonitor();
|
|
}
|
|
else if (claimedRecording && (currentSession == null || currentSession.claimed_recording == null)) {
|
|
logger.debug("stop monitoring recording")
|
|
playbackControls.stopMonitor();
|
|
}
|
|
claimedRecording = currentSession == null ? null : currentSession.claimed_recording;
|
|
}
|
|
|
|
function handleTransitionsInRecordingPlayback() {
|
|
// let's see if we detect a transition to start playback or stop playback
|
|
|
|
var currentSession = sessionModel.getCurrentSession();
|
|
|
|
checkRecordingTransition(currentSession);
|
|
checkBackingTrackTransition(currentSession);
|
|
checkJamTrackTransition(currentSession);
|
|
checkMetronomeTransition();
|
|
}
|
|
|
|
function sessionChanged() {
|
|
|
|
handleTransitionsInRecordingPlayback();
|
|
// TODO - in the specific case of a user changing their tracks using the configureTrack dialog,
|
|
// this event appears to fire before the underlying mixers have updated. I have no event to
|
|
// know definitively when the underlying mixers are up to date, so for now, we just delay slightly.
|
|
// This obviously has the possibility of introducing time-based bugs.
|
|
context.setTimeout(renderSession, RENDER_SESSION_DELAY);
|
|
}
|
|
|
|
/**
|
|
* the mixers object is a list. In order to find one by key,
|
|
* you must iterate. Convenience method to locate a particular
|
|
* mixer by id.
|
|
*/
|
|
function getMixer(mixerId, mode) {
|
|
|
|
if(mode === undefined) {
|
|
mode = sessionModel.getMixMode();
|
|
}
|
|
|
|
return allMixers[(mode ? 'M' : 'P') + mixerId];
|
|
}
|
|
|
|
function getMixerByResourceId(resourceId, mode) {
|
|
var mixerPair = mixersByResourceId[resourceId];
|
|
|
|
if(!mixerPair) {return null;}
|
|
|
|
if(mode === undefined) {
|
|
return mixerPair;
|
|
}
|
|
else {
|
|
if(mode == MIX_MODES.MASTER) {
|
|
return mixerPair.master;
|
|
}
|
|
else {
|
|
return mixerPair.personal;
|
|
}
|
|
}
|
|
}
|
|
|
|
function getMixerByTrackId(trackId, mode) {
|
|
var mixerPair = mixersByTrackId[trackId];
|
|
|
|
if(!mixerPair) {return null;}
|
|
|
|
if(mode === undefined) {
|
|
return mixerPair;
|
|
}
|
|
else {
|
|
if(mode == MIX_MODES.MASTER) {
|
|
return mixerPair.master;
|
|
}
|
|
else {
|
|
return mixerPair.personal;
|
|
}
|
|
}
|
|
}
|
|
|
|
function resetOtherAudioContent() {
|
|
if ($('.session-recordings .track').length === 0 && $('.session-recordings .download-jamtrack').length === 0 && $('.session-recordings .pending-metronome').length === 0) {
|
|
$('.session-recordings .when-empty').show();
|
|
$('.session-recording-name-wrapper').hide();
|
|
$('.session-recordings .recording-controls').hide();
|
|
$closePlaybackRecording.show();
|
|
$('.session-recordings .session-recording-name').text('(No audio loaded)')
|
|
$('.session-recordings').attr('media-state', 'closed')
|
|
$('.session-livetracks').attr('media-state', 'closed')
|
|
}
|
|
}
|
|
function didSelfOpenMedia() {
|
|
var localMediaMixers = _mixersForGroupIds([ChannelGroupIds.MediaTrackGroup, ChannelGroupIds.JamTrackGroup, ChannelGroupIds.MetronomeGroup], MIX_MODES.MASTER);
|
|
|
|
// if we find any local media mixers, then we are the opener of media
|
|
return localMediaMixers.length > 0;
|
|
}
|
|
|
|
function checkShowCloseControl() {
|
|
didSelfOpenMedia() ? $closePlaybackRecording.show() : $closePlaybackRecording.hide();
|
|
}
|
|
|
|
function renderSession() {
|
|
$myTracksContainer.empty();
|
|
$('.session-track').remove(); // Remove previous tracks
|
|
$voiceChat.hide();
|
|
_updateMixers();
|
|
_renderTracks();
|
|
_renderLocalMediaTracks();
|
|
_wireTopVolume();
|
|
_wireTopMix();
|
|
_addVoiceChat();
|
|
_initDialogs();
|
|
|
|
if ($('.session-livetracks .track').length === 0) {
|
|
$('.session-livetracks .when-empty').show();
|
|
}
|
|
checkPendingMetronome();
|
|
resetOtherAudioContent();
|
|
resizeFluid();
|
|
/**
|
|
if ($('.session-recordings .track').length === 0) {
|
|
$('.session-recordings .when-empty').show();
|
|
$('.session-recording-name-wrapper').hide();
|
|
$('.session-recordings .recording-controls').hide();
|
|
// should we show the close button? Only if the user opened the media
|
|
checkShowCloseControl();
|
|
} else {
|
|
$('.session-recordings .when-empty').hide();
|
|
$('.session-recording-name-wrapper').show();
|
|
$('.session-recordings .recording-controls').show();
|
|
checkShowCloseControl();
|
|
}
|
|
*/
|
|
|
|
// Handle long labels:
|
|
$(".track-label").dotdotdot()
|
|
$(".session-recording-name").dotdotdot()
|
|
} // renderSession
|
|
|
|
function _initDialogs() {
|
|
configureTrackDialog.initialize();
|
|
addNewGearDialog.initialize();
|
|
}
|
|
|
|
// Get the latest list of underlying audio mixer channels, and populates:
|
|
// * mixersByResourceId - a hash of resourceId / { master: mixer, personal: mixer } personal: can be null in case of PeerAudioInputMusicGroup
|
|
// * mixersByTrackId - a hash of track id / {master: mixer, personal: mixer}.
|
|
// * allMixers - a hash of mixer.id / mixer
|
|
// * masterMixers - array of master mode mixers
|
|
// * personalMixers - array of personal mode mixers
|
|
function _updateMixers() {
|
|
masterMixers = context.jamClient.SessionGetAllControlState(true);
|
|
personalMixers = context.jamClient.SessionGetAllControlState(false);
|
|
|
|
//logger.debug("masterMixers", masterMixers)
|
|
//logger.debug("personalMixers", personalMixers)
|
|
|
|
mixersByResourceId = {}
|
|
mixersByTrackId = {}
|
|
allMixers = {}
|
|
|
|
var i;
|
|
for(i = 0; i < masterMixers.length; i++) {
|
|
var masterMixer = masterMixers[i];
|
|
allMixers['M' + masterMixer.id] = masterMixer; // populate allMixers by mixer.id
|
|
|
|
// populate mixer pair
|
|
var mixerPair = {}
|
|
mixersByResourceId[masterMixer.rid] = mixerPair
|
|
mixersByTrackId[masterMixer.id] = mixerPair
|
|
mixerPair.master = masterMixer;
|
|
}
|
|
for(i = 0; i < personalMixers.length; i++) {
|
|
var personalMixer = personalMixers[i];
|
|
|
|
allMixers['P' + personalMixer.id] = personalMixer
|
|
|
|
// populate other side of mixer pair
|
|
|
|
var mixerPair = mixersByResourceId[personalMixer.rid]
|
|
if(!mixerPair) {
|
|
if(personalMixer.group_id != ChannelGroupIds.MonitorGroup) {
|
|
logger.warn("there is no master version of ", personalMixer)
|
|
}
|
|
|
|
mixerPair = {}
|
|
mixersByResourceId[personalMixer.rid] = mixerPair
|
|
}
|
|
mixersByTrackId[personalMixer.id] = mixerPair;
|
|
|
|
mixerPair.personal = personalMixer;
|
|
}
|
|
// Always add a hard-coded simplified 'mixer' for the L2M mix
|
|
|
|
/**
|
|
var l2m_mixer = {
|
|
id: '__L2M__',
|
|
range_low: -80,
|
|
range_high: 20,
|
|
volume_left: context.jamClient.SessionGetMasterLocalMix()
|
|
};
|
|
mixers.push(l2m_mixer);*/
|
|
}
|
|
|
|
function _mixersForGroupId(groupId, mixMode) {
|
|
var foundMixers = [];
|
|
var mixers = mixMode == MIX_MODES.MASTER ? masterMixers : personalMixers;
|
|
$.each(mixers, function(index, mixer) {
|
|
if ( mixer.group_id === groupId) {
|
|
foundMixers.push(mixer);
|
|
}
|
|
});
|
|
return foundMixers;
|
|
}
|
|
|
|
function _mixersForGroupIds(groupIds, mixMode) {
|
|
var foundMixers = [];
|
|
var mixers = mixMode == MIX_MODES.MASTER ? masterMixers : personalMixers;
|
|
$.each(mixers, function(index, mixer) {
|
|
var groupIdLen = groupIds.length;
|
|
for (var i = 0; i < groupIdLen; i++) {
|
|
if ( mixer.group_id === groupIds[i]) {
|
|
foundMixers.push(mixer);
|
|
}
|
|
}
|
|
});
|
|
return foundMixers;
|
|
}
|
|
|
|
function _getMyVoiceChatMixers() {
|
|
var mixers = _mixersForGroupId(ChannelGroupIds.AudioInputChatGroup, sessionModel.getMixMode());
|
|
|
|
if (mixers.length == 0) { return null; }
|
|
|
|
var oppositeMixers = _mixersForGroupId(ChannelGroupIds.AudioInputChatGroup, !sessionModel.getMixMode());
|
|
|
|
if(oppositeMixers.length == 0) {
|
|
logger.warn("unable to find opposite mixer for voice chat");
|
|
return null;
|
|
}
|
|
|
|
var mixer = mixers[0];
|
|
var oppositeMixer = oppositeMixers[0];
|
|
|
|
return {
|
|
mixer: mixer,
|
|
oppositeMixer: oppositeMixer,
|
|
vuMixer: mixer,
|
|
muteMixer: mixer
|
|
}
|
|
}
|
|
|
|
function _clientIdForUserInputMixer(mixerId, mixMode) {
|
|
var found = null;
|
|
var mixers = mixMode == MIX_MODES.MASTER ? masterMixers : personalMixers;
|
|
$.each(mixers, function(index, mixer) {
|
|
if (mixer.group_id === ChannelGroupIds.UserMusicInputGroup && mixer.id == mixerId) {
|
|
found = mixer.client_id;
|
|
return false;
|
|
}
|
|
});
|
|
return found;
|
|
}
|
|
|
|
// TODO FIXME - This needs to support multiple tracks for an individual
|
|
// client id and group.
|
|
function _mixerForClientId(clientId, groupIds, usedMixers) {
|
|
//logger.debug("clientId", clientId, "groupIds", groupIds, "mixers", mixers)
|
|
var foundMixer = null;
|
|
$.each(mixers, function(index, mixer) {
|
|
if (mixer.client_id === clientId) {
|
|
for (var i=0; i<groupIds.length; i++) {
|
|
if (mixer.group_id === groupIds[i]) {
|
|
if (!(mixer.id in usedMixers)) {
|
|
foundMixer = mixer;
|
|
return false;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
});
|
|
return foundMixer;
|
|
}
|
|
|
|
function _groupedMixersForClientId(clientId, groupIds, usedMixers, mixMode) {
|
|
//logger.debug("clientId", clientId, "groupIds", groupIds, "mixers", mixers)
|
|
var foundMixers = {};
|
|
var mixers = mixMode == MIX_MODES.MASTER ? masterMixers : personalMixers;
|
|
$.each(mixers, function(index, mixer) {
|
|
if (mixer.client_id === clientId) {
|
|
for (var i=0; i<groupIds.length; i++) {
|
|
if (mixer.group_id === groupIds[i]) {
|
|
if ((mixer.groupId != ChannelGroupIds.UserMusicInputGroup) && !(mixer.id in usedMixers)) {
|
|
var mixers = foundMixers[mixer.group_id]
|
|
if(!mixers) {
|
|
mixers = []
|
|
foundMixers[mixer.group_id] = mixers;
|
|
}
|
|
mixers.push(mixer)
|
|
}
|
|
}
|
|
}
|
|
}
|
|
});
|
|
return foundMixers;
|
|
}
|
|
|
|
function _wireTopVolume() {
|
|
var gainPercent = 0;
|
|
var mixerIds = [];
|
|
var mixers = sessionModel.isMasterMixMode() ? masterMixers : personalMixers;
|
|
$.each(mixers, function(index, mixer) {
|
|
if (sessionModel.isMasterMixMode() && mixer.group_id === ChannelGroupIds.MasterGroup) {
|
|
mixerIds.push(mixer.id);
|
|
gainPercent = percentFromMixerValue(
|
|
mixer.range_low, mixer.range_high, mixer.volume_left);
|
|
}
|
|
else if (!sessionModel.isMasterMixMode() && mixer.group_id === ChannelGroupIds.MonitorGroup) {
|
|
mixerIds.push(mixer.id);
|
|
gainPercent = percentFromMixerValue(
|
|
mixer.range_low, mixer.range_high, mixer.volume_left);
|
|
}
|
|
});
|
|
if(mixerIds.length == 0) {
|
|
logger.debug("did not find master/monitor volume", mixers)
|
|
|
|
}
|
|
|
|
var faderId = mixerIds.join(',');
|
|
var $volume = $('#volume');
|
|
$volume.attr('mixer-id', faderId);
|
|
var faderOpts = {
|
|
faderId: faderId,
|
|
faderType: "horizontal",
|
|
width: 50,
|
|
style: {
|
|
"background-image": "none",
|
|
"background-repeat":"no-repeat",
|
|
"height": "24px"
|
|
}
|
|
};
|
|
context.JK.FaderHelpers.renderFader($volume, faderOpts);
|
|
|
|
$volume.on('fader_change', faderChanged);
|
|
// Visually update fader to underlying mixer start value.
|
|
// Always do this, even if gainPercent is zero.
|
|
|
|
context.JK.FaderHelpers.setFaderValue(faderId, gainPercent);
|
|
}
|
|
|
|
/**
|
|
* This control has it's own Set/Get methods, so we don't need to
|
|
* line it up with some mixer later. We'll use a special mixer-id value
|
|
* to let us know we're dealing with the mix control.
|
|
*/
|
|
function _wireTopMix() {
|
|
var $mixSlider = $('#l2m');
|
|
var l2m_mixer = {
|
|
range_low: -80,
|
|
range_high: 20,
|
|
volume_left: context.jamClient.SessionGetMasterLocalMix()
|
|
};
|
|
// var gainPercent = percentFromMixerValue(
|
|
// l2m_mixer.range_low, l2m_mixer.range_high, l2m_mixer.volume_left);
|
|
|
|
var faderId = '#l2m'; // also the selector for renderFader
|
|
|
|
var faderOpts = {
|
|
faderId: faderId,
|
|
faderType: "horizontal",
|
|
width: 70,
|
|
style: {
|
|
"background-image": "none",
|
|
"background-repeat":"no-repeat",
|
|
"height": "24px"
|
|
}
|
|
};
|
|
context.JK.FaderHelpers.renderFader($mixSlider, faderOpts);
|
|
$mixSlider.on('fader_change', l2mChanged);
|
|
|
|
var value = context.jamClient.SessionGetMasterLocalMix();
|
|
context.JK.FaderHelpers.setFaderValue(faderId, percentFromMixerValue(-80, 20, value));
|
|
}
|
|
|
|
/**
|
|
* This has a specialized jamClient call, so custom handler.
|
|
*/
|
|
function l2mChanged(e, data) {
|
|
//var dbValue = context.JK.FaderHelpers.convertLinearToDb(newValue);
|
|
context.jamClient.SessionSetMasterLocalMix(data.percentage - 80);
|
|
}
|
|
|
|
function _addVoiceChat() {
|
|
// If, and only if, there is a mixer in group 3 (voice chat)
|
|
// Add the voice chat controls below my tracks, and hook up the mixer.
|
|
// Assumption is that there is only ever one, so we just take the first one.
|
|
var voiceChatMixers = _getMyVoiceChatMixers();
|
|
|
|
if(voiceChatMixers) {
|
|
var mixer = voiceChatMixers.mixer;
|
|
|
|
$voiceChat.show();
|
|
$voiceChat.attr('mixer-id', mixer.id);
|
|
var $voiceChatGain = $voiceChat.find('.voicechat-gain');
|
|
$voiceChatGain.attr('mixer-id', mixer.id);
|
|
var $voiceChatMute = $voiceChat.find('.voicechat-mute').attr('mixer-id', mixer.id).data('mixer', mixer).data('opposite-mixer', voiceChatMixers.oppositeMixer)
|
|
var gainPercent = percentFromMixerValue(
|
|
mixer.range_low, mixer.range_high, mixer.volume_left);
|
|
var faderOpts = {
|
|
faderId: mixer.id,
|
|
faderType: "horizontal",
|
|
width: 50
|
|
};
|
|
context.JK.FaderHelpers.renderFader($voiceChatGain, faderOpts);
|
|
$voiceChatGain.on('fader_change', faderChanged);
|
|
context.JK.FaderHelpers.setFaderValue(mixer.id, gainPercent);
|
|
if (mixer.mute) {
|
|
_toggleVisualMuteControl($voiceChatMute, mixer.mute);
|
|
}
|
|
}
|
|
}
|
|
|
|
function _renderLocalMediaTracks() {
|
|
// local media mixers come in different groups (MediaTrack, JamTrack, Metronome), but peer mixers are always PeerMediaTrackGroup
|
|
var localMediaMixers = _mixersForGroupIds(mediaTrackGroups, MIX_MODES.MASTER);
|
|
var peerLocalMediaMixers = _mixersForGroupId(ChannelGroupIds.PeerMediaTrackGroup, MIX_MODES.MASTER);
|
|
|
|
var recordedBackingTracks = sessionModel.recordedBackingTracks();
|
|
var backingTracks = sessionModel.backingTracks();
|
|
var recordedJamTracks = sessionModel.recordedJamTracks();
|
|
var jamTracks = sessionModel.jamTracks();
|
|
|
|
//logger.debug("localMediaMixers", localMediaMixers)
|
|
//logger.debug("peerMediaMixers", peerLocalMediaMixers)
|
|
|
|
// with mixer info, we use these to decide what kind of tracks are open in the backend
|
|
|
|
// each mixer has a media_type field, which describes the type of media track it is.
|
|
// * JamTrack
|
|
// * BackingTrack
|
|
// * RecordingTrack
|
|
// * MetronomeTrack
|
|
// * "" - adhoc track (not supported visually)
|
|
|
|
// it is supposed to be the case that there are only one type of track open at a time, however, that's a business policy/logic
|
|
// constraint; and may be buggy. **So, we should render whatever we have, so that it's obvious what's really going on.**
|
|
|
|
// so, let's group up all mixers by type, and then ask them to be rendered
|
|
|
|
var recordingTrackMixers = [];
|
|
var backingTrackMixers = [];
|
|
var jamTrackMixers = [];
|
|
var metronomeTrackMixers = [];
|
|
var adhocTrackMixers = [];
|
|
|
|
function groupByType(mixers, isLocalMixer) {
|
|
context._.each(mixers, function(mixer) {
|
|
var mediaType = mixer.media_type;
|
|
var groupId = mixer.group_id;
|
|
|
|
if(mediaType == 'MetronomeTrack' || groupId==ChannelGroupIds.MetronomeGroup) {
|
|
// Metronomes come across with a blank media type, so check group_id:
|
|
metronomeTrackMixers.push(mixer);
|
|
}
|
|
else if(mediaType == null || mediaType == "" || mediaType == 'RecordingTrack') {
|
|
// additional check; if we can match an id in backing tracks or recorded backing track,
|
|
// we need to remove it from the recorded track set, but move it to the backing track set
|
|
|
|
var isJamTrack = false;
|
|
|
|
if(jamTracks) {
|
|
// check if the ID matches that of an open jam track
|
|
context._.each(jamTracks, function (jamTrack) {
|
|
if (mixer.id == jamTrack.id) {
|
|
isJamTrack = true;
|
|
return false; // break
|
|
}
|
|
})
|
|
}
|
|
|
|
if(!isJamTrack && recordedJamTracks) {
|
|
// then check if the ID matches that of a open, recorded jam track
|
|
context._.each(recordedJamTracks, function (recordedJamTrack) {
|
|
if (mixer.id == recordedJamTrack.id) {
|
|
isJamTrack = true;
|
|
return false; // break
|
|
}
|
|
})
|
|
}
|
|
|
|
if(isJamTrack) {
|
|
jamTrackMixers.push(mixer)
|
|
}
|
|
else {
|
|
var isBackingTrack = false
|
|
if (recordedBackingTracks) {
|
|
context._.each(recordedBackingTracks, function (recordedBackingTrack) {
|
|
if (mixer.id == 'L' + recordedBackingTrack.client_track_id) {
|
|
isBackingTrack = true;
|
|
return false; // break
|
|
}
|
|
})
|
|
}
|
|
if (backingTracks) {
|
|
context._.each(backingTracks, function (backingTrack) {
|
|
if (mixer.id == 'L' + backingTrack.client_track_id) {
|
|
isBackingTrack = true;
|
|
return false; // break
|
|
}
|
|
})
|
|
}
|
|
|
|
if (isBackingTrack) {
|
|
backingTrackMixers.push(mixer)
|
|
}
|
|
else {
|
|
// couldn't resolve this as a JamTrack or Backing track, must be a normal recorded file
|
|
recordingTrackMixers.push(mixer);
|
|
}
|
|
}
|
|
} else if(mediaType == 'PeerMediaTrack' || mediaType == 'BackingTrack') {
|
|
// BackingTrack
|
|
backingTrackMixers.push(mixer);
|
|
} else if(mediaType == 'JamTrack') {
|
|
jamTrackMixers.push(mixer);
|
|
mixer.group_id == ChannelGroupIds.MediaTrackGroup;
|
|
} else if(mediaType == null || mediaType == "" || mediaType == 'RecordingTrack') {
|
|
// mediaType == null is for backwards compat with older clients. Can be removed soon
|
|
recordingTrackMixers.push(mixer)
|
|
} else {
|
|
logger.warn("Unknown track type: " + mediaType)
|
|
adhocTrackMixers.push(mixer);
|
|
}
|
|
});
|
|
}
|
|
|
|
groupByType(localMediaMixers, true);
|
|
groupByType(peerLocalMediaMixers, false);
|
|
|
|
if(recordingTrackMixers.length > 0) {
|
|
renderRecordingTracks(recordingTrackMixers)
|
|
}
|
|
if(backingTrackMixers.length > 0) {
|
|
renderBackingTracks(backingTrackMixers)
|
|
}
|
|
if(jamTrackMixers.length > 0) {
|
|
renderJamTracks(jamTrackMixers);
|
|
}
|
|
if(metronomeTrackMixers.length > 0 && sessionModel.jamTracks() === null && sessionModel.recordedJamTracks() == null) {
|
|
renderMetronomeTracks(metronomeTrackMixers);
|
|
}
|
|
if(adhocTrackMixers.length > 0) {
|
|
logger.warn("some tracks are open that we don't know how to show")
|
|
}
|
|
|
|
checkMetronomeTransition();
|
|
}
|
|
|
|
// this method is pretty complicated because it forks on a key bit of state:
|
|
// sessionModel.isPlayingRecording()
|
|
// a backing track opened as part of a recording has a different behavior and presence on the server (recording.recorded_backing_tracks)
|
|
// than a backing track opend ad-hoc (connection.backing_tracks)
|
|
function renderBackingTracks(backingTrackMixers) {
|
|
|
|
var backingTracks = []
|
|
if(sessionModel.isPlayingRecording()) {
|
|
// only return managed mixers for recorded backing tracks
|
|
backingTrackMixers = context._.filter(backingTrackMixers, function(mixer){return mixer.managed || mixer.managed === undefined})
|
|
backingTracks = sessionModel.recordedBackingTracks();
|
|
}
|
|
else {
|
|
// only return un-managed (ad-hoc) mixers for normal backing tracks
|
|
backingTracks = sessionModel.backingTracks();
|
|
backingTrackMixers = context._.filter(backingTrackMixers, function(mixer){return !mixer.managed})
|
|
if(backingTrackMixers.length > 1) {
|
|
logger.error("multiple, managed backing track mixers encountered", backingTrackMixers)
|
|
app.notify({
|
|
title: "Multiple Backing Tracks Encountered",
|
|
text: "Only one backing track can be open a time.",
|
|
icon_url: "/assets/content/icon_alert_big.png"
|
|
});
|
|
return false;
|
|
}
|
|
}
|
|
|
|
var noCorrespondingTracks = false;
|
|
$.each(backingTrackMixers, function(index, mixer) {
|
|
|
|
// find the track or tracks that correspond to the mixer
|
|
var correspondingTracks = []
|
|
|
|
var noCorrespondingTracks = false;
|
|
if(sessionModel.isPlayingRecording()) {
|
|
$.each(backingTracks, function (i, backingTrack) {
|
|
if(mixer.persisted_track_id == backingTrack.client_track_id || // occurs if this client is the one that opened the track
|
|
mixer.id == 'L' + backingTrack.client_track_id) { // occurs if this client is a remote participant
|
|
correspondingTracks.push(backingTrack)
|
|
}
|
|
});
|
|
}
|
|
else
|
|
{
|
|
// if this is just an open backing track, then we can assume that the 1st backingTrackMixer is ours
|
|
correspondingTracks.push(backingTracks[0])
|
|
}
|
|
|
|
if (correspondingTracks.length == 0) {
|
|
noCorrespondingTracks = true;
|
|
logger.debug("renderBackingTracks: could not map backing tracks")
|
|
app.notify({
|
|
title: "Unable to Open Backing Track",
|
|
text: "Could not correlate server and client tracks",
|
|
icon_url: "/assets/content/icon_alert_big.png"
|
|
});
|
|
return false;
|
|
}
|
|
|
|
// now we have backing track and mixer in hand; we can render
|
|
var backingTrack = correspondingTracks[0]
|
|
|
|
// pluck the 1st mixer, and assume that all other mixers in this group are of the same type (between JamTrack vs Peer)
|
|
// if it's a locally opened track (MediaTrackGroup), then we can say this person is the opener
|
|
var isOpener = mixer.group_id == ChannelGroupIds.MediaTrackGroup;
|
|
|
|
if(isOpener) {
|
|
var oppositeMixer = getMixerByResourceId(mixer.rid, MIX_MODES.PERSONAL);
|
|
var mixerId = mixer.id + "," + oppositeMixer.id
|
|
}
|
|
else {
|
|
var mixerId = mixer.id;
|
|
}
|
|
|
|
var shortFilename = context.JK.getNameOfFile(backingTrack.filename);
|
|
|
|
if(!sessionModel.isPlayingRecording()) {
|
|
// if a recording is being played back, do not set this header, because renderRecordedTracks already did
|
|
// ugly.
|
|
$('.session-recording-name').text(shortFilename);
|
|
}
|
|
|
|
var instrumentIcon = context.JK.getInstrumentIcon45(backingTrack.instrument_id);
|
|
var photoUrl = "/assets/content/icon_recording.png";
|
|
|
|
|
|
// Default trackData to participant + no Mixer state.
|
|
var trackData = {
|
|
type: 'backing_track',
|
|
trackId: backingTrack.id,
|
|
clientId: backingTrack.client_id,
|
|
name: 'Backing',
|
|
filename: backingTrack.filename,
|
|
instrumentIcon: instrumentIcon,
|
|
avatar: photoUrl,
|
|
latency: "good",
|
|
gainPercent: 0,
|
|
muteClass: 'muted',
|
|
showLoop: isOpener && !sessionModel.isPlayingRecording(),
|
|
loopState: mixer.loop,
|
|
mixerId: "",
|
|
avatarClass: 'avatar-recording',
|
|
preMasteredClass: ""
|
|
};
|
|
|
|
var gainPercent = percentFromMixerValue(
|
|
mixer.range_low, mixer.range_high, mixer.volume_left);
|
|
var muteClass = "enabled";
|
|
if (mixer.mute) {
|
|
muteClass = "muted";
|
|
}
|
|
|
|
trackData.gainPercent = gainPercent;
|
|
trackData.muteClass = muteClass;
|
|
trackData.mixerId = mixerId; // the master mixer controls the volume control for recordings (no personal controls in either master or personal mode)
|
|
trackData.vuMixerId = mixer.id; // the master mixer controls the VUs for recordings (no personal controls in either master or personal mode)
|
|
trackData.muteMixerId = mixer.id; // the master mixer controls the mute for recordings (no personal controls in either master or personal mode)
|
|
trackData.mediaTrackOpener = isOpener;
|
|
trackData.mediaControlsDisabled = !isOpener;
|
|
trackData.showHelpAboutMediaMixers = sessionModel.isPersonalMixMode() && isOpener;
|
|
|
|
_addRecordingTrack(trackData, mixer, oppositeMixer);
|
|
});
|
|
}
|
|
|
|
function renderJamTracks(jamTrackMixersOrig) {
|
|
logger.debug("rendering jam tracks")
|
|
|
|
var jamTrackMixers = jamTrackMixersOrig.slice();
|
|
var jamTracks = []
|
|
var jamTrackName = 'JamTrack';
|
|
if(sessionModel.isPlayingRecording()) {
|
|
// only return managed mixers for recorded backing tracks
|
|
jamTracks = sessionModel.recordedJamTracks();
|
|
jamTrackName = sessionModel.recordedJamTrackName();
|
|
}
|
|
else {
|
|
// only return un-managed (ad-hoc) mixers for normal backing tracks
|
|
jamTracks = sessionModel.jamTracks();
|
|
jamTrackName = sessionModel.jamTrackName();
|
|
}
|
|
|
|
|
|
|
|
// pluck the 1st mixer, and assume that all other mixers in this group are of the same type (between JamTrack vs Peer)
|
|
// if it's a locally opened track (JamTrackGroup), then we can say this person is the opener
|
|
var isOpener = jamTrackMixers[0].group_id == ChannelGroupIds.JamTrackGroup;
|
|
|
|
// using the server's info in conjuction with the client's, draw the recording tracks
|
|
if(jamTracks) {
|
|
$('.session-recording-name').text(jamTrackName);
|
|
|
|
var noCorrespondingTracks = false;
|
|
$.each(jamTracks, function(index, jamTrack) {
|
|
var mixer = null;
|
|
var preMasteredClass = "";
|
|
// find the track or tracks that correspond to the mixer
|
|
var correspondingTracks = []
|
|
$.each(jamTrackMixersOrig, function(i, matchMixer) {
|
|
if(matchMixer.id == jamTrack.id) {
|
|
correspondingTracks.push(jamTrack);
|
|
mixer = matchMixer;
|
|
}
|
|
});
|
|
|
|
if(correspondingTracks.length == 0) {
|
|
noCorrespondingTracks = true;
|
|
logger.error("could not correlate jam tracks", jamTrackMixers, jamTracks)
|
|
app.notify({
|
|
title: "Unable to Open JamTrack",
|
|
text: "Could not correlate server and client tracks",
|
|
icon_url: "/assets/content/icon_alert_big.png"});
|
|
return false;
|
|
}
|
|
|
|
// prune found recorded tracks
|
|
jamTracks = $.grep(jamTracks, function(value) {
|
|
return $.inArray(value, correspondingTracks) < 0;
|
|
});
|
|
|
|
// prune found mixers
|
|
jamTrackMixers.splice(mixer);
|
|
|
|
var oneOfTheTracks = correspondingTracks[0];
|
|
var instrumentIcon = context.JK.getInstrumentIcon45(oneOfTheTracks.instrument.id);
|
|
var photoUrl = "/assets/content/icon_recording.png";
|
|
|
|
var name = oneOfTheTracks.part
|
|
if (!name) {
|
|
name = '';
|
|
}
|
|
|
|
if(isOpener) {
|
|
var oppositeMixer = getMixerByResourceId(mixer.rid, MIX_MODES.PERSONAL);
|
|
var mixerId = mixer.id + "," + oppositeMixer.id
|
|
}
|
|
else {
|
|
var mixerId = mixer.id;
|
|
}
|
|
|
|
// Default trackData to participant + no Mixer state.
|
|
var trackData = {
|
|
type: 'jam_track',
|
|
trackId: oneOfTheTracks.id,
|
|
clientId: oneOfTheTracks.client_id,
|
|
name: name,
|
|
instrumentIcon: instrumentIcon,
|
|
avatar: photoUrl,
|
|
latency: "good",
|
|
gainPercent: 0,
|
|
muteClass: 'muted',
|
|
mixerId: "",
|
|
avatarClass : 'avatar-recording',
|
|
preMasteredClass: ""
|
|
};
|
|
|
|
var gainPercent = percentFromMixerValue(
|
|
mixer.range_low, mixer.range_high, mixer.volume_left);
|
|
var muteClass = "enabled";
|
|
if (mixer.mute) {
|
|
muteClass = "muted";
|
|
}
|
|
trackData.gainPercent = gainPercent;
|
|
trackData.muteClass = muteClass;
|
|
trackData.mixerId = mixerId; // the master mixer controls the volume control for recordings (no personal controls in either master or personal mode)
|
|
trackData.vuMixerId = mixer.id; // the master mixer controls the VUs for recordings (no personal controls in either master or personal mode)
|
|
trackData.muteMixerId = mixer.id; // the master mixer controls the mute for recordings (no personal controls in either master or personal mode)
|
|
trackData.mediaTrackOpener = isOpener;
|
|
trackData.mediaControlsDisabled = !isOpener;
|
|
trackData.showHelpAboutMediaMixers = sessionModel.isPersonalMixMode() && isOpener;
|
|
|
|
_addRecordingTrack(trackData, mixer, oppositeMixer);
|
|
});
|
|
|
|
if(!noCorrespondingTracks && jamTracks.length > 0) {
|
|
logger.error("unable to find all jam tracks against client tracks");
|
|
app.notify({title:"All tracks not found",
|
|
text: "Some tracks in the jam tracks are not present in the playback",
|
|
icon_url: "/assets/content/icon_alert_big.png"})
|
|
}
|
|
}
|
|
}
|
|
|
|
function renderMetronomeTracks(metronomeTrackMixers) {
|
|
logger.debug("rendering metronome track")
|
|
|
|
// pluck the 1st mixer, and assume that all other mixers in this group are of the same type (between JamTrack vs Peer)
|
|
// if it's a locally opened track (MediaTrackGroup), then we can say this person is the opener
|
|
var name = "Metronome"
|
|
|
|
// using the server's info in conjuction with the client's, draw the recording tracks
|
|
if(metronomeTrackMixers.length > 0) {
|
|
var metronome = {}
|
|
$('.session-recording-name').text(name);//sessionModel.getCurrentSession().backing_track_path);
|
|
|
|
var noCorrespondingTracks = false;
|
|
var mixer = metronomeTrackMixers[0]
|
|
var preMasteredClass = "";
|
|
// find the track or tracks that correspond to the mixer
|
|
var correspondingTracks = []
|
|
correspondingTracks.push(metronome);
|
|
|
|
if(correspondingTracks.length == 0) {
|
|
noCorrespondingTracks = true;
|
|
app.notify({
|
|
title: "Unable to Open Metronome",
|
|
text: "Could not correlate server and client tracks",
|
|
icon_url: "/assets/content/icon_metronome_small.png"});
|
|
return false;
|
|
}
|
|
|
|
// prune found recorded tracks
|
|
// Metronomes = $.grep(Metronomes, function(value) {
|
|
// return $.inArray(value, correspondingTracks) < 0;
|
|
// });
|
|
|
|
var oneOfTheTracks = correspondingTracks[0];
|
|
var instrumentIcon = context.JK.getInstrumentIcon45(oneOfTheTracks.instrument_id);
|
|
var photoUrl = "/assets/content/icon_metronome_small.png";
|
|
|
|
var oppositeMixer = getMixerByResourceId(mixer.rid, MIX_MODES.PERSONAL);
|
|
var mixerId = mixer.id + "," + oppositeMixer.id
|
|
|
|
// Default trackData to participant + no Mixer state.
|
|
var trackData = {
|
|
type: 'metronome',
|
|
trackId: "MS" + oneOfTheTracks.id,
|
|
clientId: oneOfTheTracks.client_id,
|
|
name: "Metronome",
|
|
instrumentIcon: photoUrl,
|
|
avatar: instrumentIcon,
|
|
latency: "good",
|
|
gainPercent: 0,
|
|
muteClass: 'muted',
|
|
mixerId: "",
|
|
avatarClass : 'avatar-recording',
|
|
preMasteredClass: "",
|
|
showMetronomeControls: true
|
|
};
|
|
|
|
var gainPercent = percentFromMixerValue(
|
|
mixer.range_low, mixer.range_high, mixer.volume_left);
|
|
var muteClass = "enabled";
|
|
if (mixer.mute) {
|
|
muteClass = "muted";
|
|
}
|
|
trackData.gainPercent = gainPercent;
|
|
trackData.muteClass = muteClass;
|
|
trackData.mixerId = mixerId; // the master mixer controls the volume control for recordings (no personal controls in either master or personal mode)
|
|
trackData.vuMixerId = mixer.id; // the master mixer controls the VUs for recordings (no personal controls in either master or personal mode)
|
|
trackData.muteMixerId = mixer.id; // the master mixer controls the mute for recordings (no personal controls in either master or personal mode)
|
|
trackData.mediaTrackOpener = true
|
|
trackData.mediaControlsDisabled = false
|
|
trackData.showHelpAboutMediaMixers = false
|
|
|
|
_addRecordingTrack(trackData, mixer, oppositeMixer);
|
|
}// if
|
|
setFormFromMetronome()
|
|
metroCricket = context.jamClient.getMetronomeCricketTestState();
|
|
setMetronomePlaybackMode()
|
|
$closePlaybackRecording.show();
|
|
}
|
|
|
|
|
|
function renderRecordingTracks(recordingMixers) {
|
|
// get the server's info for the recording
|
|
var recordedTracks = sessionModel.recordedTracks();
|
|
var recordedBackingTracks = sessionModel.recordedBackingTracks();
|
|
|
|
// pluck the 1st mixer, and assume that all other mixers in this group are of the same type (between Local vs Peer)
|
|
// if it's a locally opened track (MediaTrackGroup), then we can say this person is the opener
|
|
|
|
var isOpener = recordingMixers[0].group_id == ChannelGroupIds.MediaTrackGroup;
|
|
|
|
// using the server's info in conjuction with the client's, draw the recording tracks
|
|
if(recordedTracks) {
|
|
$('.session-recording-name').text(sessionModel.getCurrentSession().claimed_recording.name);
|
|
|
|
var noCorrespondingTracks = false;
|
|
$.each(recordingMixers, function(index, mixer) {
|
|
var preMasteredClass = "";
|
|
// find the track or tracks that correspond to the mixer
|
|
var correspondingTracks = []
|
|
$.each(recordedTracks, function(i, recordedTrack) {
|
|
if(mixer.id.indexOf("L") == 0) {
|
|
if(mixer.id.substring(1) == recordedTrack.client_track_id) {
|
|
correspondingTracks.push(recordedTrack);
|
|
}
|
|
}
|
|
else if(mixer.id.indexOf("C") == 0) {
|
|
if(mixer.id.substring(1) == recordedTrack.client_id) {
|
|
correspondingTracks.push(recordedTrack);
|
|
preMasteredClass = "pre-mastered-track";
|
|
}
|
|
}
|
|
else {
|
|
// this should not be possible
|
|
alert("Invalid state: the recorded track had neither persisted_track_id or persisted_client_id");
|
|
}
|
|
});
|
|
|
|
if(correspondingTracks.length == 0) {
|
|
noCorrespondingTracks = true;
|
|
logger.debug("unable to correlate all recorded tracks", recordingMixers, recordedTracks)
|
|
app.notify({
|
|
title: "Unable to Open Recording",
|
|
text: "Could not correlate server and client tracks",
|
|
icon_url: "/assets/content/icon_alert_big.png"});
|
|
return false;
|
|
}
|
|
|
|
// prune found recorded tracks
|
|
recordedTracks = $.grep(recordedTracks, function(value) {
|
|
return $.inArray(value, correspondingTracks) < 0;
|
|
});
|
|
|
|
var oneOfTheTracks = correspondingTracks[0];
|
|
var instrumentIcon = context.JK.getInstrumentIcon45(oneOfTheTracks.instrument_id);
|
|
var photoUrl = "/assets/content/icon_recording.png";
|
|
|
|
var name = oneOfTheTracks.user.name;
|
|
if (!(name)) {
|
|
name = oneOfTheTracks.user.first_name + ' ' + oneOfTheTracks.user.last_name;
|
|
}
|
|
|
|
if(isOpener) {
|
|
var oppositeMixer = getMixerByResourceId(mixer.rid, MIX_MODES.PERSONAL);
|
|
var mixerId = mixer.id + "," + oppositeMixer.id
|
|
}
|
|
else {
|
|
var mixerId = mixer.id;
|
|
}
|
|
|
|
// Default trackData to participant + no Mixer state.
|
|
var trackData = {
|
|
type: 'recorded_track',
|
|
trackId: oneOfTheTracks.id,
|
|
clientId: oneOfTheTracks.client_id,
|
|
name: name,
|
|
instrumentIcon: instrumentIcon,
|
|
avatar: photoUrl,
|
|
latency: "good",
|
|
gainPercent: 0,
|
|
muteClass: 'muted',
|
|
mixerId: "",
|
|
avatarClass : 'avatar-recording',
|
|
preMasteredClass: preMasteredClass
|
|
};
|
|
|
|
var gainPercent = percentFromMixerValue(
|
|
mixer.range_low, mixer.range_high, mixer.volume_left);
|
|
var muteClass = "enabled";
|
|
if (mixer.mute) {
|
|
muteClass = "muted";
|
|
}
|
|
trackData.gainPercent = gainPercent;
|
|
trackData.muteClass = muteClass;
|
|
trackData.mixerId = mixerId; // the master mixer controls the volume control for recordings (no personal controls in either master or personal mode)
|
|
trackData.vuMixerId = mixer.id; // the master mixer controls the VUs for recordings (no personal controls in either master or personal mode)
|
|
trackData.muteMixerId = mixer.id; // the master mixer controls the mute for recordings (no personal controls in either master or personal mode)
|
|
trackData.mediaControlsDisabled = !isOpener;
|
|
trackData.mediaTrackOpener = isOpener;
|
|
trackData.showHelpAboutMediaMixers = sessionModel.isPersonalMixMode() && isOpener;
|
|
|
|
_addRecordingTrack(trackData, mixer, oppositeMixer);
|
|
});
|
|
|
|
if(!noCorrespondingTracks && recordedTracks.length > 0) {
|
|
logger.error("unable to find all recorded tracks against client tracks");
|
|
app.notify({title:"All tracks not found",
|
|
text: "Some tracks in the recording are not present in the playback",
|
|
icon_url: "/assets/content/icon_alert_big.png"})
|
|
}
|
|
}
|
|
}
|
|
|
|
function trackMuteSelected(e, data) {
|
|
var muteOption = data.muteOption; // muteOption is going to be either 'master' or 'personal'. We mute the correct one, based on track info
|
|
|
|
var $muteControl = $(this);
|
|
|
|
// mixer is the mixer object returned from the backend corresponding to the mixer in this particular mode
|
|
// oppositeMixer is the mixer correspond to the opposite mode.
|
|
// Note that oppositeMixer is not ever set for ChannelGroupIds.AudioInputMusicGroup or ChannelGroupIds.MediaTrackGroup
|
|
|
|
var mixer = $muteControl.data('mixer')
|
|
var oppositeMixer = $muteControl.data('opposite-mixer')
|
|
|
|
logger.debug("muting tracks. current mixer id=" + mixer.id + ", opposite mixer id=" + oppositeMixer.id)
|
|
|
|
var mixerPair = {}
|
|
if(sessionModel.isMasterMixMode()) {
|
|
mixerPair.master = mixer;
|
|
mixerPair.personal = oppositeMixer;
|
|
}
|
|
else {
|
|
mixerPair.master = oppositeMixer;
|
|
mixerPair.personal = mixer;
|
|
}
|
|
if(muteOption == 'master') {
|
|
_toggleAudioMute(mixerPair.master.id, true, mixerPair.master.mode);
|
|
_toggleAudioMute(mixerPair.personal.id, true, mixerPair.personal.mode);
|
|
}
|
|
else {
|
|
_toggleAudioMute(mixerPair.personal.id, true, mixerPair.personal.mode);
|
|
_toggleAudioMute(mixerPair.master.id, false, mixerPair.master.mode);
|
|
}
|
|
|
|
_toggleVisualMuteControl($muteControl, true);
|
|
}
|
|
|
|
// find backend mixer based on track data, and target client_id
|
|
function findMixerForTrack(client_id, track, myTrack) {
|
|
var mixer = null; // what is the best mixer for this track/client ID?
|
|
var oppositeMixer = null; // what is the corresponding mixer in the opposite mode?
|
|
var vuMixer = null;
|
|
var muteMixer = null;
|
|
|
|
var mixMode = sessionModel.getMixMode();
|
|
if(myTrack) {
|
|
|
|
// when it's your track, look it up by the backend resource ID
|
|
mixer = getMixerByTrackId(track.client_track_id, mixMode)
|
|
vuMixer = mixer;
|
|
muteMixer = mixer;
|
|
|
|
// sanity checks
|
|
if(mixer && (mixer.group_id != ChannelGroupIds.AudioInputMusicGroup)) { logger.error("found local mixer that was not of groupID: AudioInputMusicGroup", mixer) }
|
|
|
|
if(mixer) {
|
|
// find the matching AudioInputMusicGroup for the opposite mode
|
|
oppositeMixer = getMixerByTrackId(track.client_track_id, !mixMode)
|
|
|
|
if(mixMode == MIX_MODES.PERSONAL) {
|
|
muteMixer = oppositeMixer; // make the master mixer the mute mixer
|
|
}
|
|
|
|
// sanity checks
|
|
if(!oppositeMixer) {logger.error("unable to find opposite mixer for local mixer", mixer)}
|
|
else if(oppositeMixer.group_id != ChannelGroupIds.AudioInputMusicGroup) { logger.error("found local mixer in opposite mode that was not of groupID: AudioInputMusicGroup", mixer, oppositeMixer)}
|
|
}
|
|
else {
|
|
logger.debug("local track is not present: ", track)
|
|
}
|
|
}
|
|
else {
|
|
if(mixMode === MIX_MODES.MASTER) {
|
|
|
|
// when it's a remote track and in master mode, we should find the PeerAudioInputMusicGroup
|
|
mixer = getMixerByTrackId(track.client_track_id, MIX_MODES.MASTER)
|
|
if(mixer && (mixer.group_id != ChannelGroupIds.PeerAudioInputMusicGroup)) { logger.error("found remote mixer that was not of groupID: PeerAudioInputMusicGroup", mixer) }
|
|
|
|
vuMixer = mixer;
|
|
muteMixer = mixer;
|
|
|
|
if(mixer) {
|
|
// we should be able to find a UserMusicInputGroup for this clientId in personal mode
|
|
var oppositeMixers = _groupedMixersForClientId(client_id, [ ChannelGroupIds.UserMusicInputGroup], {}, MIX_MODES.PERSONAL);
|
|
if (oppositeMixers[ChannelGroupIds.UserMusicInputGroup]) { oppositeMixer = oppositeMixers[ChannelGroupIds.UserMusicInputGroup][0]; }
|
|
|
|
if(!oppositeMixer) {logger.error("unable to find UserMusicInputGroup corresponding to PeerAudioInputMusicGroup mixer", mixer ) }
|
|
}
|
|
}
|
|
else {
|
|
// when it's a remote track and in personal mode, we want the 'Peer Stream', which is UserMusicInputGroup
|
|
// this spans N tracks for the remote user
|
|
var mixers = _groupedMixersForClientId(client_id, [ ChannelGroupIds.UserMusicInputGroup], {}, MIX_MODES.PERSONAL);
|
|
if (mixers[ChannelGroupIds.UserMusicInputGroup]) { mixer = mixers[ChannelGroupIds.UserMusicInputGroup][0]; }
|
|
|
|
vuMixer = mixer;
|
|
muteMixer = mixer;
|
|
|
|
if(mixer) {
|
|
// now grab the PeerAudioInputMusicGroup in master mode to satisfy the 'opposite' mixer
|
|
oppositeMixer = getMixerByTrackId(track.client_track_id, MIX_MODES.MASTER)
|
|
if(!oppositeMixer) {logger.debug("unable to find a PeerAudioInputMusicGroup master mixer matching a UserMusicInput", track.client_track_id, mixersByTrackId)}
|
|
else if(oppositeMixer.group_id != ChannelGroupIds.PeerAudioInputMusicGroup) { logger.error("found remote mixer that was not of groupID: PeerAudioInputMusicGroup", mixer) }
|
|
|
|
vuMixer = oppositeMixer; // for personal mode, use the PeerAudioInputMusicGroup's VUs
|
|
}
|
|
}
|
|
}
|
|
|
|
|
|
return {
|
|
mixer: mixer,
|
|
oppositeMixer: oppositeMixer,
|
|
vuMixer: vuMixer,
|
|
muteMixer: muteMixer
|
|
}
|
|
}
|
|
function _renderTracks() {
|
|
myTracks = [];
|
|
|
|
// Participants are here now, but the mixers don't update right away.
|
|
// Draw tracks from participants, then setup timers to look for the
|
|
// mixers that go with those participants, if they're missing.
|
|
|
|
lookingForMixers = [] // clear this back out as we are restarting from scratch
|
|
lookingForMixersCount = 0;
|
|
$.each(sessionModel.participants(), function(index, participant) {
|
|
|
|
var name = participant.user.name;
|
|
if (!(name)) {
|
|
name = participant.user.first_name + ' ' + participant.user.last_name;
|
|
}
|
|
|
|
var myTrack = app.clientId == participant.client_id;
|
|
|
|
// special case; if it's me and I have no tracks, show info about this sort of use of the app
|
|
if (myTrack && participant.tracks.length == 0) {
|
|
$tracksHolder.addClass('no-local-tracks')
|
|
$liveTracksContainer.addClass('no-local-tracks')
|
|
}
|
|
else {
|
|
$tracksHolder.removeClass('no-local-tracks')
|
|
$liveTracksContainer.removeClass('no-local-tracks')
|
|
}
|
|
|
|
// loop through all tracks for each participant
|
|
$.each(participant.tracks, function (index, track) {
|
|
var instrumentIcon = context.JK.getInstrumentIcon45(track.instrument_id);
|
|
var photoUrl = context.JK.resolveAvatarUrl(participant.user.photo_url);
|
|
|
|
// Default trackData to participant + no Mixer state.
|
|
var trackData = {
|
|
trackId: track.id,
|
|
connection_id: track.connection_id,
|
|
client_track_id: track.client_track_id,
|
|
client_resource_id: track.client_resource_id,
|
|
clientId: participant.client_id,
|
|
name: name,
|
|
instrumentIcon: instrumentIcon,
|
|
avatar: photoUrl,
|
|
latency: "good",
|
|
gainPercent: 0,
|
|
muteClass: 'muted',
|
|
mixerId: "",
|
|
avatarClass: 'avatar-med',
|
|
preMasteredClass: "",
|
|
myTrack: myTrack
|
|
};
|
|
|
|
var mixerData = findMixerForTrack(participant.client_id, track, myTrack)
|
|
var mixer = mixerData.mixer;
|
|
var vuMixer = mixerData.vuMixer;
|
|
var muteMixer = mixerData.muteMixer;
|
|
var oppositeMixer = mixerData.oppositeMixer;
|
|
|
|
|
|
if (mixer && oppositeMixer) {
|
|
myTrack = (mixer.group_id === ChannelGroupIds.AudioInputMusicGroup);
|
|
if (!myTrack) {
|
|
// it only makes sense to track 'audio established' for tracks that don't belong to you
|
|
sessionModel.setAudioEstablished(participant.client_id, true);
|
|
}
|
|
|
|
var gainPercent = percentFromMixerValue(
|
|
mixer.range_low, mixer.range_high, mixer.volume_left);
|
|
var muteClass = "enabled";
|
|
if (mixer.mute) {
|
|
muteClass = "muted";
|
|
}
|
|
|
|
trackData.gainPercent = gainPercent;
|
|
trackData.muteClass = muteClass;
|
|
trackData.mixerId = mixer.id;
|
|
trackData.vuMixerId = vuMixer.id;
|
|
trackData.oppositeMixer = oppositeMixer;
|
|
trackData.muteMixerId = muteMixer.id;
|
|
trackData.noaudio = false;
|
|
trackData.group_id = mixer.group_id;
|
|
context.jamClient.SessionSetUserName(participant.client_id, name);
|
|
|
|
} else { // No mixer to match, yet
|
|
lookingForMixers.push({track: track, clientId: participant.client_id})
|
|
trackData.noaudio = true;
|
|
if (!(lookingForMixersTimer)) {
|
|
logger.debug("waiting for mixer to show up for track: " + track.id)
|
|
lookingForMixersTimer = context.setInterval(lookForMixers, 500);
|
|
}
|
|
}
|
|
|
|
var allowDelete = myTrack && index > 0;
|
|
_addTrack(allowDelete, trackData, mixer, oppositeMixer);
|
|
|
|
// Show settings icons only for my tracks
|
|
if (myTrack) {
|
|
myTracks.push(trackData);
|
|
}
|
|
});
|
|
|
|
});
|
|
|
|
configureTrackDialog = new context.JK.ConfigureTrackDialog(app, myTracks, sessionId, sessionModel);
|
|
addNewGearDialog = new context.JK.AddNewGearDialog(app, self);
|
|
}
|
|
|
|
function connectTrackToMixer(trackSelector, track, mixerId, gainPercent, groupId, mixer, oppositeMixer) {
|
|
var vuOpts = $.extend({}, trackVuOpts);
|
|
var faderOpts = $.extend({}, trackFaderOpts);
|
|
faderOpts.faderId = mixerId;
|
|
var vuLeftSelector = trackSelector + " .track-vu-left";
|
|
var vuRightSelector = trackSelector + " .track-vu-right";
|
|
var faderSelector = trackSelector + " .track-gain";
|
|
var $fader = $(faderSelector).attr('mixer-id', mixerId).data('groupId', groupId).data('mixer', mixer).data('opposite-mixer', oppositeMixer);
|
|
if(track.mediaControlsDisabled) {
|
|
$fader.data('media-controls-disabled', true).data('media-track-opener', track.mediaTrackOpener) // this we be applied later to the fader handle $element
|
|
}
|
|
$fader.data('showHelpAboutMediaMixers', track.showHelpAboutMediaMixers)
|
|
|
|
|
|
var $track = $(trackSelector);
|
|
// Set mixer-id attributes and render VU/Fader
|
|
|
|
if (!track.hideVU) {
|
|
context.JK.VuHelpers.renderVU(vuLeftSelector, vuOpts);
|
|
$track.find('.track-vu-left').attr('mixer-id', track.vuMixerId + '_vul').data('groupId', groupId).data('mixer', mixer).data('opposite-mixer', oppositeMixer)
|
|
context.JK.VuHelpers.renderVU(vuRightSelector, vuOpts);
|
|
$track.find('.track-vu-right').attr('mixer-id', track.vuMixerId + '_vur').data('groupId', groupId).data('mixer', mixer).data('opposite-mixer', oppositeMixer)
|
|
}
|
|
|
|
if (track.showMetronomeControls) {
|
|
$track.find('.metronome-selects').removeClass("hidden")
|
|
} else {
|
|
$track.find('.metronome-selects').addClass("hidden")
|
|
}
|
|
|
|
// if (track.showMetroSound) {
|
|
// $track.find('.metro-sound-select').removeClass("hidden")
|
|
// }
|
|
|
|
context.JK.FaderHelpers.renderFader($fader, faderOpts);
|
|
// Set gain position
|
|
context.JK.FaderHelpers.setFaderValue(mixerId, gainPercent);
|
|
|
|
if(track.faderChanged) {
|
|
$fader.on('fader_change', track.faderChanged);
|
|
} else {
|
|
$fader.on('fader_change', faderChanged);
|
|
}
|
|
|
|
return $track;
|
|
}
|
|
|
|
// Function called on an interval when participants change. Mixers seem to
|
|
// show up later, so we render the tracks from participants, but keep track
|
|
// of the ones there weren't any mixers for, and continually try to find them
|
|
// and get them connected to the mixers underneath.
|
|
function lookForMixers() {
|
|
lookingForMixersCount++;
|
|
_updateMixers();
|
|
var usedMixers = {};
|
|
var keysToDelete = [];
|
|
context._.each(lookingForMixers, function(data) {
|
|
var clientId = data.clientId;
|
|
var track = data.track;
|
|
|
|
var myTrack = app.clientId == clientId;
|
|
|
|
var mixerData = findMixerForTrack(clientId, track, myTrack)
|
|
|
|
var mixer = mixerData.mixer;
|
|
var oppositeMixer = mixerData.oppositeMixer;
|
|
var vuMixer = mixerData.vuMixer;
|
|
var muteMixer = mixerData.muteMixer;
|
|
|
|
if (mixer && oppositeMixer) {
|
|
if(!myTrack) {
|
|
// it only makes sense to track 'audio established' for tracks that don't belong to you
|
|
sessionModel.setAudioEstablished(clientId, true);
|
|
}
|
|
var participant = (sessionModel.getParticipant(clientId) || {name:'unknown'}).name;
|
|
logger.debug("found mixer=" + mixer.id + ", participant=" + participant)
|
|
usedMixers[mixer.id] = true;
|
|
keysToDelete.push(data);
|
|
var gainPercent = percentFromMixerValue(
|
|
mixer.range_low, mixer.range_high, mixer.volume_left);
|
|
var trackSelector = 'div.track[track-id="' + track.id + '"]';
|
|
|
|
connectTrackToMixer(trackSelector, track, mixer.id, gainPercent, mixer.group_id, mixer, oppositeMixer);
|
|
var $track = $('div.track[client-id="' + clientId + '"]');
|
|
var $trackIconMute = $track.find('.track-icon-mute')
|
|
$trackIconMute.attr('mixer-id', muteMixer.id).data('mixer', mixer).data('opposite-mixer', oppositeMixer)
|
|
$trackIconMute.muteSelector().on(EVENTS.MUTE_SELECTED, trackMuteSelected)
|
|
|
|
// hide overlay for all tracks associated with this client id (if one mixer is present, then all tracks are valid)
|
|
$('.disabled-track-overlay', $track).hide();
|
|
$('.track-connection', $track).removeClass('red yellow green').addClass('grey');
|
|
// Set mute state
|
|
_toggleVisualMuteControl($trackIconMute, mixer.mute || oppositeMixer.mute);
|
|
}
|
|
else {
|
|
// if 1 second has gone by and still no mixer, then we gray the participant's tracks
|
|
if(lookingForMixersCount == 2) {
|
|
var $track = $('div.track[client-id="' + clientId + '"]');
|
|
$('.disabled-track-overlay', $track).show();
|
|
$('.track-connection', $track).removeClass('red yellow green').addClass('red');
|
|
}
|
|
// if 5 seconds have gone by and no mixer, then we tell the server failed to establish audio
|
|
else if(lookingForMixersCount == 10) {
|
|
if(!myTrack) {
|
|
// it only makes sense to track 'audio established' for tracks that don't belong to you
|
|
sessionModel.setAudioEstablished(clientId, false);
|
|
}
|
|
}
|
|
var participant = (sessionModel.getParticipant(clientId) || { user: {name: 'unknown'}}).user.name;
|
|
logger.debug("still looking for mixer for participant=" + participant + ", clientId=" + clientId)
|
|
}
|
|
})
|
|
|
|
for (var i=0; i<keysToDelete.length; i++) {
|
|
var index = lookingForMixers.indexOf(keysToDelete[i]);
|
|
lookingForMixers.splice(index, 1);
|
|
}
|
|
|
|
if (lookingForMixers.length === 0 ||
|
|
lookingForMixersCount > 20) {
|
|
lookingForMixersCount = 0;
|
|
lookingForMixers = []
|
|
context.clearTimeout(lookingForMixersTimer);
|
|
lookingForMixersTimer = null;
|
|
}
|
|
}
|
|
|
|
// Given a mixerID and a value between 0.0-1.0,
|
|
// light up the proper VU lights.
|
|
function _updateVU(mixerId, value, isClipping) {
|
|
|
|
// Special-case for mono tracks. If mono, and it's a _vul id,
|
|
// update both sides, otherwise do nothing.
|
|
// If it's a stereo track, just do the normal thing.
|
|
var selector;
|
|
var pureMixerId = mixerId.replace("_vul", "");
|
|
pureMixerId = pureMixerId.replace("_vur", "");
|
|
var mixer = getMixer(pureMixerId, sessionModel.getMixMode());
|
|
if(!mixer) {
|
|
// try again, in the opposite mode (awful that this is necessary)
|
|
mixer = getMixer(pureMixerId, !sessionModel.getMixMode());
|
|
}
|
|
if (mixer) {
|
|
if (!(mixer.stereo)) { // mono track
|
|
if (mixerId.substr(-4) === "_vul") {
|
|
// Do the left
|
|
selector = $tracksHolder.find('[mixer-id="' + pureMixerId + '_vul"]');
|
|
context.JK.VuHelpers.updateVU(selector, value);
|
|
// Do the right
|
|
selector = $tracksHolder.find('[mixer-id="' + pureMixerId + '_vur"]');
|
|
context.JK.VuHelpers.updateVU(selector, value);
|
|
} // otherwise, it's a mono track, _vur event - ignore.
|
|
} else { // stereo track
|
|
selector = $tracksHolder.find('[mixer-id="' + mixerId + '"]');
|
|
context.JK.VuHelpers.updateVU(selector, value);
|
|
}
|
|
}
|
|
}
|
|
|
|
function _addTrack(allowDelete, trackData, mixer, oppositeMixer) {
|
|
|
|
var $destination = $myTracksContainer;
|
|
if (trackData.clientId !== app.clientId) {
|
|
$destination = $liveTracksContainer
|
|
$('.session-livetracks .when-empty').hide();
|
|
}
|
|
var template = $('#template-session-track').html();
|
|
var newTrack = $(context.JK.fillTemplate(template, trackData));
|
|
newTrack.data('track_data', trackData)
|
|
var audioOverlay = $('.disabled-track-overlay', newTrack);
|
|
var $trackIconMute = newTrack.find('.track-icon-mute')
|
|
$trackIconMute.muteSelector().on(EVENTS.MUTE_SELECTED, trackMuteSelected)
|
|
$trackIconMute.data('mixer', mixer).data('opposite-mixer', oppositeMixer)
|
|
|
|
audioOverlay.hide(); // always start with overlay hidden, and only show if no audio persists
|
|
$destination.append(newTrack);
|
|
|
|
// Render VU meters and gain fader
|
|
var trackSelector = $destination.selector + ' .session-track[track-id="' + trackData.trackId + '"]';
|
|
var gainPercent = trackData.gainPercent || 0;
|
|
connectTrackToMixer(trackSelector, trackData, trackData.mixerId, gainPercent, trackData.group_id, mixer, oppositeMixer);
|
|
|
|
var $closeButton = $('#div-track-close', 'div[track-id="' + trackData.trackId + '"]');
|
|
if (!allowDelete) {
|
|
$closeButton.hide();
|
|
}
|
|
else {
|
|
$closeButton.click(deleteTrack);
|
|
}
|
|
|
|
// is this used?
|
|
tracks[trackData.trackId] = new context.JK.SessionTrack(trackData.clientId);
|
|
}
|
|
|
|
// something is being shown now in the other audio area
|
|
function otherAudioFilled() {
|
|
$('.session-recordings .when-empty').hide();
|
|
$('.session-recording-name-wrapper').show();
|
|
$('.session-recordings').attr('media-state', 'open');
|
|
$('.session-livetracks').attr('media-state', 'open');
|
|
}
|
|
|
|
|
|
function resizeFluid() {
|
|
var trackWidth = 78; // 70 width + 8 margin
|
|
var trackPadding = 30; // 15px left and right
|
|
|
|
var numLiveTracks = $liveTracks.find('.track').length;
|
|
var numAudioTracks = $audioTracks.find('.track').length;
|
|
var totalWidth = $fluidTracks.width();
|
|
|
|
|
|
// calculate desired audio tracks width
|
|
var minimumLiveTrackWidth = numLiveTracks * trackWidth + trackPadding;
|
|
var otherAudioWidth = numAudioTracks * trackWidth + trackPadding;
|
|
var liveTrackWidth = totalWidth - otherAudioWidth;
|
|
|
|
// live tracks get precedence over audio tracks, if there is a content over width usage
|
|
if(liveTrackWidth < minimumLiveTrackWidth) {
|
|
logger.debug("live track width trumping mode")
|
|
liveTrackWidth = minimumLiveTrackWidth;
|
|
otherAudioWidth = totalWidth - liveTrackWidth;
|
|
}
|
|
|
|
var otherAudioWidthPct = Math.floor(100 * otherAudioWidth/totalWidth);
|
|
var liveTrackWidthPct = Math.ceil(100 * liveTrackWidth/totalWidth);
|
|
|
|
//logger.debug("resizeFluid: ", minimumLiveTrackWidth, otherAudioWidth, otherAudioWidthPct, liveTrackWidthPct, liveTrackWidthPct)
|
|
$audioTracks.css('width', otherAudioWidthPct + '%');
|
|
$liveTracks.css('width', liveTrackWidthPct + '%');
|
|
}
|
|
|
|
function _addRecordingTrack(trackData, mixer, oppositeMixer) {
|
|
otherAudioFilled();
|
|
$('.session-recordings .recording-controls').show();
|
|
|
|
var parentSelector = '#session-recordedtracks-container';
|
|
var $destination = $(parentSelector);
|
|
|
|
var template = $('#template-session-track').html();
|
|
var newTrack = $(context.JK.fillTemplate(template, trackData));
|
|
newTrack.data('track_data', trackData);
|
|
$otherAudioContainer.append(newTrack);
|
|
if(trackData.preMasteredClass) {
|
|
context.JK.helpBubble($('.track-instrument', newTrack), 'pre-processed-track', {}, {offsetParent: newTrack.closest('.content-body')});
|
|
}
|
|
|
|
// Render VU meters and gain fader
|
|
var trackSelector = $otherAudioContainer.selector + ' .session-track[track-id="' + trackData.trackId + '"]';
|
|
var gainPercent = trackData.gainPercent || 0;
|
|
var $track = connectTrackToMixer(trackSelector, trackData, trackData.mixerId, gainPercent, null);
|
|
var $trackIconMute = $track.find('.track-icon-mute')
|
|
if(trackData.mediaControlsDisabled) {
|
|
$trackIconMute.data('media-controls-disabled', true).data('media-track-opener', trackData.mediaTrackOpener)
|
|
}
|
|
$trackIconMute.data('mixer', mixer).data('opposite-mixer', oppositeMixer)
|
|
$trackIconMute.data('showHelpAboutMediaMixers', trackData.showHelpAboutMediaMixers)
|
|
|
|
|
|
if(trackData.showLoop) {
|
|
var $trackIconLoop = $track.find('.track-icon-loop')
|
|
var $trackIconLoopCheckbox = $trackIconLoop.find('input');
|
|
$trackIconLoopCheckbox.prop('checked', trackData.loopState);
|
|
|
|
context.JK.checkbox($trackIconLoopCheckbox)
|
|
$trackIconLoopCheckbox.on('ifChanged', function() {
|
|
var loop = $trackIconLoopCheckbox.is(':checked')
|
|
_toggleAudioLoop(mixer.id, loop, getMixer(mixer.id).mode)
|
|
});
|
|
$trackIconLoop.show()
|
|
}
|
|
|
|
// is this used?
|
|
tracks[trackData.trackId] = new context.JK.SessionTrack(trackData.clientId);
|
|
}
|
|
|
|
/**
|
|
* Will be called when fader changes. The fader id (provided at subscribe time),
|
|
* the new value (0-100) and whether the fader is still being dragged are passed.
|
|
*/
|
|
function faderChanged(e, data) {
|
|
var $target = $(this);
|
|
var faderId = $target.attr('mixer-id');
|
|
var groupId = $target.data('groupId');
|
|
var mixerIds = faderId.split(',');
|
|
|
|
// media tracks are the only controls that sometimes set two mixers right now
|
|
var hasMasterAndPersonalControls = mixerIds.length == 2;
|
|
|
|
$.each(mixerIds, function(i,v) {
|
|
var broadcast = !(data.dragging); // If fader is still dragging, don't broadcast
|
|
|
|
var mode = undefined;
|
|
if(hasMasterAndPersonalControls) {
|
|
mode = i == 0 ? MIX_MODES.MASTER : MIX_MODES.PERSONAL;
|
|
}
|
|
var mixer = fillTrackVolumeObject(v, mode, broadcast);
|
|
|
|
setMixerVolume(mixer, data.percentage);
|
|
|
|
if(groupId == ChannelGroupIds.UserMusicInputGroup) {
|
|
// there may be other mixers with this same ID in the case of a Peer Music Stream, so update them as well
|
|
context.JK.FaderHelpers.setFaderValue(v, data.percentage);
|
|
}
|
|
});
|
|
}
|
|
|
|
// function tempoFaderChanged(e, data) {
|
|
// var $target = $(this);
|
|
// var faderId = $target.attr('mixer-id');
|
|
// var groupId = $target.data('groupId');
|
|
// var mixerIds = faderId.split(',');
|
|
// $.each(mixerIds, function(i,v) {
|
|
// // TODO Interpolate tempo values if we decide to go this way:
|
|
// if(groupId == ChannelGroupIds.UserMusicInputGroup) {
|
|
// // there may be other mixers with this same ID in the case of a Peer Music Stream, so update them as well
|
|
// }
|
|
// });
|
|
// }
|
|
|
|
function handleMetronomeCallback(args) {
|
|
logger.debug("MetronomeCallback: ", args)
|
|
metroTempo = args.bpm
|
|
metroCricket = args.cricket;
|
|
metroSound = METRO_SOUND_LOOKUP[args.sound];
|
|
|
|
setMetronomePlaybackMode();
|
|
setFormFromMetronome();
|
|
|
|
// This isn't actually there, so we rely on the metroSound as set from select on form:
|
|
// metroSound = args.sound
|
|
context.JK.CurrentSessionModel.refreshCurrentSession(true);
|
|
}
|
|
|
|
function handleVolumeChangeCallback(mixerId, isLeft, value, isMuted) {
|
|
// Visually update mixer
|
|
// There is no need to actually set the back-end mixer value as the
|
|
// back-end will already have updated the audio mixer directly prior to sending
|
|
// me this event. I simply need to visually show the new fader position.
|
|
// TODO: Use mixer's range
|
|
var faderValue = percentFromMixerValue(-80, 20, value);
|
|
context.JK.FaderHelpers.setFaderValue(mixerId, faderValue);
|
|
var $muteControl = $('[control="mute"][mixer-id="' + mixerId + '"]');
|
|
_toggleVisualMuteControl($muteControl, isMuted);
|
|
}
|
|
|
|
function handleBridgeCallback(vuData) {
|
|
var j;
|
|
var eventName = null;
|
|
var mixerId = null;
|
|
var value = null;
|
|
var vuInfo = null;
|
|
for (j = 0; j < vuData.length; j++) {
|
|
vuInfo = vuData[j];
|
|
var eventName = vuInfo[0];
|
|
var vuVal = 0.0;
|
|
if(eventName === "vu") {
|
|
var mixerId = vuInfo[1];
|
|
var leftValue = vuInfo[2];
|
|
var leftClipping = vuInfo[3];
|
|
var rightValue = vuInfo[4];
|
|
var rightClipping = vuInfo[5];
|
|
// TODO - no guarantee range will be -80 to 20. Get from the
|
|
// GetControlState for this mixer which returns min/max
|
|
// value is a DB value from -80 to 20. Convert to float from 0.0-1.0
|
|
_updateVU(mixerId + "_vul", (leftValue + 80) / 100, leftClipping);
|
|
_updateVU(mixerId + "_vur", (rightValue + 80) / 100, rightClipping);
|
|
}
|
|
else if(eventName === 'connection_status') {
|
|
var mixerId = vuInfo[1];
|
|
var value = vuInfo[2];
|
|
|
|
// Connection Quality Change
|
|
var connectionClass = 'green';
|
|
if (value < 7) {
|
|
connectionClass = 'yellow';
|
|
}
|
|
if (value < 4) {
|
|
connectionClass = 'red';
|
|
}
|
|
var mixerPair = getMixerByTrackId(mixerId);
|
|
|
|
var clientId = mixerPair ? mixerPair.master.client_id : null;
|
|
|
|
if(clientId) {
|
|
var $connection = $('.session-track[client-id="' + clientId + '"] .track-connection');
|
|
if($connection.length == 0) {
|
|
logger.debug("connection status: looking for clientId: " + clientId + ", mixer: " + mixerId)
|
|
}
|
|
else {
|
|
$connection.removeClass('red yellow green grey');
|
|
$connection.addClass(connectionClass);
|
|
}
|
|
}
|
|
}
|
|
else if(eventName === 'add' || eventName === 'remove') {
|
|
// TODO - _renderSession. Note I get streams of these in
|
|
// sequence, so have Nat fix, or buffer/spam protect
|
|
// Note - this is already handled from websocket events.
|
|
// However, there may be use of these two events to avoid
|
|
// the polling-style check for when a mixer has been added
|
|
// to match a participant track.
|
|
}
|
|
else {
|
|
logger.debug('non-vu event: ' + JSON.stringify(vuInfo));
|
|
}
|
|
}
|
|
}
|
|
|
|
function handleBackingTrackSelectedCallback(result) {
|
|
|
|
$openBackingTrack.removeClass('disabled');
|
|
|
|
if(!sessionModel.inSession()) {
|
|
return;
|
|
}
|
|
|
|
if(result.success) {
|
|
logger.debug("backing track selected: " + result.file);
|
|
|
|
rest.openBackingTrack({id: context.JK.CurrentSessionModel.id(), backing_track_path: result.file})
|
|
.done(function(response) {
|
|
var openResult = context.jamClient.SessionOpenBackingTrackFile(result.file, false);
|
|
|
|
if(openResult) {
|
|
sessionModel.setBackingTrack(result.file);
|
|
}
|
|
else {
|
|
app.notify({
|
|
"title": "Couldn't Open Backing Track",
|
|
"text": "Is the file a valid audio file?",
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
closeBackingTrack();
|
|
}
|
|
|
|
})
|
|
.fail(function(jqXHR) {
|
|
app.notifyServerError(jqXHR, "Unable to Open Backing Track For Playback");
|
|
})
|
|
}
|
|
else {
|
|
logger.debug("no backing track selected")
|
|
}
|
|
}
|
|
function deleteSession(evt) {
|
|
var sessionId = $(evt.currentTarget).attr("action-id");
|
|
if (sessionId) {
|
|
$.ajax({
|
|
type: "DELETE",
|
|
url: "/api/sessions/" + sessionId,
|
|
success: function(response) {
|
|
context.location="/client#/home";
|
|
},
|
|
error: function(jqXHR, textStatus, errorThrown) {
|
|
logger.error("Error deleting session " + sessionId);
|
|
}
|
|
});
|
|
}
|
|
}
|
|
|
|
function deleteTrack(evt) {
|
|
var trackId = $(evt.currentTarget).attr("track-id");
|
|
sessionModel.deleteTrack(sessionId, trackId);
|
|
}
|
|
|
|
function _toggleVisualMuteControl($control, mute) {
|
|
if (mute) {
|
|
$control.removeClass('enabled');
|
|
$control.addClass('muted');
|
|
} else {
|
|
$control.removeClass('muted');
|
|
$control.addClass('enabled');
|
|
}
|
|
}
|
|
|
|
function _toggleAudioMute(mixerId, muting, mode) {
|
|
fillTrackVolumeObject(mixerId, mode);
|
|
context.trackVolumeObject.mute = muting;
|
|
|
|
if(mode === undefined) {
|
|
mode = sessionModel.getMixMode();
|
|
}
|
|
context.jamClient.SessionSetControlState(mixerId, mode);
|
|
}
|
|
|
|
function _toggleAudioLoop(mixerId, loop, mode) {
|
|
fillTrackVolumeObject(mixerId, mode);
|
|
context.trackVolumeObject.loop = loop;
|
|
|
|
if(mode === undefined) {
|
|
mode = sessionModel.getMixMode();
|
|
}
|
|
context.jamClient.SessionSetControlState(mixerId, mode);
|
|
}
|
|
|
|
function showMuteDropdowns($control) {
|
|
$control.btOn();
|
|
}
|
|
|
|
function toggleMute(evt) {
|
|
var $control = $(evt.currentTarget);
|
|
var muting = ($control.hasClass('enabled'));
|
|
var mixerIds = $control.attr('mixer-id').split(',');
|
|
|
|
// track icons have a special mute behavior
|
|
if($control.is('.track-icon-mute')) {
|
|
|
|
var mediaControlsDisabled = $control.data('media-controls-disabled');
|
|
if(mediaControlsDisabled) {
|
|
var mediaTrackOpener = $control.data('media-track-opener');
|
|
context.JK.prodBubble($control, 'media-controls-disabled', {mediaTrackOpener:mediaTrackOpener}, {positions:['bottom'], offsetParent: $control.closest('.screen')})
|
|
return false;
|
|
}
|
|
|
|
if(sessionModel.areControlsLockedForJamTrackRecording() && $control.closest('.session-track').data('track_data').type == 'jam_track') {
|
|
context.JK.prodBubble($control, 'jamtrack-controls-disabled', {}, {positions:['bottom'], offsetParent: $control.closest('.screen')})
|
|
return false;
|
|
}
|
|
|
|
if($control.data('showHelpAboutMediaMixers')) {
|
|
if(!sessionModel.hasShownAudioMediaMixerHelp()) {
|
|
context.JK.prodBubble($control, 'volume-media-mixers', {}, {positions:['bottom'], offsetParent: $control.closest('.screen')})
|
|
sessionModel.markShownAudioMediaMixerHelp()
|
|
}
|
|
}
|
|
|
|
|
|
|
|
$.each(mixerIds, function(i,v) {
|
|
var mixerId = v;
|
|
// behavior: if this is the user's track in personal mode, then we mute the track globally
|
|
// otherwise, for any other track (user+master mode, or remote track in any mode)
|
|
// we just mute the type of track for that mode
|
|
var mixer = $control.data('mixer');
|
|
var oppositeMixer = $control.data('opposite-mixer')
|
|
|
|
if(mixer && oppositeMixer && (muteBothMasterAndPersonalGroups.indexOf(mixer.group_id) > -1)) {
|
|
// this is the user's local track; mute both personal and master mode
|
|
logger.debug("muting both master and personal mode mixers")
|
|
_toggleAudioMute(mixer.id, muting, mixer.mode)
|
|
_toggleAudioMute(oppositeMixer.id, muting, oppositeMixer.mode)
|
|
}
|
|
else {
|
|
logger.debug("muting mixer")
|
|
_toggleAudioMute(mixer.id, muting, mixer.mode)
|
|
}
|
|
|
|
// look for all controls matching this mixer id (important when it's personal mode + UserMusicInputGroup)
|
|
var $controls = $screen.find('.track-icon-mute[mixer-id="' + mixerId +'"]');
|
|
_toggleVisualMuteControl($controls, muting);
|
|
});
|
|
}
|
|
else {
|
|
// this path is taken for voice chat, but maybe others eventually
|
|
$.each(mixerIds, function(i,v) {
|
|
var mixerId = v;
|
|
|
|
var mixer = $control.data('mixer');
|
|
var oppositeMixer = $control.data('opposite-mixer')
|
|
|
|
if(mixer && oppositeMixer && mixer.group_id == ChannelGroupIds.AudioInputChatGroup) {
|
|
_toggleAudioMute(mixer.id, muting, mixer.mode);
|
|
_toggleAudioMute(oppositeMixer.id, muting, oppositeMixer.mode);
|
|
}
|
|
else {
|
|
_toggleAudioMute(mixerId, muting);
|
|
}
|
|
});
|
|
_toggleVisualMuteControl($control, muting);
|
|
}
|
|
|
|
|
|
}
|
|
|
|
function fillTrackVolumeObject(mixerId, mode, broadcast) {
|
|
_updateMixers();
|
|
var _broadcast = true;
|
|
if (broadcast !== undefined) {
|
|
_broadcast = broadcast;
|
|
}
|
|
var mixer = getMixer(mixerId, mode);
|
|
context.trackVolumeObject.clientID = mixer.client_id;
|
|
context.trackVolumeObject.broadcast = _broadcast;
|
|
context.trackVolumeObject.master = mixer.master;
|
|
context.trackVolumeObject.monitor = mixer.monitor;
|
|
context.trackVolumeObject.mute = mixer.mute;
|
|
context.trackVolumeObject.name = mixer.name;
|
|
context.trackVolumeObject.record = mixer.record;
|
|
context.trackVolumeObject.volL = mixer.volume_left;
|
|
|
|
// today we treat all tracks as mono, but this is required to make a stereo track happy
|
|
//context.trackVolumeObject.volR = mixer.volume_right;
|
|
context.trackVolumeObject.volR = mixer.volume_left;
|
|
|
|
context.trackVolumeObject.loop = mixer.loop;
|
|
// trackVolumeObject doesn't have a place for range min/max
|
|
currentMixerRangeMin = mixer.range_low;
|
|
currentMixerRangeMax = mixer.range_high;
|
|
return mixer;
|
|
}
|
|
|
|
// Given a mixer's min/max and current value, return it as
|
|
// a percent from 0-100. Return an integer.
|
|
function percentFromMixerValue(min, max, value) {
|
|
try {
|
|
var range = Math.abs(max - min);
|
|
var magnitude = value - min;
|
|
var percent = Math.round(100*(magnitude/range));
|
|
return percent;
|
|
} catch(err) {
|
|
return 0;
|
|
}
|
|
}
|
|
|
|
// Given a mixer's min/max and a percent value, return it as
|
|
// the mixer's value. Returns an integer.
|
|
function percentToMixerValue(min, max, percent) {
|
|
var range = Math.abs(max - min);
|
|
var multiplier = percent/100; // Change 85 into 0.85
|
|
var value = min + (multiplier * range);
|
|
// Protect against percents < 0 and > 100
|
|
if (value < min) {
|
|
value = min;
|
|
}
|
|
if (value > max) {
|
|
value = max;
|
|
}
|
|
return value;
|
|
}
|
|
|
|
// Given a volume percent (0-100), set the underlying
|
|
// audio volume level of the passed mixerId to the correct
|
|
// value.
|
|
function setMixerVolume(mixer, volumePercent) {
|
|
// The context.trackVolumeObject has been filled with the mixer values
|
|
// that go with mixerId, and the range of that mixer
|
|
// has been set in currentMixerRangeMin-Max.
|
|
// All that needs doing is to translate the incoming percent
|
|
// into the real value ont the sliders range. Set Left/Right
|
|
// volumes on trackVolumeObject, and call SetControlState to stick.
|
|
var sliderValue = percentToMixerValue(
|
|
currentMixerRangeMin, currentMixerRangeMax, volumePercent);
|
|
context.trackVolumeObject.volL = context.JK.FaderHelpers.convertPercentToAudioTaper(volumePercent);
|
|
context.trackVolumeObject.volR = context.JK.FaderHelpers.convertPercentToAudioTaper(volumePercent);
|
|
// Special case for L2M mix:
|
|
if (mixer.id === '__L2M__') {
|
|
logger.debug("L2M volumePercent=" + volumePercent);
|
|
var dbValue = context.JK.FaderHelpers.convertLinearToDb(volumePercent);
|
|
context.jamClient.SessionSetMasterLocalMix(dbValue);
|
|
// context.jamClient.SessionSetMasterLocalMix(sliderValue);
|
|
} else {
|
|
//var isMediaMixer = mediaTrackGroups.indexOf(mixer.group_id) > -1;
|
|
|
|
// if this is a media file (Metronome, JamTrack, BackingTrack, RecordedTrack), then we only modify master
|
|
//var mixMode = isMediaMixer ? MIX_MODES.MASTER : sessionModel.getMixMode();
|
|
context.jamClient.SessionSetControlState(mixer.id, mixer.mode);
|
|
}
|
|
}
|
|
|
|
function bailOut() {
|
|
promptLeave = false;
|
|
context.window.location = '/client#/home';
|
|
}
|
|
|
|
function sessionLeave(evt) {
|
|
evt.preventDefault();
|
|
rateSession();
|
|
bailOut();
|
|
return false;
|
|
}
|
|
|
|
function rateSession() {
|
|
if (rateSessionDialog === null) {
|
|
rateSessionDialog = new context.JK.RateSessionDialog(context.JK.app);
|
|
rateSessionDialog.initialize();
|
|
}
|
|
rateSessionDialog.showDialog();
|
|
return true;
|
|
}
|
|
|
|
function sessionResync(evt) {
|
|
evt.preventDefault();
|
|
var response = context.jamClient.SessionAudioResync();
|
|
if (response) {
|
|
app.notify({
|
|
"title": "Error",
|
|
"text": response,
|
|
"icon_url": "/assets/content/icon_alert_big.png"});
|
|
}
|
|
return false;
|
|
}
|
|
|
|
function sessionWebCam(e) {
|
|
e.preventDefault();
|
|
if(webcamViewer.isVideoShared()) {
|
|
$('#session-webcam').removeClass("selected")
|
|
} else {
|
|
$('#session-webcam').addClass("selected")
|
|
}
|
|
|
|
webcamViewer.toggleWebcam()
|
|
return false;
|
|
}
|
|
|
|
// http://stackoverflow.com/questions/2604450/how-to-create-a-jquery-clock-timer
|
|
function updateRecordingTimer() {
|
|
|
|
function pretty_time_string(num) {
|
|
return ( num < 10 ? "0" : "" ) + num;
|
|
}
|
|
|
|
var total_seconds = (new Date - startTimeDate) / 1000;
|
|
|
|
var hours = Math.floor(total_seconds / 3600);
|
|
total_seconds = total_seconds % 3600;
|
|
|
|
var minutes = Math.floor(total_seconds / 60);
|
|
total_seconds = total_seconds % 60;
|
|
|
|
var seconds = Math.floor(total_seconds);
|
|
|
|
hours = pretty_time_string(hours);
|
|
minutes = pretty_time_string(minutes);
|
|
seconds = pretty_time_string(seconds);
|
|
|
|
if(hours > 0) {
|
|
var currentTimeString = hours + ":" + minutes + ":" + seconds;
|
|
}
|
|
else {
|
|
var currentTimeString = minutes + ":" + seconds;
|
|
}
|
|
|
|
$recordingTimer.text('(' + currentTimeString + ')');
|
|
}
|
|
|
|
function displayStartingRecording() {
|
|
|
|
$('#recording-start-stop').addClass('currently-recording');
|
|
|
|
$('#recording-status').text("Starting...")
|
|
}
|
|
|
|
function displayStartedRecording() {
|
|
// the commented out code reflects dropping the counter as your recording to save space
|
|
startTimeDate = new Date;
|
|
//$recordingTimer = $("<span id='recording-timer'>(0:00)</span>");
|
|
var $recordingStatus = $('<span></span>').append("<span>Stop Recording</span>")//.append($recordingTimer);
|
|
$('#recording-status').html( $recordingStatus );
|
|
//recordingTimerInterval = setInterval(updateRecordingTimer, 1000);
|
|
}
|
|
|
|
function displayStoppingRecording(data) {
|
|
if(data) {
|
|
if(data.reason) {
|
|
app.notify({
|
|
"title": "Recording Aborted",
|
|
"text": "The recording was aborted due to '" + data.reason + '"',
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
}
|
|
}
|
|
|
|
$('#recording-status').text("Stopping...");
|
|
}
|
|
|
|
function displayDoneRecording() {
|
|
if(recordingTimerInterval) {
|
|
clearInterval(recordingTimerInterval);
|
|
recordingTimerInterval = null;
|
|
startTimeDate = null;
|
|
}
|
|
|
|
$recordingTimer = null;
|
|
|
|
$('#recording-start-stop').removeClass('currently-recording');
|
|
$('#recording-status').text("Make Recording");
|
|
}
|
|
|
|
function lockControlsforJamTrackRecording() {
|
|
sessionModel.lockControlsforJamTrackRecording();
|
|
}
|
|
|
|
function unlockControlsforJamTrackRecording() {
|
|
sessionModel.unlockControlsforJamTrackRecording();
|
|
}
|
|
|
|
function displayWhoCreated(clientId) {
|
|
if(app.clientId != clientId) { // don't show to creator
|
|
sessionModel.findUserBy({clientId: clientId})
|
|
.done(function(user) {
|
|
app.notify({
|
|
"title": "Recording Started",
|
|
"text": user.name + " started a recording",
|
|
"icon_url": context.JK.resolveAvatarUrl(user.photo_url)
|
|
});
|
|
})
|
|
.fail(function() {
|
|
app.notify({
|
|
"title": "Recording Started",
|
|
"text": "Oops! Can't determine who started this recording",
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
})
|
|
}
|
|
}
|
|
|
|
function promptUserToSave(recordingId, timeline) {
|
|
rest.getRecording( {id: recordingId} )
|
|
.done(function(recording) {
|
|
if(timeline) {
|
|
recording.timeline = timeline.global
|
|
}
|
|
recordingFinishedDialog.setRecording(recording);
|
|
app.layout.showDialog('recordingFinished').one(EVENTS.DIALOG_CLOSED, function(e, data) {
|
|
if(data.result && data.result.keep){
|
|
context.JK.prodBubble($recordingManagerViewer, 'file-manager-poke', {}, {positions:['top', 'left', 'right', 'bottom'], offsetParent: $screen.parent()})
|
|
}
|
|
})
|
|
})
|
|
.fail(app.ajaxError);
|
|
}
|
|
|
|
function checkPendingMetronome() {
|
|
|
|
if(sessionModel.jamTracks() !== null || sessionModel.recordedJamTracks() !== null) {
|
|
// ignore all metronome events when jamtracks are open, because backend opens metronome mixer to play jamtrack tap-ins
|
|
logger.debug("ignore checkPendingMetronome because JamTrack is open")
|
|
return;
|
|
}
|
|
|
|
//logger.debug("checkPendingMetronome", sessionModel.isMetronomeOpen(), getMetronomeMasterMixers().length)
|
|
if(sessionModel.isMetronomeOpen() && getMetronomeMasterMixers().length == 0) {
|
|
var pendingMetronome = $($templatePendingMetronome.html())
|
|
|
|
// hide the open options
|
|
otherAudioFilled();
|
|
// fill out the 'media' name
|
|
$('.session-recordings .session-recording-name').text('Metronome')
|
|
// and hide the close button
|
|
$closePlaybackRecording.hide();
|
|
|
|
// avoid double addition of pending metronome
|
|
if($otherAudioContainer.find('.pending-metronome').length === 0) {
|
|
$otherAudioContainer.append(pendingMetronome)
|
|
}
|
|
|
|
}
|
|
else {
|
|
$('.session-recordings .pending-metronome').remove()
|
|
}
|
|
|
|
}
|
|
|
|
function openBackingTrack(e) {
|
|
|
|
if($openBackingTrack.is('.disabled')) {
|
|
logger.debug("backing track dialog already open")
|
|
return false;
|
|
}
|
|
|
|
// just ignore the click if they are currently recording for now
|
|
if(sessionModel.recordingModel.isRecording()) {
|
|
app.notify({
|
|
"title": "Currently Recording",
|
|
"text": "You can't open a backing track while creating a recording.",
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
return false;
|
|
}
|
|
|
|
$openBackingTrack.addClass('disabled');
|
|
context.jamClient.ShowSelectBackingTrackDialog("window.JK.HandleBackingTrackSelectedCallback");
|
|
return false;
|
|
}
|
|
|
|
function openJamTrack(e) {
|
|
// just ignore the click if they are currently recording for now
|
|
if(sessionModel.recordingModel.isRecording()) {
|
|
app.notify({
|
|
"title": "Currently Recording",
|
|
"text": "You can't open a jam track while creating a recording.",
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
return false;
|
|
}
|
|
|
|
app.layout.showDialog('open-jam-track-dialog').one(EVENTS.DIALOG_CLOSED, function(e, data) {
|
|
|
|
// once the dialog is closed, see if the user has a jamtrack selected
|
|
if(!data.canceled && data.result.jamTrack) {
|
|
|
|
var jamTrack = data.result.jamTrack;
|
|
|
|
$('.session-recording-name').text('');
|
|
|
|
// hide 'other audio' placeholder
|
|
otherAudioFilled();
|
|
|
|
if(downloadJamTrack) {
|
|
// if there was one showing before somehow, destroy it.
|
|
logger.warn("destroying existing JamTrack")
|
|
downloadJamTrack.root.remove();
|
|
downloadJamTrack.destroy();
|
|
downloadJamTrack = null
|
|
}
|
|
|
|
downloadJamTrack = new context.JK.DownloadJamTrack(app, jamTrack, 'large');
|
|
|
|
// the widget indicates when it gets to any transition; we can hide it once it reaches completion
|
|
$(downloadJamTrack).on(EVENTS.JAMTRACK_DOWNLOADER_STATE_CHANGED, function(e, data) {
|
|
|
|
if(data.state == downloadJamTrack.states.synchronized) {
|
|
logger.debug("jamtrack synchronized; hide widget and show tracks")
|
|
downloadJamTrack.root.remove();
|
|
downloadJamTrack.destroy();
|
|
downloadJamTrack = null;
|
|
|
|
// XXX: test with this removed; it should be unnecessary
|
|
context.jamClient.JamTrackStopPlay();
|
|
|
|
var sampleRate = context.jamClient.GetSampleRate()
|
|
var sampleRateForFilename = sampleRate == 48 ? '48' : '44'
|
|
var fqId = jamTrack.id + '-' + sampleRateForFilename
|
|
|
|
if(jamTrack.jmep)
|
|
{
|
|
logger.debug("setting jmep data")
|
|
|
|
context.jamClient.JamTrackLoadJmep(fqId, jamTrack.jmep)
|
|
}
|
|
else {
|
|
logger.debug("no jmep data for jamtrack")
|
|
}
|
|
|
|
// JamTrackPlay means 'load'
|
|
var result = context.jamClient.JamTrackPlay(fqId);
|
|
|
|
if(!result) {
|
|
app.notify(
|
|
{ title: "JamTrack Can Not Open",
|
|
text: "Unable to open your JamTrack. Please contact support@jamkazam.com"
|
|
}, null, true);
|
|
} else {
|
|
playJamTrack(jamTrack.id);
|
|
}
|
|
}
|
|
})
|
|
|
|
// show it on the page
|
|
$otherAudioContainer.append(downloadJamTrack.root)
|
|
|
|
// kick off the download JamTrack process
|
|
downloadJamTrack.init()
|
|
}
|
|
else {
|
|
logger.debug("OpenJamTrack dialog closed with no selection; ignoring", data)
|
|
}
|
|
})
|
|
|
|
return false;
|
|
}
|
|
|
|
function playJamTrack(jamTrackId) {
|
|
var participantCnt=sessionModel.participants().length
|
|
rest.playJamTrack(jamTrackId)
|
|
.done(function() {
|
|
app.refreshUser();
|
|
})
|
|
context.stats.write('web.jamtrack.open', {
|
|
value: 1,
|
|
session_size: participantCnt,
|
|
user_id: context.JK.currentUserId,
|
|
user_name: context.JK.currentUserName
|
|
})
|
|
}// function
|
|
|
|
function openBackingTrackFile(e) {
|
|
// just ignore the click if they are currently recording for now
|
|
if(sessionModel.recordingModel.isRecording()) {
|
|
app.notify({
|
|
"title": "Currently Recording",
|
|
"text": "You can't open a backing track while creating a recording.",
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
return false;
|
|
} else {
|
|
context.jamClient.openBackingTrackFile(sessionModel.backing_track)
|
|
context.stats.write('web.backingtrack.open', {
|
|
value: 1,
|
|
session_size: participantCnt,
|
|
user_id: context.JK.currentUserId,
|
|
user_name: context.JK.currentUserName
|
|
})
|
|
//context.JK.CurrentSessionModel.refreshCurrentSession(true);
|
|
}
|
|
return false;
|
|
}
|
|
|
|
function unstableNTPClocks() {
|
|
var unstable = []
|
|
|
|
// This should be handled in the below loop, actually:
|
|
var myState = context.jamClient.getMyNetworkState()
|
|
|
|
var map;
|
|
|
|
$.each(sessionModel.participants(), function(index, participant) {
|
|
|
|
var isSelf = participant.client_id == app.clientId;
|
|
|
|
if(isSelf) {
|
|
var isStable = myState.ntp_stable;
|
|
}
|
|
else {
|
|
map = context.jamClient.getPeerState(participant.client_id)
|
|
var isStable = map.ntp_stable;
|
|
}
|
|
|
|
if (!isStable) {
|
|
var name = participant.user.name;
|
|
if (!(name)) {
|
|
name = participant.user.first_name + ' ' + participant.user.last_name;
|
|
}
|
|
|
|
if (isSelf) {
|
|
name += " (this computer)"
|
|
}
|
|
|
|
unstable.push(name)
|
|
}
|
|
});
|
|
|
|
return unstable
|
|
}
|
|
|
|
function openMetronome(e) {
|
|
// just ignore the click if they are currently recording for now
|
|
|
|
if(sessionModel.recordingModel.isRecording()) {
|
|
app.notify({
|
|
"title": "Currently Recording",
|
|
"text": "You can't open a metronome while creating a recording.",
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
return false;
|
|
} else {
|
|
var unstable = unstableNTPClocks()
|
|
if (sessionModel.participants().length > 1 && unstable.length > 0) {
|
|
var names = unstable.join(", ")
|
|
logger.debug("Unstable clocks: ", names, unstable)
|
|
context.JK.Banner.showAlert("Couldn't open metronome", context._.template($('#template-help-metronome-unstable').html(), {names: names}, { variable: 'data' }));
|
|
} else {
|
|
var data = {
|
|
value: 1,
|
|
session_size: sessionModel.participants().length,
|
|
user_id: context.JK.currentUserId,
|
|
user_name: context.JK.currentUserName }
|
|
context.stats.write('web.metronome.open', data)
|
|
var bpm = 120;
|
|
logger.debug("opening the metronome with bpm: " + bpm + ", sound:" + metroSound)
|
|
rest.openMetronome({id: sessionModel.id()})
|
|
.done(function() {
|
|
context.jamClient.SessionStopPlay();
|
|
context.jamClient.SessionOpenMetronome(bpm, metroSound, 1, 0);
|
|
})
|
|
.fail(function(jqXHR) {
|
|
logger.debug(jqXHR, jqXHR)
|
|
app.notify({
|
|
"title": "Couldn't open metronome",
|
|
"text": "Couldn't inform the server to open metronome. msg=" + jqXHR.responseText,
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
});
|
|
}
|
|
|
|
|
|
return false;
|
|
}
|
|
}
|
|
|
|
|
|
function openRecording(e) {
|
|
// just ignore the click if they are currently recording for now
|
|
if(sessionModel.recordingModel.isRecording()) {
|
|
app.notify({
|
|
"title": "Currently Recording",
|
|
"text": "You can't open a recording while creating a recording.",
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
return false;
|
|
}
|
|
|
|
if(!localRecordingsDialog.isShowing()) {
|
|
app.layout.showDialog('localRecordings');
|
|
}
|
|
|
|
return false;
|
|
}
|
|
|
|
function closeOpenMedia() {
|
|
if(sessionModel.recordedTracks()) {
|
|
closeRecording();
|
|
}
|
|
else if(sessionModel.jamTracks() || downloadJamTrack) {
|
|
closeJamTrack();
|
|
}
|
|
else if(sessionModel.backingTrack() && sessionModel.backingTrack().path) {
|
|
closeBackingTrack();
|
|
}
|
|
else if(getMetronomeMasterMixers().length > 0) {
|
|
closeMetronomeTrack();
|
|
}
|
|
else {
|
|
logger.error("don't know how to close open media");
|
|
}
|
|
return false;
|
|
}
|
|
|
|
function closeBackingTrack() {
|
|
|
|
if (sessionModel.recordingModel.isRecording()) {
|
|
logger.debug("can't close backing track while recording")
|
|
return false;
|
|
}
|
|
|
|
rest.closeBackingTrack({id: sessionModel.id()})
|
|
.done(function() {
|
|
//sessionModel.refreshCurrentSession(true);
|
|
})
|
|
.fail(function(jqXHR) {
|
|
app.notify({
|
|
"title": "Couldn't Close Backing Track",
|
|
"text": "Couldn't inform the server to close Backing Track. msg=" + jqXHR.responseText,
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
});
|
|
|
|
// '' closes all open backing tracks
|
|
context.jamClient.SessionStopPlay();
|
|
context.jamClient.SessionCloseBackingTrackFile('');
|
|
|
|
return false;
|
|
}
|
|
|
|
function closeJamTrack() {
|
|
logger.debug("closing jam track");
|
|
|
|
if (sessionModel.recordingModel.isRecording()) {
|
|
logger.debug("can't close jamtrack while recording")
|
|
app.notify({title: 'Can Not Close JamTrack', text: 'A JamTrack can not be closed while recording.'})
|
|
return false;
|
|
}
|
|
|
|
if(!sessionModel.selfOpenedJamTracks()) {
|
|
logger.debug("can't close jamtrack if not the opener")
|
|
app.notify({title: 'Can Not Close JamTrack', text: 'Only the person who opened the JamTrack can close it.'})
|
|
return false;
|
|
}
|
|
|
|
if(!sessionModel.selfOpenedJamTracks()) {
|
|
logger.debug("can't close jamtrack if not the opener")
|
|
return false;
|
|
}
|
|
|
|
if(downloadJamTrack) {
|
|
logger.debug("closing DownloadJamTrack widget")
|
|
downloadJamTrack.root.remove();
|
|
downloadJamTrack.destroy();
|
|
downloadJamTrack = null;
|
|
|
|
// this is necessary because a syncing widget means no jamtracks are loaded;
|
|
// so removing the widget will not cause a backend media change event (and so renderSession will not be called, ultimately)
|
|
resetOtherAudioContent();
|
|
}
|
|
|
|
rest.closeJamTrack({id: sessionModel.id()})
|
|
.done(function() {
|
|
sessionModel.refreshCurrentSession(true);
|
|
})
|
|
.fail(function(jqXHR) {
|
|
app.notify({
|
|
"title": "Couldn't Close JamTrack",
|
|
"text": "Couldn't inform the server to close JamTrack. msg=" + jqXHR.responseText,
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
});
|
|
|
|
context.jamClient.JamTrackStopPlay();
|
|
|
|
return false;
|
|
}
|
|
|
|
function closeMetronomeTrack() {
|
|
rest.closeMetronome({id: sessionModel.id()})
|
|
.done(function() {
|
|
context.jamClient.SessionCloseMetronome();
|
|
sessionModel.refreshCurrentSession(true);
|
|
})
|
|
.fail(function(jqXHR) {
|
|
app.notify({
|
|
"title": "Couldn't Close MetronomeTrack",
|
|
"text": "Couldn't inform the server to close MetronomeTrack. msg=" + jqXHR.responseText,
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
});
|
|
return false;
|
|
}
|
|
|
|
function closeRecording() {
|
|
logger.debug("closing recording");
|
|
|
|
rest.stopPlayClaimedRecording({id: sessionModel.id(), claimed_recording_id: sessionModel.getCurrentSession().claimed_recording.id})
|
|
.done(function(response) {
|
|
//sessionModel.refreshCurrentSession(true);
|
|
// update session info
|
|
context.JK.CurrentSessionModel.updateSession(response);
|
|
})
|
|
.fail(function(jqXHR) {
|
|
app.notify({
|
|
"title": "Couldn't Stop Recording Playback",
|
|
"text": "Couldn't inform the server to stop playback. msg=" + jqXHR.responseText,
|
|
"icon_url": "/assets/content/icon_alert_big.png"
|
|
});
|
|
});
|
|
|
|
context.jamClient.CloseRecording();
|
|
|
|
return false;
|
|
}
|
|
|
|
function onPause(e, data) {
|
|
|
|
// if a JamTrack is open, and the user hits 'pause' or 'stop', we need to automatically stop the recording
|
|
if(sessionModel.jamTracks() && sessionModel.recordingModel.isRecording()) {
|
|
logger.debug("preemptive jamtrack stop")
|
|
startStopRecording();
|
|
}
|
|
|
|
if(!data.endReached) {
|
|
logger.debug("calling jamClient.SessionPausePlay. endReached:", data.endReached);
|
|
context.jamClient.SessionPausePlay();
|
|
}
|
|
}
|
|
|
|
function onStop(e, data) {
|
|
|
|
// if a JamTrack is open, and the user hits 'pause' or 'stop', we need to automatically stop the recording
|
|
if(sessionModel.jamTracks() && sessionModel.recordingModel.isRecording()) {
|
|
logger.debug("preemptive jamtrack stop")
|
|
startStopRecording();
|
|
}
|
|
|
|
if(!data.endReached) {
|
|
logger.debug("calling jamClient.SessionStopPlay. endReached:", data.endReached);
|
|
context.jamClient.SessionStopPlay();
|
|
}
|
|
}
|
|
|
|
function onPlay(e, data) {
|
|
logger.debug("calling jamClient.SessionStartPlay");
|
|
context.jamClient.SessionStartPlay(data.playbackMode);
|
|
}
|
|
|
|
function onChangePlayPosition(e, data){
|
|
|
|
var seek = data.positionMs;
|
|
|
|
if(data.playbackMonitorMode == context.JK.PLAYBACK_MONITOR_MODE.JAMTRACK) {
|
|
// if positionMs == 0, then seek it back to whatever the earliest play start is to catch all the prelude
|
|
|
|
if(seek == 0) {
|
|
var duration = context.jamClient.SessionGetJamTracksPlayDurationMs();
|
|
seek = duration.start;
|
|
}
|
|
}
|
|
|
|
logger.debug("calling jamClient.SessionTrackSeekMs(" + seek + ")");
|
|
|
|
if(data.playbackMonitorMode == context.JK.PLAYBACK_MONITOR_MODE.JAMTRACK) {
|
|
// this doesn't ever show anything, because of blocking nature of the seek call
|
|
//var $mediaSeeking = $screen.find('.media-seeking')
|
|
//$mediaSeeking.attr('data-mode', 'SEEKING')
|
|
context.jamClient.SessionJamTrackSeekMs(seek);
|
|
//$mediaSeeking.attr('data-mode', '')
|
|
}
|
|
else {
|
|
context.jamClient.SessionTrackSeekMs(seek);
|
|
}
|
|
}
|
|
|
|
function startStopRecording() {
|
|
|
|
// check first if a jamtrack is loaded, and playing; if so, tell user to stop the play
|
|
/**if(sessionModel.jamTracks() && context.jamClient.isSessionTrackPlaying()) {
|
|
app.notify(
|
|
{ title: "Can't Recording a Play JamTrack",
|
|
text: "Stop the JamTrack before trying to recording." },
|
|
null,
|
|
true);
|
|
|
|
return;
|
|
}*/
|
|
|
|
if(sessionModel.recordingModel.isRecording()) {
|
|
sessionModel.recordingModel.stopRecording();
|
|
}
|
|
else {
|
|
sessionModel.recordingModel.startRecording();
|
|
}
|
|
}
|
|
|
|
function inviteMusicians() {
|
|
friendInput = inviteMusiciansUtil.inviteSessionUpdate('#update-session-invite-musicians',
|
|
sessionId);
|
|
inviteMusiciansUtil.loadFriends();
|
|
$(friendInput).show();
|
|
}
|
|
|
|
function setFormFromMetronome() {
|
|
$("select.metro-tempo").val(metroTempo)
|
|
$("select.metro-sound").val(metroSound)
|
|
}
|
|
|
|
function setMetronomePlaybackMode() {
|
|
$metronomePlaybackSelect.metronomeSetPlaybackMode(metroCricket ? 'cricket' : 'self')
|
|
}
|
|
|
|
function setMetronomeFromForm() {
|
|
var tempo = $("select.metro-tempo:visible option:selected").val()
|
|
var sound = $("select.metro-sound:visible option:selected").val()
|
|
|
|
var t = parseInt(tempo)
|
|
var s
|
|
if (tempo==NaN || tempo==0 || tempo==null) {
|
|
t = 120
|
|
}
|
|
|
|
if (sound==null || typeof(sound)=='undefined' || sound=="") {
|
|
s = "Beep"
|
|
} else {
|
|
s = sound
|
|
}
|
|
|
|
logger.debug("Setting tempo and sound:", t, s)
|
|
metroTempo = t
|
|
metroSound = s
|
|
context.jamClient.SessionSetMetronome(t, s, 1, 0);
|
|
}
|
|
|
|
function onMetronomeChanged(e, data) {
|
|
setMetronomeFromForm()
|
|
}
|
|
|
|
function metronomePlaybackModeChanged(e, data) {
|
|
|
|
var mode = data.playbackMode; // will be either 'self' or 'cricket'
|
|
|
|
logger.debug("setting metronome playback mode: ", mode)
|
|
var isCricket = mode == 'cricket';
|
|
context.jamClient.setMetronomeCricketTestState(isCricket);
|
|
}
|
|
|
|
function onMixerModeChanged(e, data) {
|
|
$mixModeDropdown.easyDropDown('select', data.mode, true);
|
|
setTimeout(renderSession, 1);
|
|
}
|
|
|
|
function onUserChangeMixMode(e) {
|
|
var mode = $mixModeDropdown.val() == "master" ? MIX_MODES.MASTER : MIX_MODES.PERSONAL;
|
|
|
|
context.jamClient.SetMixerMode(mode)
|
|
|
|
modUtils.shouldShow(NAMED_MESSAGES.MASTER_VS_PERSONAL_MIX).done(function(shouldShow) {
|
|
if(shouldShow) {
|
|
var modeChangeHtml = $($templateMixerModeChange.html());
|
|
context.JK.Banner.show({title: 'Master vs. Personal Mix', text: modeChangeHtml, no_show: NAMED_MESSAGES.MASTER_VS_PERSONAL_MIX});
|
|
}
|
|
})
|
|
|
|
return true;
|
|
}
|
|
|
|
function showFTUEWhenNoInputs( ) {
|
|
//app.afterFtue = function() { window.location.reload };
|
|
//app.layout.startNewFtue();
|
|
window.location = '/client#/account/audio'
|
|
}
|
|
|
|
function events() {
|
|
$('#session-leave').on('click', sessionLeave);
|
|
$('#session-resync').on('click', sessionResync);
|
|
$('#session-webcam').on('click', sessionWebCam);
|
|
$('#session-contents').on("click", '[action="delete"]', deleteSession);
|
|
$tracksHolder.on('click', 'div[control="mute"]', toggleMute);
|
|
$('#recording-start-stop').on('click', startStopRecording);
|
|
$('#open-a-recording').on('click', openRecording);
|
|
$('#open-a-jamtrack').on('click', openJamTrack);
|
|
$openBackingTrack.on('click', openBackingTrack);
|
|
$('#open-a-metronome').on('click', openMetronome);
|
|
$('#session-invite-musicians').on('click', inviteMusicians);
|
|
$('#session-invite-musicians2').on('click', inviteMusicians);
|
|
$('#track-settings').click(function() {
|
|
|
|
if(gearUtils.isNoInputProfile()) {
|
|
// show FTUE
|
|
// showFTUEWhenNoInputs();
|
|
app.notify({title:'Settings Disabled', text:'You can not alter any settings for the System Default playback device.'})
|
|
return false;
|
|
}
|
|
else {
|
|
configureTrackDialog.refresh();
|
|
configureTrackDialog.showVoiceChatPanel(true);
|
|
configureTrackDialog.showMusicAudioPanel(true);
|
|
}
|
|
});
|
|
|
|
$openFtue.click(function() {
|
|
showFTUEWhenNoInputs();
|
|
return false;
|
|
})
|
|
|
|
$closePlaybackRecording.on('click', closeOpenMedia);
|
|
$(playbackControls)
|
|
.on('pause', onPause)
|
|
.on('stop', onStop)
|
|
.on('play', onPlay)
|
|
.on('change-position', onChangePlayPosition);
|
|
$(friendInput).focus(function() { $(this).val(''); })
|
|
$(document).on(EVENTS.MIXER_MODE_CHANGED, onMixerModeChanged)
|
|
$mixModeDropdown.change(onUserChangeMixMode)
|
|
$(document).on("change", ".metronome-select", onMetronomeChanged)
|
|
$metronomePlaybackSelect.metronomePlaybackMode().on(EVENTS.METRONOME_PLAYBACK_MODE_SELECTED, metronomePlaybackModeChanged)
|
|
context.JK.helpBubble($metronomePlaybackHelp, 'metromone-playback-modes', {} , {offsetParent: $screen, width:'400px'});
|
|
$(document).on('layout_resized', function() {
|
|
resizeFluid();
|
|
});
|
|
}
|
|
|
|
this.initialize = function(localRecordingsDialogInstance, recordingFinishedDialogInstance, friendSelectorDialog) {
|
|
inviteMusiciansUtil = new JK.InviteMusiciansUtil(JK.app);
|
|
inviteMusiciansUtil.initialize(friendSelectorDialog);
|
|
localRecordingsDialog = localRecordingsDialogInstance;
|
|
recordingFinishedDialog = recordingFinishedDialogInstance;
|
|
context.jamClient.SetVURefreshRate(150);
|
|
context.jamClient.RegisterVolChangeCallBack("JK.HandleVolumeChangeCallback");
|
|
playbackControls = new context.JK.PlaybackControls($('.session-recordings .recording-controls'));
|
|
context.jamClient.setMetronomeOpenCallback("JK.HandleMetronomeCallback")
|
|
|
|
var screenBindings = {
|
|
'beforeShow': beforeShow,
|
|
'afterShow': afterShow,
|
|
'beforeHide': beforeHide,
|
|
'beforeLeave' : beforeLeave,
|
|
'beforeDisconnect' : beforeDisconnect,
|
|
};
|
|
app.bindScreen('session', screenBindings);
|
|
|
|
$recordingManagerViewer = $('#recording-manager-viewer');
|
|
$screen = $('#session-screen');
|
|
$mixModeDropdown = $screen.find('select.monitor-mode')
|
|
$templateMixerModeChange = $('#template-mixer-mode-change');
|
|
$otherAudioContainer = $('#session-recordedtracks-container');
|
|
$myTracksNoTracks = $('#session-mytracks-notracks')
|
|
$openFtue = $screen.find('.open-ftue-no-tracks')
|
|
$myTracksContainer = $('#session-mytracks-container')
|
|
$liveTracksContainer = $('#session-livetracks-container');
|
|
$closePlaybackRecording = $('#close-playback-recording')
|
|
$openBackingTrack = $('#open-a-backingtrack');
|
|
$metronomePlaybackSelect = $('#metronome-playback-select')
|
|
$metronomePlaybackHelp = $('#metronome-playback-help')
|
|
$templatePendingMetronome = $('#template-pending-metronome');
|
|
$myTracks = $screen.find('.session-mytracks');
|
|
$liveTracks = $screen.find('.session-livetracks');
|
|
$audioTracks = $screen.find('.session-recordings');
|
|
$fluidTracks = $screen.find('.session-fluidtracks');
|
|
$voiceChat = $screen.find('#voice-chat');
|
|
$tracksHolder = $screen.find('#tracks')
|
|
if(gon.global.video_available && gon.global.video_available!="none") {
|
|
webcamViewer.init($(".webcam-container"))
|
|
webcamViewer.setVideoOff()
|
|
}
|
|
events();
|
|
|
|
|
|
// make sure no previous plays are still going on by accident
|
|
context.jamClient.SessionStopPlay();
|
|
if(context.jamClient.SessionRemoveAllPlayTracks) {
|
|
// upgrade guard
|
|
context.jamClient.SessionRemoveAllPlayTracks();
|
|
}
|
|
};
|
|
|
|
|
|
this.tracks = tracks;
|
|
|
|
this.getCurrentSession = function() {
|
|
return sessionModel.getCurrentSession();
|
|
};
|
|
|
|
this.refreshCurrentSession = function(force) {
|
|
sessionModel.refreshCurrentSession(force);
|
|
};
|
|
|
|
this.setPromptLeave = function(_promptLeave) {
|
|
promptLeave = _promptLeave;
|
|
}
|
|
|
|
this.onPlaybackStateChange = function(change_type){
|
|
// if it's play_stop or play_start, poke the playControls
|
|
|
|
if(change_type == 'play_start') {
|
|
playbackControls.onPlayStartEvent();
|
|
}
|
|
else if(change_type == 'play_stop'){
|
|
playbackControls.onPlayStopEvent();
|
|
}
|
|
else if(change_type == 'play_pause'){
|
|
playbackControls.onPlayPauseEvent();
|
|
}
|
|
}
|
|
|
|
context.JK.HandleVolumeChangeCallback = handleVolumeChangeCallback;
|
|
context.JK.HandleMetronomeCallback = handleMetronomeCallback;
|
|
context.JK.HandleBridgeCallback = handleBridgeCallback;
|
|
context.JK.HandleBackingTrackSelectedCallback = handleBackingTrackSelectedCallback;
|
|
};
|
|
|
|
})(window,jQuery); |