// jam-cloud/web/app/assets/javascripts/configureTrackDialog.js
(function (context, $) {
    "use strict";

    context.JK = context.JK || {};

    /**
     * Controller for the "Configure Tracks" dialog: two tabbed panels
     * ("music audio" and "voice chat"), each with its own instruction text.
     *
     * @param {Object} app - host application object (not read here; retained
     *     for interface compatibility with the other dialog controllers).
     * @returns {Object} this controller, exposing initialize().
     */
    context.JK.ConfigureTracksDialog = function (app) {
        // Cached jQuery handles into the dialog DOM; populated by initialize().
        var $dialog = null;
        var $instructions = null;
        var $musicAudioTab = null;
        var $musicAudioTabSelector = null;
        var $voiceChatTab = null;
        var $voiceChatTabSelector = null;

        // The audio guidance text is currently identical on every platform, so
        // it is defined once and shared. The per-OS map is kept because
        // setInstructions() looks the text up by the OS string reported by the
        // native client (an unknown OS string yields undefined, as before).
        var AUDIO_INSTRUCTIONS = "Choose the audio device you would like to use for this session. If needed, use arrow buttons to assign audio inputs " +
            "to your tracks, to indicate what instrument you are playing on each track, and to assign audio outputs for listening. " +
            "If you want to use a new audio device you have not tested/certified for latency using JamKazam, click the Add New Audio " +
            "Gear button to test that device.";
        var configure_audio_instructions = {
            "Win32": AUDIO_INSTRUCTIONS,
            "MacOSX": AUDIO_INSTRUCTIONS,
            "Unix": AUDIO_INSTRUCTIONS
        };
        var configure_voice_instructions = "If you are using a microphone to capture your instrumental or vocal audio, you can simply use that mic " +
            "for both music and chat. Otherwise, choose a device to use for voice chat, and use arrow buttons to " +
            "select an input on that device.";

        /**
         * Replace the dialog's instruction text for the given tab type.
         * @param {string} type - 'audio' or 'voice'.
         * @throws {Error} if type is neither 'audio' nor 'voice'.
         */
        function setInstructions(type) {
            if (type === 'audio') {
                // Keyed by the OS string from the native client
                // (e.g. "Win32", "MacOSX", "Unix").
                var os = context.jamClient.GetOSAsString();
                $instructions.html(configure_audio_instructions[os]);
            } else if (type === 'voice') {
                $instructions.html(configure_voice_instructions);
            } else {
                // Fixed: throw a real Error (with a stack trace) rather than a
                // bare string, which is uncatchable by `instanceof Error` checks.
                throw new Error("unknown type in setInstructions(" + type + ')');
            }
        }

        /**
         * Show one tab panel and mark its selector as active, hiding the other.
         * @param {string} type - 'voice' selects the voice-chat tab; any other
         *     value selects the music-audio tab.
         */
        function activateTab(type) {
            if (type === 'voice') {
                $musicAudioTab.hide();
                $voiceChatTab.show();
                $musicAudioTabSelector.removeClass('selected');
                $voiceChatTabSelector.addClass('selected');
            } else {
                $musicAudioTab.show();
                $voiceChatTab.hide();
                $musicAudioTabSelector.addClass('selected');
                $voiceChatTabSelector.removeClass('selected');
            }
        }

        // NOTE(review): validation stub — always passes. Flesh out if the
        // voice-chat panel ever gains settings that must be checked before
        // the user may switch away from it.
        function validateVoiceChatSettings() {
            return true;
        }

        // NOTE(review): validation stub — always passes (see above).
        function validateAudioSettings() {
            return true;
        }

        /** Display the music-audio panel with its instruction text. */
        function showMusicAudioPanel() {
            setInstructions('audio');
            activateTab('audio');
        }

        /** Display the voice-chat panel with its instruction text. */
        function showVoiceChatPanel() {
            // Fixed: this was an empty stub, so clicking the voice-chat tab
            // selector did nothing. Mirrors showMusicAudioPanel().
            setInstructions('voice');
            activateTab('voice');
        }

        /** Wire up tab-selector click handlers. */
        function events() {
            $musicAudioTabSelector.click(function () {
                // Leaving the voice-chat panel: validate its settings first.
                if (validateVoiceChatSettings()) {
                    showMusicAudioPanel();
                }
            });
            $voiceChatTabSelector.click(function () {
                // Leaving the music-audio panel: validate its settings first.
                if (validateAudioSettings()) {
                    showVoiceChatPanel();
                }
            });
        }

        /**
         * Look up the dialog's DOM elements and attach event handlers.
         * Must be called after the dialog markup is in the document.
         */
        function initialize() {
            $dialog = $('#configure-tracks-dialog');
            $instructions = $dialog.find('.instructions span');
            $musicAudioTab = $dialog.find('div[tab-id="music-audio"]');
            $musicAudioTabSelector = $dialog.find('.tab-configure-audio');
            $voiceChatTab = $dialog.find('div[tab-id="voice-chat"]');
            $voiceChatTabSelector = $dialog.find('.tab-configure-voice');
            events();
        }

        this.initialize = initialize;
        return this;
    };
})(window, jQuery);