Skip to content

Commit

Permalink
Fix mute button bugs and visualizer bug
Browse files Browse the repository at this point in the history
  • Loading branch information
tylermaginnis committed Sep 15, 2024
1 parent c3abe49 commit 5fbd719
Showing 1 changed file with 91 additions and 67 deletions.
158 changes: 91 additions & 67 deletions index.html
Original file line number Diff line number Diff line change
Expand Up @@ -135,11 +135,13 @@
border-radius: 8px;
display: flex;
justify-content: space-around;
flex-wrap: wrap; /* Add this line to allow wrapping */
}
.visual-section {
    flex: 1;
    text-align: center;
}
/* Visualizer canvas scales with its container. */
#visualizationCanvas {
    width: 100%; /* Make canvas responsive */
    height: auto; /* Maintain aspect ratio */
}

.visual-section h3 {
margin-bottom: 10px;
color: #66ff66;
Expand Down Expand Up @@ -563,6 +565,9 @@ <h3>Strategies</h3>

// Function to create a rich organ sound with careful layering
function createOrganOscillator(frequency, startTime, duration, isChord = false, chord = []) {
if ((isChord && isChordsMuted) || (!isChord && isMelodyMuted)) {
return; // Skip creating oscillator if muted
}
const osc1 = audioContext.createOscillator();
const gainNode = audioContext.createGain();
const panner = audioContext.createStereoPanner();
Expand Down Expand Up @@ -608,7 +613,6 @@ <h3>Strategies</h3>

oscillators.push({ osc1, gainNode });
logToConsole(`Organ ${isChord ? "Chord" : "Melody"} played: <span class="frequency">${frequency.toFixed(2)}Hz</span>`, 'info');

// Update Visual Feedback
if (isChord) {
currentChordDiv.innerHTML = chord.map(freq => freq.toFixed(2) + 'Hz').join(', ');
Expand Down Expand Up @@ -783,8 +787,9 @@ <h3>Strategies</h3>
startPlaying();
playButton.disabled = true;
stopButton.disabled = false;
startVisualization(); // Start visualization when play button is clicked
});

stopButton.addEventListener('click', () => {
stopPlaying();
playButton.disabled = false;
Expand Down Expand Up @@ -813,6 +818,12 @@ <h3>Strategies</h3>
}
}, { once: true });

// Keep the visualizer canvas sized to its container whenever the window
// changes size, enforcing a 4:1 width-to-height aspect ratio.
window.addEventListener('resize', () => {
    const vizCanvas = document.getElementById('visualizationCanvas');
    const containerWidth = vizCanvas.parentElement.clientWidth;
    vizCanvas.width = containerWidth;
    vizCanvas.height = containerWidth / 4; // Maintain aspect ratio
});

// Canvas and 2D context used by the waveform visualizer.
// `analyser` is assigned later, inside initializeAudio().
const canvas = document.getElementById('visualizationCanvas');
const canvasContext = canvas.getContext('2d');
let analyser;
Expand Down Expand Up @@ -852,30 +863,45 @@ <h3>Strategies</h3>
}

// Lazily create the shared AudioContext and its processing graph
// (reverb -> master gain -> destination, with an analyser tapped off the
// master gain), size the visualizer canvas, and start the draw loop.
// Safe to call repeatedly: later calls only resume a suspended context.
function initializeAudio() {
    if (!audioContext) {
        audioContext = new (window.AudioContext || window.webkitAudioContext)();

        // Create master gain node
        masterGain = audioContext.createGain();
        masterGain.gain.setValueAtTime(masterVolume, audioContext.currentTime);

        // Create and connect reverb
        reverbNode = createReverb(audioContext);
        reverbNode.connect(masterGain);
        masterGain.connect(audioContext.destination);

        // Create analyser node
        analyser = audioContext.createAnalyser();
        analyser.fftSize = 2048; // Set FFT size for better resolution
        masterGain.connect(analyser);

        // Adjust canvas size dynamically to fill its container (4:1 aspect)
        const canvas = document.getElementById('visualizationCanvas');
        canvas.width = canvas.parentElement.clientWidth;
        canvas.height = canvas.width / 4; // Maintain aspect ratio

        // Clear the canvas to prevent it from going black
        canvasContext.clearRect(0, 0, canvas.width, canvas.height);

        // Start visualization immediately after initialization.
        // NOTE(review): falls back to dark colors when no #lightMode element
        // exists — confirm this matches the theme toggle elsewhere in the file.
        const lightModeElement = document.getElementById('lightMode');
        const lightMode = lightModeElement ? lightModeElement.checked : false;
        updateVisualizerColors(lightMode);
        startVisualization();

        logToConsole('AudioContext initialized with reverb and analyser.', 'info');
    } else if (audioContext.state === 'suspended') {
        // Browsers suspend AudioContexts created before a user gesture;
        // resume on the next interaction.
        audioContext.resume().then(() => {
            logToConsole('AudioContext resumed after user interaction.', 'info');
        });
    }
}


// Ensure drawVisualization is called continuously
function startVisualization() {
Expand All @@ -889,14 +915,12 @@ <h3>Strategies</h3>
const darkModeToggle = document.getElementById('darkModeToggle');


// Load user preferences once the DOM is ready.
document.addEventListener('DOMContentLoaded', () => {
    // localStorage stores strings, so compare against 'true' explicitly.
    const darkMode = localStorage.getItem('darkMode') === 'true';
    document.body.classList.toggle('light-mode', !darkMode);
    // Keep the toggle control in sync with the stored preference.
    darkModeToggle.checked = darkMode;
    // Load other preferences...
});

// Save user preferences
darkModeToggle.addEventListener('change', () => {
Expand All @@ -909,48 +933,48 @@ <h3>Strategies</h3>

// Set the visualizer's background (fillStyle) and waveform (strokeStyle)
// colors on the shared canvas context to match the current theme.
function updateVisualizerColors(darkMode) {
    if (darkMode) {
        canvasContext.fillStyle = 'rgb(17, 17, 17)'; // Dark background
        canvasContext.strokeStyle = 'rgb(0, 255, 0)'; // Green lines
    } else {
        canvasContext.fillStyle = 'rgb(240, 240, 240)'; // Light background
        canvasContext.strokeStyle = 'rgb(75, 0, 130)'; // Dark purple lines
    }
}

// Render one frame of the time-domain waveform onto the visualizer canvas,
// then schedule the next frame via requestAnimationFrame.
// No-ops until `analyser` has been created by initializeAudio().
function drawVisualization() {
    if (!analyser) return;
    requestAnimationFrame(drawVisualization);

    const bufferLength = analyser.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);
    analyser.getByteTimeDomainData(dataArray);

    // Repaint the background; fillStyle is set by updateVisualizerColors().
    canvasContext.fillRect(0, 0, canvas.width, canvas.height);

    canvasContext.lineWidth = 2;
    canvasContext.beginPath();

    const sliceWidth = canvas.width * 1.0 / bufferLength;
    let x = 0;

    for (let i = 0; i < bufferLength; i++) {
        // Time-domain samples are unsigned bytes centered on 128;
        // normalize so silence maps to the vertical middle of the canvas.
        const v = dataArray[i] / 128.0;
        const y = v * canvas.height / 2;

        if (i === 0) {
            canvasContext.moveTo(x, y);
        } else {
            canvasContext.lineTo(x, y);
        }

        x += sliceWidth;
    }

    canvasContext.lineTo(canvas.width, canvas.height / 2);
    canvasContext.stroke();
}

// Fallback: start the visualization on the first click anywhere on the page
// ({ once: true } removes this listener after it fires).
document.body.addEventListener('click', startVisualization, { once: true });

Expand Down

0 comments on commit 5fbd719

Please sign in to comment.