diff --git a/index.html b/index.html
index 13ca6f3..ca5fef3 100644
--- a/index.html
+++ b/index.html
@@ -135,11 +135,13 @@
border-radius: 8px;
display: flex;
justify-content: space-around;
+ flex-wrap: wrap; /* Add this line to allow wrapping */
}
- .visual-section {
- flex: 1;
- text-align: center;
+ #visualizationCanvas {
+ width: 100%; /* Make canvas responsive */
+ height: auto; /* Maintain aspect ratio */
}
+
.visual-section h3 {
margin-bottom: 10px;
color: #66ff66;
@@ -563,6 +565,9 @@
Strategies
// Function to create a rich organ sound with careful layering
function createOrganOscillator(frequency, startTime, duration, isChord = false, chord = []) {
+ if ((isChord && isChordsMuted) || (!isChord && isMelodyMuted)) {
+ return; // Skip creating oscillator if muted
+ }
const osc1 = audioContext.createOscillator();
const gainNode = audioContext.createGain();
const panner = audioContext.createStereoPanner();
@@ -608,7 +613,6 @@ Strategies
oscillators.push({ osc1, gainNode });
logToConsole(`Organ ${isChord ? "Chord" : "Melody"} played: ${frequency.toFixed(2)}Hz`, 'info');
-
// Update Visual Feedback
if (isChord) {
currentChordDiv.innerHTML = chord.map(freq => freq.toFixed(2) + 'Hz').join(', ');
@@ -783,8 +787,9 @@ Strategies
startPlaying();
playButton.disabled = true;
stopButton.disabled = false;
+ startVisualization(); // Start visualization when play button is clicked
});
-
+
stopButton.addEventListener('click', () => {
stopPlaying();
playButton.disabled = false;
@@ -813,6 +818,12 @@ Strategies
}
}, { once: true });
+ window.addEventListener('resize', () => {
+ const canvas = document.getElementById('visualizationCanvas');
+ canvas.width = canvas.parentElement.clientWidth;
+        canvas.height = canvas.width / 4; // Fix a 4:1 aspect ratio
+ });
+
const canvas = document.getElementById('visualizationCanvas');
const canvasContext = canvas.getContext('2d');
let analyser;
@@ -852,30 +863,45 @@ Strategies
}
function initializeAudio() {
- if (!audioContext) {
- audioContext = new (window.AudioContext || window.webkitAudioContext)();
-
- // Create master gain node
- masterGain = audioContext.createGain();
- masterGain.gain.setValueAtTime(masterVolume, audioContext.currentTime);
-
- // Create and connect reverb
- reverbNode = createReverb(audioContext);
- reverbNode.connect(masterGain);
- masterGain.connect(audioContext.destination);
-
- // Create analyser node
- analyser = audioContext.createAnalyser();
- analyser.fftSize = 2048; // Set FFT size for better resolution
- masterGain.connect(analyser);
-
- logToConsole('AudioContext initialized with reverb and analyser.', 'info');
- } else if (audioContext.state === 'suspended') {
- audioContext.resume().then(() => {
- logToConsole('AudioContext resumed after user interaction.', 'info');
- });
- }
+ if (!audioContext) {
+ audioContext = new (window.AudioContext || window.webkitAudioContext)();
+
+ // Create master gain node
+ masterGain = audioContext.createGain();
+ masterGain.gain.setValueAtTime(masterVolume, audioContext.currentTime);
+
+ // Create and connect reverb
+ reverbNode = createReverb(audioContext);
+ reverbNode.connect(masterGain);
+ masterGain.connect(audioContext.destination);
+
+ // Create analyser node
+ analyser = audioContext.createAnalyser();
+ analyser.fftSize = 2048; // Set FFT size for better resolution
+ masterGain.connect(analyser);
+
+ // Adjust canvas size dynamically
+ const canvas = document.getElementById('visualizationCanvas');
+ canvas.width = canvas.parentElement.clientWidth;
+        canvas.height = canvas.width / 4; // Fix a 4:1 aspect ratio
+
+ // Clear the canvas to prevent it from going black
+ canvasContext.clearRect(0, 0, canvas.width, canvas.height);
+
+ // Start visualization immediately after initialization
+ const lightModeElement = document.getElementById('lightMode');
+ let lightMode = lightModeElement ? lightModeElement.checked : false;
+        updateVisualizerColors(!lightMode); // function expects darkMode, so invert the lightMode checkbox state
+ startVisualization();
+
+ logToConsole('AudioContext initialized with reverb and analyser.', 'info');
+ } else if (audioContext.state === 'suspended') {
+ audioContext.resume().then(() => {
+ logToConsole('AudioContext resumed after user interaction.', 'info');
+ });
}
+}
+
// Ensure drawVisualization is called continuously
function startVisualization() {
@@ -889,14 +915,12 @@ Strategies
const darkModeToggle = document.getElementById('darkModeToggle');
-// Load user preferences
-document.addEventListener('DOMContentLoaded', () => {
- const darkMode = localStorage.getItem('darkMode') === 'true';
- document.body.classList.toggle('light-mode', !darkMode);
- darkModeToggle.checked = darkMode;
- updateVisualizerColors(darkMode);
- // Load other preferences...
-});
+ // Load user preferences
+ document.addEventListener('DOMContentLoaded', () => {
+ const darkMode = localStorage.getItem('darkMode') === 'true';
+ document.body.classList.toggle('light-mode', !darkMode);
+ // Load other preferences...
+ });
// Save user preferences
darkModeToggle.addEventListener('change', () => {
@@ -909,48 +933,48 @@ Strategies
// Function to update visualizer colors based on mode
function updateVisualizerColors(darkMode) {
- if (darkMode) {
- canvasContext.fillStyle = 'rgb(17, 17, 17)'; // Dark background
- canvasContext.strokeStyle = 'rgb(0, 255, 0)'; // Green lines
- } else {
- canvasContext.fillStyle = 'rgb(240, 240, 240)'; // Light background
- canvasContext.strokeStyle = 'rgb(75, 0, 130)'; // Dark purple lines
+ if (darkMode) {
+ canvasContext.fillStyle = 'rgb(17, 17, 17)'; // Dark background
+ canvasContext.strokeStyle = 'rgb(0, 255, 0)'; // Green lines
+ } else {
+ canvasContext.fillStyle = 'rgb(240, 240, 240)'; // Light background
+ canvasContext.strokeStyle = 'rgb(75, 0, 130)'; // Dark purple lines
+ }
}
-}
-// Ensure drawVisualization is called continuously
-function drawVisualization() {
- if (!analyser) return;
- requestAnimationFrame(drawVisualization);
- const bufferLength = analyser.frequencyBinCount;
- const dataArray = new Uint8Array(bufferLength);
- analyser.getByteTimeDomainData(dataArray);
+ // Ensure drawVisualization is called continuously
+ function drawVisualization() {
+ if (!analyser) return;
+ requestAnimationFrame(drawVisualization);
+ const bufferLength = analyser.frequencyBinCount;
+ const dataArray = new Uint8Array(bufferLength);
+ analyser.getByteTimeDomainData(dataArray);
- canvasContext.fillRect(0, 0, canvas.width, canvas.height);
+ canvasContext.fillRect(0, 0, canvas.width, canvas.height);
- canvasContext.lineWidth = 2;
- canvasContext.beginPath();
+ canvasContext.lineWidth = 2;
+ canvasContext.beginPath();
+
+ const sliceWidth = canvas.width * 1.0 / bufferLength;
+ let x = 0;
- const sliceWidth = canvas.width * 1.0 / bufferLength;
- let x = 0;
+ for (let i = 0; i < bufferLength; i++) {
+ const v = dataArray[i] / 128.0;
+ const y = v * canvas.height / 2;
- for (let i = 0; i < bufferLength; i++) {
- const v = dataArray[i] / 128.0;
- const y = v * canvas.height / 2;
+ if (i === 0) {
+ canvasContext.moveTo(x, y);
+ } else {
+ canvasContext.lineTo(x, y);
+ }
- if (i === 0) {
- canvasContext.moveTo(x, y);
- } else {
- canvasContext.lineTo(x, y);
+ x += sliceWidth;
}
- x += sliceWidth;
+ canvasContext.lineTo(canvas.width, canvas.height / 2);
+ canvasContext.stroke();
}
- canvasContext.lineTo(canvas.width, canvas.height / 2);
- canvasContext.stroke();
-}
-
// Start visualization when audio context is initialized
document.body.addEventListener('click', startVisualization, { once: true });