aboutsummaryrefslogtreecommitdiff
path: root/xp/vco/scripts/app.js
blob: cec78be82943498862477fbec1de0fe251c93265 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
// fork getUserMedia for multiple browser versions, for those
// that need prefixes

navigator.getUserMedia = (navigator.getUserMedia ||
						navigator.webkitGetUserMedia ||
						navigator.mozGetUserMedia ||
						navigator.msGetUserMedia);

// set up forked web audio context, for multiple browsers
// window. is needed otherwise Safari explodes

var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var source; // MediaStreamAudioSourceNode, assigned in the gUM success callback
var stream; // NOTE(review): never assigned here — the success callback shadows it with its own parameter

// grab the mute button to use below
// NOTE(review): `mute` is not referenced anywhere in this chunk — presumably used later in the file; verify

var mute = document.querySelector('.mute');

//set up the different audio nodes we will use for the app

// Analyser drives the oscilloscope drawing in visualize()
var analyser = audioCtx.createAnalyser();
analyser.minDecibels = -90;
analyser.maxDecibels = -10;
analyser.smoothingTimeConstant = 0.85; // heavy smoothing for a steadier trace

// Effect-chain nodes; created here, wired together in the gUM success callback
var distortion = audioCtx.createWaveShaper();
var gainNode = audioCtx.createGain();
var biquadFilter = audioCtx.createBiquadFilter();
var convolver = audioCtx.createConvolver();

// set up canvas context for visualizer

var canvas = document.querySelector('.visualizer');
var canvasCtx = canvas.getContext("2d");
var bar = document.getElementById("bar"); // level-meter element updated by visualize()
var intendedWidth = document.querySelector('.wrapper').clientWidth;

// Backing store is 2x the CSS width, then scaled down with CSS —
// presumably for sharper rendering on high-DPI displays; TODO confirm
canvas.setAttribute('width',intendedWidth*2);
canvas.style.width=intendedWidth+'px';


var drawVisual; // requestAnimationFrame id from visualize()'s draw loop

//main block for doing the audio recording

// Success handler: wire the microphone stream through the processing
// graph (source -> analyser -> distortion -> biquadFilter -> convolver
// -> gainNode -> speakers) and start the oscilloscope.
function handleStream(micStream) {
	source = audioCtx.createMediaStreamSource(micStream);
	source.connect(analyser);
	analyser.connect(distortion);
	distortion.connect(biquadFilter);
	biquadFilter.connect(convolver);
	convolver.connect(gainNode);
	gainNode.connect(audioCtx.destination);

	visualize();
}

// Error handler: log any getUserMedia failure (permission denied,
// no input device, etc.).
function handleError(err) {
	console.log('The following gUM error occurred: ' + err);
}

if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
	// Modern promise-based API — the legacy navigator.getUserMedia is
	// deprecated and removed from current browsers.
	console.log('getUserMedia supported.');
	navigator.mediaDevices.getUserMedia(
		// constraints - only audio needed for this app
		{ audio: true }
	).then(handleStream).catch(handleError);
} else if (navigator.getUserMedia) {
	// Legacy callback-based API, shimmed with vendor prefixes above.
	console.log('getUserMedia supported.');
	navigator.getUserMedia({ audio: true }, handleStream, handleError);
} else {
	console.log('getUserMedia not supported on your browser!');
}

// Draw a live oscilloscope of the mic signal onto the canvas and drive
// the #bar level meter. Uses the file-level `analyser`, `canvas`,
// `canvasCtx` and `bar`; stores the rAF id in `drawVisual` so the loop
// can be cancelled elsewhere. No parameters, no return value.
function visualize() {
	// Fix: WIDTH/HEIGHT were assigned without declaration, leaking
	// implicit globals onto window (and throwing under strict mode).
	var WIDTH = canvas.width;
	var HEIGHT = canvas.height;
	analyser.fftSize = 2048;
	// getByteTimeDomainData fills fftSize samples (time domain), so the
	// buffer is sized by fftSize, not frequencyBinCount.
	var bufferLength = analyser.fftSize;
	console.log(bufferLength);
	var dataArray = new Uint8Array(bufferLength);

	canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);

	var draw = function() {

		// Re-schedule ourselves; keep the id so the loop is cancellable.
		drawVisual = requestAnimationFrame(draw);

		analyser.getByteTimeDomainData(dataArray);

		canvasCtx.fillStyle = 'rgb(200, 200, 200)';
		canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);

		canvasCtx.lineWidth = 2;
		canvasCtx.strokeStyle = 'rgb(0, 0, 0)';

		canvasCtx.beginPath();

		var sliceWidth = WIDTH * 1.0 / bufferLength;
		var x = 0, s = 0;

		for (var i = 0; i < bufferLength; i++) {
			// Samples are unsigned bytes centred on 128; v in [0, 2).
			var v = dataArray[i] / 128.0;
			// t is the normalized deviation from silence; accumulate
			// its square for an RMS-style level estimate.
			var t = Math.abs(dataArray[i] - 128.) / 128.0;
			s += t * t;
			var y = v * HEIGHT / 2;

			if (i === 0) {
				canvasCtx.moveTo(x, y);
			} else {
				canvasCtx.lineTo(x, y);
			}

			x += sliceWidth;
		}
		// RMS level, inverted so a quiet signal yields a wide bar.
		s /= bufferLength;
		s = Math.sqrt(s);
		s = 1 - s;
		bar.style.width = 100 * s + '%';

		canvasCtx.lineTo(canvas.width, canvas.height / 2);
		canvasCtx.stroke();
	};

	draw();

}