Since starting at ITP, I have really enjoyed creating different musical instruments. In physical computing I built a theremin, and I was excited to attempt to recreate one in p5.js. This past week we were working with sound and video, so it felt like the right time to use webcam input to generate sound. My sketch produces an oscillating tone whose frequency changes as your hand gets closer to the webcam. Ultimately, the sketch uses brightness to change both the frequency and the background color.
As your hand gets closer to the webcam, its brightness increases and the frequency rises. If you completely cover the webcam, so that no light can pass, the tone stops.
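At its core, the sound part is just a scaling of the frame's average brightness into an oscillator frequency. Here is a rough, stripped-down sketch of that idea on its own, using mouseX as a stand-in for brightness (it assumes the p5.sound library is loaded; the full webcam version is below):

var osc;

function setup() {
  createCanvas(400, 400);
  // a simple sine oscillator (requires p5.sound)
  osc = new p5.Oscillator();
  osc.setType('sine');
  osc.start();
}

function draw() {
  // mouseX stands in for the average brightness of a webcam frame (0–255)
  var bright = map(mouseX, 0, width, 0, 255);
  background(bright);
  // brighter -> higher pitch; near zero the tone falls below the audible range
  osc.freq(bright * 3);
}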
To create this sketch, I worked from Shiffman's video on a brightness mirror, which reads the brightness of each individual pixel, and the sound oscillator example in the p5.js reference.
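For reference, the brightness-mirror idea works by looping over the video's pixel array and computing a brightness value for each pixel. A minimal version in that spirit (not Shiffman's exact code) might look like this:

var video;
var vScale = 16;

function setup() {
  createCanvas(640, 480);
  pixelDensity(1);
  video = createCapture(VIDEO);
  // a small video so each pixel can be drawn as a big rectangle
  video.size(width / vScale, height / vScale);
  video.hide();
}

function draw() {
  background(0);
  video.loadPixels();
  for (var y = 0; y < video.height; y++) {
    for (var x = 0; x < video.width; x++) {
      // flip x so the canvas behaves like a mirror
      var index = (video.width - x - 1 + y * video.width) * 4;
      var r = video.pixels[index + 0];
      var g = video.pixels[index + 1];
      var b = video.pixels[index + 2];
      var bright = (r + g + b) / 3;
      noStroke();
      fill(bright);
      rect(x * vScale, y * vScale, vScale, vScale);
    }
  }
}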
Here's the sketch.
And here is the code:
var video;
var vScale = 16;
var cols = 48;
var rows = 64;
var osc;
var fft;

function setup() {
  createCanvas(400, 400);
  pixelDensity(1);

  // capture webcam input at a low resolution so there are fewer pixels to average
  video = createCapture(VIDEO);
  //video.hide();
  video.size(cols, rows);

  // sine-wave oscillator plus an FFT so the waveform can be drawn
  osc = new p5.Oscillator();
  osc.setType('sine');
  fft = new p5.FFT();
  osc.start();
}

function draw() {
  var bright = 0;
  video.loadPixels();

  // average the brightness of every pixel in the (mirrored) video frame
  for (var y = 0; y < video.height; y++) {
    for (var x = 0; x < video.width; x++) {
      var index = (video.width - x - 1 + (y * video.width)) * 4;
      var r = video.pixels[index + 0];
      var g = video.pixels[index + 1];
      var b = video.pixels[index + 2];
      // skip pixels that haven't loaded yet
      if (r && g && b) {
        bright += (r + g + b) / 3;
      }
    }
  }
  bright /= (cols * rows);

  // brightness drives both the background color and the oscillator frequency
  background(bright / 2, bright, 255 * log(bright));
  osc.freq(bright * 3);

  // analyze the waveform and draw it across the canvas
  var waveform = fft.waveform();
  strokeWeight(3);
  beginShape();
  for (var i = 0; i < waveform.length; i += 100) {
    var w = map(i, 0, waveform.length, 0, width);
    var z = map(waveform[i], -1, 1, height, 0);
    vertex(w, z);
    ellipse(w, z, 20);
  }
  endShape();
}