"use strict"; // // Voicegardens front-end Javascript // var archiveUrl = window.location + "add-to-archive"; var canvas; var microphone; var frameRate = 60; var mouseInitialDrag = false; var newSoundJustRecorded = false; var playButton; var recordButton; var recorder; var recording; var recordingTimeout = 30000; // 30 seconds (in milliseconds) var screenX; var screenY; var shapes = []; var stopButton; var toScreenX; var toScreenY; var zoom = 1; var zoomSensitivity = 0.005; function record() { /** * Starting recording. **/ if (microphone.enabled) { setTimeout(recorder.record(recording), recordingTimeout); } } function stop() { /** * Stop recording a new recording. **/ if (recorder.recording) { recorder.stop(); newSoundJustRecorded = true; } } function play() { /** * Play the recording. **/ if (recording.isLoaded()) { recording.play(); } } function archive() { /** * Send the recording to the back-end. **/ var soundBlob = recording.getBlob(); var formData = new FormData(); var date = new Date(); var filename = date.getTime().toString() + ".wav"; formData.append("file", soundBlob, filename); var config = new Headers({ "Content-Type": "multipart/form-data" }); axios.post(archiveUrl, formData, config).catch(function(error) { console.log( "Upload failed!", "Received the following message:", error.response.data ); }); } function showArchive() { window.location.href = "/archive"; } function setupRecording() { /** * Setup logic for recording. **/ microphone = new p5.AudioIn(); microphone.start(); recorder = new p5.SoundRecorder(); recorder.setInput(microphone); recording = new p5.SoundFile(); recordButton = createButton("record"); recordButton.position(10, 5); recordButton.mousePressed(record); stopButton = createButton("stop"); stopButton.position(10, 40); stopButton.mousePressed(stop); playButton = createButton("play"); playButton.position(10, 75); playButton.mousePressed(play); playButton = createButton("archive"); playButton.position(10, 110); playButton.mousePressed(archive); playButton = createButton("view archive"); playButton.position(10, 145); playButton.mousePressed(showArchive); } function getSoundInfo() { /** * Retrieve sound information like pitch, amplitude, duration, etc. **/ amplitude = recording.getPeaks(); duration = recording.duration(); // pitch (frequency?) I think we can use fft.analyze() and then find the // highest value (0 -> 1024) that has a non-zero value this gives us the // highest frequency from the recording // https://p5js.org/reference/#/p5.FFT // https://p5js.org/reference/#/p5.FFT/analyze // nuance? // "I meant the amount of variation in the voice - i.e is it one single // monotone note or does it go up and down octaves or start soft and high and // become deep and guttural etc." // // How do to do this? Unsure ... } class GeneratedShape { constructor() { /** * Initialise the new shape. **/ this.w = 40; this.h = 40; this.x = random(windowWidth); this.y = random(windowHeight); this.xSpeed = 1.4; this.ySpeed = 1.6; this.xDirection = 1; this.yDirection = 1; this.synth = new p5.MonoSynth(); } collide(shapes) { /** * Detect if the shape collides with another shape. **/ if (shapes.length === 1) { return false; } for (var shape of shapes) { if (this === shape) { continue; } var collision = collideCircleCircle( this.x, this.y, this.w, shape.x, shape.y, shape.w ); if (collision === true) { return true; } } return false; } sound() { /** * Play a sound after a collision is detected. **/ var notes = ["G2", "C3", "G3"]; var duration = 0.8; var time = 0; var velocity = 0.5; var index = random(0, notes.length + 1); this.synth.play(notes[index], velocity, time, duration); } move() { /** * Move the shape around the canvas. **/ this.x = this.x + this.xSpeed * this.xDirection; this.y = this.y + this.ySpeed * this.yDirection; if (this.x > width - this.w || this.x < this.w) { this.xDirection *= -1; } if (this.y > height - this.h || this.y < this.h) { this.yDirection *= -1; } } display() { /** * Show the shape on the canvas. **/ // TODO: use getSoundInfo function to influence how shape is drawn ellipse(this.x, this.y, this.w, this.h); } } function setup() { /** * The p5.js initial setup function. **/ createCanvas(windowWidth, windowHeight); screenX = toScreenX = 0; screenY = toScreenY = 0; setupRecording(); frameRate(frameRate); fill("#F38630"); } function draw() { /** * The p5.js draw loop. **/ background("#69D2E7"); screenX = lerp(screenX, toScreenX, 0.1); screenY = lerp(screenY, toScreenY, 0.1); translate(screenX, screenY); scale(zoom); if (newSoundJustRecorded === true) { shapes.push(new GeneratedShape()); newSoundJustRecorded = false; } for (var shape of shapes) { shape.move(); shape.display(); if (shape.collide(shapes) === true) { shape.sound(); } } } function mouseWheel(event) { /** * Mouse wheel zoom handling. **/ zoom += zoomSensitivity * event.delta; } function mouseDragged() { /** * Mouse drag movement handling. **/ toScreenX += mouseX - pmouseX; toScreenY += mouseY - pmouseY; } function windowResized() { /** * Canvas re-draw handling. **/ resizeCanvas(windowWidth, windowHeight); }