|
|
|
"use strict";
|
|
|
|
|
|
|
|
//
|
|
|
|
// Voicegardens front-end Javascript
|
|
|
|
//
|
|
|
|
|
|
|
|
// Back-end endpoint that stores finished recordings, relative to the
// page the sketch is served from.
var archiveUrl = window.location + "add-to-archive";

// p5.Image drawn behind everything; loaded in preload().
var backgroundImage;

// NOTE(review): declared but never assigned or read in this file — confirm
// whether it is still needed.
var canvas;

// p5.AudioIn created in setupRecording(); input source for the recorder.
var microphone;

// Set true by stop(); consumed by draw() to spawn a new GeneratedShape.
var newSoundJustRecorded = false;

// UI buttons created in setupRecording().
var playButton;

var recordButton;

// p5.SoundRecorder / p5.SoundFile pair used to capture audio.
var recorder;

var recording;

var recordingTimeout = 30000; // 30 seconds (in milliseconds)

// Current on-screen position/size of the background image. draw() lerps
// these toward the to* targets each frame for smooth pan/zoom.
var screenH;

var screenW;

var screenX;

var screenY;

// Shapes generated from recordings; moved and drawn every frame.
var shapes = [];

var stopButton;

// Target position/size for the background image, updated by the
// mouseWheel (zoom) and mouseDragged (pan) handlers.
var toH;

var toW;

var toX;

var toY;

var zoom = 0.01; // zoom step per mouse tick
|
|
|
|
|
|
|
|
function record() {
  /**
   * Start recording from the microphone into `recording`, and schedule
   * an automatic stop() after recordingTimeout milliseconds so a
   * recording can never run longer than the cap.
   **/
  if (microphone.enabled) {
    recorder.record(recording);
    // The original passed recorder.record(...)'s return value to
    // setTimeout, which started recording immediately and scheduled
    // nothing. Scheduling stop() caps the length AND sets the
    // newSoundJustRecorded flag that draw() consumes.
    setTimeout(stop, recordingTimeout);
  }
}
|
|
|
|
|
|
|
|
function stop() {
  /**
   * Stop an in-progress recording and flag it so draw() spawns a
   * shape for it. No-op when nothing is being recorded.
   **/
  if (!recorder.recording) {
    return;
  }
  recorder.stop();
  newSoundJustRecorded = true;
}
|
|
|
|
|
|
|
|
function play() {
  /**
   * Play back the captured recording, if one has finished loading.
   **/
  if (!recording.isLoaded()) {
    return;
  }
  recording.play();
}
|
|
|
|
|
|
|
|
function archive() {
  /**
   * Upload the current recording to the back-end archive endpoint as a
   * multipart/form-data POST, naming the file by epoch-millisecond
   * timestamp.
   **/
  var soundBlob = recording.getBlob();

  var date = new Date();
  var filename = date.getTime().toString() + ".wav";

  var formData = new FormData();
  formData.append("file", soundBlob, filename);

  // Do NOT set Content-Type by hand: the browser/axios generate the
  // multipart header with the required boundary parameter for FormData
  // bodies. (The old code also passed a Headers instance where axios
  // expects a plain config object, so it had no effect anyway.)
  axios.post(archiveUrl, formData).catch(function(error) {
    // error.response is absent for network-level failures, so fall
    // back to the error message.
    var detail = error.response ? error.response.data : error.message;
    console.log("Upload failed!", "Received the following message:", detail);
  });
}
|
|
|
|
|
|
|
|
function setupRecording() {
  /**
   * Wire up audio capture and the four control buttons
   * (record / stop / play / archive).
   **/
  microphone = new p5.AudioIn();
  microphone.start();

  recorder = new p5.SoundRecorder();
  recorder.setInput(microphone);

  recording = new p5.SoundFile();

  recordButton = createButton("record");
  recordButton.position(10, 5);
  recordButton.mousePressed(record);

  stopButton = createButton("stop");
  stopButton.position(10, 40);
  stopButton.mousePressed(stop);

  playButton = createButton("play");
  playButton.position(10, 70);
  playButton.mousePressed(play);

  // Use a dedicated variable: the original reused playButton here,
  // clobbering the play button reference.
  var archiveButton = createButton("archive");
  archiveButton.position(10, 105);
  archiveButton.mousePressed(archive);
}
|
|
|
|
|
|
|
|
function getSoundInfo() {
  /**
   * Retrieve sound information (peak amplitudes, duration) from the
   * current recording.
   *
   * Returns an object { amplitude, duration }. The original assigned
   * undeclared globals, which throws under "use strict"; declaring
   * locals and returning them fixes that without changing callers
   * (the old return value was undefined and unused).
   **/
  var amplitude = recording.getPeaks();
  var duration = recording.duration();

  // pitch (frequency?) I think we can use fft.analyze() and then find the
  // highest value (0 -> 1024) that has a non-zero value this gives us the
  // highest frequency from the recording
  // https://p5js.org/reference/#/p5.FFT
  // https://p5js.org/reference/#/p5.FFT/analyze

  // nuance?
  // "I meant the amount of variation in the voice - i.e is it one single
  // monotone note or does it go up and down octaves or start soft and high and
  // become deep and guttural etc."
  //
  // How do to do this? Unsure ...

  return { amplitude: amplitude, duration: duration };
}
|
|
|
|
|
|
|
|
class GeneratedShape {
  /**
   * A bouncing circle spawned for each finished recording. It drifts
   * around the canvas, reflects off the edges, and plays a synth note
   * when it collides with another shape.
   **/
  constructor() {
    /**
     * Initialise the shape at a random canvas position.
     **/
    // Ellipse size; w doubles as the size passed to the collision test.
    this.w = 20;
    this.h = 20;

    // Random start position within the canvas.
    this.x = random(width);
    this.y = random(height);

    // Per-axis speed (pixels per frame) and direction (+1 / -1).
    this.xSpeed = 1.8;
    this.ySpeed = 1.8;
    this.xDirection = 1;
    this.yDirection = 1;

    // Each shape owns its own synth voice for collision sounds.
    this.synth = new p5.MonoSynth();
  }

  collide(shapes) {
    /**
     * Return true if this shape overlaps any OTHER shape in `shapes`.
     **/
    if (shapes.length === 1) {
      return false; // only this shape exists, nothing to hit
    }

    for (var shape of shapes) {
      if (this === shape) {
        continue; // never collide with ourselves
      }

      var collision = collideCircleCircle(
        this.x,
        this.y,
        this.w,
        shape.x,
        shape.y,
        shape.w
      );

      if (collision === true) {
        return true;
      }
    }

    return false;
  }

  sound() {
    /**
     * Play a random note after a collision is detected.
     **/
    var notes = ["G2", "C3", "G3"];
    var duration = 0.8;
    var time = 0;
    var velocity = 0.5;
    // p5's random(min, max) returns a float in [min, max), so floor it
    // to get a valid integer index. The original used notes.length + 1,
    // which produced fractional and out-of-range (undefined) indices.
    var index = Math.floor(random(0, notes.length));
    this.synth.play(notes[index], velocity, time, duration);
  }

  move() {
    /**
     * Advance the shape one step and bounce it off the canvas edges.
     **/
    this.x = this.x + this.xSpeed * this.xDirection;
    this.y = this.y + this.ySpeed * this.yDirection;

    // Reverse direction when a border is reached.
    if (this.x > width - this.w || this.x < this.w) {
      this.xDirection *= -1;
    }

    if (this.y > height - this.h || this.y < this.h) {
      this.yDirection *= -1;
    }
  }

  display() {
    /**
     * Show the shape on the canvas.
     **/
    // TODO: use getSoundInfo function to influence how shape is drawn
    ellipse(this.x, this.y, this.w, this.h);
  }
}
|
|
|
|
|
|
|
|
function preload() {
  /**
   * p5.js preload hook: fetch the background image before setup() runs.
   **/
  var imagePath = "../static/images/solarpunk.png";
  backgroundImage = loadImage(imagePath);
}
|
|
|
|
|
|
|
|
function setup() {
  /**
   * p5.js setup: create a full-window canvas and initialise the view
   * so the background image starts at natural size, centred on its
   * own midpoint, with the eased view already at its target.
   **/
  createCanvas(windowWidth, windowHeight);

  toW = backgroundImage.width;
  toH = backgroundImage.height;
  toX = toW / 2;
  toY = toH / 2;

  screenW = toW;
  screenH = toH;
  screenX = toX;
  screenY = toY;

  // setupRecording();
  // fill("red");
}
|
|
|
|
|
|
|
|
function draw() {
  /**
   * p5.js draw loop: ease the view toward its target, paint the
   * background, then update and render every shape.
   **/
  clear();
  noStroke();

  // Ease the current view 10% of the way toward the target each frame.
  screenX = lerp(screenX, toX, 0.1);
  screenY = lerp(screenY, toY, 0.1);
  screenW = lerp(screenW, toW, 0.1);
  screenH = lerp(screenH, toH, 0.1);

  // The background image is drawn centred on (screenX, screenY).
  image(
    backgroundImage,
    screenX - screenW / 2,
    screenY - screenH / 2,
    screenW,
    screenH
  );

  // Each freshly recorded sound spawns exactly one new shape.
  if (newSoundJustRecorded) {
    shapes.push(new GeneratedShape());
    newSoundJustRecorded = false;
  }

  for (var s of shapes) {
    s.move();
    s.display();

    if (s.collide(shapes)) {
      s.sound();
    }
  }
}
|
|
|
|
|
|
|
|
function mouseWheel(event) {
  /**
   * Zoom the view target toward (wheel up) or away from (wheel down)
   * the mouse pointer, one small step per wheel unit, clamped between
   * 1x and 5x the canvas width.
   **/
  var steps = -event.delta;

  if (steps > 0) {
    // Zoom in: shift the target toward the pointer, grow the target size.
    for (var i = 0; i < steps; i++) {
      if (toW > 5 * width) {
        return; // max zoom
      }
      toX -= zoom * (mouseX - toX);
      toY -= zoom * (mouseY - toY);
      toW *= zoom + 1;
      toH *= zoom + 1;
    }
  } else if (steps < 0) {
    // Zoom out: inverse of the zoom-in step, so in/out cancel exactly.
    for (var j = 0; j < -steps; j++) {
      if (toW < width) {
        return; // min zoom
      }
      toX += (zoom / (zoom + 1)) * (mouseX - toX);
      toY += (zoom / (zoom + 1)) * (mouseY - toY);
      toH /= zoom + 1;
      toW /= zoom + 1;
    }
  }
}
|
|
|
|
|
|
|
|
function mouseDragged(event) {
  /**
   * Pan the view target by the distance the mouse moved since the
   * previous frame.
   **/
  var dx = mouseX - pmouseX;
  var dy = mouseY - pmouseY;
  toX += dx;
  toY += dy;
}
|