An online landscape, built as a tool to explore the many aspects of the human voice. https://voicegardens.org
"use strict";
//
// Voicegardens front-end Javascript
//
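// This sketch assumes its dependencies are loaded globally by the host page
// via script tags: p5.js (with the p5.sound addon for recording and
// playback), the p5.collide2d addon (collidePolyPoly, collidePointPoly) and
// axios for HTTP requests to the archive back-end.
//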
// URL which exposes the archive saving API end-point
var archiveUrl = window.location + "add-to-archive";
var archiveListingUrl = window.location + "archive";
// The x,y coordinates of the center of the canvas
var centerX;
var centerY;
// The canvas frame rate, which controls how many times per second the draw
// function is called. So far as we have seen, this can affect performance:
// try 30 and also 60 to see the difference. Higher values may also give more
// fun animation effects. Named canvasFrameRate so that it does not shadow
// p5's own frameRate() function.
var canvasFrameRate = 30;
// Sound recording API objects
var microphone;
var recorder;
var recording;
// Boolean which is only true when the user stops the recording of a sound.
// This then triggers the generation of a shape based on that recording.
var newSoundJustRecorded = false;
// All user clickable buttons
var recordButton;
var stopButton;
var leafButton;
var aboutButton;
// The x,y offsets which track the current window view position. This matters
// because we use the `translate` function to offset the window view in
// relation to the canvas (users can drag their view across the
// "environment"), so we need to record where that offset currently is and
// where it is heading.
var screenX = 0;
var screenY = 0;
var toScreenX = 0;
var toScreenY = 0;
// All shapes generated
var shapes = [];
// Sound properties
var amplitude;
var duration;
// The background colour for the environment
var bgColour;
function record() {
/**
* Start recording a sound.
**/
userStartAudio();
if (microphone.enabled === true) {
recorder.record(recording);
}
}
function stop() {
/**
* Stop recording a new sound.
**/
if (recorder.recording === true) {
recorder.stop();
newSoundJustRecorded = true;
}
}
function sendToArchive() {
/**
* Send the sound to the back-end for archiving.
**/
var soundBlob = recording.getBlob();
var formData = new FormData();
var date = new Date();
var filename = date.getTime().toString() + ".wav";
formData.append("file", soundBlob, filename);
// axios expects a plain config object (not a Headers instance); the browser
// fills in the multipart boundary for FormData automatically.
var config = { headers: { "Content-Type": "multipart/form-data" } };
axios.post(archiveUrl, formData, config).catch(function(error) {
console.log(
"Upload failed!",
"Received the following message:",
error.response.data
);
});
}
function goToAbout() {
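/**
* Navigate to the about page.
**/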
window.location.href = "/about";
}
function gardenShapes() {
/*
* Pick a random recording from the archive and re-generate a shape for it
* back into the environment.
*/
axios
.get(archiveListingUrl)
.then(function(response) {
if (!response.data.length) return;
// random(n) is exclusive of n, so every index 0..length-1 can be chosen
let randomIndex = floor(random(response.data.length));
let url = response.data[randomIndex];
axios
.get(url, { responseType: "blob" })
.then(function(response) {
let data = new p5.File(response.data);
let sound = new p5.SoundFile(data, function() {
let amp = sound.getPeaks(1)[0] * 100;
let dur = sound.duration();
let shape = new GeneratedShape(sound, amp, dur);
shape.sound();
shapes.push(shape);
});
})
.catch(function(error) {
console.log(
"Retrieving single recording from archive failed!",
"Received the following message:",
error.response.data
);
});
})
.catch(function(error) {
console.log(
"Retrieving archive listing failed!",
"Received the following message:",
error.response.data
);
});
}
function setupRecording() {
/**
* Setup logic for sound recording.
**/
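// Route microphone input into the recorder; each new take is written into
// the shared `recording` p5.SoundFile.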
microphone = new p5.AudioIn();
microphone.start();
recorder = new p5.SoundRecorder();
recorder.setInput(microphone);
recording = new p5.SoundFile();
recordButton = createImg("../static/images/RECORD-COLOR.png");
recordButton.mousePressed(record);
recordButton.class("button record");
stopButton = createImg("../static/images/STOP-BW.png");
stopButton.mousePressed(stop);
stopButton.class("button stop");
leafButton = createImg("../static/images/GARDEN-BW.png");
leafButton.class("button leaf");
leafButton.mousePressed(gardenShapes);
aboutButton = createImg("../static/images/ABOUT-BW.png");
aboutButton.mousePressed(goToAbout);
aboutButton.class("button about");
}
class GeneratedShape {
constructor(soundRecorded, soundAmplitude, soundDuration) {
/**
* Initialise the new shape.
**/
// sound and properties passed into shape creation
this.soundRecorded = soundRecorded;
this.soundDuration = soundDuration;
// clamp erroneous negative amplitudes to at least 0
if (soundAmplitude < 0) this.soundAmplitude = 0.0;
else this.soundAmplitude = soundAmplitude;
// mouse hover awareness for sound playing
this.hovering = false;
// The opacity of the shape. This controls whether we can see the shape or
// not (transparency). It starts at zero as we want to fade the shapes in
// when they enter the environment
this.opacity = 0;
// the colour of the shape
this.colour = this.chooseColour();
// Acceleration x,y values which control at which speed the shape
// accelerates towards the intended x,y destination.
this.accelX = 0.0;
this.accelY = 0.0;
// The speed at which the shape 'springs' towards its final destination.
this.springing = random(0.0006, 0.0009);
// The speed at which the shape rocks back and forth when it is in the
// process of coming to a halt.
this.damping = 0.98;
// Value that controls the tightness or looseness of the curves between the
// x,y values of the shape. AFAIK, this value can go between -5 and +5.
// With +5 we have very sharp curves and edges.
this.organicConstant = 1.0;
// The x,y values which determine where the shape is currently. These are
// required in order to calculate where the shape is currently so that we
// can then go about moving it to the new destination.
this.startXs = [];
this.startYs = [];
// The x,y values which track the new position of the shape (and therefore
// update the `startXs` and `startYs`) as the shape moves about the
// environment
this.xs = [];
this.ys = [];
// vector listing of the above xs, ys so that we can pass these lists to
// the collidePolyPoly function for collision detection which expects
// vector objects, not plain x, y coordinates as in xs, ys.
this.vectors = [];
// Angles between xs and ys.
this.angles = [];
// Curve movement wobble frequencies.
this.frequencies = [];
// Random x,y values (only randomly chosen once, then fixed) which are used
// in the calculation of the curve drawing between the x,y vectors of the
// shape
this.randXs = [];
this.randYs = [];
// Number of edges of the shape
this.edges = this.soundAmplitude + 3;
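// (louder recordings therefore produce shapes with more vertices)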
// The distance between the xs and ys, influencing the size of the shape. The
// randXs, randYs also influence the shape size as they are added when
// drawing the space between xs and ys.
if (this.soundDuration > 10) this.soundDuration = this.soundDuration / 10;
this.radius = ceil(this.soundDuration * 40);
// The angle step between consecutive vertices, spacing them evenly around a
// circle of `radius`
this.angle = radians(360 / this.edges);
// The initial centre of the shape: a random point corrected for the current
// drag offset so that it lands within the visible view
this.centerX = random(windowWidth) - toScreenX;
this.centerY = random(windowHeight) - toScreenY;
// new destination for the shapes
this.destX = random(windowWidth);
this.destY = random(windowHeight);
// The x,y distances remaining between the shape and its destination. The
// destination could equally be calculated from the `mouseX` and `mouseY`
// values, for example; the shape would then follow the mouse.
this.deltaX = this.destX - this.centerX - toScreenX;
this.deltaY = this.destY - this.centerY - toScreenY;
// time management for timing when to make new random position
// movements
this.drawTimer = 0;
this.nextTick = random(1000, 9000);
this.tickTimer = 0;
this.initialise();
}
initialise() {
/**
* Initialise the shape values.
**/
for (let i = 0; i < this.edges; i++) {
this.startXs[i] = 0;
this.startYs[i] = 0;
this.xs[i] = 0;
this.ys[i] = 0;
this.vectors[i] = createVector(this.xs[i], this.ys[i]);
this.angles[i] = 0;
// these directly influence the size of the shape alongside the
// this.radius value
this.randXs[i] = this.soundDuration * random(-10, 40);
this.randYs[i] = this.soundDuration * random(-10, 40);
}
for (let i = 0; i < this.edges; i++) {
this.frequencies[i] = random(5, 12);
}
}
collide(shapes) {
/**
* Detect if the shape collides with another shape. Returns a tuple of type
* [bool, shape] where bool = if there was a collision and shape = the
* collided shape.
**/
if (shapes.length === 1) {
return [false, undefined];
}
for (let i = 0; i < shapes.length; i++) {
let shape = shapes[i];
if (this === shape) {
continue;
}
// don't detect if one shape is fully inside another
let interiorCollision = false;
var collision = collidePolyPoly(
this.vectors,
shape.vectors,
interiorCollision
);
if (collision === true) {
return [true, shape];
}
}
return [false, undefined];
}
sound() {
/**
* Play a sound after a collision is detected.
**/
if (this.soundRecorded.isLoaded() && !this.soundRecorded.isPlaying()) {
this.soundRecorded.play();
}
}
docolour() {
/**
* Draw colour and fade-in shape.
**/
if (this.opacity < 255) {
// shape should fade in, so increment the alpha value towards the 255 maximum
this.opacity = min(this.opacity + random(0, 3), 255);
this.colour.setAlpha(this.opacity);
}
fill(this.colour);
}
chooseColour() {
/**
* Choose a colour for the shape.
**/
let colourChoices = [
color("#4F6EE8"),
color("#626788"),
color("#334171"),
color("#1529C2"),
color("#A17AA3"),
color("#606CEB"),
color("#8A77D5"),
color("#EB4913"),
color("#FC6012"),
color("#D94C14"),
color("#F08A60"),
color("#F8988F"),
color("#6E4F47"),
color("#93E35B"),
color("#DE3F16"),
color("#D1611F"),
color("#C22F0A"),
color("#C97814"),
color("#EDA714"),
color("#D5894A"),
color("#448F54"),
color("#61C26F"),
color("#ACE9B2"),
color("#CC25B6"),
color("#D695F0"),
color("#C5C2F0"),
color("#CC3D25"),
color("#A3614E"),
color("#F0DBA9"),
color("#7C4531")
];
let index = floor(random(0, colourChoices.length));
let chosenColour = colourChoices[index];
// set shape opacity to 0 initially to enable fade-in
chosenColour.setAlpha(this.opacity);
return chosenColour;
}
curve() {
/**
* Curve the shape.
**/
curveTightness(this.organicConstant);
beginShape();
for (let i = 0; i < this.edges; i++) {
curveVertex(this.xs[i], this.ys[i]);
}
for (let i = 0; i < this.edges - 1; i++) {
curveVertex(this.xs[i], this.ys[i]);
}
endShape(CLOSE);
}
tick() {
/**
* Manage internal time for each shape.
**/
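// Returns true roughly once every 1-9 seconds, prompting the caller to pick
// a new random destination for the shape.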
this.drawTimer = millis();
if (this.drawTimer >= this.nextTick + this.tickTimer) {
this.tickTimer = millis();
this.nextTick = random(1000, 9000);
return true;
}
return false;
}
draw() {
/**
* Draw the shape vectors.
**/
this.docolour();
// set the start x,y positions for the shape on each draw loop
for (let i = 0; i < this.edges; i++) {
this.startXs[i] =
this.centerX + cos(this.angle * i) * this.radius + this.randXs[i];
this.startYs[i] =
this.centerY + sin(this.angle * i) * this.radius + this.randYs[i];
}
this.curve();
}
hover() {
/**
* React to mouse hovering.
**/
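// Convert the mouse position from screen space into canvas space by removing
// the current drag offset before testing it against the shape outline.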
let isHovering = collidePointPoly(
mouseX - screenX,
mouseY - screenY,
this.vectors
);
if (isHovering === true) {
if (this.hovering === false) {
this.sound();
this.hovering = true;
}
} else {
this.hovering = false;
}
}
move() {
/**
* Move the shape vectors.
**/
this.deltaX = this.destX - this.centerX - toScreenX;
this.deltaY = this.destY - this.centerY - toScreenY;
this.deltaX *= this.springing;
this.deltaY *= this.springing;
this.accelX += this.deltaX;
this.accelY += this.deltaY;
this.centerX += this.accelX;
this.centerY += this.accelY;
this.accelX *= this.damping;
this.accelY *= this.damping;
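// The faster the shape is moving, the lower the curve tightness becomes, so
// the outline is drawn with looser curves while the shape springs towards
// its destination.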
this.organicConstant = 1 - (abs(this.accelX) + abs(this.accelY)) * 0.1;
for (let i = 0; i < this.edges; i++) {
this.xs[i] =
this.startXs[i] + sin(radians(this.angles[i])) * (this.accelX * 2);
this.ys[i] =
this.startYs[i] + sin(radians(this.angles[i])) * (this.accelY * 2);
this.vectors[i] = createVector(this.xs[i], this.ys[i]);
this.angles[i] += this.frequencies[i];
}
}
}
function setup() {
/**
* The initial setup function called once on start.
**/
createCanvas(windowWidth, windowHeight);
frameRate(canvasFrameRate);
setupRecording();
bgColour = color("#C3EFDB");
}
function draw() {
/**
* The draw loop, which is called x times per second, where x is canvasFrameRate.
**/
background(bgColour);
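// reset the drawing state each frame: default blending, anti-aliasing on and
// no outlines around the shapes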
blendMode(BLEND);
smooth();
noStroke();
// offset the window view based on new values of x,y related to the screen.
// These values are generated once the user drags the screen with the mouse.
screenX = lerp(screenX, toScreenX, 0.2);
screenY = lerp(screenY, toScreenY, 0.2);
translate(screenX, screenY);
// generate a new shape after a sound recording
if (newSoundJustRecorded === true) {
// build a new copy of the recording to store on the shape object
let soundBlob = recording.getBlob();
let data = new p5.File(soundBlob);
let sound = new p5.SoundFile(data, function() {
let amp = sound.getPeaks(1)[0] * 100;
let dur = sound.duration();
let shape = new GeneratedShape(sound, amp, dur);
shape.sound();
shapes.push(shape);
sendToArchive();
});
newSoundJustRecorded = false;
}
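// update, draw and move every shape currently in the environment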
for (let i = 0; i < shapes.length; i++) {
let shape = shapes[i];
shape.hover();
shape.draw();
shape.move();
if (shape.tick() === true) {
shape.destX = random(windowWidth);
shape.destY = random(windowHeight);
}
// play recordings when shapes collide
let [collision, collidedShape] = shape.collide(shapes);
if (collision === true) {
shape.sound();
collidedShape.sound();
}
}
}
function mouseDragged() {
/**
* Mouse drag movement handling.
**/
toScreenX += mouseX - pmouseX;
toScreenY += mouseY - pmouseY;
}
function windowResized() {
/**
* Canvas re-draw handling.
**/
resizeCanvas(windowWidth, windowHeight);
}