suroh
1 year ago
commit
936998c6f2
19 changed files with 6956 additions and 0 deletions
@ -0,0 +1,19 @@ |
|||||
|
/* eslint-env node */

// ESLint configuration for the Ethermap server (Node, ES modules).
module.exports = {
  // Stop ESLint from looking for config files in parent directories.
  root: true,
  'extends': [
    'eslint:recommended'
  ],
  env: {
    browser: false,
    node: true,
  },
  parserOptions: {
    ecmaVersion: 'latest',
    sourceType: 'module'
  },
  rules: {
    // Project style: single quotes, no semicolons.
    quotes: ['error', 'single'],
    semi: ['error', 'never']
  }
}
@ -0,0 +1,118 @@ |
|||||
|
# ---> Node |
||||
|
# Logs |
||||
|
logs |
||||
|
*.log |
||||
|
npm-debug.log* |
||||
|
yarn-debug.log* |
||||
|
yarn-error.log* |
||||
|
lerna-debug.log* |
||||
|
|
||||
|
# Diagnostic reports (https://nodejs.org/api/report.html) |
||||
|
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json |
||||
|
|
||||
|
# Runtime data |
||||
|
pids |
||||
|
*.pid |
||||
|
*.seed |
||||
|
*.pid.lock |
||||
|
|
||||
|
# Directory for instrumented libs generated by jscoverage/JSCover |
||||
|
lib-cov |
||||
|
|
||||
|
# Coverage directory used by tools like istanbul |
||||
|
coverage |
||||
|
*.lcov |
||||
|
|
||||
|
# nyc test coverage |
||||
|
.nyc_output |
||||
|
|
||||
|
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) |
||||
|
.grunt |
||||
|
|
||||
|
# Bower dependency directory (https://bower.io/) |
||||
|
bower_components |
||||
|
|
||||
|
# node-waf configuration |
||||
|
.lock-wscript |
||||
|
|
||||
|
# Compiled binary addons (https://nodejs.org/api/addons.html) |
||||
|
build/Release |
||||
|
|
||||
|
# Dependency directories |
||||
|
node_modules/ |
||||
|
jspm_packages/ |
||||
|
|
||||
|
# Snowpack dependency directory (https://snowpack.dev/) |
||||
|
web_modules/ |
||||
|
|
||||
|
# TypeScript cache |
||||
|
*.tsbuildinfo |
||||
|
|
||||
|
# Optional npm cache directory |
||||
|
.npm |
||||
|
|
||||
|
# Optional eslint cache |
||||
|
.eslintcache |
||||
|
|
||||
|
# Microbundle cache |
||||
|
.rpt2_cache/ |
||||
|
.rts2_cache_cjs/ |
||||
|
.rts2_cache_es/ |
||||
|
.rts2_cache_umd/ |
||||
|
|
||||
|
# Optional REPL history |
||||
|
.node_repl_history |
||||
|
|
||||
|
# Output of 'npm pack' |
||||
|
*.tgz |
||||
|
|
||||
|
# Yarn Integrity file |
||||
|
.yarn-integrity |
||||
|
|
||||
|
# dotenv environment variables file |
||||
|
.env |
||||
|
.env.test |
||||
|
|
||||
|
# parcel-bundler cache (https://parceljs.org/) |
||||
|
.cache |
||||
|
.parcel-cache |
||||
|
|
||||
|
# Next.js build output |
||||
|
.next |
||||
|
out |
||||
|
|
||||
|
# Nuxt.js build / generate output |
||||
|
.nuxt |
||||
|
dist |
||||
|
|
||||
|
# Gatsby files |
||||
|
.cache/ |
||||
|
# Comment in the public line in if your project uses Gatsby and not Next.js |
||||
|
# https://nextjs.org/blog/next-9-1#public-directory-support |
||||
|
# public |
||||
|
|
||||
|
# vuepress build output |
||||
|
.vuepress/dist |
||||
|
|
||||
|
# Serverless directories |
||||
|
.serverless/ |
||||
|
|
||||
|
# FuseBox cache |
||||
|
.fusebox/ |
||||
|
|
||||
|
# DynamoDB Local files |
||||
|
.dynamodb/ |
||||
|
|
||||
|
# TernJS port file |
||||
|
.tern-port |
||||
|
|
||||
|
# Stores VSCode versions used for testing VSCode extensions |
||||
|
.vscode-test |
||||
|
|
||||
|
# yarn v2 |
||||
|
.yarn/cache |
||||
|
.yarn/unplugged |
||||
|
.yarn/build-state.yml |
||||
|
.yarn/install-state.gz |
||||
|
.pnp.* |
||||
|
|
@ -0,0 +1,25 @@ |
|||||
|
import MapModel from '../db/models/map.js' |
||||
|
|
||||
|
// Return every map in the database as { maps: [...] }.
// Forwards DB failures to the error middleware instead of leaving an
// unhandled promise rejection (the original had no error path at all).
const getAllMaps = async (req, res, next) => {
  try {
    const maps = await MapModel.query()

    res.json({ maps })
  } catch (err) {
    next(err)
  }
}
||||
|
|
||||
|
// Look a map up by its URL name, creating it on first access, and respond
// with the map including its related map_points.
const getMapByName = async (req, res, next) => {
  const name = req.params.mapName
  try {
    let map = await MapModel.query().where({ name }).withGraphFetched('map_points').first()

    // Maps are created lazily: visiting an unknown name makes a new map.
    if (!map) {
      const created = await MapModel.query().insert({ name })
      map = await MapModel.query().findById(created.id).withGraphFetched('map_points')
    }

    res.json(map)
  } catch (error) {
    // The original only logged here, so the client never got a response
    // and the request hung. Hand the error to the error middleware.
    next(error)
  }
}
||||
|
|
||||
|
export { getMapByName, getAllMaps } |
@ -0,0 +1,16 @@ |
|||||
|
import MapModel from '../db/models/map.js' |
||||
|
|
||||
|
// Insert a point for an existing map.
// Request body: { mapId, point: { name?, notes?, location? } }
const setPoint = async (req, res, next) => {
  try {
    const { mapId, point } = req.body

    const map = await MapModel.query().findById(mapId)

    // Guard against unknown map ids: previously this fell through to a
    // TypeError ("$relatedQuery of undefined") and surfaced as a 500.
    if (!map) {
      return res.status(404).json({ message: `map ${mapId} not found` })
    }

    const p = await map.$relatedQuery('map_points').insert(point)

    res.json(p)
  } catch (err) {
    // Invalid point columns etc. go to the error middleware (500).
    next(err)
  }
}
||||
|
|
||||
|
export { setPoint } |
@ -0,0 +1,23 @@ |
|||||
|
import knexConfig from '../knexfile.js' |
||||
|
import Knex from 'knex' |
||||
|
import { Model } from 'objection' |
||||
|
import { newDb } from 'pg-mem' |
||||
|
|
||||
|
// Pick the knex configuration from NODE_ENV; tests get an in-memory db.
const environment = process.env.NODE_ENV || 'development'

// variable for exporting the db
let DB

// Strict equality: the original used loose `==`, which works here but
// violates the project's eslint:recommended baseline and invites coercion bugs.
if (environment === 'test') {
  // pg-mem provides an in-memory Postgres-compatible database so tests
  // never touch a real server.
  const mem = newDb()
  DB = mem.adapters.createKnex(0, {
    migrations: {
      directory: './db/migrations'
    },
  })
  // Bind objection models to the in-memory knex instance immediately.
  Model.knex(DB)
} else {
  DB = Knex(knexConfig[environment])
}

export default DB
@ -0,0 +1,34 @@ |
|||||
|
/**
 * Create the maps and map_points tables.
 *
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
const up = (knex) => {
  // Column definitions for the maps table.
  const buildMaps = (table) => {
    table.increments().primary()
    table.string('name').notNullable().unique()
    table.timestamp('created_at').defaultTo(knex.fn.now())
    table.timestamp('updated_at').defaultTo(knex.fn.now())
  }

  // Column definitions for the map_points table; map_id links back to maps.
  const buildMapPoints = (table) => {
    table.increments().primary()
    table.string('name')
    table.string('notes')
    table.point('location')
    table.timestamp('created_at').defaultTo(knex.fn.now())
    table.timestamp('updated_at').defaultTo(knex.fn.now())
    table.integer('map_id').references('id').inTable('maps')
  }

  return knex.schema
    .createTable('maps', buildMaps)
    .createTable('map_points', buildMapPoints)
}
||||
|
|
||||
|
/**
 * Roll the schema back. maps is dropped with CASCADE so the map_points
 * foreign key does not block the drop; map_points is removed afterwards.
 *
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
const down = (knex) =>
  knex.schema.raw('DROP TABLE maps CASCADE').dropTable('map_points')
||||
|
|
||||
|
export { up, down } |
@ -0,0 +1,21 @@ |
|||||
|
import { Model } from 'objection' |
||||
|
import Point from './point.js' |
||||
|
|
||||
|
// Objection model for the maps table; each map owns many map_points rows.
class MapModel extends Model {
  static tableName = 'maps'

  static get relationMappings() {
    // One map → many points, joined through map_points.map_id.
    const mapPoints = {
      relation: Model.HasManyRelation,
      modelClass: Point,
      join: {
        from: 'maps.id',
        to: 'map_points.map_id',
      }
    }

    return { map_points: mapPoints }
  }
}
||||
|
|
||||
|
export default MapModel |
@ -0,0 +1,7 @@ |
|||||
|
import { Model } from 'objection' |
||||
|
|
||||
|
// Objection model for the map_points table.
class PointModel extends Model {
  // Getter form is equivalent to the static-field form for objection.
  static get tableName() {
    return 'map_points'
  }
}
||||
|
|
||||
|
export default PointModel |
@ -0,0 +1,6 @@ |
|||||
|
// Load environment variables before anything else: ES module imports are
// evaluated in order, so dotenv must come first for process.env to be
// populated when the modules below are evaluated. (The original imported
// server.js first, relying on knexfile.js to load dotenv itself.)
import 'dotenv/config'
import App from './server.js'

// Fall back to 3000 so the server has a predictable port when PORT is unset
// (listen(undefined) would bind a random free port).
const port = process.env.PORT ?? 3000

App.listen(port, () => {
  console.log(`Ethermap listening for connections on port ${port}`)
})
@ -0,0 +1,64 @@ |
|||||
|
// Update with your config settings.
|
||||
|
import 'dotenv/config' |
||||
|
import { dirname } from 'path' |
||||
|
import { fileURLToPath } from 'url' |
||||
|
|
||||
|
const __dirname = dirname(fileURLToPath(import.meta.url))

// All three environments talk to Postgres with credentials from the
// environment; the original repeated this object verbatim in each section.
const connection = {
  database: process.env.DB_NAME,
  user: process.env.DB_USER,
  password: process.env.DB_PASS,
  host: process.env.DB_HOST,
  port: process.env.DB_PORT
}

// Connection pool used by the long-running (staging/production) environments.
const pool = {
  min: 2,
  max: 10
}

/**
 * Knex configuration per environment, selected via NODE_ENV in db/DB.js.
 *
 * @type { Object.<string, import("knex").Knex.Config> }
 */
const config = {
  development: {
    client: 'pg',
    connection,
    migrations: {
      directory: __dirname + '/db/migrations'
    },
    seeds: {
      directory: __dirname + '/db/seeds'
    }
  },

  staging: {
    client: 'pg',
    connection,
    pool,
    migrations: {
      tableName: 'knex_migrations'
    }
  },

  production: {
    client: 'pg',
    connection,
    pool,
    migrations: {
      tableName: 'knex_migrations'
    }
  }
}

export default config
@ -0,0 +1,6 @@ |
|||||
|
// Express error-handling middleware.
// NOTE: Express recognises an error handler ONLY by its arity — the function
// must declare exactly four parameters. The previous three-parameter version
// was treated as ordinary middleware and never ran for errors (Express fell
// back to its default handler instead).
// eslint-disable-next-line no-unused-vars
const errorHandler = (err, req, res, next) => {
  res.status(500).json({ message: err.message })
}

export default errorHandler
||||
|
|
||||
|
|
File diff suppressed because it is too large
@ -0,0 +1,36 @@ |
|||||
|
{ |
||||
|
"name": "ethermap", |
||||
|
"version": "0.0.1", |
||||
|
"description": "collaborative map tool inspired by etherpad", |
||||
|
"main": "index.js", |
||||
|
"type": "module", |
||||
|
"scripts": { |
||||
|
"dev": "nodemon index.js", |
||||
|
"test": "ava", |
||||
|
"test:routes": "ava ./tests/routes.js", |
||||
|
"test:db": "ava ./tests/db.js", |
||||
|
"migrate:latest": "knex migrate:latest", |
||||
|
"migrate:drop": "knex migrate:down" |
||||
|
}, |
||||
|
"keywords": [ |
||||
|
"ethermap", |
||||
|
"map", |
||||
|
"collaborative" |
||||
|
], |
||||
|
"author": "", |
||||
|
"license": "GPL-3.0-or-later", |
||||
|
"dependencies": { |
||||
|
"dotenv": "^16.3.1", |
||||
|
"express": "^4.18.2", |
||||
|
"knex": "^2.5.1", |
||||
|
"objection": "^3.1.1", |
||||
|
"pg": "^8.11.3" |
||||
|
}, |
||||
|
"devDependencies": { |
||||
|
"ava": "^5.3.1", |
||||
|
"eslint": "^8.48.0", |
||||
|
"nodemon": "^3.0.1", |
||||
|
"pg-mem": "^2.6.13", |
||||
|
"supertest": "^6.3.3" |
||||
|
} |
||||
|
} |
@ -0,0 +1,8 @@ |
|||||
|
import { Router } from 'express' |
||||
|
import { getMapByName } from '../controllers/maps.js' |
||||
|
|
||||
|
// Router mounted at /m in server.js.
const mapsRouter = Router()

// GET /m/:mapName — fetch (or lazily create) a map by name.
mapsRouter.get('/:mapName', getMapByName)

export default mapsRouter
@ -0,0 +1,8 @@ |
|||||
|
import { Router } from 'express' |
||||
|
import { setPoint } from '../controllers/points.js' |
||||
|
|
||||
|
// Router mounted at /p in server.js.
const pointsRouter = Router()

// POST /p/addpoint — add a point to an existing map.
pointsRouter.post('/addpoint', setPoint)

export default pointsRouter
@ -0,0 +1,15 @@ |
|||||
|
import { Router } from 'express' |
||||
|
|
||||
|
import { getAllMaps } from '../controllers/maps.js' |
||||
|
|
||||
|
// Top-level router mounted at / in server.js.
const rootRouter = Router()

// Simple liveness / identification endpoint.
rootRouter.get('/', (_, res) => {
  res.send('ethermap')
})

// GET /maps — list every map.
rootRouter.get('/maps', getAllMaps)

export default rootRouter
||||
|
|
@ -0,0 +1,30 @@ |
|||||
|
// web server
import express from 'express'

// database
import DB from './db/DB.js'
import { Model } from 'objection'

// middleware
import ErrorMiddleware from './middleware/errors.js'

// routes — the original declared these mid-file; imports are hoisted and
// evaluated first regardless, so keep them all at the top where readers
// expect them. Behavior is identical.
import rootRouter from './routes/root.js'
import mapsRouter from './routes/maps.js'
import pointsRouter from './routes/points.js'

// database setup: bind objection models to the configured knex instance
Model.knex(DB)

// webserver setup
const app = express()
app.use(express.json())

// route mounting
app.use(rootRouter)
app.use('/m', mapsRouter)
app.use('/p', pointsRouter)

// error middleware — must be registered after all routes
app.use(ErrorMiddleware)

export default app
@ -0,0 +1,37 @@ |
|||||
|
// testing tools
|
||||
|
import test from 'ava' |
||||
|
import db from '../db/DB.js' |
||||
|
|
||||
|
// db model
|
||||
|
import MapModel from '../db/models/map.js' |
||||
|
|
||||
|
// Run all migrations before any test; NODE_ENV=test makes db/DB.js use the
// in-memory pg-mem database, so no external server is needed.
test.before(async () => {
  await db.migrate.latest()
})

// Sanity check: querying the model resolves to a truthy result.
test('Selecting maps should return array', async t => {
  const maps = await MapModel.query()

  t.truthy(maps)
})

// NOTE(review): the remaining tests are .serial — they share database state
// and depend on running in exactly this order.
test.serial('Inserting map returns map object', async t => {
  const map = await MapModel.query().insert({ name: 'milo' })

  t.is(map.name, 'milo')
})

// Relies on the 'milo' map inserted by the previous serial test.
test.serial('Insert point for existing map returns point', async t => {
  const map = await MapModel.query().where({ name: 'milo' }).first()
  const point = await map.$relatedQuery('map_points').insert({
    name: 'pointy',
    location: '(50.8552,4.3454)',
  })

  t.is(point.name, 'pointy')
  t.is(point.location, '(50.8552,4.3454)')
})

// Roll the schema back after the run so repeated runs start clean.
test.after(async () => {
  await db.migrate.down()
})
@ -0,0 +1,93 @@ |
|||||
|
// testing tools
|
||||
|
import test from 'ava' |
||||
|
import request from 'supertest' |
||||
|
|
||||
|
// express app
|
||||
|
import App from '../server.js' |
||||
|
import db from '../db/DB.js' |
||||
|
|
||||
|
// Run the migrations before the suite; NODE_ENV=test makes db/DB.js use the
// in-memory pg-mem database.
test.before(async t => {
  await db.migrate.latest()
})

// NOTE(review): every test below is .serial — they share database state
// (map ids, inserted points) and depend on running in exactly this order.
test.serial('get "/" route should return body of "ethermap"', async t => {
  const res = await request(App).get('/')

  t.is(res.status, 200)
  t.is(res.text, 'ethermap')
})

test.serial('get "/maps" route should return an object containing an array called "maps"', async t => {
  const res = await request(App).get('/maps')

  t.is(res.status, 200)
  t.truthy(res.body.maps?.constructor === Array)
})

// First visit creates the map (lazy creation in getMapByName).
test.serial('get "/m/:mapName" route should return map with matching name', async t => {
  const res = await request(App).get('/m/bingo')

  t.is(res.status, 200)
  t.is(res.body.name, 'bingo')
})

test.serial('get "/m/:mapName" route with different mapName should create new map with different id', async t => {
  const res = await request(App).get('/m/cheese')

  t.is(res.status, 200)
  t.truthy(res.body.id)
  t.not(res.body.id, 1)
})

// 'bingo' was the first map created above, so it should keep id 1.
test.serial('get "/m/:mapName" route with existing mapName should return same id', async t => {
  const res = await request(App).get('/m/bingo')

  t.is(res.status, 200)
  t.is(res.body.id, 1)
})

test.serial('post "/p/addpoint" body containing a name, location and map_id should return a point', async t => {
  const { body: { id: mapId } } = await request(App).get('/m/bingo')
  const res = await request(App)
    .post('/p/addpoint')
    .send({
      mapId,
      point: {
        name: 'pointy',
        location: '(50.8552,4.3454)',
      }
    })

  t.is(res.status, 200)
  t.is(res.body.id, 1)
  t.is(res.body.map_id, mapId)
  t.is(res.body.name, 'pointy')
})

// The point inserted by the previous test should come back with the map.
test.serial('get "/m/:mapName" with associated points should return a map with an array of points', async t => {
  const res = await request(App).get('/m/bingo')

  t.is(res.status, 200)
  t.truthy(res.body.map_points)
  t.is(res.body.map_points.length, 1)
})

// Unknown columns (title/coords) make the insert fail; the error middleware
// should surface it as a 500.
test.serial('post "/p/addpoint" with incorrect data keys throws 500 error', async t => {
  const { body: { id: mapId } } = await request(App).get('/m/bingo')
  const error = await request(App)
    .post('/p/addpoint')
    .send({
      mapId,
      point: {
        title: 'pointy',
        coords: '(50.8552,4.3454)',
      }
    })

  t.is(error.status, 500)
})

// Roll the schema back after the run.
test.after(async () => {
  await db.migrate.down()
})
||||
|
|
Loading…
Reference in new issue