Some refactoring and cache generation for graphs
parent b3548cfee7
commit c4df7a735a
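For context, a minimal usage sketch (not part of this diff) of how the new graphRecordCache table is read; api/graph.js below prepares essentially the same query. The database filename and the example map name are assumptions:

// Sketch only: reads the per-map record progression that processTimeGraph() precomputes.
// Assumes ddnet.sqlite sits in the working directory (the real path is set up in init.js).
import Database from 'better-sqlite3'

const sqlite = new Database('ddnet.sqlite', { readonly: true })

// One cached row per improvement of the map record, oldest first.
const rows = sqlite
	.prepare(`SELECT player, time, timestamp FROM graphRecordCache WHERE map = ? ORDER BY Timestamp`)
	.all('Multeasymap') // hypothetical map name

// Shape the rows the way the graph endpoint does for charting.
const points = rows.map(r => ({ t: new Date(r.timestamp), y: r.time, player: r.player }))
console.log(points)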
api/graph.js (20 changed lines)

@@ -22,15 +22,12 @@ graphApi.get(
INNER JOIN maps AS b
ON a.map = b.map
WHERE a.NAME = ?
AND a.map LIKE '%'
GROUP BY a.map
ORDER BY a.timestamp;
`)

let currentPoints = 0
let array = []
for (const finish of finishes.iterate(player)) {
console.log(finish)
currentPoints += finish.Points
array.push({ t: new Date(finish.Timestamp), y: currentPoints })
}

@@ -52,22 +49,11 @@ graphApi.get(
response: "No query ('host/path?q=query') provided!"
})
}
let map = req.query.q
const finishes = sqlite.prepare(`SELECT * FROM graphRecordCache WHERE map = ? ORDER BY Timestamp`)

const finishes = sqlite.prepare(
`
SELECT * FROM race WHERE map = ? ORDER BY Timestamp;
`)
let currentFinish
let currentBest = 0;
let array = []
for (const record of finishes.iterate(map)) {
currentFinish = record.Time
if(currentFinish <= currentBest || currentBest == 0) {
currentBest = currentFinish
array.push({ player: record.Name, Time: record.Time, Date: new Date(record.Timestamp) })
}
}
for (const record of finishes.iterate(req.query.q))
array.push({ t: new Date(record.timestamp), y: record.time, player: record.player})

return res.json({
success: true,

@@ -9,7 +9,7 @@ playerApi.get(
async (req, res) => {
let player = req.params.player

/* Misc, may be worth to cache this? */
/* Misc */
const firstFinish = sqlite.prepare(`SELECT server as server, map as map, time as time, Timestamp as date FROM race WHERE name = ? ORDER BY Timestamp ASC LIMIT 1`).get(player)

/* Points */

index.js (2 changed lines)

@@ -4,7 +4,7 @@ import api from './api/api.js'
import { generateDB } from "./libs/database/generate.js"
import { sqlite, dbInit } from "./libs/database/init.js"
import { ddnssStart } from './libs/ddnss/handler.js'
//import tasks from './db/tasks.js'
import tasks from './libs/database/tasks.js'

dotenv.config()

@@ -12,14 +12,21 @@ const log = initLog("DB Generation")
export function generateDB() {
/* TODO: Clean this up as it is a mess */
/* TODO: Remove useless ones */

log("Generating map index...")
execMany([
`CREATE INDEX IF NOT EXISTS "idx_maps_map" ON "maps" ("map")`,
`CREATE INDEX IF NOT EXISTS "idx_maps_category" ON "maps" ("server");`
])

log("Generating race index...")
execMany([
`CREATE INDEX IF NOT EXISTS "idx_race_Map_2" ON "race" ("Map","Name")`,
`CREATE INDEX IF NOT EXISTS "idx_race_Name" ON "race" ("Name","Timestamp")`,
`CREATE INDEX IF NOT EXISTS "idx_race_Server" ON "race" ("Server")`,
`CREATE INDEX IF NOT EXISTS "idx_race_MapTimestamp" ON "race" ("Map","Timestamp")`,
`CREATE INDEX IF NOT EXISTS "idx_race_Timestamp" ON "race" ("Timestamp")`,
`CREATE INDEX IF NOT EXISTS "idx_race_MapNameTime" ON "race" ("Map", "Name", "Time")`
`CREATE INDEX IF NOT EXISTS "idx_race_player" ON "race" ("Name")`,
`CREATE INDEX IF NOT EXISTS "idx_race_name" ON "race" ("Name","Timestamp")`,
`CREATE INDEX IF NOT EXISTS "idx_race_server" ON "race" ("Server")`,
`CREATE INDEX IF NOT EXISTS "idx_race_mapTimestamp" ON "race" ("Map","Timestamp")`,
`CREATE INDEX IF NOT EXISTS "idx_race_timestamp" ON "race" ("Timestamp")`,
`CREATE INDEX IF NOT EXISTS "idx_race_mapNameTime" ON "race" ("Map", "Name", "Time")`
])

log("Creating rankings table...")

@@ -45,9 +52,9 @@ export function generateDB() {

log("Generating teamrace index...")
execMany([
`CREATE INDEX IF NOT EXISTS "idx_teamrace_Map" ON "teamrace" ("Map")`,
`CREATE INDEX IF NOT EXISTS "idx_teamrace_map" ON "teamrace" ("Map")`,
`CREATE INDEX IF NOT EXISTS "idx_teamrace_ID" ON "teamrace" ("ID")`,
`CREATE INDEX IF NOT EXISTS "idx_teamrace_MapID" ON "teamrace" ("Map", "ID")`
`CREATE INDEX IF NOT EXISTS "idx_teamrace_mapID" ON "teamrace" ("Map", "ID")`
])

log("Creating teamrankings table...")

@@ -79,6 +86,22 @@ export function generateDB() {
"points" INTEGER NOT NULL);
`)

log("Generating graphRecordCache...")
sqlite.exec(`
CREATE TABLE IF NOT EXISTS "graphRecordCache" (
"map" varchar(128) NOT NULL,
"player" varchar(16) NOT NULL,
"time" float NOT NULL DEFAULT 0,
"timestamp" timestamp NOT NULL DEFAULT current_timestamp,
"Server" char(4) NOT NULL DEFAULT '');
`)
tasks.processTimeGraph()

execMany([
`CREATE INDEX IF NOT EXISTS "idx_graphCache_player" ON "graphRecordCache" ("player")`,
`CREATE INDEX IF NOT EXISTS "idx_graphCache_map" ON "graphRecordCache" ("map");`
])

log("Inserting points to DB...")
tasks.processAllPoints()

@@ -1,9 +1,12 @@
import Database from 'better-sqlite3'
import initLog from '../utils/log.js'

/* Export DB for use in other files */
export let sqlite = undefined
export let skinDB = undefined

const log = initLog("DB Init")

/**
* This initalizes the ddnet.sqlite and skindata.sqlite DB...
* @module db/dbInit

@@ -21,6 +24,6 @@ export function dbInit() {
/* Unsafe mode */
sqlite.unsafeMode()

console.log("Loaded in 'ddnet.sqlite'!")
console.log("Loaded in 'skindata.sqlite'!")
log("Loaded in ddnet.sqlite...")
log("Loaded in skindata.sqlite...")
}

@@ -86,6 +86,43 @@ export function processTeamRankings() {
.run(map.Map)
}

/**
* This generates a cache for all the dates the top record has been improved for each map...
* @module libs/database/processTimeGraph
*/
export function processTimeGraph() {

let currentFinish
let currentBest = 0;

const maps = sqlite.prepare(`SELECT map FROM maps`);
const finishes = sqlite.prepare(`SELECT * FROM race WHERE map = ? ORDER BY Timestamp`)
for (const map of maps.iterate()) {
let currentFinish
let currentBest = 0;

for (const record of finishes.iterate(map.Map)) {
currentFinish = record.Time
if (currentFinish <= currentBest || currentBest == 0) {
currentBest = currentFinish

sqlite.prepare(`
INSERT INTO "graphRecordCache"
(
map, player, time, timestamp, server
) VALUES (?, ?, ?, ?, ?)
`).run(
map.Map,
record.Name,
record.Time,
record.Timestamp,
record.Server
)
}
}
}
}

/**
* This inserts all types of points into a table...
* @module db/processAllPoints

@@ -146,5 +183,6 @@ export function processAllPoints() {
export default {
processAllPoints,
processRankings,
processTeamRankings
processTeamRankings,
processTimeGraph
}

@@ -9,8 +9,6 @@ export async function ddnssStart() {
const getServers = await fetch('https://ddnet.tw/status/index.json');
const servers = await getServers.json();

console.log(servers)

for (const server of servers) {
const connection = `${server.ip}:${server.port}`

@@ -1,5 +1,5 @@
/**
* This function creates a custom logging method that adds a prefix evrytime used.
* This function creates a custom logging method that adds a prefix everytime used.
* This is so that you can see what component has done what.
* Example:
* The database-component would log with the prefix 'database'
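For reference, a usage sketch of the initLog helper documented above, assuming initLog(prefix) returns a logging function as generate.js and init.js use it; the import path and the example prefix are assumptions:

// Sketch only: how a component would create and use its prefixed logger.
import initLog from './libs/utils/log.js' // assumed path, matching '../utils/log.js' as imported from libs/database

const log = initLog("Database")   // prefix identifies the component
log("Loaded in ddnet.sqlite...")  // logs the message tagged with the 'Database' prefix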