Remove cache warmer, fix bugs, improve crawler
* Control of disk cache warming is now handled by an improved implementation in the Collector (so it can be toggled on and off during backups)
* Fixed a dereferencing bug in the systems API endpoints when attempting to look up a system that could not be found
* Improved the crawler to make it easier to run performance tests against a local instance
iaincollins committed Jan 22, 2025
1 parent 986b20e commit 7954abf
Showing 8 changed files with 22 additions and 69 deletions.
17 changes: 0 additions & 17 deletions index.js
@@ -21,7 +21,6 @@ const koaCompress = require('koa-compress')
 
 console.log('Loading libraries …')
 const router = require('./router')
-const warmCache = require('./lib/warm-cache')
 const updateCommodityTicker = require('./lib/cron-tasks/commodity-ticker')
 const updateGalnetNews = require('./lib/cron-tasks/galnet-news')
 
@@ -64,22 +63,6 @@ const updateGalnetNews = require('./lib/cron-tasks/galnet-news')
   app.listen(ARDENT_API_LOCAL_PORT)
   console.log(printStats())
 
-  // Schedule task to try to keep the cache warm
-  if (process?.env?.NODE_ENV === 'development') {
-    console.log('Cache warming disabled')
-  } else {
-    console.log('Cache warming enabled')
-    // Ensure this happens at startup without forcing the server to wait for it
-    warmCache()
-    // Schedule cache warming to re-run every couple of minutes. This results in
-    // performance very similar to a pure RAM disk most of the time (because
-    // the data _is_ in RAM) but without the complexity of having to deal with
-    // actually syncing an in-memory database to disk, because the OS will
-    // handle it. A physical disk partition for this data in a RAID 1 array with
-    // a RAM drive would be ideal, but this is lower effort on my part.
-    cron.schedule('0 */2 * * * *', () => warmCache())
-  }
-
   console.log('Ardent API service started!')
 })()

10 changes: 5 additions & 5 deletions lib/crawler.js
@@ -3,18 +3,18 @@
 const dbAsync = require('./db/db-async')
 const { ARDENT_API_HOSTNAME } = require('./consts')
 
-module.exports = async (debug = true) => {
+module.exports = async (baseUrl = `https://${ARDENT_API_HOSTNAME}`) => {
   console.time('Crawl commodities')
   try {
     const commodities = await dbAsync.all('SELECT DISTINCT(commodityName) FROM commodities')
-    if (debug === true) console.log(`Warming cache for ${ARDENT_API_HOSTNAME}`)
+    console.log(`Fetching commodity imports from ${baseUrl}`)
     for (let i = 0; i < commodities.length; i++) {
       const { commodityName } = commodities[i]
-      const url = `https://${ARDENT_API_HOSTNAME}/v1/commodity/name/${commodityName}/imports`
-      if (debug === true) console.time(`${i+1} of ${commodities.length} ${commodityName}`)
+      const url = `${baseUrl}/v1/commodity/name/${commodityName}/imports`
+      console.time(`${i+1} of ${commodities.length} ${commodityName}`)
       const res = await fetch(url)
       if (!res.ok) console.error(`Crawler error fetching: ${url}`)
-      if (debug === true) console.timeEnd(`${i+1} of ${commodities.length} ${commodityName}`)
+      console.timeEnd(`${i+1} of ${commodities.length} ${commodityName}`)
     }
   } catch (e) {
     return console.error('Crawler failed:', e)
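With the `debug` flag replaced by a `baseUrl` parameter, the target instance is now injectable. A minimal sketch of how the updated module can be exercised, using the localhost URL that `scripts/crawler.js` (below) passes in development:

```js
const crawler = require('./lib/crawler')

;(async () => {
  // No argument: crawl the production API at https://${ARDENT_API_HOSTNAME}
  await crawler()

  // Point at a local instance to run performance tests against it
  await crawler('http://localhost:3001/api')
})()
```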
23 changes: 0 additions & 23 deletions lib/warm-cache.js

This file was deleted.
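The 23-line `lib/warm-cache.js` is not rendered in this diff. Given that the crawler previously logged "Warming cache for …", the warmer plausibly just delegated to it. A speculative sketch, not the actual deleted code:

```js
// Hypothetical reconstruction — the deleted lib/warm-cache.js is not shown
// in this diff. Requesting every commodity endpoint pulls hot rows of the
// on-disk SQLite database into the OS page cache, so subsequent reads
// perform almost like an in-memory database.
const crawler = require('./crawler')

module.exports = async () => {
  try {
    await crawler() // old signature: crawler(debug = true)
  } catch (e) {
    console.error('Cache warming failed:', e)
  }
}
```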

16 changes: 5 additions & 11 deletions package-lock.json

Some generated files are not rendered by default.

5 changes: 2 additions & 3 deletions package.json
@@ -1,12 +1,12 @@
 {
   "name": "ardent-api",
-  "version": "4.11.2",
+  "version": "4.12.0",
   "description": "Ardent API provides access to data submitted to EDDN",
   "main": "index.js",
   "scripts": {
     "start": "node index.js",
-    "warm-cache": "node scripts/warm-cache.js",
     "crawler": "node scripts/crawler.js",
+    "crawler:dev": "cross-env NODE_ENV=development node scripts/crawler.js",
     "dev": "cross-env NODE_ENV=development node index.js",
     "lint": "npx standard --fix"
   },
@@ -22,7 +22,6 @@
"homepage": "https://github.com/iaincollins/ardent-api#readme",
"dependencies": {
"better-sqlite3": "^8.3.0",
"command-exists": "^1.2.9",
"cross-env": "^7.0.3",
"dotenv": "^16.0.3",
"koa": "^2.14.2",
12 changes: 8 additions & 4 deletions router/api/systems.js
@@ -351,8 +351,10 @@ module.exports = (router) => {
     if (maxDistance > MAX_NEARBY_SYSTEMS_DISTANCE) { maxDistance = MAX_NEARBY_SYSTEMS_DISTANCE }
     maxDistance = parseInt(maxDistance)
 
-    const { systemAddress, systemX, systemY, systemZ } = await getSystemByName(systemName)
-    if (!systemAddress) return NotFoundResponse(ctx, 'System not found')
+    // Validate system name
+    const system = await getSystemByName(systemName)
+    if (!system) return NotFoundResponse(ctx, 'System not found')
+    const { systemX, systemY, systemZ } = system
 
     const filters = [
       `AND (c.demand >= ${parseInt(minVolume)} OR c.demand = 0)`, // Zero is infinite demand
@@ -417,8 +419,10 @@ module.exports = (router) => {
     if (maxDistance > MAX_NEARBY_SYSTEMS_DISTANCE) { maxDistance = MAX_NEARBY_SYSTEMS_DISTANCE }
     maxDistance = parseInt(maxDistance)
 
-    const { systemAddress, systemX, systemY, systemZ } = await getSystemByName(systemName)
-    if (!systemAddress) return NotFoundResponse(ctx, 'System not found')
+    // Validate system name
+    const system = await getSystemByName(systemName)
+    if (!system) return NotFoundResponse(ctx, 'System not found')
+    const { systemX, systemY, systemZ } = system
 
     const filters = [
       `AND c.stock >= ${parseInt(minVolume)}`
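The bug these two hunks fix: destructuring properties from `undefined` throws a `TypeError` before the not-found guard can run, so looking up an unknown system crashed the request instead of returning a 404. A standalone illustration (the simulated lookup below is hypothetical):

```js
// Simulate getSystemByName() failing to find a match
const getSystemByName = async (systemName) => undefined

;(async () => {
  try {
    // Old pattern: throws "Cannot destructure property 'systemAddress' of
    // '(intermediate value)' as it is undefined" — the guard never runs
    const { systemAddress } = await getSystemByName('No Such System')
    if (!systemAddress) console.log('never reached')
  } catch (e) {
    console.error(e.message)
  }

  // New pattern: check the lookup result first, then destructure
  const system = await getSystemByName('No Such System')
  if (!system) return console.log('404 System not found')
  const { systemX, systemY, systemZ } = system
})()
```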
3 changes: 2 additions & 1 deletion scripts/crawler.js
@@ -1,5 +1,6 @@
 const crawler = require('../lib/crawler')
 ;(async () => {
-  await crawler()
+  const baseUrl = process.env.NODE_ENV === 'development' ? 'http://localhost:3001/api' : undefined
+  await crawler(baseUrl)
   process.exit()
 })()
5 changes: 0 additions & 5 deletions scripts/warm-cache.js

This file was deleted.
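The deleted `scripts/warm-cache.js` is likewise not rendered; given its five-line size and the pattern of `scripts/crawler.js`, it was plausibly just a CLI wrapper. A hypothetical reconstruction:

```js
// scripts/warm-cache.js — hypothetical reconstruction of the deleted file,
// by analogy with scripts/crawler.js
const warmCache = require('../lib/warm-cache')
;(async () => {
  await warmCache()
  process.exit()
})()
```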
