Skip to content

Commit

Permalink
Adjust cache warming schedule; add back crawler
Browse files Browse the repository at this point in the history
  • Loading branch information
iaincollins committed Jan 22, 2025
1 parent 290e6ea commit 986b20e
Show file tree
Hide file tree
Showing 4 changed files with 38 additions and 3 deletions.
10 changes: 8 additions & 2 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -68,10 +68,16 @@ const updateGalnetNews = require('./lib/cron-tasks/galnet-news')
if (process?.env?.NODE_ENV === 'development') {
console.log('Cache warming disabled')
} else {
// Ensure this happens at startup without forcing the server to wait for it
console.log('Cache warming enabled')
cron.schedule('0 */5 * * * *', () => warmCache())
// Ensure this happens at startup without forcing the server to wait for it
warmCache()
// Schedule cache warming to re-run every couple of minutes. This results in
// performance very similar to a pure RAM disk most of the time (because
// the data _is_ in RAM) but without the complexity of having to deal with
// actually syncing an in-memory database to disk, because the OS will
// handle it. A physical disk partition for this data in a RAID 1 array with
// a RAM drive would be ideal, but this is lower effort on my part.
cron.schedule('0 */2 * * * *', () => warmCache())
}

console.log('Ardent API service started!')
Expand Down
23 changes: 23 additions & 0 deletions lib/crawler.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
// This script was originally written to force cache warming, but with that
// code having been refactored am now using it for performance testing.
const dbAsync = require('./db/db-async')
const { ARDENT_API_HOSTNAME } = require('./consts')

module.exports = async (debug = true) => {
console.time('Crawl commodities')
try {
const commodities = await dbAsync.all('SELECT DISTINCT(commodityName) FROM commodities')
if (debug === true) console.log(`Warming cache for ${ARDENT_API_HOSTNAME}`)
for (let i = 0; i < commodities.length; i++) {
const { commodityName } = commodities[i]
const url = `https://${ARDENT_API_HOSTNAME}/v1/commodity/name/${commodityName}/imports`
if (debug === true) console.time(`${i+1} of ${commodities.length} ${commodityName}`)
const res = await fetch(url)
if (!res.ok) console.error(`Crawler error fetching: ${url}`)
if (debug === true) console.timeEnd(`${i+1} of ${commodities.length} ${commodityName}`)
}
} catch (e) {
return console.error('Crawler failed:', e)
}
console.timeEnd('Crawl commodities')
}
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
{
"name": "ardent-api",
"version": "4.11.1",
"version": "4.11.2",
"description": "Ardent API provides access to data submitted to EDDN",
"main": "index.js",
"scripts": {
"start": "node index.js",
"warm-cache": "node scripts/warm-cache.js",
"crawler": "node scripts/crawler.js",
"dev": "cross-env NODE_ENV=development node index.js",
"lint": "npx standard --fix"
},
Expand Down
5 changes: 5 additions & 0 deletions scripts/crawler.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
const crawler = require('../lib/crawler')

// Command-line entry point: run the crawler once, then exit explicitly so
// any open handles (e.g. database connections) don't keep the process alive.
async function main () {
  await crawler()
  process.exit()
}

main()
0 comments on commit 986b20e

Please sign in to comment.