Cleanup tasks
parent 397a98ffd4
commit 180ae1f444
@@ -19,7 +19,7 @@ If you speak a language other than English and you want to help translate Crab F
1. Clone the repo.
2. Run `yarn` in both backend and frontend folders.
3. Run `node index.js` in the backend folder to start the API.
3. Run `node index.js` in the backend folder to start the API. **Note:** you will need a Google Cloud app set up with Datastore enabled, and the `GOOGLE_APPLICATION_CREDENTIALS` environment variable set to the path of your service account key (see the sketch below).
4. Run `yarn start` in the frontend folder to start the front end.
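A minimal sketch of what the note in step 3 means in practice, assuming the backend uses the standard `@google-cloud/datastore` client (the client setup itself is not part of this diff): the client authenticates via Application Default Credentials, so pointing `GOOGLE_APPLICATION_CREDENTIALS` at a service account key file is enough and no key path needs to be passed in code.

```js
// Sketch only: with GOOGLE_APPLICATION_CREDENTIALS pointing at a service
// account key file, the Datastore client picks up the credentials itself.
const { Datastore } = require('@google-cloud/datastore');

const datastore = new Datastore(); // uses Application Default Credentials
```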
### 🔌 Browser extension
@@ -34,6 +34,7 @@ The browser extension in `crabfit-browser-extension` can be tested by first runn
### ⚙️ Backend
1. Change into the backend folder: `cd crabfit-backend`
2. Deploy the backend: `gcloud app deploy --project=crabfit --version=v1`
3. To deploy cron jobs (i.e. the weekly cleanup of old events), run `gcloud app deploy cron.yaml`
### 🔌 Browser extension

Compress everything inside the `crabfit-browser-extension` folder and upload that zip to deploy via the Chrome Web Store and the Mozilla Add-on store.
crabfit-backend/cron.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
cron:
- description: "clean up old events"
  url: /tasks/cleanup
  schedule: every monday 09:00
- description: "clean up old events without a visited date"
  url: /tasks/legacyCleanup
  schedule: every tuesday 09:00
crabfit-backend/index.js
@@ -14,6 +14,9 @@ const createPerson = require('./routes/createPerson');
const login = require('./routes/login');
const updatePerson = require('./routes/updatePerson');

const taskCleanup = require('./routes/taskCleanup');
const taskLegacyCleanup = require('./routes/taskLegacyCleanup');

const app = express();
const port = 8080;
const corsOptions = {
@@ -47,6 +50,10 @@ app.post('/event/:eventId/people', createPerson);
app.post('/event/:eventId/people/:personName', login);
app.patch('/event/:eventId/people/:personName', updatePerson);

// Tasks
app.get('/tasks/cleanup', taskCleanup);
app.get('/tasks/legacyCleanup', taskLegacyCleanup);

app.listen(port, () => {
  console.log(`Crabfit API listening at http://localhost:${port} in ${process.env.NODE_ENV === 'production' ? 'prod' : 'dev'} mode`)
});
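A possible way to exercise these task routes locally (not part of this commit, and the header value here is an assumption): in production App Engine adds the `X-Appengine-Cron` header to genuine cron requests and strips it from external traffic, which is what the task handlers below rely on, so a local request can simply fake it.

```js
// Hypothetical local smoke test, assuming the API is running on localhost:8080.
const http = require('http');

http.get(
  {
    host: 'localhost',
    port: 8080,
    path: '/tasks/cleanup',
    headers: { 'X-Appengine-Cron': 'true' }, // set by App Engine for real cron runs
  },
  res => console.log('cleanup task responded with', res.statusCode)
);
```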
crabfit-backend/routes/taskCleanup.js (new file, 46 lines)
@@ -0,0 +1,46 @@
const dayjs = require('dayjs');

module.exports = async (req, res) => {
  if (req.header('X-Appengine-Cron') === undefined) {
    return res.status(400).send('This task can only be run from a cron job');
  }

  const threeMonthsAgo = dayjs().subtract(3, 'month').unix();

  console.log('Running cleanup task at', dayjs().format('h:mma D MMM YYYY'));

  try {
    // Fetch events that haven't been visited in over 3 months
    const eventQuery = req.datastore.createQuery(req.types.event).filter('visited', '<', threeMonthsAgo);
    let oldEvents = (await req.datastore.runQuery(eventQuery))[0];

    if (oldEvents && oldEvents.length > 0) {
      let oldEventIds = oldEvents.map(e => e[req.datastore.KEY].name);
      console.log('Found', oldEventIds.length, 'events to remove');

      // Fetch availabilities linked to the events discovered
      let peopleDiscovered = 0;
      await Promise.all(oldEventIds.map(async (eventId) => {
        const peopleQuery = req.datastore.createQuery(req.types.person).filter('eventId', eventId);
        let oldPeople = (await req.datastore.runQuery(peopleQuery))[0];

        if (oldPeople && oldPeople.length > 0) {
          peopleDiscovered += oldPeople.length;
          await req.datastore.delete(oldPeople.map(person => person[req.datastore.KEY]));
        }
      }));

      await req.datastore.delete(oldEvents.map(event => event[req.datastore.KEY]));

      console.log('Cleanup successful:', oldEventIds.length, 'events and', peopleDiscovered, 'people removed');

      res.sendStatus(200);
    } else {
      console.log('Found', 0, 'events to remove, ending cleanup');
      res.sendStatus(404);
    }
  } catch (e) {
    console.error(e);
    res.sendStatus(404);
  }
};
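For reference, a small illustration (with made-up values) of the `visited` filter above: `visited` is stored as a unix timestamp in seconds, so "not visited in over 3 months" reduces to a numeric comparison against `dayjs().subtract(3, 'month').unix()`.

```js
// Illustration only, using hypothetical timestamps.
const dayjs = require('dayjs');

const threeMonthsAgo = dayjs().subtract(3, 'month').unix();
const lastVisited = dayjs().subtract(4, 'month').unix(); // example stale event

console.log(lastVisited < threeMonthsAgo); // true: this event would be deleted
```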
crabfit-backend/routes/taskLegacyCleanup.js (new file, 68 lines)
@@ -0,0 +1,68 @@
const dayjs = require('dayjs');

module.exports = async (req, res) => {
  if (req.header('X-Appengine-Cron') === undefined) {
    return res.status(400).send('This task can only be run from a cron job');
  }

  const threeMonthsAgo = dayjs().subtract(3, 'month').unix();

  console.log('Running LEGACY cleanup task at', dayjs().format('h:mma D MMM YYYY'));

  try {
    // Fetch events that haven't been visited in over 3 months
    const eventQuery = req.datastore.createQuery(req.types.event).order('created');
    let oldEvents = (await req.datastore.runQuery(eventQuery))[0];

    oldEvents = oldEvents.filter(event => !event.hasOwnProperty('visited'));

    if (oldEvents && oldEvents.length > 0) {
      console.log('Found', oldEvents.length, 'events that were missing a visited date');

      // Filter events that are older than 3 months and missing a visited date
      oldEvents = oldEvents.filter(event => event.created < threeMonthsAgo);

      if (oldEvents && oldEvents.length > 0) {
        let oldEventIds = oldEvents.map(e => e[req.datastore.KEY].name);

        // Fetch availabilities linked to the events discovered
        let eventsRemoved = 0;
        let peopleRemoved = 0;
        await Promise.all(oldEventIds.map(async (eventId) => {
          const peopleQuery = req.datastore.createQuery(req.types.person).filter('eventId', eventId);
          let oldPeople = (await req.datastore.runQuery(peopleQuery))[0];

          let deleteEvent = true;
          if (oldPeople && oldPeople.length > 0) {
            oldPeople.forEach(person => {
              if (person.created >= threeMonthsAgo) {
                deleteEvent = false;
              }
            });
          }
          if (deleteEvent) {
            if (oldPeople && oldPeople.length > 0) {
              peopleRemoved += oldPeople.length;
              await req.datastore.delete(oldPeople.map(person => person[req.datastore.KEY]));
            }
            eventsRemoved++;
            await req.datastore.delete(req.datastore.key([req.types.event, eventId]));
          }
        }));

        console.log('Legacy cleanup successful:', eventsRemoved, 'events and', peopleRemoved, 'people removed');

        res.sendStatus(200);
      } else {
        console.log('Found', 0, 'events that are older than 3 months and missing a visited date, ending LEGACY cleanup');
        res.sendStatus(404);
      }
    } else {
      console.error('Found no events that are missing a visited date, ending LEGACY cleanup [DISABLE ME!]');
      res.sendStatus(404);
    }
  } catch (e) {
    console.error(e);
    res.sendStatus(404);
  }
};
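A short illustration of the keep/delete rule implemented above (hypothetical data): a legacy event, i.e. one without a `visited` field, is only deleted when the event itself and every person attached to it were created more than three months ago.

```js
// Illustration only: equivalent to the deleteEvent flag logic above.
const threeMonthsAgo = 1600000000; // example cutoff, unix seconds

const people = [{ created: 1590000000 }, { created: 1650000000 }];
const deleteEvent = people.every(person => person.created < threeMonthsAgo);

console.log(deleteEvent); // false: one recent response keeps the event
```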
@@ -217,3 +217,29 @@ paths:
          description: "Not found"
        400:
          description: "Invalid data"
  "/tasks/cleanup":
    get:
      summary: "Delete events inactive for more than 3 months"
      operationId: "taskCleanup"
      tags:
        - tasks
      responses:
        200:
          description: "OK"
        404:
          description: "Not found"
        400:
          description: "Not called from a cron job"
  "/tasks/legacyCleanup":
    get:
      summary: "Delete events inactive for more than 3 months that don't have a visited date"
      operationId: "taskLegacyCleanup"
      tags:
        - tasks
      responses:
        200:
          description: "OK"
        404:
          description: "Not found"
        400:
          description: "Not called from a cron job"
|
||||
|
|
|
|||
Loading…
Reference in a new issue