Commit 69e2540d authored by Audrey Hamelers

#1185 add pdf and ftp cronjobs

parent 11440d23
Part of 3 merge requests: !380 Pipeline changes, !379 Build step added for the master branch, !378 k8s release
@@ -2,29 +2,23 @@ const Client = require('ftp')
const logger = require('@pubsweet/logger')
const config = require('config')
const fs = require('fs')
const async = require('async')
const { execSync } = require('child_process')
const ftpUser = require('../utils/ftpUser')
const { errorDevEmail } = require('../email')
const { execSync } = require('child_process')
const FtpAccount = require('../xpub-model/entities/ftpAccount/data-access')
const async = require('async')
const { checkJobStatus } = require('../job-runner')
const { FtpAccountManager } = require('../xpub-model')
if (!process.env.ENABLE_CRONJOB_FROMFTPBULKUPLOAD) {
logger.info(
'ENABLE_CRONJOB_FROMFTP-BULKUPLOAD not defined. fromFtp-BulkUpload cronjob exits.',
'ENABLE_CRONJOB_FROMFTPBULKUPLOAD not defined. fromFtp-BulkUpload cronjob exits.',
)
process.exit(0)
}
const BULK_UPLOAD_FILE_EXT = new RegExp(/\S+\.tar\.gz$/i)
;(async () => {
const ftpBulkUploaders = await FtpAccount.selectBulkUploaders()
ftpBulkUploaders.forEach(async user => {
queue.push(user)
})
})()
// Create queue
const queue = async.queue(async user => {
logger.info(
`Bulk Upload: Initialising job for file download from FTP folder: ${user.username}`,
@@ -61,10 +55,28 @@ const queue = async.queue(async user => {
return true
})
queue.drain(() => {
logger.info('Cron job completed.')
// Check and run cronjob
;(async () => {
await checkJobStatus(
'from-ftp-bulkupload',
async () => {
const ftpBulkUploaders = await FtpAccountManager.findBulkUploaders()
if (!ftpBulkUploaders) {
logger.info('No FTP bulk upload users in database. Exiting.')
return false
}
logger.info('Queueing bulk upload users.')
ftpBulkUploaders.forEach(async user => {
queue.push(user)
})
await queue.drain()
logger.info('Cron job completed. Exiting.')
},
// FTP healthcheck
true,
)
process.exit(0)
})
})()
queue.error((err, task) => {
logger.error(`Error in task ${task}: ${err}`)
......
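Both call sites in this file rely on checkJobStatus(name, jobFn, ftpHealthcheck) from ../job-runner. That module's implementation is not part of this commit; the following is only a sketch of the control flow the call sites imply, using an in-memory Map, stub helpers (findJob, saveJob, ftpIsReachable) and console in place of the real job table, FTP probe and @pubsweet/logger.

const logger = console // stands in for @pubsweet/logger in this sketch

// In-memory stand-ins for the seeded cron job rows (name, running,
// last_status, last_pass) and for whatever the "FTP healthcheck" flag probes.
const jobs = new Map()
const findJob = async name => jobs.get(name)
const saveJob = async (name, fields) =>
  jobs.set(name, { ...(jobs.get(name) || { name }), ...fields })
const ftpIsReachable = async () => true

async function checkJobStatus(name, jobFn, ftpHealthcheck = false) {
  const job = await findJob(name)
  if (job && job.running) {
    logger.info(`${name}: previous run still in progress, skipping`)
    return
  }
  if (ftpHealthcheck && !(await ftpIsReachable())) {
    logger.warn(`${name}: FTP healthcheck failed, skipping run`)
    return
  }
  await saveJob(name, { running: true })
  try {
    await jobFn()
    await saveJob(name, {
      running: false,
      last_status: 'pass',
      last_pass: new Date(),
    })
  } catch (err) {
    logger.error(`${name} failed: ${err}`)
    await saveJob(name, { running: false, last_status: 'fail' })
  }
}

// Usage, mirroring the cronjob above:
// await checkJobStatus('from-ftp-bulkupload', async () => { /* queue work */ }, true)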
@@ -5,7 +5,8 @@ const config = require('config')
const fs = require('fs')
const ftpUser = require('../utils/ftpUser')
const { errorDevEmail } = require('../email')
const FtpAccount = require('../xpub-model/entities/ftpAccount/data-access')
const { checkJobStatus } = require('../job-runner')
const { FtpAccountManager } = require('../xpub-model')
if (!process.env.ENABLE_CRONJOB_FROMFTPTAGGERIMPORT) {
logger.info(
@@ -18,32 +19,41 @@ const BULK_UPLOAD_FILE_EXT = new RegExp(/\S+\.tar\.gz$/i)
const ftp = new Client()
;(async () => {
const ftpTagger = await FtpAccount.findByFtpUsername('beta_plus_tagger')
await checkJobStatus(
'from-ftp-taggerimport',
async () => {
const ftpTagger = await FtpAccountManager.findByFtpUsername(
'beta_plus_tagger',
)
// create local directories
const parentRootPathLocal = `${process.env.HOME}/${config.get(
'ftp_directory',
)}/${ftpTagger.username}`
const rootPathLocal = `${parentRootPathLocal}/Done`
const ignoreNewFolder = `${parentRootPathLocal}/New`
const errorFolder = `${parentRootPathLocal}/Error`
// create local directories
const parentRootPathLocal = `${process.env.HOME}/${config.get(
'ftp_directory',
)}/${ftpTagger.username}`
const rootPathLocal = `${parentRootPathLocal}/Done`
const ignoreNewFolder = `${parentRootPathLocal}/New`
const errorFolder = `${parentRootPathLocal}/Error`
const cmd = `mkdir -p ${rootPathLocal} ${ignoreNewFolder} ${errorFolder}`
execSync(cmd)
const cmd = `mkdir -p ${rootPathLocal} ${ignoreNewFolder} ${errorFolder}`
execSync(cmd)
logger.info('Tagger Import: downloading files from FTP')
const beforeUpdate = Date.now()
try {
const val = await checkNewFtpPackage(ftpTagger.username, rootPathLocal)
logger.info(
`Tagger Import: ${val} file(s) downloaded to local directory ${rootPathLocal} in ${Date.now() -
beforeUpdate} ms`,
)
} catch (err) {
errorDevEmail('Tagger Import Cron Job', err)
logger.error(err)
}
close(ftp)
logger.info('Tagger Import: downloading files from FTP')
const beforeUpdate = Date.now()
try {
const val = await checkNewFtpPackage(ftpTagger.username, rootPathLocal)
logger.info(
`Tagger Import: ${val} file(s) downloaded to local directory ${rootPathLocal} in ${Date.now() -
beforeUpdate} ms`,
)
} catch (err) {
errorDevEmail('Tagger Import Cron Job', err)
logger.error(err)
}
close(ftp)
},
// FTP healthcheck
true,
)
process.exit(0)
})()
......
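A side note on the directory setup in this hunk: the three folders created with `mkdir -p` via execSync could equally be created with Node's fs API. A small sketch, with literal values standing in for the config.get('ftp_directory') and ftpTagger.username lookups above:

const fs = require('fs')
const path = require('path')

// Stand-ins for the values derived from config and the FTP account above.
const parentRootPathLocal = path.join(
  process.env.HOME || '.',
  'ftp_directory_value', // assumption: whatever config.get('ftp_directory') returns
  'beta_plus_tagger',
)

// Same effect as `mkdir -p <parent>/Done <parent>/New <parent>/Error`.
for (const dir of ['Done', 'New', 'Error']) {
  fs.mkdirSync(path.join(parentRootPathLocal, dir), { recursive: true })
}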
@@ -10,17 +10,20 @@ const config = require('config')
const getUser = require('../utils/user.js')
const { ManuscriptManager, FileManager } = require('../xpub-model')
const files = require('../utils/files.js')
const { checkJobStatus } = require('../job-runner')
const { baseUrl } = config.get('pubsweet-server')
const pdfTransformerApi = config.get('ncbiPdfTransformerApi')
;(async () => {
const beforeUpdate = Date.now()
const adminUser = await getUser.getAdminUser()
await getDeposits(adminUser.id)
logger.info(
`PDF conversion process finished in ${Date.now() - beforeUpdate} ms`,
)
await checkJobStatus('pdf-converter', async () => {
const beforeUpdate = Date.now()
const adminUser = await getUser.getAdminUser()
await getDeposits(adminUser.id)
logger.info(
`PDF conversion process finished in ${Date.now() - beforeUpdate} ms`,
)
})
process.exit()
})()
......
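After this change all three entrypoints share the same shape: do the work inside checkJobStatus, log the elapsed time, and exit explicitly (process.exit() with no argument exits with code 0, matching the 0 passed in the FTP jobs). This job also omits the third argument, so no FTP healthcheck is requested. A condensed template of that shape, with the job-specific body elided and the module paths taken from the diff above:

const logger = require('@pubsweet/logger')
const { checkJobStatus } = require('../job-runner')

;(async () => {
  await checkJobStatus('pdf-converter', async () => {
    const beforeUpdate = Date.now()
    // ...job-specific work goes here (deposits, FTP downloads, etc.)...
    logger.info(`Job finished in ${Date.now() - beforeUpdate} ms`)
  })
  process.exit(0)
})()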
@@ -3,7 +3,7 @@ const FtpAccount = require('./data-access')
const FtpAccountManager = {
findByFtpUsername: FtpAccount.findByFtpUsername,
findAll: FtpAccount.selectAll,
findByType: FtpAccount.selectBulkUploaders,
findBulkUploaders: FtpAccount.selectBulkUploaders,
modelName: 'FtpAccount',
model: FtpAccount,
}
......
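The facade change is only a rename: selectBulkUploaders is now exposed as findBulkUploaders instead of the vaguer findByType, which is what the bulk upload cronjob above calls. A toy sketch of the facade pattern with a stubbed data-access layer:

// Stub of ./data-access, for illustration only.
const FtpAccount = {
  findByFtpUsername: async username => ({ username }),
  selectAll: async () => [],
  selectBulkUploaders: async () => [{ username: 'bulk_uploader_1' }],
}

const FtpAccountManager = {
  findByFtpUsername: FtpAccount.findByFtpUsername,
  findAll: FtpAccount.selectAll,
  findBulkUploaders: FtpAccount.selectBulkUploaders,
  modelName: 'FtpAccount',
  model: FtpAccount,
}

// Call site, as in the bulk upload cronjob:
FtpAccountManager.findBulkUploaders().then(users => {
  users.forEach(user => console.log(user.username))
})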
@@ -112,6 +112,31 @@ exports.seed = async (knex, Promise) => {
frequency: '0 5 * * 0,3',
description: 'Updates the local NLM Catalog-based journal list',
},
{
name: 'pdf-converter',
running: false,
last_status: 'pass',
last_pass: midnight,
frequency: '*/1 * * * *',
description:
'Runs checks to send and receive files from NCBI PDF converter',
},
{
name: 'from-ftp-bulkupload',
running: false,
last_status: 'pass',
last_pass: midnight,
frequency: '*/3 * * * *',
description: 'Retrieves bulk upload files from FTP',
},
{
name: 'from-ftp-taggerimport',
running: false,
last_status: 'pass',
last_pass: midnight,
frequency: '*/3 * * * *',
description: 'Retrieves tagger files from FTP',
},
]
// Inserts seed entries if name not already in database
......
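The frequencies are standard cron syntax: '*/1 * * * *' runs every minute and '*/3 * * * *' every three minutes. The trailing comment says entries are only inserted when the name is not already present; a minimal sketch of such a guard with knex (the table name 'cron_job' and the reduced field set are assumptions, not the repository's actual seed):

// Sketch only: table name and field set are assumed.
exports.seed = async knex => {
  const cronJobs = [
    {
      name: 'pdf-converter',
      running: false,
      last_status: 'pass',
      frequency: '*/1 * * * *',
      description: 'Runs checks to send and receive files from NCBI PDF converter',
    },
    // ...the from-ftp-bulkupload and from-ftp-taggerimport entries follow the same shape
  ]

  for (const job of cronJobs) {
    const existing = await knex('cron_job')
      .where({ name: job.name })
      .first()
    if (!existing) {
      await knex('cron_job').insert(job)
    }
  }
}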