First, install the required dependencies: `npm install node-cron axios mongoose dotenv`
// src/models/UrlConfig.js
const mongoose = require("mongoose");

/**
 * Mongoose model describing one scheduled URL-fetch job.
 * Documents in this collection drive the cron manager at runtime:
 * toggling `isActive` or editing `schedule` changes job behavior
 * without restarting the application.
 */
const urlConfigSchema = new mongoose.Schema(
  {
    // Human-readable label for the job
    name: { type: String, required: true },
    // Endpoint the job will fetch on each run
    url: { type: String, required: true },
    // Cron expression; defaults to every 15 minutes
    schedule: { type: String, required: true, default: "*/15 * * * *" },
    // Inactive jobs are stopped by the cron manager
    isActive: { type: Boolean, default: true },
    // Bookkeeping timestamps maintained by the executor
    lastRun: Date,
    nextRun: Date,
    // Lifecycle state of the most recent execution
    status: {
      type: String,
      enum: ["idle", "running", "completed", "failed"],
      default: "idle",
    },
  },
  { timestamps: true } // Adds createdAt / updatedAt automatically
);

module.exports = mongoose.model("UrlConfig", urlConfigSchema);
Explanation:
The schema defines each job's properties: a friendly name, the target URL, a cron schedule, an active flag, run timestamps, and a status field that moves between "idle", "running", "completed", and "failed".
// src/services/cronManager.js
const cron = require("node-cron");
const axios = require("axios");
const UrlConfig = require("../models/UrlConfig");

// Registry of live cron jobs, keyed by UrlConfig document id.
const jobs = new Map();

/**
 * Boots the cron manager: loads all active job configurations once,
 * then re-syncs with the database every minute so edits to the
 * UrlConfig collection take effect without a restart.
 *
 * Fix: the reload callback is async — passing it to cron.schedule
 * directly meant any DB error during a reload surfaced as an
 * unhandled promise rejection. It is now caught and logged.
 */
async function initializeCronManager() {
  try {
    await loadActiveConfigurations(); // Initial sync on startup
    cron.schedule("* * * * *", () =>
      loadActiveConfigurations().catch((error) =>
        console.error("Config reload failed:", error)
      )
    );
    console.log("Cron Manager initialized");
  } catch (error) {
    console.error("Initialization failed:", error);
  }
}
Explanation:
A `Map` stores all active cron jobs, keyed by job ID. `initializeCronManager` loads the active configurations at startup and then schedules a reload every minute so database changes take effect automatically.
/**
 * Synchronizes the in-memory job registry with the database:
 * stops jobs whose configuration was removed or deactivated,
 * then (re)starts a job for every active configuration.
 */
async function loadActiveConfigurations() {
  const activeConfigs = await UrlConfig.find({ isActive: true });
  const activeIds = new Set(activeConfigs.map((config) => config.id));

  // Tear down jobs that are no longer active in the database.
  [...jobs.keys()]
    .filter((jobId) => !activeIds.has(jobId))
    .forEach((jobId) => stopJob(jobId));

  // Ensure every active configuration has a corresponding job.
  for (const config of activeConfigs) {
    await startOrUpdateJob(config);
  }
}
Explanation:
`loadActiveConfigurations` fetches every configuration with `isActive: true` from the database, stops any jobs that are no longer active, and starts or updates a job for each remaining configuration.
/**
 * Starts a cron job for the given configuration, or restarts it when
 * the configuration changed. Jobs are tracked in the `jobs` map by
 * document id.
 *
 * Fix: the original compared only `schedule`, so a change to `url`
 * with the same schedule kept executing the stale config captured in
 * the old closure. We now compare `url` as well and store it alongside
 * the job handle.
 *
 * @param {object} config - A UrlConfig document (id, schedule, name, url).
 */
async function startOrUpdateJob(config) {
  const { id, schedule, name, url } = config;
  const existing = jobs.get(id);
  // Nothing relevant changed — keep the running job as-is.
  if (existing && existing.schedule === schedule && existing.url === url) return;
  stopJob(id); // Tear down any previous version of this job
  const job = cron.schedule(schedule, () => executeJob(config));
  jobs.set(id, { job, schedule, url }); // Track handle + config used
  console.log(`Started job: ${name}`);
}
Explanation:
`startOrUpdateJob` uses `cron.schedule` to create a job and adds it to the `jobs` map, skipping jobs whose configuration has not changed since the last sync.
/**
 * Stops the cron job registered under `jobId` (if any) and removes
 * its entry from the `jobs` registry. A no-op for unknown ids.
 */
function stopJob(jobId) {
  const entry = jobs.get(jobId);
  if (!entry) return; // Nothing registered under this id
  entry.job.stop();
  jobs.delete(jobId);
  console.log(`Stopped job: ${jobId}`);
}
Explanation:
`stopJob` stops a running job and removes its entry from the `jobs` map.
/**
 * Runs one scheduled fetch: marks the job "running", GETs the URL,
 * then records "completed" (with the next run time) or "failed".
 *
 * Fix: the status write in the catch path is now guarded — if that DB
 * update itself failed, the rejection would escape into node-cron's
 * callback as an unhandled promise rejection.
 *
 * @param {object} config - A UrlConfig document (id, name, url, schedule).
 */
async function executeJob(config) {
  const { id, name, url, schedule } = config;
  try {
    // Mark as running and stamp the start time.
    await UrlConfig.findByIdAndUpdate(id, { status: "running", lastRun: new Date() });
    const response = await axios.get(url); // Fetch the configured URL
    console.log(`Response from ${name}:`, response.data);
    await UrlConfig.findByIdAndUpdate(id, {
      status: "completed",
      nextRun: calculateNextRun(schedule), // Record next execution time
    });
  } catch (error) {
    console.error(`Execution failed for ${name}:`, error);
    try {
      await UrlConfig.findByIdAndUpdate(id, { status: "failed" });
    } catch (updateError) {
      // Don't let a failed status write crash the cron callback.
      console.error(`Could not record failure for ${name}:`, updateError);
    }
  }
}
Explanation:
`executeJob` sets the status to "running" and records the `lastRun` time, uses `axios` to fetch data from the specified URL, and then sets the status to "completed" or "failed" based on the outcome.
// Computes the next execution Date for a cron expression.
// NOTE(review): `node-cron` does not export `parseExpression` — that API
// belongs to the separate `cron-parser` package. As written, this line
// throws a TypeError at runtime. Either add `require("cron-parser")`
// and call its `parseExpression`, or drop the `nextRun` bookkeeping.
// TODO: confirm which dependency was intended.
function calculateNextRun(schedule) {
return cron.parseExpression(schedule).next().toDate(); // Determine next execution
}
Explanation:
`calculateNextRun` parses the cron expression to calculate the next execution time. (Note: `parseExpression` is provided by the `cron-parser` package, not by `node-cron` itself — install and require it separately.)
// index.js — application entry point.
require("dotenv").config(); // Load MONGODB_URI and friends from .env
const mongoose = require("mongoose");
const { initializeCronManager } = require("./src/services/cronManager");

/**
 * Connects to MongoDB (connection string from the MONGODB_URI env var)
 * and boots the cron manager. Exits with code 1 if either step fails.
 */
async function startApplication() {
  try {
    await mongoose.connect(process.env.MONGODB_URI);
    console.log("Connected to MongoDB");

    await initializeCronManager();
    console.log("Application started");
  } catch (error) {
    console.error("Startup failed:", error);
    process.exit(1); // Nothing useful can run without DB + cron manager
  }
}

startApplication();
Explanation:
The entry point loads environment variables from `.env`, connects to MongoDB, and starts the cron manager. With this system you can dynamically control all your cron jobs by simply updating the MongoDB database — ideal for automating tasks like background monitoring or periodic API calls.