Best Practices

This guide covers recommended practices for using Dispatched in production environments, focusing on security, reliability, and performance.

Security Best Practices

API Key Management

  1. Environment Variables
# .env file
DISPATCHED_API_KEY=disp_k1_xxxxxxxxxxxx
DISPATCHED_WEBHOOK_SECRET=disp_whsec_xxxxxxxxxxxx
// Load with a secure config manager
require('dotenv').config();
const apiKey = process.env.DISPATCHED_API_KEY;
const webhookSecret = process.env.DISPATCHED_WEBHOOK_SECRET;
  2. Key Rotation
  • Rotate API keys periodically
  • Use different keys for different environments
  • Revoke compromised keys immediately
  • Monitor key usage in the dashboard

Webhook Security

  1. Signature Verification
// Authenticate an incoming webhook by comparing the Authorization header
// against the shared secret as a bearer token. Throws when the header is
// missing or does not match.
// NOTE(review): for production, consider crypto.timingSafeEqual to avoid
// timing side channels when comparing secrets.
function verifyWebhook(req) {
  const expected = `Bearer ${process.env.DISPATCHED_WEBHOOK_SECRET}`;
  const provided = req.headers.authorization;

  if (provided === expected) {
    return;
  }
  throw new Error('Invalid webhook signature');
}

// Reject unauthenticated deliveries with 401 before doing any work.
app.post('/webhook', express.json(), (req, res) => {
  try {
    verifyWebhook(req);
    // Process webhook...
  } catch (err) {
    res.status(401).json({ error: 'Unauthorized' });
  }
});
  2. HTTPS Only
  • Always use HTTPS for webhook endpoints
  • Configure TLS 1.2 or higher
  • Keep SSL certificates up to date

Reliability Practices

Idempotency

Implement idempotent webhook handlers to handle duplicate deliveries. This is rare but can happen due to network issues or retries.

// Idempotent job processor: each jobId is handled at most once.
// Returns { status: 'already_processed' } for duplicate deliveries and
// { status: 'success', result } on the first successful run. On failure it
// records the transaction as failed and rethrows so the webhook handler
// can answer with a 5xx and the job gets retried.
const processJob = async (jobId, payload) => {
  // Duplicate delivery? Skip the work entirely.
  const exists = await db.transactions.findOne({ jobId });
  if (exists) {
    return { status: 'already_processed' };
  }

  // Record the transaction as processing before doing any work.
  await db.transactions.create({
    jobId,
    status: 'processing',
    processedAt: new Date()
  });

  try {
    const result = await performWork(payload);
    await db.transactions.update({
      where: { jobId },
      data: {
        status: 'completed',
        processedAt: new Date(),
        result
      }
    });
    // Return inside the try: `result` is only in scope here, and a failed
    // job must never fall through to a success response.
    return { status: 'success', result };
  } catch (error) {
    // Mark the transaction as failed, then surface the error so the
    // caller responds with 500 and the delivery is retried.
    await db.transactions.update({
      where: { jobId },
      data: {
        status: 'failed',
        processedAt: new Date()
      }
    });
    throw error;
  }
};

// Webhook entry point: delegate to the idempotent processor and map any
// failure to a 500 so the platform retries the delivery.
app.post('/webhook', async (req, res) => {
  const { jobId, payload } = req.body;

  try {
    const outcome = await processJob(jobId, payload);
    res.status(200).json(outcome);
  } catch (error) {
    res.status(500).json({ error: error.message });
  }
});

Error Handling

Implement comprehensive error handling:

/**
 * Domain error for job processing failures.
 * @param {string} message - Human-readable failure description.
 * @param {boolean} [retryable=true] - Whether the job should be retried.
 */
class JobError extends Error {
  constructor(message, retryable = true) {
    super(message);
    this.name = 'JobError'; // without this, logs report the generic "Error"
    this.retryable = retryable;
  }
}

/**
 * Express handler: validates the payload, runs the job, and maps errors to
 * HTTP statuses (500 => dispatcher retries, 200 => delivery acknowledged).
 */
const processWebhook = async (req, res) => {
  const { jobId, payload } = req.body;

  try {
    // Reject malformed payloads permanently — retrying cannot fix them.
    if (!isValidPayload(payload)) {
      throw new JobError('Invalid payload', false);
    }

    // Process the job
    const result = await processJob(payload);

    res.status(200).json({ status: 'success', result });
  } catch (error) {
    // Log error with context
    console.error('Job processing failed', {
      jobId,
      error: error.message,
      stack: error.stack,
      retryable: error.retryable
    });

    // Unknown errors carry no `retryable` flag and must default to
    // retryable — answering 200 for an unexpected TypeError would
    // silently drop the job. Only an explicit `retryable: false`
    // acknowledges the delivery so it is not redelivered.
    if (error.retryable !== false) {
      res.status(500).json({ error: error.message });
    } else {
      res.status(200).json({
        status: 'failed',
        error: error.message
      });
    }
  }
};

Performance Optimization

Payload Size

  • Keep payloads small and focused (payloads have a max size of 10KB)
  • Store large data externally and pass references
// ❌ Bad: Large payload
{
  "payload": {
    "task": "process_image",
    "imageData": "base64_encoded_large_image_data_here..."
  }
}

// ✅ Good: Reference-based payload
{
  "payload": {
    "task": "process_image",
    "imageUrl": "s3://my-bucket/images/123.jpg",
    "metadata": {
      "size": "original",
      "format": "jpg"
    }
  }
}

Batch Processing

Group related operations when possible:

// ❌ Bad: Multiple individual jobs — one dispatch call per user, each of
// which can fail or be retried independently
for (const userId of userIds) {
  await dispatchJob({
    task: "send_notification",
    userId
  });
}

// ✅ Good: Single batch job — one dispatch call covers the whole group
await dispatchJob({
  task: "send_batch_notifications",
  userIds: userIds,
  batchId: "batch_123"
});

Monitoring and Logging

Structured Logging

const logger = require('./logger');

// Structured logging around job processing: one log line at start,
// completion (with duration), and failure (with stack).
app.post('/webhook', async (req, res) => {
  // NOTE(review): `attempt` and `attemptNumber` look like duplicates of the
  // same retry counter — confirm the actual field name against the API
  // reference and drop the other.
  const { jobId, attempt, attemptNumber, payload } = req.body;
  // Capture the start timestamp up front so `duration` below is defined.
  const startTime = Date.now();

  logger.info('Processing job', {
    jobId,
    attempt,
    attemptNumber,
    task: payload.task,
    startedAt: new Date().toISOString()
  });

  try {
    await processJob(payload);

    logger.info('Job completed', {
      jobId,
      attempt,
      attemptNumber,
      duration: Date.now() - startTime
    });

    res.status(200).json({ status: 'success' });
  } catch (error) {
    logger.error('Job failed', {
      jobId,
      attempt,
      attemptNumber,
      error: error.message,
      stack: error.stack
    });

    res.status(500).json({ error: error.message });
  }
});

Next Steps