How to upload a document to an AWS s3 bucket for use with pdf-reports node package when payload is 'too large'

I’m using the @journeyapps/pdf-reports node package to generate PDFs in my app.

I’ve run into the below error a few times, which seems to point to a payload that is too large to handle. This must be an internal request that the pdf.generatePdf function makes.

(CloudCode Logs):
11:03:48.042 [TASK:INFO] Error: Failed to generate PDF: Request Entity Too Large

{"message": "Request Too Long"}, Error: Failed to generate PDF: Request Entity Too Large

{"message": "Request Too Long"}

The error seems to be the payload sent to the pdf-reports generator is too large. It points to a JourneyApps domain (source) which should allow up to a 5MB payload similar to CloudCode limits.

I see there is an option to generate the pdf directly from an url.

// Generate from an online URL
await pdf.generatePdf({ url: "" });

Does anyone maybe have an example of how I can upload my HTML payload to an AWS s3 bucket in order to send the HTML file link to pdf.generatePdf function?

Hi @forumfred

See the code snippet below, which showcases an end-to-end example of how to generate a PDF report by passing an AWS S3 bucket file link as the url to the pdf.generatePdf function.

// For this to work, setup an AWS s3 bucket and populate the env vars

// Handlebars compiles the HTML template; pdf-reports renders it to a PDF.
const handlebars = require("handlebars");
const pdf = require("@journeyapps/pdf-reports");

// AWS S3 client setup — region and credentials come from environment variables.
const AWS = require('aws-sdk');
// Use update() to merge into the SDK's existing config; assigning a plain
// object to AWS.config would replace it and drop the SDK's defaults.
AWS.config.update({ sslEnabled: true });
const s3Options = {
    region: 'us-east-1',
    accessKeyId: process.env.APP__AWS_ACCESS_KEY,
    secretAccessKey: process.env.APP__AWS_SECRET_KEY
}; // NOTE: this closing brace was missing in the original snippet (syntax error)
const s3 = new AWS.S3(s3Options);
// Signed URLs (and the expiry timestamps we report) last 7 days, in seconds.
const EXPIRES_AFTER = 86400 * 7;

/**
 * End-to-end example: compile an HTML template, upload the rendered HTML to
 * S3, generate a PDF from the signed URL, then upload the resulting PDF so
 * its link can be viewed.
 */
export async function run() {
    // Timestamp used to make the uploaded S3 object keys unique per run.
    const today = new Date().valueOf();

    // Minimal Handlebars template — the original snippet's template literal
    // was truncated and never closed; replace with your real report markup.
    const html = `
    <!DOCTYPE html>
    <html>
        <head><title>{{title}}</title></head>
        <body><h1>{{title}}</h1></body>
    </html>`;

    const _pdfTemplate = handlebars.compile(html);
    const _pdfHtml = _pdfTemplate({ title: "Title Here ..." });

    // Upload _pdfHtml to the S3 bucket, then retrieve the signed file URL.
    const buffer = Buffer.from(_pdfHtml, 'utf8');
    let documentDetails = await upload(buffer, 'test_pdfHtml' + today + '.html', 'text/html');

    if (documentDetails) {
        console.log("HTML link - ", documentDetails.url);

        // Generate the PDF from the signed HTML link instead of posting the
        // (possibly too-large) HTML payload directly.
        const pdf_document = await pdf.generatePdf({ url: documentDetails.url });

        // Here you could save the document on a db object.
        // For testing we just upload the PDF to the S3 bucket so we can view
        // a link to the final document.
        const pdf_buffer = await pdf_document.toBuffer();
        // Fix: the original used a '.html' extension for the generated PDF.
        documentDetails = await upload(pdf_buffer, 'test_pdf' + today + '.pdf', 'application/pdf');

        if (documentDetails) {
            console.log("PDF link - ", documentDetails.url);
        }
    }
}

/**
 * Upload a buffer to the configured S3 bucket and return signed-URL details.
 * @param {Buffer} buffer - File contents to store.
 * @param {string} filename - S3 object key.
 * @param {string} ContentType - MIME type to store with the object.
 * @returns {Promise<{url: string, expiresAt: Date}>} signed URL + expiry.
 * @throws {Error} if `buffer` is not a Buffer.
 */
async function upload(buffer, filename, ContentType) {
    if (!buffer || !(buffer instanceof Buffer)) {
        return Promise.reject(new Error('DocumentStore: buffer is not a Buffer'));
    }

    // aws-sdk v2: putObject returns an AWS.Request, not a Promise —
    // .promise() is required for `await` to actually wait for the upload.
    await s3.putObject({
        Bucket: process.env.APP__DOCUMENT_BUCKET_NAME,
        Key: filename,
        Body: buffer,
        ContentType: ContentType
    }).promise();

    return getDocumentDetails(filename);
}

/**
 * Build a pre-signed GET URL for an object in the document bucket.
 * @param {string} filename - S3 object key.
 * @returns {{url: string, expiresAt: Date}} signed URL and when it expires.
 */
function getDocumentDetails(filename) {
    const params = { Bucket: process.env.APP__DOCUMENT_BUCKET_NAME, Key: filename, Expires: EXPIRES_AFTER };
    const url = s3.getSignedUrl('getObject', params);
    // Fix: the original computed `new Date( + EXPIRES_AFTER * 1000)`, i.e. a
    // date ~7 days after the 1970 epoch. Expiry must be relative to now.
    const expiry = new Date(Date.now() + EXPIRES_AFTER * 1000);

    return {
        url: url,
        expiresAt: expiry
    };
}

For details on how to set environment variables (process.env.VARIABLE_NAME), see this forum post.