fix: sequentially convert each pdf page into a BufferedImage to avoid exceeding the memory limit (OutOfMemoryError) for large pdf files

This commit is contained in:
sbplat 2023-12-28 03:23:55 +00:00
parent 4e991e7ec2
commit 1f10693eaf

View file

@@ -205,11 +205,6 @@ public class PdfUtils {
int pageCount = document.getNumberOfPages();
List<BufferedImage> images = new ArrayList<>();
// Create images of all pages
for (int i = 0; i < pageCount; i++) {
images.add(pdfRenderer.renderImageWithDPI(i, DPI, colorType));
}
// Create a ByteArrayOutputStream to save the image(s) to
ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -226,8 +221,8 @@ public class PdfUtils {
 writer.setOutput(ios);
 writer.prepareWriteSequence(null);
-for (int i = 0; i < images.size(); ++i) {
-    BufferedImage image = images.get(i);
+for (int i = 0; i < pageCount; ++i) {
+    BufferedImage image = pdfRenderer.renderImageWithDPI(i, DPI, colorType);
     writer.writeToSequence(new IIOImage(image, null, null), param);
 }
@@ -240,8 +235,9 @@ public class PdfUtils {
 BufferedImage combined = new BufferedImage(images.get(0).getWidth(), images.get(0).getHeight() * pageCount, BufferedImage.TYPE_INT_RGB);
 Graphics g = combined.getGraphics();
-for (int i = 0; i < images.size(); i++) {
-    g.drawImage(images.get(i), 0, i * images.get(0).getHeight(), null);
+for (int i = 0; i < pageCount; ++i) {
+    BufferedImage image = pdfRenderer.renderImageWithDPI(i, DPI, colorType);
+    g.drawImage(image, 0, i * image.getHeight(), null);
 }
 // Write the image to the output stream
@@ -253,8 +249,8 @@ public class PdfUtils {
 } else {
     // Zip the images and return as byte array
     try (ZipOutputStream zos = new ZipOutputStream(baos)) {
-        for (int i = 0; i < images.size(); i++) {
-            BufferedImage image = images.get(i);
+        for (int i = 0; i < pageCount; ++i) {
+            BufferedImage image = pdfRenderer.renderImageWithDPI(i, DPI, colorType);
         try (ByteArrayOutputStream baosImage = new ByteArrayOutputStream()) {
             ImageIO.write(image, imageType, baosImage);