pipeline change

commit 46d4ae8fc5
parent 73ab1936a3

4 changed files with 64 additions and 19 deletions
@@ -4,10 +4,12 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
 import org.springframework.core.env.Environment;
+import org.springframework.scheduling.annotation.EnableScheduling;
 
 import jakarta.annotation.PostConstruct;
 
 @SpringBootApplication
+@EnableScheduling
 public class SPdfApplication {
 
     @Autowired
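The only functional change in this first hunk is turning on Spring's scheduler so that the @Scheduled folder scan in the Controller below actually runs. As a self-contained illustration (the class and method names here are invented for the example, not taken from the project), the wiring looks roughly like this:

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

@SpringBootApplication
@EnableScheduling   // without this, @Scheduled methods on beans are never invoked
public class DemoApplication {
    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }
}

@Component
class FolderPoller {
    @Scheduled(fixedRate = 5000)   // runs every 5 seconds, matching the Controller's scanFolders()
    void poll() {
        System.out.println("scanning watched folders...");
    }
}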
@@ -85,7 +85,7 @@ public class ConvertImgPDFController {
 
     @PostMapping(consumes = "multipart/form-data", value = "/img-to-pdf")
     @Operation(summary = "Convert images to a PDF file",
-            description = "This endpoint converts one or more images to a PDF file. Users can specify whether to stretch the images to fit the PDF page, and whether to automatically rotate the images. Input:Image Output:PDF Type:MISO")
+            description = "This endpoint converts one or more images to a PDF file. Users can specify whether to stretch the images to fit the PDF page, and whether to automatically rotate the images. Input:Image Output:PDF Type:SISO?")
     public ResponseEntity<byte[]> convertToPdf(
             @RequestPart(required = true, value = "fileInput")
                 @Parameter(description = "The input images to be converted to a PDF file")
@@ -61,10 +61,22 @@ public class Controller {
     final String watchedFoldersDir = "watchedFolders/";
     @Scheduled(fixedRate = 5000)
     public void scanFolders() {
-        try (Stream<Path> paths = Files.walk(Paths.get(watchedFoldersDir))) {
+        Path watchedFolderPath = Paths.get(watchedFoldersDir);
+        if (!Files.exists(watchedFolderPath)) {
+            try {
+                Files.createDirectories(watchedFolderPath);
+            } catch (IOException e) {
+                e.printStackTrace();
+                return;
+            }
+        }
+
+        try (Stream<Path> paths = Files.walk(watchedFolderPath)) {
             paths.filter(Files::isDirectory).forEach(t -> {
                 try {
+                    if (!t.equals(watchedFolderPath) && !t.endsWith("processing")) {
                         handleDirectory(t);
+                    }
                 } catch (Exception e) {
                     e.printStackTrace();
                 }
@@ -76,6 +88,11 @@ public class Controller {
 
     private void handleDirectory(Path dir) throws Exception {
         Path jsonFile = dir.resolve(jsonFileName);
+        Path processingDir = dir.resolve("processing"); // Directory to move files during processing
+        if (!Files.exists(processingDir)) {
+            Files.createDirectory(processingDir);
+        }
+
         if (Files.exists(jsonFile)) {
             // Read JSON file
             String jsonString;
@@ -90,6 +107,10 @@ public class Controller {
             PipelineConfig config;
             try {
                 config = objectMapper.readValue(jsonString, PipelineConfig.class);
+                // Assuming your PipelineConfig class has getters for all necessary fields, you can perform checks here
+                if (config.getOperations() == null || config.getOutputDir() == null || config.getName() == null) {
+                    throw new IOException("Invalid JSON format");
+                }
             } catch (IOException e) {
                 e.printStackTrace();
                 return;
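The null checks above assume PipelineConfig exposes getOperations(), getOutputDir() and getName(); the class itself is not part of this diff. A minimal Jackson-bindable shape that would satisfy those checks might look like the following (field names are inferred from the getters, and Object is only a placeholder for the unknown element type of operations):

import java.util.List;

// Illustrative only: the real PipelineConfig is not shown in this commit.
public class PipelineConfig {
    private String name;
    private List<Object> operations; // element type unknown here; Object is a placeholder
    private String outputDir;

    public String getName() { return name; }
    public void setName(String name) { this.name = name; }

    public List<Object> getOperations() { return operations; }
    public void setOperations(List<Object> operations) { this.operations = operations; }

    public String getOutputDir() { return outputDir; }
    public void setOutputDir(String outputDir) { this.outputDir = outputDir; }
}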
@@ -115,9 +136,15 @@ public class Controller {
                 files = new File[]{new File(fileInput)};
             }
 
-            // Call handleData for each operation
+            // Prepare the files for processing
+            File[] filesToProcess = files.clone();
+            for (File file : filesToProcess) {
+                Files.move(file.toPath(), processingDir.resolve(file.getName()));
+            }
+
+            // Process the files
             try {
-                List<Resource> resources = handleFiles(files, jsonString);
+                List<Resource> resources = handleFiles(filesToProcess, jsonString);
 
                 // Move resultant files and rename them as per config in JSON file
                 for (Resource resource : resources) {
@@ -131,8 +158,17 @@ public class Controller {
 
                     Files.move(resource.getFile().toPath(), Paths.get(config.getOutputDir(), outputFileName));
                 }
-            } catch (IOException e) {
-                e.printStackTrace();
+
+                // If successful, delete the original files
+                for (File file : filesToProcess) {
+                    Files.deleteIfExists(processingDir.resolve(file.getName()));
+                }
+            } catch (Exception e) {
+                // If an error occurs, move the original files back
+                for (File file : filesToProcess) {
+                    Files.move(processingDir.resolve(file.getName()), file.toPath());
+                }
+                throw e;
             }
         }
     }
@@ -140,6 +176,7 @@ public class Controller {
 
 
 
+
     List<Resource> processFiles(List<Resource> outputFiles, String jsonString) throws Exception{
         ObjectMapper mapper = new ObjectMapper();
         JsonNode jsonNode = mapper.readTree(jsonString);
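The handleDirectory changes above stage inputs into a processing/ sub-directory, delete the staged copies on success, and move them back on failure. A compact sketch of that stage-then-restore pattern in isolation (helper, class, and parameter names are invented for this example, not project code):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

// Illustrative helper: move inputs into processingDir, run a task,
// then either clean up (success) or move the inputs back (failure).
final class StagedProcessing {
    static void run(List<Path> inputs, Path processingDir, Consumer<List<Path>> task) throws IOException {
        Files.createDirectories(processingDir);
        List<Path> staged = new ArrayList<>();
        for (Path input : inputs) {
            Path target = processingDir.resolve(input.getFileName());
            Files.move(input, target);       // stage the file for processing
            staged.add(target);
        }
        try {
            task.accept(staged);             // process the staged files
            for (Path p : staged) {
                Files.deleteIfExists(p);     // success: remove the staged originals
            }
        } catch (RuntimeException e) {
            for (int i = 0; i < staged.size(); i++) {
                Files.move(staged.get(i), inputs.get(i));  // failure: restore originals
            }
            throw e;
        }
    }
}

Staging into a separate directory keeps the scheduled scanFolders() pass from re-picking files that are mid-processing, which is also why the scan above now skips paths ending in "processing".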
@@ -120,6 +120,8 @@ let operationSettings = {};
 fetch('v3/api-docs')
     .then(response => response.json())
     .then(data => {
+
+        apiDocs = data.paths;
         let operationsDropdown = document.getElementById('operationsDropdown');
         const ignoreOperations = ["/handleData", "operationToIgnore"]; // Add the operations you want to ignore here
 
@@ -130,7 +132,7 @@ fetch('v3/api-docs')
         // Group operations by tags
         Object.keys(data.paths).forEach(operationPath => {
             let operation = data.paths[operationPath].post;
-            if (operation && !ignoreOperations.includes(operationPath)) {
+            if (operation && !ignoreOperations.includes(operationPath) && !operation.description.includes("Type:MISO")) {
                 let operationTag = operation.tags[0]; // This assumes each operation has exactly one tag
                 if (!operationsByTag[operationTag]) {
                     operationsByTag[operationTag] = [];
@@ -168,13 +170,17 @@ document.getElementById('addOperationBtn').addEventListener('click', function()
 
     let listItem = document.createElement('li');
     listItem.className = "list-group-item";
+    let hasSettings = (apiDocs[selectedOperation] && apiDocs[selectedOperation].post &&
+        apiDocs[selectedOperation].post.parameters && apiDocs[selectedOperation].post.parameters.length > 0);
+
+
     listItem.innerHTML = `
         <div class="d-flex justify-content-between align-items-center w-100">
             <div class="operationName">${selectedOperation}</div>
             <div class="arrows d-flex">
                 <button class="btn btn-secondary move-up btn-margin"><span>↑</span></button>
                 <button class="btn btn-secondary move-down btn-margin"><span>↓</span></button>
-                <button class="btn btn-warning pipelineSettings btn-margin"><span>⚙️</span></button>
+                <button class="btn btn-warning pipelineSettings btn-margin" ${hasSettings ? "" : "disabled"}><span style="color: ${hasSettings ? "black" : "grey"};">⚙️</span></button>
                 <button class="btn btn-danger remove"><span>X</span></button>
             </div>
         </div>