// redone routes — file routes rewritten from supabase queries to drizzle-orm
import { FastifyInstance } from "fastify"
import multipart from "@fastify/multipart"
import archiver from "archiver"

import { GetObjectCommand, PutObjectCommand } from "@aws-sdk/client-s3"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
import { eq, inArray } from "drizzle-orm"

import { s3 } from "../utils/s3"
import { secrets } from "../utils/secrets"
import { files, createddocuments, customers } from "../../db/schema"
|
||||
|
||||
|
||||
export default async function fileRoutes(server: FastifyInstance) {
|
||||
await server.register(multipart,{
|
||||
limits: {
|
||||
fileSize: 20 * 1024 * 1024, // 20 MB
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// MULTIPART INIT
|
||||
// -------------------------------------------------------------
|
||||
await server.register(multipart, {
|
||||
limits: { fileSize: 20 * 1024 * 1024 } // 20 MB
|
||||
})
|
||||
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// UPLOAD FILE
|
||||
// -------------------------------------------------------------
|
||||
server.post("/files/upload", async (req, reply) => {
|
||||
const tenantId = req.user?.tenant_id
|
||||
if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })
|
||||
try {
|
||||
const tenantId = req.user?.tenant_id
|
||||
if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })
|
||||
|
||||
const data:any = await req.file()
|
||||
const fileBuffer = await data.toBuffer()
|
||||
const data: any = await req.file()
|
||||
if (!data?.file) return reply.code(400).send({ error: "No file uploaded" })
|
||||
const fileBuffer = await data.toBuffer()
|
||||
|
||||
console.log(data)
|
||||
const meta = data.fields?.meta?.value ? JSON.parse(data.fields.meta.value) : {}
|
||||
|
||||
// 1️⃣ DB-Eintrag erzeugen
|
||||
const inserted = await server.db
|
||||
.insert(files)
|
||||
.values({ tenant: tenantId })
|
||||
.returning()
|
||||
|
||||
let meta = JSON.parse(data.fields?.meta?.value)
|
||||
const created = inserted[0]
|
||||
if (!created) throw new Error("Could not create DB entry")
|
||||
|
||||
if (!data.file) return reply.code(400).send({ error: "No file uploaded" })
|
||||
|
||||
|
||||
const {data:createdFileData,error:createdFileError} = await server.supabase
|
||||
.from("files")
|
||||
.insert({
|
||||
tenant: tenantId,
|
||||
})
|
||||
.select()
|
||||
.single()
|
||||
|
||||
if(createdFileError) {
|
||||
console.log(createdFileError)
|
||||
return reply.code(500).send({ error: "Internal Server Error" })
|
||||
} else if(createdFileData && data.file) {
|
||||
const fileKey = `${tenantId}/filesbyid/${createdFileData.id}/${data.filename}`
|
||||
// 2️⃣ Datei in S3 speichern
|
||||
const fileKey = `${tenantId}/filesbyid/${created.id}/${data.filename}`
|
||||
|
||||
await s3.send(new PutObjectCommand({
|
||||
Bucket: secrets.S3_BUCKET,
|
||||
Key: fileKey,
|
||||
Body: fileBuffer,
|
||||
ContentType: data.mimetype,
|
||||
ContentType: data.mimetype
|
||||
}))
|
||||
|
||||
//Update File with Corresponding Path
|
||||
const {data:updateFileData, error:updateFileError} = await server.supabase
|
||||
.from("files")
|
||||
.update({
|
||||
// 3️⃣ DB updaten: meta + path
|
||||
await server.db
|
||||
.update(files)
|
||||
.set({
|
||||
...meta,
|
||||
path: fileKey,
|
||||
path: fileKey
|
||||
})
|
||||
.eq("id", createdFileData.id)
|
||||
|
||||
if(updateFileError) {
|
||||
console.log(updateFileError)
|
||||
return reply.code(500).send({ error: "Internal Server Error" })
|
||||
|
||||
} else {
|
||||
/*const {data:tagData, error:tagError} = await server.supabase
|
||||
.from("filetagmembers")
|
||||
.insert(tags.map(tag => {
|
||||
return {
|
||||
file_id: createdFileData.id,
|
||||
tag_id: tag
|
||||
}
|
||||
}))*/
|
||||
|
||||
return { id: createdFileData.id, filename: data.filename, path: fileKey }
|
||||
.where(eq(files.id, created.id))
|
||||
|
||||
return {
|
||||
id: created.id,
|
||||
filename: data.filename,
|
||||
path: fileKey
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
return reply.code(500).send({ error: "Upload failed" })
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// GET FILE OR LIST FILES
|
||||
// -------------------------------------------------------------
|
||||
server.get("/files/:id?", async (req, reply) => {
|
||||
const { id } = req.params as { id?: string }
|
||||
try {
|
||||
const { id } = req.params as { id?: string }
|
||||
|
||||
if(id) {
|
||||
try {
|
||||
const {data,error} = await server.supabase.from("files").select("*").eq("id", id).single()
|
||||
// 🔹 EINZELNE DATEI
|
||||
if (id) {
|
||||
const rows = await server.db
|
||||
.select()
|
||||
.from(files)
|
||||
.where(eq(files.id, id))
|
||||
|
||||
return {...data}
|
||||
} catch (err) {
|
||||
req.log.error(err);
|
||||
reply.code(500).send({ error: "Could not generate presigned URL" });
|
||||
const file = rows[0]
|
||||
if (!file) return reply.code(404).send({ error: "Not found" })
|
||||
|
||||
return file
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
const {data:supabaseFileEntries,error} = await server.supabase.from("files").select("*, createddocument(*, customer(*))").eq("tenant",req.user.tenant_id)
|
||||
|
||||
// 🔹 ALLE DATEIEN DES TENANTS (mit createddocument + customer)
|
||||
const tenantId = req.user?.tenant_id
|
||||
if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })
|
||||
|
||||
const list = await server.db
|
||||
.select({
|
||||
...files,
|
||||
createddocument: createddocuments,
|
||||
customer: customers
|
||||
})
|
||||
.from(files)
|
||||
.leftJoin(
|
||||
createddocuments,
|
||||
eq(files.createddocument, createddocuments.id)
|
||||
)
|
||||
.leftJoin(
|
||||
customers,
|
||||
eq(createddocuments.customer, customers.id)
|
||||
)
|
||||
.where(eq(files.tenant, tenantId))
|
||||
|
||||
return { files: supabaseFileEntries }
|
||||
} catch (err) {
|
||||
req.log.error(err)
|
||||
reply.code(500).send({ error: "Could not generate presigned URLs" })
|
||||
}
|
||||
return { files: list }
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
return reply.code(500).send({ error: "Could not load files" })
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// DOWNLOAD (SINGLE OR MULTI ZIP)
|
||||
// -------------------------------------------------------------
|
||||
server.post("/files/download/:id?", async (req, reply) => {
|
||||
const { id } = req.params as { id?: string }
|
||||
|
||||
// @ts-ignore
|
||||
const ids = req.body?.ids || []
|
||||
|
||||
|
||||
try {
|
||||
if (id) {
|
||||
// 🔹 Einzeldownload
|
||||
const { data, error } = await server.supabase
|
||||
.from("files")
|
||||
.select("*")
|
||||
.eq("id", id)
|
||||
.single()
|
||||
const { id } = req.params as { id?: string }
|
||||
const ids = req.body?.ids || []
|
||||
|
||||
if (error || !data) {
|
||||
return reply.code(404).send({ error: "File not found" })
|
||||
}
|
||||
// -------------------------------------------------
|
||||
// 1️⃣ SINGLE DOWNLOAD
|
||||
// -------------------------------------------------
|
||||
if (id) {
|
||||
const rows = await server.db
|
||||
.select()
|
||||
.from(files)
|
||||
.where(eq(files.id, id))
|
||||
|
||||
const file = rows[0]
|
||||
if (!file) return reply.code(404).send({ error: "File not found" })
|
||||
|
||||
const command = new GetObjectCommand({
|
||||
Bucket: secrets.S3_BUCKET,
|
||||
Key: data.path,
|
||||
Key: file.path!
|
||||
})
|
||||
|
||||
const { Body, ContentType } = await s3.send(command)
|
||||
|
||||
const chunks: any[] = []
|
||||
// @ts-ignore
|
||||
for await (const chunk of Body) {
|
||||
chunks.push(chunk)
|
||||
}
|
||||
for await (const chunk of Body as any) chunks.push(chunk)
|
||||
const buffer = Buffer.concat(chunks)
|
||||
|
||||
reply.header("Content-Type", ContentType || "application/octet-stream")
|
||||
reply.header(
|
||||
"Content-Disposition",
|
||||
`attachment; filename="${data.path.split("/").pop()}"`
|
||||
)
|
||||
reply.header("Content-Disposition", `attachment; filename="${file.path?.split("/").pop()}"`)
|
||||
return reply.send(buffer)
|
||||
}
|
||||
|
||||
console.log(ids)
|
||||
|
||||
// -------------------------------------------------
|
||||
// 2️⃣ MULTI DOWNLOAD → ZIP
|
||||
// -------------------------------------------------
|
||||
if (Array.isArray(ids) && ids.length > 0) {
|
||||
// 🔹 Multi-Download → ZIP zurückgeben
|
||||
const { data: supabaseFiles, error } = await server.supabase
|
||||
.from("files")
|
||||
.select("*")
|
||||
.in("id", ids)
|
||||
const rows = await server.db
|
||||
.select()
|
||||
.from(files)
|
||||
.where(inArray(files.id, ids))
|
||||
|
||||
if (error || !supabaseFiles?.length) {
|
||||
return reply.code(404).send({ error: "Files not found" })
|
||||
}
|
||||
|
||||
console.log(supabaseFiles)
|
||||
if (!rows.length) return reply.code(404).send({ error: "Files not found" })
|
||||
|
||||
reply.header("Content-Type", "application/zip")
|
||||
reply.header("Content-Disposition", "attachment; filename=dateien.zip")
|
||||
reply.header("Content-Disposition", `attachment; filename="dateien.zip"`)
|
||||
|
||||
const archive = archiver("zip", { zlib: { level: 9 } })
|
||||
archive.on("warning", console.warn)
|
||||
|
||||
for (const entry of supabaseFiles) {
|
||||
const command = new GetObjectCommand({
|
||||
for (const entry of rows) {
|
||||
const cmd = new GetObjectCommand({
|
||||
Bucket: secrets.S3_BUCKET,
|
||||
Key: entry.path,
|
||||
Key: entry.path!
|
||||
})
|
||||
const { Body } = await s3.send(cmd)
|
||||
|
||||
const { Body } = await s3.send(command)
|
||||
const filename = entry.path.split("/").pop() || entry.id
|
||||
console.log(filename)
|
||||
archive.append(Body as any, { name: filename })
|
||||
archive.append(Body as any, {
|
||||
name: entry.path?.split("/").pop() || entry.id
|
||||
})
|
||||
}
|
||||
|
||||
await archive.finalize()
|
||||
@@ -186,80 +202,80 @@ export default async function fileRoutes(server: FastifyInstance) {
|
||||
}
|
||||
|
||||
return reply.code(400).send({ error: "No id or ids provided" })
|
||||
|
||||
} catch (err) {
|
||||
console.log(err)
|
||||
reply.code(500).send({ error: "Download failed" })
|
||||
console.error(err)
|
||||
return reply.code(500).send({ error: "Download failed" })
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// GENERATE PRESIGNED URL(S)
|
||||
// -------------------------------------------------------------
|
||||
server.post("/files/presigned/:id?", async (req, reply) => {
|
||||
const { id } = req.params as { id: string };
|
||||
const { ids } = req.body as { ids: string[] }
|
||||
try {
|
||||
const { id } = req.params as { id?: string }
|
||||
const { ids } = req.body as { ids?: string[] }
|
||||
const tenantId = req.user?.tenant_id
|
||||
|
||||
if(id) {
|
||||
try {
|
||||
const {data,error} = await server.supabase.from("files").select("*").eq("id", id).single()
|
||||
if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })
|
||||
|
||||
const command = new GetObjectCommand({
|
||||
Bucket: secrets.S3_BUCKET,
|
||||
Key: data.path,
|
||||
});
|
||||
// -------------------------------------------------
|
||||
// SINGLE FILE PRESIGNED URL
|
||||
// -------------------------------------------------
|
||||
if (id) {
|
||||
const rows = await server.db
|
||||
.select()
|
||||
.from(files)
|
||||
.where(eq(files.id, id))
|
||||
|
||||
// URL für 15 Minuten gültig
|
||||
const url = await getSignedUrl(s3, command, { expiresIn: 900 });
|
||||
const file = rows[0]
|
||||
if (!file) return reply.code(404).send({ error: "Not found" })
|
||||
|
||||
return { ...data, url };
|
||||
} catch (err) {
|
||||
req.log.error(err);
|
||||
reply.code(500).send({ error: "Could not generate presigned URL" });
|
||||
}
|
||||
} else {
|
||||
if (!Array.isArray(ids) || ids.length === 0) {
|
||||
return reply.code(400).send({ error: "No file keys provided" })
|
||||
}
|
||||
const url = await getSignedUrl(
|
||||
s3,
|
||||
new GetObjectCommand({ Bucket: secrets.S3_BUCKET, Key: file.path! }),
|
||||
{ expiresIn: 900 }
|
||||
)
|
||||
|
||||
try {
|
||||
const {data:supabaseFileEntries,error} = await server.supabase.from("files").select("*, createddocument(*, customer(*))").eq("tenant",req.user.tenant_id).is("archived",false)
|
||||
return { ...file, url }
|
||||
} else {
|
||||
// -------------------------------------------------
|
||||
// MULTIPLE PRESIGNED URLs
|
||||
// -------------------------------------------------
|
||||
if (!ids || !Array.isArray(ids) || ids.length === 0) {
|
||||
return reply.code(400).send({ error: "No ids provided" })
|
||||
}
|
||||
|
||||
console.log(error)
|
||||
const rows = await server.db
|
||||
.select()
|
||||
.from(files)
|
||||
.where(eq(files.tenant, tenantId))
|
||||
|
||||
let filteredFiles = supabaseFileEntries.filter(i => ids.includes(i.id))
|
||||
filteredFiles = filteredFiles.filter(i => i.path)
|
||||
const selected = rows.filter(f => ids.includes(f.id))
|
||||
|
||||
console.log(filteredFiles.filter(i => !i.path))
|
||||
|
||||
|
||||
|
||||
let urls = await Promise.all(
|
||||
ids.map(async (id) => {
|
||||
let file = filteredFiles.find(i => i.id === id)
|
||||
|
||||
if(!file) return
|
||||
|
||||
let key = file.path
|
||||
if(!key) console.log(file)
|
||||
|
||||
const command = new GetObjectCommand({
|
||||
Bucket: secrets.S3_BUCKET,
|
||||
Key: key,
|
||||
})
|
||||
|
||||
const url = await getSignedUrl(s3, command, { expiresIn: 900 }) // 15 min gültig
|
||||
|
||||
|
||||
return {...filteredFiles.find(i => i.id === id), url}
|
||||
const output = await Promise.all(
|
||||
selected.map(async (file) => {
|
||||
const url = await getSignedUrl(
|
||||
s3,
|
||||
new GetObjectCommand({ Bucket: secrets.S3_BUCKET, Key: file.path! }),
|
||||
{ expiresIn: 900 }
|
||||
)
|
||||
return { ...file, url }
|
||||
})
|
||||
)
|
||||
|
||||
urls = urls.filter(i => i)
|
||||
|
||||
return { files: urls }
|
||||
} catch (err) {
|
||||
console.log(err)
|
||||
reply.code(500).send({ error: "Could not generate presigned URLs" })
|
||||
return { files: output }
|
||||
}
|
||||
|
||||
|
||||
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
return reply.code(500).send({ error: "Could not create presigned URLs" })
|
||||
}
|
||||
|
||||
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// (end of file)