Compare commits
149 Commits
17cd3dc3a3
...
dev
| Author | SHA1 | Date | |
|---|---|---|---|
| 52c182cb5f | |||
| 9cef3964e9 | |||
| cf0fb724a2 | |||
| bbb893dd6c | |||
| 724f152d70 | |||
| 27be8241bf | |||
| d27e437ba6 | |||
| f5253b29f4 | |||
| 0141a243ce | |||
| a0e1b8c0eb | |||
| 45fb45845a | |||
| 409db82368 | |||
| 30d761f899 | |||
| 70636f6ac5 | |||
| 59392a723c | |||
| c782492ab5 | |||
| 844af30b18 | |||
| 6fded3993a | |||
| f26d6bd4f3 | |||
| 2621cc0d8d | |||
| a8238dc9ba | |||
| 49d35f080d | |||
| 189a52b3cd | |||
| 3f8ce5daf7 | |||
| 087ba1126e | |||
| db4e9612a0 | |||
| cb4917c536 | |||
| 9f32eb5439 | |||
| f596b46364 | |||
| 117da523d2 | |||
| c2901dc0a9 | |||
| 8c2a8a7998 | |||
| 1dc74947f4 | |||
| f63e793c88 | |||
| 29a84b899d | |||
| be706a70f8 | |||
| 474b3e762c | |||
| f793d4cce6 | |||
| c3f46cd184 | |||
| 6bf336356d | |||
| 55699da42c | |||
| 053f184a33 | |||
| 6541cb2adf | |||
| 7dca84947e | |||
| 45fd6fda08 | |||
| 31e80fb386 | |||
| 7ea28cc6c0 | |||
| c0faa398b8 | |||
| 19be1f0d03 | |||
| c43d3225e3 | |||
| 7125d15b3f | |||
| 4b7cf171c8 | |||
| 59fdedfaa0 | |||
| 71d249d8bf | |||
| e496a62b36 | |||
| 0bfef0806b | |||
| 5c69388f1c | |||
| 7ed0388acb | |||
| 3aa0c7d77a | |||
| 77aa277347 | |||
| 2fff1ca8a8 | |||
| e58929d9a0 | |||
| 90560ecd2c | |||
| b07953fb7d | |||
| 01ef3c5a42 | |||
| 2aed851224 | |||
| c56fcfbd14 | |||
| ca2020b9c6 | |||
| c87212d54a | |||
| db22d47900 | |||
| 143485e107 | |||
| c1d4b24418 | |||
| 9655d4fa05 | |||
| 4efe452f1c | |||
| cb21a85736 | |||
| d2b70e5883 | |||
| 1a065b649c | |||
| 34c58c3755 | |||
| 37d8a414d3 | |||
| 7f4f232c32 | |||
| d6f257bcc6 | |||
| 3109f4d5ff | |||
| 235b33ae08 | |||
| 2d135b7068 | |||
| 8831320a4c | |||
| 000d409e4d | |||
| 160124a184 | |||
| 26dad422ec | |||
| e59cbade53 | |||
| 6423886930 | |||
| 6adf09faa0 | |||
| d7f3920763 | |||
| 3af92ebf71 | |||
| 5ab90830a0 | |||
| 4f72919269 | |||
| f2c9dcc900 | |||
| b4ec792cc0 | |||
| 9b3f48defe | |||
| 5edc90bd4d | |||
| d140251aa0 | |||
| e7fb2df5c7 | |||
| f27fd3f6da | |||
| d3e2b106af | |||
| 769d2059ca | |||
| 53349fae83 | |||
| d8eb1559c8 | |||
| 6b9de04d83 | |||
| 529ec0c77d | |||
| 246677b750 | |||
| c839714945 | |||
| 8614917a05 | |||
| 2de80ea6ca | |||
| 6f5fed0ffb | |||
| 767152c535 | |||
| 3128893ba2 | |||
| bcf460cfd5 | |||
| da704be925 | |||
| c049730599 | |||
| 0194345ed8 | |||
| 82d8cd36b9 | |||
| 66110da6c4 | |||
| 267648074c | |||
| 32b4c40e11 | |||
| f044195d86 | |||
| 202e20ddd5 | |||
| 905f2e7bf4 | |||
| b39a52fb20 | |||
| 098bd02808 | |||
| b35c991634 | |||
| dc376894be | |||
| 90788f22da | |||
| d901ebe365 | |||
| 7f6ba99328 | |||
| 8afdf06c8e | |||
| e1205a8de5 | |||
| 08da93b6c3 | |||
| b0ace924d4 | |||
| 593118c181 | |||
| 67d2a05ac4 | |||
| b95d539907 | |||
| 76b363fdaf | |||
| 2cb0d9b607 | |||
| 037a10e93e | |||
| c36b9aa872 | |||
| 14a9435a5a | |||
| 8126c2d3f4 | |||
| a892b7a6e4 | |||
| be7b219569 | |||
| 998d725528 |
38
.gitea/ISSUE_TEMPLATE/bug_report.md
Normal file
38
.gitea/ISSUE_TEMPLATE/bug_report.md
Normal file
@@ -0,0 +1,38 @@
|
||||
---
|
||||
name: 🐛 Bug Report
|
||||
about: Erstelle einen Bericht, um uns zu helfen, das Projekt zu verbessern.
|
||||
title: '[BUG] '
|
||||
labels: bug
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Beschreibung**
|
||||
Eine klare und prägnante Beschreibung des Fehlers.
|
||||
|
||||
**Reproduktion**
|
||||
Schritte, um den Fehler zu reproduzieren:
|
||||
|
||||
Entweder:
|
||||
1. Gehe zu '...'
|
||||
2. Klicke auf '...'
|
||||
3. Scrolle runter zu '...'
|
||||
4. Siehe Fehler
|
||||
|
||||
Oder Link zur Seite
|
||||
|
||||
**Erwartetes Verhalten**
|
||||
Eine klare Beschreibung dessen, was du erwartet hast.
|
||||
|
||||
**Screenshots**
|
||||
Falls zutreffend, füge hier Screenshots oder Gifs hinzu, um das Problem zu verdeutlichen.
|
||||
|
||||
**Achtung: Achte bitte auf Datenschutz deiner Daten sowie der Daten deiner Kunden. Sollten ein Screenshot nur mit Daten möglich sein, schwärze diese bitte vor dem Upload.**
|
||||
|
||||
**Umgebung:**
|
||||
- Betriebssystem: [z.B. Windows, macOS, Linux]
|
||||
- Browser / Version (falls relevant): [z.B. Chrome 120]
|
||||
- Projekt-Version: [z.B. v1.0.2]
|
||||
|
||||
**Zusätzlicher Kontext**
|
||||
Füge hier alle anderen Informationen zum Problem hinzu.
|
||||
20
.gitea/ISSUE_TEMPLATE/feature_request.md
Normal file
20
.gitea/ISSUE_TEMPLATE/feature_request.md
Normal file
@@ -0,0 +1,20 @@
|
||||
---
|
||||
name: ✨ Feature Request
|
||||
about: Schlage eine Idee für dieses Projekt vor.
|
||||
title: '[FEATURE] '
|
||||
labels: enhancement
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Ist dein Feature-Wunsch mit einem Problem verbunden?**
|
||||
Eine klare Beschreibung des Problems (z.B. "Ich bin immer genervt, wenn...").
|
||||
|
||||
**Lösungsvorschlag**
|
||||
Eine klare Beschreibung dessen, was du dir wünschst und wie es funktionieren soll.
|
||||
|
||||
**Alternativen**
|
||||
Hast du über alternative Lösungen oder Workarounds nachgedacht?
|
||||
|
||||
**Zusätzlicher Kontext**
|
||||
Hier ist Platz für weitere Informationen, Skizzen oder Beispiele von anderen Tools.
|
||||
110
README.md
110
README.md
@@ -1 +1,109 @@
|
||||
TEST
|
||||
|
||||
|
||||
|
||||
# Docker Compose Setup
|
||||
|
||||
## ENV Vars
|
||||
|
||||
- DOMAIN
|
||||
- PDF_LICENSE
|
||||
- DB_PASS
|
||||
- DB_USER
|
||||
- CONTACT_EMAIL
|
||||
|
||||
## Docker Compose File
|
||||
~~~
|
||||
services:
|
||||
frontend:
|
||||
image: git.federspiel.tech/flfeders/fedeo/frontend:main
|
||||
restart: always
|
||||
environment:
|
||||
- NUXT_PUBLIC_API_BASE=https://${DOMAIN}/backend
|
||||
- NUXT_PUBLIC_PDF_LICENSE=${PDF_LICENSE}
|
||||
networks:
|
||||
- traefik
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.docker.network=traefik"
|
||||
- "traefik.port=3000"
|
||||
# Middlewares
|
||||
- "traefik.http.middlewares.fedeo-frontend-redirect-web-secure.redirectscheme.scheme=https"
|
||||
# Web Entrypoint
|
||||
- "traefik.http.routers.fedeo-frontend.middlewares=fedeo-frontend-redirect-web-secure"
|
||||
- "traefik.http.routers.fedeo-frontend.rule=Host(`${DOMAIN}`) && PathPrefix(`/`)"
|
||||
- "traefik.http.routers.fedeo-frontend.entrypoints=web"
|
||||
# Web Secure Entrypoint
|
||||
- "traefik.http.routers.fedeo-frontend-secure.rule=Host(`${DOMAIN}`) && PathPrefix(`/`)"
|
||||
- "traefik.http.routers.fedeo-frontend-secure.entrypoints=web-secured" #
|
||||
- "traefik.http.routers.fedeo-frontend-secure.tls.certresolver=mytlschallenge"
|
||||
backend:
|
||||
image: git.federspiel.tech/flfeders/fedeo/backend:main
|
||||
restart: always
|
||||
environment:
|
||||
- INFISICAL_CLIENT_ID=
|
||||
- INFISICAL_CLIENT_SECRET=
|
||||
- NODE_ENV=production
|
||||
networks:
|
||||
- traefik
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.docker.network=traefik"
|
||||
- "traefik.port=3100"
|
||||
# Middlewares
|
||||
- "traefik.http.middlewares.fedeo-backend-redirect-web-secure.redirectscheme.scheme=https"
|
||||
- "traefik.http.middlewares.fedeo-backend-strip.stripprefix.prefixes=/backend"
|
||||
# Web Entrypoint
|
||||
- "traefik.http.routers.fedeo-backend.middlewares=fedeo-backend-redirect-web-secure"
|
||||
- "traefik.http.routers.fedeo-backend.rule=Host(`${DOMAIN}`) && PathPrefix(`/backend`)"
|
||||
- "traefik.http.routers.fedeo-backend.entrypoints=web"
|
||||
# Web Secure Entrypoint
|
||||
- "traefik.http.routers.fedeo-backend-secure.rule=Host(`${DOMAIN}`) && PathPrefix(`/backend`)"
|
||||
- "traefik.http.routers.fedeo-backend-secure.entrypoints=web-secured" #
|
||||
- "traefik.http.routers.fedeo-backend-secure.tls.certresolver=mytlschallenge"
|
||||
- "traefik.http.routers.fedeo-backend-secure.middlewares=fedeo-backend-strip"
|
||||
# db:
|
||||
# image: postgres
|
||||
# restart: always
|
||||
# shm_size: 128mb
|
||||
# environment:
|
||||
# POSTGRES_PASSWORD:
|
||||
# POSTGRES_USER:
|
||||
# POSTGRES_DB:
|
||||
# volumes:
|
||||
# - ./pg-data:/var/lib/postgresql/data
|
||||
# ports:
|
||||
# - "5432:5432"
|
||||
traefik:
|
||||
image: traefik:v2.11
|
||||
restart: unless-stopped
|
||||
container_name: traefik
|
||||
command:
|
||||
- "--api.insecure=false"
|
||||
- "--api.dashboard=false"
|
||||
- "--api.debug=false"
|
||||
- "--providers.docker=true"
|
||||
- "--providers.docker.exposedbydefault=false"
|
||||
- "--providers.docker.network=traefik"
|
||||
- "--entrypoints.web.address=:80"
|
||||
- "--entrypoints.web-secured.address=:443"
|
||||
- "--accesslog=true"
|
||||
- "--accesslog.filepath=/logs/access.log"
|
||||
- "--accesslog.bufferingsize=5000"
|
||||
- "--accesslog.fields.defaultMode=keep"
|
||||
- "--accesslog.fields.headers.defaultMode=keep"
|
||||
- "--certificatesresolvers.mytlschallenge.acme.tlschallenge=true" #
|
||||
- "--certificatesresolvers.mytlschallenge.acme.email=${CONTACT_EMAIL}"
|
||||
- "--certificatesresolvers.mytlschallenge.acme.storage=/letsencrypt/acme.json"
|
||||
ports:
|
||||
- 80:80
|
||||
- 443:443
|
||||
volumes:
|
||||
- "./traefik/letsencrypt:/letsencrypt" # <== Volume for certs (TLS)
|
||||
- "/var/run/docker.sock:/var/run/docker.sock:ro"
|
||||
- "./traefik/logs:/logs"
|
||||
networks:
|
||||
- traefik
|
||||
networks:
|
||||
traefik:
|
||||
external: false
|
||||
~~~
|
||||
3
backend/.secretlintrc.json
Normal file
3
backend/.secretlintrc.json
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"rules": []
|
||||
}
|
||||
@@ -1,10 +1,27 @@
|
||||
import { drizzle } from "drizzle-orm/node-postgres"
|
||||
import { Pool } from "pg"
|
||||
// src/db/index.ts
|
||||
import { drizzle } from "drizzle-orm/node-postgres";
|
||||
import { Pool } from "pg";
|
||||
import * as schema from "./schema";
|
||||
import {secrets} from "../src/utils/secrets";
|
||||
|
||||
const pool = new Pool({
|
||||
connectionString: secrets.DATABASE_URL,
|
||||
max: 10, // je nach Last
|
||||
})
|
||||
console.log("[DB INIT] 1. Suche Connection String...");
|
||||
|
||||
export const db = drizzle(pool)
|
||||
// Checken woher die URL kommt
|
||||
let connectionString = process.env.DATABASE_URL || secrets.DATABASE_URL;
|
||||
if (connectionString) {
|
||||
console.log("[DB INIT] -> Gefunden in process.env.DATABASE_URL");
|
||||
} else {
|
||||
console.error("[DB INIT] ❌ KEIN CONNECTION STRING GEFUNDEN! .env nicht geladen?");
|
||||
}
|
||||
|
||||
export const pool = new Pool({
|
||||
connectionString,
|
||||
max: 10,
|
||||
});
|
||||
|
||||
// TEST: Ist die DB wirklich da?
|
||||
pool.query('SELECT NOW()')
|
||||
.then(res => console.log(`[DB INIT] ✅ VERBINDUNG ERFOLGREICH! Zeit auf DB: ${res.rows[0].now}`))
|
||||
.catch(err => console.error(`[DB INIT] ❌ VERBINDUNGSFEHLER:`, err.message));
|
||||
|
||||
export const db = drizzle(pool, { schema });
|
||||
2
backend/db/migrations/0003_woozy_adam_destine.sql
Normal file
2
backend/db/migrations/0003_woozy_adam_destine.sql
Normal file
@@ -0,0 +1,2 @@
|
||||
-- No-op migration: Datei war im Journal referenziert, aber fehlte im Repository.
|
||||
SELECT 1;
|
||||
2
backend/db/migrations/0004_stormy_onslaught.sql
Normal file
2
backend/db/migrations/0004_stormy_onslaught.sql
Normal file
@@ -0,0 +1,2 @@
|
||||
-- No-op migration: Datei war im Journal referenziert, aber fehlte im Repository.
|
||||
SELECT 1;
|
||||
123
backend/db/migrations/0005_green_shinobi_shaw.sql
Normal file
123
backend/db/migrations/0005_green_shinobi_shaw.sql
Normal file
@@ -0,0 +1,123 @@
|
||||
CREATE TABLE "m2m_api_keys" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant_id" bigint NOT NULL,
|
||||
"user_id" uuid NOT NULL,
|
||||
"created_by" uuid,
|
||||
"name" text NOT NULL,
|
||||
"key_prefix" text NOT NULL,
|
||||
"key_hash" text NOT NULL,
|
||||
"active" boolean DEFAULT true NOT NULL,
|
||||
"last_used_at" timestamp with time zone,
|
||||
"expires_at" timestamp with time zone,
|
||||
CONSTRAINT "m2m_api_keys_key_hash_unique" UNIQUE("key_hash")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "staff_time_events" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"tenant_id" bigint NOT NULL,
|
||||
"user_id" uuid NOT NULL,
|
||||
"actor_type" text NOT NULL,
|
||||
"actor_user_id" uuid,
|
||||
"event_time" timestamp with time zone NOT NULL,
|
||||
"event_type" text NOT NULL,
|
||||
"source" text NOT NULL,
|
||||
"invalidates_event_id" uuid,
|
||||
"related_event_id" uuid,
|
||||
"metadata" jsonb,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
CONSTRAINT "time_events_actor_user_check" CHECK (
|
||||
(actor_type = 'system' AND actor_user_id IS NULL)
|
||||
OR
|
||||
(actor_type = 'user' AND actor_user_id IS NOT NULL)
|
||||
)
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "serialtypes" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "serialtypes_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"intervall" text,
|
||||
"icon" text,
|
||||
"tenant" bigint NOT NULL,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "serial_executions" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"execution_date" timestamp NOT NULL,
|
||||
"status" text DEFAULT 'draft',
|
||||
"created_by" text,
|
||||
"created_at" timestamp DEFAULT now(),
|
||||
"summary" text
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "public_links" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"token" text NOT NULL,
|
||||
"tenant" integer NOT NULL,
|
||||
"default_profile" uuid,
|
||||
"is_protected" boolean DEFAULT false NOT NULL,
|
||||
"pin_hash" text,
|
||||
"config" jsonb DEFAULT '{}'::jsonb,
|
||||
"name" text NOT NULL,
|
||||
"description" text,
|
||||
"active" boolean DEFAULT true NOT NULL,
|
||||
"created_at" timestamp DEFAULT now(),
|
||||
"updated_at" timestamp DEFAULT now(),
|
||||
CONSTRAINT "public_links_token_unique" UNIQUE("token")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "wiki_pages" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"tenant_id" bigint NOT NULL,
|
||||
"parent_id" uuid,
|
||||
"title" text NOT NULL,
|
||||
"content" jsonb,
|
||||
"is_folder" boolean DEFAULT false NOT NULL,
|
||||
"sort_order" integer DEFAULT 0 NOT NULL,
|
||||
"entity_type" text,
|
||||
"entity_id" bigint,
|
||||
"entity_uuid" uuid,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"created_by" uuid,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "time_events" DISABLE ROW LEVEL SECURITY;--> statement-breakpoint
|
||||
DROP TABLE "time_events" CASCADE;--> statement-breakpoint
|
||||
ALTER TABLE "projects" ALTER COLUMN "active_phase" SET DEFAULT 'Erstkontakt';--> statement-breakpoint
|
||||
ALTER TABLE "createddocuments" ADD COLUMN "serialexecution" uuid;--> statement-breakpoint
|
||||
ALTER TABLE "devices" ADD COLUMN "last_seen" timestamp with time zone;--> statement-breakpoint
|
||||
ALTER TABLE "devices" ADD COLUMN "last_debug_info" jsonb;--> statement-breakpoint
|
||||
ALTER TABLE "files" ADD COLUMN "size" bigint;--> statement-breakpoint
|
||||
ALTER TABLE "m2m_api_keys" ADD CONSTRAINT "m2m_api_keys_tenant_id_tenants_id_fk" FOREIGN KEY ("tenant_id") REFERENCES "public"."tenants"("id") ON DELETE cascade ON UPDATE cascade;--> statement-breakpoint
|
||||
ALTER TABLE "m2m_api_keys" ADD CONSTRAINT "m2m_api_keys_user_id_auth_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."auth_users"("id") ON DELETE cascade ON UPDATE cascade;--> statement-breakpoint
|
||||
ALTER TABLE "m2m_api_keys" ADD CONSTRAINT "m2m_api_keys_created_by_auth_users_id_fk" FOREIGN KEY ("created_by") REFERENCES "public"."auth_users"("id") ON DELETE set null ON UPDATE cascade;--> statement-breakpoint
|
||||
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_tenant_id_tenants_id_fk" FOREIGN KEY ("tenant_id") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_user_id_auth_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_actor_user_id_auth_users_id_fk" FOREIGN KEY ("actor_user_id") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_invalidates_event_id_staff_time_events_id_fk" FOREIGN KEY ("invalidates_event_id") REFERENCES "public"."staff_time_events"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_related_event_id_staff_time_events_id_fk" FOREIGN KEY ("related_event_id") REFERENCES "public"."staff_time_events"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "serialtypes" ADD CONSTRAINT "serialtypes_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "serialtypes" ADD CONSTRAINT "serialtypes_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "serial_executions" ADD CONSTRAINT "serial_executions_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "public_links" ADD CONSTRAINT "public_links_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "public_links" ADD CONSTRAINT "public_links_default_profile_auth_profiles_id_fk" FOREIGN KEY ("default_profile") REFERENCES "public"."auth_profiles"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_tenant_id_tenants_id_fk" FOREIGN KEY ("tenant_id") REFERENCES "public"."tenants"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_parent_id_wiki_pages_id_fk" FOREIGN KEY ("parent_id") REFERENCES "public"."wiki_pages"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_created_by_auth_users_id_fk" FOREIGN KEY ("created_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
CREATE INDEX "idx_time_events_tenant_user_time" ON "staff_time_events" USING btree ("tenant_id","user_id","event_time");--> statement-breakpoint
|
||||
CREATE INDEX "idx_time_events_created_at" ON "staff_time_events" USING btree ("created_at");--> statement-breakpoint
|
||||
CREATE INDEX "idx_time_events_invalidates" ON "staff_time_events" USING btree ("invalidates_event_id");--> statement-breakpoint
|
||||
CREATE INDEX "wiki_pages_tenant_idx" ON "wiki_pages" USING btree ("tenant_id");--> statement-breakpoint
|
||||
CREATE INDEX "wiki_pages_parent_idx" ON "wiki_pages" USING btree ("parent_id");--> statement-breakpoint
|
||||
CREATE INDEX "wiki_pages_entity_int_idx" ON "wiki_pages" USING btree ("tenant_id","entity_type","entity_id");--> statement-breakpoint
|
||||
CREATE INDEX "wiki_pages_entity_uuid_idx" ON "wiki_pages" USING btree ("tenant_id","entity_type","entity_uuid");--> statement-breakpoint
|
||||
ALTER TABLE "createddocuments" ADD CONSTRAINT "createddocuments_serialexecution_serial_executions_id_fk" FOREIGN KEY ("serialexecution") REFERENCES "public"."serial_executions"("id") ON DELETE no action ON UPDATE no action;
|
||||
1
backend/db/migrations/0006_nifty_price_lock.sql
Normal file
1
backend/db/migrations/0006_nifty_price_lock.sql
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE "services" ADD COLUMN "priceUpdateLocked" boolean DEFAULT false NOT NULL;
|
||||
1
backend/db/migrations/0007_bright_default_tax_type.sql
Normal file
1
backend/db/migrations/0007_bright_default_tax_type.sql
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE "customers" ADD COLUMN "customTaxType" text;
|
||||
16
backend/db/migrations/0008_quick_contracttypes.sql
Normal file
16
backend/db/migrations/0008_quick_contracttypes.sql
Normal file
@@ -0,0 +1,16 @@
|
||||
CREATE TABLE "contracttypes" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "contracttypes_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"description" text,
|
||||
"paymentType" text,
|
||||
"recurring" boolean DEFAULT false NOT NULL,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
|
||||
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE "contracts" ADD COLUMN "contracttype" bigint;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "contracts" ADD CONSTRAINT "contracts_contracttype_contracttypes_id_fk" FOREIGN KEY ("contracttype") REFERENCES "public"."contracttypes"("id") ON DELETE no action ON UPDATE no action;
|
||||
3
backend/db/migrations/0010_sudden_billing_interval.sql
Normal file
3
backend/db/migrations/0010_sudden_billing_interval.sql
Normal file
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE "contracttypes" ADD COLUMN "billingInterval" text;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "contracts" ADD COLUMN "billingInterval" text;
|
||||
16
backend/db/migrations/0011_mighty_member_bankaccounts.sql
Normal file
16
backend/db/migrations/0011_mighty_member_bankaccounts.sql
Normal file
@@ -0,0 +1,16 @@
|
||||
CREATE TABLE "entitybankaccounts" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "entitybankaccounts_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"iban_encrypted" jsonb NOT NULL,
|
||||
"bic_encrypted" jsonb NOT NULL,
|
||||
"bank_name_encrypted" jsonb NOT NULL,
|
||||
"description" text,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid,
|
||||
"archived" boolean DEFAULT false NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
|
||||
73
backend/db/migrations/0012_shiny_customer_inventory.sql
Normal file
73
backend/db/migrations/0012_shiny_customer_inventory.sql
Normal file
@@ -0,0 +1,73 @@
|
||||
CREATE TABLE "customerspaces" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerspaces_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"type" text NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"customer" bigint NOT NULL,
|
||||
"spaceNumber" text NOT NULL,
|
||||
"parentSpace" bigint,
|
||||
"infoData" jsonb DEFAULT '{"zip":"","city":"","streetNumber":""}'::jsonb NOT NULL,
|
||||
"description" text,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "customerinventoryitems" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerinventoryitems_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"description" text,
|
||||
"tenant" bigint NOT NULL,
|
||||
"customer" bigint NOT NULL,
|
||||
"customerspace" bigint,
|
||||
"customerInventoryId" text NOT NULL,
|
||||
"serialNumber" text,
|
||||
"quantity" bigint DEFAULT 0 NOT NULL,
|
||||
"manufacturer" text,
|
||||
"manufacturerNumber" text,
|
||||
"purchaseDate" date,
|
||||
"purchasePrice" double precision DEFAULT 0,
|
||||
"currentValue" double precision,
|
||||
"product" bigint,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_parentSpace_customerspaces_id_fk" FOREIGN KEY ("parentSpace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_product_products_id_fk" FOREIGN KEY ("product") REFERENCES "public"."products"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "customerinventoryitems_tenant_customerInventoryId_idx" ON "customerinventoryitems" USING btree ("tenant","customerInventoryId");
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD COLUMN "customerspace" bigint;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD COLUMN "customerinventoryitem" bigint;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerinventoryitem_customerinventoryitems_id_fk" FOREIGN KEY ("customerinventoryitem") REFERENCES "public"."customerinventoryitems"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "tenants" ALTER COLUMN "numberRanges" SET DEFAULT '{"vendors":{"prefix":"","suffix":"","nextNumber":10000},"customers":{"prefix":"","suffix":"","nextNumber":10000},"products":{"prefix":"AT-","suffix":"","nextNumber":1000},"quotes":{"prefix":"AN-","suffix":"","nextNumber":1000},"confirmationOrders":{"prefix":"AB-","suffix":"","nextNumber":1000},"invoices":{"prefix":"RE-","suffix":"","nextNumber":1000},"spaces":{"prefix":"LP-","suffix":"","nextNumber":1000},"customerspaces":{"prefix":"KLP-","suffix":"","nextNumber":1000},"inventoryitems":{"prefix":"IA-","suffix":"","nextNumber":1000},"customerinventoryitems":{"prefix":"KIA-","suffix":"","nextNumber":1000},"projects":{"prefix":"PRJ-","suffix":"","nextNumber":1000},"costcentres":{"prefix":"KST-","suffix":"","nextNumber":1000}}'::jsonb;
|
||||
--> statement-breakpoint
|
||||
UPDATE "tenants"
|
||||
SET "numberRanges" = COALESCE("numberRanges", '{}'::jsonb) || jsonb_build_object(
|
||||
'customerspaces', COALESCE("numberRanges"->'customerspaces', '{"prefix":"KLP-","suffix":"","nextNumber":1000}'::jsonb),
|
||||
'customerinventoryitems', COALESCE("numberRanges"->'customerinventoryitems', '{"prefix":"KIA-","suffix":"","nextNumber":1000}'::jsonb)
|
||||
);
|
||||
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE "customerinventoryitems" ADD COLUMN "vendor" bigint;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_vendor_vendors_id_fk" FOREIGN KEY ("vendor") REFERENCES "public"."vendors"("id") ON DELETE no action ON UPDATE no action;
|
||||
20
backend/db/migrations/0014_smart_memberrelations.sql
Normal file
20
backend/db/migrations/0014_smart_memberrelations.sql
Normal file
@@ -0,0 +1,20 @@
|
||||
CREATE TABLE "memberrelations" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "memberrelations_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"type" text NOT NULL,
|
||||
"billingInterval" text NOT NULL,
|
||||
"billingAmount" double precision DEFAULT 0 NOT NULL,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customers" ADD COLUMN "memberrelation" bigint;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customers" ADD CONSTRAINT "customers_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
ALTER TABLE "historyitems" ADD COLUMN "memberrelation" bigint;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
ALTER TABLE "customers" ADD COLUMN IF NOT EXISTS "memberrelation" bigint;
|
||||
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1
|
||||
FROM pg_constraint
|
||||
WHERE conname = 'customers_memberrelation_memberrelations_id_fk'
|
||||
) THEN
|
||||
ALTER TABLE "customers"
|
||||
ADD CONSTRAINT "customers_memberrelation_memberrelations_id_fk"
|
||||
FOREIGN KEY ("memberrelation")
|
||||
REFERENCES "public"."memberrelations"("id")
|
||||
ON DELETE no action
|
||||
ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
UPDATE "customers"
|
||||
SET "memberrelation" = ("infoData"->>'memberrelation')::bigint
|
||||
WHERE
|
||||
"memberrelation" IS NULL
|
||||
AND "type" = 'Mitglied'
|
||||
AND jsonb_typeof(COALESCE("infoData", '{}'::jsonb)) = 'object'
|
||||
AND COALESCE("infoData", '{}'::jsonb) ? 'memberrelation'
|
||||
AND ("infoData"->>'memberrelation') ~ '^[0-9]+$';
|
||||
|
||||
UPDATE "customers"
|
||||
SET "infoData" = COALESCE("infoData", '{}'::jsonb) - 'memberrelation'
|
||||
WHERE
|
||||
"type" = 'Mitglied'
|
||||
AND jsonb_typeof(COALESCE("infoData", '{}'::jsonb)) = 'object'
|
||||
AND COALESCE("infoData", '{}'::jsonb) ? 'memberrelation';
|
||||
108
backend/db/migrations/0017_slow_the_hood.sql
Normal file
108
backend/db/migrations/0017_slow_the_hood.sql
Normal file
@@ -0,0 +1,108 @@
|
||||
CREATE TABLE "contracttypes" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "contracttypes_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"description" text,
|
||||
"paymentType" text,
|
||||
"recurring" boolean DEFAULT false NOT NULL,
|
||||
"billingInterval" text,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "customerinventoryitems" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerinventoryitems_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"description" text,
|
||||
"tenant" bigint NOT NULL,
|
||||
"customer" bigint NOT NULL,
|
||||
"customerspace" bigint,
|
||||
"customerInventoryId" text NOT NULL,
|
||||
"serialNumber" text,
|
||||
"quantity" bigint DEFAULT 0 NOT NULL,
|
||||
"manufacturer" text,
|
||||
"manufacturerNumber" text,
|
||||
"purchaseDate" date,
|
||||
"purchasePrice" double precision DEFAULT 0,
|
||||
"currentValue" double precision,
|
||||
"product" bigint,
|
||||
"vendor" bigint,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "customerspaces" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerspaces_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"type" text NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"customer" bigint NOT NULL,
|
||||
"spaceNumber" text NOT NULL,
|
||||
"parentSpace" bigint,
|
||||
"infoData" jsonb DEFAULT '{"zip":"","city":"","streetNumber":""}'::jsonb NOT NULL,
|
||||
"description" text,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "entitybankaccounts" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "entitybankaccounts_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"iban_encrypted" jsonb NOT NULL,
|
||||
"bic_encrypted" jsonb NOT NULL,
|
||||
"bank_name_encrypted" jsonb NOT NULL,
|
||||
"description" text,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid,
|
||||
"archived" boolean DEFAULT false NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "memberrelations" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "memberrelations_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"type" text NOT NULL,
|
||||
"billingInterval" text NOT NULL,
|
||||
"billingAmount" double precision DEFAULT 0 NOT NULL,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "tenants" ALTER COLUMN "numberRanges" SET DEFAULT '{"vendors":{"prefix":"","suffix":"","nextNumber":10000},"customers":{"prefix":"","suffix":"","nextNumber":10000},"products":{"prefix":"AT-","suffix":"","nextNumber":1000},"quotes":{"prefix":"AN-","suffix":"","nextNumber":1000},"confirmationOrders":{"prefix":"AB-","suffix":"","nextNumber":1000},"invoices":{"prefix":"RE-","suffix":"","nextNumber":1000},"spaces":{"prefix":"LP-","suffix":"","nextNumber":1000},"customerspaces":{"prefix":"KLP-","suffix":"","nextNumber":1000},"inventoryitems":{"prefix":"IA-","suffix":"","nextNumber":1000},"customerinventoryitems":{"prefix":"KIA-","suffix":"","nextNumber":1000},"projects":{"prefix":"PRJ-","suffix":"","nextNumber":1000},"costcentres":{"prefix":"KST-","suffix":"","nextNumber":1000}}'::jsonb;--> statement-breakpoint
|
||||
ALTER TABLE "contracts" ADD COLUMN "contracttype" bigint;--> statement-breakpoint
|
||||
ALTER TABLE "contracts" ADD COLUMN "billingInterval" text;--> statement-breakpoint
|
||||
ALTER TABLE "customers" ADD COLUMN "customTaxType" text;--> statement-breakpoint
|
||||
ALTER TABLE "customers" ADD COLUMN "memberrelation" bigint;--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD COLUMN "customerspace" bigint;--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD COLUMN "customerinventoryitem" bigint;--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD COLUMN "memberrelation" bigint;--> statement-breakpoint
|
||||
ALTER TABLE "services" ADD COLUMN "priceUpdateLocked" boolean DEFAULT false NOT NULL;--> statement-breakpoint
|
||||
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_product_products_id_fk" FOREIGN KEY ("product") REFERENCES "public"."products"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_vendor_vendors_id_fk" FOREIGN KEY ("vendor") REFERENCES "public"."vendors"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_parentSpace_customerspaces_id_fk" FOREIGN KEY ("parentSpace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "contracts" ADD CONSTRAINT "contracts_contracttype_contracttypes_id_fk" FOREIGN KEY ("contracttype") REFERENCES "public"."contracttypes"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customers" ADD CONSTRAINT "customers_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerinventoryitem_customerinventoryitems_id_fk" FOREIGN KEY ("customerinventoryitem") REFERENCES "public"."customerinventoryitems"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;
|
||||
3
backend/db/migrations/0018_account_chart.sql
Normal file
3
backend/db/migrations/0018_account_chart.sql
Normal file
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE "accounts" ADD COLUMN "accountChart" text DEFAULT 'skr03' NOT NULL;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "tenants" ADD COLUMN "accountChart" text DEFAULT 'skr03' NOT NULL;
|
||||
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE "createddocuments"
|
||||
ALTER COLUMN "customSurchargePercentage" TYPE double precision
|
||||
USING "customSurchargePercentage"::double precision;
|
||||
@@ -36,6 +36,104 @@
|
||||
"when": 1765716877146,
|
||||
"tag": "0004_stormy_onslaught",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 5,
|
||||
"version": "7",
|
||||
"when": 1771096926109,
|
||||
"tag": "0005_green_shinobi_shaw",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 6,
|
||||
"version": "7",
|
||||
"when": 1772000000000,
|
||||
"tag": "0006_nifty_price_lock",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 7,
|
||||
"version": "7",
|
||||
"when": 1772000100000,
|
||||
"tag": "0007_bright_default_tax_type",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 8,
|
||||
"version": "7",
|
||||
"when": 1773000000000,
|
||||
"tag": "0008_quick_contracttypes",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 9,
|
||||
"version": "7",
|
||||
"when": 1773000100000,
|
||||
"tag": "0009_heavy_contract_contracttype",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 10,
|
||||
"version": "7",
|
||||
"when": 1773000200000,
|
||||
"tag": "0010_sudden_billing_interval",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 11,
|
||||
"version": "7",
|
||||
"when": 1773000300000,
|
||||
"tag": "0011_mighty_member_bankaccounts",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 12,
|
||||
"version": "7",
|
||||
"when": 1773000400000,
|
||||
"tag": "0012_shiny_customer_inventory",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 13,
|
||||
"version": "7",
|
||||
"when": 1773000500000,
|
||||
"tag": "0013_brisk_customer_inventory_vendor",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 14,
|
||||
"version": "7",
|
||||
"when": 1773000600000,
|
||||
"tag": "0014_smart_memberrelations",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 15,
|
||||
"version": "7",
|
||||
"when": 1773000700000,
|
||||
"tag": "0015_wise_memberrelation_history",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 16,
|
||||
"version": "7",
|
||||
"when": 1773000800000,
|
||||
"tag": "0016_fix_memberrelation_column_usage",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 17,
|
||||
"version": "7",
|
||||
"when": 1771704862789,
|
||||
"tag": "0017_slow_the_hood",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 18,
|
||||
"version": "7",
|
||||
"when": 1773000900000,
|
||||
"tag": "0018_account_chart",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -16,6 +16,7 @@ export const accounts = pgTable("accounts", {
|
||||
|
||||
number: text("number").notNull(),
|
||||
label: text("label").notNull(),
|
||||
accountChart: text("accountChart").notNull().default("skr03"),
|
||||
|
||||
description: text("description"),
|
||||
})
|
||||
|
||||
@@ -11,6 +11,7 @@ import {
|
||||
import { tenants } from "./tenants"
|
||||
import { customers } from "./customers"
|
||||
import { contacts } from "./contacts"
|
||||
import { contracttypes } from "./contracttypes"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const contracts = pgTable(
|
||||
@@ -48,6 +49,9 @@ export const contracts = pgTable(
|
||||
contact: bigint("contact", { mode: "number" }).references(
|
||||
() => contacts.id
|
||||
),
|
||||
contracttype: bigint("contracttype", { mode: "number" }).references(
|
||||
() => contracttypes.id
|
||||
),
|
||||
|
||||
bankingIban: text("bankingIban"),
|
||||
bankingBIC: text("bankingBIC"),
|
||||
@@ -57,6 +61,7 @@ export const contracts = pgTable(
|
||||
sepaDate: timestamp("sepaDate", { withTimezone: true }),
|
||||
|
||||
paymentType: text("paymentType"),
|
||||
billingInterval: text("billingInterval"),
|
||||
invoiceDispatch: text("invoiceDispatch"),
|
||||
|
||||
ownFields: jsonb("ownFields").notNull().default({}),
|
||||
|
||||
40
backend/db/schema/contracttypes.ts
Normal file
40
backend/db/schema/contracttypes.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import {
|
||||
pgTable,
|
||||
bigint,
|
||||
timestamp,
|
||||
text,
|
||||
boolean,
|
||||
uuid,
|
||||
} from "drizzle-orm/pg-core"
|
||||
|
||||
import { tenants } from "./tenants"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const contracttypes = pgTable("contracttypes", {
|
||||
id: bigint("id", { mode: "number" })
|
||||
.primaryKey()
|
||||
.generatedByDefaultAsIdentity(),
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
tenant: bigint("tenant", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => tenants.id),
|
||||
|
||||
name: text("name").notNull(),
|
||||
description: text("description"),
|
||||
|
||||
paymentType: text("paymentType"),
|
||||
recurring: boolean("recurring").notNull().default(false),
|
||||
billingInterval: text("billingInterval"),
|
||||
|
||||
archived: boolean("archived").notNull().default(false),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
})
|
||||
|
||||
export type ContractType = typeof contracttypes.$inferSelect
|
||||
export type NewContractType = typeof contracttypes.$inferInsert
|
||||
@@ -6,6 +6,7 @@ import {
|
||||
jsonb,
|
||||
boolean,
|
||||
smallint,
|
||||
doublePrecision,
|
||||
uuid,
|
||||
} from "drizzle-orm/pg-core"
|
||||
|
||||
@@ -96,7 +97,7 @@ export const createddocuments = pgTable("createddocuments", {
|
||||
|
||||
taxType: text("taxType"),
|
||||
|
||||
customSurchargePercentage: smallint("customSurchargePercentage")
|
||||
customSurchargePercentage: doublePrecision("customSurchargePercentage")
|
||||
.notNull()
|
||||
.default(0),
|
||||
|
||||
|
||||
66
backend/db/schema/customerinventoryitems.ts
Normal file
66
backend/db/schema/customerinventoryitems.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import {
|
||||
pgTable,
|
||||
bigint,
|
||||
timestamp,
|
||||
text,
|
||||
boolean,
|
||||
doublePrecision,
|
||||
uuid,
|
||||
date,
|
||||
} from "drizzle-orm/pg-core"
|
||||
|
||||
import { tenants } from "./tenants"
|
||||
import { customers } from "./customers"
|
||||
import { customerspaces } from "./customerspaces"
|
||||
import { products } from "./products"
|
||||
import { vendors } from "./vendors"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const customerinventoryitems = pgTable("customerinventoryitems", {
|
||||
id: bigint("id", { mode: "number" })
|
||||
.primaryKey()
|
||||
.generatedByDefaultAsIdentity(),
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
name: text("name").notNull(),
|
||||
|
||||
description: text("description"),
|
||||
|
||||
tenant: bigint("tenant", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => tenants.id),
|
||||
|
||||
customer: bigint("customer", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => customers.id),
|
||||
|
||||
customerspace: bigint("customerspace", { mode: "number" }).references(
|
||||
() => customerspaces.id
|
||||
),
|
||||
|
||||
customerInventoryId: text("customerInventoryId").notNull(),
|
||||
serialNumber: text("serialNumber"),
|
||||
|
||||
quantity: bigint("quantity", { mode: "number" }).notNull().default(0),
|
||||
|
||||
manufacturer: text("manufacturer"),
|
||||
manufacturerNumber: text("manufacturerNumber"),
|
||||
|
||||
purchaseDate: date("purchaseDate"),
|
||||
purchasePrice: doublePrecision("purchasePrice").default(0),
|
||||
currentValue: doublePrecision("currentValue"),
|
||||
|
||||
product: bigint("product", { mode: "number" }).references(() => products.id),
|
||||
vendor: bigint("vendor", { mode: "number" }).references(() => vendors.id),
|
||||
|
||||
archived: boolean("archived").notNull().default(false),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
})
|
||||
|
||||
export type CustomerInventoryItem = typeof customerinventoryitems.$inferSelect
|
||||
export type NewCustomerInventoryItem = typeof customerinventoryitems.$inferInsert
|
||||
@@ -10,6 +10,7 @@ import {
|
||||
} from "drizzle-orm/pg-core"
|
||||
import { tenants } from "./tenants"
|
||||
import { authUsers } from "./auth_users"
|
||||
import { memberrelations } from "./memberrelations"
|
||||
|
||||
export const customers = pgTable(
|
||||
"customers",
|
||||
@@ -62,6 +63,8 @@ export const customers = pgTable(
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
|
||||
customPaymentType: text("custom_payment_type"), // ENUM payment_types separat?
|
||||
customTaxType: text("customTaxType"),
|
||||
memberrelation: bigint("memberrelation", { mode: "number" }).references(() => memberrelations.id),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
54
backend/db/schema/customerspaces.ts
Normal file
54
backend/db/schema/customerspaces.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import {
|
||||
pgTable,
|
||||
bigint,
|
||||
timestamp,
|
||||
text,
|
||||
boolean,
|
||||
jsonb,
|
||||
uuid,
|
||||
} from "drizzle-orm/pg-core"
|
||||
|
||||
import { tenants } from "./tenants"
|
||||
import { customers } from "./customers"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const customerspaces = pgTable("customerspaces", {
|
||||
id: bigint("id", { mode: "number" })
|
||||
.primaryKey()
|
||||
.generatedByDefaultAsIdentity(),
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
name: text("name").notNull(),
|
||||
type: text("type").notNull(),
|
||||
|
||||
tenant: bigint("tenant", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => tenants.id),
|
||||
|
||||
customer: bigint("customer", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => customers.id),
|
||||
|
||||
space_number: text("spaceNumber").notNull(),
|
||||
|
||||
parentSpace: bigint("parentSpace", { mode: "number" }).references(
|
||||
() => customerspaces.id
|
||||
),
|
||||
|
||||
info_data: jsonb("infoData")
|
||||
.notNull()
|
||||
.default({ zip: "", city: "", streetNumber: "" }),
|
||||
|
||||
description: text("description"),
|
||||
|
||||
archived: boolean("archived").notNull().default(false),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
})
|
||||
|
||||
export type CustomerSpace = typeof customerspaces.$inferSelect
|
||||
export type NewCustomerSpace = typeof customerspaces.$inferInsert
|
||||
@@ -3,7 +3,7 @@ import {
|
||||
uuid,
|
||||
timestamp,
|
||||
text,
|
||||
bigint,
|
||||
bigint, jsonb,
|
||||
} from "drizzle-orm/pg-core"
|
||||
|
||||
import { tenants } from "./tenants"
|
||||
@@ -23,6 +23,11 @@ export const devices = pgTable("devices", {
|
||||
password: text("password"),
|
||||
|
||||
externalId: text("externalId"),
|
||||
|
||||
lastSeen: timestamp("last_seen", { withTimezone: true }),
|
||||
|
||||
// Hier speichern wir den ganzen Payload (RSSI, Heap, IP, etc.)
|
||||
lastDebugInfo: jsonb("last_debug_info"),
|
||||
})
|
||||
|
||||
export type Device = typeof devices.$inferSelect
|
||||
|
||||
39
backend/db/schema/entitybankaccounts.ts
Normal file
39
backend/db/schema/entitybankaccounts.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import {
|
||||
pgTable,
|
||||
bigint,
|
||||
timestamp,
|
||||
text,
|
||||
boolean,
|
||||
jsonb,
|
||||
uuid,
|
||||
} from "drizzle-orm/pg-core"
|
||||
import { tenants } from "./tenants"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const entitybankaccounts = pgTable("entitybankaccounts", {
|
||||
id: bigint("id", { mode: "number" })
|
||||
.primaryKey()
|
||||
.generatedByDefaultAsIdentity(),
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
tenant: bigint("tenant", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => tenants.id),
|
||||
|
||||
ibanEncrypted: jsonb("iban_encrypted").notNull(),
|
||||
bicEncrypted: jsonb("bic_encrypted").notNull(),
|
||||
bankNameEncrypted: jsonb("bank_name_encrypted").notNull(),
|
||||
|
||||
description: text("description"),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
|
||||
archived: boolean("archived").notNull().default(false),
|
||||
})
|
||||
|
||||
export type EntityBankAccount = typeof entitybankaccounts.$inferSelect
|
||||
export type NewEntityBankAccount = typeof entitybankaccounts.$inferInsert
|
||||
@@ -73,6 +73,7 @@ export const files = pgTable("files", {
|
||||
createdBy: uuid("created_by").references(() => authUsers.id),
|
||||
|
||||
authProfile: uuid("auth_profile").references(() => authProfiles.id),
|
||||
size: bigint("size", { mode: "number" }),
|
||||
})
|
||||
|
||||
export type File = typeof files.$inferSelect
|
||||
|
||||
@@ -20,6 +20,8 @@ import { tasks } from "./tasks"
|
||||
import { vehicles } from "./vehicles"
|
||||
import { bankstatements } from "./bankstatements"
|
||||
import { spaces } from "./spaces"
|
||||
import { customerspaces } from "./customerspaces"
|
||||
import { customerinventoryitems } from "./customerinventoryitems"
|
||||
import { costcentres } from "./costcentres"
|
||||
import { ownaccounts } from "./ownaccounts"
|
||||
import { createddocuments } from "./createddocuments"
|
||||
@@ -32,6 +34,7 @@ import { events } from "./events"
|
||||
import { inventoryitemgroups } from "./inventoryitemgroups"
|
||||
import { authUsers } from "./auth_users"
|
||||
import {files} from "./files";
|
||||
import { memberrelations } from "./memberrelations";
|
||||
|
||||
export const historyitems = pgTable("historyitems", {
|
||||
id: bigint("id", { mode: "number" })
|
||||
@@ -99,6 +102,12 @@ export const historyitems = pgTable("historyitems", {
|
||||
|
||||
space: bigint("space", { mode: "number" }).references(() => spaces.id),
|
||||
|
||||
customerspace: bigint("customerspace", { mode: "number" }).references(() => customerspaces.id),
|
||||
|
||||
customerinventoryitem: bigint("customerinventoryitem", { mode: "number" }).references(() => customerinventoryitems.id),
|
||||
|
||||
memberrelation: bigint("memberrelation", { mode: "number" }).references(() => memberrelations.id),
|
||||
|
||||
config: jsonb("config"),
|
||||
|
||||
projecttype: bigint("projecttype", { mode: "number" }).references(
|
||||
|
||||
@@ -14,7 +14,7 @@ export const hourrates = pgTable("hourrates", {
|
||||
|
||||
name: text("name").notNull(),
|
||||
|
||||
purchasePrice: doublePrecision("purchasePrice").notNull(),
|
||||
purchase_price: doublePrecision("purchasePrice").notNull(),
|
||||
sellingPrice: doublePrecision("sellingPrice").notNull(),
|
||||
|
||||
archived: boolean("archived").notNull().default(false),
|
||||
|
||||
@@ -13,15 +13,19 @@ export * from "./checks"
|
||||
export * from "./citys"
|
||||
export * from "./contacts"
|
||||
export * from "./contracts"
|
||||
export * from "./contracttypes"
|
||||
export * from "./costcentres"
|
||||
export * from "./countrys"
|
||||
export * from "./createddocuments"
|
||||
export * from "./createdletters"
|
||||
export * from "./customers"
|
||||
export * from "./customerspaces"
|
||||
export * from "./customerinventoryitems"
|
||||
export * from "./devices"
|
||||
export * from "./documentboxes"
|
||||
export * from "./enums"
|
||||
export * from "./events"
|
||||
export * from "./entitybankaccounts"
|
||||
export * from "./files"
|
||||
export * from "./filetags"
|
||||
export * from "./folders"
|
||||
@@ -42,7 +46,9 @@ export * from "./incominginvoices"
|
||||
export * from "./inventoryitemgroups"
|
||||
export * from "./inventoryitems"
|
||||
export * from "./letterheads"
|
||||
export * from "./memberrelations"
|
||||
export * from "./movements"
|
||||
export * from "./m2m_api_keys"
|
||||
export * from "./notifications_event_types"
|
||||
export * from "./notifications_items"
|
||||
export * from "./notifications_preferences"
|
||||
@@ -72,3 +78,4 @@ export * from "./staff_time_events"
|
||||
export * from "./serialtypes"
|
||||
export * from "./serialexecutions"
|
||||
export * from "./public_links"
|
||||
export * from "./wikipages"
|
||||
|
||||
48
backend/db/schema/m2m_api_keys.ts
Normal file
48
backend/db/schema/m2m_api_keys.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import {
|
||||
pgTable,
|
||||
uuid,
|
||||
bigint,
|
||||
text,
|
||||
timestamp,
|
||||
boolean,
|
||||
} from "drizzle-orm/pg-core"
|
||||
|
||||
import { tenants } from "./tenants"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const m2mApiKeys = pgTable("m2m_api_keys", {
|
||||
id: uuid("id").primaryKey().defaultRandom(),
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
tenantId: bigint("tenant_id", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => tenants.id, { onDelete: "cascade", onUpdate: "cascade" }),
|
||||
|
||||
userId: uuid("user_id")
|
||||
.notNull()
|
||||
.references(() => authUsers.id, { onDelete: "cascade", onUpdate: "cascade" }),
|
||||
|
||||
createdBy: uuid("created_by").references(() => authUsers.id, {
|
||||
onDelete: "set null",
|
||||
onUpdate: "cascade",
|
||||
}),
|
||||
|
||||
name: text("name").notNull(),
|
||||
keyPrefix: text("key_prefix").notNull(),
|
||||
keyHash: text("key_hash").notNull().unique(),
|
||||
|
||||
active: boolean("active").notNull().default(true),
|
||||
|
||||
lastUsedAt: timestamp("last_used_at", { withTimezone: true }),
|
||||
expiresAt: timestamp("expires_at", { withTimezone: true }),
|
||||
})
|
||||
|
||||
export type M2mApiKey = typeof m2mApiKeys.$inferSelect
|
||||
export type NewM2mApiKey = typeof m2mApiKeys.$inferInsert
|
||||
39
backend/db/schema/memberrelations.ts
Normal file
39
backend/db/schema/memberrelations.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import {
|
||||
pgTable,
|
||||
bigint,
|
||||
timestamp,
|
||||
text,
|
||||
boolean,
|
||||
uuid,
|
||||
doublePrecision,
|
||||
} from "drizzle-orm/pg-core"
|
||||
|
||||
import { tenants } from "./tenants"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const memberrelations = pgTable("memberrelations", {
|
||||
id: bigint("id", { mode: "number" })
|
||||
.primaryKey()
|
||||
.generatedByDefaultAsIdentity(),
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
tenant: bigint("tenant", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => tenants.id),
|
||||
|
||||
type: text("type").notNull(),
|
||||
billingInterval: text("billingInterval").notNull(),
|
||||
billingAmount: doublePrecision("billingAmount").notNull().default(0),
|
||||
|
||||
archived: boolean("archived").notNull().default(false),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
})
|
||||
|
||||
export type MemberRelation = typeof memberrelations.$inferSelect
|
||||
export type NewMemberRelation = typeof memberrelations.$inferInsert
|
||||
|
||||
@@ -71,7 +71,7 @@ export const projects = pgTable("projects", {
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
|
||||
active_phase: text("active_phase"),
|
||||
active_phase: text("active_phase").default("Erstkontakt"),
|
||||
})
|
||||
|
||||
export type Project = typeof projects.$inferSelect
|
||||
|
||||
@@ -54,6 +54,7 @@ export const services = pgTable("services", {
|
||||
|
||||
materialComposition: jsonb("materialComposition").notNull().default([]),
|
||||
personalComposition: jsonb("personalComposition").notNull().default([]),
|
||||
priceUpdateLocked: boolean("priceUpdateLocked").notNull().default(false),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
|
||||
@@ -74,6 +74,48 @@ export const tenants = pgTable(
|
||||
timeTracking: true,
|
||||
planningBoard: true,
|
||||
workingTimeTracking: true,
|
||||
dashboard: true,
|
||||
historyitems: true,
|
||||
tasks: true,
|
||||
wiki: true,
|
||||
files: true,
|
||||
createdletters: true,
|
||||
documentboxes: true,
|
||||
helpdesk: true,
|
||||
email: true,
|
||||
members: true,
|
||||
customers: true,
|
||||
vendors: true,
|
||||
contactsList: true,
|
||||
staffTime: true,
|
||||
createDocument: true,
|
||||
serialInvoice: true,
|
||||
incomingInvoices: true,
|
||||
costcentres: true,
|
||||
accounts: true,
|
||||
ownaccounts: true,
|
||||
banking: true,
|
||||
spaces: true,
|
||||
customerspaces: true,
|
||||
customerinventoryitems: true,
|
||||
inventoryitems: true,
|
||||
inventoryitemgroups: true,
|
||||
products: true,
|
||||
productcategories: true,
|
||||
services: true,
|
||||
servicecategories: true,
|
||||
memberrelations: true,
|
||||
staffProfiles: true,
|
||||
hourrates: true,
|
||||
projecttypes: true,
|
||||
contracttypes: true,
|
||||
plants: true,
|
||||
settingsNumberRanges: true,
|
||||
settingsEmailAccounts: true,
|
||||
settingsBanking: true,
|
||||
settingsTexttemplates: true,
|
||||
settingsTenant: true,
|
||||
export: true,
|
||||
}),
|
||||
|
||||
ownFields: jsonb("ownFields"),
|
||||
@@ -88,10 +130,13 @@ export const tenants = pgTable(
|
||||
confirmationOrders: { prefix: "AB-", suffix: "", nextNumber: 1000 },
|
||||
invoices: { prefix: "RE-", suffix: "", nextNumber: 1000 },
|
||||
spaces: { prefix: "LP-", suffix: "", nextNumber: 1000 },
|
||||
customerspaces: { prefix: "KLP-", suffix: "", nextNumber: 1000 },
|
||||
inventoryitems: { prefix: "IA-", suffix: "", nextNumber: 1000 },
|
||||
customerinventoryitems: { prefix: "KIA-", suffix: "", nextNumber: 1000 },
|
||||
projects: { prefix: "PRJ-", suffix: "", nextNumber: 1000 },
|
||||
costcentres: { prefix: "KST-", suffix: "", nextNumber: 1000 },
|
||||
}),
|
||||
accountChart: text("accountChart").notNull().default("skr03"),
|
||||
|
||||
standardEmailForInvoices: text("standardEmailForInvoices"),
|
||||
|
||||
|
||||
99
backend/db/schema/wikipages.ts
Normal file
99
backend/db/schema/wikipages.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
import {
|
||||
pgTable,
|
||||
bigint,
|
||||
text,
|
||||
timestamp,
|
||||
boolean,
|
||||
jsonb,
|
||||
integer,
|
||||
index,
|
||||
uuid,
|
||||
AnyPgColumn
|
||||
} from "drizzle-orm/pg-core"
|
||||
import { relations } from "drizzle-orm"
|
||||
import { tenants } from "./tenants"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const wikiPages = pgTable(
|
||||
"wiki_pages",
|
||||
{
|
||||
// ID des Wiki-Eintrags selbst (neu = UUID)
|
||||
id: uuid("id")
|
||||
.primaryKey()
|
||||
.defaultRandom(),
|
||||
|
||||
tenantId: bigint("tenant_id", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => tenants.id, { onDelete: "cascade" }),
|
||||
|
||||
parentId: uuid("parent_id")
|
||||
.references((): AnyPgColumn => wikiPages.id, { onDelete: "cascade" }),
|
||||
|
||||
title: text("title").notNull(),
|
||||
|
||||
content: jsonb("content"),
|
||||
|
||||
isFolder: boolean("is_folder").notNull().default(false),
|
||||
|
||||
sortOrder: integer("sort_order").notNull().default(0),
|
||||
|
||||
// --- POLYMORPHE BEZIEHUNG (Split) ---
|
||||
|
||||
// Art der Entität (z.B. 'customer', 'invoice', 'iot_device')
|
||||
entityType: text("entity_type"),
|
||||
|
||||
// SPALTE 1: Für Legacy-Tabellen (BigInt)
|
||||
// Nutzung: Wenn entityType='customer', wird hier die ID 1050 gespeichert
|
||||
entityId: bigint("entity_id", { mode: "number" }),
|
||||
|
||||
// SPALTE 2: Für neue Tabellen (UUID)
|
||||
// Nutzung: Wenn entityType='iot_device', wird hier die UUID gespeichert
|
||||
entityUuid: uuid("entity_uuid"),
|
||||
|
||||
// ------------------------------------
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
|
||||
createdBy: uuid("created_by").references(() => authUsers.id),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
},
|
||||
(table) => ({
|
||||
tenantIdx: index("wiki_pages_tenant_idx").on(table.tenantId),
|
||||
parentIdx: index("wiki_pages_parent_idx").on(table.parentId),
|
||||
|
||||
// ZWEI separate Indexe für schnelle Lookups, je nachdem welche ID genutzt wird
|
||||
// Fall 1: Suche nach Notizen für Kunde 1050
|
||||
entityIntIdx: index("wiki_pages_entity_int_idx")
|
||||
.on(table.tenantId, table.entityType, table.entityId),
|
||||
|
||||
// Fall 2: Suche nach Notizen für IoT-Device 550e84...
|
||||
entityUuidIdx: index("wiki_pages_entity_uuid_idx")
|
||||
.on(table.tenantId, table.entityType, table.entityUuid),
|
||||
})
|
||||
)
|
||||
|
||||
export const wikiPagesRelations = relations(wikiPages, ({ one, many }) => ({
|
||||
tenant: one(tenants, {
|
||||
fields: [wikiPages.tenantId],
|
||||
references: [tenants.id],
|
||||
}),
|
||||
parent: one(wikiPages, {
|
||||
fields: [wikiPages.parentId],
|
||||
references: [wikiPages.id],
|
||||
relationName: "parent_child",
|
||||
}),
|
||||
children: many(wikiPages, {
|
||||
relationName: "parent_child",
|
||||
}),
|
||||
author: one(authUsers, {
|
||||
fields: [wikiPages.createdBy],
|
||||
references: [authUsers.id],
|
||||
}),
|
||||
}))
|
||||
|
||||
export type WikiPage = typeof wikiPages.$inferSelect
|
||||
export type NewWikiPage = typeof wikiPages.$inferInsert
|
||||
@@ -6,6 +6,6 @@ export default defineConfig({
|
||||
schema: "./db/schema",
|
||||
out: "./db/migrations",
|
||||
dbCredentials: {
|
||||
url: secrets.DATABASE_URL || process.env.DATABASE_URL,
|
||||
url: secrets.DATABASE_URL || "postgres://postgres:wJw7aNpEBJdcxgoct6GXNpvY4Cn6ECqu@fedeo-db-001.vpn.internal:5432/fedeo",
|
||||
},
|
||||
})
|
||||
@@ -5,9 +5,14 @@
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"dev": "tsx watch src/index.ts",
|
||||
"fill": "ts-node src/webdav/fill-file-sizes.ts",
|
||||
"dev:dav": "tsx watch src/webdav/server.ts",
|
||||
"build": "tsc",
|
||||
"start": "node dist/src/index.js",
|
||||
"schema:index": "ts-node scripts/generate-schema-index.ts"
|
||||
"schema:index": "ts-node scripts/generate-schema-index.ts",
|
||||
"bankcodes:update": "tsx scripts/generate-de-bank-codes.ts",
|
||||
"members:import:csv": "tsx scripts/import-members-csv.ts",
|
||||
"accounts:import:skr42": "ts-node scripts/import-skr42-accounts.ts"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -27,7 +32,6 @@
|
||||
"@infisical/sdk": "^4.0.6",
|
||||
"@mmote/niimbluelib": "^0.0.1-alpha.29",
|
||||
"@prisma/client": "^6.15.0",
|
||||
"@supabase/supabase-js": "^2.56.1",
|
||||
"@zip.js/zip.js": "^2.7.73",
|
||||
"archiver": "^7.0.1",
|
||||
"axios": "^1.12.1",
|
||||
@@ -48,6 +52,7 @@
|
||||
"pg": "^8.16.3",
|
||||
"pngjs": "^7.0.0",
|
||||
"sharp": "^0.34.5",
|
||||
"webdav-server": "^2.6.2",
|
||||
"xmlbuilder": "^15.1.1",
|
||||
"zpl-image": "^0.2.0",
|
||||
"zpl-renderer-js": "^2.0.2"
|
||||
|
||||
95
backend/scripts/generate-de-bank-codes.ts
Normal file
95
backend/scripts/generate-de-bank-codes.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
import fs from "node:fs/promises"
|
||||
import path from "node:path"
|
||||
import https from "node:https"
|
||||
|
||||
const DEFAULT_SOURCE_URL =
|
||||
"https://www.bundesbank.de/resource/blob/602632/bec25ca5df1eb62fefadd8325dafe67c/472B63F073F071307366337C94F8C870/blz-aktuell-txt-data.txt"
|
||||
|
||||
const OUTPUT_NAME_FILE = path.resolve("src/utils/deBankCodes.ts")
|
||||
const OUTPUT_BIC_FILE = path.resolve("src/utils/deBankBics.ts")
|
||||
|
||||
function fetchBuffer(url: string): Promise<Buffer> {
|
||||
return new Promise((resolve, reject) => {
|
||||
https
|
||||
.get(url, (res) => {
|
||||
if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
|
||||
return resolve(fetchBuffer(res.headers.location))
|
||||
}
|
||||
|
||||
if (res.statusCode !== 200) {
|
||||
return reject(new Error(`Download failed with status ${res.statusCode}`))
|
||||
}
|
||||
|
||||
const chunks: Buffer[] = []
|
||||
res.on("data", (chunk) => chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)))
|
||||
res.on("end", () => resolve(Buffer.concat(chunks)))
|
||||
res.on("error", reject)
|
||||
})
|
||||
.on("error", reject)
|
||||
})
|
||||
}
|
||||
|
||||
function escapeTsString(value: string) {
|
||||
return value.replace(/\\/g, "\\\\").replace(/"/g, '\\"')
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const source = process.env.BLZ_SOURCE_URL || DEFAULT_SOURCE_URL
|
||||
const sourceFile = process.env.BLZ_SOURCE_FILE
|
||||
let raw: Buffer
|
||||
|
||||
if (sourceFile) {
|
||||
console.log(`Reading BLZ source file: ${sourceFile}`)
|
||||
raw = await fs.readFile(sourceFile)
|
||||
} else {
|
||||
console.log(`Downloading BLZ source: ${source}`)
|
||||
raw = await fetchBuffer(source)
|
||||
}
|
||||
const content = raw.toString("latin1")
|
||||
|
||||
const lines = content.split(/\r?\n/)
|
||||
const nameMap = new Map<string, string>()
|
||||
const bicMap = new Map<string, string>()
|
||||
|
||||
for (const line of lines) {
|
||||
if (!line || line.length < 150) continue
|
||||
const blz = line.slice(0, 8).trim()
|
||||
const name = line.slice(9, 67).trim()
|
||||
const bic = line.slice(139, 150).trim()
|
||||
|
||||
if (!/^\d{8}$/.test(blz) || !name) continue
|
||||
if (!nameMap.has(blz)) nameMap.set(blz, name)
|
||||
if (bic && !bicMap.has(blz)) bicMap.set(blz, bic)
|
||||
}
|
||||
|
||||
const sortedNames = [...nameMap.entries()].sort(([a], [b]) => a.localeCompare(b))
|
||||
const sortedBics = [...bicMap.entries()].sort(([a], [b]) => a.localeCompare(b))
|
||||
|
||||
const nameOutputLines = [
|
||||
"// Lokale Bankleitzahl-zu-Institut Zuordnung (DE).",
|
||||
"// Quelle: Deutsche Bundesbank, BLZ-Datei (vollstaendig).",
|
||||
"export const DE_BANK_CODE_TO_NAME: Record<string, string> = {",
|
||||
...sortedNames.map(([blz, name]) => ` "${blz}": "${escapeTsString(name)}",`),
|
||||
"}",
|
||||
"",
|
||||
]
|
||||
|
||||
const bicOutputLines = [
|
||||
"// Lokale Bankleitzahl-zu-BIC Zuordnung (DE).",
|
||||
"// Quelle: Deutsche Bundesbank, BLZ-Datei (vollstaendig).",
|
||||
"export const DE_BANK_CODE_TO_BIC: Record<string, string> = {",
|
||||
...sortedBics.map(([blz, bic]) => ` "${blz}": "${escapeTsString(bic)}",`),
|
||||
"}",
|
||||
"",
|
||||
]
|
||||
|
||||
await fs.writeFile(OUTPUT_NAME_FILE, nameOutputLines.join("\n"), "utf8")
|
||||
await fs.writeFile(OUTPUT_BIC_FILE, bicOutputLines.join("\n"), "utf8")
|
||||
console.log(`Wrote ${sortedNames.length} bank names to ${OUTPUT_NAME_FILE}`)
|
||||
console.log(`Wrote ${sortedBics.length} bank BICs to ${OUTPUT_BIC_FILE}`)
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
@@ -1,6 +1,5 @@
|
||||
import Fastify from "fastify";
|
||||
import swaggerPlugin from "./plugins/swagger"
|
||||
import supabasePlugin from "./plugins/supabase";
|
||||
import dayjsPlugin from "./plugins/dayjs";
|
||||
import healthRoutes from "./routes/health";
|
||||
import meRoutes from "./routes/auth/me";
|
||||
@@ -29,6 +28,7 @@ import staffTimeRoutes from "./routes/staff/time";
|
||||
import staffTimeConnectRoutes from "./routes/staff/timeconnects";
|
||||
import userRoutes from "./routes/auth/user";
|
||||
import publiclinksAuthenticatedRoutes from "./routes/publiclinks/publiclinks-authenticated";
|
||||
import wikiRoutes from "./routes/wiki";
|
||||
|
||||
//Public Links
|
||||
import publiclinksNonAuthenticatedRoutes from "./routes/publiclinks/publiclinks-non-authenticated";
|
||||
@@ -42,9 +42,11 @@ import helpdeskInboundEmailRoutes from "./routes/helpdesk.inbound.email";
|
||||
import deviceRoutes from "./routes/internal/devices";
|
||||
import tenantRoutesInternal from "./routes/internal/tenant";
|
||||
import staffTimeRoutesInternal from "./routes/internal/time";
|
||||
import authM2mInternalRoutes from "./routes/internal/auth.m2m";
|
||||
|
||||
//Devices
|
||||
import devicesRFIDRoutes from "./routes/devices/rfid";
|
||||
import devicesManagementRoutes from "./routes/devices/management";
|
||||
|
||||
|
||||
import {sendMail} from "./utils/mailer";
|
||||
@@ -52,6 +54,7 @@ import {loadSecrets, secrets} from "./utils/secrets";
|
||||
import {initMailer} from "./utils/mailer"
|
||||
import {initS3} from "./utils/s3";
|
||||
|
||||
|
||||
//Services
|
||||
import servicesPlugin from "./plugins/services";
|
||||
|
||||
@@ -70,8 +73,6 @@ async function main() {
|
||||
|
||||
// Plugins Global verfügbar
|
||||
await app.register(swaggerPlugin);
|
||||
await app.register(corsPlugin);
|
||||
await app.register(supabasePlugin);
|
||||
await app.register(tenantPlugin);
|
||||
await app.register(dayjsPlugin);
|
||||
await app.register(dbPlugin);
|
||||
@@ -107,6 +108,7 @@ async function main() {
|
||||
|
||||
await app.register(async (m2mApp) => {
|
||||
await m2mApp.register(authM2m)
|
||||
await m2mApp.register(authM2mInternalRoutes)
|
||||
await m2mApp.register(helpdeskInboundEmailRoutes)
|
||||
await m2mApp.register(deviceRoutes)
|
||||
await m2mApp.register(tenantRoutesInternal)
|
||||
@@ -115,8 +117,10 @@ async function main() {
|
||||
|
||||
await app.register(async (devicesApp) => {
|
||||
await devicesApp.register(devicesRFIDRoutes)
|
||||
await devicesApp.register(devicesManagementRoutes)
|
||||
},{prefix: "/devices"})
|
||||
|
||||
await app.register(corsPlugin);
|
||||
|
||||
//Geschützte Routes
|
||||
|
||||
@@ -141,11 +145,13 @@ async function main() {
|
||||
await subApp.register(userRoutes);
|
||||
await subApp.register(publiclinksAuthenticatedRoutes);
|
||||
await subApp.register(resourceRoutes);
|
||||
await subApp.register(wikiRoutes);
|
||||
|
||||
},{prefix: "/api"})
|
||||
|
||||
app.ready(async () => {
|
||||
try {
|
||||
console.log("Testing DB Connection:")
|
||||
const result = await app.db.execute("SELECT NOW()");
|
||||
console.log("✓ DB connection OK: " + JSON.stringify(result.rows[0]));
|
||||
} catch (err) {
|
||||
|
||||
@@ -19,15 +19,14 @@ import {
|
||||
and,
|
||||
} from "drizzle-orm"
|
||||
|
||||
|
||||
export function syncDokuboxService (server: FastifyInstance) {
|
||||
let badMessageDetected = false
|
||||
let badMessageMessageSent = false
|
||||
|
||||
let client: ImapFlow | null = null
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// IMAP CLIENT INITIALIZEN
|
||||
// -------------------------------------------------------------
|
||||
export async function initDokuboxClient() {
|
||||
async function initDokuboxClient() {
|
||||
client = new ImapFlow({
|
||||
host: secrets.DOKUBOX_IMAP_HOST,
|
||||
port: secrets.DOKUBOX_IMAP_PORT,
|
||||
@@ -44,13 +43,7 @@ export async function initDokuboxClient() {
|
||||
await client.connect()
|
||||
}
|
||||
|
||||
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// MAIN SYNC FUNCTION (DRIZZLE VERSION)
|
||||
// -------------------------------------------------------------
|
||||
export const syncDokubox = (server: FastifyInstance) =>
|
||||
async () => {
|
||||
const syncDokubox = async () => {
|
||||
|
||||
console.log("Perform Dokubox Sync")
|
||||
|
||||
@@ -130,11 +123,6 @@ export const syncDokubox = (server: FastifyInstance) =>
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// TENANT ERKENNEN + FOLDER/FILETYPES (DRIZZLE VERSION)
|
||||
// -------------------------------------------------------------
|
||||
const getMessageConfigDrizzle = async (
|
||||
server: FastifyInstance,
|
||||
message,
|
||||
@@ -257,3 +245,12 @@ const getMessageConfigDrizzle = async (
|
||||
filetype: filetypeId
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
run: async () => {
|
||||
await initDokuboxClient()
|
||||
await syncDokubox()
|
||||
console.log("Service: Dokubox sync finished")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,9 +8,108 @@ import {
|
||||
files,
|
||||
filetags,
|
||||
incominginvoices,
|
||||
vendors,
|
||||
} from "../../../db/schema"
|
||||
|
||||
import { eq, and, isNull, not } from "drizzle-orm"
|
||||
import { eq, and, isNull, not, desc } from "drizzle-orm"
|
||||
|
||||
type InvoiceAccount = {
|
||||
account?: number | null
|
||||
description?: string | null
|
||||
taxType?: string | number | null
|
||||
}
|
||||
|
||||
const normalizeAccounts = (accounts: unknown): InvoiceAccount[] => {
|
||||
if (!Array.isArray(accounts)) return []
|
||||
return accounts
|
||||
.map((entry: any) => ({
|
||||
account: typeof entry?.account === "number" ? entry.account : null,
|
||||
description: typeof entry?.description === "string" ? entry.description : null,
|
||||
taxType: entry?.taxType ?? null,
|
||||
}))
|
||||
.filter((entry) => entry.account !== null || entry.description || entry.taxType !== null)
|
||||
}
|
||||
|
||||
const buildLearningContext = (historicalInvoices: any[]) => {
|
||||
if (!historicalInvoices.length) return null
|
||||
|
||||
const vendorProfiles = new Map<number, {
|
||||
vendorName: string
|
||||
paymentTypes: Map<string, number>
|
||||
accountUsage: Map<number, number>
|
||||
sampleDescriptions: string[]
|
||||
}>()
|
||||
|
||||
const recentExamples: any[] = []
|
||||
|
||||
for (const invoice of historicalInvoices) {
|
||||
const accounts = normalizeAccounts(invoice.accounts)
|
||||
const vendorId = typeof invoice.vendorId === "number" ? invoice.vendorId : null
|
||||
const vendorName = typeof invoice.vendorName === "string" ? invoice.vendorName : "Unknown"
|
||||
|
||||
if (vendorId) {
|
||||
if (!vendorProfiles.has(vendorId)) {
|
||||
vendorProfiles.set(vendorId, {
|
||||
vendorName,
|
||||
paymentTypes: new Map(),
|
||||
accountUsage: new Map(),
|
||||
sampleDescriptions: [],
|
||||
})
|
||||
}
|
||||
|
||||
const profile = vendorProfiles.get(vendorId)!
|
||||
if (invoice.paymentType) {
|
||||
const key = String(invoice.paymentType)
|
||||
profile.paymentTypes.set(key, (profile.paymentTypes.get(key) ?? 0) + 1)
|
||||
}
|
||||
for (const account of accounts) {
|
||||
if (typeof account.account === "number") {
|
||||
profile.accountUsage.set(account.account, (profile.accountUsage.get(account.account) ?? 0) + 1)
|
||||
}
|
||||
}
|
||||
if (invoice.description && profile.sampleDescriptions.length < 3) {
|
||||
profile.sampleDescriptions.push(String(invoice.description).slice(0, 120))
|
||||
}
|
||||
}
|
||||
|
||||
if (recentExamples.length < 20) {
|
||||
recentExamples.push({
|
||||
vendorId,
|
||||
vendorName,
|
||||
paymentType: invoice.paymentType ?? null,
|
||||
accounts: accounts.map((entry) => ({
|
||||
account: entry.account,
|
||||
description: entry.description ?? null,
|
||||
taxType: entry.taxType ?? null,
|
||||
})),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const vendorPatterns = Array.from(vendorProfiles.entries())
|
||||
.map(([vendorId, profile]) => {
|
||||
const commonPaymentType = Array.from(profile.paymentTypes.entries())
|
||||
.sort((a, b) => b[1] - a[1])[0]?.[0] ?? null
|
||||
const topAccounts = Array.from(profile.accountUsage.entries())
|
||||
.sort((a, b) => b[1] - a[1])
|
||||
.slice(0, 4)
|
||||
.map(([accountId, count]) => ({ accountId, count }))
|
||||
|
||||
return {
|
||||
vendorId,
|
||||
vendorName: profile.vendorName,
|
||||
commonPaymentType,
|
||||
topAccounts,
|
||||
sampleDescriptions: profile.sampleDescriptions,
|
||||
}
|
||||
})
|
||||
.slice(0, 50)
|
||||
|
||||
return JSON.stringify({
|
||||
vendorPatterns,
|
||||
recentExamples,
|
||||
})
|
||||
}
|
||||
|
||||
export function prepareIncomingInvoices(server: FastifyInstance) {
|
||||
const processInvoices = async (tenantId:number) => {
|
||||
@@ -72,13 +171,34 @@ export function prepareIncomingInvoices(server: FastifyInstance) {
|
||||
continue
|
||||
}
|
||||
|
||||
const historicalInvoices = await server.db
|
||||
.select({
|
||||
vendorId: incominginvoices.vendor,
|
||||
vendorName: vendors.name,
|
||||
paymentType: incominginvoices.paymentType,
|
||||
description: incominginvoices.description,
|
||||
accounts: incominginvoices.accounts,
|
||||
})
|
||||
.from(incominginvoices)
|
||||
.leftJoin(vendors, eq(incominginvoices.vendor, vendors.id))
|
||||
.where(
|
||||
and(
|
||||
eq(incominginvoices.tenant, tenantId),
|
||||
eq(incominginvoices.archived, false)
|
||||
)
|
||||
)
|
||||
.orderBy(desc(incominginvoices.createdAt))
|
||||
.limit(120)
|
||||
|
||||
const learningContext = buildLearningContext(historicalInvoices)
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// 3️⃣ Jede Datei einzeln durch GPT jagen & IncomingInvoice erzeugen
|
||||
// -------------------------------------------------------------
|
||||
for (const file of filesRes) {
|
||||
console.log(`Processing file ${file.id} for tenant ${tenantId}`)
|
||||
|
||||
const data = await getInvoiceDataFromGPT(server,file, tenantId)
|
||||
const data = await getInvoiceDataFromGPT(server,file, tenantId, learningContext ?? undefined)
|
||||
|
||||
if (!data) {
|
||||
server.log.warn(`GPT returned no data for file ${file.id}`)
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
// modules/helpdesk/helpdesk.contact.service.ts
|
||||
import { FastifyInstance } from 'fastify'
|
||||
import { and, eq, or } from "drizzle-orm";
|
||||
import { helpdesk_contacts } from "../../../db/schema";
|
||||
|
||||
export async function getOrCreateContact(
|
||||
server: FastifyInstance,
|
||||
@@ -9,30 +11,35 @@ export async function getOrCreateContact(
|
||||
if (!email && !phone) throw new Error('Contact must have at least an email or phone')
|
||||
|
||||
// Bestehenden Kontakt prüfen
|
||||
const { data: existing, error: findError } = await server.supabase
|
||||
.from('helpdesk_contacts')
|
||||
.select('*')
|
||||
.eq('tenant_id', tenant_id)
|
||||
.or(`email.eq.${email || ''},phone.eq.${phone || ''}`)
|
||||
.maybeSingle()
|
||||
const matchConditions = []
|
||||
if (email) matchConditions.push(eq(helpdesk_contacts.email, email))
|
||||
if (phone) matchConditions.push(eq(helpdesk_contacts.phone, phone))
|
||||
|
||||
if (findError) throw findError
|
||||
if (existing) return existing
|
||||
const existing = await server.db
|
||||
.select()
|
||||
.from(helpdesk_contacts)
|
||||
.where(
|
||||
and(
|
||||
eq(helpdesk_contacts.tenantId, tenant_id),
|
||||
or(...matchConditions)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existing[0]) return existing[0]
|
||||
|
||||
// Anlegen
|
||||
const { data: created, error: insertError } = await server.supabase
|
||||
.from('helpdesk_contacts')
|
||||
.insert({
|
||||
tenant_id,
|
||||
const created = await server.db
|
||||
.insert(helpdesk_contacts)
|
||||
.values({
|
||||
tenantId: tenant_id,
|
||||
email,
|
||||
phone,
|
||||
display_name,
|
||||
customer_id,
|
||||
contact_id
|
||||
displayName: display_name,
|
||||
customerId: customer_id,
|
||||
contactId: contact_id
|
||||
})
|
||||
.select()
|
||||
.single()
|
||||
.returning()
|
||||
|
||||
if (insertError) throw insertError
|
||||
return created
|
||||
return created[0]
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
import { FastifyInstance } from 'fastify'
|
||||
import { getOrCreateContact } from './helpdesk.contact.service.js'
|
||||
import {useNextNumberRangeNumber} from "../../utils/functions";
|
||||
import { and, desc, eq } from "drizzle-orm";
|
||||
import { customers, helpdesk_contacts, helpdesk_conversations } from "../../../db/schema";
|
||||
|
||||
export async function createConversation(
|
||||
server: FastifyInstance,
|
||||
@@ -25,24 +27,34 @@ export async function createConversation(
|
||||
|
||||
const {usedNumber } = await useNextNumberRangeNumber(server, tenant_id, "tickets")
|
||||
|
||||
const { data, error } = await server.supabase
|
||||
.from('helpdesk_conversations')
|
||||
.insert({
|
||||
tenant_id,
|
||||
contact_id: contactRecord.id,
|
||||
channel_instance_id,
|
||||
const inserted = await server.db
|
||||
.insert(helpdesk_conversations)
|
||||
.values({
|
||||
tenantId: tenant_id,
|
||||
contactId: contactRecord.id,
|
||||
channelInstanceId: channel_instance_id,
|
||||
subject: subject || null,
|
||||
status: 'open',
|
||||
created_at: new Date().toISOString(),
|
||||
customer_id,
|
||||
contact_person_id,
|
||||
ticket_number: usedNumber
|
||||
createdAt: new Date(),
|
||||
customerId: customer_id,
|
||||
contactPersonId: contact_person_id,
|
||||
ticketNumber: usedNumber
|
||||
})
|
||||
.select()
|
||||
.single()
|
||||
.returning()
|
||||
|
||||
if (error) throw error
|
||||
return data
|
||||
const data = inserted[0]
|
||||
|
||||
return {
|
||||
...data,
|
||||
channel_instance_id: data.channelInstanceId,
|
||||
contact_id: data.contactId,
|
||||
contact_person_id: data.contactPersonId,
|
||||
created_at: data.createdAt,
|
||||
customer_id: data.customerId,
|
||||
last_message_at: data.lastMessageAt,
|
||||
tenant_id: data.tenantId,
|
||||
ticket_number: data.ticketNumber,
|
||||
}
|
||||
}
|
||||
|
||||
export async function getConversations(
|
||||
@@ -52,22 +64,34 @@ export async function getConversations(
|
||||
) {
|
||||
const { status, limit = 50 } = opts || {}
|
||||
|
||||
let query = server.supabase.from('helpdesk_conversations').select('*, customer_id(*)').eq('tenant_id', tenant_id)
|
||||
const filters = [eq(helpdesk_conversations.tenantId, tenant_id)]
|
||||
if (status) filters.push(eq(helpdesk_conversations.status, status))
|
||||
|
||||
if (status) query = query.eq('status', status)
|
||||
query = query.order('last_message_at', { ascending: false }).limit(limit)
|
||||
|
||||
const { data, error } = await query
|
||||
if (error) throw error
|
||||
|
||||
const mappedData = data.map(entry => {
|
||||
return {
|
||||
...entry,
|
||||
customer: entry.customer_id
|
||||
}
|
||||
const data = await server.db
|
||||
.select({
|
||||
conversation: helpdesk_conversations,
|
||||
contact: helpdesk_contacts,
|
||||
customer: customers,
|
||||
})
|
||||
.from(helpdesk_conversations)
|
||||
.leftJoin(helpdesk_contacts, eq(helpdesk_contacts.id, helpdesk_conversations.contactId))
|
||||
.leftJoin(customers, eq(customers.id, helpdesk_conversations.customerId))
|
||||
.where(and(...filters))
|
||||
.orderBy(desc(helpdesk_conversations.lastMessageAt))
|
||||
.limit(limit)
|
||||
|
||||
return mappedData
|
||||
return data.map((entry) => ({
|
||||
...entry.conversation,
|
||||
helpdesk_contacts: entry.contact,
|
||||
channel_instance_id: entry.conversation.channelInstanceId,
|
||||
contact_id: entry.conversation.contactId,
|
||||
contact_person_id: entry.conversation.contactPersonId,
|
||||
created_at: entry.conversation.createdAt,
|
||||
customer_id: entry.customer,
|
||||
last_message_at: entry.conversation.lastMessageAt,
|
||||
tenant_id: entry.conversation.tenantId,
|
||||
ticket_number: entry.conversation.ticketNumber,
|
||||
}))
|
||||
}
|
||||
|
||||
export async function updateConversationStatus(
|
||||
@@ -78,13 +102,22 @@ export async function updateConversationStatus(
|
||||
const valid = ['open', 'in_progress', 'waiting_for_customer', 'answered', 'closed']
|
||||
if (!valid.includes(status)) throw new Error('Invalid status')
|
||||
|
||||
const { data, error } = await server.supabase
|
||||
.from('helpdesk_conversations')
|
||||
.update({ status })
|
||||
.eq('id', conversation_id)
|
||||
.select()
|
||||
.single()
|
||||
const updated = await server.db
|
||||
.update(helpdesk_conversations)
|
||||
.set({ status })
|
||||
.where(eq(helpdesk_conversations.id, conversation_id))
|
||||
.returning()
|
||||
|
||||
if (error) throw error
|
||||
return data
|
||||
const data = updated[0]
|
||||
return {
|
||||
...data,
|
||||
channel_instance_id: data.channelInstanceId,
|
||||
contact_id: data.contactId,
|
||||
contact_person_id: data.contactPersonId,
|
||||
created_at: data.createdAt,
|
||||
customer_id: data.customerId,
|
||||
last_message_at: data.lastMessageAt,
|
||||
tenant_id: data.tenantId,
|
||||
ticket_number: data.ticketNumber,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
// modules/helpdesk/helpdesk.message.service.ts
|
||||
import { FastifyInstance } from 'fastify'
|
||||
import { asc, eq } from "drizzle-orm";
|
||||
import { helpdesk_conversations, helpdesk_messages } from "../../../db/schema";
|
||||
|
||||
export async function addMessage(
|
||||
server: FastifyInstance,
|
||||
@@ -23,38 +25,53 @@ export async function addMessage(
|
||||
) {
|
||||
if (!payload?.text) throw new Error('Message payload requires text content')
|
||||
|
||||
const { data: message, error } = await server.supabase
|
||||
.from('helpdesk_messages')
|
||||
.insert({
|
||||
tenant_id,
|
||||
conversation_id,
|
||||
author_user_id,
|
||||
const inserted = await server.db
|
||||
.insert(helpdesk_messages)
|
||||
.values({
|
||||
tenantId: tenant_id,
|
||||
conversationId: conversation_id,
|
||||
authorUserId: author_user_id,
|
||||
direction,
|
||||
payload,
|
||||
raw_meta,
|
||||
created_at: new Date().toISOString(),
|
||||
rawMeta: raw_meta,
|
||||
externalMessageId: external_message_id,
|
||||
receivedAt: new Date(),
|
||||
})
|
||||
.select()
|
||||
.single()
|
||||
.returning()
|
||||
|
||||
if (error) throw error
|
||||
const message = inserted[0]
|
||||
|
||||
// Letzte Nachricht aktualisieren
|
||||
await server.supabase
|
||||
.from('helpdesk_conversations')
|
||||
.update({ last_message_at: new Date().toISOString() })
|
||||
.eq('id', conversation_id)
|
||||
await server.db
|
||||
.update(helpdesk_conversations)
|
||||
.set({ lastMessageAt: new Date() })
|
||||
.where(eq(helpdesk_conversations.id, conversation_id))
|
||||
|
||||
return message
|
||||
return {
|
||||
...message,
|
||||
author_user_id: message.authorUserId,
|
||||
conversation_id: message.conversationId,
|
||||
created_at: message.createdAt,
|
||||
external_message_id: message.externalMessageId,
|
||||
raw_meta: message.rawMeta,
|
||||
tenant_id: message.tenantId,
|
||||
}
|
||||
}
|
||||
|
||||
export async function getMessages(server: FastifyInstance, conversation_id: string) {
|
||||
const { data, error } = await server.supabase
|
||||
.from('helpdesk_messages')
|
||||
.select('*')
|
||||
.eq('conversation_id', conversation_id)
|
||||
.order('created_at', { ascending: true })
|
||||
const data = await server.db
|
||||
.select()
|
||||
.from(helpdesk_messages)
|
||||
.where(eq(helpdesk_messages.conversationId, conversation_id))
|
||||
.orderBy(asc(helpdesk_messages.createdAt))
|
||||
|
||||
if (error) throw error
|
||||
return data
|
||||
return data.map((message) => ({
|
||||
...message,
|
||||
author_user_id: message.authorUserId,
|
||||
conversation_id: message.conversationId,
|
||||
created_at: message.createdAt,
|
||||
external_message_id: message.externalMessageId,
|
||||
raw_meta: message.rawMeta,
|
||||
tenant_id: message.tenantId,
|
||||
}))
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
// services/notification.service.ts
|
||||
import type { FastifyInstance } from 'fastify';
|
||||
import {secrets} from "../utils/secrets";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { notificationsEventTypes, notificationsItems } from "../../db/schema";
|
||||
|
||||
export type NotificationStatus = 'queued' | 'sent' | 'failed';
|
||||
|
||||
@@ -34,16 +36,16 @@ export class NotificationService {
|
||||
*/
|
||||
async trigger(input: TriggerInput) {
|
||||
const { tenantId, userId, eventType, title, message, payload } = input;
|
||||
const supabase = this.server.supabase;
|
||||
|
||||
// 1) Event-Typ prüfen (aktiv?)
|
||||
const { data: eventTypeRow, error: etErr } = await supabase
|
||||
.from('notifications_event_types')
|
||||
.select('event_key,is_active')
|
||||
.eq('event_key', eventType)
|
||||
.maybeSingle();
|
||||
const eventTypeRows = await this.server.db
|
||||
.select()
|
||||
.from(notificationsEventTypes)
|
||||
.where(eq(notificationsEventTypes.eventKey, eventType))
|
||||
.limit(1)
|
||||
const eventTypeRow = eventTypeRows[0]
|
||||
|
||||
if (etErr || !eventTypeRow || eventTypeRow.is_active !== true) {
|
||||
if (!eventTypeRow || eventTypeRow.isActive !== true) {
|
||||
throw new Error(`Unbekannter oder inaktiver Event-Typ: ${eventType}`);
|
||||
}
|
||||
|
||||
@@ -54,40 +56,40 @@ export class NotificationService {
|
||||
}
|
||||
|
||||
// 3) Notification anlegen (status: queued)
|
||||
const { data: inserted, error: insErr } = await supabase
|
||||
.from('notifications_items')
|
||||
.insert({
|
||||
tenant_id: tenantId,
|
||||
user_id: userId,
|
||||
event_type: eventType,
|
||||
const insertedRows = await this.server.db
|
||||
.insert(notificationsItems)
|
||||
.values({
|
||||
tenantId,
|
||||
userId,
|
||||
eventType,
|
||||
title,
|
||||
message,
|
||||
payload: payload ?? null,
|
||||
channel: 'email',
|
||||
status: 'queued'
|
||||
})
|
||||
.select('id')
|
||||
.single();
|
||||
.returning({ id: notificationsItems.id })
|
||||
const inserted = insertedRows[0]
|
||||
|
||||
if (insErr || !inserted) {
|
||||
throw new Error(`Fehler beim Einfügen der Notification: ${insErr?.message}`);
|
||||
if (!inserted) {
|
||||
throw new Error("Fehler beim Einfügen der Notification");
|
||||
}
|
||||
|
||||
// 4) E-Mail versenden
|
||||
try {
|
||||
await this.sendEmail(user.email, title, message);
|
||||
|
||||
await supabase
|
||||
.from('notifications_items')
|
||||
.update({ status: 'sent', sent_at: new Date().toISOString() })
|
||||
.eq('id', inserted.id);
|
||||
await this.server.db
|
||||
.update(notificationsItems)
|
||||
.set({ status: 'sent', sentAt: new Date() })
|
||||
.where(eq(notificationsItems.id, inserted.id));
|
||||
|
||||
return { success: true, id: inserted.id };
|
||||
} catch (err: any) {
|
||||
await supabase
|
||||
.from('notifications_items')
|
||||
.update({ status: 'failed', error: String(err?.message || err) })
|
||||
.eq('id', inserted.id);
|
||||
await this.server.db
|
||||
.update(notificationsItems)
|
||||
.set({ status: 'failed', error: String(err?.message || err) })
|
||||
.where(eq(notificationsItems.id, inserted.id));
|
||||
|
||||
this.server.log.error({ err, notificationId: inserted.id }, 'E-Mail Versand fehlgeschlagen');
|
||||
return { success: false, error: err?.message || 'E-Mail Versand fehlgeschlagen' };
|
||||
|
||||
249
backend/src/modules/service-price-recalculation.service.ts
Normal file
249
backend/src/modules/service-price-recalculation.service.ts
Normal file
@@ -0,0 +1,249 @@
|
||||
import { and, eq } from "drizzle-orm";
|
||||
import * as schema from "../../db/schema";
|
||||
import { FastifyInstance } from "fastify";
|
||||
|
||||
/**
 * One entry of a service's material or personal composition, as read from
 * the JSON composition columns of the services table. Ids and prices may
 * arrive as numbers or numeric strings, so callers normalize every field
 * before use (see toNumber / normalizeId / normalizeUuid).
 */
type CompositionRow = {
  product?: number | string | null;        // referenced product id (material rows)
  service?: number | string | null;        // referenced child-service id (material rows)
  hourrate?: string | null;                // referenced hourrate uuid (personal rows)
  quantity?: number | string | null;       // multiplier applied to the unit prices
  price?: number | string | null;          // selling price per unit
  purchasePrice?: number | string | null;  // purchase price per unit
  [key: string]: any;                      // tolerate extra keys from older payloads
};
|
||||
|
||||
function toNumber(value: any): number {
|
||||
const num = Number(value ?? 0);
|
||||
return Number.isFinite(num) ? num : 0;
|
||||
}
|
||||
|
||||
function round2(value: number): number {
|
||||
return Number(value.toFixed(2));
|
||||
}
|
||||
|
||||
function getJsonNumber(source: unknown, key: string): number {
|
||||
if (!source || typeof source !== "object") return 0;
|
||||
return toNumber((source as Record<string, unknown>)[key]);
|
||||
}
|
||||
|
||||
function normalizeId(value: unknown): number | null {
|
||||
if (value === null || value === undefined || value === "") return null;
|
||||
const num = Number(value);
|
||||
return Number.isFinite(num) ? num : null;
|
||||
}
|
||||
|
||||
function normalizeUuid(value: unknown): string | null {
|
||||
if (typeof value !== "string") return null;
|
||||
const trimmed = value.trim();
|
||||
return trimmed.length ? trimmed : null;
|
||||
}
|
||||
|
||||
function sanitizeCompositionRows(value: unknown): CompositionRow[] {
|
||||
if (!Array.isArray(value)) return [];
|
||||
return value.filter((entry): entry is CompositionRow => !!entry && typeof entry === "object");
|
||||
}
|
||||
|
||||
/**
 * Recalculate selling/purchase prices for every service of a tenant.
 *
 * Services may be composed of products, hourrates and other (child)
 * services; totals are computed bottom-up with memoisation and a cycle
 * guard, then written back only for services that are not price-locked
 * and whose computed values actually changed.
 *
 * @param server    Fastify instance providing the Drizzle handle (server.db).
 * @param tenantId  Tenant whose services/products/hourrates are loaded.
 * @param updatedBy Optional user id stored on updated rows.
 */
export async function recalculateServicePricesForTenant(server: FastifyInstance, tenantId: number, updatedBy?: string | null) {
  // Load everything for the tenant up-front so the per-service calculation
  // below runs purely in memory.
  const [services, products, hourrates] = await Promise.all([
    server.db.select().from(schema.services).where(eq(schema.services.tenant, tenantId)),
    server.db.select().from(schema.products).where(eq(schema.products.tenant, tenantId)),
    server.db.select().from(schema.hourrates).where(eq(schema.hourrates.tenant, tenantId)),
  ]);

  const serviceMap = new Map(services.map((item) => [item.id, item]));
  const productMap = new Map(products.map((item) => [item.id, item]));
  const hourrateMap = new Map(hourrates.map((item) => [item.id, item]));

  // Memoised per-service results; `stack` tracks the current recursion path
  // so composition cycles short-circuit instead of recursing forever.
  const memo = new Map<number, {
    sellingTotal: number;
    purchaseTotal: number;
    materialTotal: number;
    materialPurchaseTotal: number;
    workerTotal: number;
    workerPurchaseTotal: number;
    materialComposition: CompositionRow[];
    personalComposition: CompositionRow[];
  }>();
  const stack = new Set<number>();

  // Bottom-up price calculation for one service (recursive for
  // service-in-service material rows).
  const calculateService = (serviceId: number) => {
    if (memo.has(serviceId)) return memo.get(serviceId)!;

    const service = serviceMap.get(serviceId);
    const emptyResult = {
      sellingTotal: 0,
      purchaseTotal: 0,
      materialTotal: 0,
      materialPurchaseTotal: 0,
      workerTotal: 0,
      workerPurchaseTotal: 0,
      materialComposition: [],
      personalComposition: [],
    };

    if (!service) return emptyResult;
    // Cycle guard: a service currently being computed contributes 0.
    // (Deliberately NOT memoised, so a later non-cyclic path can still
    // compute a real value for it.)
    if (stack.has(serviceId)) return emptyResult;

    // Locked services keep their stored values during automatic price updates.
    if (service.priceUpdateLocked) {
      const lockedResult = {
        // Fall back to the flat sellingPrice when no composed total exists.
        sellingTotal: getJsonNumber(service.sellingPriceComposed, "total") || toNumber(service.sellingPrice),
        purchaseTotal: getJsonNumber(service.purchasePriceComposed, "total"),
        materialTotal: getJsonNumber(service.sellingPriceComposed, "material"),
        materialPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "material"),
        workerTotal: getJsonNumber(service.sellingPriceComposed, "worker"),
        workerPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "worker"),
        materialComposition: sanitizeCompositionRows(service.materialComposition),
        personalComposition: sanitizeCompositionRows(service.personalComposition),
      };
      memo.set(serviceId, lockedResult);
      return lockedResult;
    }

    stack.add(serviceId);
    try {
      const materialComposition = sanitizeCompositionRows(service.materialComposition);
      const personalComposition = sanitizeCompositionRows(service.personalComposition);
      const hasMaterialComposition = materialComposition.length > 0;
      const hasPersonalComposition = personalComposition.length > 0;

      // Without any composition there is nothing to derive prices from:
      // manually maintained prices must be preserved as-is.
      if (!hasMaterialComposition && !hasPersonalComposition) {
        const manualResult = {
          sellingTotal: getJsonNumber(service.sellingPriceComposed, "total") || toNumber(service.sellingPrice),
          purchaseTotal: getJsonNumber(service.purchasePriceComposed, "total"),
          materialTotal: getJsonNumber(service.sellingPriceComposed, "material"),
          materialPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "material"),
          workerTotal: getJsonNumber(service.sellingPriceComposed, "worker"),
          workerPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "worker"),
          materialComposition,
          personalComposition,
        };
        memo.set(serviceId, manualResult);
        return manualResult;
      }

      let materialTotal = 0;
      let materialPurchaseTotal = 0;

      // Material rows reference either a product or a child service; prices
      // from the referenced entity override the row's stored prices.
      const normalizedMaterialComposition = materialComposition.map((entry) => {
        const quantity = toNumber(entry.quantity);
        const productId = normalizeId(entry.product);
        const childServiceId = normalizeId(entry.service);

        let sellingPrice = toNumber(entry.price);
        let purchasePrice = toNumber(entry.purchasePrice);

        if (productId) {
          const product = productMap.get(productId);
          // NOTE(review): field access is snake_case here, but camelCase for
          // hourrates below (sellingPrice). Verify against the Drizzle
          // schema — a wrong key reads undefined and silently prices at 0.
          sellingPrice = toNumber(product?.selling_price);
          purchasePrice = toNumber(product?.purchase_price);
        } else if (childServiceId) {
          const child = calculateService(childServiceId);
          sellingPrice = toNumber(child.sellingTotal);
          purchasePrice = toNumber(child.purchaseTotal);
        }

        materialTotal += quantity * sellingPrice;
        materialPurchaseTotal += quantity * purchasePrice;

        return {
          ...entry,
          price: round2(sellingPrice),
          purchasePrice: round2(purchasePrice),
        };
      });

      let workerTotal = 0;
      let workerPurchaseTotal = 0;
      // Personal rows reference an hourrate by uuid.
      const normalizedPersonalComposition = personalComposition.map((entry) => {
        const quantity = toNumber(entry.quantity);
        const hourrateId = normalizeUuid(entry.hourrate);

        let sellingPrice = toNumber(entry.price);
        let purchasePrice = toNumber(entry.purchasePrice);

        if (hourrateId) {
          const hourrate = hourrateMap.get(hourrateId);
          if (hourrate) {
            sellingPrice = toNumber(hourrate.sellingPrice);
            // NOTE(review): purchase_price is snake_case while sellingPrice
            // above is camelCase — confirm which one matches the schema.
            purchasePrice = toNumber(hourrate.purchase_price);
          }
        }

        workerTotal += quantity * sellingPrice;
        workerPurchaseTotal += quantity * purchasePrice;

        return {
          ...entry,
          price: round2(sellingPrice),
          purchasePrice: round2(purchasePrice),
        };
      });

      const result = {
        sellingTotal: round2(materialTotal + workerTotal),
        purchaseTotal: round2(materialPurchaseTotal + workerPurchaseTotal),
        materialTotal: round2(materialTotal),
        materialPurchaseTotal: round2(materialPurchaseTotal),
        workerTotal: round2(workerTotal),
        workerPurchaseTotal: round2(workerPurchaseTotal),
        materialComposition: normalizedMaterialComposition,
        personalComposition: normalizedPersonalComposition,
      };

      memo.set(serviceId, result);
      return result;
    } finally {
      // Always unwind the recursion-path marker, even on early returns.
      stack.delete(serviceId);
    }
  };

  // Warm the memo for every service of the tenant.
  for (const service of services) {
    calculateService(service.id);
  }

  // Persist results: one UPDATE per changed, unlocked service (run concurrently).
  const updates = services
    .filter((service) => !service.priceUpdateLocked)
    .map(async (service) => {
      const calc = memo.get(service.id);
      if (!calc) return;

      const sellingPriceComposed = {
        worker: calc.workerTotal,
        material: calc.materialTotal,
        total: calc.sellingTotal,
      };

      const purchasePriceComposed = {
        worker: calc.workerPurchaseTotal,
        material: calc.materialPurchaseTotal,
        total: calc.purchaseTotal,
      };

      // Cheap deep-equality via JSON round-trip; note this treats key-order
      // differences as changes, which at worst causes a redundant write.
      const unchanged =
        JSON.stringify(service.materialComposition ?? []) === JSON.stringify(calc.materialComposition) &&
        JSON.stringify(service.personalComposition ?? []) === JSON.stringify(calc.personalComposition) &&
        JSON.stringify(service.sellingPriceComposed ?? {}) === JSON.stringify(sellingPriceComposed) &&
        JSON.stringify(service.purchasePriceComposed ?? {}) === JSON.stringify(purchasePriceComposed) &&
        round2(toNumber(service.sellingPrice)) === calc.sellingTotal;

      if (unchanged) return;

      await server.db
        .update(schema.services)
        .set({
          materialComposition: calc.materialComposition,
          personalComposition: calc.personalComposition,
          sellingPriceComposed,
          purchasePriceComposed,
          sellingPrice: calc.sellingTotal,
          updatedAt: new Date(),
          updatedBy: updatedBy ?? null,
        })
        .where(and(eq(schema.services.id, service.id), eq(schema.services.tenant, tenantId)));
    });

  await Promise.all(updates);
}
|
||||
@@ -1,6 +1,9 @@
|
||||
import { FastifyInstance } from "fastify";
|
||||
import fp from "fastify-plugin";
|
||||
import { secrets } from "../utils/secrets";
|
||||
import { and, eq } from "drizzle-orm";
|
||||
import { authUsers, m2mApiKeys } from "../../db/schema";
|
||||
import { createHash } from "node:crypto";
|
||||
|
||||
/**
|
||||
* Fastify Plugin für Machine-to-Machine Authentifizierung.
|
||||
@@ -12,26 +15,99 @@ import { secrets } from "../utils/secrets";
|
||||
* server.register(m2mAuthPlugin, { allowedPrefix: '/internal' })
|
||||
*/
|
||||
export default fp(async (server: FastifyInstance, opts: { allowedPrefix?: string } = {}) => {
|
||||
//const allowedPrefix = opts.allowedPrefix || "/internal";
|
||||
const hashApiKey = (apiKey: string) =>
|
||||
createHash("sha256").update(apiKey, "utf8").digest("hex")
|
||||
|
||||
server.addHook("preHandler", async (req, reply) => {
|
||||
try {
|
||||
// Nur prüfen, wenn Route unterhalb des Prefix liegt
|
||||
//if (!req.url.startsWith(allowedPrefix)) return;
|
||||
const apiKeyHeader = req.headers["x-api-key"];
|
||||
const apiKey = Array.isArray(apiKeyHeader) ? apiKeyHeader[0] : apiKeyHeader;
|
||||
|
||||
const apiKey = req.headers["x-api-key"];
|
||||
|
||||
if (!apiKey || apiKey !== secrets.M2M_API_KEY) {
|
||||
if (!apiKey) {
|
||||
server.log.warn(`[M2M Auth] Ungültiger oder fehlender API-Key bei ${req.url}`);
|
||||
return reply.status(401).send({ error: "Unauthorized" });
|
||||
}
|
||||
|
||||
// Zusatzinformationen im Request (z. B. interne Kennung)
|
||||
const keyHash = hashApiKey(apiKey);
|
||||
|
||||
const keyRows = await server.db
|
||||
.select({
|
||||
id: m2mApiKeys.id,
|
||||
tenantId: m2mApiKeys.tenantId,
|
||||
userId: m2mApiKeys.userId,
|
||||
active: m2mApiKeys.active,
|
||||
expiresAt: m2mApiKeys.expiresAt,
|
||||
name: m2mApiKeys.name,
|
||||
userEmail: authUsers.email,
|
||||
})
|
||||
.from(m2mApiKeys)
|
||||
.innerJoin(authUsers, eq(authUsers.id, m2mApiKeys.userId))
|
||||
.where(and(
|
||||
eq(m2mApiKeys.keyHash, keyHash),
|
||||
eq(m2mApiKeys.active, true)
|
||||
))
|
||||
.limit(1)
|
||||
|
||||
let key = keyRows[0]
|
||||
if (!key) {
|
||||
const fallbackValid = apiKey === secrets.M2M_API_KEY
|
||||
if (!fallbackValid) {
|
||||
server.log.warn(`[M2M Auth] Ungültiger API-Key bei ${req.url}`)
|
||||
return reply.status(401).send({ error: "Unauthorized" })
|
||||
}
|
||||
|
||||
// Backward compatibility mode for one global key.
|
||||
// The caller must provide user/tenant identifiers in headers.
|
||||
const tenantIdHeader = req.headers["x-tenant-id"]
|
||||
const userIdHeader = req.headers["x-user-id"]
|
||||
const tenantId = Number(Array.isArray(tenantIdHeader) ? tenantIdHeader[0] : tenantIdHeader)
|
||||
const userId = Array.isArray(userIdHeader) ? userIdHeader[0] : userIdHeader
|
||||
|
||||
if (!tenantId || !userId) {
|
||||
return reply.status(401).send({ error: "Missing x-tenant-id or x-user-id for legacy M2M key" })
|
||||
}
|
||||
|
||||
const users = await server.db
|
||||
.select({ email: authUsers.email })
|
||||
.from(authUsers)
|
||||
.where(eq(authUsers.id, userId))
|
||||
.limit(1)
|
||||
|
||||
if (!users[0]) {
|
||||
return reply.status(401).send({ error: "Unknown user for legacy M2M key" })
|
||||
}
|
||||
|
||||
req.user = {
|
||||
user_id: userId,
|
||||
email: users[0].email,
|
||||
tenant_id: tenantId
|
||||
}
|
||||
} else {
|
||||
if (key.expiresAt && new Date(key.expiresAt).getTime() < Date.now()) {
|
||||
return reply.status(401).send({ error: "Expired API key" })
|
||||
}
|
||||
|
||||
req.user = {
|
||||
user_id: key.userId,
|
||||
email: key.userEmail,
|
||||
tenant_id: key.tenantId
|
||||
}
|
||||
|
||||
await server.db
|
||||
.update(m2mApiKeys)
|
||||
.set({ lastUsedAt: new Date(), updatedAt: new Date() })
|
||||
.where(eq(m2mApiKeys.id, key.id))
|
||||
}
|
||||
|
||||
(req as any).m2m = {
|
||||
verified: true,
|
||||
type: "internal",
|
||||
key: apiKey,
|
||||
};
|
||||
|
||||
req.role = "m2m"
|
||||
req.permissions = []
|
||||
req.hasPermission = () => false
|
||||
} catch (err) {
|
||||
// @ts-ignore
|
||||
server.log.error("[M2M Auth] Fehler beim Prüfen des API-Keys:", err);
|
||||
|
||||
@@ -9,13 +9,15 @@ export default fp(async (server: FastifyInstance) => {
|
||||
"http://localhost:3001", // dein Nuxt-Frontend
|
||||
"http://127.0.0.1:3000", // dein Nuxt-Frontend
|
||||
"http://192.168.1.227:3001", // dein Nuxt-Frontend
|
||||
"http://192.168.1.234:3000", // dein Nuxt-Frontend
|
||||
"http://192.168.1.113:3000", // dein Nuxt-Frontend
|
||||
"https://beta.fedeo.de", // dein Nuxt-Frontend
|
||||
"https://app.fedeo.de", // dein Nuxt-Frontend
|
||||
"capacitor://localhost", // dein Nuxt-Frontend
|
||||
],
|
||||
methods: ["GET", "POST", "PUT", "DELETE", "OPTIONS"],
|
||||
allowedHeaders: ["Content-Type", "Authorization", "Context", "X-Public-Pin"],
|
||||
methods: ["GET", "POST", "PUT", "DELETE", "OPTIONS","PATCH",
|
||||
"PROPFIND", "PROPPATCH", "MKCOL", "COPY", "MOVE", "LOCK", "UNLOCK"],
|
||||
allowedHeaders: ["Content-Type", "Authorization", "Context", "X-Public-Pin","Depth", "Overwrite", "Destination", "Lock-Token", "If"],
|
||||
exposedHeaders: ["Authorization", "Content-Disposition", "Content-Type", "Content-Length"], // optional, falls du ihn auch auslesen willst
|
||||
credentials: true, // wichtig, falls du Cookies nutzt
|
||||
});
|
||||
|
||||
@@ -1,31 +1,22 @@
|
||||
import fp from "fastify-plugin"
|
||||
import {drizzle, NodePgDatabase} from "drizzle-orm/node-postgres"
|
||||
import { Pool } from "pg"
|
||||
import * as schema from "../../db/schema"
|
||||
// src/plugins/db.ts
|
||||
import fp from "fastify-plugin";
|
||||
import { NodePgDatabase } from "drizzle-orm/node-postgres";
|
||||
import * as schema from "../../db/schema";
|
||||
import { db, pool } from "../../db"; // <--- Importiert jetzt die globale Instanz
|
||||
|
||||
export default fp(async (server, opts) => {
|
||||
const pool = new Pool({
|
||||
host: "100.102.185.225",
|
||||
port: Number(process.env.DB_PORT || 5432),
|
||||
user: "postgres",
|
||||
password: "wJw7aNpEBJdcxgoct6GXNpvY4Cn6ECqu",
|
||||
database: "fedeo",
|
||||
ssl: process.env.DB_DISABLE_SSL === "true" ? false : undefined,
|
||||
})
|
||||
|
||||
// Drizzle instance
|
||||
const db = drizzle(pool, { schema })
|
||||
// Wir nutzen die db, die wir in src/db/index.ts erstellt haben
|
||||
server.decorate("db", db);
|
||||
|
||||
// Dekorieren -> überall server.db
|
||||
server.decorate("db", db)
|
||||
|
||||
// Graceful Shutdown
|
||||
// Graceful Shutdown: Wenn Fastify ausgeht, schließen wir den Pool
|
||||
server.addHook("onClose", async () => {
|
||||
await pool.end()
|
||||
})
|
||||
console.log("[DB] Closing connection pool...");
|
||||
await pool.end();
|
||||
});
|
||||
|
||||
server.log.info("Drizzle database connected")
|
||||
})
|
||||
console.log("[Fastify] Database attached from shared instance");
|
||||
});
|
||||
|
||||
declare module "fastify" {
|
||||
interface FastifyInstance {
|
||||
|
||||
@@ -58,8 +58,6 @@ const queryConfigPlugin: FastifyPluginAsync<QueryConfigPluginOptions> = async (
|
||||
|
||||
const query = req.query as Record<string, any>
|
||||
|
||||
console.log(query)
|
||||
|
||||
// Pagination deaktivieren?
|
||||
const disablePagination =
|
||||
query.noPagination === 'true' ||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
// /plugins/services.ts
|
||||
import fp from "fastify-plugin";
|
||||
import { bankStatementService } from "../modules/cron/bankstatementsync.service";
|
||||
//import {initDokuboxClient, syncDokubox} from "../modules/cron/dokuboximport.service";
|
||||
import {syncDokuboxService} from "../modules/cron/dokuboximport.service";
|
||||
import { FastifyInstance } from "fastify";
|
||||
import {prepareIncomingInvoices} from "../modules/cron/prepareIncomingInvoices";
|
||||
|
||||
@@ -9,7 +9,7 @@ declare module "fastify" {
|
||||
interface FastifyInstance {
|
||||
services: {
|
||||
bankStatements: ReturnType<typeof bankStatementService>;
|
||||
//dokuboxSync: ReturnType<typeof syncDokubox>;
|
||||
dokuboxSync: ReturnType<typeof syncDokuboxService>;
|
||||
prepareIncomingInvoices: ReturnType<typeof prepareIncomingInvoices>;
|
||||
};
|
||||
}
|
||||
@@ -18,7 +18,7 @@ declare module "fastify" {
|
||||
export default fp(async function servicePlugin(server: FastifyInstance) {
|
||||
server.decorate("services", {
|
||||
bankStatements: bankStatementService(server),
|
||||
//dokuboxSync: syncDokubox(server),
|
||||
dokuboxSync: syncDokuboxService(server),
|
||||
prepareIncomingInvoices: prepareIncomingInvoices(server),
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
import { FastifyInstance } from "fastify";
|
||||
import fp from "fastify-plugin";
|
||||
import { createClient, SupabaseClient } from "@supabase/supabase-js";
|
||||
import {secrets} from "../utils/secrets";
|
||||
|
||||
export default fp(async (server: FastifyInstance) => {
|
||||
const supabaseUrl = secrets.SUPABASE_URL
|
||||
const supabaseServiceKey = secrets.SUPABASE_SERVICE_ROLE_KEY
|
||||
const supabase: SupabaseClient = createClient(supabaseUrl, supabaseServiceKey);
|
||||
|
||||
// Fastify um supabase erweitern
|
||||
server.decorate("supabase", supabase);
|
||||
});
|
||||
|
||||
declare module "fastify" {
|
||||
interface FastifyInstance {
|
||||
supabase: SupabaseClient;
|
||||
}
|
||||
}
|
||||
@@ -5,26 +5,33 @@ import swaggerUi from "@fastify/swagger-ui";
|
||||
|
||||
export default fp(async (server: FastifyInstance) => {
|
||||
await server.register(swagger, {
|
||||
mode: "dynamic", // wichtig: generiert echtes OpenAPI JSON
|
||||
mode: "dynamic",
|
||||
openapi: {
|
||||
info: {
|
||||
title: "Multi-Tenant API",
|
||||
description: "API Dokumentation für dein Backend",
|
||||
title: "FEDEO Backend API",
|
||||
description: "OpenAPI specification for the FEDEO backend",
|
||||
version: "1.0.0",
|
||||
},
|
||||
servers: [{ url: "http://localhost:3000" }],
|
||||
servers: [{ url: "/" }],
|
||||
components: {
|
||||
securitySchemes: {
|
||||
bearerAuth: {
|
||||
type: "http",
|
||||
scheme: "bearer",
|
||||
bearerFormat: "JWT"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
// @ts-ignore
|
||||
await server.register(swaggerUi, {
|
||||
routePrefix: "/docs", // UI erreichbar unter http://localhost:3000/docs
|
||||
swagger: {
|
||||
info: {
|
||||
title: "Multi-Tenant API",
|
||||
version: "1.0.0",
|
||||
},
|
||||
},
|
||||
exposeRoute: true,
|
||||
routePrefix: "/docs",
|
||||
});
|
||||
|
||||
// Stable raw spec path
|
||||
server.get("/openapi.json", async (_req, reply) => {
|
||||
return reply.send(server.swagger());
|
||||
});
|
||||
});
|
||||
@@ -1,5 +1,7 @@
|
||||
import { FastifyInstance, FastifyRequest } from "fastify";
|
||||
import fp from "fastify-plugin";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { tenants } from "../../db/schema";
|
||||
|
||||
export default fp(async (server: FastifyInstance) => {
|
||||
server.addHook("preHandler", async (req, reply) => {
|
||||
@@ -9,11 +11,12 @@ export default fp(async (server: FastifyInstance) => {
|
||||
return;
|
||||
}
|
||||
// Tenant aus DB laden
|
||||
const { data: tenant } = await server.supabase
|
||||
.from("tenants")
|
||||
.select("*")
|
||||
.eq("portalDomain", host)
|
||||
.single();
|
||||
const rows = await server.db
|
||||
.select()
|
||||
.from(tenants)
|
||||
.where(eq(tenants.portalDomain, host))
|
||||
.limit(1);
|
||||
const tenant = rows[0];
|
||||
|
||||
|
||||
if(!tenant) {
|
||||
|
||||
@@ -94,6 +94,7 @@ export default async function adminRoutes(server: FastifyInstance) {
|
||||
short: tenants.short,
|
||||
locked: tenants.locked,
|
||||
numberRanges: tenants.numberRanges,
|
||||
accountChart: tenants.accountChart,
|
||||
extraModules: tenants.extraModules,
|
||||
})
|
||||
.from(authTenantUsers)
|
||||
|
||||
@@ -1,11 +1,60 @@
|
||||
import { FastifyInstance } from "fastify"
|
||||
import bcrypt from "bcrypt"
|
||||
import { eq } from "drizzle-orm"
|
||||
import jwt from "jsonwebtoken"
|
||||
import { secrets } from "../../utils/secrets"
|
||||
|
||||
import { authUsers } from "../../../db/schema" // wichtig: Drizzle Schema importieren!
|
||||
|
||||
export default async function authRoutesAuthenticated(server: FastifyInstance) {
|
||||
|
||||
server.post("/auth/refresh", {
|
||||
schema: {
|
||||
tags: ["Auth"],
|
||||
summary: "Refresh JWT for current authenticated user",
|
||||
response: {
|
||||
200: {
|
||||
type: "object",
|
||||
properties: {
|
||||
token: { type: "string" },
|
||||
},
|
||||
required: ["token"],
|
||||
},
|
||||
401: {
|
||||
type: "object",
|
||||
properties: {
|
||||
error: { type: "string" },
|
||||
},
|
||||
required: ["error"],
|
||||
},
|
||||
},
|
||||
},
|
||||
}, async (req, reply) => {
|
||||
if (!req.user?.user_id) {
|
||||
return reply.code(401).send({ error: "Unauthorized" })
|
||||
}
|
||||
|
||||
const token = jwt.sign(
|
||||
{
|
||||
user_id: req.user.user_id,
|
||||
email: req.user.email,
|
||||
tenant_id: req.user.tenant_id,
|
||||
},
|
||||
secrets.JWT_SECRET!,
|
||||
{ expiresIn: "6h" }
|
||||
)
|
||||
|
||||
reply.setCookie("token", token, {
|
||||
path: "/",
|
||||
httpOnly: true,
|
||||
sameSite: process.env.NODE_ENV === "production" ? "none" : "lax",
|
||||
secure: process.env.NODE_ENV === "production",
|
||||
maxAge: 60 * 60 * 6,
|
||||
})
|
||||
|
||||
return { token }
|
||||
})
|
||||
|
||||
server.post("/auth/password/change", {
|
||||
schema: {
|
||||
tags: ["Auth"],
|
||||
|
||||
@@ -137,7 +137,7 @@ export default async function authRoutes(server: FastifyInstance) {
|
||||
httpOnly: true,
|
||||
sameSite: process.env.NODE_ENV === "production" ? "none" : "lax",
|
||||
secure: process.env.NODE_ENV === "production",
|
||||
maxAge: 60 * 60 * 3,
|
||||
maxAge: 60 * 60 * 6,
|
||||
});
|
||||
|
||||
return { token };
|
||||
|
||||
@@ -51,9 +51,11 @@ export default async function meRoutes(server: FastifyInstance) {
|
||||
name: tenants.name,
|
||||
short: tenants.short,
|
||||
locked: tenants.locked,
|
||||
features: tenants.features,
|
||||
extraModules: tenants.extraModules,
|
||||
businessInfo: tenants.businessInfo,
|
||||
numberRanges: tenants.numberRanges,
|
||||
accountChart: tenants.accountChart,
|
||||
dokuboxkey: tenants.dokuboxkey,
|
||||
standardEmailForInvoices: tenants.standardEmailForInvoices,
|
||||
standardPaymentDays: tenants.standardPaymentDays,
|
||||
|
||||
@@ -4,10 +4,19 @@ import dayjs from "dayjs"
|
||||
|
||||
import { secrets } from "../utils/secrets"
|
||||
import { insertHistoryItem } from "../utils/history"
|
||||
import { decrypt, encrypt } from "../utils/crypt"
|
||||
import { DE_BANK_CODE_TO_NAME } from "../utils/deBankCodes"
|
||||
import { DE_BANK_CODE_TO_BIC } from "../utils/deBankBics"
|
||||
|
||||
import {
|
||||
bankrequisitions,
|
||||
bankstatements,
|
||||
createddocuments,
|
||||
customers,
|
||||
entitybankaccounts,
|
||||
incominginvoices,
|
||||
statementallocations,
|
||||
vendors,
|
||||
} from "../../db/schema"
|
||||
|
||||
import {
|
||||
@@ -17,6 +26,322 @@ import {
|
||||
|
||||
|
||||
export default async function bankingRoutes(server: FastifyInstance) {
|
||||
const normalizeIban = (value?: string | null) =>
|
||||
String(value || "").replace(/\s+/g, "").toUpperCase()
|
||||
|
||||
const pickPartnerBankData = (statement: any, partnerType: "customer" | "vendor") => {
|
||||
if (!statement) return null
|
||||
|
||||
const prefersDebit = partnerType === "customer"
|
||||
? Number(statement.amount) >= 0
|
||||
: Number(statement.amount) > 0
|
||||
|
||||
const primary = prefersDebit
|
||||
? { iban: statement.debIban }
|
||||
: { iban: statement.credIban }
|
||||
const fallback = prefersDebit
|
||||
? { iban: statement.credIban }
|
||||
: { iban: statement.debIban }
|
||||
|
||||
const primaryIban = normalizeIban(primary.iban)
|
||||
if (primaryIban) {
|
||||
return {
|
||||
iban: primaryIban,
|
||||
}
|
||||
}
|
||||
|
||||
const fallbackIban = normalizeIban(fallback.iban)
|
||||
if (fallbackIban) {
|
||||
return {
|
||||
iban: fallbackIban,
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
const mergePartnerIban = (infoData: Record<string, any>, iban: string, bankAccountId?: number | null) => {
|
||||
if (!iban && !bankAccountId) return infoData || {}
|
||||
const info = infoData && typeof infoData === "object" ? { ...infoData } : {}
|
||||
|
||||
if (iban) {
|
||||
const existing = Array.isArray(info.bankingIbans) ? info.bankingIbans : []
|
||||
const merged = [...new Set([...existing.map((i: string) => normalizeIban(i)), iban])]
|
||||
info.bankingIbans = merged
|
||||
if (!info.bankingIban) info.bankingIban = iban
|
||||
}
|
||||
|
||||
if (bankAccountId) {
|
||||
const existingIds = Array.isArray(info.bankAccountIds) ? info.bankAccountIds : []
|
||||
if (!existingIds.includes(bankAccountId)) {
|
||||
info.bankAccountIds = [...existingIds, bankAccountId]
|
||||
}
|
||||
}
|
||||
|
||||
return info
|
||||
}
|
||||
|
||||
const ibanLengthByCountry: Record<string, number> = {
|
||||
DE: 22,
|
||||
AT: 20,
|
||||
CH: 21,
|
||||
NL: 18,
|
||||
BE: 16,
|
||||
FR: 27,
|
||||
ES: 24,
|
||||
IT: 27,
|
||||
LU: 20,
|
||||
}
|
||||
|
||||
const isValidIbanLocal = (iban: string) => {
|
||||
const normalized = normalizeIban(iban)
|
||||
if (!normalized || normalized.length < 15 || normalized.length > 34) return false
|
||||
if (!/^[A-Z]{2}[0-9]{2}[A-Z0-9]+$/.test(normalized)) return false
|
||||
|
||||
const country = normalized.slice(0, 2)
|
||||
const expectedLength = ibanLengthByCountry[country]
|
||||
if (expectedLength && normalized.length !== expectedLength) return false
|
||||
|
||||
const rearranged = normalized.slice(4) + normalized.slice(0, 4)
|
||||
let numeric = ""
|
||||
for (const ch of rearranged) {
|
||||
if (ch >= "A" && ch <= "Z") numeric += (ch.charCodeAt(0) - 55).toString()
|
||||
else numeric += ch
|
||||
}
|
||||
|
||||
let remainder = 0
|
||||
for (const digit of numeric) {
|
||||
remainder = (remainder * 10 + Number(digit)) % 97
|
||||
}
|
||||
|
||||
return remainder === 1
|
||||
}
|
||||
|
||||
const resolveGermanBankDataFromIbanLocal = (iban: string) => {
|
||||
const normalized = normalizeIban(iban)
|
||||
if (!isValidIbanLocal(normalized)) return null
|
||||
|
||||
// Für DE-IBANs kann die BLZ aus Position 5-12 lokal gelesen werden.
|
||||
if (normalized.startsWith("DE") && normalized.length === 22) {
|
||||
const bankCode = normalized.slice(4, 12)
|
||||
const bankName = DE_BANK_CODE_TO_NAME[bankCode] || `Unbekannt (BLZ ${bankCode})`
|
||||
const bic = DE_BANK_CODE_TO_BIC[bankCode] || null
|
||||
return {
|
||||
bankName,
|
||||
bic,
|
||||
bankCode,
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
const resolveEntityBankAccountId = async (
|
||||
tenantId: number,
|
||||
userId: string,
|
||||
iban: string
|
||||
) => {
|
||||
const normalizedIban = normalizeIban(iban)
|
||||
if (!normalizedIban) return null
|
||||
|
||||
const bankData = resolveGermanBankDataFromIbanLocal(normalizedIban)
|
||||
|
||||
const allAccounts = await server.db
|
||||
.select({
|
||||
id: entitybankaccounts.id,
|
||||
ibanEncrypted: entitybankaccounts.ibanEncrypted,
|
||||
bankNameEncrypted: entitybankaccounts.bankNameEncrypted,
|
||||
bicEncrypted: entitybankaccounts.bicEncrypted,
|
||||
})
|
||||
.from(entitybankaccounts)
|
||||
.where(eq(entitybankaccounts.tenant, tenantId))
|
||||
|
||||
const existing = allAccounts.find((row) => {
|
||||
if (!row.ibanEncrypted) return false
|
||||
try {
|
||||
const decryptedIban = decrypt(row.ibanEncrypted as any)
|
||||
return normalizeIban(decryptedIban) === normalizedIban
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
})
|
||||
|
||||
if (existing?.id) {
|
||||
if (bankData) {
|
||||
let currentBankName = ""
|
||||
let currentBic = ""
|
||||
try {
|
||||
currentBankName = String(decrypt(existing.bankNameEncrypted as any) || "").trim()
|
||||
} catch {
|
||||
currentBankName = ""
|
||||
}
|
||||
try {
|
||||
currentBic = String(decrypt((existing as any).bicEncrypted as any) || "").trim()
|
||||
} catch {
|
||||
currentBic = ""
|
||||
}
|
||||
|
||||
const nextBankName = bankData?.bankName || "Unbekannt"
|
||||
const nextBic = bankData?.bic || "UNBEKANNT"
|
||||
if (currentBankName !== nextBankName || currentBic !== nextBic) {
|
||||
await server.db
|
||||
.update(entitybankaccounts)
|
||||
.set({
|
||||
bankNameEncrypted: encrypt(nextBankName),
|
||||
bicEncrypted: encrypt(nextBic),
|
||||
updatedAt: new Date(),
|
||||
updatedBy: userId,
|
||||
})
|
||||
.where(and(eq(entitybankaccounts.id, Number(existing.id)), eq(entitybankaccounts.tenant, tenantId)))
|
||||
}
|
||||
}
|
||||
|
||||
return Number(existing.id)
|
||||
}
|
||||
|
||||
const [created] = await server.db
|
||||
.insert(entitybankaccounts)
|
||||
.values({
|
||||
tenant: tenantId,
|
||||
ibanEncrypted: encrypt(normalizedIban),
|
||||
bicEncrypted: encrypt(bankData?.bic || "UNBEKANNT"),
|
||||
bankNameEncrypted: encrypt(bankData?.bankName || "Unbekannt"),
|
||||
description: "Automatisch aus Bankbuchung übernommen",
|
||||
updatedAt: new Date(),
|
||||
updatedBy: userId,
|
||||
})
|
||||
.returning({ id: entitybankaccounts.id })
|
||||
|
||||
return created?.id ? Number(created.id) : null
|
||||
}
|
||||
|
||||
server.get("/banking/iban/:iban", async (req, reply) => {
|
||||
try {
|
||||
const { iban } = req.params as { iban: string }
|
||||
const normalized = normalizeIban(iban)
|
||||
if (!normalized) {
|
||||
return reply.code(400).send({ error: "IBAN missing" })
|
||||
}
|
||||
|
||||
const valid = isValidIbanLocal(normalized)
|
||||
const bankData = resolveGermanBankDataFromIbanLocal(normalized)
|
||||
|
||||
return reply.send({
|
||||
iban: normalized,
|
||||
valid,
|
||||
bic: bankData?.bic || null,
|
||||
bankName: bankData?.bankName || null,
|
||||
bankCode: bankData?.bankCode || null,
|
||||
})
|
||||
} catch (err) {
|
||||
server.log.error(err)
|
||||
return reply.code(500).send({ error: "Failed to resolve IBAN data" })
|
||||
}
|
||||
})
|
||||
|
||||
const assignIbanFromStatementToCustomer = async (tenantId: number, userId: string, statementId: number, createdDocumentId?: number) => {
|
||||
if (!createdDocumentId) return
|
||||
|
||||
const [statement] = await server.db
|
||||
.select()
|
||||
.from(bankstatements)
|
||||
.where(and(eq(bankstatements.id, statementId), eq(bankstatements.tenant, tenantId)))
|
||||
.limit(1)
|
||||
|
||||
if (!statement) return
|
||||
|
||||
const [doc] = await server.db
|
||||
.select({ customer: createddocuments.customer })
|
||||
.from(createddocuments)
|
||||
.where(and(eq(createddocuments.id, createdDocumentId), eq(createddocuments.tenant, tenantId)))
|
||||
.limit(1)
|
||||
|
||||
const customerId = doc?.customer
|
||||
if (!customerId) return
|
||||
|
||||
const partnerBank = pickPartnerBankData(statement, "customer")
|
||||
if (!partnerBank?.iban) return
|
||||
|
||||
const [customer] = await server.db
|
||||
.select({ id: customers.id, infoData: customers.infoData })
|
||||
.from(customers)
|
||||
.where(and(eq(customers.id, customerId), eq(customers.tenant, tenantId)))
|
||||
.limit(1)
|
||||
|
||||
if (!customer) return
|
||||
|
||||
const bankAccountId = await resolveEntityBankAccountId(
|
||||
tenantId,
|
||||
userId,
|
||||
partnerBank.iban
|
||||
)
|
||||
|
||||
const newInfoData = mergePartnerIban(
|
||||
(customer.infoData || {}) as Record<string, any>,
|
||||
partnerBank.iban,
|
||||
bankAccountId
|
||||
)
|
||||
await server.db
|
||||
.update(customers)
|
||||
.set({
|
||||
infoData: newInfoData,
|
||||
updatedAt: new Date(),
|
||||
updatedBy: userId,
|
||||
})
|
||||
.where(and(eq(customers.id, customerId), eq(customers.tenant, tenantId)))
|
||||
}
|
||||
|
||||
const assignIbanFromStatementToVendor = async (tenantId: number, userId: string, statementId: number, incomingInvoiceId?: number) => {
|
||||
if (!incomingInvoiceId) return
|
||||
|
||||
const [statement] = await server.db
|
||||
.select()
|
||||
.from(bankstatements)
|
||||
.where(and(eq(bankstatements.id, statementId), eq(bankstatements.tenant, tenantId)))
|
||||
.limit(1)
|
||||
|
||||
if (!statement) return
|
||||
|
||||
const [invoice] = await server.db
|
||||
.select({ vendor: incominginvoices.vendor })
|
||||
.from(incominginvoices)
|
||||
.where(and(eq(incominginvoices.id, incomingInvoiceId), eq(incominginvoices.tenant, tenantId)))
|
||||
.limit(1)
|
||||
|
||||
const vendorId = invoice?.vendor
|
||||
if (!vendorId) return
|
||||
|
||||
const partnerBank = pickPartnerBankData(statement, "vendor")
|
||||
if (!partnerBank?.iban) return
|
||||
|
||||
const [vendor] = await server.db
|
||||
.select({ id: vendors.id, infoData: vendors.infoData })
|
||||
.from(vendors)
|
||||
.where(and(eq(vendors.id, vendorId), eq(vendors.tenant, tenantId)))
|
||||
.limit(1)
|
||||
|
||||
if (!vendor) return
|
||||
|
||||
const bankAccountId = await resolveEntityBankAccountId(
|
||||
tenantId,
|
||||
userId,
|
||||
partnerBank.iban
|
||||
)
|
||||
|
||||
const newInfoData = mergePartnerIban(
|
||||
(vendor.infoData || {}) as Record<string, any>,
|
||||
partnerBank.iban,
|
||||
bankAccountId
|
||||
)
|
||||
await server.db
|
||||
.update(vendors)
|
||||
.set({
|
||||
infoData: newInfoData,
|
||||
updatedAt: new Date(),
|
||||
updatedBy: userId,
|
||||
})
|
||||
.where(and(eq(vendors.id, vendorId), eq(vendors.tenant, tenantId)))
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------
|
||||
// 🔐 GoCardLess Token Handling
|
||||
@@ -171,9 +496,35 @@ export default async function bankingRoutes(server: FastifyInstance) {
|
||||
|
||||
const createdRecord = inserted[0]
|
||||
|
||||
if (createdRecord?.createddocument) {
|
||||
try {
|
||||
await assignIbanFromStatementToCustomer(
|
||||
req.user.tenant_id,
|
||||
req.user.user_id,
|
||||
Number(createdRecord.bankstatement),
|
||||
Number(createdRecord.createddocument)
|
||||
)
|
||||
} catch (err) {
|
||||
server.log.warn({ err, allocationId: createdRecord.id }, "Konnte IBAN nicht automatisch beim Kunden hinterlegen")
|
||||
}
|
||||
}
|
||||
|
||||
if (createdRecord?.incominginvoice) {
|
||||
try {
|
||||
await assignIbanFromStatementToVendor(
|
||||
req.user.tenant_id,
|
||||
req.user.user_id,
|
||||
Number(createdRecord.bankstatement),
|
||||
Number(createdRecord.incominginvoice)
|
||||
)
|
||||
} catch (err) {
|
||||
server.log.warn({ err, allocationId: createdRecord.id }, "Konnte IBAN nicht automatisch beim Lieferanten hinterlegen")
|
||||
}
|
||||
}
|
||||
|
||||
await insertHistoryItem(server, {
|
||||
entity: "bankstatements",
|
||||
entityId: createdRecord.id,
|
||||
entityId: Number(createdRecord.bankstatement),
|
||||
action: "created",
|
||||
created_by: req.user.user_id,
|
||||
tenant_id: req.user.tenant_id,
|
||||
@@ -216,7 +567,7 @@ export default async function bankingRoutes(server: FastifyInstance) {
|
||||
|
||||
await insertHistoryItem(server, {
|
||||
entity: "bankstatements",
|
||||
entityId: id,
|
||||
entityId: Number(old.bankstatement),
|
||||
action: "deleted",
|
||||
created_by: req.user.user_id,
|
||||
tenant_id: req.user.tenant_id,
|
||||
|
||||
58
backend/src/routes/devices/management.ts
Normal file
58
backend/src/routes/devices/management.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import { FastifyInstance } from "fastify";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { db } from "../../../db"; // <--- PFAD ZUR DB INSTANZ ANPASSEN
|
||||
import { devices } from "../../../db/schema";
|
||||
|
||||
// Definition, was wir vom ESP32 erwarten
|
||||
interface HealthBody {
|
||||
terminal_id: string;
|
||||
ip_address?: string;
|
||||
wifi_rssi?: number;
|
||||
uptime_seconds?: number;
|
||||
heap_free?: number;
|
||||
[key: string]: any; // Erlaubt weitere Felder
|
||||
}
|
||||
|
||||
export default async function devicesManagementRoutes(server: FastifyInstance) {
|
||||
server.post<{ Body: HealthBody }>(
|
||||
"/health",
|
||||
async (req, reply) => {
|
||||
try {
|
||||
const data = req.body;
|
||||
|
||||
// 1. Validierung: Haben wir eine ID?
|
||||
if (!data.terminal_id) {
|
||||
console.warn("Health Check ohne terminal_id empfangen:", data);
|
||||
return reply.code(400).send({ error: "terminal_id missing" });
|
||||
}
|
||||
|
||||
console.log(`Health Ping von Device ${data.terminal_id}`, data);
|
||||
|
||||
// 2. Datenbank Update
|
||||
// Wir suchen das Gerät mit der passenden externalId
|
||||
const result = await server.db
|
||||
.update(devices)
|
||||
.set({
|
||||
lastSeen: new Date(), // Setzt Zeit auf JETZT
|
||||
lastDebugInfo: data // Speichert das ganze JSON
|
||||
})
|
||||
.where(eq(devices.externalId, data.terminal_id))
|
||||
.returning({ id: devices.id }); // Gibt ID zurück, falls gefunden
|
||||
|
||||
// 3. Checken ob Gerät gefunden wurde
|
||||
if (result.length === 0) {
|
||||
console.warn(`Unbekanntes Terminal versucht Health Check: ${data.terminal_id}`);
|
||||
// Optional: 404 senden oder ignorieren (Sicherheit)
|
||||
return reply.code(404).send({ error: "Device not found" });
|
||||
}
|
||||
|
||||
// Alles OK
|
||||
return reply.code(200).send({ status: "ok" });
|
||||
|
||||
} catch (err: any) {
|
||||
console.error("Health Check Error:", err);
|
||||
return reply.code(500).send({ error: err.message });
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
@@ -7,31 +7,33 @@ export default async function devicesRFIDRoutes(server: FastifyInstance) {
|
||||
"/rfid/createevent/:terminal_id",
|
||||
async (req, reply) => {
|
||||
try {
|
||||
// 1. Timestamp aus dem Body holen (optional)
|
||||
const { rfid_id, timestamp } = req.body as {
|
||||
rfid_id: string,
|
||||
timestamp?: number // Kann undefined sein (Live) oder Zahl (Offline)
|
||||
};
|
||||
|
||||
const {rfid_id} = req.body as {rfid_id: string};
|
||||
const { terminal_id } = req.params as { terminal_id: string };
|
||||
|
||||
if (!rfid_id || !terminal_id) {
|
||||
console.log(`Missing Params`);
|
||||
return reply.code(400).send(`Missing Params`)
|
||||
return reply.code(400).send(`Missing Params`);
|
||||
}
|
||||
|
||||
// 2. Gerät suchen
|
||||
const device = await server.db
|
||||
.select()
|
||||
.from(devices)
|
||||
.where(
|
||||
eq(devices.externalId, terminal_id)
|
||||
|
||||
)
|
||||
.where(eq(devices.externalId, terminal_id))
|
||||
.limit(1)
|
||||
.then(rows => rows[0]);
|
||||
|
||||
if (!device) {
|
||||
console.log(`Device ${terminal_id} not found`);
|
||||
return reply.code(400).send(`Device ${terminal_id} not found`)
|
||||
|
||||
return reply.code(400).send(`Device ${terminal_id} not found`);
|
||||
}
|
||||
|
||||
// 3. User-Profil suchen
|
||||
const profile = await server.db
|
||||
.select()
|
||||
.from(authProfiles)
|
||||
@@ -46,53 +48,54 @@ export default async function devicesRFIDRoutes(server: FastifyInstance) {
|
||||
|
||||
if (!profile) {
|
||||
console.log(`Profile for Token ${rfid_id} not found`);
|
||||
return reply.code(400).send(`Profile for Token ${rfid_id} not found`)
|
||||
|
||||
return reply.code(400).send(`Profile for Token ${rfid_id} not found`);
|
||||
}
|
||||
|
||||
// 4. Letztes Event suchen (für Status-Toggle Work Start/End)
|
||||
const lastEvent = await server.db
|
||||
.select()
|
||||
.from(stafftimeevents)
|
||||
.where(
|
||||
eq(stafftimeevents.user_id, profile.user_id)
|
||||
)
|
||||
.orderBy(desc(stafftimeevents.eventtime)) // <-- Sortierung: Neuestes zuerst
|
||||
.where(eq(stafftimeevents.user_id, profile.user_id))
|
||||
.orderBy(desc(stafftimeevents.eventtime))
|
||||
.limit(1)
|
||||
.then(rows => rows[0]);
|
||||
|
||||
console.log(lastEvent)
|
||||
// 5. Zeitstempel Logik (WICHTIG!)
|
||||
// Der ESP32 sendet Unix-Timestamp in SEKUNDEN. JS braucht MILLISEKUNDEN.
|
||||
// Wenn kein Timestamp kommt (0 oder undefined), nehmen wir JETZT.
|
||||
const actualEventTime = (timestamp && timestamp > 0)
|
||||
? new Date(timestamp * 1000)
|
||||
: new Date();
|
||||
|
||||
// 6. Event Typ bestimmen (Toggle Logik)
|
||||
// Falls noch nie gestempelt wurde (lastEvent undefined), fangen wir mit start an.
|
||||
const nextEventType = (lastEvent?.eventtype === "work_start")
|
||||
? "work_end"
|
||||
: "work_start";
|
||||
|
||||
const dataToInsert = {
|
||||
tenant_id: device.tenant,
|
||||
user_id: profile.user_id,
|
||||
actortype: "system",
|
||||
eventtime: new Date(),
|
||||
eventtype: lastEvent.eventtype === "work_start" ? "work_end" : "work_start",
|
||||
source: "WEB"
|
||||
}
|
||||
eventtime: actualEventTime, // Hier nutzen wir die berechnete Zeit
|
||||
eventtype: nextEventType,
|
||||
source: "TERMINAL" // Habe ich von WEB auf TERMINAL geändert (optional)
|
||||
};
|
||||
|
||||
console.log(dataToInsert)
|
||||
console.log(`New Event for ${profile.user_id}: ${nextEventType} @ ${actualEventTime.toISOString()}`);
|
||||
|
||||
const [created] = await server.db
|
||||
.insert(stafftimeevents)
|
||||
//@ts-ignore
|
||||
.values(dataToInsert)
|
||||
.returning()
|
||||
.returning();
|
||||
|
||||
return created;
|
||||
|
||||
return created
|
||||
} catch (err: any) {
|
||||
console.error(err)
|
||||
return reply.code(400).send({ error: err.message })
|
||||
console.error(err);
|
||||
return reply.code(400).send({ error: err.message });
|
||||
}
|
||||
|
||||
|
||||
|
||||
console.log(req.body)
|
||||
|
||||
return
|
||||
|
||||
|
||||
}
|
||||
);
|
||||
}
|
||||
@@ -1,6 +1,4 @@
|
||||
import { FastifyInstance } from "fastify";
|
||||
import jwt from "jsonwebtoken";
|
||||
import {insertHistoryItem} from "../utils/history";
|
||||
import {buildExportZip} from "../utils/export/datev";
|
||||
import {s3} from "../utils/s3";
|
||||
import {GetObjectCommand, PutObjectCommand} from "@aws-sdk/client-s3"
|
||||
@@ -9,8 +7,11 @@ import dayjs from "dayjs";
|
||||
import {randomUUID} from "node:crypto";
|
||||
import {secrets} from "../utils/secrets";
|
||||
import {createSEPAExport} from "../utils/export/sepa";
|
||||
import {generatedexports} from "../../db/schema";
|
||||
import {eq} from "drizzle-orm";
|
||||
|
||||
const createDatevExport = async (server:FastifyInstance,req:any,startDate,endDate,beraternr,mandantennr) => {
|
||||
try {
|
||||
console.log(startDate,endDate,beraternr,mandantennr)
|
||||
|
||||
// 1) ZIP erzeugen
|
||||
@@ -44,28 +45,29 @@ const createDatevExport = async (server:FastifyInstance,req:any,startDate,endDat
|
||||
|
||||
console.log(url)
|
||||
|
||||
// 5) In Supabase-DB speichern
|
||||
const { data, error } = await server.supabase
|
||||
.from("exports")
|
||||
.insert([
|
||||
{
|
||||
tenant_id: req.user.tenant_id,
|
||||
start_date: startDate,
|
||||
end_date: endDate,
|
||||
valid_until: dayjs().add(24,"hours").toISOString(),
|
||||
file_path: fileKey,
|
||||
url: url,
|
||||
created_at: new Date().toISOString(),
|
||||
},
|
||||
])
|
||||
.select()
|
||||
.single()
|
||||
// 5) In Haupt-DB speichern
|
||||
const inserted = await server.db
|
||||
.insert(generatedexports)
|
||||
.values({
|
||||
tenantId: req.user.tenant_id,
|
||||
startDate: new Date(startDate),
|
||||
endDate: new Date(endDate),
|
||||
validUntil: dayjs().add(24, "hours").toDate(),
|
||||
filePath: fileKey,
|
||||
url,
|
||||
type: "datev",
|
||||
})
|
||||
.returning()
|
||||
|
||||
console.log(data)
|
||||
console.log(inserted[0])
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
export default async function exportRoutes(server: FastifyInstance) {
|
||||
//Export DATEV
|
||||
server.post("/exports/datev", async (req, reply) => {
|
||||
@@ -114,9 +116,22 @@ export default async function exportRoutes(server: FastifyInstance) {
|
||||
//List Exports Available for Download
|
||||
|
||||
server.get("/exports", async (req,reply) => {
|
||||
const {data,error} = await server.supabase.from("exports").select().eq("tenant_id",req.user.tenant_id)
|
||||
const data = await server.db
|
||||
.select({
|
||||
id: generatedexports.id,
|
||||
created_at: generatedexports.createdAt,
|
||||
tenant_id: generatedexports.tenantId,
|
||||
start_date: generatedexports.startDate,
|
||||
end_date: generatedexports.endDate,
|
||||
valid_until: generatedexports.validUntil,
|
||||
type: generatedexports.type,
|
||||
url: generatedexports.url,
|
||||
file_path: generatedexports.filePath,
|
||||
})
|
||||
.from(generatedexports)
|
||||
.where(eq(generatedexports.tenantId, req.user.tenant_id))
|
||||
|
||||
console.log(data,error)
|
||||
console.log(data)
|
||||
reply.send(data)
|
||||
|
||||
})
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { FastifyInstance } from "fastify";
|
||||
import {createInvoicePDF, createTimeSheetPDF} from "../utils/pdf";
|
||||
//import {encodeBase64ToNiimbot, generateLabel, useNextNumberRangeNumber} from "../utils/functions";
|
||||
import {encodeBase64ToNiimbot, generateLabel, useNextNumberRangeNumber} from "../utils/functions";
|
||||
import dayjs from "dayjs";
|
||||
//import { ready as zplReady } from 'zpl-renderer-js'
|
||||
//import { renderZPL } from "zpl-image";
|
||||
@@ -15,7 +15,6 @@ import timezone from "dayjs/plugin/timezone.js";
|
||||
import {generateTimesEvaluation} from "../modules/time/evaluation.service";
|
||||
import {citys} from "../../db/schema";
|
||||
import {eq} from "drizzle-orm";
|
||||
import {useNextNumberRangeNumber} from "../utils/functions";
|
||||
import {executeManualGeneration, finishManualGeneration} from "../modules/serialexecution.service";
|
||||
dayjs.extend(customParseFormat)
|
||||
dayjs.extend(isoWeek)
|
||||
@@ -100,31 +99,25 @@ export default async function functionRoutes(server: FastifyInstance) {
|
||||
|
||||
server.get('/functions/check-zip/:zip', async (req, reply) => {
|
||||
const { zip } = req.params as { zip: string }
|
||||
const normalizedZip = String(zip || "").replace(/\D/g, "")
|
||||
|
||||
if (!zip) {
|
||||
return reply.code(400).send({ error: 'ZIP is required' })
|
||||
if (normalizedZip.length !== 5) {
|
||||
return reply.code(400).send({ error: 'ZIP must contain exactly 5 digits' })
|
||||
}
|
||||
|
||||
try {
|
||||
//@ts-ignore
|
||||
const data = await server.db.select().from(citys).where(eq(citys.zip,zip))
|
||||
|
||||
|
||||
/*const { data, error } = await server.supabase
|
||||
.from('citys')
|
||||
const data = await server.db
|
||||
.select()
|
||||
.eq('zip', zip)
|
||||
.maybeSingle()
|
||||
.from(citys)
|
||||
.where(eq(citys.zip, Number(normalizedZip)))
|
||||
|
||||
if (error) {
|
||||
console.log(error)
|
||||
return reply.code(500).send({ error: 'Database error' })
|
||||
}*/
|
||||
|
||||
if (!data) {
|
||||
if (!data.length) {
|
||||
return reply.code(404).send({ error: 'ZIP not found' })
|
||||
}
|
||||
|
||||
const city = data[0]
|
||||
|
||||
//districtMap
|
||||
const bundeslaender = [
|
||||
{ code: 'DE-BW', name: 'Baden-Württemberg' },
|
||||
@@ -148,9 +141,8 @@ export default async function functionRoutes(server: FastifyInstance) {
|
||||
|
||||
|
||||
return reply.send({
|
||||
...data,
|
||||
//@ts-ignore
|
||||
state_code: bundeslaender.find(i => i.name === data.countryName)
|
||||
...city,
|
||||
state_code: bundeslaender.find(i => i.name === city.countryName)?.code || null
|
||||
})
|
||||
} catch (err) {
|
||||
console.log(err)
|
||||
@@ -179,44 +171,25 @@ export default async function functionRoutes(server: FastifyInstance) {
|
||||
await server.services.prepareIncomingInvoices.run(req.user.tenant_id)
|
||||
})
|
||||
|
||||
server.post('/functions/services/syncdokubox', async (req, reply) => {
|
||||
|
||||
/*server.post('/print/zpl/preview', async (req, reply) => {
|
||||
const { zpl, widthMm = 50, heightMm = 30, dpmm = 8, asBase64 = false } = req.body as {zpl:string,widthMm:number,heightMm:number,dpmm:number,asBase64:string}
|
||||
|
||||
console.log(widthMm,heightMm,dpmm)
|
||||
|
||||
if (!zpl) {
|
||||
return reply.code(400).send({ error: 'Missing ZPL string' })
|
||||
}
|
||||
|
||||
try {
|
||||
// 1️⃣ Renderer initialisieren
|
||||
const { api } = await zplReady
|
||||
|
||||
// 2️⃣ Rendern (liefert base64-encoded PNG)
|
||||
const base64Png = await api.zplToBase64Async(zpl, widthMm, heightMm, dpmm)
|
||||
|
||||
return await encodeBase64ToNiimbot(base64Png, 'top')
|
||||
} catch (err) {
|
||||
console.error('[ZPL Preview Error]', err)
|
||||
return reply.code(500).send({ error: err.message || 'Failed to render ZPL' })
|
||||
}
|
||||
await server.services.dokuboxSync.run()
|
||||
})
|
||||
|
||||
server.post('/print/label', async (req, reply) => {
|
||||
const { context, width=584, heigth=354 } = req.body as {context:any,width:number,heigth:number}
|
||||
const { context, width = 584, height = 354 } = req.body as {context:any,width:number,height:number}
|
||||
|
||||
try {
|
||||
const base64 = await generateLabel(context,width,heigth)
|
||||
const base64 = await generateLabel(context,width,height)
|
||||
|
||||
return {
|
||||
encoded: await encodeBase64ToNiimbot(base64, 'top'),
|
||||
base64: base64
|
||||
}
|
||||
} catch (err) {
|
||||
console.error('[ZPL Preview Error]', err)
|
||||
return reply.code(500).send({ error: err.message || 'Failed to render ZPL' })
|
||||
console.error('[Label Render Error]', err)
|
||||
return reply.code(500).send({ error: err.message || 'Failed to render label' })
|
||||
}
|
||||
})*/
|
||||
})
|
||||
|
||||
}
|
||||
@@ -3,12 +3,11 @@ import { FastifyInstance } from "fastify";
|
||||
export default async function routes(server: FastifyInstance) {
|
||||
server.get("/ping", async () => {
|
||||
// Testquery gegen DB
|
||||
const { data, error } = await server.supabase.from("tenants").select("id").limit(1);
|
||||
const result = await server.db.execute("SELECT NOW()");
|
||||
|
||||
return {
|
||||
status: "ok",
|
||||
db: error ? "not connected" : "connected",
|
||||
tenant_count: data?.length ?? 0
|
||||
db: JSON.stringify(result.rows[0]),
|
||||
};
|
||||
});
|
||||
}
|
||||
@@ -3,8 +3,9 @@ import { FastifyPluginAsync } from 'fastify'
|
||||
import { createConversation } from '../modules/helpdesk/helpdesk.conversation.service.js'
|
||||
import { addMessage } from '../modules/helpdesk/helpdesk.message.service.js'
|
||||
import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js'
|
||||
import {extractDomain, findCustomerOrContactByEmailOrDomain} from "../utils/helpers";
|
||||
import {useNextNumberRangeNumber} from "../utils/functions";
|
||||
import { findCustomerOrContactByEmailOrDomain } from "../utils/helpers";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { helpdesk_conversations, helpdesk_messages } from "../../db/schema";
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// 📧 Interne M2M-Route für eingehende E-Mails
|
||||
@@ -52,12 +53,12 @@ const helpdeskInboundEmailRoutes: FastifyPluginAsync = async (server) => {
|
||||
// 3️⃣ Konversation anhand In-Reply-To suchen
|
||||
let conversationId: string | null = null
|
||||
if (in_reply_to) {
|
||||
const { data: msg } = await server.supabase
|
||||
.from('helpdesk_messages')
|
||||
.select('conversation_id')
|
||||
.eq('external_message_id', in_reply_to)
|
||||
.maybeSingle()
|
||||
conversationId = msg?.conversation_id || null
|
||||
const msg = await server.db
|
||||
.select({ conversationId: helpdesk_messages.conversationId })
|
||||
.from(helpdesk_messages)
|
||||
.where(eq(helpdesk_messages.externalMessageId, in_reply_to))
|
||||
.limit(1)
|
||||
conversationId = msg[0]?.conversationId || null
|
||||
}
|
||||
|
||||
// 4️⃣ Neue Konversation anlegen falls keine existiert
|
||||
@@ -73,12 +74,12 @@ const helpdeskInboundEmailRoutes: FastifyPluginAsync = async (server) => {
|
||||
})
|
||||
conversationId = conversation.id
|
||||
} else {
|
||||
const { data } = await server.supabase
|
||||
.from('helpdesk_conversations')
|
||||
.select('*')
|
||||
.eq('id', conversationId)
|
||||
.single()
|
||||
conversation = data
|
||||
const rows = await server.db
|
||||
.select()
|
||||
.from(helpdesk_conversations)
|
||||
.where(eq(helpdesk_conversations.id, conversationId))
|
||||
.limit(1)
|
||||
conversation = rows[0]
|
||||
}
|
||||
|
||||
// 5️⃣ Nachricht speichern
|
||||
@@ -96,7 +97,7 @@ const helpdeskInboundEmailRoutes: FastifyPluginAsync = async (server) => {
|
||||
return res.status(201).send({
|
||||
success: true,
|
||||
conversation_id: conversationId,
|
||||
ticket_number: conversation.ticket_number,
|
||||
ticket_number: conversation?.ticket_number || conversation?.ticketNumber,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
@@ -3,70 +3,9 @@ import { FastifyPluginAsync } from 'fastify'
|
||||
import { createConversation } from '../modules/helpdesk/helpdesk.conversation.service.js'
|
||||
import { addMessage } from '../modules/helpdesk/helpdesk.message.service.js'
|
||||
import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js'
|
||||
|
||||
/**
|
||||
* Öffentliche Route zum Empfang eingehender Kontaktformular-Nachrichten.
|
||||
* Authentifizierung: über `public_token` aus helpdesk_channel_instances
|
||||
*/
|
||||
|
||||
function extractDomain(email) {
|
||||
if (!email) return null
|
||||
const parts = email.split("@")
|
||||
return parts.length === 2 ? parts[1].toLowerCase() : null
|
||||
}
|
||||
|
||||
async function findCustomerOrContactByEmailOrDomain(server,fromMail, tenantId) {
|
||||
const sender = fromMail
|
||||
const senderDomain = extractDomain(sender)
|
||||
if (!senderDomain) return null
|
||||
|
||||
|
||||
// 1️⃣ Direkter Match über contacts
|
||||
const { data: contactMatch } = await server.supabase
|
||||
.from("contacts")
|
||||
.select("id, customer")
|
||||
.eq("email", sender)
|
||||
.eq("tenant", tenantId)
|
||||
.maybeSingle()
|
||||
|
||||
if (contactMatch?.customer_id) return {
|
||||
customer: contactMatch.customer,
|
||||
contact: contactMatch.id
|
||||
}
|
||||
|
||||
// 2️⃣ Kunden laden, bei denen E-Mail oder Rechnungsmail passt
|
||||
const { data: customers, error } = await server.supabase
|
||||
.from("customers")
|
||||
.select("id, infoData")
|
||||
.eq("tenant", tenantId)
|
||||
|
||||
if (error) {
|
||||
console.error(`[Helpdesk] Fehler beim Laden der Kunden:`, error.message)
|
||||
return null
|
||||
}
|
||||
|
||||
// 3️⃣ Durch Kunden iterieren und prüfen
|
||||
for (const c of customers || []) {
|
||||
const info = c.infoData || {}
|
||||
const email = info.email?.toLowerCase()
|
||||
const invoiceEmail = info.invoiceEmail?.toLowerCase()
|
||||
|
||||
const emailDomain = extractDomain(email)
|
||||
const invoiceDomain = extractDomain(invoiceEmail)
|
||||
|
||||
// exakter Match oder Domain-Match
|
||||
if (
|
||||
sender === email ||
|
||||
sender === invoiceEmail ||
|
||||
senderDomain === emailDomain ||
|
||||
senderDomain === invoiceDomain
|
||||
) {
|
||||
return {customer: c.id, contact:null}
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
import { findCustomerOrContactByEmailOrDomain } from "../utils/helpers";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { helpdesk_channel_instances } from "../../db/schema";
|
||||
|
||||
const helpdeskInboundRoutes: FastifyPluginAsync = async (server) => {
|
||||
// Öffentliche POST-Route
|
||||
@@ -85,17 +24,18 @@ const helpdeskInboundRoutes: FastifyPluginAsync = async (server) => {
|
||||
}
|
||||
|
||||
// 1️⃣ Kanalinstanz anhand des Tokens ermitteln
|
||||
const { data: channel, error: channelError } = await server.supabase
|
||||
.from('helpdesk_channel_instances')
|
||||
.select('*')
|
||||
.eq('public_token', public_token)
|
||||
.single()
|
||||
const channels = await server.db
|
||||
.select()
|
||||
.from(helpdesk_channel_instances)
|
||||
.where(eq(helpdesk_channel_instances.publicToken, public_token))
|
||||
.limit(1)
|
||||
const channel = channels[0]
|
||||
|
||||
if (channelError || !channel) {
|
||||
if (!channel) {
|
||||
return res.status(404).send({ error: 'Invalid channel token' })
|
||||
}
|
||||
|
||||
const tenant_id = channel.tenant_id
|
||||
const tenant_id = channel.tenantId
|
||||
const channel_instance_id = channel.id
|
||||
|
||||
// @ts-ignore
|
||||
|
||||
@@ -5,6 +5,13 @@ import { addMessage, getMessages } from '../modules/helpdesk/helpdesk.message.se
|
||||
import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js'
|
||||
import {decrypt, encrypt} from "../utils/crypt";
|
||||
import nodemailer from "nodemailer"
|
||||
import { eq } from "drizzle-orm";
|
||||
import {
|
||||
helpdesk_channel_instances,
|
||||
helpdesk_contacts,
|
||||
helpdesk_conversations,
|
||||
helpdesk_messages,
|
||||
} from "../../db/schema";
|
||||
|
||||
const helpdeskRoutes: FastifyPluginAsync = async (server) => {
|
||||
// 📩 1. Liste aller Konversationen
|
||||
@@ -58,15 +65,30 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
|
||||
const tenant_id = req.user?.tenant_id
|
||||
const {id: conversation_id} = req.params as {id: string}
|
||||
|
||||
const { data, error } = await server.supabase
|
||||
.from('helpdesk_conversations')
|
||||
.select('*, helpdesk_contacts(*)')
|
||||
.eq('tenant_id', tenant_id)
|
||||
.eq('id', conversation_id)
|
||||
.single()
|
||||
const rows = await server.db
|
||||
.select({
|
||||
conversation: helpdesk_conversations,
|
||||
contact: helpdesk_contacts
|
||||
})
|
||||
.from(helpdesk_conversations)
|
||||
.leftJoin(helpdesk_contacts, eq(helpdesk_contacts.id, helpdesk_conversations.contactId))
|
||||
.where(eq(helpdesk_conversations.id, conversation_id))
|
||||
|
||||
if (error) return res.status(404).send({ error: 'Conversation not found' })
|
||||
return res.send(data)
|
||||
const data = rows[0]
|
||||
if (!data || data.conversation.tenantId !== tenant_id) return res.status(404).send({ error: 'Conversation not found' })
|
||||
|
||||
return res.send({
|
||||
...data.conversation,
|
||||
channel_instance_id: data.conversation.channelInstanceId,
|
||||
contact_id: data.conversation.contactId,
|
||||
contact_person_id: data.conversation.contactPersonId,
|
||||
created_at: data.conversation.createdAt,
|
||||
customer_id: data.conversation.customerId,
|
||||
last_message_at: data.conversation.lastMessageAt,
|
||||
tenant_id: data.conversation.tenantId,
|
||||
ticket_number: data.conversation.ticketNumber,
|
||||
helpdesk_contacts: data.contact,
|
||||
})
|
||||
})
|
||||
|
||||
// 🔄 4. Konversation Status ändern
|
||||
@@ -181,36 +203,39 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
|
||||
safeConfig.smtp.pass = encrypt(safeConfig.smtp.pass)
|
||||
}
|
||||
|
||||
// Speichern in Supabase
|
||||
const { data, error } = await server.supabase
|
||||
.from("helpdesk_channel_instances")
|
||||
.insert({
|
||||
tenant_id,
|
||||
type_id,
|
||||
const inserted = await server.db
|
||||
.insert(helpdesk_channel_instances)
|
||||
.values({
|
||||
tenantId: tenant_id,
|
||||
typeId: type_id,
|
||||
name,
|
||||
config: safeConfig,
|
||||
is_active,
|
||||
isActive: is_active,
|
||||
})
|
||||
.select()
|
||||
.single()
|
||||
.returning()
|
||||
|
||||
if (error) throw error
|
||||
const data = inserted[0]
|
||||
if (!data) throw new Error("Konnte Channel nicht erstellen")
|
||||
const responseConfig: any = data.config
|
||||
|
||||
// sensible Felder aus Response entfernen
|
||||
if (data.config?.imap) {
|
||||
delete data.config.imap.host
|
||||
delete data.config.imap.user
|
||||
delete data.config.imap.pass
|
||||
if (responseConfig?.imap) {
|
||||
delete responseConfig.imap.host
|
||||
delete responseConfig.imap.user
|
||||
delete responseConfig.imap.pass
|
||||
}
|
||||
if (data.config?.smtp) {
|
||||
delete data.config.smtp.host
|
||||
delete data.config.smtp.user
|
||||
delete data.config.smtp.pass
|
||||
if (responseConfig?.smtp) {
|
||||
delete responseConfig.smtp.host
|
||||
delete responseConfig.smtp.user
|
||||
delete responseConfig.smtp.pass
|
||||
}
|
||||
|
||||
reply.send({
|
||||
message: "E-Mail-Channel erfolgreich erstellt",
|
||||
channel: data,
|
||||
channel: {
|
||||
...data,
|
||||
config: responseConfig
|
||||
},
|
||||
})
|
||||
} catch (err) {
|
||||
console.error("Fehler bei Channel-Erstellung:", err)
|
||||
@@ -234,29 +259,29 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
|
||||
const { text } = req.body as { text: string }
|
||||
|
||||
// 🔹 Konversation inkl. Channel + Kontakt laden
|
||||
const { data: conv, error: convErr } = await server.supabase
|
||||
.from("helpdesk_conversations")
|
||||
.select(`
|
||||
id,
|
||||
tenant_id,
|
||||
subject,
|
||||
channel_instance_id,
|
||||
helpdesk_contacts(email),
|
||||
helpdesk_channel_instances(config, name),
|
||||
ticket_number
|
||||
`)
|
||||
.eq("id", conversationId)
|
||||
.single()
|
||||
const rows = await server.db
|
||||
.select({
|
||||
conversation: helpdesk_conversations,
|
||||
contact: helpdesk_contacts,
|
||||
channel: helpdesk_channel_instances,
|
||||
})
|
||||
.from(helpdesk_conversations)
|
||||
.leftJoin(helpdesk_contacts, eq(helpdesk_contacts.id, helpdesk_conversations.contactId))
|
||||
.leftJoin(helpdesk_channel_instances, eq(helpdesk_channel_instances.id, helpdesk_conversations.channelInstanceId))
|
||||
.where(eq(helpdesk_conversations.id, conversationId))
|
||||
.limit(1)
|
||||
|
||||
const conv = rows[0]
|
||||
|
||||
console.log(conv)
|
||||
|
||||
if (convErr || !conv) {
|
||||
if (!conv) {
|
||||
reply.status(404).send({ error: "Konversation nicht gefunden" })
|
||||
return
|
||||
}
|
||||
|
||||
const contact = conv.helpdesk_contacts as unknown as {email: string}
|
||||
const channel = conv.helpdesk_channel_instances as unknown as {name: string}
|
||||
const contact = conv.contact as unknown as {email: string}
|
||||
const channel = conv.channel as unknown as {name: string, config: any}
|
||||
|
||||
console.log(contact)
|
||||
if (!contact?.email) {
|
||||
@@ -288,7 +313,7 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
|
||||
const mailOptions = {
|
||||
from: `"${channel?.name}" <${user}>`,
|
||||
to: contact.email,
|
||||
subject: `${conv.ticket_number} | ${conv.subject}` || `${conv.ticket_number} | Antwort vom FEDEO Helpdesk`,
|
||||
subject: `${conv.conversation.ticketNumber} | ${conv.conversation.subject}` || `${conv.conversation.ticketNumber} | Antwort vom FEDEO Helpdesk`,
|
||||
text,
|
||||
}
|
||||
|
||||
@@ -296,24 +321,22 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
|
||||
console.log(`[Helpdesk SMTP] Gesendet an ${contact.email}: ${info.messageId}`)
|
||||
|
||||
// 💾 Nachricht speichern
|
||||
const { error: insertErr } = await server.supabase
|
||||
.from("helpdesk_messages")
|
||||
.insert({
|
||||
tenant_id: conv.tenant_id,
|
||||
conversation_id: conversationId,
|
||||
await server.db
|
||||
.insert(helpdesk_messages)
|
||||
.values({
|
||||
tenantId: conv.conversation.tenantId,
|
||||
conversationId: conversationId,
|
||||
direction: "outgoing",
|
||||
payload: { type: "text", text },
|
||||
external_message_id: info.messageId,
|
||||
received_at: new Date().toISOString(),
|
||||
externalMessageId: info.messageId,
|
||||
receivedAt: new Date(),
|
||||
})
|
||||
|
||||
if (insertErr) throw insertErr
|
||||
|
||||
// 🔁 Konversation aktualisieren
|
||||
await server.supabase
|
||||
.from("helpdesk_conversations")
|
||||
.update({ last_message_at: new Date().toISOString() })
|
||||
.eq("id", conversationId)
|
||||
await server.db
|
||||
.update(helpdesk_conversations)
|
||||
.set({ lastMessageAt: new Date() })
|
||||
.where(eq(helpdesk_conversations.id, conversationId))
|
||||
|
||||
reply.send({
|
||||
message: "E-Mail erfolgreich gesendet",
|
||||
|
||||
@@ -1,12 +1,39 @@
|
||||
// src/routes/resources/history.ts
|
||||
import { FastifyInstance } from "fastify";
|
||||
import { and, asc, eq, inArray } from "drizzle-orm";
|
||||
import { authProfiles, historyitems } from "../../db/schema";
|
||||
|
||||
const columnMap: Record<string, string> = {
|
||||
const columnMap: Record<string, any> = {
|
||||
customers: historyitems.customer,
|
||||
members: historyitems.customer,
|
||||
vendors: historyitems.vendor,
|
||||
projects: historyitems.project,
|
||||
plants: historyitems.plant,
|
||||
contacts: historyitems.contact,
|
||||
tasks: historyitems.task,
|
||||
vehicles: historyitems.vehicle,
|
||||
events: historyitems.event,
|
||||
files: historyitems.file,
|
||||
products: historyitems.product,
|
||||
inventoryitems: historyitems.inventoryitem,
|
||||
inventoryitemgroups: historyitems.inventoryitemgroup,
|
||||
checks: historyitems.check,
|
||||
costcentres: historyitems.costcentre,
|
||||
ownaccounts: historyitems.ownaccount,
|
||||
documentboxes: historyitems.documentbox,
|
||||
hourrates: historyitems.hourrate,
|
||||
services: historyitems.service,
|
||||
customerspaces: historyitems.customerspace,
|
||||
customerinventoryitems: historyitems.customerinventoryitem,
|
||||
memberrelations: historyitems.memberrelation,
|
||||
};
|
||||
|
||||
const insertFieldMap: Record<string, string> = {
|
||||
customers: "customer",
|
||||
members: "customer",
|
||||
vendors: "vendor",
|
||||
projects: "project",
|
||||
plants: "plant",
|
||||
contracts: "contract",
|
||||
contacts: "contact",
|
||||
tasks: "task",
|
||||
vehicles: "vehicle",
|
||||
@@ -15,17 +42,61 @@ const columnMap: Record<string, string> = {
|
||||
products: "product",
|
||||
inventoryitems: "inventoryitem",
|
||||
inventoryitemgroups: "inventoryitemgroup",
|
||||
absencerequests: "absencerequest",
|
||||
checks: "check",
|
||||
costcentres: "costcentre",
|
||||
ownaccounts: "ownaccount",
|
||||
documentboxes: "documentbox",
|
||||
hourrates: "hourrate",
|
||||
services: "service",
|
||||
roles: "role",
|
||||
};
|
||||
customerspaces: "customerspace",
|
||||
customerinventoryitems: "customerinventoryitem",
|
||||
memberrelations: "memberrelation",
|
||||
}
|
||||
|
||||
const parseId = (value: string) => {
|
||||
if (/^\d+$/.test(value)) return Number(value)
|
||||
return value
|
||||
}
|
||||
|
||||
export default async function resourceHistoryRoutes(server: FastifyInstance) {
|
||||
server.get("/history", {
|
||||
schema: {
|
||||
tags: ["History"],
|
||||
summary: "Get all history entries for the active tenant",
|
||||
},
|
||||
}, async (req: any) => {
|
||||
const data = await server.db
|
||||
.select()
|
||||
.from(historyitems)
|
||||
.where(eq(historyitems.tenant, req.user?.tenant_id))
|
||||
.orderBy(asc(historyitems.createdAt));
|
||||
|
||||
const userIds = Array.from(
|
||||
new Set(data.map((item) => item.createdBy).filter(Boolean))
|
||||
) as string[];
|
||||
|
||||
const profiles = userIds.length > 0
|
||||
? await server.db
|
||||
.select()
|
||||
.from(authProfiles)
|
||||
.where(and(
|
||||
eq(authProfiles.tenant_id, req.user?.tenant_id),
|
||||
inArray(authProfiles.user_id, userIds)
|
||||
))
|
||||
: [];
|
||||
|
||||
const profileByUserId = new Map(
|
||||
profiles.map((profile) => [profile.user_id, profile])
|
||||
);
|
||||
|
||||
return data.map((historyitem) => ({
|
||||
...historyitem,
|
||||
created_at: historyitem.createdAt,
|
||||
created_by: historyitem.createdBy,
|
||||
created_by_profile: historyitem.createdBy ? profileByUserId.get(historyitem.createdBy) || null : null,
|
||||
}));
|
||||
});
|
||||
|
||||
server.get<{
|
||||
Params: { resource: string; id: string }
|
||||
}>("/resource/:resource/:id/history", {
|
||||
@@ -49,29 +120,36 @@ export default async function resourceHistoryRoutes(server: FastifyInstance) {
|
||||
return reply.code(400).send({ error: `History not supported for resource '${resource}'` });
|
||||
}
|
||||
|
||||
const { data, error } = await server.supabase
|
||||
.from("historyitems")
|
||||
.select("*")
|
||||
.eq(column, id)
|
||||
.order("created_at", { ascending: true });
|
||||
const data = await server.db
|
||||
.select()
|
||||
.from(historyitems)
|
||||
.where(eq(column, parseId(id)))
|
||||
.orderBy(asc(historyitems.createdAt));
|
||||
|
||||
if (error) {
|
||||
server.log.error(error);
|
||||
return reply.code(500).send({ error: "Failed to fetch history" });
|
||||
}
|
||||
const userIds = Array.from(
|
||||
new Set(data.map((item) => item.createdBy).filter(Boolean))
|
||||
) as string[]
|
||||
|
||||
const {data:users, error:usersError} = await server.supabase
|
||||
.from("auth_users")
|
||||
.select("*, auth_profiles(*), tenants!auth_tenant_users(*)")
|
||||
const profiles = userIds.length > 0
|
||||
? await server.db
|
||||
.select()
|
||||
.from(authProfiles)
|
||||
.where(and(
|
||||
eq(authProfiles.tenant_id, req.user?.tenant_id),
|
||||
inArray(authProfiles.user_id, userIds)
|
||||
))
|
||||
: []
|
||||
|
||||
const filteredUsers = (users ||[]).filter(i => i.tenants.find((t:any) => t.id === req.user?.tenant_id))
|
||||
const profileByUserId = new Map(
|
||||
profiles.map((profile) => [profile.user_id, profile])
|
||||
)
|
||||
|
||||
const dataCombined = data.map(historyitem => {
|
||||
return {
|
||||
const dataCombined = data.map((historyitem) => ({
|
||||
...historyitem,
|
||||
created_by_profile: filteredUsers.find(i => i.id === historyitem.created_by) ? filteredUsers.find(i => i.id === historyitem.created_by).auth_profiles[0] : null
|
||||
}
|
||||
})
|
||||
created_at: historyitem.createdAt,
|
||||
created_by: historyitem.createdBy,
|
||||
created_by_profile: historyitem.createdBy ? profileByUserId.get(historyitem.createdBy) || null : null,
|
||||
}))
|
||||
|
||||
|
||||
|
||||
@@ -128,29 +206,33 @@ export default async function resourceHistoryRoutes(server: FastifyInstance) {
|
||||
const userId = (req.user as any)?.user_id;
|
||||
|
||||
|
||||
const fkField = columnMap[resource];
|
||||
const fkField = insertFieldMap[resource];
|
||||
if (!fkField) {
|
||||
return reply.code(400).send({ error: `Unknown resource: ${resource}` });
|
||||
}
|
||||
|
||||
const { data, error } = await server.supabase
|
||||
.from("historyitems")
|
||||
.insert({
|
||||
const inserted = await server.db
|
||||
.insert(historyitems)
|
||||
.values({
|
||||
text,
|
||||
[fkField]: id,
|
||||
[fkField]: parseId(id),
|
||||
oldVal: old_val || null,
|
||||
newVal: new_val || null,
|
||||
config: config || null,
|
||||
tenant: (req.user as any)?.tenant_id,
|
||||
created_by: userId
|
||||
createdBy: userId
|
||||
})
|
||||
.select()
|
||||
.single();
|
||||
.returning()
|
||||
|
||||
if (error) {
|
||||
return reply.code(500).send({ error: error.message });
|
||||
const data = inserted[0]
|
||||
if (!data) {
|
||||
return reply.code(500).send({ error: "Failed to create history entry" });
|
||||
}
|
||||
|
||||
return reply.code(201).send(data);
|
||||
return reply.code(201).send({
|
||||
...data,
|
||||
created_at: data.createdAt,
|
||||
created_by: data.createdBy
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
63
backend/src/routes/internal/auth.m2m.ts
Normal file
63
backend/src/routes/internal/auth.m2m.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { FastifyInstance } from "fastify"
|
||||
import jwt from "jsonwebtoken"
|
||||
import { and, eq } from "drizzle-orm"
|
||||
import { authTenantUsers } from "../../../db/schema"
|
||||
import { secrets } from "../../utils/secrets"
|
||||
|
||||
export default async function authM2mInternalRoutes(server: FastifyInstance) {
|
||||
server.post("/auth/m2m/token", {
|
||||
schema: {
|
||||
tags: ["Auth"],
|
||||
summary: "Exchange M2M API key for a short-lived JWT",
|
||||
body: {
|
||||
type: "object",
|
||||
properties: {
|
||||
expires_in_seconds: { type: "number" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}, async (req, reply) => {
|
||||
try {
|
||||
if (!req.user?.user_id || !req.user?.tenant_id || !req.user?.email) {
|
||||
return reply.code(401).send({ error: "Unauthorized" })
|
||||
}
|
||||
|
||||
const membership = await server.db
|
||||
.select()
|
||||
.from(authTenantUsers)
|
||||
.where(and(
|
||||
eq(authTenantUsers.user_id, req.user.user_id),
|
||||
eq(authTenantUsers.tenant_id, Number(req.user.tenant_id))
|
||||
))
|
||||
.limit(1)
|
||||
|
||||
if (!membership[0]) {
|
||||
return reply.code(403).send({ error: "User is not assigned to tenant" })
|
||||
}
|
||||
|
||||
const requestedTtl = Number((req.body as any)?.expires_in_seconds ?? 900)
|
||||
const ttlSeconds = Math.min(3600, Math.max(60, requestedTtl))
|
||||
|
||||
const token = jwt.sign(
|
||||
{
|
||||
user_id: req.user.user_id,
|
||||
email: req.user.email,
|
||||
tenant_id: req.user.tenant_id,
|
||||
},
|
||||
secrets.JWT_SECRET!,
|
||||
{ expiresIn: ttlSeconds }
|
||||
)
|
||||
|
||||
return {
|
||||
token_type: "Bearer",
|
||||
access_token: token,
|
||||
expires_in_seconds: ttlSeconds,
|
||||
user_id: req.user.user_id,
|
||||
tenant_id: req.user.tenant_id
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("POST /internal/auth/m2m/token ERROR:", err)
|
||||
return reply.code(500).send({ error: "Internal Server Error" })
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,21 +1,22 @@
|
||||
// routes/notifications.routes.ts
|
||||
import { FastifyInstance } from 'fastify';
|
||||
import { NotificationService, UserDirectory } from '../modules/notification.service';
|
||||
import { eq } from "drizzle-orm";
|
||||
import { authUsers } from "../../db/schema";
|
||||
|
||||
// Beispiel: E-Mail aus eigener User-Tabelle laden
|
||||
const getUserDirectory: UserDirectory = async (server:FastifyInstance, userId, tenantId) => {
|
||||
const { data, error } = await server.supabase
|
||||
.from('auth_users')
|
||||
.select('email')
|
||||
.eq('id', userId)
|
||||
.maybeSingle();
|
||||
if (error || !data) return null;
|
||||
const rows = await server.db
|
||||
.select({ email: authUsers.email })
|
||||
.from(authUsers)
|
||||
.where(eq(authUsers.id, userId))
|
||||
.limit(1)
|
||||
const data = rows[0]
|
||||
if (!data) return null;
|
||||
return { email: data.email };
|
||||
};
|
||||
|
||||
export default async function notificationsRoutes(server: FastifyInstance) {
|
||||
// wichtig: server.supabase ist über app verfügbar
|
||||
|
||||
const svc = new NotificationService(server, getUserDirectory);
|
||||
|
||||
server.post('/notifications/trigger', async (req, reply) => {
|
||||
|
||||
@@ -1,40 +1,19 @@
|
||||
import { FastifyRequest, FastifyReply, FastifyInstance } from 'fastify';
|
||||
import { publicLinkService } from '../../modules/publiclinks.service';
|
||||
|
||||
import dayjs from 'dayjs'; // Falls nicht installiert: npm install dayjs
|
||||
|
||||
export default async function publiclinksNonAuthenticatedRoutes(server: FastifyInstance) {
|
||||
server.get("/workflows/context/:token", async (req, reply) => {
|
||||
const { token } = req.params as { token: string };
|
||||
|
||||
// Wir lesen die PIN aus dem Header (Best Practice für Security)
|
||||
const pin = req.headers['x-public-pin'] as string | undefined;
|
||||
|
||||
try {
|
||||
const context = await publicLinkService.getLinkContext(server, token, pin);
|
||||
|
||||
return reply.send(context);
|
||||
|
||||
} catch (error: any) {
|
||||
// Spezifische Fehlercodes für das Frontend
|
||||
if (error.message === "Link_NotFound") {
|
||||
return reply.code(404).send({ error: "Link nicht gefunden oder abgelaufen" });
|
||||
}
|
||||
|
||||
if (error.message === "Pin_Required") {
|
||||
return reply.code(401).send({
|
||||
error: "PIN erforderlich",
|
||||
code: "PIN_REQUIRED",
|
||||
requirePin: true
|
||||
});
|
||||
}
|
||||
|
||||
if (error.message === "Pin_Invalid") {
|
||||
return reply.code(403).send({
|
||||
error: "PIN falsch",
|
||||
code: "PIN_INVALID",
|
||||
requirePin: true
|
||||
});
|
||||
}
|
||||
if (error.message === "Link_NotFound") return reply.code(404).send({ error: "Link nicht gefunden" });
|
||||
if (error.message === "Pin_Required") return reply.code(401).send({ error: "PIN erforderlich", requirePin: true });
|
||||
if (error.message === "Pin_Invalid") return reply.code(403).send({ error: "PIN falsch", requirePin: true });
|
||||
|
||||
server.log.error(error);
|
||||
return reply.code(500).send({ error: "Interner Server Fehler" });
|
||||
@@ -43,49 +22,31 @@ export default async function publiclinksNonAuthenticatedRoutes(server: FastifyI
|
||||
|
||||
server.post("/workflows/submit/:token", async (req, reply) => {
|
||||
const { token } = req.params as { token: string };
|
||||
// PIN sicher aus dem Header lesen
|
||||
const pin = req.headers['x-public-pin'] as string | undefined;
|
||||
// Der Body enthält { profile, project, service, ... }
|
||||
const payload = req.body;
|
||||
|
||||
console.log(payload)
|
||||
const body = req.body as any;
|
||||
|
||||
try {
|
||||
// Service aufrufen (führt die 3 Schritte aus: Lieferschein -> Zeit -> History)
|
||||
const result = await publicLinkService.submitFormData(server, token, payload, pin);
|
||||
const quantity = parseFloat(body.quantity) || 0;
|
||||
|
||||
// 201 Created zurückgeben
|
||||
// Wir nutzen das vom User gewählte deliveryDate
|
||||
// Falls kein Datum geschickt wurde, Fallback auf Heute
|
||||
const baseDate = body.deliveryDate ? dayjs(body.deliveryDate) : dayjs();
|
||||
|
||||
const payload = {
|
||||
...body,
|
||||
// Wir mappen das deliveryDate auf die Zeitstempel
|
||||
// Start ist z.B. 08:00 Uhr am gewählten Tag, Ende ist Start + Menge
|
||||
startDate: baseDate.hour(8).minute(0).toDate(),
|
||||
endDate: baseDate.hour(8).add(quantity, 'hour').toDate(),
|
||||
deliveryDate: baseDate.format('YYYY-MM-DD')
|
||||
};
|
||||
|
||||
const result = await publicLinkService.submitFormData(server, token, payload, pin);
|
||||
return reply.code(201).send(result);
|
||||
|
||||
} catch (error: any) {
|
||||
console.log(error);
|
||||
|
||||
// Fehler-Mapping für saubere HTTP Codes
|
||||
if (error.message === "Link_NotFound") {
|
||||
return reply.code(404).send({ error: "Link ungültig oder nicht aktiv" });
|
||||
}
|
||||
|
||||
if (error.message === "Pin_Required") {
|
||||
return reply.code(401).send({ error: "PIN erforderlich" });
|
||||
}
|
||||
|
||||
if (error.message === "Pin_Invalid") {
|
||||
return reply.code(403).send({ error: "PIN ist falsch" });
|
||||
}
|
||||
|
||||
if (error.message === "Profile_Missing") {
|
||||
return reply.code(400).send({ error: "Kein Mitarbeiter-Profil gefunden (weder im Link noch in der Eingabe)" });
|
||||
}
|
||||
|
||||
if (error.message === "Project not found" || error.message === "Service not found") {
|
||||
return reply.code(400).send({ error: "Ausgewähltes Projekt oder Leistung existiert nicht mehr." });
|
||||
}
|
||||
|
||||
// Fallback für alle anderen Fehler (z.B. DB Constraints)
|
||||
return reply.code(500).send({
|
||||
error: "Interner Fehler beim Speichern",
|
||||
details: error.message
|
||||
});
|
||||
server.log.error(error);
|
||||
return reply.code(500).send({ error: "Fehler beim Speichern", details: error.message });
|
||||
}
|
||||
});
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,9 +1,9 @@
|
||||
import { FastifyInstance } from "fastify"
|
||||
import { asc, desc } from "drizzle-orm"
|
||||
import { asc, desc, eq } from "drizzle-orm"
|
||||
import { sortData } from "../utils/sort"
|
||||
|
||||
// Schema imports
|
||||
import { accounts, units,countrys } from "../../db/schema"
|
||||
import { accounts, units, countrys, tenants } from "../../db/schema"
|
||||
|
||||
const TABLE_MAP: Record<string, any> = {
|
||||
accounts,
|
||||
@@ -35,11 +35,49 @@ export default async function resourceRoutesSpecial(server: FastifyInstance) {
|
||||
}
|
||||
|
||||
// ---------------------------------------
|
||||
// 📌 SELECT: wir ignorieren select string (wie Supabase)
|
||||
// 📌 SELECT: select-string wird in dieser Route bewusst ignoriert
|
||||
// Drizzle kann kein dynamisches Select aus String!
|
||||
// Wir geben IMMER alle Spalten zurück → kompatibel zum Frontend
|
||||
// ---------------------------------------
|
||||
|
||||
if (resource === "accounts") {
|
||||
const [tenant] = await server.db
|
||||
.select({
|
||||
accountChart: tenants.accountChart,
|
||||
})
|
||||
.from(tenants)
|
||||
.where(eq(tenants.id, Number(req.user.tenant_id)))
|
||||
.limit(1)
|
||||
|
||||
const activeAccountChart = tenant?.accountChart || "skr03"
|
||||
let data
|
||||
if (sort && (accounts as any)[sort]) {
|
||||
const col = (accounts as any)[sort]
|
||||
data = ascQuery === "true"
|
||||
? await server.db
|
||||
.select()
|
||||
.from(accounts)
|
||||
.where(eq(accounts.accountChart, activeAccountChart))
|
||||
.orderBy(asc(col))
|
||||
: await server.db
|
||||
.select()
|
||||
.from(accounts)
|
||||
.where(eq(accounts.accountChart, activeAccountChart))
|
||||
.orderBy(desc(col))
|
||||
} else {
|
||||
data = await server.db
|
||||
.select()
|
||||
.from(accounts)
|
||||
.where(eq(accounts.accountChart, activeAccountChart))
|
||||
}
|
||||
|
||||
return sortData(
|
||||
data,
|
||||
sort as any,
|
||||
ascQuery === "true"
|
||||
)
|
||||
}
|
||||
|
||||
let query = server.db.select().from(table)
|
||||
|
||||
// ---------------------------------------
|
||||
|
||||
@@ -22,7 +22,7 @@ export default async function staffTimeRoutes(server: FastifyInstance) {
|
||||
|
||||
server.post("/staff/time/event", async (req, reply) => {
|
||||
try {
|
||||
const userId = req.user.user_id
|
||||
const actorId = req.user.user_id;
|
||||
const tenantId = req.user.tenant_id
|
||||
|
||||
const body = req.body as any
|
||||
@@ -35,17 +35,15 @@ export default async function staffTimeRoutes(server: FastifyInstance) {
|
||||
|
||||
const dataToInsert = {
|
||||
tenant_id: tenantId,
|
||||
user_id: userId,
|
||||
user_id: body.user_id,
|
||||
actortype: "user",
|
||||
actoruser_id: userId,
|
||||
actoruser_id: actorId,
|
||||
eventtime: normalizeDate(body.eventtime),
|
||||
eventtype: body.eventtype,
|
||||
source: "WEB",
|
||||
payload: body.payload // Payload (z.B. Description) mit speichern
|
||||
}
|
||||
|
||||
console.log(dataToInsert)
|
||||
|
||||
const [created] = await server.db
|
||||
.insert(stafftimeevents)
|
||||
//@ts-ignore
|
||||
@@ -62,17 +60,17 @@ export default async function staffTimeRoutes(server: FastifyInstance) {
|
||||
// 🆕 POST /staff/time/edit (Bearbeiten durch Invalidieren + Neu erstellen)
|
||||
server.post("/staff/time/edit", async (req, reply) => {
|
||||
try {
|
||||
const userId = req.user.user_id;
|
||||
// 1. Der "Actor" ist der, der gerade eingeloggt ist (z.B. Manager)
|
||||
const actorId = req.user.user_id;
|
||||
const tenantId = req.user.tenant_id;
|
||||
|
||||
// Wir erwarten das komplette Paket für die Änderung
|
||||
const {
|
||||
originalEventIds, // Array der IDs, die "gelöscht" werden sollen (Start ID, End ID)
|
||||
newStart, // ISO String
|
||||
newEnd, // ISO String
|
||||
newType, // z.B. 'work', 'vacation'
|
||||
originalEventIds,
|
||||
newStart,
|
||||
newEnd,
|
||||
newType,
|
||||
description,
|
||||
reason // Warum wurde geändert? (Audit)
|
||||
reason
|
||||
} = req.body as {
|
||||
originalEventIds: string[],
|
||||
newStart: string,
|
||||
@@ -86,41 +84,67 @@ export default async function staffTimeRoutes(server: FastifyInstance) {
|
||||
return reply.code(400).send({ error: "Keine Events zum Bearbeiten angegeben." });
|
||||
}
|
||||
|
||||
// 1. Transaction starten (damit alles oder nichts passiert)
|
||||
// -----------------------------------------------------------
|
||||
// SCHRITT A: Den eigentlichen Besitzer (Mitarbeiter) ermitteln
|
||||
// -----------------------------------------------------------
|
||||
// Wir holen uns das erste Event aus der Liste, um zu sehen, wem es gehört.
|
||||
const existingEvents = await server.db
|
||||
.select({
|
||||
user_id: stafftimeevents.user_id,
|
||||
tenant_id: stafftimeevents.tenant_id
|
||||
})
|
||||
.from(stafftimeevents)
|
||||
.where(and(
|
||||
eq(stafftimeevents.id, originalEventIds[0]),
|
||||
eq(stafftimeevents.tenant_id, tenantId) // Sicherheitscheck: Nur im eigenen Tenant
|
||||
))
|
||||
.limit(1);
|
||||
|
||||
if (existingEvents.length === 0) {
|
||||
return reply.code(404).send({ error: "Ursprüngliches Event nicht gefunden oder Zugriff verweigert." });
|
||||
}
|
||||
|
||||
// Das ist der Mitarbeiter, dem die Zeit gehört
|
||||
const targetUserId = existingEvents[0].user_id;
|
||||
|
||||
|
||||
// -----------------------------------------------------------
|
||||
// SCHRITT B: Transaktion durchführen
|
||||
// -----------------------------------------------------------
|
||||
await server.db.transaction(async (tx) => {
|
||||
|
||||
// A. INVALIDIEREN (Die alten Events "löschen")
|
||||
// Wir erstellen für jedes alte Event ein 'invalidated' Event
|
||||
// 1. INVALIDIEREN
|
||||
// Wir nutzen 'targetUserId' als Besitzer des Events, aber 'actorId' als Auslöser
|
||||
const invalidations = originalEventIds.map(id => ({
|
||||
tenant_id: tenantId,
|
||||
user_id: userId, // Gehört dem Mitarbeiter
|
||||
user_id: targetUserId, // <--- WICHTIG: Gehört dem Mitarbeiter
|
||||
actortype: "user",
|
||||
actoruser_id: userId, // Wer hat geändert?
|
||||
actoruser_id: actorId, // <--- WICHTIG: Geändert durch Manager/Self
|
||||
eventtime: new Date(),
|
||||
eventtype: "invalidated", // <--- NEUER TYP: Muss in loadValidEvents gefiltert werden!
|
||||
eventtype: "invalidated",
|
||||
source: "WEB",
|
||||
related_event_id: id, // Zeigt auf das alte Event
|
||||
related_event_id: id,
|
||||
invalidates_event_id: id,
|
||||
metadata: {
|
||||
reason: reason || "Bearbeitung",
|
||||
replaced_by_edit: true
|
||||
}
|
||||
}));
|
||||
|
||||
// Batch Insert
|
||||
// @ts-ignore
|
||||
await tx.insert(stafftimeevents).values(invalidations);
|
||||
|
||||
// B. NEU ERSTELLEN (Die korrigierten Events anlegen)
|
||||
// 2. NEU ERSTELLEN
|
||||
|
||||
// Start Event
|
||||
// @ts-ignore
|
||||
await tx.insert(stafftimeevents).values({
|
||||
tenant_id: tenantId,
|
||||
user_id: userId,
|
||||
user_id: targetUserId, // <--- Gehört dem Mitarbeiter
|
||||
actortype: "user",
|
||||
actoruser_id: userId,
|
||||
actoruser_id: actorId, // <--- Erstellt durch Manager/Self
|
||||
eventtime: new Date(newStart),
|
||||
eventtype: `${newType}_start`, // z.B. work_start
|
||||
eventtype: `${newType}_start`,
|
||||
source: "WEB",
|
||||
payload: { description: description || "" }
|
||||
});
|
||||
@@ -130,11 +154,11 @@ export default async function staffTimeRoutes(server: FastifyInstance) {
|
||||
// @ts-ignore
|
||||
await tx.insert(stafftimeevents).values({
|
||||
tenant_id: tenantId,
|
||||
user_id: userId,
|
||||
user_id: targetUserId, // <--- Gehört dem Mitarbeiter
|
||||
actortype: "user",
|
||||
actoruser_id: userId,
|
||||
actoruser_id: actorId, // <--- Erstellt durch Manager/Self
|
||||
eventtime: new Date(newEnd),
|
||||
eventtype: `${newType}_end`, // z.B. work_end
|
||||
eventtype: `${newType}_end`,
|
||||
source: "WEB"
|
||||
});
|
||||
}
|
||||
@@ -365,7 +389,9 @@ export default async function staffTimeRoutes(server: FastifyInstance) {
|
||||
const evaluatedUserId = targetUserId || actingUserId;
|
||||
|
||||
const startDate = new Date(from);
|
||||
const endDate = new Date(to);
|
||||
let endDateQuery = new Date(to);
|
||||
endDateQuery.setDate(endDateQuery.getDate() + 1);
|
||||
const endDate = endDateQuery;
|
||||
|
||||
if (isNaN(startDate.getTime()) || isNaN(endDate.getTime())) {
|
||||
return reply.code(400).send({ error: "Ungültiges Datumsformat." });
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import { FastifyInstance } from 'fastify'
|
||||
import { StaffTimeEntryConnect } from '../../types/staff'
|
||||
import { asc, eq } from "drizzle-orm";
|
||||
import { stafftimenetryconnects } from "../../../db/schema";
|
||||
|
||||
export default async function staffTimeConnectRoutes(server: FastifyInstance) {
|
||||
|
||||
@@ -8,16 +10,21 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
|
||||
'/staff/time/:id/connects',
|
||||
async (req, reply) => {
|
||||
const { id } = req.params
|
||||
const { started_at, stopped_at, project_id, customer_id, task_id, ticket_id, notes } = req.body
|
||||
const { started_at, stopped_at, project_id, notes } = req.body
|
||||
const parsedProjectId = project_id ? Number(project_id) : null
|
||||
|
||||
const { data, error } = await server.supabase
|
||||
.from('staff_time_entry_connects')
|
||||
.insert([{ time_entry_id: id, started_at, stopped_at, project_id, customer_id, task_id, ticket_id, notes }])
|
||||
.select()
|
||||
.maybeSingle()
|
||||
const data = await server.db
|
||||
.insert(stafftimenetryconnects)
|
||||
.values({
|
||||
stafftimeentry: id,
|
||||
started_at: new Date(started_at),
|
||||
stopped_at: new Date(stopped_at),
|
||||
project_id: parsedProjectId,
|
||||
notes
|
||||
})
|
||||
.returning()
|
||||
|
||||
if (error) return reply.code(400).send({ error: error.message })
|
||||
return reply.send(data)
|
||||
return reply.send(data[0])
|
||||
}
|
||||
)
|
||||
|
||||
@@ -26,13 +33,12 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
|
||||
'/staff/time/:id/connects',
|
||||
async (req, reply) => {
|
||||
const { id } = req.params
|
||||
const { data, error } = await server.supabase
|
||||
.from('staff_time_entry_connects')
|
||||
.select('*')
|
||||
.eq('time_entry_id', id)
|
||||
.order('started_at', { ascending: true })
|
||||
const data = await server.db
|
||||
.select()
|
||||
.from(stafftimenetryconnects)
|
||||
.where(eq(stafftimenetryconnects.stafftimeentry, id))
|
||||
.orderBy(asc(stafftimenetryconnects.started_at))
|
||||
|
||||
if (error) return reply.code(400).send({ error: error.message })
|
||||
return reply.send(data)
|
||||
}
|
||||
)
|
||||
@@ -42,15 +48,20 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
|
||||
'/staff/time/connects/:connectId',
|
||||
async (req, reply) => {
|
||||
const { connectId } = req.params
|
||||
const { data, error } = await server.supabase
|
||||
.from('staff_time_entry_connects')
|
||||
.update({ ...req.body, updated_at: new Date().toISOString() })
|
||||
.eq('id', connectId)
|
||||
.select()
|
||||
.maybeSingle()
|
||||
const patchData = { ...req.body } as any
|
||||
if (patchData.started_at) patchData.started_at = new Date(patchData.started_at)
|
||||
if (patchData.stopped_at) patchData.stopped_at = new Date(patchData.stopped_at)
|
||||
if (patchData.project_id !== undefined) {
|
||||
patchData.project_id = patchData.project_id ? Number(patchData.project_id) : null
|
||||
}
|
||||
|
||||
if (error) return reply.code(400).send({ error: error.message })
|
||||
return reply.send(data)
|
||||
const data = await server.db
|
||||
.update(stafftimenetryconnects)
|
||||
.set({ ...patchData, updated_at: new Date() })
|
||||
.where(eq(stafftimenetryconnects.id, connectId))
|
||||
.returning()
|
||||
|
||||
return reply.send(data[0])
|
||||
}
|
||||
)
|
||||
|
||||
@@ -59,12 +70,10 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
|
||||
'/staff/time/connects/:connectId',
|
||||
async (req, reply) => {
|
||||
const { connectId } = req.params
|
||||
const { error } = await server.supabase
|
||||
.from('staff_time_entry_connects')
|
||||
.delete()
|
||||
.eq('id', connectId)
|
||||
await server.db
|
||||
.delete(stafftimenetryconnects)
|
||||
.where(eq(stafftimenetryconnects.id, connectId))
|
||||
|
||||
if (error) return reply.code(400).send({ error: error.message })
|
||||
return reply.send({ success: true })
|
||||
}
|
||||
)
|
||||
|
||||
@@ -1,18 +1,26 @@
|
||||
import { FastifyInstance } from "fastify"
|
||||
import jwt from "jsonwebtoken"
|
||||
import { secrets } from "../utils/secrets"
|
||||
import { createHash, randomBytes } from "node:crypto"
|
||||
|
||||
import {
|
||||
authTenantUsers,
|
||||
authUsers,
|
||||
authProfiles,
|
||||
tenants
|
||||
tenants,
|
||||
m2mApiKeys
|
||||
} from "../../db/schema"
|
||||
|
||||
import {and, eq, inArray} from "drizzle-orm"
|
||||
import {and, desc, eq, inArray} from "drizzle-orm"
|
||||
|
||||
|
||||
export default async function tenantRoutes(server: FastifyInstance) {
|
||||
const generateApiKey = () => {
|
||||
const raw = randomBytes(32).toString("base64url")
|
||||
return `fedeo_m2m_${raw}`
|
||||
}
|
||||
const hashApiKey = (apiKey: string) =>
|
||||
createHash("sha256").update(apiKey, "utf8").digest("hex")
|
||||
|
||||
|
||||
// -------------------------------------------------------------
|
||||
@@ -73,7 +81,7 @@ export default async function tenantRoutes(server: FastifyInstance) {
|
||||
httpOnly: true,
|
||||
sameSite: process.env.NODE_ENV === "production" ? "none" : "lax",
|
||||
secure: process.env.NODE_ENV === "production",
|
||||
maxAge: 60 * 60 * 3,
|
||||
maxAge: 60 * 60 * 6,
|
||||
})
|
||||
|
||||
return { token }
|
||||
@@ -241,4 +249,172 @@ export default async function tenantRoutes(server: FastifyInstance) {
|
||||
}
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// M2M API KEYS
|
||||
// -------------------------------------------------------------
|
||||
server.get("/tenant/api-keys", async (req, reply) => {
|
||||
try {
|
||||
const tenantId = req.user?.tenant_id
|
||||
if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })
|
||||
|
||||
const keys = await server.db
|
||||
.select({
|
||||
id: m2mApiKeys.id,
|
||||
name: m2mApiKeys.name,
|
||||
tenant_id: m2mApiKeys.tenantId,
|
||||
user_id: m2mApiKeys.userId,
|
||||
active: m2mApiKeys.active,
|
||||
key_prefix: m2mApiKeys.keyPrefix,
|
||||
created_at: m2mApiKeys.createdAt,
|
||||
updated_at: m2mApiKeys.updatedAt,
|
||||
expires_at: m2mApiKeys.expiresAt,
|
||||
last_used_at: m2mApiKeys.lastUsedAt,
|
||||
})
|
||||
.from(m2mApiKeys)
|
||||
.where(eq(m2mApiKeys.tenantId, tenantId))
|
||||
.orderBy(desc(m2mApiKeys.createdAt))
|
||||
|
||||
return keys
|
||||
} catch (err) {
|
||||
console.error("/tenant/api-keys GET ERROR:", err)
|
||||
return reply.code(500).send({ error: "Internal Server Error" })
|
||||
}
|
||||
})
|
||||
|
||||
server.post("/tenant/api-keys", async (req, reply) => {
|
||||
try {
|
||||
const tenantId = req.user?.tenant_id
|
||||
const creatorUserId = req.user?.user_id
|
||||
if (!tenantId || !creatorUserId) {
|
||||
return reply.code(401).send({ error: "Unauthorized" })
|
||||
}
|
||||
|
||||
const { name, user_id, expires_at } = req.body as {
|
||||
name: string
|
||||
user_id: string
|
||||
expires_at?: string | null
|
||||
}
|
||||
|
||||
if (!name || !user_id) {
|
||||
return reply.code(400).send({ error: "name and user_id are required" })
|
||||
}
|
||||
|
||||
const userMembership = await server.db
|
||||
.select()
|
||||
.from(authTenantUsers)
|
||||
.where(and(
|
||||
eq(authTenantUsers.tenant_id, tenantId),
|
||||
eq(authTenantUsers.user_id, user_id)
|
||||
))
|
||||
.limit(1)
|
||||
|
||||
if (!userMembership[0]) {
|
||||
return reply.code(400).send({ error: "user_id is not assigned to this tenant" })
|
||||
}
|
||||
|
||||
const plainApiKey = generateApiKey()
|
||||
const keyPrefix = plainApiKey.slice(0, 16)
|
||||
const keyHash = hashApiKey(plainApiKey)
|
||||
|
||||
const inserted = await server.db
|
||||
.insert(m2mApiKeys)
|
||||
.values({
|
||||
tenantId,
|
||||
userId: user_id,
|
||||
createdBy: creatorUserId,
|
||||
name,
|
||||
keyPrefix,
|
||||
keyHash,
|
||||
expiresAt: expires_at ? new Date(expires_at) : null,
|
||||
})
|
||||
.returning({
|
||||
id: m2mApiKeys.id,
|
||||
name: m2mApiKeys.name,
|
||||
tenant_id: m2mApiKeys.tenantId,
|
||||
user_id: m2mApiKeys.userId,
|
||||
key_prefix: m2mApiKeys.keyPrefix,
|
||||
created_at: m2mApiKeys.createdAt,
|
||||
expires_at: m2mApiKeys.expiresAt,
|
||||
active: m2mApiKeys.active,
|
||||
})
|
||||
|
||||
return reply.code(201).send({
|
||||
...inserted[0],
|
||||
api_key: plainApiKey, // only returned once
|
||||
})
|
||||
} catch (err) {
|
||||
console.error("/tenant/api-keys POST ERROR:", err)
|
||||
return reply.code(500).send({ error: "Internal Server Error" })
|
||||
}
|
||||
})
|
||||
|
||||
server.patch("/tenant/api-keys/:id", async (req, reply) => {
|
||||
try {
|
||||
const tenantId = req.user?.tenant_id
|
||||
if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })
|
||||
|
||||
const { id } = req.params as { id: string }
|
||||
const { name, active, expires_at } = req.body as {
|
||||
name?: string
|
||||
active?: boolean
|
||||
expires_at?: string | null
|
||||
}
|
||||
|
||||
const updateData: any = {
|
||||
updatedAt: new Date()
|
||||
}
|
||||
if (name !== undefined) updateData.name = name
|
||||
if (active !== undefined) updateData.active = active
|
||||
if (expires_at !== undefined) updateData.expiresAt = expires_at ? new Date(expires_at) : null
|
||||
|
||||
const updated = await server.db
|
||||
.update(m2mApiKeys)
|
||||
.set(updateData)
|
||||
.where(and(
|
||||
eq(m2mApiKeys.id, id),
|
||||
eq(m2mApiKeys.tenantId, tenantId)
|
||||
))
|
||||
.returning({
|
||||
id: m2mApiKeys.id,
|
||||
name: m2mApiKeys.name,
|
||||
tenant_id: m2mApiKeys.tenantId,
|
||||
user_id: m2mApiKeys.userId,
|
||||
active: m2mApiKeys.active,
|
||||
key_prefix: m2mApiKeys.keyPrefix,
|
||||
updated_at: m2mApiKeys.updatedAt,
|
||||
expires_at: m2mApiKeys.expiresAt,
|
||||
last_used_at: m2mApiKeys.lastUsedAt,
|
||||
})
|
||||
|
||||
if (!updated[0]) {
|
||||
return reply.code(404).send({ error: "API key not found" })
|
||||
}
|
||||
|
||||
return updated[0]
|
||||
} catch (err) {
|
||||
console.error("/tenant/api-keys PATCH ERROR:", err)
|
||||
return reply.code(500).send({ error: "Internal Server Error" })
|
||||
}
|
||||
})
|
||||
|
||||
server.delete("/tenant/api-keys/:id", async (req, reply) => {
|
||||
try {
|
||||
const tenantId = req.user?.tenant_id
|
||||
if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })
|
||||
|
||||
const { id } = req.params as { id: string }
|
||||
await server.db
|
||||
.delete(m2mApiKeys)
|
||||
.where(and(
|
||||
eq(m2mApiKeys.id, id),
|
||||
eq(m2mApiKeys.tenantId, tenantId)
|
||||
))
|
||||
|
||||
return { success: true }
|
||||
} catch (err) {
|
||||
console.error("/tenant/api-keys DELETE ERROR:", err)
|
||||
return reply.code(500).send({ error: "Internal Server Error" })
|
||||
}
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
344
backend/src/routes/wiki.ts
Normal file
344
backend/src/routes/wiki.ts
Normal file
@@ -0,0 +1,344 @@
|
||||
import { FastifyInstance } from "fastify"
|
||||
import { and, eq, isNull, asc, inArray } from "drizzle-orm"
|
||||
// WICHTIG: Hier müssen die Schemas der Entitäten importiert werden!
|
||||
import {
|
||||
wikiPages,
|
||||
authUsers,
|
||||
// Bereits vorhanden
|
||||
customers,
|
||||
projects,
|
||||
plants,
|
||||
products,
|
||||
inventoryitems,
|
||||
customerinventoryitems,
|
||||
customerspaces,
|
||||
// NEU HINZUGEFÜGT (Basierend auf deinem DataStore)
|
||||
tasks,
|
||||
contacts,
|
||||
contracts,
|
||||
vehicles,
|
||||
vendors,
|
||||
spaces,
|
||||
inventoryitemgroups,
|
||||
services,
|
||||
hourrates,
|
||||
events,
|
||||
productcategories,
|
||||
servicecategories,
|
||||
ownaccounts
|
||||
} from "../../db/schema/"
|
||||
|
||||
// Konfiguration: Welche Entitäten sollen im Wiki auftauchen?
|
||||
const ENTITY_CONFIG: Record<string, { table: any, labelField: any, rootLabel: string, idField: 'id' | 'uuid' }> = {
|
||||
// --- BEREITS VORHANDEN ---
|
||||
'customers': { table: customers, labelField: customers.name, rootLabel: 'Kunden', idField: 'id' },
|
||||
'projects': { table: projects, labelField: projects.name, rootLabel: 'Projekte', idField: 'id' },
|
||||
'plants': { table: plants, labelField: plants.name, rootLabel: 'Objekte', idField: 'id' },
|
||||
'products': { table: products, labelField: products.name, rootLabel: 'Artikel', idField: 'id' },
|
||||
'inventoryitems': { table: inventoryitems, labelField: inventoryitems.name, rootLabel: 'Inventarartikel', idField: 'id' },
|
||||
'customerinventoryitems': { table: customerinventoryitems, labelField: customerinventoryitems.name, rootLabel: 'Kundeninventar', idField: 'id' },
|
||||
'customerspaces': { table: customerspaces, labelField: customerspaces.name, rootLabel: 'Kundenlagerplätze', idField: 'id' },
|
||||
|
||||
// --- NEU BASIEREND AUF DATASTORE ---
|
||||
'tasks': { table: tasks, labelField: tasks.name, rootLabel: 'Aufgaben', idField: 'id' },
|
||||
'contacts': { table: contacts, labelField: contacts.fullName, rootLabel: 'Kontakte', idField: 'id' },
|
||||
'contracts': { table: contracts, labelField: contracts.name, rootLabel: 'Verträge', idField: 'id' },
|
||||
'vehicles': { table: vehicles, labelField: vehicles.license_plate, rootLabel: 'Fahrzeuge', idField: 'id' },
|
||||
'vendors': { table: vendors, labelField: vendors.name, rootLabel: 'Lieferanten', idField: 'id' },
|
||||
'spaces': { table: spaces, labelField: spaces.name, rootLabel: 'Lagerplätze', idField: 'id' },
|
||||
'inventoryitemgroups': { table: inventoryitemgroups, labelField: inventoryitemgroups.name, rootLabel: 'Inventarartikelgruppen', idField: 'id' },
|
||||
'services': { table: services, labelField: services.name, rootLabel: 'Leistungen', idField: 'id' },
|
||||
'hourrates': { table: hourrates, labelField: hourrates.name, rootLabel: 'Stundensätze', idField: 'id' },
|
||||
'events': { table: events, labelField: events.name, rootLabel: 'Termine', idField: 'id' },
|
||||
'productcategories': { table: productcategories, labelField: productcategories.name, rootLabel: 'Artikelkategorien', idField: 'id' },
|
||||
'servicecategories': { table: servicecategories, labelField: servicecategories.name, rootLabel: 'Leistungskategorien', idField: 'id' },
|
||||
'ownaccounts': { table: ownaccounts, labelField: ownaccounts.name, rootLabel: 'Zusätzliche Buchungskonten', idField: 'id' },
|
||||
}
|
||||
|
||||
// Types
|
||||
interface WikiTreeQuery {
|
||||
entityType?: string
|
||||
entityId?: number
|
||||
entityUuid?: string
|
||||
}
|
||||
|
||||
interface WikiCreateBody {
|
||||
title: string
|
||||
parentId?: string
|
||||
isFolder?: boolean
|
||||
entityType?: string
|
||||
entityId?: number
|
||||
entityUuid?: string
|
||||
}
|
||||
|
||||
interface WikiUpdateBody {
|
||||
title?: string
|
||||
content?: any
|
||||
parentId?: string | null
|
||||
sortOrder?: number
|
||||
isFolder?: boolean
|
||||
}
|
||||
|
||||
export default async function wikiRoutes(server: FastifyInstance) {
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// 1. GET /wiki/tree
|
||||
// Lädt Struktur: Entweder gefiltert (Widget) oder Global (mit virtuellen Ordnern)
|
||||
// ---------------------------------------------------------
|
||||
server.get<{ Querystring: WikiTreeQuery }>("/wiki/tree", async (req, reply) => {
|
||||
const user = req.user
|
||||
const { entityType, entityId, entityUuid } = req.query
|
||||
|
||||
// FALL A: WIDGET-ANSICHT (Spezifische Entität)
|
||||
// Wenn wir spezifisch filtern, wollen wir nur die echten Seiten ohne virtuelle Ordner
|
||||
if (entityType && (entityId || entityUuid)) {
|
||||
const filters = [
|
||||
eq(wikiPages.tenantId, user.tenant_id),
|
||||
eq(wikiPages.entityType, entityType)
|
||||
]
|
||||
|
||||
if (entityId) filters.push(eq(wikiPages.entityId, Number(entityId)))
|
||||
else if (entityUuid) filters.push(eq(wikiPages.entityUuid, entityUuid))
|
||||
|
||||
return server.db
|
||||
.select({
|
||||
id: wikiPages.id,
|
||||
parentId: wikiPages.parentId,
|
||||
title: wikiPages.title,
|
||||
isFolder: wikiPages.isFolder,
|
||||
sortOrder: wikiPages.sortOrder,
|
||||
entityType: wikiPages.entityType,
|
||||
updatedAt: wikiPages.updatedAt,
|
||||
})
|
||||
.from(wikiPages)
|
||||
.where(and(...filters))
|
||||
.orderBy(asc(wikiPages.sortOrder), asc(wikiPages.title))
|
||||
}
|
||||
|
||||
// FALL B: GLOBALE ANSICHT (Haupt-Wiki)
|
||||
// Wir laden ALLES und bauen virtuelle Ordner für die Entitäten
|
||||
|
||||
// 1. Alle Wiki-Seiten des Tenants laden
|
||||
const allPages = await server.db
|
||||
.select({
|
||||
id: wikiPages.id,
|
||||
parentId: wikiPages.parentId,
|
||||
title: wikiPages.title,
|
||||
isFolder: wikiPages.isFolder,
|
||||
sortOrder: wikiPages.sortOrder,
|
||||
entityType: wikiPages.entityType,
|
||||
entityId: wikiPages.entityId, // Wichtig für Zuordnung
|
||||
entityUuid: wikiPages.entityUuid, // Wichtig für Zuordnung
|
||||
updatedAt: wikiPages.updatedAt,
|
||||
})
|
||||
.from(wikiPages)
|
||||
.where(eq(wikiPages.tenantId, user.tenant_id))
|
||||
.orderBy(asc(wikiPages.sortOrder), asc(wikiPages.title))
|
||||
|
||||
// Trennen in Standard-Seiten und Entity-Seiten
|
||||
const standardPages = allPages.filter(p => !p.entityType)
|
||||
const entityPages = allPages.filter(p => p.entityType)
|
||||
|
||||
const virtualNodes: any[] = []
|
||||
|
||||
// 2. Virtuelle Ordner generieren
|
||||
// Wir iterieren durch unsere Config (Kunden, Projekte...)
|
||||
await Promise.all(Object.entries(ENTITY_CONFIG).map(async ([typeKey, config]) => {
|
||||
|
||||
// Haben wir überhaupt Notizen für diesen Typ?
|
||||
const pagesForType = entityPages.filter(p => p.entityType === typeKey)
|
||||
if (pagesForType.length === 0) return
|
||||
|
||||
// IDs sammeln, um Namen aus der DB zu holen
|
||||
// Wir unterscheiden zwischen ID (int) und UUID
|
||||
let entities: any[] = []
|
||||
|
||||
if (config.idField === 'id') {
|
||||
const ids = [...new Set(pagesForType.map(p => p.entityId).filter((id): id is number => id !== null))]
|
||||
if (ids.length > 0) {
|
||||
//@ts-ignore - Drizzle Typisierung bei dynamischen Tables ist tricky
|
||||
entities = await server.db.select({ id: config.table.id, label: config.labelField })
|
||||
.from(config.table)
|
||||
//@ts-ignore
|
||||
.where(inArray(config.table.id, ids))
|
||||
}
|
||||
} else {
|
||||
// Falls UUID genutzt wird (z.B. IoT Devices)
|
||||
const uuids = [...new Set(pagesForType.map(p => p.entityUuid).filter((uuid): uuid is string => uuid !== null))]
|
||||
if (uuids.length > 0) {
|
||||
//@ts-ignore
|
||||
entities = await server.db.select({ id: config.table.id, label: config.labelField })
|
||||
.from(config.table)
|
||||
//@ts-ignore
|
||||
.where(inArray(config.table.id, uuids))
|
||||
}
|
||||
}
|
||||
|
||||
if (entities.length === 0) return
|
||||
|
||||
// 3. Virtuellen Root Ordner erstellen (z.B. "Kunden")
|
||||
const rootId = `virtual-root-${typeKey}`
|
||||
virtualNodes.push({
|
||||
id: rootId,
|
||||
parentId: null, // Ganz oben im Baum
|
||||
title: config.rootLabel,
|
||||
isFolder: true,
|
||||
isVirtual: true, // Flag fürs Frontend (read-only Folder)
|
||||
sortOrder: 1000 // Ganz unten anzeigen
|
||||
})
|
||||
|
||||
// 4. Virtuelle Entity Ordner erstellen (z.B. "Müller GmbH")
|
||||
entities.forEach(entity => {
|
||||
const entityNodeId = `virtual-entity-${typeKey}-${entity.id}`
|
||||
|
||||
virtualNodes.push({
|
||||
id: entityNodeId,
|
||||
parentId: rootId,
|
||||
title: entity.label || 'Unbekannt',
|
||||
isFolder: true,
|
||||
isVirtual: true,
|
||||
sortOrder: 0
|
||||
})
|
||||
|
||||
// 5. Die echten Notizen verschieben
|
||||
// Wir suchen alle Notizen, die zu dieser Entity gehören
|
||||
const myPages = pagesForType.filter(p =>
|
||||
(config.idField === 'id' && p.entityId === entity.id) ||
|
||||
(config.idField === 'uuid' && p.entityUuid === entity.id)
|
||||
)
|
||||
|
||||
myPages.forEach(page => {
|
||||
// Nur Root-Notizen der Entity verschieben.
|
||||
// Sub-Pages bleiben wo sie sind (parentId zeigt ja schon auf die richtige Seite)
|
||||
if (!page.parentId) {
|
||||
// Wir modifizieren das Objekt für die Response (nicht in der DB!)
|
||||
// Wir müssen es clonen, sonst ändern wir es für alle Referenzen
|
||||
const pageClone = { ...page }
|
||||
pageClone.parentId = entityNodeId
|
||||
virtualNodes.push(pageClone)
|
||||
} else {
|
||||
// Sub-Pages einfach so hinzufügen
|
||||
virtualNodes.push(page)
|
||||
}
|
||||
})
|
||||
})
|
||||
}))
|
||||
|
||||
// Ergebnis: Normale Seiten + Virtuelle Struktur
|
||||
return [...standardPages, ...virtualNodes]
|
||||
})
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// 2. GET /wiki/:id
|
||||
// Lädt EINEN Eintrag komplett MIT Content
|
||||
// ---------------------------------------------------------
|
||||
server.get<{ Params: { id: string } }>("/wiki/:id", async (req, reply) => {
|
||||
const user = req.user
|
||||
const { id } = req.params
|
||||
|
||||
const page = await server.db.query.wikiPages.findFirst({
|
||||
where: and(
|
||||
eq(wikiPages.id, id),
|
||||
eq(wikiPages.tenantId, user.tenant_id)
|
||||
),
|
||||
with: {
|
||||
author: {
|
||||
columns: { id: true } // Name falls vorhanden
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if (!page) return reply.code(404).send({ error: "Page not found" })
|
||||
return page
|
||||
})
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// 3. POST /wiki
|
||||
// Erstellt neuen Eintrag
|
||||
// ---------------------------------------------------------
|
||||
server.post<{ Body: WikiCreateBody }>("/wiki", async (req, reply) => {
|
||||
const user = req.user
|
||||
const body = req.body
|
||||
|
||||
if (!body.title) return reply.code(400).send({ error: "Title required" })
|
||||
|
||||
const hasEntity = !!body.entityType
|
||||
|
||||
const [newPage] = await server.db
|
||||
.insert(wikiPages)
|
||||
.values({
|
||||
tenantId: user.tenant_id,
|
||||
title: body.title,
|
||||
parentId: body.parentId || null,
|
||||
isFolder: body.isFolder ?? false,
|
||||
entityType: hasEntity ? body.entityType : null,
|
||||
entityId: hasEntity && body.entityId ? body.entityId : null,
|
||||
entityUuid: hasEntity && body.entityUuid ? body.entityUuid : null,
|
||||
//@ts-ignore
|
||||
createdBy: user.id,
|
||||
//@ts-ignore
|
||||
updatedBy: user.id
|
||||
})
|
||||
.returning()
|
||||
|
||||
return newPage
|
||||
})
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// 4. PATCH /wiki/:id
|
||||
// Universal-Update
|
||||
// ---------------------------------------------------------
|
||||
server.patch<{ Params: { id: string }; Body: WikiUpdateBody }>(
|
||||
"/wiki/:id",
|
||||
async (req, reply) => {
|
||||
const user = req.user
|
||||
const { id } = req.params
|
||||
const body = req.body
|
||||
|
||||
const existing = await server.db.query.wikiPages.findFirst({
|
||||
where: and(eq(wikiPages.id, id), eq(wikiPages.tenantId, user.tenant_id)),
|
||||
columns: { id: true }
|
||||
})
|
||||
|
||||
if (!existing) return reply.code(404).send({ error: "Not found" })
|
||||
|
||||
const [updatedPage] = await server.db
|
||||
.update(wikiPages)
|
||||
.set({
|
||||
title: body.title,
|
||||
content: body.content,
|
||||
parentId: body.parentId,
|
||||
sortOrder: body.sortOrder,
|
||||
isFolder: body.isFolder,
|
||||
updatedAt: new Date(),
|
||||
//@ts-ignore
|
||||
updatedBy: user.id
|
||||
})
|
||||
.where(eq(wikiPages.id, id))
|
||||
.returning()
|
||||
|
||||
return updatedPage
|
||||
}
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// 5. DELETE /wiki/:id
|
||||
// Löscht Eintrag
|
||||
// ---------------------------------------------------------
|
||||
server.delete<{ Params: { id: string } }>("/wiki/:id", async (req, reply) => {
|
||||
const user = req.user
|
||||
const { id } = req.params
|
||||
|
||||
const result = await server.db
|
||||
.delete(wikiPages)
|
||||
.where(and(
|
||||
eq(wikiPages.id, id),
|
||||
eq(wikiPages.tenantId, user.tenant_id)
|
||||
))
|
||||
.returning({ id: wikiPages.id })
|
||||
|
||||
if (result.length === 0) return reply.code(404).send({ error: "Not found" })
|
||||
|
||||
return { success: true, deletedId: result[0].id }
|
||||
})
|
||||
}
|
||||
3512
backend/src/utils/deBankBics.ts
Normal file
3512
backend/src/utils/deBankBics.ts
Normal file
File diff suppressed because it is too large
Load Diff
3515
backend/src/utils/deBankCodes.ts
Normal file
3515
backend/src/utils/deBankCodes.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,5 @@
|
||||
|
||||
import {diffTranslations} from "./diffTranslations";
|
||||
import {diffTranslations, getDiffLabel} from "./diffTranslations";
|
||||
|
||||
export type DiffChange = {
|
||||
key: string;
|
||||
@@ -43,8 +43,6 @@ export function diffObjects(
|
||||
const oldVal = obj1?.[key];
|
||||
const newVal = obj2?.[key];
|
||||
|
||||
console.log(oldVal, key, newVal);
|
||||
|
||||
// Wenn beides null/undefined → ignorieren
|
||||
if (
|
||||
(oldVal === null || oldVal === undefined || oldVal === "" || JSON.stringify(oldVal) === "[]") &&
|
||||
@@ -72,12 +70,11 @@ export function diffObjects(
|
||||
if (type === "unchanged") continue;
|
||||
|
||||
const translation = diffTranslations[key];
|
||||
let label = key;
|
||||
let label = getDiffLabel(key);
|
||||
let resolvedOld = oldVal;
|
||||
let resolvedNew = newVal;
|
||||
|
||||
if (translation) {
|
||||
label = translation.label;
|
||||
if (translation.resolve) {
|
||||
const { oldVal: resOld, newVal: resNew } = translation.resolve(
|
||||
oldVal,
|
||||
|
||||
@@ -6,6 +6,149 @@ type ValueResolver = (
|
||||
ctx?: Record<string, any>
|
||||
) => { oldVal: any; newVal: any };
|
||||
|
||||
const TOKEN_TRANSLATIONS: Record<string, string> = {
|
||||
account: "Konto",
|
||||
active: "Aktiv",
|
||||
address: "Adresse",
|
||||
amount: "Betrag",
|
||||
archived: "Archiviert",
|
||||
article: "Artikel",
|
||||
bank: "Bank",
|
||||
barcode: "Barcode",
|
||||
birthday: "Geburtstag",
|
||||
category: "Kategorie",
|
||||
city: "Ort",
|
||||
color: "Farbe",
|
||||
comment: "Kommentar",
|
||||
company: "Firma",
|
||||
contact: "Kontakt",
|
||||
contract: "Vertrag",
|
||||
cost: "Kosten",
|
||||
country: "Land",
|
||||
created: "Erstellt",
|
||||
customer: "Kunde",
|
||||
date: "Datum",
|
||||
default: "Standard",
|
||||
deleted: "Gelöscht",
|
||||
delivery: "Lieferung",
|
||||
description: "Beschreibung",
|
||||
document: "Dokument",
|
||||
driver: "Fahrer",
|
||||
due: "Fällig",
|
||||
duration: "Dauer",
|
||||
email: "E-Mail",
|
||||
employee: "Mitarbeiter",
|
||||
enabled: "Aktiviert",
|
||||
end: "Ende",
|
||||
event: "Ereignis",
|
||||
file: "Datei",
|
||||
first: "Vorname",
|
||||
fixed: "Festgeschrieben",
|
||||
group: "Gruppe",
|
||||
hour: "Stunde",
|
||||
iban: "IBAN",
|
||||
id: "ID",
|
||||
incoming: "Eingang",
|
||||
invoice: "Rechnung",
|
||||
item: "Eintrag",
|
||||
language: "Sprache",
|
||||
last: "Nachname",
|
||||
license: "Kennzeichen",
|
||||
link: "Link",
|
||||
list: "Liste",
|
||||
location: "Standort",
|
||||
manufacturer: "Hersteller",
|
||||
markup: "Verkaufsaufschlag",
|
||||
message: "Nachricht",
|
||||
mobile: "Mobil",
|
||||
name: "Name",
|
||||
note: "Notiz",
|
||||
notes: "Notizen",
|
||||
number: "Nummer",
|
||||
order: "Bestellung",
|
||||
own: "Eigen",
|
||||
payment: "Zahlung",
|
||||
phone: "Telefon",
|
||||
plant: "Objekt",
|
||||
postal: "Post",
|
||||
price: "Preis",
|
||||
percentage: "%",
|
||||
product: "Produkt",
|
||||
profile: "Profil",
|
||||
project: "Projekt",
|
||||
purchase: "Kauf",
|
||||
quantity: "Menge",
|
||||
rate: "Satz",
|
||||
reference: "Referenz",
|
||||
requisition: "Anfrage",
|
||||
resource: "Ressource",
|
||||
role: "Rolle",
|
||||
serial: "Serien",
|
||||
service: "Leistung",
|
||||
selling: "Verkauf",
|
||||
sellign: "Verkauf",
|
||||
space: "Lagerplatz",
|
||||
start: "Start",
|
||||
statement: "Buchung",
|
||||
status: "Status",
|
||||
street: "Straße",
|
||||
surcharge: "Aufschlag",
|
||||
tax: "Steuer",
|
||||
tel: "Telefon",
|
||||
tenant: "Mandant",
|
||||
time: "Zeit",
|
||||
title: "Titel",
|
||||
total: "Gesamt",
|
||||
type: "Typ",
|
||||
unit: "Einheit",
|
||||
updated: "Aktualisiert",
|
||||
user: "Benutzer",
|
||||
ustid: "USt-ID",
|
||||
value: "Wert",
|
||||
vendor: "Lieferant",
|
||||
vehicle: "Fahrzeug",
|
||||
weekly: "Wöchentlich",
|
||||
working: "Arbeits",
|
||||
zip: "Postleitzahl",
|
||||
composed: "Zusammensetzung",
|
||||
material: "Material",
|
||||
worker: "Arbeit",
|
||||
};
|
||||
|
||||
function tokenizeKey(key: string): string[] {
|
||||
return key
|
||||
.replace(/([a-z0-9])([A-Z])/g, "$1_$2")
|
||||
.replace(/[^a-zA-Z0-9]+/g, "_")
|
||||
.split("_")
|
||||
.filter(Boolean)
|
||||
.map((p) => p.toLowerCase());
|
||||
}
|
||||
|
||||
function capitalize(word: string) {
|
||||
if (!word) return word;
|
||||
return word.charAt(0).toUpperCase() + word.slice(1);
|
||||
}
|
||||
|
||||
function fallbackLabelFromKey(key: string): string {
|
||||
const parts = tokenizeKey(key);
|
||||
if (!parts.length) return key;
|
||||
|
||||
if (parts.length > 1 && parts[parts.length - 1] === "id") {
|
||||
const base = parts.slice(0, -1).map((p) => TOKEN_TRANSLATIONS[p] || capitalize(p)).join(" ");
|
||||
return `${base} ID`.trim();
|
||||
}
|
||||
|
||||
return parts
|
||||
.map((p) => TOKEN_TRANSLATIONS[p] || capitalize(p))
|
||||
.join(" ")
|
||||
.replace(/\s+/g, " ")
|
||||
.trim();
|
||||
}
|
||||
|
||||
export function getDiffLabel(key: string): string {
|
||||
return diffTranslations[key]?.label || fallbackLabelFromKey(key);
|
||||
}
|
||||
|
||||
export const diffTranslations: Record<
|
||||
string,
|
||||
{ label: string; resolve?: ValueResolver }
|
||||
@@ -44,7 +187,7 @@ export const diffTranslations: Record<
|
||||
}),
|
||||
},
|
||||
resources: {
|
||||
label: "Resourcen",
|
||||
label: "Ressourcen",
|
||||
resolve: (o, n) => ({
|
||||
oldVal: Array.isArray(o) ? o.map((i: any) => i.title).join(", ") : "-",
|
||||
newVal: Array.isArray(n) ? n.map((i: any) => i.title).join(", ") : "-",
|
||||
@@ -86,10 +229,18 @@ export const diffTranslations: Record<
|
||||
approved: { label: "Genehmigt" },
|
||||
manufacturer: { label: "Hersteller" },
|
||||
purchasePrice: { label: "Kaufpreis" },
|
||||
markupPercentage: { label: "Verkaufsaufschlag in %" },
|
||||
markup_percentage: { label: "Verkaufsaufschlag in %" },
|
||||
sellingPrice: { label: "Verkaufspreis" },
|
||||
selling_price: { label: "Verkaufspreis" },
|
||||
sellingPriceComposed: { label: "Verkaufspreis Zusammensetzung" },
|
||||
purchaseDate: { label: "Kaufdatum" },
|
||||
serialNumber: { label: "Seriennummer" },
|
||||
customerInventoryId: { label: "Kundeninventar-ID" },
|
||||
customerinventoryitems: { label: "Kundeninventar" },
|
||||
usePlanning: { label: "In Plantafel verwenden" },
|
||||
currentSpace: { label: "Lagerplatz" },
|
||||
customerspace: { label: "Kundenlagerplatz" },
|
||||
|
||||
customer: {
|
||||
label: "Kunde",
|
||||
@@ -108,6 +259,7 @@ export const diffTranslations: Record<
|
||||
|
||||
description: { label: "Beschreibung" },
|
||||
categorie: { label: "Kategorie" },
|
||||
category: { label: "Kategorie" },
|
||||
|
||||
profile: {
|
||||
label: "Mitarbeiter",
|
||||
@@ -147,6 +299,8 @@ export const diffTranslations: Record<
|
||||
},
|
||||
|
||||
projecttype: { label: "Projekttyp" },
|
||||
contracttype: { label: "Vertragstyp" },
|
||||
billingInterval: { label: "Abrechnungsintervall" },
|
||||
|
||||
fixed: {
|
||||
label: "Festgeschrieben",
|
||||
|
||||
@@ -1,327 +1,389 @@
|
||||
import xmlbuilder from "xmlbuilder";
|
||||
import dayjs from "dayjs";
|
||||
import isBetween from "dayjs/plugin/isBetween.js"
|
||||
import {BlobWriter, Data64URIReader, TextReader, TextWriter, ZipWriter} from "@zip.js/zip.js";
|
||||
import isBetween from "dayjs/plugin/isBetween.js";
|
||||
import { BlobWriter, Data64URIReader, TextReader, ZipWriter } from "@zip.js/zip.js";
|
||||
import { FastifyInstance } from "fastify";
|
||||
import { GetObjectCommand } from "@aws-sdk/client-s3";
|
||||
import { s3 } from "../s3";
|
||||
import { secrets } from "../secrets";
|
||||
dayjs.extend(isBetween)
|
||||
|
||||
const getCreatedDocumentTotal = (item) => {
|
||||
let totalNet = 0
|
||||
let total19 = 0
|
||||
let total7 = 0
|
||||
// Drizzle Core Imports
|
||||
import { eq, and, inArray, gte, lte, asc, aliasedTable } from "drizzle-orm";
|
||||
|
||||
item.rows.forEach(row => {
|
||||
// Tabellen Imports (keine Relations nötig!)
|
||||
import {
|
||||
statementallocations,
|
||||
createddocuments,
|
||||
incominginvoices,
|
||||
accounts,
|
||||
files,
|
||||
customers,
|
||||
vendors,
|
||||
bankaccounts,
|
||||
bankstatements,
|
||||
ownaccounts
|
||||
} from "../../../db/schema";
|
||||
|
||||
dayjs.extend(isBetween);
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// HELPER FUNCTIONS (Unverändert)
|
||||
// ---------------------------------------------------------
|
||||
|
||||
const getCreatedDocumentTotal = (item: any) => {
|
||||
let totalNet = 0;
|
||||
let total19:number = 0;
|
||||
let total7:number = 0;
|
||||
const rows = Array.isArray(item.rows) ? item.rows : [];
|
||||
rows.forEach((row: any) => {
|
||||
if (!['pagebreak', 'title', 'text'].includes(row.mode)) {
|
||||
let rowPrice = Number(Number(row.quantity) * Number(row.price) * (1 - Number(row.discountPercent) /100) ).toFixed(3)
|
||||
totalNet = totalNet + Number(rowPrice)
|
||||
|
||||
if(row.taxPercent === 19) {
|
||||
// @ts-ignore
|
||||
total19 = total19 + Number(rowPrice * 0.19)
|
||||
} else if(row.taxPercent === 7) {
|
||||
// @ts-ignore
|
||||
total7 = total7 + Number(rowPrice * 0.07)
|
||||
let rowPrice = Number(Number(row.quantity) * Number(row.price) * (1 - Number(row.discountPercent) / 100)).toFixed(3);
|
||||
totalNet = totalNet + Number(rowPrice);
|
||||
if (row.taxPercent === 19) total19 += Number(rowPrice) * Number(0.19);
|
||||
else if (row.taxPercent === 7) total7 += Number(rowPrice) * Number(0.07);
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
let totalGross = Number(totalNet.toFixed(2)) + Number(total19.toFixed(2)) + Number(total7.toFixed(2))
|
||||
|
||||
|
||||
|
||||
});
|
||||
return {
|
||||
totalNet: totalNet,
|
||||
total19: total19,
|
||||
total7: total7,
|
||||
totalGross: totalGross,
|
||||
}
|
||||
}
|
||||
totalNet, total19, total7,
|
||||
totalGross: Number(totalNet.toFixed(2)) + Number(total19.toFixed(2)) + Number(total7.toFixed(2))
|
||||
};
|
||||
};
|
||||
|
||||
const escapeString = (str) => {
|
||||
const escapeString = (str: string | null | undefined) => {
|
||||
return (str || "").replaceAll("\n", "").replaceAll(";", "").replaceAll(/\r/g, "").replaceAll(/"/g, "").replaceAll(/ü/g, "ue").replaceAll(/ä/g, "ae").replaceAll(/ö/g, "oe");
|
||||
};
|
||||
|
||||
str = (str ||"")
|
||||
.replaceAll("\n","")
|
||||
.replaceAll(";","")
|
||||
.replaceAll(/\r/g,"")
|
||||
.replaceAll(/"/g,"")
|
||||
.replaceAll(/ü/g,"ue")
|
||||
.replaceAll(/ä/g,"ae")
|
||||
.replaceAll(/ö/g,"oe")
|
||||
return str
|
||||
}
|
||||
const displayCurrency = (input: number, onlyAbs = false) => {
|
||||
return (onlyAbs ? Math.abs(input) : input).toFixed(2).replace(".", ",");
|
||||
};
|
||||
|
||||
const displayCurrency = (input, onlyAbs = false) => {
|
||||
// ---------------------------------------------------------
|
||||
// MAIN EXPORT FUNCTION
|
||||
// ---------------------------------------------------------
|
||||
|
||||
if(onlyAbs) {
|
||||
return Math.abs(input).toFixed(2).replace(".",",")
|
||||
} else {
|
||||
return input.toFixed(2).replace(".",",")
|
||||
}
|
||||
}
|
||||
|
||||
export async function buildExportZip(server: FastifyInstance, tenant: number, startDate: string, endDate: string, beraternr: string, mandantennr: string): Promise<Buffer> {
|
||||
export async function buildExportZip(
|
||||
server: FastifyInstance,
|
||||
tenantId: number,
|
||||
startDate: string,
|
||||
endDate: string,
|
||||
beraternr: string,
|
||||
mandantennr: string
|
||||
): Promise<Buffer> {
|
||||
|
||||
try {
|
||||
const zipFileWriter = new BlobWriter()
|
||||
const zipWriter = new ZipWriter(zipFileWriter)
|
||||
const zipFileWriter = new BlobWriter();
|
||||
const zipWriter = new ZipWriter(zipFileWriter);
|
||||
|
||||
// Header Infos
|
||||
const dateNowStr = dayjs().format("YYYYMMDDHHmmssSSS");
|
||||
const startDateFmt = dayjs(startDate).format("YYYYMMDD");
|
||||
const endDateFmt = dayjs(endDate).format("YYYYMMDD");
|
||||
|
||||
let header = `"EXTF";700;21;"Buchungsstapel";13;${dateNowStr};;"FE";"Florian Federspiel";;${beraternr};${mandantennr};20250101;4;${startDateFmt};${endDateFmt};"Buchungsstapel";"FF";1;0;1;"EUR";;"";;;"03";;;"";""`;
|
||||
let colHeaders = `Umsatz;Soll-/Haben-Kennzeichen;WKZ Umsatz;Kurs;Basisumsatz;WKZ Basisumsatz;Konto;Gegenkonto;BU-Schluessel;Belegdatum;Belegfeld 1;Belegfeld 2;Skonto;Buchungstext;Postensperre;Diverse Adressnummer;Geschaeftspartnerbank;Sachverhalt;Zinssperre;Beleglink;Beleginfo - Art 1;Beleginfo - Inhalt 1;Beleginfo - Art 2;Beleginfo - Inhalt 2;Beleginfo - Art 3;Beleginfo - Inhalt 3;Beleginfo - Art 4;Beleginfo - Inhalt 4;Beleginfo - Art 5;Beleginfo - Inhalt 5;Beleginfo - Art 6;Beleginfo - Inhalt 6;Beleginfo - Art 7;Beleginfo - Inhalt 7;Beleginfo - Art 8;Beleginfo - Inhalt 8;KOST1 - Kostenstelle;KOST2 - Kostenstelle;Kost Menge;EU-Land u. USt-IdNr. (Bestimmung);EU-Steuersatz (Bestimmung);Abw. Versteuerungsart;Sachverhalt L+L;Funktionsergaenzung L+L;BU 49 Hauptfunktionstyp;BU 49 Hauptfunktionsnummer;BU 49 Funktionsergaenzung;Zusatzinformation - Art 1;Zusatzinformation - Inhalt 1;Zusatzinformation - Art 2;Zusatzinformation - Inhalt 2;Zusatzinformation - Art 3;Zusatzinformation - Inhalt 3;Zusatzinformation - Art 4;Zusatzinformation - Inhalt 4;Zusatzinformation - Art 5;Zusatzinformation - Inhalt 5;Zusatzinformation - Art 6;Zusatzinformation - Inhalt 6;Zusatzinformation - Art 7;Zusatzinformation - Inhalt 7;Zusatzinformation - Art 8;Zusatzinformation - Inhalt 8;Zusatzinformation - Art 9;Zusatzinformation - Inhalt 9;Zusatzinformation - Art 10;Zusatzinformation - Inhalt 10;Zusatzinformation - Art 11;Zusatzinformation - Inhalt 11;Zusatzinformation - Art 12;Zusatzinformation - Inhalt 12;Zusatzinformation - Art 13;Zusatzinformation - Inhalt 13;Zusatzinformation - Art 14;Zusatzinformation - Inhalt 14;Zusatzinformation - Art 15;Zusatzinformation - Inhalt 15;Zusatzinformation - Art 16;Zusatzinformation - Inhalt 16;Zusatzinformation - Art 17;Zusatzinformation - Inhalt 17;Zusatzinformation - Art 18;Zusatzinformation - Inhalt 18;Zusatzinformation - Art 19;Zusatzinformation - Inhalt 19;Zusatzinformation - Art 20;Zusatzinformation - Inhalt 
20;Stueck;Gewicht;Zahlweise;Zahlweise;Veranlagungsjahr;Zugeordnete Faelligkeit;Skontotyp;Auftragsnummer;Buchungstyp;USt-Schluessel (Anzahlungen);EU-Mitgliedstaat (Anzahlungen);Sachverhalt L+L (Anzahlungen);EU-Steuersatz (Anzahlungen);Erloeskonto (Anzahlungen);Herkunft-Kz;Leerfeld;KOST-Datum;SEPA-Mandatsreferenz;Skontosperre;Gesellschaftername;Beteiligtennummer;Identifikationsnummer;Zeichnernummer;Postensperre bis;Bezeichnung SoBil-Sachverhalt;Kennzeichen SoBil-Buchung;Festschreibung;Leistungsdatum;Datum Zuord. Steuerperiode;Faelligkeit;Generalumkehr;Steuersatz;Land;Abrechnungsreferenz;BVV-Position;EU-Mitgliedstaat u. UStID(Ursprung);EU-Steuersatz(Ursprung);Abw. Skontokonto`;
|
||||
|
||||
//Basic Information
|
||||
// ---------------------------------------------------------
|
||||
// 1. DATEN LADEN (CORE API SELECT & JOIN)
|
||||
// ---------------------------------------------------------
|
||||
|
||||
let header = `"EXTF";700;21;"Buchungsstapel";13;${dayjs().format("YYYYMMDDHHmmssSSS")};;"FE";"Florian Federspiel";;${beraternr};${mandantennr};20250101;4;${dayjs(startDate).format("YYYYMMDD")};${dayjs(endDate).format("YYYYMMDD")};"Buchungsstapel";"FF";1;0;1;"EUR";;"";;;"03";;;"";""`
|
||||
// --- A) Created Documents ---
|
||||
// Wir brauchen das Dokument und den Kunden dazu
|
||||
const cdRaw = await server.db.select({
|
||||
doc: createddocuments,
|
||||
customer: customers
|
||||
})
|
||||
.from(createddocuments)
|
||||
.leftJoin(customers, eq(createddocuments.customer, customers.id))
|
||||
.where(and(
|
||||
eq(createddocuments.tenant, tenantId),
|
||||
inArray(createddocuments.type, ["invoices", "advanceInvoices", "cancellationInvoices"]),
|
||||
eq(createddocuments.state, "Gebucht"),
|
||||
eq(createddocuments.archived, false),
|
||||
gte(createddocuments.documentDate, startDate),
|
||||
lte(createddocuments.documentDate, endDate)
|
||||
));
|
||||
|
||||
let colHeaders = `Umsatz;Soll-/Haben-Kennzeichen;WKZ Umsatz;Kurs;Basisumsatz;WKZ Basisumsatz;Konto;Gegenkonto;BU-Schluessel;Belegdatum;Belegfeld 1;Belegfeld 2;Skonto;Buchungstext;Postensperre;Diverse Adressnummer;Geschaeftspartnerbank;Sachverhalt;Zinssperre;Beleglink;Beleginfo - Art 1;Beleginfo - Inhalt 1;Beleginfo - Art 2;Beleginfo - Inhalt 2;Beleginfo - Art 3;Beleginfo - Inhalt 3;Beleginfo - Art 4;Beleginfo - Inhalt 4;Beleginfo - Art 5;Beleginfo - Inhalt 5;Beleginfo - Art 6;Beleginfo - Inhalt 6;Beleginfo - Art 7;Beleginfo - Inhalt 7;Beleginfo - Art 8;Beleginfo - Inhalt 8;KOST1 - Kostenstelle;KOST2 - Kostenstelle;Kost Menge;EU-Land u. USt-IdNr. (Bestimmung);EU-Steuersatz (Bestimmung);Abw. Versteuerungsart;Sachverhalt L+L;Funktionsergaenzung L+L;BU 49 Hauptfunktionstyp;BU 49 Hauptfunktionsnummer;BU 49 Funktionsergaenzung;Zusatzinformation - Art 1;Zusatzinformation - Inhalt 1;Zusatzinformation - Art 2;Zusatzinformation - Inhalt 2;Zusatzinformation - Art 3;Zusatzinformation - Inhalt 3;Zusatzinformation - Art 4;Zusatzinformation - Inhalt 4;Zusatzinformation - Art 5;Zusatzinformation - Inhalt 5;Zusatzinformation - Art 6;Zusatzinformation - Inhalt 6;Zusatzinformation - Art 7;Zusatzinformation - Inhalt 7;Zusatzinformation - Art 8;Zusatzinformation - Inhalt 8;Zusatzinformation - Art 9;Zusatzinformation - Inhalt 9;Zusatzinformation - Art 10;Zusatzinformation - Inhalt 10;Zusatzinformation - Art 11;Zusatzinformation - Inhalt 11;Zusatzinformation - Art 12;Zusatzinformation - Inhalt 12;Zusatzinformation - Art 13;Zusatzinformation - Inhalt 13;Zusatzinformation - Art 14;Zusatzinformation - Inhalt 14;Zusatzinformation - Art 15;Zusatzinformation - Inhalt 15;Zusatzinformation - Art 16;Zusatzinformation - Inhalt 16;Zusatzinformation - Art 17;Zusatzinformation - Inhalt 17;Zusatzinformation - Art 18;Zusatzinformation - Inhalt 18;Zusatzinformation - Art 19;Zusatzinformation - Inhalt 19;Zusatzinformation - Art 20;Zusatzinformation - Inhalt 
20;Stueck;Gewicht;Zahlweise;Zahlweise;Veranlagungsjahr;Zugeordnete Faelligkeit;Skontotyp;Auftragsnummer;Buchungstyp;USt-Schluessel (Anzahlungen);EU-Mitgliedstaat (Anzahlungen);Sachverhalt L+L (Anzahlungen);EU-Steuersatz (Anzahlungen);Erloeskonto (Anzahlungen);Herkunft-Kz;Leerfeld;KOST-Datum;SEPA-Mandatsreferenz;Skontosperre;Gesellschaftername;Beteiligtennummer;Identifikationsnummer;Zeichnernummer;Postensperre bis;Bezeichnung SoBil-Sachverhalt;Kennzeichen SoBil-Buchung;Festschreibung;Leistungsdatum;Datum Zuord. Steuerperiode;Faelligkeit;Generalumkehr;Steuersatz;Land;Abrechnungsreferenz;BVV-Position;EU-Mitgliedstaat u. UStID(Ursprung);EU-Steuersatz(Ursprung);Abw. Skontokonto`
|
||||
// Mapping: Flat Result -> Nested Object (damit der Rest des Codes gleich bleiben kann)
|
||||
const createddocumentsList = cdRaw.map(r => ({
|
||||
...r.doc,
|
||||
customer: r.customer
|
||||
}));
|
||||
|
||||
//Get Bookings
|
||||
const {data:statementallocationsRaw,error: statementallocationsError} = await server.supabase.from("statementallocations").select('*, account(*), bs_id(*, account(*)), cd_id(*,customer(*)), ii_id(*, vendor(*)), vendor(*), customer(*), ownaccount(*)').eq("tenant", tenant);
|
||||
let {data:createddocumentsRaw,error: createddocumentsError} = await server.supabase.from("createddocuments").select('*,customer(*)').eq("tenant", tenant).in("type",["invoices","advanceInvoices","cancellationInvoices"]).eq("state","Gebucht").eq("archived",false)
|
||||
let {data:incominginvoicesRaw,error: incominginvoicesError} = await server.supabase.from("incominginvoices").select('*, vendor(*)').eq("tenant", tenant).eq("state","Gebucht").eq("archived",false)
|
||||
const {data:accounts} = await server.supabase.from("accounts").select()
|
||||
const {data:tenantData} = await server.supabase.from("tenants").select().eq("id",tenant).single()
|
||||
// --- B) Incoming Invoices ---
|
||||
// Wir brauchen die Rechnung und den Lieferanten
|
||||
const iiRaw = await server.db.select({
|
||||
inv: incominginvoices,
|
||||
vendor: vendors
|
||||
})
|
||||
.from(incominginvoices)
|
||||
.leftJoin(vendors, eq(incominginvoices.vendor, vendors.id))
|
||||
.where(and(
|
||||
eq(incominginvoices.tenant, tenantId),
|
||||
eq(incominginvoices.state, "Gebucht"),
|
||||
eq(incominginvoices.archived, false),
|
||||
gte(incominginvoices.date, startDate),
|
||||
lte(incominginvoices.date, endDate)
|
||||
));
|
||||
|
||||
let createddocuments = createddocumentsRaw.filter(i => dayjs(i.documentDate).isBetween(startDate,endDate,"day","[]"))
|
||||
let incominginvoices = incominginvoicesRaw.filter(i => dayjs(i.date).isBetween(startDate,endDate,"day","[]"))
|
||||
let statementallocations = statementallocationsRaw.filter(i => dayjs(i.bs_id.date).isBetween(startDate,endDate,"day","[]"))
|
||||
const incominginvoicesList = iiRaw.map(r => ({
|
||||
...r.inv,
|
||||
vendor: r.vendor
|
||||
}));
|
||||
|
||||
// --- C) Statement Allocations ---
|
||||
// Das ist der komplexeste Teil. Wir müssen Tabellen aliasen, da wir z.B. Customers doppelt joinen könnten
|
||||
// (Einmal via CreatedDocument, einmal direkt an der Allocation).
|
||||
|
||||
const {data:filesCreateddocuments, error: filesErrorCD} = await server.supabase.from("files").select().eq("tenant",tenant).or(`createddocument.in.(${createddocuments.map(i => i.id).join(",")})`)
|
||||
const {data:filesIncomingInvoices, error: filesErrorII} = await server.supabase.from("files").select().eq("tenant",tenant).or(`incominginvoice.in.(${incominginvoices.map(i => i.id).join(",")})`)
|
||||
const CdCustomer = aliasedTable(customers, "cd_customer");
|
||||
const IiVendor = aliasedTable(vendors, "ii_vendor");
|
||||
|
||||
const downloadFile = async (bucketName, filePath, downloadFilePath,fileId) => {
|
||||
const allocRaw = await server.db.select({
|
||||
allocation: statementallocations,
|
||||
bs: bankstatements,
|
||||
ba: bankaccounts,
|
||||
cd: createddocuments,
|
||||
cd_cust: CdCustomer,
|
||||
ii: incominginvoices,
|
||||
ii_vend: IiVendor,
|
||||
acc: accounts,
|
||||
direct_vend: vendors, // Direkte Zuordnung an Kreditor
|
||||
direct_cust: customers, // Direkte Zuordnung an Debitor
|
||||
own: ownaccounts
|
||||
})
|
||||
.from(statementallocations)
|
||||
// JOIN 1: Bankstatement (Pflicht, für Datum Filter)
|
||||
.innerJoin(bankstatements, eq(statementallocations.bankstatement, bankstatements.id))
|
||||
// JOIN 2: Bankaccount (für DATEV Nummer)
|
||||
.leftJoin(bankaccounts, eq(bankstatements.account, bankaccounts.id))
|
||||
|
||||
console.log(filePath)
|
||||
// JOIN 3: Ausgangsrechnung & deren Kunde
|
||||
.leftJoin(createddocuments, eq(statementallocations.createddocument, createddocuments.id))
|
||||
.leftJoin(CdCustomer, eq(createddocuments.customer, CdCustomer.id))
|
||||
|
||||
// JOIN 4: Eingangsrechnung & deren Lieferant
|
||||
.leftJoin(incominginvoices, eq(statementallocations.incominginvoice, incominginvoices.id))
|
||||
.leftJoin(IiVendor, eq(incominginvoices.vendor, IiVendor.id))
|
||||
|
||||
// JOIN 5: Direkte Zuordnungen
|
||||
.leftJoin(accounts, eq(statementallocations.account, accounts.id))
|
||||
.leftJoin(vendors, eq(statementallocations.vendor, vendors.id))
|
||||
.leftJoin(customers, eq(statementallocations.customer, customers.id))
|
||||
.leftJoin(ownaccounts, eq(statementallocations.ownaccount, ownaccounts.id))
|
||||
|
||||
.where(and(
|
||||
eq(statementallocations.tenant, tenantId),
|
||||
eq(statementallocations.archived, false),
|
||||
// Datum Filter direkt auf dem Bankstatement
|
||||
gte(bankstatements.date, startDate),
|
||||
lte(bankstatements.date, endDate)
|
||||
));
|
||||
|
||||
// Mapping: Wir bauen das komplexe Objekt nach, das die CSV Logik erwartet
|
||||
const statementallocationsList = allocRaw.map(r => ({
|
||||
...r.allocation,
|
||||
bankstatement: {
|
||||
...r.bs,
|
||||
account: r.ba // Nesting für bs.account.datevNumber
|
||||
},
|
||||
createddocument: r.cd ? {
|
||||
...r.cd,
|
||||
customer: r.cd_cust
|
||||
} : null,
|
||||
incominginvoice: r.ii ? {
|
||||
...r.ii,
|
||||
vendor: r.ii_vend
|
||||
} : null,
|
||||
account: r.acc,
|
||||
vendor: r.direct_vend,
|
||||
customer: r.direct_cust,
|
||||
ownaccount: r.own
|
||||
}));
|
||||
|
||||
// --- D) Stammdaten Accounts ---
|
||||
const accountsList = await server.db.select().from(accounts);
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// 2. FILES LADEN
|
||||
// ---------------------------------------------------------
|
||||
|
||||
// IDs sammeln für IN (...) Abfragen
|
||||
const cdIds = createddocumentsList.map(i => i.id);
|
||||
const iiIds = incominginvoicesList.map(i => i.id);
|
||||
|
||||
let filesCreateddocuments: any[] = [];
|
||||
if (cdIds.length > 0) {
|
||||
filesCreateddocuments = await server.db.select().from(files).where(and(
|
||||
eq(files.tenant, tenantId),
|
||||
inArray(files.createddocument, cdIds),
|
||||
eq(files.archived, false)
|
||||
));
|
||||
}
|
||||
|
||||
let filesIncomingInvoices: any[] = [];
|
||||
if (iiIds.length > 0) {
|
||||
filesIncomingInvoices = await server.db.select().from(files).where(and(
|
||||
eq(files.tenant, tenantId),
|
||||
inArray(files.incominginvoice, iiIds),
|
||||
eq(files.archived, false)
|
||||
));
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// 3. DOWNLOAD & ZIP
|
||||
// ---------------------------------------------------------
|
||||
|
||||
const downloadFile = async (filePath: string, fileId: string) => {
|
||||
try {
|
||||
const command = new GetObjectCommand({
|
||||
Bucket: secrets.S3_BUCKET,
|
||||
Key: filePath,
|
||||
})
|
||||
|
||||
const { Body, ContentType } = await s3.send(command)
|
||||
|
||||
const chunks: any[] = []
|
||||
});
|
||||
const { Body } = await s3.send(command);
|
||||
if (!Body) return;
|
||||
const chunks: any[] = [];
|
||||
// @ts-ignore
|
||||
for await (const chunk of Body) {
|
||||
chunks.push(chunk)
|
||||
for await (const chunk of Body) chunks.push(chunk);
|
||||
const buffer = Buffer.concat(chunks);
|
||||
const dataURL = `data:application/pdf;base64,${buffer.toString('base64')}`;
|
||||
const dataURLReader = new Data64URIReader(dataURL);
|
||||
const ext = filePath.includes('.') ? filePath.split(".").pop() : "pdf";
|
||||
await zipWriter.add(`${fileId}.${ext}`, dataURLReader);
|
||||
} catch (e) {
|
||||
console.error(`Error downloading file ${fileId}`, e);
|
||||
}
|
||||
const buffer = Buffer.concat(chunks)
|
||||
|
||||
const dataURL = `data:application/pdf;base64,${buffer.toString('base64')}`
|
||||
|
||||
const dataURLReader = new Data64URIReader(dataURL)
|
||||
await zipWriter.add(`${fileId}.${downloadFilePath.split(".").pop()}`, dataURLReader)
|
||||
|
||||
//await fs.writeFile(`./output/${fileId}.${downloadFilePath.split(".").pop()}`, buffer, () => {});
|
||||
console.log(`File added to Zip`);
|
||||
};
|
||||
|
||||
for (const file of filesCreateddocuments) {
|
||||
await downloadFile("filesdev",file.path,`./output/files/${file.path.split("/")[file.path.split("/").length - 1]}`,file.id);
|
||||
}
|
||||
for (const file of filesIncomingInvoices) {
|
||||
await downloadFile("filesdev",file.path,`./output/files/${file.path.split("/")[file.path.split("/").length - 1]}`,file.id);
|
||||
for (const file of filesCreateddocuments) if(file.path) await downloadFile(file.path, file.id);
|
||||
for (const file of filesIncomingInvoices) if(file.path) await downloadFile(file.path, file.id);
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// 4. CSV GENERIERUNG (Logic ist gleich geblieben)
|
||||
// ---------------------------------------------------------
|
||||
|
||||
let bookingLines: string[] = [];
|
||||
|
||||
// AR
|
||||
createddocumentsList.forEach(cd => {
|
||||
let file = filesCreateddocuments.find(i => i.createddocument === cd.id);
|
||||
let total = 0;
|
||||
let typeString = "";
|
||||
|
||||
if(cd.type === "invoices") {
|
||||
total = getCreatedDocumentTotal(cd).totalGross;
|
||||
typeString = "AR";
|
||||
} else if(cd.type === "advanceInvoices") {
|
||||
total = getCreatedDocumentTotal(cd).totalGross;
|
||||
typeString = "ARAbschlag";
|
||||
} else if(cd.type === "cancellationInvoices") {
|
||||
total = getCreatedDocumentTotal(cd).totalGross;
|
||||
typeString = "ARStorno";
|
||||
}
|
||||
|
||||
let bookingLines = []
|
||||
let shSelector = Math.sign(total) === -1 ? "H" : "S";
|
||||
const cust = cd.customer; // durch Mapping verfügbar
|
||||
|
||||
createddocuments.forEach(createddocument => {
|
||||
bookingLines.push(`${displayCurrency(total,true)};"${shSelector}";;;;;${cust?.customerNumber || ""};8400;"";${dayjs(cd.documentDate).format("DDMM")};"${cd.documentNumber}";;;"${`${typeString} ${cd.documentNumber} - ${cust?.name || ""}`.substring(0,59)}";;;;;;${file ? `"BEDI ""${file.id}"""` : ""};"Geschäftspartner";"${cust?.name || ""}";"Kundennummer";"${cust?.customerNumber || ""}";"Belegnummer";"${cd.documentNumber}";"Leistungsdatum";"${dayjs(cd.deliveryDate).format("DD.MM.YYYY")}";"Belegdatum";"${dayjs(cd.documentDate).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
|
||||
});
|
||||
|
||||
let file = filesCreateddocuments.find(i => i.createddocument === createddocument.id);
|
||||
// ER
|
||||
incominginvoicesList.forEach(ii => {
|
||||
const accs = ii.accounts as any[] || [];
|
||||
accs.forEach(account => {
|
||||
let file = filesIncomingInvoices.find(i => i.incominginvoice === ii.id);
|
||||
let accountData = accountsList.find(i => i.id === account.account);
|
||||
if (!accountData) return;
|
||||
|
||||
let total = 0
|
||||
let typeString = ""
|
||||
let buschluessel = "9";
|
||||
if(account.taxType === '19') buschluessel = "9";
|
||||
else if(account.taxType === 'null') buschluessel = "";
|
||||
else if(account.taxType === '7') buschluessel = "8";
|
||||
else if(account.taxType === '19I') buschluessel = "19";
|
||||
else if(account.taxType === '7I') buschluessel = "18";
|
||||
else buschluessel = "-";
|
||||
|
||||
if(createddocument.type === "invoices") {
|
||||
total = getCreatedDocumentTotal(createddocument).totalGross
|
||||
let amountGross =/* account.amountGross ? account.amountGross : */(account.amountNet || 0) + (account.amountTax || 0);
|
||||
let shSelector = Math.sign(amountGross) === -1 ? "H" : "S";
|
||||
let text = `ER ${ii.reference}: ${escapeString(ii.description)}`.substring(0,59);
|
||||
const vend = ii.vendor; // durch Mapping verfügbar
|
||||
|
||||
console.log()
|
||||
if(createddocument.usedAdvanceInvoices.length > 0){
|
||||
createddocument.usedAdvanceInvoices.forEach(usedAdvanceInvoice => {
|
||||
total -= getCreatedDocumentTotal(createddocumentsRaw.find(i => i.id === usedAdvanceInvoice)).totalGross
|
||||
})
|
||||
bookingLines.push(`${Math.abs(amountGross).toFixed(2).replace(".",",")};"${shSelector}";;;;;${accountData.number};${vend?.vendorNumber || ""};"${buschluessel}";${dayjs(ii.date).format("DDMM")};"${ii.reference}";;;"${text}";;;;;;${file ? `"BEDI ""${file.id}"""` : ""};"Geschäftspartner";"${vend?.name || ""}";"Kundennummer";"${vend?.vendorNumber || ""}";"Belegnummer";"${ii.reference}";"Leistungsdatum";"${dayjs(ii.date).format("DD.MM.YYYY")}";"Belegdatum";"${dayjs(ii.date).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
|
||||
});
|
||||
});
|
||||
|
||||
// Bank
|
||||
statementallocationsList.forEach(alloc => {
|
||||
const bs = alloc.bankstatement; // durch Mapping verfügbar
|
||||
if(!bs) return;
|
||||
|
||||
let shSelector = Math.sign(alloc.amount) === -1 ? "H" : "S";
|
||||
// @ts-ignore
|
||||
let datevKonto = bs.account?.datevNumber || "";
|
||||
let dateVal = dayjs(bs.date).format("DDMM");
|
||||
let dateFull = dayjs(bs.date).format("DD.MM.YYYY");
|
||||
let bsText = escapeString(bs.text);
|
||||
|
||||
if(alloc.createddocument && alloc.createddocument.customer) {
|
||||
const cd = alloc.createddocument;
|
||||
const cust = cd.customer;
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"H";;;;;${cust?.customerNumber};${datevKonto};"3";${dayjs(cd.documentDate).format("DDMM")};"${cd.documentNumber}";;;"${`ZE${alloc.description}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust?.name}";"Kundennummer";"${cust?.customerNumber}";"Belegnummer";"${cd.documentNumber}";"Leistungsdatum";"${dayjs(cd.deliveryDate).format("DD.MM.YYYY")}";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
|
||||
} else if(alloc.incominginvoice && alloc.incominginvoice.vendor) {
|
||||
const ii = alloc.incominginvoice;
|
||||
const vend = ii.vendor;
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend?.vendorNumber};"";${dayjs(ii.date).format("DDMM")};"${ii.reference}";;;"${`ZA${alloc.description} ${bsText} `.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend?.name}";"Kundennummer";"${vend?.vendorNumber}";"Belegnummer";"${ii.reference}";"Leistungsdatum";"${dayjs(ii.date).format("DD.MM.YYYY")}";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
|
||||
} else if(alloc.account) {
|
||||
const acc = alloc.account;
|
||||
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${acc.number};"";${dateVal};"";;;"${`${vorzeichen} ${acc.number} - ${escapeString(acc.label)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${bs.credName || ''}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
|
||||
} else if(alloc.vendor) {
|
||||
const vend = alloc.vendor;
|
||||
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend.vendorNumber};"";${dateVal};"";;;"${`${vorzeichen} ${vend.vendorNumber} - ${escapeString(vend.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
|
||||
} else if(alloc.customer) {
|
||||
const cust = alloc.customer;
|
||||
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${cust.customerNumber};"";${dateVal};"";;;"${`${vorzeichen} ${cust.customerNumber} - ${escapeString(cust.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
|
||||
} else if(alloc.ownaccount) {
|
||||
const own = alloc.ownaccount;
|
||||
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${own.number};"";${dateVal};"";;;"${`${vorzeichen} ${own.number} - ${escapeString(own.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${own.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
|
||||
}
|
||||
});
|
||||
|
||||
console.log(total)
|
||||
// ---------------------------------------------------------
|
||||
// 5. STAMMDATEN CSV
|
||||
// ---------------------------------------------------------
|
||||
const csvString = `${header}\n${colHeaders}\n` + bookingLines.join("\n") + "\n";
|
||||
await zipWriter.add(
|
||||
`EXTF_Buchungsstapel_von_${startDateFmt}_bis_${endDateFmt}.csv`,
|
||||
new TextReader(csvString)
|
||||
);
|
||||
|
||||
typeString = "AR"
|
||||
} else if(createddocument.type === "advanceInvoices") {
|
||||
total = getCreatedDocumentTotal(createddocument).totalGross
|
||||
typeString = "ARAbschlag"
|
||||
} else if(createddocument.type === "cancellationInvoices") {
|
||||
total = getCreatedDocumentTotal(createddocument).totalGross
|
||||
typeString = "ARStorno"
|
||||
}
|
||||
const headerStammdaten = `"EXTF";700;16;"Debitoren/Kreditoren";5;${dateNowStr};;"FE";"Florian Federspiel";;${beraternr};${mandantennr};20250101;4;${startDateFmt};${endDateFmt};"Debitoren & Kreditoren";"FF";1;0;1;"EUR";;"";;;"03";;;"";""`;
|
||||
const colHeadersStammdaten = `Konto;Name (Adressattyp Unternehmen);Unternehmensgegenstand;Name (Adressattyp natuerl. Person);Vorname (Adressattyp natuerl. Person);Name (Adressattyp keine Angabe);Adressatentyp;Kurzbezeichnung;EU-Land;EU-UStID;Anrede;Titel/Akad. Grad;Adelstitel;Namensvorsatz;Adressart;Strasse;Postfach;Postleitzahl;Ort;Land;Versandzusatz;Adresszusatz;Abweichende Anrede;Abw. Zustellbezeichnung 1;Abw. Zustellbezeichnung 2;Kennz. Korrespondenzadresse;Adresse Gueltig von;Adresse Gueltig bis;Telefon;Bemerkung (Telefon);Telefon GL;Bemerkung (Telefon GL);E-Mail;Bemerkung (E-Mail);Internet;Bemerkung (Internet);Fax;Bemerkung (Fax);Sonstige;Bemerkung (Sonstige);Bankleitzahl 1;Bankbezeichnung 1;Bankkonto-Nummer 1;Laenderkennzeichen 1;IBAN-Nr. 1;Leerfeld;SWIFT-Code 1;Abw. Kontoinhaber 1;Kennz. Haupt-Bankverb. 1;Bankverb. 1 Gueltig von;Bankverb. 1 Gueltig bis;Bankleitzahl 2;Bankbezeichnung 2;Bankkonto-Nummer 2;Laenderkennzeichen 2;IBAN-Nr. 2;Leerfeld;SWIFT-Code 2;Abw. Kontoinhaber 2;Kennz. Haupt-Bankverb. 2;Bankverb. 2 gueltig von;Bankverb. 2 gueltig bis;Bankleitzahl 3;Bankbezeichnung 3;Bankkonto-Nummer 3;Laenderkennzeichen 3;IBAN-Nr. 3;Leerfeld;SWIFT-Code 3;Abw. Kontoinhaber 3;Kennz. Haupt-Bankverb. 3;Bankverb. 3 gueltig von;Bankverb. 3 gueltig bis;Bankleitzahl 4;Bankbezeichnung 4;Bankkonto-Nummer 4;Laenderkennzeichen 4;IBAN-Nr. 4;Leerfeld;SWIFT-Code 4;Abw. Kontoinhaber 4;Kennz. Haupt-Bankverb. 4;Bankverb. 4 gueltig von;Bankverb. 4 gueltig bis;Bankleitzahl 5;Bankbezeichnung 5;Bankkonto-Nummer 5;Laenderkennzeichen 5;IBAN-Nr. 5;Leerfeld;SWIFT-Code 5;Abw. Kontoinhaber 5;Kennz. Haupt-Bankverb. 5;Bankverb. 5 gueltig von;Bankverb. 
5 gueltig bis;Leerfeld;Briefanrede;Grussformel;Kunden-/Lief.-Nr.;Steuernummer;Sprache;Ansprechpartner;Vertreter;Sachbearbeiter;Diverse-Konto;Ausgabeziel;Waehrungssteuerung;Kreditlimit (Debitor);Zahlungsbedingung;Faelligkeit in Tagen (Debitor);Skonto in Prozent (Debitor);Kreditoren-Ziel 1 Tg.;Kreditoren-Skonto 1 %;Kreditoren-Ziel 2 Tg.;Kreditoren-Skonto 2 %;Kreditoren-Ziel 3 Brutto Tg.;Kreditoren-Ziel 4 Tg.;Kreditoren-Skonto 4 %;Kreditoren-Ziel 5 Tg.;Kreditoren-Skonto 5 %;Mahnung;Kontoauszug;Mahntext 1;Mahntext 2;Mahntext 3;Kontoauszugstext;Mahnlimit Betrag;Mahnlimit %;Zinsberechnung;Mahnzinssatz 1;Mahnzinssatz 2;Mahnzinssatz 3;Lastschrift;Verfahren;Mandantenbank;Zahlungstraeger;Indiv. Feld 1;Indiv. Feld 2;Indiv. Feld 3;Indiv. Feld 4;Indiv. Feld 5;Indiv. Feld 6;Indiv. Feld 7;Indiv. Feld 8;Indiv. Feld 9;Indiv. Feld 10;Indiv. Feld 11;Indiv. Feld 12;Indiv. Feld 13;Indiv. Feld 14;Indiv. Feld 15;Abweichende Anrede (Rechnungsadresse);Adressart (Rechnungsadresse);Strasse (Rechnungsadresse);Postfach (Rechnungsadresse);Postleitzahl (Rechnungsadresse);Ort (Rechnungsadresse);Land (Rechnungsadresse);Versandzusatz (Rechnungsadresse);Adresszusatz (Rechnungsadresse);Abw. Zustellbezeichnung 1 (Rechnungsadresse);Abw. Zustellbezeichnung 2 (Rechnungsadresse);Adresse Gueltig von (Rechnungsadresse);Adresse Gueltig bis (Rechnungsadresse);Bankleitzahl 6;Bankbezeichnung 6;Bankkonto-Nummer 6;Laenderkennzeichen 6;IBAN-Nr. 6;Leerfeld;SWIFT-Code 6;Abw. Kontoinhaber 6;Kennz. Haupt-Bankverb. 6;Bankverb. 6 gueltig von;Bankverb. 6 gueltig bis;Bankleitzahl 7;Bankbezeichnung 7;Bankkonto-Nummer 7;Laenderkennzeichen 7;IBAN-Nr. 7;Leerfeld;SWIFT-Code 7;Abw. Kontoinhaber 7;Kennz. Haupt-Bankverb. 7;Bankverb. 7 gueltig von;Bankverb. 7 gueltig bis;Bankleitzahl 8;Bankbezeichnung 8;Bankkonto-Nummer 8;Laenderkennzeichen 8;IBAN-Nr. 8;Leerfeld;SWIFT-Code 8;Abw. Kontoinhaber 8;Kennz. Haupt-Bankverb. 8;Bankverb. 8 gueltig von;Bankverb. 
8 gueltig bis;Bankleitzahl 9;Bankbezeichnung 9;Bankkonto-Nummer 9;Laenderkennzeichen 9;IBAN-Nr. 9;Leerfeld;SWIFT-Code 9;Abw. Kontoinhaber 9;Kennz. Haupt-Bankverb. 9;Bankverb. 9 gueltig von;Bankverb. 9 gueltig bis;Bankleitzahl 10;Bankbezeichnung 10;Bankkonto-Nummer 10;Laenderkennzeichen 10;IBAN-Nr. 10;Leerfeld;SWIFT-Code 10;Abw. Kontoinhaber 10;Kennz. Haupt-Bankverb. 10;Bankverb 10 Gueltig von;Bankverb 10 Gueltig bis;Nummer Fremdsystem;Insolvent;SEPA-Mandatsreferenz 1;SEPA-Mandatsreferenz 2;SEPA-Mandatsreferenz 3;SEPA-Mandatsreferenz 4;SEPA-Mandatsreferenz 5;SEPA-Mandatsreferenz 6;SEPA-Mandatsreferenz 7;SEPA-Mandatsreferenz 8;SEPA-Mandatsreferenz 9;SEPA-Mandatsreferenz 10;Verknuepftes OPOS-Konto;Mahnsperre bis;Lastschriftsperre bis;Zahlungssperre bis;Gebuehrenberechnung;Mahngebuehr 1;Mahngebuehr 2;Mahngebuehr 3;Pauschalberechnung;Verzugspauschale 1;Verzugspauschale 2;Verzugspauschale 3;Alternativer Suchname;Status;Anschrift manuell geaendert (Korrespondenzadresse);Anschrift individuell (Korrespondenzadresse);Anschrift manuell geaendert (Rechnungsadresse);Anschrift individuell (Rechnungsadresse);Fristberechnung bei Debitor;Mahnfrist 1;Mahnfrist 2;Mahnfrist 3;Letzte Frist`;
|
||||
|
||||
let shSelector = "S"
|
||||
if(Math.sign(total) === 1) {
|
||||
shSelector = "S"
|
||||
} else if (Math.sign(total) === -1) {
|
||||
shSelector = "H"
|
||||
}
|
||||
const customersList = await server.db.select().from(customers).where(and(eq(customers.tenant, tenantId), eq(customers.active, true))).orderBy(asc(customers.customerNumber));
|
||||
const vendorsList = await server.db.select().from(vendors).where(and(eq(vendors.tenant, tenantId), eq(vendors.archived, false))).orderBy(asc(vendors.vendorNumber));
|
||||
|
||||
bookingLines.push(`${displayCurrency(total,true)};"${shSelector}";;;;;${createddocument.customer.customerNumber};8400;"";${dayjs(createddocument.documentDate).format("DDMM")};"${createddocument.documentNumber}";;;"${`${typeString} ${createddocument.documentNumber} - ${createddocument.customer.name}`.substring(0,59)}";;;;;;${file ? `"BEDI ""${file.id}"""` : ""};"Geschäftspartner";"${createddocument.customer.name}";"Kundennummer";"${createddocument.customer.customerNumber}";"Belegnummer";"${createddocument.documentNumber}";"Leistungsdatum";"${dayjs(createddocument.deliveryDate).format("DD.MM.YYYY")}";"Belegdatum";"${dayjs(createddocument.documentDate).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`)
|
||||
let bookinglinesStammdaten: string[] = [];
|
||||
|
||||
})
|
||||
customersList.forEach(c => {
|
||||
const info = c.infoData as any || {};
|
||||
bookinglinesStammdaten.push(`${c.customerNumber};"${c.isCompany ? (c.name || "").substring(0,48): ''}";;"${!c.isCompany ? (c.lastname ? c.lastname : c.name) : ''}";"${!c.isCompany ? (c.firstname ? c.firstname : '') : ''}";;${c.isCompany ? 2 : 1};;;;;;;;"STR";"${info.street || ''}";;"${info.zip || ''}";"${info.city || ''}";;;"${info.special || ''}";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;`);
|
||||
});
|
||||
|
||||
incominginvoices.forEach(incominginvoice => {
|
||||
console.log(incominginvoice.id);
|
||||
incominginvoice.accounts.forEach(account => {
|
||||
|
||||
let file = filesIncomingInvoices.find(i => i.incominginvoice === incominginvoice.id);
|
||||
|
||||
|
||||
let accountData = accounts.find(i => i.id === account.account)
|
||||
let buschluessel: string = "9"
|
||||
|
||||
if(account.taxType === '19'){
|
||||
buschluessel = "9"
|
||||
} else if(account.taxType === 'null') {
|
||||
buschluessel = ""
|
||||
} else if(account.taxType === '7') {
|
||||
buschluessel = "8"
|
||||
} else if(account.taxType === '19I') {
|
||||
buschluessel = "19"
|
||||
} else if(account.taxType === '7I') {
|
||||
buschluessel = "18"
|
||||
} else {
|
||||
buschluessel = "-"
|
||||
}
|
||||
|
||||
let shSelector = "S"
|
||||
let amountGross = account.amountGross ? account.amountGross : account.amountNet + account.amountTax
|
||||
|
||||
|
||||
if(Math.sign(amountGross) === 1) {
|
||||
shSelector = "S"
|
||||
} else if(Math.sign(amountGross) === -1) {
|
||||
shSelector = "H"
|
||||
}
|
||||
|
||||
let text = `ER ${incominginvoice.reference}: ${escapeString(incominginvoice.description)}`.substring(0,59)
|
||||
console.log(incominginvoice)
|
||||
bookingLines.push(`${Math.abs(amountGross).toFixed(2).replace(".",",")};"${shSelector}";;;;;${accountData.number};${incominginvoice.vendor.vendorNumber};"${buschluessel}";${dayjs(incominginvoice.date).format("DDMM")};"${incominginvoice.reference}";;;"${text}";;;;;;${file ? `"BEDI ""${file.id}"""` : ""};"Geschäftspartner";"${incominginvoice.vendor.name}";"Kundennummer";"${incominginvoice.vendor.vendorNumber}";"Belegnummer";"${incominginvoice.reference}";"Leistungsdatum";"${dayjs(incominginvoice.date).format("DD.MM.YYYY")}";"Belegdatum";"${dayjs(incominginvoice.date).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`)
|
||||
})
|
||||
|
||||
|
||||
})
|
||||
|
||||
statementallocations.forEach(statementallocation => {
|
||||
|
||||
let shSelector = "S"
|
||||
|
||||
if(Math.sign(statementallocation.amount) === 1) {
|
||||
shSelector = "S"
|
||||
} else if(Math.sign(statementallocation.amount) === -1) {
|
||||
shSelector = "H"
|
||||
}
|
||||
|
||||
if(statementallocation.cd_id) {
|
||||
bookingLines.push(`${displayCurrency(statementallocation.amount,true)};"H";;;;;${statementallocation.cd_id.customer.customerNumber};${statementallocation.bs_id.account.datevNumber};"3";${dayjs(statementallocation.cd_id.documentDate).format("DDMM")};"${statementallocation.cd_id.documentNumber}";;;"${`ZE${statementallocation.description}${escapeString(statementallocation.bs_id.text)}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${statementallocation.cd_id.customer.name}";"Kundennummer";"${statementallocation.cd_id.customer.customerNumber}";"Belegnummer";"${statementallocation.cd_id.documentNumber}";"Leistungsdatum";"${dayjs(statementallocation.cd_id.deliveryDate).format("DD.MM.YYYY")}";"Belegdatum";"${dayjs(statementallocation.cd_id.documentDate).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`)
|
||||
} else if(statementallocation.ii_id) {
|
||||
bookingLines.push(`${displayCurrency(statementallocation.amount,true)};"${shSelector}";;;;;${statementallocation.bs_id.account.datevNumber};${statementallocation.ii_id.vendor.vendorNumber};"";${dayjs(statementallocation.ii_id.date).format("DDMM")};"${statementallocation.ii_id.reference}";;;"${`ZA${statementallocation.description} ${escapeString(statementallocation.bs_id.text)} `.substring(0,59)}";;;;;;;"Geschäftspartner";"${statementallocation.ii_id.vendor.name}";"Kundennummer";"${statementallocation.ii_id.vendor.vendorNumber}";"Belegnummer";"${statementallocation.ii_id.reference}";"Leistungsdatum";"${dayjs(statementallocation.ii_id.date).format("DD.MM.YYYY")}";"Belegdatum";"${dayjs(statementallocation.ii_id.date).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`)
|
||||
} else if(statementallocation.account) {
|
||||
bookingLines.push(`${displayCurrency(statementallocation.amount,true)};"${shSelector}";;;;;${statementallocation.bs_id.account.datevNumber};${statementallocation.account.number};"";${dayjs(statementallocation.bs_id.date).format("DDMM")};"";;;"${`${Math.sign(statementallocation.amount) > 0 ? "ZE" : "ZA"} ${statementallocation.account.number} - ${escapeString(statementallocation.account.label)}${escapeString(statementallocation.description)}${statementallocation.bs_id.text}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${statementallocation.bs_id.credName}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dayjs(statementallocation.bs_id.date).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`)
|
||||
} else if(statementallocation.vendor) {
|
||||
bookingLines.push(`${displayCurrency(statementallocation.amount,true)};"${shSelector}";;;;;${statementallocation.bs_id.account.datevNumber};${statementallocation.vendor.vendorNumber};"";${dayjs(statementallocation.bs_id.date).format("DDMM")};"";;;"${`${Math.sign(statementallocation.amount) > 0 ? "ZE" : "ZA"} ${statementallocation.vendor.vendorNumber} - ${escapeString(statementallocation.vendor.name)}${escapeString(statementallocation.description)}${statementallocation.bs_id.text}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${statementallocation.vendor.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dayjs(statementallocation.bs_id.date).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`)
|
||||
} else if(statementallocation.customer) {
|
||||
bookingLines.push(`${displayCurrency(statementallocation.amount,true)};"${shSelector}";;;;;${statementallocation.bs_id.account.datevNumber};${statementallocation.customer.customerNumber};"";${dayjs(statementallocation.bs_id.date).format("DDMM")};"";;;"${`${Math.sign(statementallocation.amount) > 0 ? "ZE" : "ZA"} ${statementallocation.customer.customerNumber} - ${escapeString(statementallocation.customer.name)}${escapeString(statementallocation.description)}${statementallocation.bs_id.text}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${statementallocation.customer.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dayjs(statementallocation.bs_id.date).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`)
|
||||
} else if(statementallocation.ownaccount) {
|
||||
bookingLines.push(`${displayCurrency(statementallocation.amount,true)};"${shSelector}";;;;;${statementallocation.bs_id.account.datevNumber};${statementallocation.ownaccount.number};"";${dayjs(statementallocation.bs_id.date).format("DDMM")};"";;;"${`${Math.sign(statementallocation.amount) > 0 ? "ZE" : "ZA"} ${statementallocation.ownaccount.number} - ${escapeString(statementallocation.ownaccount.name)}${escapeString(statementallocation.description)}${statementallocation.bs_id.text}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${statementallocation.ownaccount.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dayjs(statementallocation.bs_id.date).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`)
|
||||
}
|
||||
|
||||
|
||||
})
|
||||
|
||||
|
||||
let csvString = `${header}\n${colHeaders}\n`;
|
||||
bookingLines.forEach(line => {
|
||||
csvString += `${line}\n`;
|
||||
})
|
||||
|
||||
const buchungsstapelReader = new TextReader(csvString)
|
||||
await zipWriter.add(`EXTF_Buchungsstapel_von_${dayjs(startDate).format("DDMMYYYY")}_bis_${dayjs(endDate).format("DDMMYYYY")}.csv`, buchungsstapelReader)
|
||||
|
||||
/*fs.writeFile(`output/EXTF_Buchungsstapel_von_${dayjs(startDate).format("DDMMYYYY")}_bis_${dayjs(endDate).format("DDMMYYYY")}.csv`, csvString, 'utf8', function (err) {
|
||||
if (err) {
|
||||
console.log('Some error occured - file either not saved or corrupted file saved.');
|
||||
console.log(err);
|
||||
} else{
|
||||
console.log('It\'s saved!');
|
||||
}
|
||||
});*/
|
||||
|
||||
// Kreditoren/Debitoren
|
||||
let headerStammdaten = `"EXTF";700;16;"Debitoren/Kreditoren";5;${dayjs().format("YYYYMMDDHHmmssSSS")};;"FE";"Florian Federspiel";;${beraternr};${mandantennr};20250101;4;${dayjs(startDate).format("YYYYMMDD")};${dayjs(endDate).format("YYYYMMDD")};"Debitoren & Kreditoren";"FF";1;0;1;"EUR";;"";;;"03";;;"";""`
|
||||
|
||||
let colHeadersStammdaten = `Konto;Name (Adressattyp Unternehmen);Unternehmensgegenstand;Name (Adressattyp natuerl. Person);Vorname (Adressattyp natuerl. Person);Name (Adressattyp keine Angabe);Adressatentyp;Kurzbezeichnung;EU-Land;EU-UStID;Anrede;Titel/Akad. Grad;Adelstitel;Namensvorsatz;Adressart;Strasse;Postfach;Postleitzahl;Ort;Land;Versandzusatz;Adresszusatz;Abweichende Anrede;Abw. Zustellbezeichnung 1;Abw. Zustellbezeichnung 2;Kennz. Korrespondenzadresse;Adresse Gueltig von;Adresse Gueltig bis;Telefon;Bemerkung (Telefon);Telefon GL;Bemerkung (Telefon GL);E-Mail;Bemerkung (E-Mail);Internet;Bemerkung (Internet);Fax;Bemerkung (Fax);Sonstige;Bemerkung (Sonstige);Bankleitzahl 1;Bankbezeichnung 1;Bankkonto-Nummer 1;Laenderkennzeichen 1;IBAN-Nr. 1;Leerfeld;SWIFT-Code 1;Abw. Kontoinhaber 1;Kennz. Haupt-Bankverb. 1;Bankverb. 1 Gueltig von;Bankverb. 1 Gueltig bis;Bankleitzahl 2;Bankbezeichnung 2;Bankkonto-Nummer 2;Laenderkennzeichen 2;IBAN-Nr. 2;Leerfeld;SWIFT-Code 2;Abw. Kontoinhaber 2;Kennz. Haupt-Bankverb. 2;Bankverb. 2 gueltig von;Bankverb. 2 gueltig bis;Bankleitzahl 3;Bankbezeichnung 3;Bankkonto-Nummer 3;Laenderkennzeichen 3;IBAN-Nr. 3;Leerfeld;SWIFT-Code 3;Abw. Kontoinhaber 3;Kennz. Haupt-Bankverb. 3;Bankverb. 3 gueltig von;Bankverb. 3 gueltig bis;Bankleitzahl 4;Bankbezeichnung 4;Bankkonto-Nummer 4;Laenderkennzeichen 4;IBAN-Nr. 4;Leerfeld;SWIFT-Code 4;Abw. Kontoinhaber 4;Kennz. Haupt-Bankverb. 4;Bankverb. 4 gueltig von;Bankverb. 4 gueltig bis;Bankleitzahl 5;Bankbezeichnung 5;Bankkonto-Nummer 5;Laenderkennzeichen 5;IBAN-Nr. 5;Leerfeld;SWIFT-Code 5;Abw. Kontoinhaber 5;Kennz. Haupt-Bankverb. 5;Bankverb. 5 gueltig von;Bankverb. 
5 gueltig bis;Leerfeld;Briefanrede;Grussformel;Kunden-/Lief.-Nr.;Steuernummer;Sprache;Ansprechpartner;Vertreter;Sachbearbeiter;Diverse-Konto;Ausgabeziel;Waehrungssteuerung;Kreditlimit (Debitor);Zahlungsbedingung;Faelligkeit in Tagen (Debitor);Skonto in Prozent (Debitor);Kreditoren-Ziel 1 Tg.;Kreditoren-Skonto 1 %;Kreditoren-Ziel 2 Tg.;Kreditoren-Skonto 2 %;Kreditoren-Ziel 3 Brutto Tg.;Kreditoren-Ziel 4 Tg.;Kreditoren-Skonto 4 %;Kreditoren-Ziel 5 Tg.;Kreditoren-Skonto 5 %;Mahnung;Kontoauszug;Mahntext 1;Mahntext 2;Mahntext 3;Kontoauszugstext;Mahnlimit Betrag;Mahnlimit %;Zinsberechnung;Mahnzinssatz 1;Mahnzinssatz 2;Mahnzinssatz 3;Lastschrift;Verfahren;Mandantenbank;Zahlungstraeger;Indiv. Feld 1;Indiv. Feld 2;Indiv. Feld 3;Indiv. Feld 4;Indiv. Feld 5;Indiv. Feld 6;Indiv. Feld 7;Indiv. Feld 8;Indiv. Feld 9;Indiv. Feld 10;Indiv. Feld 11;Indiv. Feld 12;Indiv. Feld 13;Indiv. Feld 14;Indiv. Feld 15;Abweichende Anrede (Rechnungsadresse);Adressart (Rechnungsadresse);Strasse (Rechnungsadresse);Postfach (Rechnungsadresse);Postleitzahl (Rechnungsadresse);Ort (Rechnungsadresse);Land (Rechnungsadresse);Versandzusatz (Rechnungsadresse);Adresszusatz (Rechnungsadresse);Abw. Zustellbezeichnung 1 (Rechnungsadresse);Abw. Zustellbezeichnung 2 (Rechnungsadresse);Adresse Gueltig von (Rechnungsadresse);Adresse Gueltig bis (Rechnungsadresse);Bankleitzahl 6;Bankbezeichnung 6;Bankkonto-Nummer 6;Laenderkennzeichen 6;IBAN-Nr. 6;Leerfeld;SWIFT-Code 6;Abw. Kontoinhaber 6;Kennz. Haupt-Bankverb. 6;Bankverb. 6 gueltig von;Bankverb. 6 gueltig bis;Bankleitzahl 7;Bankbezeichnung 7;Bankkonto-Nummer 7;Laenderkennzeichen 7;IBAN-Nr. 7;Leerfeld;SWIFT-Code 7;Abw. Kontoinhaber 7;Kennz. Haupt-Bankverb. 7;Bankverb. 7 gueltig von;Bankverb. 7 gueltig bis;Bankleitzahl 8;Bankbezeichnung 8;Bankkonto-Nummer 8;Laenderkennzeichen 8;IBAN-Nr. 8;Leerfeld;SWIFT-Code 8;Abw. Kontoinhaber 8;Kennz. Haupt-Bankverb. 8;Bankverb. 8 gueltig von;Bankverb. 
8 gueltig bis;Bankleitzahl 9;Bankbezeichnung 9;Bankkonto-Nummer 9;Laenderkennzeichen 9;IBAN-Nr. 9;Leerfeld;SWIFT-Code 9;Abw. Kontoinhaber 9;Kennz. Haupt-Bankverb. 9;Bankverb. 9 gueltig von;Bankverb. 9 gueltig bis;Bankleitzahl 10;Bankbezeichnung 10;Bankkonto-Nummer 10;Laenderkennzeichen 10;IBAN-Nr. 10;Leerfeld;SWIFT-Code 10;Abw. Kontoinhaber 10;Kennz. Haupt-Bankverb. 10;Bankverb 10 Gueltig von;Bankverb 10 Gueltig bis;Nummer Fremdsystem;Insolvent;SEPA-Mandatsreferenz 1;SEPA-Mandatsreferenz 2;SEPA-Mandatsreferenz 3;SEPA-Mandatsreferenz 4;SEPA-Mandatsreferenz 5;SEPA-Mandatsreferenz 6;SEPA-Mandatsreferenz 7;SEPA-Mandatsreferenz 8;SEPA-Mandatsreferenz 9;SEPA-Mandatsreferenz 10;Verknuepftes OPOS-Konto;Mahnsperre bis;Lastschriftsperre bis;Zahlungssperre bis;Gebuehrenberechnung;Mahngebuehr 1;Mahngebuehr 2;Mahngebuehr 3;Pauschalberechnung;Verzugspauschale 1;Verzugspauschale 2;Verzugspauschale 3;Alternativer Suchname;Status;Anschrift manuell geaendert (Korrespondenzadresse);Anschrift individuell (Korrespondenzadresse);Anschrift manuell geaendert (Rechnungsadresse);Anschrift individuell (Rechnungsadresse);Fristberechnung bei Debitor;Mahnfrist 1;Mahnfrist 2;Mahnfrist 3;Letzte Frist`
|
||||
const {data:customers} = await server.supabase.from("customers").select().eq("tenant",tenant).order("customerNumber")
|
||||
const {data:vendors} = await server.supabase.from("vendors").select().eq("tenant",tenant).order("vendorNumber")
|
||||
|
||||
let bookinglinesStammdaten = []
|
||||
|
||||
customers.forEach(customer => {
|
||||
bookinglinesStammdaten.push(`${customer.customerNumber};"${customer.isCompany ? customer.name.substring(0,48): ''}";;"${!customer.isCompany ? (customer.lastname ? customer.lastname : customer.name) : ''}";"${!customer.isCompany ? (customer.firstname ? customer.firstname : '') : ''}";;${customer.isCompany ? 2 : 1};;;;;;;;"STR";"${customer.infoData.street ? customer.infoData.street : ''}";;"${customer.infoData.zip ? customer.infoData.zip : ''}";"${customer.infoData.city ? customer.infoData.city : ''}";;;"${customer.infoData.special ? customer.infoData.special : ''}";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;`)
|
||||
|
||||
})
|
||||
|
||||
vendors.forEach(vendor => {
|
||||
bookinglinesStammdaten.push(`${vendor.vendorNumber};"${vendor.name.substring(0,48)}";;;;;2;;;;;;;;"STR";"${vendor.infoData.street ? vendor.infoData.street : ''}";;"${vendor.infoData.zip ? vendor.infoData.zip : ''}";"${vendor.infoData.city ? vendor.infoData.city : ''}";;;"${vendor.infoData.special ? vendor.infoData.special : ''}";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;`)
|
||||
|
||||
})
|
||||
|
||||
let csvStringStammdaten = `${headerStammdaten}\n${colHeadersStammdaten}\n`;
|
||||
bookinglinesStammdaten.forEach(line => {
|
||||
csvStringStammdaten += `${line}\n`;
|
||||
})
|
||||
|
||||
const stammdatenReader = new TextReader(csvStringStammdaten)
|
||||
await zipWriter.add(`EXTF_Stammdaten_von_${dayjs(startDate).format("DDMMYYYY")}_bis_${dayjs(endDate).format("DDMMYYYY")}.csv`, stammdatenReader)
|
||||
|
||||
|
||||
|
||||
/*fs.writeFile(`output/EXTF_Stammdaten_von_${dayjs(startDate).format("DDMMYYYY")}_bis_${dayjs(endDate).format("DDMMYYYY")}.csv`, csvStringStammdaten, 'utf8', function (err) {
|
||||
if (err) {
|
||||
console.log('Some error occured - file either not saved or corrupted file saved.');
|
||||
console.log(err);
|
||||
} else{
|
||||
console.log('It\'s saved!');
|
||||
}
|
||||
});*/
|
||||
|
||||
//Sachkonten
|
||||
let headerSachkonten = `"EXTF";700;20;"Kontenbeschriftungen";3;${dayjs().format("YYYYMMDDHHmmssSSS")};;"FE";"Florian Federspiel";;${beraternr};${mandantennr};20250101;4;${dayjs(startDate).format("YYYYMMDD")};${dayjs(endDate).format("YYYYMMDD")};"Sachkonten";"FF";1;0;1;"EUR";;"";;;"03";;;"";""`
|
||||
|
||||
let colHeadersSachkonten = `Konto;Kontenbeschriftung;Sprach-ID;Kontenbeschriftung lang`
|
||||
const {data:bankaccounts} = await server.supabase.from("bankaccounts").select().eq("tenant",tenant).order("datevNumber")
|
||||
|
||||
let bookinglinesSachkonten = []
|
||||
|
||||
bankaccounts.forEach(bankaccount => {
|
||||
bookinglinesSachkonten.push(`${bankaccount.datevNumber};"${bankaccount.name}";"de-DE";`)
|
||||
|
||||
})
|
||||
|
||||
let csvStringSachkonten = `${headerSachkonten}\n${colHeadersSachkonten}\n`;
|
||||
bookinglinesSachkonten.forEach(line => {
|
||||
csvStringSachkonten += `${line}\n`;
|
||||
})
|
||||
|
||||
const sachkontenReader = new TextReader(csvStringSachkonten)
|
||||
await zipWriter.add(`EXTF_Sachkonten_von_${dayjs(startDate).format("DDMMYYYY")}_bis_${dayjs(endDate).format("DDMMYYYY")}.csv`, sachkontenReader)
|
||||
|
||||
/*fs.writeFile(`output/EXTF_Sachkonten_von_${dayjs(startDate).format("DDMMYYYY")}_bis_${dayjs(endDate).format("DDMMYYYY")}.csv`, csvStringSachkonten, 'utf8', function (err) {
|
||||
if (err) {
|
||||
console.log('Some error occured - file either not saved or corrupted file saved.');
|
||||
console.log(err);
|
||||
} else{
|
||||
console.log('It\'s saved!');
|
||||
}
|
||||
});*/
|
||||
vendorsList.forEach(v => {
|
||||
const info = v.infoData as any || {};
|
||||
bookinglinesStammdaten.push(`${v.vendorNumber};"${(v.name || "").substring(0,48)}";;;;;2;;;;;;;;"STR";"${info.street || ''}";;"${info.zip || ''}";"${info.city || ''}";;;"${info.special || ''}";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;`);
|
||||
});
|
||||
|
||||
await zipWriter.add(
|
||||
`EXTF_Stammdaten_von_${startDateFmt}_bis_${endDateFmt}.csv`,
|
||||
new TextReader(`${headerStammdaten}\n${colHeadersStammdaten}\n` + bookinglinesStammdaten.join("\n") + "\n")
|
||||
);
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// 6. XML METADATA
|
||||
// ---------------------------------------------------------
|
||||
let obj = {
|
||||
archive: {
|
||||
'@version':"5.0",
|
||||
@@ -333,56 +395,34 @@ export async function buildExportZip(server: FastifyInstance, tenant: number, st
|
||||
date: dayjs().format("YYYY-MM-DDTHH:mm:ss")
|
||||
},
|
||||
content: {
|
||||
document: []
|
||||
}
|
||||
document: [] as any[]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
filesCreateddocuments.forEach(file => {
|
||||
const addXmlDoc = (file: any) => {
|
||||
if(!file.path) return;
|
||||
const ext = file.path.includes('.') ? file.path.split(".").pop() : "pdf";
|
||||
obj.archive.content.document.push({
|
||||
"@guid": file.id,
|
||||
extension: {
|
||||
"@xsi:type":"File",
|
||||
"@name":`${file.id}.pdf`
|
||||
"@name":`${file.id}.${ext}`
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
};
|
||||
|
||||
filesIncomingInvoices.forEach(file => {
|
||||
obj.archive.content.document.push({
|
||||
"@guid": file.id,
|
||||
extension: {
|
||||
"@xsi:type":"File",
|
||||
"@name":`${file.id}.pdf`
|
||||
}
|
||||
})
|
||||
})
|
||||
filesCreateddocuments.forEach(addXmlDoc);
|
||||
filesIncomingInvoices.forEach(addXmlDoc);
|
||||
|
||||
let doc = xmlbuilder.create(obj, {encoding: 'UTF-8', standalone: true})
|
||||
const doc = xmlbuilder.create(obj, {encoding: 'UTF-8', standalone: true});
|
||||
await zipWriter.add(`document.xml`, new TextReader(doc.end({pretty: true})));
|
||||
|
||||
//console.log(doc.end({pretty: true}));
|
||||
const arrayBuffer = await (await zipWriter.close()).arrayBuffer();
|
||||
return Buffer.from(arrayBuffer);
|
||||
|
||||
const documentsReader = new TextReader(doc.end({pretty: true}))
|
||||
await zipWriter.add(`document.xml`, documentsReader)
|
||||
|
||||
|
||||
|
||||
|
||||
/*function toBuffer(arrayBuffer) {
|
||||
const buffer = Buffer.alloc(arrayBuffer.byteLength);
|
||||
const view = new Uint8Array(arrayBuffer);
|
||||
for (let i = 0; i < buffer.length; ++i) {
|
||||
buffer[i] = view[i];
|
||||
}
|
||||
return buffer;
|
||||
}*/
|
||||
|
||||
|
||||
const arrayBuffer = await (await zipWriter.close()).arrayBuffer()
|
||||
return Buffer.from(arrayBuffer)
|
||||
} catch(error) {
|
||||
console.log(error)
|
||||
console.error("DATEV Export Error:", error);
|
||||
throw error;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@@ -1,12 +1,25 @@
|
||||
import xmlbuilder from "xmlbuilder";
|
||||
import {randomUUID} from "node:crypto";
|
||||
import dayjs from "dayjs";
|
||||
import { and, eq, inArray } from "drizzle-orm";
|
||||
import { createddocuments, tenants } from "../../../db/schema";
|
||||
|
||||
export const createSEPAExport = async (server,idsToExport, tenant_id) => {
|
||||
const {data,error} = await server.supabase.from("createddocuments").select().eq("tenant", tenant_id).in("id", idsToExport)
|
||||
const {data:tenantData,error:tenantError} = await server.supabase.from("tenants").select().eq("id", tenant_id).single()
|
||||
const data = await server.db
|
||||
.select()
|
||||
.from(createddocuments)
|
||||
.where(and(
|
||||
eq(createddocuments.tenant, tenant_id),
|
||||
inArray(createddocuments.id, idsToExport)
|
||||
))
|
||||
|
||||
const tenantRows = await server.db
|
||||
.select()
|
||||
.from(tenants)
|
||||
.where(eq(tenants.id, tenant_id))
|
||||
.limit(1)
|
||||
const tenantData = tenantRows[0]
|
||||
console.log(tenantData)
|
||||
console.log(tenantError)
|
||||
|
||||
console.log(data)
|
||||
|
||||
|
||||
@@ -1,11 +1,8 @@
|
||||
import {FastifyInstance} from "fastify";
|
||||
// import { PNG } from 'pngjs'
|
||||
// import { ready as zplReady } from 'zpl-renderer-js'
|
||||
// import { Utils } from '@mmote/niimbluelib'
|
||||
// import { createCanvas } from 'canvas'
|
||||
// import bwipjs from 'bwip-js'
|
||||
// import Sharp from 'sharp'
|
||||
// import fs from 'fs'
|
||||
import { FastifyInstance } from "fastify"
|
||||
import { PNG } from "pngjs"
|
||||
import { Utils } from "@mmote/niimbluelib"
|
||||
import bwipjs from "bwip-js"
|
||||
import Sharp from "sharp"
|
||||
|
||||
import { tenants } from "../../db/schema"
|
||||
import { eq } from "drizzle-orm"
|
||||
@@ -15,7 +12,6 @@ export const useNextNumberRangeNumber = async (
|
||||
tenantId: number,
|
||||
numberRange: string
|
||||
) => {
|
||||
// 1️⃣ Tenant laden
|
||||
const [tenant] = await server.db
|
||||
.select()
|
||||
.from(tenants)
|
||||
@@ -33,23 +29,20 @@ export const useNextNumberRangeNumber = async (
|
||||
|
||||
const current = numberRanges[numberRange]
|
||||
|
||||
// 2️⃣ Used Number generieren
|
||||
const usedNumber =
|
||||
(current.prefix || "") +
|
||||
current.nextNumber +
|
||||
(current.suffix || "")
|
||||
|
||||
// 3️⃣ nextNumber erhöhen
|
||||
const updatedRanges = {
|
||||
// @ts-ignore
|
||||
...numberRanges,
|
||||
[numberRange]: {
|
||||
...current,
|
||||
nextNumber: current.nextNumber + 1
|
||||
}
|
||||
nextNumber: current.nextNumber + 1,
|
||||
},
|
||||
}
|
||||
|
||||
// 4️⃣ Tenant aktualisieren
|
||||
await server.db
|
||||
.update(tenants)
|
||||
.set({ numberRanges: updatedRanges })
|
||||
@@ -58,24 +51,17 @@ export const useNextNumberRangeNumber = async (
|
||||
return { usedNumber }
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
export async function encodeBase64ToNiimbot(base64Png, printDirection = 'top') {
|
||||
// 1️⃣ PNG dekodieren
|
||||
const buffer = Buffer.from(base64Png, 'base64')
|
||||
const png = PNG.sync.read(buffer) // liefert {width, height, data: Uint8Array(RGBA)}
|
||||
export async function encodeBase64ToNiimbot(base64Png: string, printDirection: "top" | "left" = "top") {
|
||||
const buffer = Buffer.from(base64Png, "base64")
|
||||
const png = PNG.sync.read(buffer)
|
||||
|
||||
const { width, height, data } = png
|
||||
console.log(width, height, data)
|
||||
const cols = printDirection === 'left' ? height : width
|
||||
const rows = printDirection === 'left' ? width : height
|
||||
const rowsData = []
|
||||
const cols = printDirection === "left" ? height : width
|
||||
const rows = printDirection === "left" ? width : height
|
||||
const rowsData: any[] = []
|
||||
|
||||
console.log(cols)
|
||||
if (cols % 8 !== 0) throw new Error("Column count must be multiple of 8")
|
||||
|
||||
if (cols % 8 !== 0) throw new Error('Column count must be multiple of 8')
|
||||
|
||||
// 2️⃣ Zeilenweise durchgehen und Bits bilden
|
||||
for (let row = 0; row < rows; row++) {
|
||||
let isVoid = true
|
||||
let blackPixelsCount = 0
|
||||
@@ -84,8 +70,8 @@ export async function encodeBase64ToNiimbot(base64Png, printDirection = 'top') {
|
||||
for (let colOct = 0; colOct < cols / 8; colOct++) {
|
||||
let pixelsOctet = 0
|
||||
for (let colBit = 0; colBit < 8; colBit++) {
|
||||
const x = printDirection === 'left' ? row : colOct * 8 + colBit
|
||||
const y = printDirection === 'left' ? height - 1 - (colOct * 8 + colBit) : row
|
||||
const x = printDirection === "left" ? row : colOct * 8 + colBit
|
||||
const y = printDirection === "left" ? height - 1 - (colOct * 8 + colBit) : row
|
||||
const idx = (y * width + x) * 4
|
||||
const lum = 0.299 * data[idx] + 0.587 * data[idx + 1] + 0.114 * data[idx + 2]
|
||||
const isBlack = lum < 128
|
||||
@@ -99,7 +85,7 @@ export async function encodeBase64ToNiimbot(base64Png, printDirection = 'top') {
|
||||
}
|
||||
|
||||
const newPart = {
|
||||
dataType: isVoid ? 'void' : 'pixels',
|
||||
dataType: isVoid ? "void" : "pixels",
|
||||
rowNumber: row,
|
||||
repeat: 1,
|
||||
rowData: isVoid ? undefined : rowData,
|
||||
@@ -111,14 +97,15 @@ export async function encodeBase64ToNiimbot(base64Png, printDirection = 'top') {
|
||||
} else {
|
||||
const last = rowsData[rowsData.length - 1]
|
||||
let same = newPart.dataType === last.dataType
|
||||
if (same && newPart.dataType === 'pixels') {
|
||||
if (same && newPart.dataType === "pixels") {
|
||||
same = Utils.u8ArraysEqual(newPart.rowData, last.rowData)
|
||||
}
|
||||
if (same) last.repeat++
|
||||
else rowsData.push(newPart)
|
||||
|
||||
if (row % 200 === 199) {
|
||||
rowsData.push({
|
||||
dataType: 'check',
|
||||
dataType: "check",
|
||||
rowNumber: row,
|
||||
repeat: 0,
|
||||
rowData: undefined,
|
||||
@@ -131,44 +118,69 @@ export async function encodeBase64ToNiimbot(base64Png, printDirection = 'top') {
|
||||
return { cols, rows, rowsData }
|
||||
}
|
||||
|
||||
export async function generateLabel(context,width,height) {
|
||||
// Canvas für Hintergrund & Text
|
||||
const canvas = createCanvas(width, height)
|
||||
const ctx = canvas.getContext('2d')
|
||||
function escapeXml(value: string) {
|
||||
return String(value)
|
||||
.replace(/&/g, "&")
|
||||
.replace(/</g, "<")
|
||||
.replace(/>/g, ">")
|
||||
.replace(/\"/g, """)
|
||||
.replace(/'/g, "'")
|
||||
}
|
||||
|
||||
// Hintergrund weiß
|
||||
ctx.fillStyle = '#FFFFFF'
|
||||
ctx.fillRect(0, 0, width, height)
|
||||
export async function generateLabel(context: any = {}, width = 584, height = 354) {
|
||||
const normalizedWidth = Math.ceil(Number(width) / 8) * 8
|
||||
const normalizedHeight = Math.max(1, Number(height) || 203)
|
||||
|
||||
// Überschrift
|
||||
ctx.fillStyle = '#000000'
|
||||
ctx.font = '32px Arial'
|
||||
ctx.fillText(context.text, 20, 40)
|
||||
const idFont = Math.max(24, Math.round(normalizedHeight * 0.125))
|
||||
const nameFont = Math.max(17, Math.round(normalizedHeight * 0.078))
|
||||
const customerFont = Math.max(14, Math.round(normalizedHeight * 0.06))
|
||||
const serialFont = Math.max(12, Math.round(normalizedHeight * 0.052))
|
||||
|
||||
const labelId = context.customerInventoryId || context.datamatrix || context.id || "N/A"
|
||||
const labelName = context.name || context.text || "Kundeninventarartikel"
|
||||
const customerName = context.customerName || ""
|
||||
const serial = context.serialNumber ? `SN: ${context.serialNumber}` : ""
|
||||
const nameLine1 = String(labelName).slice(0, 30)
|
||||
const nameLine2 = String(labelName).slice(30, 60)
|
||||
|
||||
// 3) DataMatrix
|
||||
const dataMatrixPng = await bwipjs.toBuffer({
|
||||
bcid: 'datamatrix',
|
||||
text: context.datamatrix,
|
||||
scale: 6,
|
||||
bcid: "datamatrix",
|
||||
text: String(labelId),
|
||||
scale: normalizedWidth >= 560 ? 7 : 5,
|
||||
includetext: false,
|
||||
})
|
||||
const dataMatrixMeta = await Sharp(dataMatrixPng).metadata()
|
||||
const dataMatrixWidth = dataMatrixMeta.width || 0
|
||||
const dataMatrixHeight = dataMatrixMeta.height || 0
|
||||
const dmLeft = Math.max(8, normalizedWidth - dataMatrixWidth - 28)
|
||||
const dmTop = Math.max(8, Math.floor((normalizedHeight - dataMatrixHeight) / 2))
|
||||
const textMaxWidth = Math.max(120, dmLeft - 20)
|
||||
|
||||
// Basisbild aus Canvas
|
||||
const base = await Sharp(canvas.toBuffer())
|
||||
.png()
|
||||
.toBuffer()
|
||||
const textSvg = `
|
||||
<svg width="${normalizedWidth}" height="${normalizedHeight}" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect width="100%" height="100%" fill="white"/>
|
||||
<text x="12" y="${Math.round(normalizedHeight * 0.15)}" font-size="${idFont}" font-family="Arial, Helvetica, sans-serif" font-weight="700" fill="black">${escapeXml(String(labelId).slice(0, 26))}</text>
|
||||
<text x="12" y="${Math.round(normalizedHeight * 0.29)}" font-size="${nameFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(nameLine1)}</text>
|
||||
<text x="12" y="${Math.round(normalizedHeight * 0.37)}" font-size="${nameFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(nameLine2)}</text>
|
||||
<text x="12" y="${Math.round(normalizedHeight * 0.49)}" font-size="${customerFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(String(customerName).slice(0, 40))}</text>
|
||||
<text x="12" y="${Math.round(normalizedHeight * 0.58)}" font-size="${serialFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(String(serial).slice(0, 42))}</text>
|
||||
<rect x="0" y="0" width="${textMaxWidth}" height="${normalizedHeight}" fill="none"/>
|
||||
</svg>`.trim()
|
||||
|
||||
// Alles zusammen compositen
|
||||
const final = await Sharp(base)
|
||||
const final = await Sharp({
|
||||
create: {
|
||||
width: normalizedWidth,
|
||||
height: normalizedHeight,
|
||||
channels: 3,
|
||||
background: { r: 255, g: 255, b: 255 },
|
||||
},
|
||||
})
|
||||
.composite([
|
||||
{ input: dataMatrixPng, top: 60, left: 20 },
|
||||
{ input: Buffer.from(textSvg), top: 0, left: 0 },
|
||||
{ input: dataMatrixPng, top: dmTop, left: dmLeft },
|
||||
])
|
||||
.png()
|
||||
.toBuffer()
|
||||
|
||||
fs.writeFileSync('label.png', final)
|
||||
|
||||
// Optional: Base64 zurückgeben (z.B. für API)
|
||||
const base64 = final.toString('base64')
|
||||
|
||||
return base64
|
||||
}*/
|
||||
return final.toString("base64")
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ import { s3 } from "./s3";
|
||||
import { secrets } from "./secrets";
|
||||
|
||||
// Drizzle schema
|
||||
import { vendors, accounts } from "../../db/schema";
|
||||
import { vendors, accounts, tenants } from "../../db/schema";
|
||||
import {eq} from "drizzle-orm";
|
||||
|
||||
let openai: OpenAI | null = null;
|
||||
@@ -86,12 +86,13 @@ const InstructionFormat = z.object({
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// MAIN FUNCTION – REPLACES SUPABASE VERSION
|
||||
// MAIN FUNCTION
|
||||
// ---------------------------------------------------------
|
||||
export const getInvoiceDataFromGPT = async function (
|
||||
server: FastifyInstance,
|
||||
file: any,
|
||||
tenantId: number
|
||||
tenantId: number,
|
||||
learningContext?: string
|
||||
) {
|
||||
await initOpenAi();
|
||||
|
||||
@@ -162,13 +163,22 @@ export const getInvoiceDataFromGPT = async function (
|
||||
.from(vendors)
|
||||
.where(eq(vendors.tenant,tenantId));
|
||||
|
||||
const [tenant] = await server.db
|
||||
.select({ accountChart: tenants.accountChart })
|
||||
.from(tenants)
|
||||
.where(eq(tenants.id, tenantId))
|
||||
.limit(1)
|
||||
|
||||
const activeAccountChart = tenant?.accountChart || "skr03"
|
||||
|
||||
const accountList = await server.db
|
||||
.select({
|
||||
id: accounts.id,
|
||||
label: accounts.label,
|
||||
number: accounts.number,
|
||||
})
|
||||
.from(accounts);
|
||||
.from(accounts)
|
||||
.where(eq(accounts.accountChart, activeAccountChart));
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// 4) GPT ANALYSIS
|
||||
@@ -188,8 +198,13 @@ export const getInvoiceDataFromGPT = async function (
|
||||
"You extract structured invoice data.\n\n" +
|
||||
`VENDORS: ${JSON.stringify(vendorList)}\n` +
|
||||
`ACCOUNTS: ${JSON.stringify(accountList)}\n\n` +
|
||||
(learningContext
|
||||
? `HISTORICAL_PATTERNS: ${learningContext}\n\n`
|
||||
: "") +
|
||||
"Match issuer by name to vendor.id.\n" +
|
||||
"Match invoice items to account id based on label/number.\n" +
|
||||
"Use historical patterns as soft hints for vendor/account/payment mapping.\n" +
|
||||
"Do not invent values when the invoice text contradicts the hints.\n" +
|
||||
"Convert dates to YYYY-MM-DD.\n" +
|
||||
"Keep invoice items in original order.\n",
|
||||
},
|
||||
|
||||
@@ -1,4 +1,42 @@
|
||||
import { FastifyInstance } from "fastify"
|
||||
import { historyitems } from "../../db/schema";
|
||||
|
||||
const HISTORY_ENTITY_LABELS: Record<string, string> = {
|
||||
customers: "Kunden",
|
||||
members: "Mitglieder",
|
||||
vendors: "Lieferanten",
|
||||
projects: "Projekte",
|
||||
plants: "Objekte",
|
||||
contacts: "Kontakte",
|
||||
inventoryitems: "Inventarartikel",
|
||||
customerinventoryitems: "Kundeninventar",
|
||||
products: "Artikel",
|
||||
profiles: "Mitarbeiter",
|
||||
absencerequests: "Abwesenheiten",
|
||||
events: "Termine",
|
||||
tasks: "Aufgaben",
|
||||
vehicles: "Fahrzeuge",
|
||||
costcentres: "Kostenstellen",
|
||||
ownaccounts: "zusätzliche Buchungskonten",
|
||||
documentboxes: "Dokumentenboxen",
|
||||
hourrates: "Stundensätze",
|
||||
services: "Leistungen",
|
||||
roles: "Rollen",
|
||||
checks: "Überprüfungen",
|
||||
spaces: "Lagerplätze",
|
||||
customerspaces: "Kundenlagerplätze",
|
||||
trackingtrips: "Fahrten",
|
||||
createddocuments: "Dokumente",
|
||||
inventoryitemgroups: "Inventarartikelgruppen",
|
||||
bankstatements: "Buchungen",
|
||||
incominginvoices: "Eingangsrechnungen",
|
||||
files: "Dateien",
|
||||
memberrelations: "Mitgliedsverhältnisse",
|
||||
}
|
||||
|
||||
export function getHistoryEntityLabel(entity: string) {
|
||||
return HISTORY_ENTITY_LABELS[entity] || entity
|
||||
}
|
||||
|
||||
export async function insertHistoryItem(
|
||||
server: FastifyInstance,
|
||||
@@ -13,15 +51,18 @@ export async function insertHistoryItem(
|
||||
text?: string
|
||||
}
|
||||
) {
|
||||
const entityLabel = getHistoryEntityLabel(params.entity)
|
||||
const textMap = {
|
||||
created: `Neuer Eintrag in ${params.entity} erstellt`,
|
||||
updated: `Eintrag in ${params.entity} geändert`,
|
||||
archived: `Eintrag in ${params.entity} archiviert`,
|
||||
deleted: `Eintrag in ${params.entity} gelöscht`
|
||||
created: `Neuer Eintrag in ${entityLabel} erstellt`,
|
||||
updated: `Eintrag in ${entityLabel} geändert`,
|
||||
unchanged: `Eintrag in ${entityLabel} unverändert`,
|
||||
archived: `Eintrag in ${entityLabel} archiviert`,
|
||||
deleted: `Eintrag in ${entityLabel} gelöscht`
|
||||
}
|
||||
|
||||
const columnMap: Record<string, string> = {
|
||||
customers: "customer",
|
||||
members: "customer",
|
||||
vendors: "vendor",
|
||||
projects: "project",
|
||||
plants: "plant",
|
||||
@@ -41,10 +82,15 @@ export async function insertHistoryItem(
|
||||
roles: "role",
|
||||
checks: "check",
|
||||
spaces: "space",
|
||||
customerspaces: "customerspace",
|
||||
customerinventoryitems: "customerinventoryitem",
|
||||
trackingtrips: "trackingtrip",
|
||||
createddocuments: "createddocument",
|
||||
inventoryitemgroups: "inventoryitemgroup",
|
||||
bankstatements: "bankstatement"
|
||||
bankstatements: "bankstatement",
|
||||
incominginvoices: "incomingInvoice",
|
||||
files: "file",
|
||||
memberrelations: "memberrelation",
|
||||
}
|
||||
|
||||
const fkColumn = columnMap[params.entity]
|
||||
@@ -53,18 +99,20 @@ export async function insertHistoryItem(
|
||||
return
|
||||
}
|
||||
|
||||
const stringifyHistoryValue = (value: any) => {
|
||||
if (value === undefined || value === null) return null
|
||||
return typeof value === "string" ? value : JSON.stringify(value)
|
||||
}
|
||||
|
||||
const entry = {
|
||||
tenant: params.tenant_id,
|
||||
created_by: params.created_by,
|
||||
createdBy: params.created_by,
|
||||
text: params.text || textMap[params.action],
|
||||
action: params.action,
|
||||
[fkColumn]: params.entityId,
|
||||
oldVal: params.oldVal ? JSON.stringify(params.oldVal) : null,
|
||||
newVal: params.newVal ? JSON.stringify(params.newVal) : null
|
||||
oldVal: stringifyHistoryValue(params.oldVal),
|
||||
newVal: stringifyHistoryValue(params.newVal)
|
||||
}
|
||||
|
||||
const { error } = await server.supabase.from("historyitems").insert([entry])
|
||||
if (error) { // @ts-ignore
|
||||
console.log(error)
|
||||
}
|
||||
await server.db.insert(historyitems).values(entry as any)
|
||||
}
|
||||
|
||||
@@ -2,6 +2,9 @@ import {PDFDocument, StandardFonts, rgb} from "pdf-lib"
|
||||
import dayjs from "dayjs"
|
||||
import {renderAsCurrency, splitStringBySpace} from "./stringRendering";
|
||||
import {FastifyInstance} from "fastify";
|
||||
import { GetObjectCommand } from "@aws-sdk/client-s3";
|
||||
import { s3 } from "./s3";
|
||||
import { secrets } from "./secrets";
|
||||
|
||||
const getCoordinatesForPDFLib = (x:number ,y:number, page:any) => {
|
||||
/*
|
||||
@@ -25,9 +28,21 @@ const getCoordinatesForPDFLib = (x:number ,y:number, page:any) => {
|
||||
|
||||
const getBackgroundSourceBuffer = async (server:FastifyInstance, path:string) => {
|
||||
|
||||
const {data:backgroundPDFData,error:backgroundPDFError} = await server.supabase.storage.from("files").download(path)
|
||||
console.log(path)
|
||||
|
||||
return backgroundPDFData.arrayBuffer()
|
||||
const { Body } = await s3.send(
|
||||
new GetObjectCommand({
|
||||
Bucket: secrets.S3_BUCKET,
|
||||
Key: path
|
||||
})
|
||||
)
|
||||
|
||||
const chunks: Buffer[] = []
|
||||
for await (const chunk of Body as any) {
|
||||
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk))
|
||||
}
|
||||
|
||||
return Buffer.concat(chunks)
|
||||
}
|
||||
|
||||
const getDuration = (time) => {
|
||||
|
||||
@@ -3,10 +3,14 @@ import {
|
||||
bankaccounts,
|
||||
bankrequisitions,
|
||||
bankstatements,
|
||||
entitybankaccounts,
|
||||
contacts,
|
||||
contracts,
|
||||
contracttypes,
|
||||
costcentres,
|
||||
createddocuments,
|
||||
customerinventoryitems,
|
||||
customerspaces,
|
||||
customers,
|
||||
files,
|
||||
filetags,
|
||||
@@ -16,6 +20,7 @@ import {
|
||||
inventoryitemgroups,
|
||||
inventoryitems,
|
||||
letterheads,
|
||||
memberrelations,
|
||||
ownaccounts,
|
||||
plants,
|
||||
productcategories,
|
||||
@@ -36,17 +41,28 @@ import {
|
||||
|
||||
export const resourceConfig = {
|
||||
projects: {
|
||||
searchColumns: ["name"],
|
||||
searchColumns: ["name","customerRef","projectNumber","notes"],
|
||||
mtoLoad: ["customer","plant","contract","projecttype"],
|
||||
mtmLoad: ["tasks", "files","createddocuments"],
|
||||
table: projects,
|
||||
numberRangeHolder: "projectNumber"
|
||||
},
|
||||
customers: {
|
||||
searchColumns: ["name", "customerNumber", "firstname", "lastname", "notes"],
|
||||
searchColumns: ["name", "nameAddition", "customerNumber", "firstname", "lastname", "notes"],
|
||||
mtmLoad: ["contacts","projects","plants","createddocuments","contracts","customerinventoryitems","customerspaces"],
|
||||
table: customers,
|
||||
numberRangeHolder: "customerNumber",
|
||||
},
|
||||
members: {
|
||||
searchColumns: ["name", "nameAddition", "customerNumber", "firstname", "lastname", "notes"],
|
||||
mtmLoad: ["contacts","projects","plants","createddocuments","contracts"],
|
||||
table: customers,
|
||||
numberRangeHolder: "customerNumber",
|
||||
relationKey: "customer",
|
||||
},
|
||||
memberrelations: {
|
||||
table: memberrelations,
|
||||
searchColumns: ["type", "billingInterval"],
|
||||
},
|
||||
contacts: {
|
||||
searchColumns: ["firstName", "lastName", "email", "phone", "notes"],
|
||||
@@ -55,11 +71,17 @@ export const resourceConfig = {
|
||||
},
|
||||
contracts: {
|
||||
table: contracts,
|
||||
searchColumns: ["name", "notes", "contractNumber", "paymentType", "sepaRef", "bankingName"],
|
||||
searchColumns: ["name", "notes", "contractNumber", "paymentType", "billingInterval", "sepaRef", "bankingName"],
|
||||
numberRangeHolder: "contractNumber",
|
||||
mtoLoad: ["customer", "contracttype"],
|
||||
},
|
||||
contracttypes: {
|
||||
table: contracttypes,
|
||||
searchColumns: ["name", "description", "paymentType", "billingInterval"],
|
||||
},
|
||||
plants: {
|
||||
table: plants,
|
||||
searchColumns: ["name"],
|
||||
mtoLoad: ["customer"],
|
||||
mtmLoad: ["projects","tasks","files"],
|
||||
},
|
||||
@@ -84,6 +106,12 @@ export const resourceConfig = {
|
||||
table: inventoryitems,
|
||||
numberRangeHolder: "articleNumber",
|
||||
},
|
||||
customerinventoryitems: {
|
||||
table: customerinventoryitems,
|
||||
numberRangeHolder: "customerInventoryId",
|
||||
mtoLoad: ["customer", "customerspace", "product", "vendor"],
|
||||
searchColumns: ["name", "customerInventoryId", "serialNumber", "description", "manufacturer", "manufacturerNumber"],
|
||||
},
|
||||
inventoryitemgroups: {
|
||||
table: inventoryitemgroups
|
||||
},
|
||||
@@ -118,6 +146,13 @@ export const resourceConfig = {
|
||||
searchColumns: ["name","space_number","type","info_data"],
|
||||
numberRangeHolder: "spaceNumber",
|
||||
},
|
||||
customerspaces: {
|
||||
table: customerspaces,
|
||||
searchColumns: ["name","space_number","type","info_data","description"],
|
||||
numberRangeHolder: "space_number",
|
||||
mtoLoad: ["customer"],
|
||||
mtmLoad: ["customerinventoryitems"],
|
||||
},
|
||||
ownaccounts: {
|
||||
table: ownaccounts,
|
||||
searchColumns: ["name","description","number"],
|
||||
@@ -168,6 +203,10 @@ export const resourceConfig = {
|
||||
bankrequisitions: {
|
||||
table: bankrequisitions,
|
||||
},
|
||||
entitybankaccounts: {
|
||||
table: entitybankaccounts,
|
||||
searchColumns: ["description"],
|
||||
},
|
||||
serialexecutions: {
|
||||
table: serialExecutions
|
||||
}
|
||||
|
||||
@@ -14,8 +14,6 @@ export let secrets = {
|
||||
PORT: number
|
||||
HOST: string
|
||||
DATABASE_URL: string
|
||||
SUPABASE_URL: string
|
||||
SUPABASE_SERVICE_ROLE_KEY: string
|
||||
S3_BUCKET: string
|
||||
ENCRYPTION_KEY: string
|
||||
MAILER_SMTP_HOST: string
|
||||
|
||||
74
backend/src/webdav/fill-file-sizes.ts
Normal file
74
backend/src/webdav/fill-file-sizes.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
// scripts/fill-file-sizes.ts
|
||||
import 'dotenv/config';
|
||||
import { db } from '../../db';
|
||||
import { files } from '../../db/schema';
|
||||
import { eq, isNull } from 'drizzle-orm';
|
||||
import { HeadObjectCommand } from "@aws-sdk/client-s3";
|
||||
import { s3, initS3 } from '../utils/s3';
|
||||
import { loadSecrets, secrets } from '../utils/secrets';
|
||||
|
||||
async function migrate() {
|
||||
console.log("🚀 Starte Migration der Dateigrößen...");
|
||||
|
||||
// 1. Setup
|
||||
await loadSecrets();
|
||||
await initS3();
|
||||
|
||||
// 2. Alle Dateien holen, die noch keine Größe haben (oder alle, um sicherzugehen)
|
||||
// Wir nehmen erstmal ALLE, um sicherzustellen, dass alles stimmt.
|
||||
const allFiles = await db.select().from(files);
|
||||
|
||||
console.log(`📦 ${allFiles.length} Dateien in der Datenbank gefunden.`);
|
||||
|
||||
let successCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
// 3. Loop durch alle Dateien
|
||||
for (const file of allFiles) {
|
||||
if (!file.path) {
|
||||
console.log(`⏭️ Überspringe Datei ${file.id} (Kein Pfad)`);
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
// S3 fragen (HeadObject lädt nur Metadaten, nicht die ganze Datei -> Schnell)
|
||||
const command = new HeadObjectCommand({
|
||||
Bucket: secrets.S3_BUCKET, // Oder secrets.S3_BUCKET_NAME je nach deiner Config
|
||||
Key: file.path
|
||||
});
|
||||
|
||||
const response = await s3.send(command);
|
||||
const size = response.ContentLength || 0;
|
||||
|
||||
// In DB speichern
|
||||
await db.update(files)
|
||||
.set({ size: size })
|
||||
.where(eq(files.id, file.id));
|
||||
|
||||
process.stdout.write("."); // Fortschrittsanzeige
|
||||
successCount++;
|
||||
|
||||
} catch (error: any) {
|
||||
process.stdout.write("X");
|
||||
// console.error(`\n❌ Fehler bei ${file.path}: ${error.name}`);
|
||||
|
||||
// Optional: Wenn Datei in S3 fehlt, könnten wir sie markieren oder loggen
|
||||
if (error.name === 'NotFound') {
|
||||
// console.error(` -> Datei existiert nicht im Bucket!`);
|
||||
}
|
||||
errorCount++;
|
||||
}
|
||||
}
|
||||
|
||||
console.log("\n\n------------------------------------------------");
|
||||
console.log(`✅ Fertig!`);
|
||||
console.log(`Updated: ${successCount}`);
|
||||
console.log(`Fehler: ${errorCount} (Meistens Dateien, die im Bucket fehlen)`);
|
||||
console.log("------------------------------------------------");
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
migrate().catch(err => {
|
||||
console.error("Fataler Fehler:", err);
|
||||
process.exit(1);
|
||||
});
|
||||
200
backend/src/webdav/server.ts
Normal file
200
backend/src/webdav/server.ts
Normal file
@@ -0,0 +1,200 @@
|
||||
import 'dotenv/config';
|
||||
import { v2 as webdav } from 'webdav-server';
|
||||
import { db } from '../../db';
|
||||
import { tenants, files, folders } from '../../db/schema';
|
||||
import { Readable } from 'stream';
|
||||
import { GetObjectCommand, HeadObjectCommand } from "@aws-sdk/client-s3";
|
||||
import { s3, initS3 } from '../utils/s3';
|
||||
import { secrets, loadSecrets } from '../utils/secrets';
|
||||
|
||||
// ============================================================================
|
||||
// 1. SETUP
|
||||
// ============================================================================
|
||||
|
||||
const userManager = new webdav.SimpleUserManager();
|
||||
const user = userManager.addUser('admin', 'admin', true);
|
||||
|
||||
const privilegeManager = new webdav.SimplePathPrivilegeManager();
|
||||
privilegeManager.setRights(user, '/', [ 'all' ]);
|
||||
|
||||
const server = new webdav.WebDAVServer({
|
||||
httpAuthentication: new webdav.HTTPDigestAuthentication(userManager, 'Default realm'),
|
||||
privilegeManager: privilegeManager,
|
||||
port: 3200,
|
||||
headers: {
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
'Access-Control-Allow-Methods': 'GET,POST,PUT,DELETE,OPTIONS,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK',
|
||||
'Access-Control-Allow-Headers': 'Authorization, Content-Type, Depth, User-Agent, X-Expected-Entity-Length, If-Modified-Since, Cache-Control, Range, Overwrite, Destination',
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// 2. CACHE
|
||||
// ============================================================================
|
||||
|
||||
const pathToS3KeyMap = new Map<string, string>();
|
||||
const pathToSizeMap = new Map<string, number>();
|
||||
|
||||
// ============================================================================
|
||||
// 3. LOGIC
|
||||
// ============================================================================
|
||||
|
||||
async function startServer() {
|
||||
console.log('------------------------------------------------');
|
||||
console.log('[WebDAV] Starte Server (Filtered Mode)...');
|
||||
|
||||
try {
|
||||
await loadSecrets();
|
||||
await initS3();
|
||||
console.log('[WebDAV] S3 Verbindung OK.');
|
||||
|
||||
console.log('[WebDAV] Lade Datenbank...');
|
||||
const allTenants = await db.select().from(tenants);
|
||||
const allFolders = await db.select().from(folders);
|
||||
const allFiles = await db.select().from(files);
|
||||
|
||||
// Zähler für Statistik
|
||||
let hiddenFilesCount = 0;
|
||||
|
||||
// --------------------------------------------------------------------
|
||||
// BUILDER
|
||||
// --------------------------------------------------------------------
|
||||
const buildFolderContent = (tenantId: string, parentFolderId: string | null, currentWebDavPath: string) => {
|
||||
const currentDir: any = {};
|
||||
|
||||
// 1. UNTERORDNER
|
||||
const subFolders = allFolders.filter(f => f.tenant === tenantId && f.parent === parentFolderId);
|
||||
subFolders.forEach(folder => {
|
||||
const folderName = folder.name.replace(/\//g, '-');
|
||||
const nextPath = `${currentWebDavPath}/${folderName}`;
|
||||
currentDir[folderName] = buildFolderContent(tenantId, folder.id, nextPath);
|
||||
});
|
||||
|
||||
// 2. DATEIEN
|
||||
//@ts-ignore
|
||||
const dirFiles = allFiles.filter(f => f.tenant === tenantId && f.folder === parentFolderId);
|
||||
|
||||
dirFiles.forEach(file => {
|
||||
// ============================================================
|
||||
// ❌ FILTER: DATEIEN OHNE GRÖSSE AUSBLENDEN
|
||||
// ============================================================
|
||||
const fileSize = Number(file.size || 0);
|
||||
|
||||
if (fileSize <= 0) {
|
||||
// Datei überspringen, wenn 0 Bytes oder null
|
||||
hiddenFilesCount++;
|
||||
return;
|
||||
}
|
||||
// ============================================================
|
||||
|
||||
// Name bestimmen
|
||||
let fileName = 'Unbenannt';
|
||||
if (file.path) fileName = file.path.split('/').pop() || 'Unbenannt';
|
||||
else if (file.name) fileName = file.name;
|
||||
|
||||
// A) Eintrag im WebDAV
|
||||
currentDir[fileName] = `Ref: ${file.id}`;
|
||||
|
||||
// B) Maps füllen
|
||||
const webDavFullPath = `${currentWebDavPath}/${fileName}`;
|
||||
|
||||
if (file.path) {
|
||||
pathToS3KeyMap.set(webDavFullPath, file.path);
|
||||
}
|
||||
|
||||
// C) Größe setzen (wir wissen jetzt sicher, dass sie > 0 ist)
|
||||
pathToSizeMap.set(webDavFullPath, fileSize);
|
||||
});
|
||||
|
||||
return currentDir;
|
||||
};
|
||||
|
||||
// --------------------------------------------------------------------
|
||||
// BAUM ZUSAMMENSETZEN
|
||||
// --------------------------------------------------------------------
|
||||
const dbTree: any = {};
|
||||
|
||||
allTenants.forEach(tenant => {
|
||||
const tName = tenant.name.replace(/\//g, '-');
|
||||
const rootPath = `/${tName}`;
|
||||
//@ts-ignore
|
||||
const content = buildFolderContent(tenant.id, null, rootPath);
|
||||
|
||||
// Leere Ordner Hinweis (optional)
|
||||
if (Object.keys(content).length === 0) {
|
||||
content['(Leer).txt'] = 'Keine gültigen Dateien vorhanden.';
|
||||
}
|
||||
dbTree[tName] = content;
|
||||
});
|
||||
|
||||
if (Object.keys(dbTree).length === 0) {
|
||||
dbTree['Status.txt'] = 'Datenbank leer.';
|
||||
}
|
||||
|
||||
// --------------------------------------------------------------------
|
||||
// REGISTRIEREN
|
||||
// --------------------------------------------------------------------
|
||||
const rootFS = server.rootFileSystem();
|
||||
//@ts-ignore
|
||||
rootFS.addSubTree(server.createExternalContext(), dbTree);
|
||||
|
||||
// ====================================================================
|
||||
// OVERRIDE 1: DOWNLOAD
|
||||
// ====================================================================
|
||||
(rootFS as any)._openReadStream = async (path: webdav.Path, ctx: any, callback: any) => {
|
||||
const p = path.toString();
|
||||
const s3Key = pathToS3KeyMap.get(p);
|
||||
|
||||
if (s3Key) {
|
||||
try {
|
||||
const command = new GetObjectCommand({ Bucket: secrets.S3_BUCKET, Key: s3Key });
|
||||
const response = await s3.send(command);
|
||||
if (response.Body) return callback(null, response.Body as Readable);
|
||||
} catch (e: any) {
|
||||
console.error(`[S3 ERROR] ${e.message}`);
|
||||
return callback(null, Readable.from([`Error: ${e.message}`]));
|
||||
}
|
||||
}
|
||||
return callback(null, Readable.from(['System File']));
|
||||
};
|
||||
|
||||
// ====================================================================
|
||||
// OVERRIDE 2: SIZE
|
||||
// ====================================================================
|
||||
(rootFS as any)._size = async (path: webdav.Path, ctx: any, callback: any) => {
|
||||
const p = path.toString();
|
||||
const cachedSize = pathToSizeMap.get(p);
|
||||
|
||||
if (cachedSize !== undefined) return callback(null, cachedSize);
|
||||
|
||||
// Fallback S3 Check (sollte durch Filter kaum noch vorkommen)
|
||||
const s3Key = pathToS3KeyMap.get(p);
|
||||
if (s3Key) {
|
||||
try {
|
||||
const command = new HeadObjectCommand({ Bucket: secrets.S3_BUCKET, Key: s3Key });
|
||||
const response = await s3.send(command);
|
||||
const realSize = response.ContentLength || 0;
|
||||
pathToSizeMap.set(p, realSize);
|
||||
return callback(null, realSize);
|
||||
} catch (e) {
|
||||
return callback(null, 0);
|
||||
}
|
||||
}
|
||||
return callback(null, 0);
|
||||
};
|
||||
|
||||
// --------------------------------------------------------------------
|
||||
// START
|
||||
// --------------------------------------------------------------------
|
||||
server.start(() => {
|
||||
console.log('[WebDAV] 🚀 READY auf http://localhost:3200');
|
||||
console.log(`[WebDAV] Sichtbare Dateien: ${pathToS3KeyMap.size}`);
|
||||
console.log(`[WebDAV] Ausgeblendet (0 Bytes): ${hiddenFilesCount}`);
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
console.error('[WebDAV] 💥 ERROR:', error);
|
||||
}
|
||||
}
|
||||
|
||||
startServer();
|
||||
@@ -1,37 +1,70 @@
|
||||
version: "3"
|
||||
|
||||
services:
|
||||
web:
|
||||
image: reg.federspiel.software/fedeo/software:beta
|
||||
frontend:
|
||||
image: git.federspiel.tech/flfeders/fedeo/frontend:dev
|
||||
restart: always
|
||||
environment:
|
||||
- INFISICAL_CLIENT_ID=abc
|
||||
- INFISICAL_CLIENT_SECRET=abc
|
||||
- NUXT_PUBLIC_API_BASE=https://app.fedeo.de/backend
|
||||
- NUXT_PUBLIC_PDF_LICENSE=eyJkYXRhIjoiZXlKMElqb2laR1YyWld4dmNHVnlJaXdpWVhaMUlqb3hOemt3TmpNNU9UazVMQ0prYlNJNkltRndjQzVtWldSbGJ5NWtaU0lzSW00aU9pSXpOemt3Wm1Vek5UazBZbVU0TlRRNElpd2laWGh3SWpveE56a3dOak01T1RrNUxDSmtiWFFpT2lKemNHVmphV1pwWXlJc0luQWlPaUoyYVdWM1pYSWlmUT09Iiwic2lnbmF0dXJlIjoicWU4K0ZxQUJDNUp5bEJUU094Vkd5RTJMbk9UNmpyc2EyRStsN2tNNWhkM21KK2ZvVjYwaTFKeFdhZGtqSDRNWXZxQklMc0dpdWh5d2pMbUFjRHZuWGxOcTRMcXFLRm53dzVtaG1LK3lTeDRXbzVaS1loK1VZdFBzWUZjV3oyUHVGMmJraGJrVjJ6RzRlTGtRU09wdmJKY3JUZU1rN0N1VkN6Q1UraHF5T0ZVVXllWnRmaHlmcWswZEFFL0RMR1hvTDFSQXFjNkNkYU9FTDRTdC9Idy9DQnFieTE2aisvT3RxQUlLcy9NWTR6SVk3RTI3bWo4RUx5VjhXNkdXNXhqc0VUVzNKN0RRMUVlb3RhVlNLT29kc3pVRlhUYzVlbHVuSm04ZlcwM1ErMUhtSnpmWGoyS1dwM1dnamJDazZYSHozamFML2lOdUYvZFZNaWYvc2FoR3NnPT0ifQ==
|
||||
networks:
|
||||
- traefik
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.docker.network=traefik"
|
||||
- "traefik.port=3000"
|
||||
# Middlewares
|
||||
- "traefik.http.middlewares.fedeo-frontend-redirect-web-secure.redirectscheme.scheme=https"
|
||||
# Web Entrypoint
|
||||
- "traefik.http.routers.fedeo-frontend.middlewares=fedeo-frontend-redirect-web-secure"
|
||||
- "traefik.http.routers.fedeo-frontend.rule=Host(`app.fedeo.de`) && PathPrefix(`/`)"
|
||||
- "traefik.http.routers.fedeo-frontend.entrypoints=web"
|
||||
# Web Secure Entrypoint
|
||||
- "traefik.http.routers.fedeo-frontend-secure.rule=Host(`app.fedeo.de`) && PathPrefix(`/`)"
|
||||
- "traefik.http.routers.fedeo-frontend-secure.entrypoints=web-secured" #
|
||||
- "traefik.http.routers.fedeo-frontend-secure.tls.certresolver=mytlschallenge"
|
||||
backend:
|
||||
image: reg.federspiel.software/fedeo/backend:main
|
||||
image: git.federspiel.tech/flfeders/fedeo/backend:dev
|
||||
restart: always
|
||||
environment:
|
||||
- NUXT_PUBLIC_API_BASE=
|
||||
- NUXT_PUBLIC_PDF_LICENSE=
|
||||
db:
|
||||
image: postgres
|
||||
restart: always
|
||||
shm_size: 128mb
|
||||
environment:
|
||||
POSTGRES_PASSWORD: abc
|
||||
POSTGRES_USER: sandelcom
|
||||
POSTGRES_DB: sensorfy
|
||||
volumes:
|
||||
- ./pg-data:/var/lib/postgresql/data
|
||||
ports:
|
||||
- "5432:5432"
|
||||
- INFISICAL_CLIENT_ID=a6838bd6-9983-4bf4-9be2-ace830b9abdf
|
||||
- INFISICAL_CLIENT_SECRET=4e3441acc0adbffd324aa50e668a95a556a3f55ec6bb85954e176e35a3392003
|
||||
- NODE_ENV=production
|
||||
networks:
|
||||
- traefik
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.docker.network=traefik"
|
||||
- "traefik.port=3100"
|
||||
# Middlewares
|
||||
- "traefik.http.middlewares.fedeo-backend-redirect-web-secure.redirectscheme.scheme=https"
|
||||
- "traefik.http.middlewares.fedeo-backend-strip.stripprefix.prefixes=/backend"
|
||||
# Web Entrypoint
|
||||
- "traefik.http.routers.fedeo-backend.middlewares=fedeo-backend-redirect-web-secure"
|
||||
- "traefik.http.routers.fedeo-backend.rule=Host(`app.fedeo.de`) && PathPrefix(`/backend`)"
|
||||
- "traefik.http.routers.fedeo-backend.entrypoints=web"
|
||||
# Web Secure Entrypoint
|
||||
- "traefik.http.routers.fedeo-backend-secure.rule=Host(`app.fedeo.de`) && PathPrefix(`/backend`)"
|
||||
- "traefik.http.routers.fedeo-backend-secure.entrypoints=web-secured" #
|
||||
- "traefik.http.routers.fedeo-backend-secure.tls.certresolver=mytlschallenge"
|
||||
- "traefik.http.routers.fedeo-backend-secure.middlewares=fedeo-backend-strip"
|
||||
# db:
|
||||
# image: postgres
|
||||
# restart: always
|
||||
# shm_size: 128mb
|
||||
# environment:
|
||||
# POSTGRES_PASSWORD: abc
|
||||
# POSTGRES_USER: sandelcom
|
||||
# POSTGRES_DB: sensorfy
|
||||
# volumes:
|
||||
# - ./pg-data:/var/lib/postgresql/data
|
||||
# ports:
|
||||
# - "5432:5432"
|
||||
traefik:
|
||||
image: traefik:v2.2
|
||||
image: traefik:v2.11
|
||||
restart: unless-stopped
|
||||
container_name: traefik
|
||||
command:
|
||||
- "--api.insecure=false"
|
||||
- "--api.dashboard=true"
|
||||
- "--api.dashboard=false"
|
||||
- "--api.debug=false"
|
||||
- "--providers.docker=true"
|
||||
- "--providers.docker.exposedbydefault=false"
|
||||
@@ -43,19 +76,18 @@ services:
|
||||
- "--accesslog.bufferingsize=5000"
|
||||
- "--accesslog.fields.defaultMode=keep"
|
||||
- "--accesslog.fields.headers.defaultMode=keep"
|
||||
- "--certificatesresolvers.mytlschallenge.acme.tlschallenge=true" # <== Enable TLS-ALPN-01 to generate and renew ACME certs
|
||||
- "--certificatesresolvers.mytlschallenge.acme.email=info@sandelcom.de" # <== Setting email for certs
|
||||
- "--certificatesresolvers.mytlschallenge.acme.storage=/letsencrypt/acme.json" # <== Defining acme file to store cert information
|
||||
- "--certificatesresolvers.mytlschallenge.acme.tlschallenge=true" #
|
||||
- "--certificatesresolvers.mytlschallenge.acme.email=moin@fedeo.de"
|
||||
- "--certificatesresolvers.mytlschallenge.acme.storage=/letsencrypt/acme.json"
|
||||
ports:
|
||||
- 80:80
|
||||
- 8080:8080
|
||||
- 443:443
|
||||
volumes:
|
||||
- "./traefik/letsencrypt:/letsencrypt" # <== Volume for certs (TLS)
|
||||
- "/var/run/docker.sock:/var/run/docker.sock:ro"
|
||||
- "./traefik/logs:/logs"
|
||||
labels:
|
||||
#### Labels define the behavior and rules of the traefik proxy for this container ####
|
||||
- "traefik.enable=true" # <== Enable traefik on itself to view dashboard and assign subdomain to view it
|
||||
- "traefik.http.routers.api.rule=Host(`srv1.drinkingteam.de`)" # <== Setting the domain for the dashboard
|
||||
- "traefik.http.routers.api.service=api@internal" # <== Enabling the api to be a service to access
|
||||
networks:
|
||||
- traefik
|
||||
networks:
|
||||
traefik:
|
||||
external: false
|
||||
@@ -1,14 +1,30 @@
|
||||
FROM node:20-alpine
|
||||
# --- Stage 1: Build ---
|
||||
FROM node:20-alpine AS builder
|
||||
|
||||
RUN mkdir -p /usr/src/nuxt-app
|
||||
WORKDIR /usr/src/nuxt-app
|
||||
COPY . .
|
||||
|
||||
RUN npm i
|
||||
# Nur Files kopieren, die für die Installation nötig sind (besseres Caching)
|
||||
COPY package*.json ./
|
||||
RUN npm install
|
||||
|
||||
# Restlichen Code kopieren und bauen
|
||||
COPY . .
|
||||
RUN npm run build
|
||||
|
||||
# --- Stage 2: Runtime ---
|
||||
FROM node:20-alpine AS runner
|
||||
|
||||
WORKDIR /usr/src/nuxt-app
|
||||
|
||||
# Von der Build-Stage NUR den fertigen .output Ordner kopieren
|
||||
COPY --from=builder /usr/src/nuxt-app/.output ./.output
|
||||
|
||||
# Optional: Falls du statische Dateien aus public brauchst,
|
||||
# sind diese normalerweise bereits in .output/public enthalten.
|
||||
|
||||
ENV NUXT_HOST=0.0.0.0
|
||||
ENV NUXT_PORT=3000
|
||||
ENV NODE_ENV=production
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user