Compare commits

..

8 Commits

Author SHA1 Message Date
e7554fa2cc .gitea/ISSUE_TEMPLATE/feature_request.md aktualisiert
All checks were successful
Build and Push Docker Images / build-backend (push) Successful in 16s
Build and Push Docker Images / build-frontend (push) Successful in 16s
2026-01-22 17:42:28 +00:00
7c1fabf58a .gitea/ISSUE_TEMPLATE/bug_report.md aktualisiert
All checks were successful
Build and Push Docker Images / build-backend (push) Successful in 17s
Build and Push Docker Images / build-frontend (push) Successful in 16s
2026-01-22 17:40:58 +00:00
1203b6cbd1 .gitea/ISSUE_TEMPLATE/bug_report.md aktualisiert
All checks were successful
Build and Push Docker Images / build-backend (push) Successful in 16s
Build and Push Docker Images / build-frontend (push) Successful in 16s
2026-01-15 11:39:03 +00:00
525f2906fb .gitea/ISSUE_TEMPLATE/feature_request.md aktualisiert
Some checks failed
Build and Push Docker Images / build-frontend (push) Has been cancelled
Build and Push Docker Images / build-backend (push) Has been cancelled
2026-01-15 11:38:50 +00:00
b105382abf .gitea/ISSUE_TEMPLATE/bug_report.md aktualisiert
All checks were successful
Build and Push Docker Images / build-backend (push) Successful in 16s
Build and Push Docker Images / build-frontend (push) Successful in 15s
2026-01-15 11:38:00 +00:00
b1cdec7d17 Merge pull request 'Added feature request template' (#62) from dev into main
All checks were successful
Build and Push Docker Images / build-backend (push) Successful in 17s
Build and Push Docker Images / build-frontend (push) Successful in 16s
Reviewed-on: #62
2026-01-15 11:31:57 +00:00
f1d512b2e5 Merge pull request 'dev' (#61) from dev into main
All checks were successful
Build and Push Docker Images / build-backend (push) Successful in 17s
Build and Push Docker Images / build-frontend (push) Successful in 16s
Reviewed-on: #61
2026-01-15 11:29:15 +00:00
db21b43120 Merge pull request 'dev' (#40) from dev into main
All checks were successful
Build and Push Docker Images / build-backend (push) Successful in 18s
Build and Push Docker Images / build-frontend (push) Successful in 16s
Reviewed-on: #40
2026-01-08 22:21:06 +00:00
233 changed files with 4996 additions and 33899 deletions

View File

@@ -2,37 +2,18 @@
name: 🐛 Bug Report
about: Erstelle einen Bericht, um uns zu helfen, das Projekt zu verbessern.
title: '[BUG] '
labels: bug
labels: Problem
assignees: ''
---
**Beschreibung**
Eine klare und prägnante Beschreibung des Fehlers.
**Reproduktion**
Schritte, um den Fehler zu reproduzieren:
Entweder:
1. Gehe zu '...'
2. Klicke auf '...'
3. Scrolle runter zu '...'
4. Siehe Fehler
Oder Link zur Seite
**Erwartetes Verhalten**
Eine klare Beschreibung dessen, was du erwartet hast.
**Screenshots**
Falls zutreffend, füge hier Screenshots oder Gifs hinzu, um das Problem zu verdeutlichen.
**Achtung: Achte bitte auf Datenschutz deiner Daten sowie der Daten deiner Kunden. Sollten ein Screenshot nur mit Daten möglich sein, schwärze diese bitte vor dem Upload.**
**Umgebung:**
- Betriebssystem: [z.B. Windows, macOS, Linux]
- Browser / Version (falls relevant): [z.B. Chrome 120]
- Projekt-Version: [z.B. v1.0.2]
**Zusätzlicher Kontext**
Füge hier alle anderen Informationen zum Problem hinzu.

View File

@@ -2,19 +2,16 @@
name: ✨ Feature Request
about: Schlage eine Idee für dieses Projekt vor.
title: '[FEATURE] '
labels: enhancement
labels: Funktionswunsch
assignees: ''
---
**Ist dein Feature-Wunsch mit einem Problem verbunden?**
Eine klare Beschreibung des Problems (z.B. "Ich bin immer genervt, wenn...").
**Lösungsvorschlag**
Eine klare Beschreibung dessen, was du dir wünschst und wie es funktionieren soll.
**Alternativen**
Hast du über alternative Lösungen oder Workarounds nachgedacht?
**Zusätzlicher Kontext**
Hier ist Platz für weitere Informationen, Skizzen oder Beispiele von anderen Tools.

View File

@@ -1,3 +0,0 @@
{
"rules": []
}

View File

@@ -1,27 +1,13 @@
// src/db/index.ts
import { drizzle } from "drizzle-orm/node-postgres";
import { Pool } from "pg";
import * as schema from "./schema";
import { drizzle } from "drizzle-orm/node-postgres"
import { Pool } from "pg"
import {secrets} from "../src/utils/secrets";
import * as schema from "./schema"
console.log("[DB INIT] 1. Suche Connection String...");
// Checken woher die URL kommt
let connectionString = process.env.DATABASE_URL || secrets.DATABASE_URL;
if (connectionString) {
console.log("[DB INIT] -> Gefunden in process.env.DATABASE_URL");
} else {
console.error("[DB INIT] ❌ KEIN CONNECTION STRING GEFUNDEN! .env nicht geladen?");
}
export const pool = new Pool({
connectionString,
max: 10,
});
connectionString: secrets.DATABASE_URL,
max: 10, // je nach Last
})
// TEST: Ist die DB wirklich da?
pool.query('SELECT NOW()')
.then(res => console.log(`[DB INIT] ✅ VERBINDUNG ERFOLGREICH! Zeit auf DB: ${res.rows[0].now}`))
.catch(err => console.error(`[DB INIT] ❌ VERBINDUNGSFEHLER:`, err.message));
export const db = drizzle(pool, { schema });
export const db = drizzle(pool , {schema})

View File

@@ -1,2 +0,0 @@
-- No-op migration: Datei war im Journal referenziert, aber fehlte im Repository.
SELECT 1;

View File

@@ -1,2 +0,0 @@
-- No-op migration: Datei war im Journal referenziert, aber fehlte im Repository.
SELECT 1;

View File

@@ -1,123 +0,0 @@
CREATE TABLE "m2m_api_keys" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"updated_at" timestamp with time zone DEFAULT now() NOT NULL,
"tenant_id" bigint NOT NULL,
"user_id" uuid NOT NULL,
"created_by" uuid,
"name" text NOT NULL,
"key_prefix" text NOT NULL,
"key_hash" text NOT NULL,
"active" boolean DEFAULT true NOT NULL,
"last_used_at" timestamp with time zone,
"expires_at" timestamp with time zone,
CONSTRAINT "m2m_api_keys_key_hash_unique" UNIQUE("key_hash")
);
--> statement-breakpoint
CREATE TABLE "staff_time_events" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"tenant_id" bigint NOT NULL,
"user_id" uuid NOT NULL,
"actor_type" text NOT NULL,
"actor_user_id" uuid,
"event_time" timestamp with time zone NOT NULL,
"event_type" text NOT NULL,
"source" text NOT NULL,
"invalidates_event_id" uuid,
"related_event_id" uuid,
"metadata" jsonb,
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
CONSTRAINT "time_events_actor_user_check" CHECK (
(actor_type = 'system' AND actor_user_id IS NULL)
OR
(actor_type = 'user' AND actor_user_id IS NOT NULL)
)
);
--> statement-breakpoint
CREATE TABLE "serialtypes" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "serialtypes_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"name" text NOT NULL,
"intervall" text,
"icon" text,
"tenant" bigint NOT NULL,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
CREATE TABLE "serial_executions" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"tenant" bigint NOT NULL,
"execution_date" timestamp NOT NULL,
"status" text DEFAULT 'draft',
"created_by" text,
"created_at" timestamp DEFAULT now(),
"summary" text
);
--> statement-breakpoint
CREATE TABLE "public_links" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"token" text NOT NULL,
"tenant" integer NOT NULL,
"default_profile" uuid,
"is_protected" boolean DEFAULT false NOT NULL,
"pin_hash" text,
"config" jsonb DEFAULT '{}'::jsonb,
"name" text NOT NULL,
"description" text,
"active" boolean DEFAULT true NOT NULL,
"created_at" timestamp DEFAULT now(),
"updated_at" timestamp DEFAULT now(),
CONSTRAINT "public_links_token_unique" UNIQUE("token")
);
--> statement-breakpoint
CREATE TABLE "wiki_pages" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"tenant_id" bigint NOT NULL,
"parent_id" uuid,
"title" text NOT NULL,
"content" jsonb,
"is_folder" boolean DEFAULT false NOT NULL,
"sort_order" integer DEFAULT 0 NOT NULL,
"entity_type" text,
"entity_id" bigint,
"entity_uuid" uuid,
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"updated_at" timestamp with time zone,
"created_by" uuid,
"updated_by" uuid
);
--> statement-breakpoint
ALTER TABLE "time_events" DISABLE ROW LEVEL SECURITY;--> statement-breakpoint
DROP TABLE "time_events" CASCADE;--> statement-breakpoint
ALTER TABLE "projects" ALTER COLUMN "active_phase" SET DEFAULT 'Erstkontakt';--> statement-breakpoint
ALTER TABLE "createddocuments" ADD COLUMN "serialexecution" uuid;--> statement-breakpoint
ALTER TABLE "devices" ADD COLUMN "last_seen" timestamp with time zone;--> statement-breakpoint
ALTER TABLE "devices" ADD COLUMN "last_debug_info" jsonb;--> statement-breakpoint
ALTER TABLE "files" ADD COLUMN "size" bigint;--> statement-breakpoint
ALTER TABLE "m2m_api_keys" ADD CONSTRAINT "m2m_api_keys_tenant_id_tenants_id_fk" FOREIGN KEY ("tenant_id") REFERENCES "public"."tenants"("id") ON DELETE cascade ON UPDATE cascade;--> statement-breakpoint
ALTER TABLE "m2m_api_keys" ADD CONSTRAINT "m2m_api_keys_user_id_auth_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."auth_users"("id") ON DELETE cascade ON UPDATE cascade;--> statement-breakpoint
ALTER TABLE "m2m_api_keys" ADD CONSTRAINT "m2m_api_keys_created_by_auth_users_id_fk" FOREIGN KEY ("created_by") REFERENCES "public"."auth_users"("id") ON DELETE set null ON UPDATE cascade;--> statement-breakpoint
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_tenant_id_tenants_id_fk" FOREIGN KEY ("tenant_id") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_user_id_auth_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_actor_user_id_auth_users_id_fk" FOREIGN KEY ("actor_user_id") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_invalidates_event_id_staff_time_events_id_fk" FOREIGN KEY ("invalidates_event_id") REFERENCES "public"."staff_time_events"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_related_event_id_staff_time_events_id_fk" FOREIGN KEY ("related_event_id") REFERENCES "public"."staff_time_events"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "serialtypes" ADD CONSTRAINT "serialtypes_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "serialtypes" ADD CONSTRAINT "serialtypes_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "serial_executions" ADD CONSTRAINT "serial_executions_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "public_links" ADD CONSTRAINT "public_links_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "public_links" ADD CONSTRAINT "public_links_default_profile_auth_profiles_id_fk" FOREIGN KEY ("default_profile") REFERENCES "public"."auth_profiles"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_tenant_id_tenants_id_fk" FOREIGN KEY ("tenant_id") REFERENCES "public"."tenants"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_parent_id_wiki_pages_id_fk" FOREIGN KEY ("parent_id") REFERENCES "public"."wiki_pages"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_created_by_auth_users_id_fk" FOREIGN KEY ("created_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "idx_time_events_tenant_user_time" ON "staff_time_events" USING btree ("tenant_id","user_id","event_time");--> statement-breakpoint
CREATE INDEX "idx_time_events_created_at" ON "staff_time_events" USING btree ("created_at");--> statement-breakpoint
CREATE INDEX "idx_time_events_invalidates" ON "staff_time_events" USING btree ("invalidates_event_id");--> statement-breakpoint
CREATE INDEX "wiki_pages_tenant_idx" ON "wiki_pages" USING btree ("tenant_id");--> statement-breakpoint
CREATE INDEX "wiki_pages_parent_idx" ON "wiki_pages" USING btree ("parent_id");--> statement-breakpoint
CREATE INDEX "wiki_pages_entity_int_idx" ON "wiki_pages" USING btree ("tenant_id","entity_type","entity_id");--> statement-breakpoint
CREATE INDEX "wiki_pages_entity_uuid_idx" ON "wiki_pages" USING btree ("tenant_id","entity_type","entity_uuid");--> statement-breakpoint
ALTER TABLE "createddocuments" ADD CONSTRAINT "createddocuments_serialexecution_serial_executions_id_fk" FOREIGN KEY ("serialexecution") REFERENCES "public"."serial_executions"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1 +0,0 @@
ALTER TABLE "services" ADD COLUMN "priceUpdateLocked" boolean DEFAULT false NOT NULL;

View File

@@ -1 +0,0 @@
ALTER TABLE "customers" ADD COLUMN "customTaxType" text;

View File

@@ -1,16 +0,0 @@
CREATE TABLE "contracttypes" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "contracttypes_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"tenant" bigint NOT NULL,
"name" text NOT NULL,
"description" text,
"paymentType" text,
"recurring" boolean DEFAULT false NOT NULL,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1,3 +0,0 @@
ALTER TABLE "contracts" ADD COLUMN "contracttype" bigint;
--> statement-breakpoint
ALTER TABLE "contracts" ADD CONSTRAINT "contracts_contracttype_contracttypes_id_fk" FOREIGN KEY ("contracttype") REFERENCES "public"."contracttypes"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1,3 +0,0 @@
ALTER TABLE "contracttypes" ADD COLUMN "billingInterval" text;
--> statement-breakpoint
ALTER TABLE "contracts" ADD COLUMN "billingInterval" text;

View File

@@ -1,16 +0,0 @@
CREATE TABLE "entitybankaccounts" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "entitybankaccounts_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"tenant" bigint NOT NULL,
"iban_encrypted" jsonb NOT NULL,
"bic_encrypted" jsonb NOT NULL,
"bank_name_encrypted" jsonb NOT NULL,
"description" text,
"updated_at" timestamp with time zone,
"updated_by" uuid,
"archived" boolean DEFAULT false NOT NULL
);
--> statement-breakpoint
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1,73 +0,0 @@
CREATE TABLE "customerspaces" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerspaces_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"name" text NOT NULL,
"type" text NOT NULL,
"tenant" bigint NOT NULL,
"customer" bigint NOT NULL,
"spaceNumber" text NOT NULL,
"parentSpace" bigint,
"infoData" jsonb DEFAULT '{"zip":"","city":"","streetNumber":""}'::jsonb NOT NULL,
"description" text,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
CREATE TABLE "customerinventoryitems" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerinventoryitems_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"name" text NOT NULL,
"description" text,
"tenant" bigint NOT NULL,
"customer" bigint NOT NULL,
"customerspace" bigint,
"customerInventoryId" text NOT NULL,
"serialNumber" text,
"quantity" bigint DEFAULT 0 NOT NULL,
"manufacturer" text,
"manufacturerNumber" text,
"purchaseDate" date,
"purchasePrice" double precision DEFAULT 0,
"currentValue" double precision,
"product" bigint,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_parentSpace_customerspaces_id_fk" FOREIGN KEY ("parentSpace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_product_products_id_fk" FOREIGN KEY ("product") REFERENCES "public"."products"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
CREATE UNIQUE INDEX "customerinventoryitems_tenant_customerInventoryId_idx" ON "customerinventoryitems" USING btree ("tenant","customerInventoryId");
--> statement-breakpoint
ALTER TABLE "historyitems" ADD COLUMN "customerspace" bigint;
--> statement-breakpoint
ALTER TABLE "historyitems" ADD COLUMN "customerinventoryitem" bigint;
--> statement-breakpoint
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerinventoryitem_customerinventoryitems_id_fk" FOREIGN KEY ("customerinventoryitem") REFERENCES "public"."customerinventoryitems"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "tenants" ALTER COLUMN "numberRanges" SET DEFAULT '{"vendors":{"prefix":"","suffix":"","nextNumber":10000},"customers":{"prefix":"","suffix":"","nextNumber":10000},"products":{"prefix":"AT-","suffix":"","nextNumber":1000},"quotes":{"prefix":"AN-","suffix":"","nextNumber":1000},"confirmationOrders":{"prefix":"AB-","suffix":"","nextNumber":1000},"invoices":{"prefix":"RE-","suffix":"","nextNumber":1000},"spaces":{"prefix":"LP-","suffix":"","nextNumber":1000},"customerspaces":{"prefix":"KLP-","suffix":"","nextNumber":1000},"inventoryitems":{"prefix":"IA-","suffix":"","nextNumber":1000},"customerinventoryitems":{"prefix":"KIA-","suffix":"","nextNumber":1000},"projects":{"prefix":"PRJ-","suffix":"","nextNumber":1000},"costcentres":{"prefix":"KST-","suffix":"","nextNumber":1000}}'::jsonb;
--> statement-breakpoint
UPDATE "tenants"
SET "numberRanges" = COALESCE("numberRanges", '{}'::jsonb) || jsonb_build_object(
'customerspaces', COALESCE("numberRanges"->'customerspaces', '{"prefix":"KLP-","suffix":"","nextNumber":1000}'::jsonb),
'customerinventoryitems', COALESCE("numberRanges"->'customerinventoryitems', '{"prefix":"KIA-","suffix":"","nextNumber":1000}'::jsonb)
);

View File

@@ -1,3 +0,0 @@
ALTER TABLE "customerinventoryitems" ADD COLUMN "vendor" bigint;
--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_vendor_vendors_id_fk" FOREIGN KEY ("vendor") REFERENCES "public"."vendors"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1,20 +0,0 @@
CREATE TABLE "memberrelations" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "memberrelations_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"tenant" bigint NOT NULL,
"type" text NOT NULL,
"billingInterval" text NOT NULL,
"billingAmount" double precision DEFAULT 0 NOT NULL,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
ALTER TABLE "customers" ADD COLUMN "memberrelation" bigint;
--> statement-breakpoint
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customers" ADD CONSTRAINT "customers_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1,4 +0,0 @@
ALTER TABLE "historyitems" ADD COLUMN "memberrelation" bigint;
--> statement-breakpoint
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -36,83 +36,6 @@
"when": 1765716877146,
"tag": "0004_stormy_onslaught",
"breakpoints": true
},
{
"idx": 5,
"version": "7",
"when": 1771096926109,
"tag": "0005_green_shinobi_shaw",
"breakpoints": true
},
{
"idx": 6,
"version": "7",
"when": 1772000000000,
"tag": "0006_nifty_price_lock",
"breakpoints": true
},
{
"idx": 7,
"version": "7",
"when": 1772000100000,
"tag": "0007_bright_default_tax_type",
"breakpoints": true
},
{
"idx": 8,
"version": "7",
"when": 1773000000000,
"tag": "0008_quick_contracttypes",
"breakpoints": true
},
{
"idx": 9,
"version": "7",
"when": 1773000100000,
"tag": "0009_heavy_contract_contracttype",
"breakpoints": true
},
{
"idx": 10,
"version": "7",
"when": 1773000200000,
"tag": "0010_sudden_billing_interval",
"breakpoints": true
},
{
"idx": 11,
"version": "7",
"when": 1773000300000,
"tag": "0011_mighty_member_bankaccounts",
"breakpoints": true
},
{
"idx": 12,
"version": "7",
"when": 1773000400000,
"tag": "0012_shiny_customer_inventory",
"breakpoints": true
},
{
"idx": 13,
"version": "7",
"when": 1773000500000,
"tag": "0013_brisk_customer_inventory_vendor",
"breakpoints": true
},
{
"idx": 14,
"version": "7",
"when": 1773000600000,
"tag": "0014_smart_memberrelations",
"breakpoints": true
},
{
"idx": 15,
"version": "7",
"when": 1773000700000,
"tag": "0015_wise_memberrelation_history",
"breakpoints": true
}
]
}

View File

@@ -11,7 +11,6 @@ import {
import { tenants } from "./tenants"
import { customers } from "./customers"
import { contacts } from "./contacts"
import { contracttypes } from "./contracttypes"
import { authUsers } from "./auth_users"
export const contracts = pgTable(
@@ -49,9 +48,6 @@ export const contracts = pgTable(
contact: bigint("contact", { mode: "number" }).references(
() => contacts.id
),
contracttype: bigint("contracttype", { mode: "number" }).references(
() => contracttypes.id
),
bankingIban: text("bankingIban"),
bankingBIC: text("bankingBIC"),
@@ -61,7 +57,6 @@ export const contracts = pgTable(
sepaDate: timestamp("sepaDate", { withTimezone: true }),
paymentType: text("paymentType"),
billingInterval: text("billingInterval"),
invoiceDispatch: text("invoiceDispatch"),
ownFields: jsonb("ownFields").notNull().default({}),

View File

@@ -1,40 +0,0 @@
import {
pgTable,
bigint,
timestamp,
text,
boolean,
uuid,
} from "drizzle-orm/pg-core"
import { tenants } from "./tenants"
import { authUsers } from "./auth_users"
export const contracttypes = pgTable("contracttypes", {
id: bigint("id", { mode: "number" })
.primaryKey()
.generatedByDefaultAsIdentity(),
createdAt: timestamp("created_at", { withTimezone: true })
.notNull()
.defaultNow(),
tenant: bigint("tenant", { mode: "number" })
.notNull()
.references(() => tenants.id),
name: text("name").notNull(),
description: text("description"),
paymentType: text("paymentType"),
recurring: boolean("recurring").notNull().default(false),
billingInterval: text("billingInterval"),
archived: boolean("archived").notNull().default(false),
updatedAt: timestamp("updated_at", { withTimezone: true }),
updatedBy: uuid("updated_by").references(() => authUsers.id),
})
export type ContractType = typeof contracttypes.$inferSelect
export type NewContractType = typeof contracttypes.$inferInsert

View File

@@ -1,66 +0,0 @@
import {
pgTable,
bigint,
timestamp,
text,
boolean,
doublePrecision,
uuid,
date,
} from "drizzle-orm/pg-core"
import { tenants } from "./tenants"
import { customers } from "./customers"
import { customerspaces } from "./customerspaces"
import { products } from "./products"
import { vendors } from "./vendors"
import { authUsers } from "./auth_users"
export const customerinventoryitems = pgTable("customerinventoryitems", {
id: bigint("id", { mode: "number" })
.primaryKey()
.generatedByDefaultAsIdentity(),
createdAt: timestamp("created_at", { withTimezone: true })
.notNull()
.defaultNow(),
name: text("name").notNull(),
description: text("description"),
tenant: bigint("tenant", { mode: "number" })
.notNull()
.references(() => tenants.id),
customer: bigint("customer", { mode: "number" })
.notNull()
.references(() => customers.id),
customerspace: bigint("customerspace", { mode: "number" }).references(
() => customerspaces.id
),
customerInventoryId: text("customerInventoryId").notNull(),
serialNumber: text("serialNumber"),
quantity: bigint("quantity", { mode: "number" }).notNull().default(0),
manufacturer: text("manufacturer"),
manufacturerNumber: text("manufacturerNumber"),
purchaseDate: date("purchaseDate"),
purchasePrice: doublePrecision("purchasePrice").default(0),
currentValue: doublePrecision("currentValue"),
product: bigint("product", { mode: "number" }).references(() => products.id),
vendor: bigint("vendor", { mode: "number" }).references(() => vendors.id),
archived: boolean("archived").notNull().default(false),
updatedAt: timestamp("updated_at", { withTimezone: true }),
updatedBy: uuid("updated_by").references(() => authUsers.id),
})
export type CustomerInventoryItem = typeof customerinventoryitems.$inferSelect
export type NewCustomerInventoryItem = typeof customerinventoryitems.$inferInsert

View File

@@ -62,7 +62,6 @@ export const customers = pgTable(
updatedBy: uuid("updated_by").references(() => authUsers.id),
customPaymentType: text("custom_payment_type"), // ENUM payment_types separat?
customTaxType: text("customTaxType"),
}
)

View File

@@ -1,54 +0,0 @@
import {
pgTable,
bigint,
timestamp,
text,
boolean,
jsonb,
uuid,
} from "drizzle-orm/pg-core"
import { tenants } from "./tenants"
import { customers } from "./customers"
import { authUsers } from "./auth_users"
export const customerspaces = pgTable("customerspaces", {
id: bigint("id", { mode: "number" })
.primaryKey()
.generatedByDefaultAsIdentity(),
createdAt: timestamp("created_at", { withTimezone: true })
.notNull()
.defaultNow(),
name: text("name").notNull(),
type: text("type").notNull(),
tenant: bigint("tenant", { mode: "number" })
.notNull()
.references(() => tenants.id),
customer: bigint("customer", { mode: "number" })
.notNull()
.references(() => customers.id),
space_number: text("spaceNumber").notNull(),
parentSpace: bigint("parentSpace", { mode: "number" }).references(
() => customerspaces.id
),
info_data: jsonb("infoData")
.notNull()
.default({ zip: "", city: "", streetNumber: "" }),
description: text("description"),
archived: boolean("archived").notNull().default(false),
updatedAt: timestamp("updated_at", { withTimezone: true }),
updatedBy: uuid("updated_by").references(() => authUsers.id),
})
export type CustomerSpace = typeof customerspaces.$inferSelect
export type NewCustomerSpace = typeof customerspaces.$inferInsert

View File

@@ -3,7 +3,7 @@ import {
uuid,
timestamp,
text,
bigint, jsonb,
bigint,
} from "drizzle-orm/pg-core"
import { tenants } from "./tenants"
@@ -23,11 +23,6 @@ export const devices = pgTable("devices", {
password: text("password"),
externalId: text("externalId"),
lastSeen: timestamp("last_seen", { withTimezone: true }),
// Hier speichern wir den ganzen Payload (RSSI, Heap, IP, etc.)
lastDebugInfo: jsonb("last_debug_info"),
})
export type Device = typeof devices.$inferSelect

View File

@@ -1,39 +0,0 @@
import {
pgTable,
bigint,
timestamp,
text,
boolean,
jsonb,
uuid,
} from "drizzle-orm/pg-core"
import { tenants } from "./tenants"
import { authUsers } from "./auth_users"
export const entitybankaccounts = pgTable("entitybankaccounts", {
id: bigint("id", { mode: "number" })
.primaryKey()
.generatedByDefaultAsIdentity(),
createdAt: timestamp("created_at", { withTimezone: true })
.notNull()
.defaultNow(),
tenant: bigint("tenant", { mode: "number" })
.notNull()
.references(() => tenants.id),
ibanEncrypted: jsonb("iban_encrypted").notNull(),
bicEncrypted: jsonb("bic_encrypted").notNull(),
bankNameEncrypted: jsonb("bank_name_encrypted").notNull(),
description: text("description"),
updatedAt: timestamp("updated_at", { withTimezone: true }),
updatedBy: uuid("updated_by").references(() => authUsers.id),
archived: boolean("archived").notNull().default(false),
})
export type EntityBankAccount = typeof entitybankaccounts.$inferSelect
export type NewEntityBankAccount = typeof entitybankaccounts.$inferInsert

View File

@@ -73,7 +73,6 @@ export const files = pgTable("files", {
createdBy: uuid("created_by").references(() => authUsers.id),
authProfile: uuid("auth_profile").references(() => authProfiles.id),
size: bigint("size", { mode: "number" }),
})
export type File = typeof files.$inferSelect

View File

@@ -20,8 +20,6 @@ import { tasks } from "./tasks"
import { vehicles } from "./vehicles"
import { bankstatements } from "./bankstatements"
import { spaces } from "./spaces"
import { customerspaces } from "./customerspaces"
import { customerinventoryitems } from "./customerinventoryitems"
import { costcentres } from "./costcentres"
import { ownaccounts } from "./ownaccounts"
import { createddocuments } from "./createddocuments"
@@ -34,7 +32,6 @@ import { events } from "./events"
import { inventoryitemgroups } from "./inventoryitemgroups"
import { authUsers } from "./auth_users"
import {files} from "./files";
import { memberrelations } from "./memberrelations";
export const historyitems = pgTable("historyitems", {
id: bigint("id", { mode: "number" })
@@ -102,12 +99,6 @@ export const historyitems = pgTable("historyitems", {
space: bigint("space", { mode: "number" }).references(() => spaces.id),
customerspace: bigint("customerspace", { mode: "number" }).references(() => customerspaces.id),
customerinventoryitem: bigint("customerinventoryitem", { mode: "number" }).references(() => customerinventoryitems.id),
memberrelation: bigint("memberrelation", { mode: "number" }).references(() => memberrelations.id),
config: jsonb("config"),
projecttype: bigint("projecttype", { mode: "number" }).references(

View File

@@ -14,7 +14,7 @@ export const hourrates = pgTable("hourrates", {
name: text("name").notNull(),
purchase_price: doublePrecision("purchasePrice").notNull(),
purchasePrice: doublePrecision("purchasePrice").notNull(),
sellingPrice: doublePrecision("sellingPrice").notNull(),
archived: boolean("archived").notNull().default(false),

View File

@@ -13,19 +13,15 @@ export * from "./checks"
export * from "./citys"
export * from "./contacts"
export * from "./contracts"
export * from "./contracttypes"
export * from "./costcentres"
export * from "./countrys"
export * from "./createddocuments"
export * from "./createdletters"
export * from "./customers"
export * from "./customerspaces"
export * from "./customerinventoryitems"
export * from "./devices"
export * from "./documentboxes"
export * from "./enums"
export * from "./events"
export * from "./entitybankaccounts"
export * from "./files"
export * from "./filetags"
export * from "./folders"
@@ -46,9 +42,7 @@ export * from "./incominginvoices"
export * from "./inventoryitemgroups"
export * from "./inventoryitems"
export * from "./letterheads"
export * from "./memberrelations"
export * from "./movements"
export * from "./m2m_api_keys"
export * from "./notifications_event_types"
export * from "./notifications_items"
export * from "./notifications_preferences"
@@ -78,4 +72,3 @@ export * from "./staff_time_events"
export * from "./serialtypes"
export * from "./serialexecutions"
export * from "./public_links"
export * from "./wikipages"

View File

@@ -1,48 +0,0 @@
import {
pgTable,
uuid,
bigint,
text,
timestamp,
boolean,
} from "drizzle-orm/pg-core"
import { tenants } from "./tenants"
import { authUsers } from "./auth_users"
/**
 * Machine-to-machine API keys. Only a unique hash of the key material is
 * persisted (key_hash); key_prefix is kept alongside it for lookup/display.
 */
export const m2mApiKeys = pgTable("m2m_api_keys", {
  id: uuid("id").primaryKey().defaultRandom(),
  createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
  updatedAt: timestamp("updated_at", { withTimezone: true }).notNull().defaultNow(),
  // Tenant and owning user: keys are removed with either (cascade).
  tenantId: bigint("tenant_id", { mode: "number" })
    .notNull()
    .references(() => tenants.id, { onDelete: "cascade", onUpdate: "cascade" }),
  userId: uuid("user_id")
    .notNull()
    .references(() => authUsers.id, { onDelete: "cascade", onUpdate: "cascade" }),
  // Creator is informational only — nulled out if that user is deleted.
  createdBy: uuid("created_by").references(() => authUsers.id, {
    onDelete: "set null",
    onUpdate: "cascade",
  }),
  name: text("name").notNull(),
  keyPrefix: text("key_prefix").notNull(),
  // Hashed key material; uniqueness enforces one row per key.
  keyHash: text("key_hash").notNull().unique(),
  active: boolean("active").notNull().default(true),
  lastUsedAt: timestamp("last_used_at", { withTimezone: true }),
  expiresAt: timestamp("expires_at", { withTimezone: true }),
})

/** Row shape returned by SELECT queries. */
export type M2mApiKey = typeof m2mApiKeys.$inferSelect
/** Shape accepted by INSERT statements. */
export type NewM2mApiKey = typeof m2mApiKeys.$inferInsert

View File

@@ -1,39 +0,0 @@
import {
pgTable,
bigint,
timestamp,
text,
boolean,
uuid,
doublePrecision,
} from "drizzle-orm/pg-core"
import { tenants } from "./tenants"
import { authUsers } from "./auth_users"
/**
 * Membership/billing relation for a tenant: the relation type plus the
 * billing interval and amount applied to it.
 */
export const memberrelations = pgTable("memberrelations", {
  id: bigint("id", { mode: "number" }).primaryKey().generatedByDefaultAsIdentity(),
  createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
  // Owning tenant — multi-tenancy scope for every row.
  tenant: bigint("tenant", { mode: "number" }).notNull().references(() => tenants.id),
  // Free-form type string; NOTE(review): valid values are not visible here.
  type: text("type").notNull(),
  billingInterval: text("billingInterval").notNull(),
  billingAmount: doublePrecision("billingAmount").notNull().default(0),
  // Soft-delete flag: rows are archived rather than removed.
  archived: boolean("archived").notNull().default(false),
  updatedAt: timestamp("updated_at", { withTimezone: true }),
  updatedBy: uuid("updated_by").references(() => authUsers.id),
})

/** Row shape returned by SELECT queries. */
export type MemberRelation = typeof memberrelations.$inferSelect
/** Shape accepted by INSERT statements. */
export type NewMemberRelation = typeof memberrelations.$inferInsert

View File

@@ -54,7 +54,6 @@ export const services = pgTable("services", {
materialComposition: jsonb("materialComposition").notNull().default([]),
personalComposition: jsonb("personalComposition").notNull().default([]),
priceUpdateLocked: boolean("priceUpdateLocked").notNull().default(false),
updatedAt: timestamp("updated_at", { withTimezone: true }),
updatedBy: uuid("updated_by").references(() => authUsers.id),

View File

@@ -88,9 +88,7 @@ export const tenants = pgTable(
confirmationOrders: { prefix: "AB-", suffix: "", nextNumber: 1000 },
invoices: { prefix: "RE-", suffix: "", nextNumber: 1000 },
spaces: { prefix: "LP-", suffix: "", nextNumber: 1000 },
customerspaces: { prefix: "KLP-", suffix: "", nextNumber: 1000 },
inventoryitems: { prefix: "IA-", suffix: "", nextNumber: 1000 },
customerinventoryitems: { prefix: "KIA-", suffix: "", nextNumber: 1000 },
projects: { prefix: "PRJ-", suffix: "", nextNumber: 1000 },
costcentres: { prefix: "KST-", suffix: "", nextNumber: 1000 },
}),

View File

@@ -1,99 +0,0 @@
import {
pgTable,
bigint,
text,
timestamp,
boolean,
jsonb,
integer,
index,
uuid,
AnyPgColumn
} from "drizzle-orm/pg-core"
import { relations } from "drizzle-orm"
import { tenants } from "./tenants"
import { authUsers } from "./auth_users"
/**
 * Wiki pages, organized as a tree via parentId and optionally attached to
 * another record through a *split* polymorphic reference:
 *   - entityType : kind of the related record (e.g. 'customer', 'iot_device')
 *   - entityId   : numeric id for legacy bigint-keyed tables
 *   - entityUuid : uuid for newer uuid-keyed tables
 * Exactly one of entityId/entityUuid is expected per entityType — confirm
 * with the write path.
 */
export const wikiPages = pgTable(
  "wiki_pages",
  {
    // Primary key of the wiki entry itself (UUID).
    id: uuid("id").primaryKey().defaultRandom(),
    tenantId: bigint("tenant_id", { mode: "number" })
      .notNull()
      .references(() => tenants.id, { onDelete: "cascade" }),
    // Self-reference building the tree; subtrees vanish with their parent.
    parentId: uuid("parent_id")
      .references((): AnyPgColumn => wikiPages.id, { onDelete: "cascade" }),
    title: text("title").notNull(),
    content: jsonb("content"),
    isFolder: boolean("is_folder").notNull().default(false),
    sortOrder: integer("sort_order").notNull().default(0),
    // --- polymorphic relation (split across two id columns) ---
    entityType: text("entity_type"),
    // Column 1: legacy tables keyed by bigint (e.g. entityType='customer' → 1050).
    entityId: bigint("entity_id", { mode: "number" }),
    // Column 2: newer tables keyed by uuid (e.g. entityType='iot_device').
    entityUuid: uuid("entity_uuid"),
    createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp("updated_at", { withTimezone: true }),
    createdBy: uuid("created_by").references(() => authUsers.id),
    updatedBy: uuid("updated_by").references(() => authUsers.id),
  },
  (table) => ({
    tenantIdx: index("wiki_pages_tenant_idx").on(table.tenantId),
    parentIdx: index("wiki_pages_parent_idx").on(table.parentId),
    // Two separate lookup indexes, one per id flavour:
    // e.g. "pages for customer 1050" ...
    entityIntIdx: index("wiki_pages_entity_int_idx")
      .on(table.tenantId, table.entityType, table.entityId),
    // ... and "pages for IoT device 550e84...".
    entityUuidIdx: index("wiki_pages_entity_uuid_idx")
      .on(table.tenantId, table.entityType, table.entityUuid),
  })
)
/** Drizzle relation metadata for query-builder joins on wiki pages. */
export const wikiPagesRelations = relations(wikiPages, ({ one, many }) => ({
  tenant: one(tenants, {
    fields: [wikiPages.tenantId],
    references: [tenants.id],
  }),
  // parent/children share one relationName so drizzle can pair the two
  // sides of the self-referencing tree.
  parent: one(wikiPages, {
    fields: [wikiPages.parentId],
    references: [wikiPages.id],
    relationName: "parent_child",
  }),
  children: many(wikiPages, {
    relationName: "parent_child",
  }),
  author: one(authUsers, {
    fields: [wikiPages.createdBy],
    references: [authUsers.id],
  }),
}))

/** Row shape returned by SELECT queries. */
export type WikiPage = typeof wikiPages.$inferSelect
/** Shape accepted by INSERT statements. */
export type NewWikiPage = typeof wikiPages.$inferInsert

View File

@@ -6,6 +6,6 @@ export default defineConfig({
schema: "./db/schema",
out: "./db/migrations",
dbCredentials: {
url: secrets.DATABASE_URL || "postgres://postgres:wJw7aNpEBJdcxgoct6GXNpvY4Cn6ECqu@fedeo-db-001.vpn.internal:5432/fedeo",
url: secrets.DATABASE_URL,
},
})

View File

@@ -5,12 +5,9 @@
"main": "index.js",
"scripts": {
"dev": "tsx watch src/index.ts",
"fill": "ts-node src/webdav/fill-file-sizes.ts",
"dev:dav": "tsx watch src/webdav/server.ts",
"build": "tsc",
"start": "node dist/src/index.js",
"schema:index": "ts-node scripts/generate-schema-index.ts",
"bankcodes:update": "tsx scripts/generate-de-bank-codes.ts"
"schema:index": "ts-node scripts/generate-schema-index.ts"
},
"repository": {
"type": "git",
@@ -30,6 +27,7 @@
"@infisical/sdk": "^4.0.6",
"@mmote/niimbluelib": "^0.0.1-alpha.29",
"@prisma/client": "^6.15.0",
"@supabase/supabase-js": "^2.56.1",
"@zip.js/zip.js": "^2.7.73",
"archiver": "^7.0.1",
"axios": "^1.12.1",
@@ -50,7 +48,6 @@
"pg": "^8.16.3",
"pngjs": "^7.0.0",
"sharp": "^0.34.5",
"webdav-server": "^2.6.2",
"xmlbuilder": "^15.1.1",
"zpl-image": "^0.2.0",
"zpl-renderer-js": "^2.0.2"

View File

@@ -1,95 +0,0 @@
import fs from "node:fs/promises"
import path from "node:path"
import https from "node:https"
const DEFAULT_SOURCE_URL =
"https://www.bundesbank.de/resource/blob/602632/bec25ca5df1eb62fefadd8325dafe67c/472B63F073F071307366337C94F8C870/blz-aktuell-txt-data.txt"
const OUTPUT_NAME_FILE = path.resolve("src/utils/deBankCodes.ts")
const OUTPUT_BIC_FILE = path.resolve("src/utils/deBankBics.ts")
/**
 * Download a URL into a Buffer, following HTTP redirects.
 *
 * @param url           Absolute URL to fetch via https.
 * @param redirectsLeft Remaining redirects before giving up (default 5).
 *                      The original implementation recursed on every 3xx
 *                      Location header with no limit, so a redirect cycle
 *                      would loop forever; the cap makes it terminate.
 * @returns Promise resolving to the complete response body.
 * @throws  Error on a non-200, non-redirect status, on network errors,
 *          or when the redirect budget is exhausted.
 */
function fetchBuffer(url: string, redirectsLeft = 5): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    https
      .get(url, (res) => {
        if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
          // Drain the redirect response so the socket is released.
          res.resume()
          if (redirectsLeft <= 0) {
            return reject(new Error(`Too many redirects while fetching ${url}`))
          }
          return resolve(fetchBuffer(res.headers.location, redirectsLeft - 1))
        }
        if (res.statusCode !== 200) {
          return reject(new Error(`Download failed with status ${res.statusCode}`))
        }
        const chunks: Buffer[] = []
        res.on("data", (chunk) => chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)))
        res.on("end", () => resolve(Buffer.concat(chunks)))
        res.on("error", reject)
      })
      .on("error", reject)
  })
}
/**
 * Escape a raw value for embedding inside a double-quoted TypeScript
 * string literal in the generated output files.
 *
 * Besides backslashes and double quotes, control characters (\n, \r, \t)
 * are now escaped too: the original only handled backslash and quote, so a
 * stray newline in the source data would have produced a syntactically
 * broken generated module.
 */
function escapeTsString(value: string): string {
  return value
    .replace(/\\/g, "\\\\")
    .replace(/"/g, '\\"')
    .replace(/\n/g, "\\n")
    .replace(/\r/g, "\\r")
    .replace(/\t/g, "\\t")
}
/**
 * Build the local German bank-code (BLZ) lookup modules.
 *
 * Reads the Bundesbank BLZ file — either from a local path given via the
 * BLZ_SOURCE_FILE env var, or downloaded from BLZ_SOURCE_URL (falling back
 * to DEFAULT_SOURCE_URL) — parses its fixed-width records, and generates
 * two TypeScript modules:
 *   - OUTPUT_NAME_FILE: DE_BANK_CODE_TO_NAME (BLZ → institute name)
 *   - OUTPUT_BIC_FILE:  DE_BANK_CODE_TO_BIC  (BLZ → BIC)
 */
async function main() {
const source = process.env.BLZ_SOURCE_URL || DEFAULT_SOURCE_URL
const sourceFile = process.env.BLZ_SOURCE_FILE
let raw: Buffer
// Local file takes precedence over downloading.
if (sourceFile) {
console.log(`Reading BLZ source file: ${sourceFile}`)
raw = await fs.readFile(sourceFile)
} else {
console.log(`Downloading BLZ source: ${source}`)
raw = await fetchBuffer(source)
}
// Decoded as latin1 (ISO-8859-1), not UTF-8 — matches the Bundesbank export.
const content = raw.toString("latin1")
const lines = content.split(/\r?\n/)
const nameMap = new Map<string, string>()
const bicMap = new Map<string, string>()
for (const line of lines) {
// Fixed-width records: anything shorter than 150 chars cannot hold the BIC field.
if (!line || line.length < 150) continue
// Field offsets: cols 0-7 = BLZ, 9-66 = institute name, 139-149 = BIC.
const blz = line.slice(0, 8).trim()
const name = line.slice(9, 67).trim()
const bic = line.slice(139, 150).trim()
// Skip records without an 8-digit BLZ or an empty name.
if (!/^\d{8}$/.test(blz) || !name) continue
// First occurrence of a BLZ wins (presumably the head office — TODO confirm
// against the Bundesbank file layout).
if (!nameMap.has(blz)) nameMap.set(blz, name)
if (bic && !bicMap.has(blz)) bicMap.set(blz, bic)
}
// Sort by BLZ so the generated files are deterministic and diff-friendly.
const sortedNames = [...nameMap.entries()].sort(([a], [b]) => a.localeCompare(b))
const sortedBics = [...bicMap.entries()].sort(([a], [b]) => a.localeCompare(b))
const nameOutputLines = [
"// Lokale Bankleitzahl-zu-Institut Zuordnung (DE).",
"// Quelle: Deutsche Bundesbank, BLZ-Datei (vollstaendig).",
"export const DE_BANK_CODE_TO_NAME: Record<string, string> = {",
...sortedNames.map(([blz, name]) => `  "${blz}": "${escapeTsString(name)}",`),
"}",
"",
]
const bicOutputLines = [
"// Lokale Bankleitzahl-zu-BIC Zuordnung (DE).",
"// Quelle: Deutsche Bundesbank, BLZ-Datei (vollstaendig).",
"export const DE_BANK_CODE_TO_BIC: Record<string, string> = {",
...sortedBics.map(([blz, bic]) => `  "${blz}": "${escapeTsString(bic)}",`),
"}",
"",
]
await fs.writeFile(OUTPUT_NAME_FILE, nameOutputLines.join("\n"), "utf8")
await fs.writeFile(OUTPUT_BIC_FILE, bicOutputLines.join("\n"), "utf8")
console.log(`Wrote ${sortedNames.length} bank names to ${OUTPUT_NAME_FILE}`)
console.log(`Wrote ${sortedBics.length} bank BICs to ${OUTPUT_BIC_FILE}`)
}
// Entry point: log the failure and exit non-zero so CI/automation notices.
main().catch((err) => {
console.error(err)
process.exit(1)
})

View File

@@ -1,5 +1,6 @@
import Fastify from "fastify";
import swaggerPlugin from "./plugins/swagger"
import supabasePlugin from "./plugins/supabase";
import dayjsPlugin from "./plugins/dayjs";
import healthRoutes from "./routes/health";
import meRoutes from "./routes/auth/me";
@@ -28,7 +29,6 @@ import staffTimeRoutes from "./routes/staff/time";
import staffTimeConnectRoutes from "./routes/staff/timeconnects";
import userRoutes from "./routes/auth/user";
import publiclinksAuthenticatedRoutes from "./routes/publiclinks/publiclinks-authenticated";
import wikiRoutes from "./routes/wiki";
//Public Links
import publiclinksNonAuthenticatedRoutes from "./routes/publiclinks/publiclinks-non-authenticated";
@@ -42,11 +42,9 @@ import helpdeskInboundEmailRoutes from "./routes/helpdesk.inbound.email";
import deviceRoutes from "./routes/internal/devices";
import tenantRoutesInternal from "./routes/internal/tenant";
import staffTimeRoutesInternal from "./routes/internal/time";
import authM2mInternalRoutes from "./routes/internal/auth.m2m";
//Devices
import devicesRFIDRoutes from "./routes/devices/rfid";
import devicesManagementRoutes from "./routes/devices/management";
import {sendMail} from "./utils/mailer";
@@ -54,7 +52,6 @@ import {loadSecrets, secrets} from "./utils/secrets";
import {initMailer} from "./utils/mailer"
import {initS3} from "./utils/s3";
//Services
import servicesPlugin from "./plugins/services";
@@ -73,6 +70,8 @@ async function main() {
// Plugins Global verfügbar
await app.register(swaggerPlugin);
await app.register(corsPlugin);
await app.register(supabasePlugin);
await app.register(tenantPlugin);
await app.register(dayjsPlugin);
await app.register(dbPlugin);
@@ -108,7 +107,6 @@ async function main() {
await app.register(async (m2mApp) => {
await m2mApp.register(authM2m)
await m2mApp.register(authM2mInternalRoutes)
await m2mApp.register(helpdeskInboundEmailRoutes)
await m2mApp.register(deviceRoutes)
await m2mApp.register(tenantRoutesInternal)
@@ -117,10 +115,8 @@ async function main() {
await app.register(async (devicesApp) => {
await devicesApp.register(devicesRFIDRoutes)
await devicesApp.register(devicesManagementRoutes)
},{prefix: "/devices"})
await app.register(corsPlugin);
//Geschützte Routes
@@ -145,7 +141,6 @@ async function main() {
await subApp.register(userRoutes);
await subApp.register(publiclinksAuthenticatedRoutes);
await subApp.register(resourceRoutes);
await subApp.register(wikiRoutes);
},{prefix: "/api"})

View File

@@ -19,238 +19,241 @@ import {
and,
} from "drizzle-orm"
let badMessageDetected = false
let badMessageMessageSent = false
export function syncDokuboxService (server: FastifyInstance) {
let badMessageDetected = false
let badMessageMessageSent = false
let client: ImapFlow | null = null
let client: ImapFlow | null = null
// -------------------------------------------------------------
// IMAP CLIENT INITIALIZEN
// -------------------------------------------------------------
export async function initDokuboxClient() {
client = new ImapFlow({
host: secrets.DOKUBOX_IMAP_HOST,
port: secrets.DOKUBOX_IMAP_PORT,
secure: secrets.DOKUBOX_IMAP_SECURE,
auth: {
user: secrets.DOKUBOX_IMAP_USER,
pass: secrets.DOKUBOX_IMAP_PASSWORD
},
logger: false
})
async function initDokuboxClient() {
client = new ImapFlow({
host: secrets.DOKUBOX_IMAP_HOST,
port: secrets.DOKUBOX_IMAP_PORT,
secure: secrets.DOKUBOX_IMAP_SECURE,
auth: {
user: secrets.DOKUBOX_IMAP_USER,
pass: secrets.DOKUBOX_IMAP_PASSWORD
},
logger: false
})
console.log("Dokubox E-Mail Client Initialized")
console.log("Dokubox E-Mail Client Initialized")
await client.connect()
}
await client.connect()
}
const syncDokubox = async () => {
console.log("Perform Dokubox Sync")
// -------------------------------------------------------------
// MAIN SYNC FUNCTION (DRIZZLE VERSION)
// -------------------------------------------------------------
export const syncDokubox = (server: FastifyInstance) =>
async () => {
await initDokuboxClient()
console.log("Perform Dokubox Sync")
if (!client?.usable) {
throw new Error("E-Mail Client not usable")
}
await initDokuboxClient()
// -------------------------------
// TENANTS LADEN (DRIZZLE)
// -------------------------------
const tenantList = await server.db
.select({
id: tenants.id,
name: tenants.name,
emailAddresses: tenants.dokuboxEmailAddresses,
key: tenants.dokuboxkey
})
.from(tenants)
if (!client?.usable) {
throw new Error("E-Mail Client not usable")
}
const lock = await client.getMailboxLock("INBOX")
// -------------------------------
// TENANTS LADEN (DRIZZLE)
// -------------------------------
const tenantList = await server.db
.select({
id: tenants.id,
name: tenants.name,
emailAddresses: tenants.dokuboxEmailAddresses,
key: tenants.dokuboxkey
})
.from(tenants)
try {
const lock = await client.getMailboxLock("INBOX")
for await (let msg of client.fetch({ seen: false }, { envelope: true, source: true })) {
try {
const parsed = await simpleParser(msg.source)
for await (let msg of client.fetch({ seen: false }, { envelope: true, source: true })) {
const message = {
id: msg.uid,
subject: parsed.subject,
to: parsed.to?.value || [],
cc: parsed.cc?.value || [],
attachments: parsed.attachments || []
}
const parsed = await simpleParser(msg.source)
// -------------------------------------------------
// MAPPING / FIND TENANT
// -------------------------------------------------
const config = await getMessageConfigDrizzle(server, message, tenantList)
if (!config) {
badMessageDetected = true
if (!badMessageMessageSent) {
badMessageMessageSent = true
}
return
}
if (message.attachments.length > 0) {
for (const attachment of message.attachments) {
await saveFile(
server,
config.tenant,
message.id,
attachment,
config.folder,
config.filetype
)
}
}
const message = {
id: msg.uid,
subject: parsed.subject,
to: parsed.to?.value || [],
cc: parsed.cc?.value || [],
attachments: parsed.attachments || []
}
if (!badMessageDetected) {
badMessageDetected = false
badMessageMessageSent = false
// -------------------------------------------------
// MAPPING / FIND TENANT
// -------------------------------------------------
const config = await getMessageConfigDrizzle(server, message, tenantList)
if (!config) {
badMessageDetected = true
if (!badMessageMessageSent) {
badMessageMessageSent = true
}
return
}
await client.messageFlagsAdd({ seen: false }, ["\\Seen"])
await client.messageDelete({ seen: true })
} finally {
lock.release()
client.close()
}
}
const getMessageConfigDrizzle = async (
server: FastifyInstance,
message,
tenantsList: any[]
) => {
let possibleKeys: string[] = []
if (message.to) {
message.to.forEach((item) =>
possibleKeys.push(item.address.split("@")[0].toLowerCase())
)
}
if (message.cc) {
message.cc.forEach((item) =>
possibleKeys.push(item.address.split("@")[0].toLowerCase())
)
}
// -------------------------------------------
// TENANT IDENTIFY
// -------------------------------------------
let tenant = tenantsList.find((t) => possibleKeys.includes(t.key))
if (!tenant && message.to?.length) {
const address = message.to[0].address.toLowerCase()
tenant = tenantsList.find((t) =>
(t.emailAddresses || []).map((m) => m.toLowerCase()).includes(address)
)
}
if (!tenant) return null
// -------------------------------------------
// FOLDER + FILETYPE VIA SUBJECT
// -------------------------------------------
let folderId = null
let filetypeId = null
// -------------------------------------------
// Rechnung / Invoice
// -------------------------------------------
if (message.subject?.match(/(Rechnung|Beleg|Invoice|Quittung)/gi)) {
const folder = await server.db
.select({ id: folders.id })
.from(folders)
.where(
and(
eq(folders.tenant, tenant.id),
and(
eq(folders.function, "incomingInvoices"),
//@ts-ignore
eq(folders.year, dayjs().format("YYYY"))
if (message.attachments.length > 0) {
for (const attachment of message.attachments) {
await saveFile(
server,
config.tenant,
message.id,
attachment,
config.folder,
config.filetype
)
)
)
.limit(1)
}
}
}
folderId = folder[0]?.id ?? null
if (!badMessageDetected) {
badMessageDetected = false
badMessageMessageSent = false
}
const tag = await server.db
.select({ id: filetags.id })
.from(filetags)
.where(
and(
eq(filetags.tenant, tenant.id),
eq(filetags.incomingDocumentType, "invoices")
)
)
.limit(1)
await client.messageFlagsAdd({ seen: false }, ["\\Seen"])
await client.messageDelete({ seen: true })
filetypeId = tag[0]?.id ?? null
}
// -------------------------------------------
// Mahnung
// -------------------------------------------
else if (message.subject?.match(/(Mahnung|Zahlungsaufforderung|Zahlungsverzug)/gi)) {
const tag = await server.db
.select({ id: filetags.id })
.from(filetags)
.where(
and(
eq(filetags.tenant, tenant.id),
eq(filetags.incomingDocumentType, "reminders")
)
)
.limit(1)
filetypeId = tag[0]?.id ?? null
}
// -------------------------------------------
// Sonstige Dokumente → Deposit Folder
// -------------------------------------------
else {
const folder = await server.db
.select({ id: folders.id })
.from(folders)
.where(
and(
eq(folders.tenant, tenant.id),
eq(folders.function, "deposit")
)
)
.limit(1)
folderId = folder[0]?.id ?? null
}
return {
tenant: tenant.id,
folder: folderId,
filetype: filetypeId
} finally {
lock.release()
client.close()
}
}
// -------------------------------------------------------------
// TENANT ERKENNEN + FOLDER/FILETYPES (DRIZZLE VERSION)
// -------------------------------------------------------------
const getMessageConfigDrizzle = async (
server: FastifyInstance,
message,
tenantsList: any[]
) => {
let possibleKeys: string[] = []
if (message.to) {
message.to.forEach((item) =>
possibleKeys.push(item.address.split("@")[0].toLowerCase())
)
}
if (message.cc) {
message.cc.forEach((item) =>
possibleKeys.push(item.address.split("@")[0].toLowerCase())
)
}
// -------------------------------------------
// TENANT IDENTIFY
// -------------------------------------------
let tenant = tenantsList.find((t) => possibleKeys.includes(t.key))
if (!tenant && message.to?.length) {
const address = message.to[0].address.toLowerCase()
tenant = tenantsList.find((t) =>
(t.emailAddresses || []).map((m) => m.toLowerCase()).includes(address)
)
}
if (!tenant) return null
// -------------------------------------------
// FOLDER + FILETYPE VIA SUBJECT
// -------------------------------------------
let folderId = null
let filetypeId = null
// -------------------------------------------
// Rechnung / Invoice
// -------------------------------------------
if (message.subject?.match(/(Rechnung|Beleg|Invoice|Quittung)/gi)) {
const folder = await server.db
.select({ id: folders.id })
.from(folders)
.where(
and(
eq(folders.tenant, tenant.id),
and(
eq(folders.function, "incomingInvoices"),
//@ts-ignore
eq(folders.year, dayjs().format("YYYY"))
)
)
)
.limit(1)
folderId = folder[0]?.id ?? null
const tag = await server.db
.select({ id: filetags.id })
.from(filetags)
.where(
and(
eq(filetags.tenant, tenant.id),
eq(filetags.incomingDocumentType, "invoices")
)
)
.limit(1)
filetypeId = tag[0]?.id ?? null
}
// -------------------------------------------
// Mahnung
// -------------------------------------------
else if (message.subject?.match(/(Mahnung|Zahlungsaufforderung|Zahlungsverzug)/gi)) {
const tag = await server.db
.select({ id: filetags.id })
.from(filetags)
.where(
and(
eq(filetags.tenant, tenant.id),
eq(filetags.incomingDocumentType, "reminders")
)
)
.limit(1)
filetypeId = tag[0]?.id ?? null
}
// -------------------------------------------
// Sonstige Dokumente → Deposit Folder
// -------------------------------------------
else {
const folder = await server.db
.select({ id: folders.id })
.from(folders)
.where(
and(
eq(folders.tenant, tenant.id),
eq(folders.function, "deposit")
)
)
.limit(1)
folderId = folder[0]?.id ?? null
}
return {
run: async () => {
await initDokuboxClient()
await syncDokubox()
console.log("Service: Dokubox sync finished")
}
tenant: tenant.id,
folder: folderId,
filetype: filetypeId
}
}

View File

@@ -8,108 +8,9 @@ import {
files,
filetags,
incominginvoices,
vendors,
} from "../../../db/schema"
import { eq, and, isNull, not, desc } from "drizzle-orm"
type InvoiceAccount = {
account?: number | null
description?: string | null
taxType?: string | number | null
}
const normalizeAccounts = (accounts: unknown): InvoiceAccount[] => {
if (!Array.isArray(accounts)) return []
return accounts
.map((entry: any) => ({
account: typeof entry?.account === "number" ? entry.account : null,
description: typeof entry?.description === "string" ? entry.description : null,
taxType: entry?.taxType ?? null,
}))
.filter((entry) => entry.account !== null || entry.description || entry.taxType !== null)
}
const buildLearningContext = (historicalInvoices: any[]) => {
if (!historicalInvoices.length) return null
const vendorProfiles = new Map<number, {
vendorName: string
paymentTypes: Map<string, number>
accountUsage: Map<number, number>
sampleDescriptions: string[]
}>()
const recentExamples: any[] = []
for (const invoice of historicalInvoices) {
const accounts = normalizeAccounts(invoice.accounts)
const vendorId = typeof invoice.vendorId === "number" ? invoice.vendorId : null
const vendorName = typeof invoice.vendorName === "string" ? invoice.vendorName : "Unknown"
if (vendorId) {
if (!vendorProfiles.has(vendorId)) {
vendorProfiles.set(vendorId, {
vendorName,
paymentTypes: new Map(),
accountUsage: new Map(),
sampleDescriptions: [],
})
}
const profile = vendorProfiles.get(vendorId)!
if (invoice.paymentType) {
const key = String(invoice.paymentType)
profile.paymentTypes.set(key, (profile.paymentTypes.get(key) ?? 0) + 1)
}
for (const account of accounts) {
if (typeof account.account === "number") {
profile.accountUsage.set(account.account, (profile.accountUsage.get(account.account) ?? 0) + 1)
}
}
if (invoice.description && profile.sampleDescriptions.length < 3) {
profile.sampleDescriptions.push(String(invoice.description).slice(0, 120))
}
}
if (recentExamples.length < 20) {
recentExamples.push({
vendorId,
vendorName,
paymentType: invoice.paymentType ?? null,
accounts: accounts.map((entry) => ({
account: entry.account,
description: entry.description ?? null,
taxType: entry.taxType ?? null,
})),
})
}
}
const vendorPatterns = Array.from(vendorProfiles.entries())
.map(([vendorId, profile]) => {
const commonPaymentType = Array.from(profile.paymentTypes.entries())
.sort((a, b) => b[1] - a[1])[0]?.[0] ?? null
const topAccounts = Array.from(profile.accountUsage.entries())
.sort((a, b) => b[1] - a[1])
.slice(0, 4)
.map(([accountId, count]) => ({ accountId, count }))
return {
vendorId,
vendorName: profile.vendorName,
commonPaymentType,
topAccounts,
sampleDescriptions: profile.sampleDescriptions,
}
})
.slice(0, 50)
return JSON.stringify({
vendorPatterns,
recentExamples,
})
}
import { eq, and, isNull, not } from "drizzle-orm"
export function prepareIncomingInvoices(server: FastifyInstance) {
const processInvoices = async (tenantId:number) => {
@@ -171,34 +72,13 @@ export function prepareIncomingInvoices(server: FastifyInstance) {
continue
}
const historicalInvoices = await server.db
.select({
vendorId: incominginvoices.vendor,
vendorName: vendors.name,
paymentType: incominginvoices.paymentType,
description: incominginvoices.description,
accounts: incominginvoices.accounts,
})
.from(incominginvoices)
.leftJoin(vendors, eq(incominginvoices.vendor, vendors.id))
.where(
and(
eq(incominginvoices.tenant, tenantId),
eq(incominginvoices.archived, false)
)
)
.orderBy(desc(incominginvoices.createdAt))
.limit(120)
const learningContext = buildLearningContext(historicalInvoices)
// -------------------------------------------------------------
// 3⃣ Jede Datei einzeln durch GPT jagen & IncomingInvoice erzeugen
// -------------------------------------------------------------
for (const file of filesRes) {
console.log(`Processing file ${file.id} for tenant ${tenantId}`)
const data = await getInvoiceDataFromGPT(server,file, tenantId, learningContext ?? undefined)
const data = await getInvoiceDataFromGPT(server,file, tenantId)
if (!data) {
server.log.warn(`GPT returned no data for file ${file.id}`)

View File

@@ -1,7 +1,5 @@
// modules/helpdesk/helpdesk.contact.service.ts
import { FastifyInstance } from 'fastify'
import { and, eq, or } from "drizzle-orm";
import { helpdesk_contacts } from "../../../db/schema";
export async function getOrCreateContact(
server: FastifyInstance,
@@ -11,35 +9,30 @@ export async function getOrCreateContact(
if (!email && !phone) throw new Error('Contact must have at least an email or phone')
// Bestehenden Kontakt prüfen
const matchConditions = []
if (email) matchConditions.push(eq(helpdesk_contacts.email, email))
if (phone) matchConditions.push(eq(helpdesk_contacts.phone, phone))
const { data: existing, error: findError } = await server.supabase
.from('helpdesk_contacts')
.select('*')
.eq('tenant_id', tenant_id)
.or(`email.eq.${email || ''},phone.eq.${phone || ''}`)
.maybeSingle()
const existing = await server.db
.select()
.from(helpdesk_contacts)
.where(
and(
eq(helpdesk_contacts.tenantId, tenant_id),
or(...matchConditions)
)
)
.limit(1)
if (existing[0]) return existing[0]
if (findError) throw findError
if (existing) return existing
// Anlegen
const created = await server.db
.insert(helpdesk_contacts)
.values({
tenantId: tenant_id,
const { data: created, error: insertError } = await server.supabase
.from('helpdesk_contacts')
.insert({
tenant_id,
email,
phone,
displayName: display_name,
customerId: customer_id,
contactId: contact_id
display_name,
customer_id,
contact_id
})
.returning()
.select()
.single()
return created[0]
if (insertError) throw insertError
return created
}

View File

@@ -2,8 +2,6 @@
import { FastifyInstance } from 'fastify'
import { getOrCreateContact } from './helpdesk.contact.service.js'
import {useNextNumberRangeNumber} from "../../utils/functions";
import { and, desc, eq } from "drizzle-orm";
import { customers, helpdesk_contacts, helpdesk_conversations } from "../../../db/schema";
export async function createConversation(
server: FastifyInstance,
@@ -27,34 +25,24 @@ export async function createConversation(
const {usedNumber } = await useNextNumberRangeNumber(server, tenant_id, "tickets")
const inserted = await server.db
.insert(helpdesk_conversations)
.values({
tenantId: tenant_id,
contactId: contactRecord.id,
channelInstanceId: channel_instance_id,
const { data, error } = await server.supabase
.from('helpdesk_conversations')
.insert({
tenant_id,
contact_id: contactRecord.id,
channel_instance_id,
subject: subject || null,
status: 'open',
createdAt: new Date(),
customerId: customer_id,
contactPersonId: contact_person_id,
ticketNumber: usedNumber
created_at: new Date().toISOString(),
customer_id,
contact_person_id,
ticket_number: usedNumber
})
.returning()
.select()
.single()
const data = inserted[0]
return {
...data,
channel_instance_id: data.channelInstanceId,
contact_id: data.contactId,
contact_person_id: data.contactPersonId,
created_at: data.createdAt,
customer_id: data.customerId,
last_message_at: data.lastMessageAt,
tenant_id: data.tenantId,
ticket_number: data.ticketNumber,
}
if (error) throw error
return data
}
export async function getConversations(
@@ -64,34 +52,22 @@ export async function getConversations(
) {
const { status, limit = 50 } = opts || {}
const filters = [eq(helpdesk_conversations.tenantId, tenant_id)]
if (status) filters.push(eq(helpdesk_conversations.status, status))
let query = server.supabase.from('helpdesk_conversations').select('*, customer_id(*)').eq('tenant_id', tenant_id)
const data = await server.db
.select({
conversation: helpdesk_conversations,
contact: helpdesk_contacts,
customer: customers,
})
.from(helpdesk_conversations)
.leftJoin(helpdesk_contacts, eq(helpdesk_contacts.id, helpdesk_conversations.contactId))
.leftJoin(customers, eq(customers.id, helpdesk_conversations.customerId))
.where(and(...filters))
.orderBy(desc(helpdesk_conversations.lastMessageAt))
.limit(limit)
if (status) query = query.eq('status', status)
query = query.order('last_message_at', { ascending: false }).limit(limit)
return data.map((entry) => ({
...entry.conversation,
helpdesk_contacts: entry.contact,
channel_instance_id: entry.conversation.channelInstanceId,
contact_id: entry.conversation.contactId,
contact_person_id: entry.conversation.contactPersonId,
created_at: entry.conversation.createdAt,
customer_id: entry.customer,
last_message_at: entry.conversation.lastMessageAt,
tenant_id: entry.conversation.tenantId,
ticket_number: entry.conversation.ticketNumber,
}))
const { data, error } = await query
if (error) throw error
const mappedData = data.map(entry => {
return {
...entry,
customer: entry.customer_id
}
})
return mappedData
}
export async function updateConversationStatus(
@@ -102,22 +78,13 @@ export async function updateConversationStatus(
const valid = ['open', 'in_progress', 'waiting_for_customer', 'answered', 'closed']
if (!valid.includes(status)) throw new Error('Invalid status')
const updated = await server.db
.update(helpdesk_conversations)
.set({ status })
.where(eq(helpdesk_conversations.id, conversation_id))
.returning()
const { data, error } = await server.supabase
.from('helpdesk_conversations')
.update({ status })
.eq('id', conversation_id)
.select()
.single()
const data = updated[0]
return {
...data,
channel_instance_id: data.channelInstanceId,
contact_id: data.contactId,
contact_person_id: data.contactPersonId,
created_at: data.createdAt,
customer_id: data.customerId,
last_message_at: data.lastMessageAt,
tenant_id: data.tenantId,
ticket_number: data.ticketNumber,
}
if (error) throw error
return data
}

View File

@@ -1,7 +1,5 @@
// modules/helpdesk/helpdesk.message.service.ts
import { FastifyInstance } from 'fastify'
import { asc, eq } from "drizzle-orm";
import { helpdesk_conversations, helpdesk_messages } from "../../../db/schema";
export async function addMessage(
server: FastifyInstance,
@@ -25,53 +23,38 @@ export async function addMessage(
) {
if (!payload?.text) throw new Error('Message payload requires text content')
const inserted = await server.db
.insert(helpdesk_messages)
.values({
tenantId: tenant_id,
conversationId: conversation_id,
authorUserId: author_user_id,
const { data: message, error } = await server.supabase
.from('helpdesk_messages')
.insert({
tenant_id,
conversation_id,
author_user_id,
direction,
payload,
rawMeta: raw_meta,
externalMessageId: external_message_id,
receivedAt: new Date(),
raw_meta,
created_at: new Date().toISOString(),
})
.returning()
.select()
.single()
const message = inserted[0]
if (error) throw error
// Letzte Nachricht aktualisieren
await server.db
.update(helpdesk_conversations)
.set({ lastMessageAt: new Date() })
.where(eq(helpdesk_conversations.id, conversation_id))
await server.supabase
.from('helpdesk_conversations')
.update({ last_message_at: new Date().toISOString() })
.eq('id', conversation_id)
return {
...message,
author_user_id: message.authorUserId,
conversation_id: message.conversationId,
created_at: message.createdAt,
external_message_id: message.externalMessageId,
raw_meta: message.rawMeta,
tenant_id: message.tenantId,
}
return message
}
export async function getMessages(server: FastifyInstance, conversation_id: string) {
const data = await server.db
.select()
.from(helpdesk_messages)
.where(eq(helpdesk_messages.conversationId, conversation_id))
.orderBy(asc(helpdesk_messages.createdAt))
const { data, error } = await server.supabase
.from('helpdesk_messages')
.select('*')
.eq('conversation_id', conversation_id)
.order('created_at', { ascending: true })
return data.map((message) => ({
...message,
author_user_id: message.authorUserId,
conversation_id: message.conversationId,
created_at: message.createdAt,
external_message_id: message.externalMessageId,
raw_meta: message.rawMeta,
tenant_id: message.tenantId,
}))
if (error) throw error
return data
}

View File

@@ -1,8 +1,6 @@
// services/notification.service.ts
import type { FastifyInstance } from 'fastify';
import {secrets} from "../utils/secrets";
import { eq } from "drizzle-orm";
import { notificationsEventTypes, notificationsItems } from "../../db/schema";
export type NotificationStatus = 'queued' | 'sent' | 'failed';
@@ -36,16 +34,16 @@ export class NotificationService {
*/
async trigger(input: TriggerInput) {
const { tenantId, userId, eventType, title, message, payload } = input;
const supabase = this.server.supabase;
// 1) Event-Typ prüfen (aktiv?)
const eventTypeRows = await this.server.db
.select()
.from(notificationsEventTypes)
.where(eq(notificationsEventTypes.eventKey, eventType))
.limit(1)
const eventTypeRow = eventTypeRows[0]
const { data: eventTypeRow, error: etErr } = await supabase
.from('notifications_event_types')
.select('event_key,is_active')
.eq('event_key', eventType)
.maybeSingle();
if (!eventTypeRow || eventTypeRow.isActive !== true) {
if (etErr || !eventTypeRow || eventTypeRow.is_active !== true) {
throw new Error(`Unbekannter oder inaktiver Event-Typ: ${eventType}`);
}
@@ -56,40 +54,40 @@ export class NotificationService {
}
// 3) Notification anlegen (status: queued)
const insertedRows = await this.server.db
.insert(notificationsItems)
.values({
tenantId,
userId,
eventType,
const { data: inserted, error: insErr } = await supabase
.from('notifications_items')
.insert({
tenant_id: tenantId,
user_id: userId,
event_type: eventType,
title,
message,
payload: payload ?? null,
channel: 'email',
status: 'queued'
})
.returning({ id: notificationsItems.id })
const inserted = insertedRows[0]
.select('id')
.single();
if (!inserted) {
throw new Error("Fehler beim Einfügen der Notification");
if (insErr || !inserted) {
throw new Error(`Fehler beim Einfügen der Notification: ${insErr?.message}`);
}
// 4) E-Mail versenden
try {
await this.sendEmail(user.email, title, message);
await this.server.db
.update(notificationsItems)
.set({ status: 'sent', sentAt: new Date() })
.where(eq(notificationsItems.id, inserted.id));
await supabase
.from('notifications_items')
.update({ status: 'sent', sent_at: new Date().toISOString() })
.eq('id', inserted.id);
return { success: true, id: inserted.id };
} catch (err: any) {
await this.server.db
.update(notificationsItems)
.set({ status: 'failed', error: String(err?.message || err) })
.where(eq(notificationsItems.id, inserted.id));
await supabase
.from('notifications_items')
.update({ status: 'failed', error: String(err?.message || err) })
.eq('id', inserted.id);
this.server.log.error({ err, notificationId: inserted.id }, 'E-Mail Versand fehlgeschlagen');
return { success: false, error: err?.message || 'E-Mail Versand fehlgeschlagen' };

View File

@@ -1,227 +0,0 @@
import { and, eq } from "drizzle-orm";
import * as schema from "../../db/schema";
import { FastifyInstance } from "fastify";
// One row of a service composition. Material rows reference a product or a
// nested service; personal rows reference an hourrate (see usage in
// recalculateServicePricesForTenant below). Unknown extra fields from the UI
// are carried along untouched via the index signature.
type CompositionRow = {
  product?: number | string | null;        // product id (material rows)
  service?: number | string | null;        // nested service id (material rows)
  hourrate?: string | null;                // hourrate uuid (personal rows)
  quantity?: number | string | null;       // units / hours; coerced with toNumber()
  price?: number | string | null;          // selling price per unit
  purchasePrice?: number | string | null;  // purchase price per unit
  [key: string]: any;
};
/**
 * Coerces an arbitrary value to a finite number.
 * null, undefined, non-numeric strings, NaN and ±Infinity all map to 0.
 */
function toNumber(value: any): number {
  if (value === null || value === undefined) return 0;
  const parsed = Number(value);
  return Number.isFinite(parsed) ? parsed : 0;
}
/**
 * Rounds a number to two decimal places via the toFixed string round-trip.
 */
function round2(value: number): number {
  return Number.parseFloat(value.toFixed(2));
}
/**
 * Reads `key` from a JSON-ish object and coerces it to a finite number.
 * Returns 0 when `source` is not an object or the key is missing/non-numeric.
 */
function getJsonNumber(source: unknown, key: string): number {
  if (!source || typeof source !== "object") return 0;
  const record = source as Record<string, unknown>;
  return toNumber(record[key]);
}
/**
 * Coerces an id-like value to a finite number.
 * Empty string, null, undefined and anything non-numeric map to null.
 * Note: 0 is a valid id here and is returned as 0, not null.
 */
function normalizeId(value: unknown): number | null {
  if (value == null || value === "") return null;
  const parsed = Number(value);
  if (!Number.isFinite(parsed)) return null;
  return parsed;
}
/**
 * Trims a candidate uuid string.
 * Returns null for non-strings and for values that are blank after trimming.
 */
function normalizeUuid(value: unknown): string | null {
  if (typeof value !== "string") return null;
  const cleaned = value.trim();
  return cleaned === "" ? null : cleaned;
}
/**
 * Recalculates composed selling/purchase prices for every service of a tenant.
 *
 * Loads all services, products and hourrates of the tenant, resolves each
 * service's material composition (products and nested services, recursively)
 * and personal composition (hourrates), then persists the recomputed totals
 * back to the services table. Services flagged `priceUpdateLocked` keep their
 * stored values and are never written.
 *
 * @param server    Fastify instance providing the drizzle `db` handle.
 * @param tenantId  Tenant whose services are recalculated.
 * @param updatedBy Optional user id written to `updatedBy` on changed rows.
 */
export async function recalculateServicePricesForTenant(server: FastifyInstance, tenantId: number, updatedBy?: string | null) {
  // Load the three entity sets in parallel; everything is tenant-scoped.
  const [services, products, hourrates] = await Promise.all([
    server.db.select().from(schema.services).where(eq(schema.services.tenant, tenantId)),
    server.db.select().from(schema.products).where(eq(schema.products.tenant, tenantId)),
    server.db.select().from(schema.hourrates).where(eq(schema.hourrates.tenant, tenantId)),
  ]);
  // Id lookups so the composition walk below is O(1) per reference.
  const serviceMap = new Map(services.map((item) => [item.id, item]));
  const productMap = new Map(products.map((item) => [item.id, item]));
  const hourrateMap = new Map(hourrates.map((item) => [item.id, item]));
  // Memoized per-service result; services can nest other services.
  const memo = new Map<number, {
    sellingTotal: number;
    purchaseTotal: number;
    materialTotal: number;
    materialPurchaseTotal: number;
    workerTotal: number;
    workerPurchaseTotal: number;
    materialComposition: CompositionRow[];
    personalComposition: CompositionRow[];
  }>();
  // Ids currently being resolved — used to break composition cycles.
  const stack = new Set<number>();
  const calculateService = (serviceId: number) => {
    if (memo.has(serviceId)) return memo.get(serviceId)!;
    const service = serviceMap.get(serviceId);
    const emptyResult = {
      sellingTotal: 0,
      purchaseTotal: 0,
      materialTotal: 0,
      materialPurchaseTotal: 0,
      workerTotal: 0,
      workerPurchaseTotal: 0,
      materialComposition: [],
      personalComposition: [],
    };
    // Unknown id → zero-priced. Note the cycle case below is intentionally
    // NOT memoized, so a service inside a cycle still gets a real result
    // once the cycle unwinds.
    if (!service) return emptyResult;
    if (stack.has(serviceId)) return emptyResult;
    // Locked services keep their stored prices during automatic price updates.
    if (service.priceUpdateLocked) {
      const lockedResult = {
        // Fall back to the flat sellingPrice if no composed total is stored.
        sellingTotal: getJsonNumber(service.sellingPriceComposed, "total") || toNumber(service.sellingPrice),
        purchaseTotal: getJsonNumber(service.purchasePriceComposed, "total"),
        materialTotal: getJsonNumber(service.sellingPriceComposed, "material"),
        materialPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "material"),
        workerTotal: getJsonNumber(service.sellingPriceComposed, "worker"),
        workerPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "worker"),
        materialComposition: Array.isArray(service.materialComposition) ? service.materialComposition as CompositionRow[] : [],
        personalComposition: Array.isArray(service.personalComposition) ? service.personalComposition as CompositionRow[] : [],
      };
      memo.set(serviceId, lockedResult);
      return lockedResult;
    }
    stack.add(serviceId);
    const materialComposition: CompositionRow[] = Array.isArray(service.materialComposition)
      ? (service.materialComposition as CompositionRow[])
      : [];
    const personalComposition: CompositionRow[] = Array.isArray(service.personalComposition)
      ? (service.personalComposition as CompositionRow[])
      : [];
    let materialTotal = 0;
    let materialPurchaseTotal = 0;
    // Material rows: a product reference wins over a nested-service reference;
    // otherwise the row's own stored prices are used.
    const normalizedMaterialComposition = materialComposition.map((entry) => {
      const quantity = toNumber(entry.quantity);
      const productId = normalizeId(entry.product);
      const childServiceId = normalizeId(entry.service);
      let sellingPrice = toNumber(entry.price);
      let purchasePrice = toNumber(entry.purchasePrice);
      if (productId) {
        const product = productMap.get(productId);
        // NOTE(review): snake_case access (`selling_price`/`purchase_price`)
        // while the hourrate branch below reads camelCase `sellingPrice`.
        // If the drizzle schema exposes camelCase columns these reads are
        // undefined and collapse to 0 — confirm against db/schema.
        sellingPrice = toNumber(product?.selling_price);
        purchasePrice = toNumber(product?.purchase_price);
      } else if (childServiceId) {
        // Nested service: recurse (memoized, cycle-guarded).
        const child = calculateService(childServiceId);
        sellingPrice = toNumber(child.sellingTotal);
        purchasePrice = toNumber(child.purchaseTotal);
      }
      materialTotal += quantity * sellingPrice;
      materialPurchaseTotal += quantity * purchasePrice;
      return {
        ...entry,
        price: round2(sellingPrice),
        purchasePrice: round2(purchasePrice),
      };
    });
    let workerTotal = 0;
    let workerPurchaseTotal = 0;
    // Personal rows: resolve current hourrate prices when the uuid is known.
    const normalizedPersonalComposition = personalComposition.map((entry) => {
      const quantity = toNumber(entry.quantity);
      const hourrateId = normalizeUuid(entry.hourrate);
      let sellingPrice = toNumber(entry.price);
      let purchasePrice = toNumber(entry.purchasePrice);
      if (hourrateId) {
        const hourrate = hourrateMap.get(hourrateId);
        if (hourrate) {
          // NOTE(review): mixed naming — `sellingPrice` (camel) next to
          // `purchase_price` (snake) on the same row; verify both exist
          // on the hourrates schema.
          sellingPrice = toNumber(hourrate.sellingPrice);
          purchasePrice = toNumber(hourrate.purchase_price);
        }
      }
      workerTotal += quantity * sellingPrice;
      workerPurchaseTotal += quantity * purchasePrice;
      return {
        ...entry,
        price: round2(sellingPrice),
        purchasePrice: round2(purchasePrice),
      };
    });
    const result = {
      sellingTotal: round2(materialTotal + workerTotal),
      purchaseTotal: round2(materialPurchaseTotal + workerPurchaseTotal),
      materialTotal: round2(materialTotal),
      materialPurchaseTotal: round2(materialPurchaseTotal),
      workerTotal: round2(workerTotal),
      workerPurchaseTotal: round2(workerPurchaseTotal),
      materialComposition: normalizedMaterialComposition,
      personalComposition: normalizedPersonalComposition,
    };
    memo.set(serviceId, result);
    stack.delete(serviceId);
    return result;
  };
  // Warm the memo for every service so the update pass below is pure lookup.
  for (const service of services) {
    calculateService(service.id);
  }
  const updates = services
    .filter((service) => !service.priceUpdateLocked)
    .map(async (service) => {
      const calc = memo.get(service.id);
      if (!calc) return;
      const sellingPriceComposed = {
        worker: calc.workerTotal,
        material: calc.materialTotal,
        total: calc.sellingTotal,
      };
      const purchasePriceComposed = {
        worker: calc.workerPurchaseTotal,
        material: calc.materialPurchaseTotal,
        total: calc.purchaseTotal,
      };
      // Change detection via JSON round-trip; key order comes from the same
      // code paths on both sides, so stringify comparison is stable here.
      const unchanged =
        JSON.stringify(service.materialComposition ?? []) === JSON.stringify(calc.materialComposition) &&
        JSON.stringify(service.personalComposition ?? []) === JSON.stringify(calc.personalComposition) &&
        JSON.stringify(service.sellingPriceComposed ?? {}) === JSON.stringify(sellingPriceComposed) &&
        JSON.stringify(service.purchasePriceComposed ?? {}) === JSON.stringify(purchasePriceComposed) &&
        round2(toNumber(service.sellingPrice)) === calc.sellingTotal;
      if (unchanged) return;
      await server.db
        .update(schema.services)
        .set({
          materialComposition: calc.materialComposition,
          personalComposition: calc.personalComposition,
          sellingPriceComposed,
          purchasePriceComposed,
          sellingPrice: calc.sellingTotal,
          updatedAt: new Date(),
          updatedBy: updatedBy ?? null,
        })
        .where(and(eq(schema.services.id, service.id), eq(schema.services.tenant, tenantId)));
    });
  // All row updates run concurrently; a single failure rejects the whole call.
  await Promise.all(updates);
}

View File

@@ -1,9 +1,6 @@
import { FastifyInstance } from "fastify";
import fp from "fastify-plugin";
import { secrets } from "../utils/secrets";
import { and, eq } from "drizzle-orm";
import { authUsers, m2mApiKeys } from "../../db/schema";
import { createHash } from "node:crypto";
/**
* Fastify Plugin für Machine-to-Machine Authentifizierung.
@@ -15,99 +12,26 @@ import { createHash } from "node:crypto";
* server.register(m2mAuthPlugin, { allowedPrefix: '/internal' })
*/
export default fp(async (server: FastifyInstance, opts: { allowedPrefix?: string } = {}) => {
const hashApiKey = (apiKey: string) =>
createHash("sha256").update(apiKey, "utf8").digest("hex")
//const allowedPrefix = opts.allowedPrefix || "/internal";
server.addHook("preHandler", async (req, reply) => {
try {
const apiKeyHeader = req.headers["x-api-key"];
const apiKey = Array.isArray(apiKeyHeader) ? apiKeyHeader[0] : apiKeyHeader;
// Nur prüfen, wenn Route unterhalb des Prefix liegt
//if (!req.url.startsWith(allowedPrefix)) return;
if (!apiKey) {
const apiKey = req.headers["x-api-key"];
if (!apiKey || apiKey !== secrets.M2M_API_KEY) {
server.log.warn(`[M2M Auth] Ungültiger oder fehlender API-Key bei ${req.url}`);
return reply.status(401).send({ error: "Unauthorized" });
}
const keyHash = hashApiKey(apiKey);
const keyRows = await server.db
.select({
id: m2mApiKeys.id,
tenantId: m2mApiKeys.tenantId,
userId: m2mApiKeys.userId,
active: m2mApiKeys.active,
expiresAt: m2mApiKeys.expiresAt,
name: m2mApiKeys.name,
userEmail: authUsers.email,
})
.from(m2mApiKeys)
.innerJoin(authUsers, eq(authUsers.id, m2mApiKeys.userId))
.where(and(
eq(m2mApiKeys.keyHash, keyHash),
eq(m2mApiKeys.active, true)
))
.limit(1)
let key = keyRows[0]
if (!key) {
const fallbackValid = apiKey === secrets.M2M_API_KEY
if (!fallbackValid) {
server.log.warn(`[M2M Auth] Ungültiger API-Key bei ${req.url}`)
return reply.status(401).send({ error: "Unauthorized" })
}
// Backward compatibility mode for one global key.
// The caller must provide user/tenant identifiers in headers.
const tenantIdHeader = req.headers["x-tenant-id"]
const userIdHeader = req.headers["x-user-id"]
const tenantId = Number(Array.isArray(tenantIdHeader) ? tenantIdHeader[0] : tenantIdHeader)
const userId = Array.isArray(userIdHeader) ? userIdHeader[0] : userIdHeader
if (!tenantId || !userId) {
return reply.status(401).send({ error: "Missing x-tenant-id or x-user-id for legacy M2M key" })
}
const users = await server.db
.select({ email: authUsers.email })
.from(authUsers)
.where(eq(authUsers.id, userId))
.limit(1)
if (!users[0]) {
return reply.status(401).send({ error: "Unknown user for legacy M2M key" })
}
req.user = {
user_id: userId,
email: users[0].email,
tenant_id: tenantId
}
} else {
if (key.expiresAt && new Date(key.expiresAt).getTime() < Date.now()) {
return reply.status(401).send({ error: "Expired API key" })
}
req.user = {
user_id: key.userId,
email: key.userEmail,
tenant_id: key.tenantId
}
await server.db
.update(m2mApiKeys)
.set({ lastUsedAt: new Date(), updatedAt: new Date() })
.where(eq(m2mApiKeys.id, key.id))
}
// Zusatzinformationen im Request (z. B. interne Kennung)
(req as any).m2m = {
verified: true,
type: "internal",
key: apiKey,
};
req.role = "m2m"
req.permissions = []
req.hasPermission = () => false
} catch (err) {
// @ts-ignore
server.log.error("[M2M Auth] Fehler beim Prüfen des API-Keys:", err);

View File

@@ -9,15 +9,13 @@ export default fp(async (server: FastifyInstance) => {
"http://localhost:3001", // dein Nuxt-Frontend
"http://127.0.0.1:3000", // dein Nuxt-Frontend
"http://192.168.1.227:3001", // dein Nuxt-Frontend
"http://192.168.1.234:3000", // dein Nuxt-Frontend
"http://192.168.1.113:3000", // dein Nuxt-Frontend
"https://beta.fedeo.de", // dein Nuxt-Frontend
"https://app.fedeo.de", // dein Nuxt-Frontend
"capacitor://localhost", // dein Nuxt-Frontend
],
methods: ["GET", "POST", "PUT", "DELETE", "OPTIONS","PATCH",
"PROPFIND", "PROPPATCH", "MKCOL", "COPY", "MOVE", "LOCK", "UNLOCK"],
allowedHeaders: ["Content-Type", "Authorization", "Context", "X-Public-Pin","Depth", "Overwrite", "Destination", "Lock-Token", "If"],
methods: ["GET", "POST", "PUT", "DELETE", "OPTIONS"],
allowedHeaders: ["Content-Type", "Authorization", "Context", "X-Public-Pin"],
exposedHeaders: ["Authorization", "Content-Disposition", "Content-Type", "Content-Length"], // optional, falls du ihn auch auslesen willst
credentials: true, // wichtig, falls du Cookies nutzt
});

View File

@@ -1,25 +1,31 @@
// src/plugins/db.ts
import fp from "fastify-plugin";
import { NodePgDatabase } from "drizzle-orm/node-postgres";
import * as schema from "../../db/schema";
import { db, pool } from "../../db"; // <--- Importiert jetzt die globale Instanz
import fp from "fastify-plugin"
import {drizzle, NodePgDatabase} from "drizzle-orm/node-postgres"
import * as schema from "../../db/schema"
import {secrets} from "../utils/secrets";
import { Pool } from "pg"
export default fp(async (server, opts) => {
// Wir nutzen die db, die wir in src/db/index.ts erstellt haben
server.decorate("db", db);
const pool = new Pool({
connectionString: secrets.DATABASE_URL,
max: 10, // je nach Last
})
// Graceful Shutdown: Wenn Fastify ausgeht, schließen wir den Pool
const db = drizzle(pool , {schema})
// Dekorieren -> überall server.db
server.decorate("db", db)
// Graceful Shutdown
server.addHook("onClose", async () => {
console.log("[DB] Closing connection pool...");
await pool.end();
});
await pool.end()
})
console.log("[Fastify] Database attached from shared instance");
});
console.log("Drizzle database connected")
})
declare module "fastify" {
interface FastifyInstance {
db: NodePgDatabase<typeof schema>
db:NodePgDatabase<typeof schema>
}
}

View File

@@ -1,7 +1,7 @@
// /plugins/services.ts
import fp from "fastify-plugin";
import { bankStatementService } from "../modules/cron/bankstatementsync.service";
import {syncDokuboxService} from "../modules/cron/dokuboximport.service";
//import {initDokuboxClient, syncDokubox} from "../modules/cron/dokuboximport.service";
import { FastifyInstance } from "fastify";
import {prepareIncomingInvoices} from "../modules/cron/prepareIncomingInvoices";
@@ -9,7 +9,7 @@ declare module "fastify" {
interface FastifyInstance {
services: {
bankStatements: ReturnType<typeof bankStatementService>;
dokuboxSync: ReturnType<typeof syncDokuboxService>;
//dokuboxSync: ReturnType<typeof syncDokubox>;
prepareIncomingInvoices: ReturnType<typeof prepareIncomingInvoices>;
};
}
@@ -18,7 +18,7 @@ declare module "fastify" {
export default fp(async function servicePlugin(server: FastifyInstance) {
server.decorate("services", {
bankStatements: bankStatementService(server),
dokuboxSync: syncDokuboxService(server),
//dokuboxSync: syncDokubox(server),
prepareIncomingInvoices: prepareIncomingInvoices(server),
});
});

View File

@@ -0,0 +1,19 @@
import { FastifyInstance } from "fastify";
import fp from "fastify-plugin";
import { createClient, SupabaseClient } from "@supabase/supabase-js";
import {secrets} from "../utils/secrets";
/**
 * Fastify plugin that builds a Supabase client with the service-role key
 * and exposes it on the instance as `server.supabase`.
 */
export default fp(async (server: FastifyInstance) => {
  const client: SupabaseClient = createClient(
    secrets.SUPABASE_URL,
    secrets.SUPABASE_SERVICE_ROLE_KEY
  );
  // Make the client available everywhere via `server.supabase`.
  server.decorate("supabase", client);
});

declare module "fastify" {
  interface FastifyInstance {
    supabase: SupabaseClient;
  }
}

View File

@@ -5,33 +5,26 @@ import swaggerUi from "@fastify/swagger-ui";
export default fp(async (server: FastifyInstance) => {
await server.register(swagger, {
mode: "dynamic",
mode: "dynamic", // wichtig: generiert echtes OpenAPI JSON
openapi: {
info: {
title: "FEDEO Backend API",
description: "OpenAPI specification for the FEDEO backend",
title: "Multi-Tenant API",
description: "API Dokumentation für dein Backend",
version: "1.0.0",
},
servers: [{ url: "/" }],
components: {
securitySchemes: {
bearerAuth: {
type: "http",
scheme: "bearer",
bearerFormat: "JWT"
}
}
}
servers: [{ url: "http://localhost:3000" }],
},
});
// @ts-ignore
await server.register(swaggerUi, {
routePrefix: "/docs",
});
// Stable raw spec path
server.get("/openapi.json", async (_req, reply) => {
return reply.send(server.swagger());
routePrefix: "/docs", // UI erreichbar unter http://localhost:3000/docs
swagger: {
info: {
title: "Multi-Tenant API",
version: "1.0.0",
},
},
exposeRoute: true,
});
});

View File

@@ -1,7 +1,5 @@
import { FastifyInstance, FastifyRequest } from "fastify";
import fp from "fastify-plugin";
import { eq } from "drizzle-orm";
import { tenants } from "../../db/schema";
export default fp(async (server: FastifyInstance) => {
server.addHook("preHandler", async (req, reply) => {
@@ -11,12 +9,11 @@ export default fp(async (server: FastifyInstance) => {
return;
}
// Tenant aus DB laden
const rows = await server.db
.select()
.from(tenants)
.where(eq(tenants.portalDomain, host))
.limit(1);
const tenant = rows[0];
const { data: tenant } = await server.supabase
.from("tenants")
.select("*")
.eq("portalDomain", host)
.single();
if(!tenant) {

View File

@@ -1,60 +1,11 @@
import { FastifyInstance } from "fastify"
import bcrypt from "bcrypt"
import { eq } from "drizzle-orm"
import jwt from "jsonwebtoken"
import { secrets } from "../../utils/secrets"
import { authUsers } from "../../../db/schema" // wichtig: Drizzle Schema importieren!
export default async function authRoutesAuthenticated(server: FastifyInstance) {
server.post("/auth/refresh", {
schema: {
tags: ["Auth"],
summary: "Refresh JWT for current authenticated user",
response: {
200: {
type: "object",
properties: {
token: { type: "string" },
},
required: ["token"],
},
401: {
type: "object",
properties: {
error: { type: "string" },
},
required: ["error"],
},
},
},
}, async (req, reply) => {
if (!req.user?.user_id) {
return reply.code(401).send({ error: "Unauthorized" })
}
const token = jwt.sign(
{
user_id: req.user.user_id,
email: req.user.email,
tenant_id: req.user.tenant_id,
},
secrets.JWT_SECRET!,
{ expiresIn: "6h" }
)
reply.setCookie("token", token, {
path: "/",
httpOnly: true,
sameSite: process.env.NODE_ENV === "production" ? "none" : "lax",
secure: process.env.NODE_ENV === "production",
maxAge: 60 * 60 * 6,
})
return { token }
})
server.post("/auth/password/change", {
schema: {
tags: ["Auth"],

View File

@@ -137,7 +137,7 @@ export default async function authRoutes(server: FastifyInstance) {
httpOnly: true,
sameSite: process.env.NODE_ENV === "production" ? "none" : "lax",
secure: process.env.NODE_ENV === "production",
maxAge: 60 * 60 * 6,
maxAge: 60 * 60 * 3,
});
return { token };

View File

@@ -4,19 +4,10 @@ import dayjs from "dayjs"
import { secrets } from "../utils/secrets"
import { insertHistoryItem } from "../utils/history"
import { decrypt, encrypt } from "../utils/crypt"
import { DE_BANK_CODE_TO_NAME } from "../utils/deBankCodes"
import { DE_BANK_CODE_TO_BIC } from "../utils/deBankBics"
import {
bankrequisitions,
bankstatements,
createddocuments,
customers,
entitybankaccounts,
incominginvoices,
statementallocations,
vendors,
} from "../../db/schema"
import {
@@ -26,322 +17,6 @@ import {
export default async function bankingRoutes(server: FastifyInstance) {
const normalizeIban = (value?: string | null) =>
String(value || "").replace(/\s+/g, "").toUpperCase()
  // Picks the partner-side IBAN from a bank statement row.
  // For customers the debtor side is preferred on incoming money (amount >= 0),
  // for vendors only on strictly positive amounts; otherwise the creditor side
  // is tried first, with the opposite side as fallback.
  // NOTE(review): the `>= 0` (customer) vs `> 0` (vendor) asymmetry means an
  // amount of exactly 0 is treated differently per partner type — confirm this
  // is intentional.
  const pickPartnerBankData = (statement: any, partnerType: "customer" | "vendor") => {
    if (!statement) return null
    const prefersDebit = partnerType === "customer"
      ? Number(statement.amount) >= 0
      : Number(statement.amount) > 0
    const primary = prefersDebit
      ? { iban: statement.debIban }
      : { iban: statement.credIban }
    const fallback = prefersDebit
      ? { iban: statement.credIban }
      : { iban: statement.debIban }
    const primaryIban = normalizeIban(primary.iban)
    if (primaryIban) {
      return {
        iban: primaryIban,
      }
    }
    const fallbackIban = normalizeIban(fallback.iban)
    if (fallbackIban) {
      return {
        iban: fallbackIban,
      }
    }
    // Neither side carries a usable IBAN.
    return null
  }
const mergePartnerIban = (infoData: Record<string, any>, iban: string, bankAccountId?: number | null) => {
if (!iban && !bankAccountId) return infoData || {}
const info = infoData && typeof infoData === "object" ? { ...infoData } : {}
if (iban) {
const existing = Array.isArray(info.bankingIbans) ? info.bankingIbans : []
const merged = [...new Set([...existing.map((i: string) => normalizeIban(i)), iban])]
info.bankingIbans = merged
if (!info.bankingIban) info.bankingIban = iban
}
if (bankAccountId) {
const existingIds = Array.isArray(info.bankAccountIds) ? info.bankAccountIds : []
if (!existingIds.includes(bankAccountId)) {
info.bankAccountIds = [...existingIds, bankAccountId]
}
}
return info
}
  // Expected total IBAN length per country code (used as an extra validity
  // check on top of the generic 15..34 range). Countries not listed here are
  // only checked against the generic bounds.
  const ibanLengthByCountry: Record<string, number> = {
    DE: 22,
    AT: 20,
    CH: 21,
    NL: 18,
    BE: 16,
    FR: 27,
    ES: 24,
    IT: 27,
    LU: 20,
  }
const isValidIbanLocal = (iban: string) => {
const normalized = normalizeIban(iban)
if (!normalized || normalized.length < 15 || normalized.length > 34) return false
if (!/^[A-Z]{2}[0-9]{2}[A-Z0-9]+$/.test(normalized)) return false
const country = normalized.slice(0, 2)
const expectedLength = ibanLengthByCountry[country]
if (expectedLength && normalized.length !== expectedLength) return false
const rearranged = normalized.slice(4) + normalized.slice(0, 4)
let numeric = ""
for (const ch of rearranged) {
if (ch >= "A" && ch <= "Z") numeric += (ch.charCodeAt(0) - 55).toString()
else numeric += ch
}
let remainder = 0
for (const digit of numeric) {
remainder = (remainder * 10 + Number(digit)) % 97
}
return remainder === 1
}
  // Resolves bank name / BIC for a German IBAN without any network call.
  // Returns null for invalid IBANs and for non-DE countries.
  const resolveGermanBankDataFromIbanLocal = (iban: string) => {
    const normalized = normalizeIban(iban)
    if (!isValidIbanLocal(normalized)) return null
    // For DE IBANs the Bankleitzahl (BLZ) sits at positions 5-12 and can be
    // read locally, then looked up in the static BLZ tables.
    if (normalized.startsWith("DE") && normalized.length === 22) {
      const bankCode = normalized.slice(4, 12)
      const bankName = DE_BANK_CODE_TO_NAME[bankCode] || `Unbekannt (BLZ ${bankCode})`
      const bic = DE_BANK_CODE_TO_BIC[bankCode] || null
      return {
        bankName,
        bic,
        bankCode,
      }
    }
    return null
  }
  // Finds (or creates) the entitybankaccounts row for an IBAN and returns its
  // id. IBANs are stored encrypted, so matching requires decrypting every
  // tenant account and comparing normalized values.
  // NOTE(review): this decrypt-and-scan is O(accounts) per call — acceptable
  // only while tenants have few accounts.
  const resolveEntityBankAccountId = async (
    tenantId: number,
    userId: string,
    iban: string
  ) => {
    const normalizedIban = normalizeIban(iban)
    if (!normalizedIban) return null
    // Local BLZ lookup; null for non-DE or invalid IBANs.
    const bankData = resolveGermanBankDataFromIbanLocal(normalizedIban)
    const allAccounts = await server.db
      .select({
        id: entitybankaccounts.id,
        ibanEncrypted: entitybankaccounts.ibanEncrypted,
        bankNameEncrypted: entitybankaccounts.bankNameEncrypted,
        bicEncrypted: entitybankaccounts.bicEncrypted,
      })
      .from(entitybankaccounts)
      .where(eq(entitybankaccounts.tenant, tenantId))
    // Match by decrypted, normalized IBAN; undecryptable rows are skipped.
    const existing = allAccounts.find((row) => {
      if (!row.ibanEncrypted) return false
      try {
        const decryptedIban = decrypt(row.ibanEncrypted as any)
        return normalizeIban(decryptedIban) === normalizedIban
      } catch {
        return false
      }
    })
    if (existing?.id) {
      // Refresh stored bank name / BIC if the local lookup knows better.
      if (bankData) {
        let currentBankName = ""
        let currentBic = ""
        try {
          currentBankName = String(decrypt(existing.bankNameEncrypted as any) || "").trim()
        } catch {
          currentBankName = ""
        }
        try {
          currentBic = String(decrypt((existing as any).bicEncrypted as any) || "").trim()
        } catch {
          currentBic = ""
        }
        const nextBankName = bankData?.bankName || "Unbekannt"
        const nextBic = bankData?.bic || "UNBEKANNT"
        // Only write when something actually changed.
        if (currentBankName !== nextBankName || currentBic !== nextBic) {
          await server.db
            .update(entitybankaccounts)
            .set({
              bankNameEncrypted: encrypt(nextBankName),
              bicEncrypted: encrypt(nextBic),
              updatedAt: new Date(),
              updatedBy: userId,
            })
            .where(and(eq(entitybankaccounts.id, Number(existing.id)), eq(entitybankaccounts.tenant, tenantId)))
        }
      }
      return Number(existing.id)
    }
    // No existing account: create one with encrypted fields and placeholder
    // bank data when the BLZ lookup found nothing.
    const [created] = await server.db
      .insert(entitybankaccounts)
      .values({
        tenant: tenantId,
        ibanEncrypted: encrypt(normalizedIban),
        bicEncrypted: encrypt(bankData?.bic || "UNBEKANNT"),
        bankNameEncrypted: encrypt(bankData?.bankName || "Unbekannt"),
        description: "Automatisch aus Bankbuchung übernommen",
        updatedAt: new Date(),
        updatedBy: userId,
      })
      .returning({ id: entitybankaccounts.id })
    return created?.id ? Number(created.id) : null
  }
  // GET /banking/iban/:iban — offline IBAN validation plus German bank-data
  // lookup. Responds 400 when the path parameter normalizes to empty,
  // 500 on unexpected errors.
  server.get("/banking/iban/:iban", async (req, reply) => {
    try {
      const { iban } = req.params as { iban: string }
      const normalized = normalizeIban(iban)
      if (!normalized) {
        return reply.code(400).send({ error: "IBAN missing" })
      }
      const valid = isValidIbanLocal(normalized)
      // bankData is null for non-DE IBANs — the response then carries nulls.
      const bankData = resolveGermanBankDataFromIbanLocal(normalized)
      return reply.send({
        iban: normalized,
        valid,
        bic: bankData?.bic || null,
        bankName: bankData?.bankName || null,
        bankCode: bankData?.bankCode || null,
      })
    } catch (err) {
      server.log.error(err)
      return reply.code(500).send({ error: "Failed to resolve IBAN data" })
    }
  })
  // After a statement is allocated to a created document, copy the partner
  // IBAN from the statement into the matched customer's infoData (and link
  // the entity bank account). Best-effort: silently returns on any missing
  // precondition.
  const assignIbanFromStatementToCustomer = async (tenantId: number, userId: string, statementId: number, createdDocumentId?: number) => {
    if (!createdDocumentId) return
    const [statement] = await server.db
      .select()
      .from(bankstatements)
      .where(and(eq(bankstatements.id, statementId), eq(bankstatements.tenant, tenantId)))
      .limit(1)
    if (!statement) return
    // Resolve the customer through the created document.
    const [doc] = await server.db
      .select({ customer: createddocuments.customer })
      .from(createddocuments)
      .where(and(eq(createddocuments.id, createdDocumentId), eq(createddocuments.tenant, tenantId)))
      .limit(1)
    const customerId = doc?.customer
    if (!customerId) return
    const partnerBank = pickPartnerBankData(statement, "customer")
    if (!partnerBank?.iban) return
    const [customer] = await server.db
      .select({ id: customers.id, infoData: customers.infoData })
      .from(customers)
      .where(and(eq(customers.id, customerId), eq(customers.tenant, tenantId)))
      .limit(1)
    if (!customer) return
    // Upserts the entitybankaccounts row for this IBAN; may be null.
    const bankAccountId = await resolveEntityBankAccountId(
      tenantId,
      userId,
      partnerBank.iban
    )
    const newInfoData = mergePartnerIban(
      (customer.infoData || {}) as Record<string, any>,
      partnerBank.iban,
      bankAccountId
    )
    await server.db
      .update(customers)
      .set({
        infoData: newInfoData,
        updatedAt: new Date(),
        updatedBy: userId,
      })
      .where(and(eq(customers.id, customerId), eq(customers.tenant, tenantId)))
  }
/**
 * After a bank statement was allocated to an incoming invoice, persist the
 * counterparty IBAN from that statement on the linked vendor's infoData.
 * Silently no-ops (early return) whenever any link in the chain is missing.
 */
const assignIbanFromStatementToVendor = async (tenantId: number, userId: string, statementId: number, incomingInvoiceId?: number) => {
    if (!incomingInvoiceId) return

    // Load the statement, scoped to the tenant.
    const statementRows = await server.db
      .select()
      .from(bankstatements)
      .where(and(eq(bankstatements.id, statementId), eq(bankstatements.tenant, tenantId)))
      .limit(1)
    const statementRow = statementRows[0]
    if (!statementRow) return

    // Resolve which vendor the incoming invoice belongs to.
    const invoiceRows = await server.db
      .select({ vendor: incominginvoices.vendor })
      .from(incominginvoices)
      .where(and(eq(incominginvoices.id, incomingInvoiceId), eq(incominginvoices.tenant, tenantId)))
      .limit(1)
    const vendorId = invoiceRows[0]?.vendor
    if (!vendorId) return

    // Extract the counterparty ("vendor" side) bank data from the statement.
    const partnerBank = pickPartnerBankData(statementRow, "vendor")
    if (!partnerBank?.iban) return

    const vendorRows = await server.db
      .select({ id: vendors.id, infoData: vendors.infoData })
      .from(vendors)
      .where(and(eq(vendors.id, vendorId), eq(vendors.tenant, tenantId)))
      .limit(1)
    const vendorRow = vendorRows[0]
    if (!vendorRow) return

    // Ensure an entity bank account exists for the IBAN, then merge its
    // reference into the vendor's infoData blob.
    const bankAccountId = await resolveEntityBankAccountId(tenantId, userId, partnerBank.iban)
    const mergedInfoData = mergePartnerIban(
      (vendorRow.infoData || {}) as Record<string, any>,
      partnerBank.iban,
      bankAccountId
    )

    await server.db
      .update(vendors)
      .set({ infoData: mergedInfoData, updatedAt: new Date(), updatedBy: userId })
      .where(and(eq(vendors.id, vendorId), eq(vendors.tenant, tenantId)))
  }
// ------------------------------------------------------------------
// 🔐 GoCardLess Token Handling
@@ -496,35 +171,9 @@ export default async function bankingRoutes(server: FastifyInstance) {
const createdRecord = inserted[0]
if (createdRecord?.createddocument) {
try {
await assignIbanFromStatementToCustomer(
req.user.tenant_id,
req.user.user_id,
Number(createdRecord.bankstatement),
Number(createdRecord.createddocument)
)
} catch (err) {
server.log.warn({ err, allocationId: createdRecord.id }, "Konnte IBAN nicht automatisch beim Kunden hinterlegen")
}
}
if (createdRecord?.incominginvoice) {
try {
await assignIbanFromStatementToVendor(
req.user.tenant_id,
req.user.user_id,
Number(createdRecord.bankstatement),
Number(createdRecord.incominginvoice)
)
} catch (err) {
server.log.warn({ err, allocationId: createdRecord.id }, "Konnte IBAN nicht automatisch beim Lieferanten hinterlegen")
}
}
await insertHistoryItem(server, {
entity: "bankstatements",
entityId: Number(createdRecord.bankstatement),
entityId: createdRecord.id,
action: "created",
created_by: req.user.user_id,
tenant_id: req.user.tenant_id,
@@ -567,7 +216,7 @@ export default async function bankingRoutes(server: FastifyInstance) {
await insertHistoryItem(server, {
entity: "bankstatements",
entityId: Number(old.bankstatement),
entityId: id,
action: "deleted",
created_by: req.user.user_id,
tenant_id: req.user.tenant_id,

View File

@@ -1,58 +0,0 @@
import { FastifyInstance } from "fastify";
import { eq } from "drizzle-orm";
import { db } from "../../../db"; // <--- PFAD ZUR DB INSTANZ ANPASSEN
import { devices } from "../../../db/schema";
// Shape of the health-ping payload we expect from an ESP32 terminal.
interface HealthBody {
  terminal_id: string;     // unique external id of the terminal (required)
  ip_address?: string;     // current network address, if reported
  wifi_rssi?: number;      // Wi-Fi signal strength as reported by the device
  uptime_seconds?: number; // seconds since the device last booted
  heap_free?: number;      // free heap memory reported by the device
  [key: string]: any;      // allows additional, forward-compatible fields
}
export default async function devicesManagementRoutes(server: FastifyInstance) {
  /**
   * POST /health — heartbeat endpoint for ESP32 terminals.
   * Updates lastSeen / lastDebugInfo for the device matching terminal_id.
   * Responds 400 when the id is missing, 404 for unknown devices, 200 on success.
   */
  server.post<{ Body: HealthBody }>(
    "/health",
    async (req, reply) => {
      try {
        const data = req.body;
        // 1. Reject pings that cannot be attributed to a device.
        if (!data.terminal_id) {
          server.log.warn({ body: data }, "Health check received without terminal_id");
          return reply.code(400).send({ error: "terminal_id missing" });
        }
        server.log.info({ terminal_id: data.terminal_id }, "Health ping received");
        // 2. Stamp the device as alive and keep the full payload for debugging.
        const result = await server.db
          .update(devices)
          .set({
            lastSeen: new Date(),      // "last seen" = now
            lastDebugInfo: data        // store the whole JSON payload
          })
          .where(eq(devices.externalId, data.terminal_id))
          .returning({ id: devices.id });
        // 3. No row updated -> this terminal id is not registered.
        if (result.length === 0) {
          server.log.warn(`Unknown terminal attempted health check: ${data.terminal_id}`);
          return reply.code(404).send({ error: "Device not found" });
        }
        return reply.code(200).send({ status: "ok" });
      } catch (err: any) {
        // Log the full error server-side, but do not leak internals
        // (err.message) to the untrusted, unauthenticated device.
        server.log.error({ err }, "Health check failed");
        return reply.code(500).send({ error: "Internal Server Error" });
      }
    }
  );
}

View File

@@ -1,39 +1,37 @@
import { FastifyInstance } from "fastify";
import { and, desc, eq } from "drizzle-orm";
import { authProfiles, devices, stafftimeevents } from "../../../db/schema";
import {and, desc, eq} from "drizzle-orm";
import {authProfiles, devices, stafftimeevents} from "../../../db/schema";
export default async function devicesRFIDRoutes(server: FastifyInstance) {
server.post(
"/rfid/createevent/:terminal_id",
async (req, reply) => {
try {
// 1. Timestamp aus dem Body holen (optional)
const { rfid_id, timestamp } = req.body as {
rfid_id: string,
timestamp?: number // Kann undefined sein (Live) oder Zahl (Offline)
};
const { terminal_id } = req.params as { terminal_id: string };
const {rfid_id} = req.body as {rfid_id: string};
const {terminal_id} = req.params as {terminal_id: string};
if (!rfid_id || !terminal_id) {
if(!rfid_id ||!terminal_id) {
console.log(`Missing Params`);
return reply.code(400).send(`Missing Params`);
return reply.code(400).send(`Missing Params`)
}
// 2. Gerät suchen
const device = await server.db
.select()
.from(devices)
.where(eq(devices.externalId, terminal_id))
.where(
eq(devices.externalId, terminal_id)
)
.limit(1)
.then(rows => rows[0]);
if (!device) {
if(!device) {
console.log(`Device ${terminal_id} not found`);
return reply.code(400).send(`Device ${terminal_id} not found`);
return reply.code(400).send(`Device ${terminal_id} not found`)
}
// 3. User-Profil suchen
const profile = await server.db
.select()
.from(authProfiles)
@@ -46,56 +44,55 @@ export default async function devicesRFIDRoutes(server: FastifyInstance) {
.limit(1)
.then(rows => rows[0]);
if (!profile) {
if(!profile) {
console.log(`Profile for Token ${rfid_id} not found`);
return reply.code(400).send(`Profile for Token ${rfid_id} not found`);
return reply.code(400).send(`Profile for Token ${rfid_id} not found`)
}
// 4. Letztes Event suchen (für Status-Toggle Work Start/End)
const lastEvent = await server.db
.select()
.from(stafftimeevents)
.where(eq(stafftimeevents.user_id, profile.user_id))
.orderBy(desc(stafftimeevents.eventtime))
.where(
eq(stafftimeevents.user_id, profile.user_id)
)
.orderBy(desc(stafftimeevents.eventtime)) // <-- Sortierung: Neuestes zuerst
.limit(1)
.then(rows => rows[0]);
// 5. Zeitstempel Logik (WICHTIG!)
// Der ESP32 sendet Unix-Timestamp in SEKUNDEN. JS braucht MILLISEKUNDEN.
// Wenn kein Timestamp kommt (0 oder undefined), nehmen wir JETZT.
const actualEventTime = (timestamp && timestamp > 0)
? new Date(timestamp * 1000)
: new Date();
console.log(lastEvent)
// 6. Event Typ bestimmen (Toggle Logik)
// Falls noch nie gestempelt wurde (lastEvent undefined), fangen wir mit start an.
const nextEventType = (lastEvent?.eventtype === "work_start")
? "work_end"
: "work_start";
const dataToInsert = {
tenant_id: device.tenant,
user_id: profile.user_id,
actortype: "system",
eventtime: actualEventTime, // Hier nutzen wir die berechnete Zeit
eventtype: nextEventType,
source: "TERMINAL" // Habe ich von WEB auf TERMINAL geändert (optional)
};
eventtime: new Date(),
eventtype: lastEvent.eventtype === "work_start" ? "work_end" : "work_start",
source: "WEB"
}
console.log(`New Event for ${profile.user_id}: ${nextEventType} @ ${actualEventTime.toISOString()}`);
console.log(dataToInsert)
const [created] = await server.db
.insert(stafftimeevents)
//@ts-ignore
.values(dataToInsert)
.returning();
return created;
.returning()
return created
} catch (err: any) {
console.error(err);
return reply.code(400).send({ error: err.message });
console.error(err)
return reply.code(400).send({ error: err.message })
}
console.log(req.body)
return
}
);
}

View File

@@ -1,4 +1,6 @@
import { FastifyInstance } from "fastify";
import jwt from "jsonwebtoken";
import {insertHistoryItem} from "../utils/history";
import {buildExportZip} from "../utils/export/datev";
import {s3} from "../utils/s3";
import {GetObjectCommand, PutObjectCommand} from "@aws-sdk/client-s3"
@@ -7,8 +9,6 @@ import dayjs from "dayjs";
import {randomUUID} from "node:crypto";
import {secrets} from "../utils/secrets";
import {createSEPAExport} from "../utils/export/sepa";
import {generatedexports} from "../../db/schema";
import {eq} from "drizzle-orm";
const createDatevExport = async (server:FastifyInstance,req:any,startDate,endDate,beraternr,mandantennr) => {
try {
@@ -45,21 +45,25 @@ const createDatevExport = async (server:FastifyInstance,req:any,startDate,endDat
console.log(url)
// 5) In Haupt-DB speichern
const inserted = await server.db
.insert(generatedexports)
.values({
tenantId: req.user.tenant_id,
startDate: new Date(startDate),
endDate: new Date(endDate),
validUntil: dayjs().add(24, "hours").toDate(),
filePath: fileKey,
url,
type: "datev",
})
.returning()
// 5) In Supabase-DB speichern
const { data, error } = await server.supabase
.from("exports")
.insert([
{
tenant_id: req.user.tenant_id,
start_date: startDate,
end_date: endDate,
valid_until: dayjs().add(24,"hours").toISOString(),
file_path: fileKey,
url: url,
created_at: new Date().toISOString(),
},
])
.select()
.single()
console.log(inserted[0])
console.log(data)
console.log(error)
} catch (error) {
console.log(error)
}
@@ -116,22 +120,9 @@ export default async function exportRoutes(server: FastifyInstance) {
//List Exports Available for Download
server.get("/exports", async (req,reply) => {
const data = await server.db
.select({
id: generatedexports.id,
created_at: generatedexports.createdAt,
tenant_id: generatedexports.tenantId,
start_date: generatedexports.startDate,
end_date: generatedexports.endDate,
valid_until: generatedexports.validUntil,
type: generatedexports.type,
url: generatedexports.url,
file_path: generatedexports.filePath,
})
.from(generatedexports)
.where(eq(generatedexports.tenantId, req.user.tenant_id))
const {data,error} = await server.supabase.from("exports").select().eq("tenant_id",req.user.tenant_id)
console.log(data)
console.log(data,error)
reply.send(data)
})

View File

@@ -1,6 +1,6 @@
import { FastifyInstance } from "fastify";
import {createInvoicePDF, createTimeSheetPDF} from "../utils/pdf";
import {encodeBase64ToNiimbot, generateLabel, useNextNumberRangeNumber} from "../utils/functions";
//import {encodeBase64ToNiimbot, generateLabel, useNextNumberRangeNumber} from "../utils/functions";
import dayjs from "dayjs";
//import { ready as zplReady } from 'zpl-renderer-js'
//import { renderZPL } from "zpl-image";
@@ -15,6 +15,7 @@ import timezone from "dayjs/plugin/timezone.js";
import {generateTimesEvaluation} from "../modules/time/evaluation.service";
import {citys} from "../../db/schema";
import {eq} from "drizzle-orm";
import {useNextNumberRangeNumber} from "../utils/functions";
import {executeManualGeneration, finishManualGeneration} from "../modules/serialexecution.service";
dayjs.extend(customParseFormat)
dayjs.extend(isoWeek)
@@ -99,25 +100,31 @@ export default async function functionRoutes(server: FastifyInstance) {
server.get('/functions/check-zip/:zip', async (req, reply) => {
const { zip } = req.params as { zip: string }
const normalizedZip = String(zip || "").replace(/\D/g, "")
if (normalizedZip.length !== 5) {
return reply.code(400).send({ error: 'ZIP must contain exactly 5 digits' })
if (!zip) {
return reply.code(400).send({ error: 'ZIP is required' })
}
try {
const data = await server.db
//@ts-ignore
const data = await server.db.select().from(citys).where(eq(citys.zip,zip))
/*const { data, error } = await server.supabase
.from('citys')
.select()
.from(citys)
.where(eq(citys.zip, Number(normalizedZip)))
.eq('zip', zip)
.maybeSingle()
if (error) {
console.log(error)
return reply.code(500).send({ error: 'Database error' })
}*/
if (!data.length) {
if (!data) {
return reply.code(404).send({ error: 'ZIP not found' })
}
const city = data[0]
//districtMap
const bundeslaender = [
{ code: 'DE-BW', name: 'Baden-Württemberg' },
@@ -141,8 +148,9 @@ export default async function functionRoutes(server: FastifyInstance) {
return reply.send({
...city,
state_code: bundeslaender.find(i => i.name === city.countryName)?.code || null
...data,
//@ts-ignore
state_code: bundeslaender.find(i => i.name === data.countryName)
})
} catch (err) {
console.log(err)
@@ -171,25 +179,44 @@ export default async function functionRoutes(server: FastifyInstance) {
await server.services.prepareIncomingInvoices.run(req.user.tenant_id)
})
server.post('/functions/services/syncdokubox', async (req, reply) => {
await server.services.dokuboxSync.run()
/*server.post('/print/zpl/preview', async (req, reply) => {
const { zpl, widthMm = 50, heightMm = 30, dpmm = 8, asBase64 = false } = req.body as {zpl:string,widthMm:number,heightMm:number,dpmm:number,asBase64:string}
console.log(widthMm,heightMm,dpmm)
if (!zpl) {
return reply.code(400).send({ error: 'Missing ZPL string' })
}
try {
// 1⃣ Renderer initialisieren
const { api } = await zplReady
// 2⃣ Rendern (liefert base64-encoded PNG)
const base64Png = await api.zplToBase64Async(zpl, widthMm, heightMm, dpmm)
return await encodeBase64ToNiimbot(base64Png, 'top')
} catch (err) {
console.error('[ZPL Preview Error]', err)
return reply.code(500).send({ error: err.message || 'Failed to render ZPL' })
}
})
server.post('/print/label', async (req, reply) => {
const { context, width = 584, height = 354 } = req.body as {context:any,width:number,height:number}
const { context, width=584, heigth=354 } = req.body as {context:any,width:number,heigth:number}
try {
const base64 = await generateLabel(context,width,height)
const base64 = await generateLabel(context,width,heigth)
return {
encoded: await encodeBase64ToNiimbot(base64, 'top'),
base64: base64
}
} catch (err) {
console.error('[Label Render Error]', err)
return reply.code(500).send({ error: err.message || 'Failed to render label' })
console.error('[ZPL Preview Error]', err)
return reply.code(500).send({ error: err.message || 'Failed to render ZPL' })
}
})
})*/
}

View File

@@ -3,11 +3,12 @@ import { FastifyInstance } from "fastify";
export default async function routes(server: FastifyInstance) {
server.get("/ping", async () => {
// Testquery gegen DB
const result = await server.db.execute("SELECT NOW()");
const { data, error } = await server.supabase.from("tenants").select("id").limit(1);
return {
status: "ok",
db: JSON.stringify(result.rows[0]),
db: error ? "not connected" : "connected",
tenant_count: data?.length ?? 0
};
});
}

View File

@@ -3,9 +3,8 @@ import { FastifyPluginAsync } from 'fastify'
import { createConversation } from '../modules/helpdesk/helpdesk.conversation.service.js'
import { addMessage } from '../modules/helpdesk/helpdesk.message.service.js'
import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js'
import { findCustomerOrContactByEmailOrDomain } from "../utils/helpers";
import { eq } from "drizzle-orm";
import { helpdesk_conversations, helpdesk_messages } from "../../db/schema";
import {extractDomain, findCustomerOrContactByEmailOrDomain} from "../utils/helpers";
import {useNextNumberRangeNumber} from "../utils/functions";
// -------------------------------------------------------------
// 📧 Interne M2M-Route für eingehende E-Mails
@@ -53,12 +52,12 @@ const helpdeskInboundEmailRoutes: FastifyPluginAsync = async (server) => {
// 3⃣ Konversation anhand In-Reply-To suchen
let conversationId: string | null = null
if (in_reply_to) {
const msg = await server.db
.select({ conversationId: helpdesk_messages.conversationId })
.from(helpdesk_messages)
.where(eq(helpdesk_messages.externalMessageId, in_reply_to))
.limit(1)
conversationId = msg[0]?.conversationId || null
const { data: msg } = await server.supabase
.from('helpdesk_messages')
.select('conversation_id')
.eq('external_message_id', in_reply_to)
.maybeSingle()
conversationId = msg?.conversation_id || null
}
// 4⃣ Neue Konversation anlegen falls keine existiert
@@ -74,12 +73,12 @@ const helpdeskInboundEmailRoutes: FastifyPluginAsync = async (server) => {
})
conversationId = conversation.id
} else {
const rows = await server.db
.select()
.from(helpdesk_conversations)
.where(eq(helpdesk_conversations.id, conversationId))
.limit(1)
conversation = rows[0]
const { data } = await server.supabase
.from('helpdesk_conversations')
.select('*')
.eq('id', conversationId)
.single()
conversation = data
}
// 5⃣ Nachricht speichern
@@ -97,7 +96,7 @@ const helpdeskInboundEmailRoutes: FastifyPluginAsync = async (server) => {
return res.status(201).send({
success: true,
conversation_id: conversationId,
ticket_number: conversation?.ticket_number || conversation?.ticketNumber,
ticket_number: conversation.ticket_number,
})
})
}

View File

@@ -3,9 +3,70 @@ import { FastifyPluginAsync } from 'fastify'
import { createConversation } from '../modules/helpdesk/helpdesk.conversation.service.js'
import { addMessage } from '../modules/helpdesk/helpdesk.message.service.js'
import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js'
import { findCustomerOrContactByEmailOrDomain } from "../utils/helpers";
import { eq } from "drizzle-orm";
import { helpdesk_channel_instances } from "../../db/schema";
/**
* Öffentliche Route zum Empfang eingehender Kontaktformular-Nachrichten.
* Authentifizierung: über `public_token` aus helpdesk_channel_instances
*/
/** Extract the lower-cased domain part of an e-mail address, or null. */
function extractDomain(email) {
  if (!email) return null
  const parts = email.split("@")
  return parts.length === 2 ? parts[1].toLowerCase() : null
}

/**
 * Resolve which customer (and optionally contact) a sender address belongs to.
 *
 * Lookup order:
 *  1. exact match in the contacts table,
 *  2. exact address or shared-domain match against each customer's
 *     infoData.email / infoData.invoiceEmail.
 * Returns { customer, contact } or null when nothing matches.
 */
async function findCustomerOrContactByEmailOrDomain(server, fromMail, tenantId) {
  // Normalize the sender so comparisons against the lower-cased stored
  // addresses below are case-insensitive (e-mail addresses are in practice).
  const sender = typeof fromMail === "string" ? fromMail.toLowerCase() : fromMail
  const senderDomain = extractDomain(sender)
  if (!senderDomain) return null

  // 1) Direct match via the contacts table.
  const { data: contactMatch } = await server.supabase
    .from("contacts")
    .select("id, customer")
    .eq("email", sender)
    .eq("tenant", tenantId)
    .maybeSingle()
  // BUGFIX: the selected column is `customer`, not `customer_id` — the old
  // check `contactMatch?.customer_id` was always undefined, so direct
  // contact matches were never returned.
  if (contactMatch?.customer) return {
    customer: contactMatch.customer,
    contact: contactMatch.id
  }

  // 2) Load all customers of the tenant to compare mail / invoice mail.
  const { data: customers, error } = await server.supabase
    .from("customers")
    .select("id, infoData")
    .eq("tenant", tenantId)
  if (error) {
    console.error(`[Helpdesk] Fehler beim Laden der Kunden:`, error.message)
    return null
  }

  // 3) Exact address match or shared domain counts as a hit.
  for (const c of customers || []) {
    const info = c.infoData || {}
    const email = info.email?.toLowerCase()
    const invoiceEmail = info.invoiceEmail?.toLowerCase()
    if (
      sender === email ||
      sender === invoiceEmail ||
      senderDomain === extractDomain(email) ||
      senderDomain === extractDomain(invoiceEmail)
    ) {
      return { customer: c.id, contact: null }
    }
  }
  return null
}
const helpdeskInboundRoutes: FastifyPluginAsync = async (server) => {
// Öffentliche POST-Route
@@ -24,18 +85,17 @@ const helpdeskInboundRoutes: FastifyPluginAsync = async (server) => {
}
// 1⃣ Kanalinstanz anhand des Tokens ermitteln
const channels = await server.db
.select()
.from(helpdesk_channel_instances)
.where(eq(helpdesk_channel_instances.publicToken, public_token))
.limit(1)
const channel = channels[0]
const { data: channel, error: channelError } = await server.supabase
.from('helpdesk_channel_instances')
.select('*')
.eq('public_token', public_token)
.single()
if (!channel) {
if (channelError || !channel) {
return res.status(404).send({ error: 'Invalid channel token' })
}
const tenant_id = channel.tenantId
const tenant_id = channel.tenant_id
const channel_instance_id = channel.id
// @ts-ignore

View File

@@ -5,13 +5,6 @@ import { addMessage, getMessages } from '../modules/helpdesk/helpdesk.message.se
import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js'
import {decrypt, encrypt} from "../utils/crypt";
import nodemailer from "nodemailer"
import { eq } from "drizzle-orm";
import {
helpdesk_channel_instances,
helpdesk_contacts,
helpdesk_conversations,
helpdesk_messages,
} from "../../db/schema";
const helpdeskRoutes: FastifyPluginAsync = async (server) => {
// 📩 1. Liste aller Konversationen
@@ -65,30 +58,15 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
const tenant_id = req.user?.tenant_id
const {id: conversation_id} = req.params as {id: string}
const rows = await server.db
.select({
conversation: helpdesk_conversations,
contact: helpdesk_contacts
})
.from(helpdesk_conversations)
.leftJoin(helpdesk_contacts, eq(helpdesk_contacts.id, helpdesk_conversations.contactId))
.where(eq(helpdesk_conversations.id, conversation_id))
const { data, error } = await server.supabase
.from('helpdesk_conversations')
.select('*, helpdesk_contacts(*)')
.eq('tenant_id', tenant_id)
.eq('id', conversation_id)
.single()
const data = rows[0]
if (!data || data.conversation.tenantId !== tenant_id) return res.status(404).send({ error: 'Conversation not found' })
return res.send({
...data.conversation,
channel_instance_id: data.conversation.channelInstanceId,
contact_id: data.conversation.contactId,
contact_person_id: data.conversation.contactPersonId,
created_at: data.conversation.createdAt,
customer_id: data.conversation.customerId,
last_message_at: data.conversation.lastMessageAt,
tenant_id: data.conversation.tenantId,
ticket_number: data.conversation.ticketNumber,
helpdesk_contacts: data.contact,
})
if (error) return res.status(404).send({ error: 'Conversation not found' })
return res.send(data)
})
// 🔄 4. Konversation Status ändern
@@ -203,39 +181,36 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
safeConfig.smtp.pass = encrypt(safeConfig.smtp.pass)
}
const inserted = await server.db
.insert(helpdesk_channel_instances)
.values({
tenantId: tenant_id,
typeId: type_id,
// Speichern in Supabase
const { data, error } = await server.supabase
.from("helpdesk_channel_instances")
.insert({
tenant_id,
type_id,
name,
config: safeConfig,
isActive: is_active,
is_active,
})
.returning()
.select()
.single()
const data = inserted[0]
if (!data) throw new Error("Konnte Channel nicht erstellen")
const responseConfig: any = data.config
if (error) throw error
// sensible Felder aus Response entfernen
if (responseConfig?.imap) {
delete responseConfig.imap.host
delete responseConfig.imap.user
delete responseConfig.imap.pass
if (data.config?.imap) {
delete data.config.imap.host
delete data.config.imap.user
delete data.config.imap.pass
}
if (responseConfig?.smtp) {
delete responseConfig.smtp.host
delete responseConfig.smtp.user
delete responseConfig.smtp.pass
if (data.config?.smtp) {
delete data.config.smtp.host
delete data.config.smtp.user
delete data.config.smtp.pass
}
reply.send({
message: "E-Mail-Channel erfolgreich erstellt",
channel: {
...data,
config: responseConfig
},
channel: data,
})
} catch (err) {
console.error("Fehler bei Channel-Erstellung:", err)
@@ -259,29 +234,29 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
const { text } = req.body as { text: string }
// 🔹 Konversation inkl. Channel + Kontakt laden
const rows = await server.db
.select({
conversation: helpdesk_conversations,
contact: helpdesk_contacts,
channel: helpdesk_channel_instances,
})
.from(helpdesk_conversations)
.leftJoin(helpdesk_contacts, eq(helpdesk_contacts.id, helpdesk_conversations.contactId))
.leftJoin(helpdesk_channel_instances, eq(helpdesk_channel_instances.id, helpdesk_conversations.channelInstanceId))
.where(eq(helpdesk_conversations.id, conversationId))
.limit(1)
const conv = rows[0]
const { data: conv, error: convErr } = await server.supabase
.from("helpdesk_conversations")
.select(`
id,
tenant_id,
subject,
channel_instance_id,
helpdesk_contacts(email),
helpdesk_channel_instances(config, name),
ticket_number
`)
.eq("id", conversationId)
.single()
console.log(conv)
if (!conv) {
if (convErr || !conv) {
reply.status(404).send({ error: "Konversation nicht gefunden" })
return
}
const contact = conv.contact as unknown as {email: string}
const channel = conv.channel as unknown as {name: string, config: any}
const contact = conv.helpdesk_contacts as unknown as {email: string}
const channel = conv.helpdesk_channel_instances as unknown as {name: string}
console.log(contact)
if (!contact?.email) {
@@ -313,7 +288,7 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
const mailOptions = {
from: `"${channel?.name}" <${user}>`,
to: contact.email,
subject: `${conv.conversation.ticketNumber} | ${conv.conversation.subject}` || `${conv.conversation.ticketNumber} | Antwort vom FEDEO Helpdesk`,
subject: `${conv.ticket_number} | ${conv.subject}` || `${conv.ticket_number} | Antwort vom FEDEO Helpdesk`,
text,
}
@@ -321,22 +296,24 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
console.log(`[Helpdesk SMTP] Gesendet an ${contact.email}: ${info.messageId}`)
// 💾 Nachricht speichern
await server.db
.insert(helpdesk_messages)
.values({
tenantId: conv.conversation.tenantId,
conversationId: conversationId,
const { error: insertErr } = await server.supabase
.from("helpdesk_messages")
.insert({
tenant_id: conv.tenant_id,
conversation_id: conversationId,
direction: "outgoing",
payload: { type: "text", text },
externalMessageId: info.messageId,
receivedAt: new Date(),
external_message_id: info.messageId,
received_at: new Date().toISOString(),
})
if (insertErr) throw insertErr
// 🔁 Konversation aktualisieren
await server.db
.update(helpdesk_conversations)
.set({ lastMessageAt: new Date() })
.where(eq(helpdesk_conversations.id, conversationId))
await server.supabase
.from("helpdesk_conversations")
.update({ last_message_at: new Date().toISOString() })
.eq("id", conversationId)
reply.send({
message: "E-Mail erfolgreich gesendet",

View File

@@ -1,39 +1,12 @@
// src/routes/resources/history.ts
import { FastifyInstance } from "fastify";
import { and, asc, eq, inArray } from "drizzle-orm";
import { authProfiles, historyitems } from "../../db/schema";
const columnMap: Record<string, any> = {
customers: historyitems.customer,
members: historyitems.customer,
vendors: historyitems.vendor,
projects: historyitems.project,
plants: historyitems.plant,
contacts: historyitems.contact,
tasks: historyitems.task,
vehicles: historyitems.vehicle,
events: historyitems.event,
files: historyitems.file,
products: historyitems.product,
inventoryitems: historyitems.inventoryitem,
inventoryitemgroups: historyitems.inventoryitemgroup,
checks: historyitems.check,
costcentres: historyitems.costcentre,
ownaccounts: historyitems.ownaccount,
documentboxes: historyitems.documentbox,
hourrates: historyitems.hourrate,
services: historyitems.service,
customerspaces: historyitems.customerspace,
customerinventoryitems: historyitems.customerinventoryitem,
memberrelations: historyitems.memberrelation,
};
const insertFieldMap: Record<string, string> = {
const columnMap: Record<string, string> = {
customers: "customer",
members: "customer",
vendors: "vendor",
projects: "project",
plants: "plant",
contracts: "contract",
contacts: "contact",
tasks: "task",
vehicles: "vehicle",
@@ -42,21 +15,15 @@ const insertFieldMap: Record<string, string> = {
products: "product",
inventoryitems: "inventoryitem",
inventoryitemgroups: "inventoryitemgroup",
absencerequests: "absencerequest",
checks: "check",
costcentres: "costcentre",
ownaccounts: "ownaccount",
documentboxes: "documentbox",
hourrates: "hourrate",
services: "service",
customerspaces: "customerspace",
customerinventoryitems: "customerinventoryitem",
memberrelations: "memberrelation",
}
// Route params always arrive as strings, but the history FK columns may be
// numeric or uuid: all-digit ids are coerced to numbers, everything else
// (uuids, slugs) passes through untouched.
const parseId = (value: string) => (/^\d+$/.test(value) ? Number(value) : value)
roles: "role",
};
export default async function resourceHistoryRoutes(server: FastifyInstance) {
server.get<{
@@ -82,36 +49,29 @@ export default async function resourceHistoryRoutes(server: FastifyInstance) {
return reply.code(400).send({ error: `History not supported for resource '${resource}'` });
}
const data = await server.db
.select()
.from(historyitems)
.where(eq(column, parseId(id)))
.orderBy(asc(historyitems.createdAt));
const { data, error } = await server.supabase
.from("historyitems")
.select("*")
.eq(column, id)
.order("created_at", { ascending: true });
const userIds = Array.from(
new Set(data.map((item) => item.createdBy).filter(Boolean))
) as string[]
if (error) {
server.log.error(error);
return reply.code(500).send({ error: "Failed to fetch history" });
}
const profiles = userIds.length > 0
? await server.db
.select()
.from(authProfiles)
.where(and(
eq(authProfiles.tenant_id, req.user?.tenant_id),
inArray(authProfiles.user_id, userIds)
))
: []
const {data:users, error:usersError} = await server.supabase
.from("auth_users")
.select("*, auth_profiles(*), tenants!auth_tenant_users(*)")
const profileByUserId = new Map(
profiles.map((profile) => [profile.user_id, profile])
)
const filteredUsers = (users ||[]).filter(i => i.tenants.find((t:any) => t.id === req.user?.tenant_id))
const dataCombined = data.map((historyitem) => ({
...historyitem,
created_at: historyitem.createdAt,
created_by: historyitem.createdBy,
created_by_profile: historyitem.createdBy ? profileByUserId.get(historyitem.createdBy) || null : null,
}))
const dataCombined = data.map(historyitem => {
return {
...historyitem,
created_by_profile: filteredUsers.find(i => i.id === historyitem.created_by) ? filteredUsers.find(i => i.id === historyitem.created_by).auth_profiles[0] : null
}
})
@@ -168,33 +128,29 @@ export default async function resourceHistoryRoutes(server: FastifyInstance) {
const userId = (req.user as any)?.user_id;
const fkField = insertFieldMap[resource];
const fkField = columnMap[resource];
if (!fkField) {
return reply.code(400).send({ error: `Unknown resource: ${resource}` });
}
const inserted = await server.db
.insert(historyitems)
.values({
const { data, error } = await server.supabase
.from("historyitems")
.insert({
text,
[fkField]: parseId(id),
[fkField]: id,
oldVal: old_val || null,
newVal: new_val || null,
config: config || null,
tenant: (req.user as any)?.tenant_id,
createdBy: userId
created_by: userId
})
.returning()
.select()
.single();
const data = inserted[0]
if (!data) {
return reply.code(500).send({ error: "Failed to create history entry" });
if (error) {
return reply.code(500).send({ error: error.message });
}
return reply.code(201).send({
...data,
created_at: data.createdAt,
created_by: data.createdBy
});
return reply.code(201).send(data);
});
}

View File

@@ -1,63 +0,0 @@
import { FastifyInstance } from "fastify"
import jwt from "jsonwebtoken"
import { and, eq } from "drizzle-orm"
import { authTenantUsers } from "../../../db/schema"
import { secrets } from "../../utils/secrets"
/**
 * Internal machine-to-machine auth route: exchanges an already-authenticated
 * API-key identity (populated on req.user by upstream middleware) for a
 * short-lived Bearer JWT scoped to the same user/tenant.
 */
export default async function authM2mInternalRoutes(server: FastifyInstance) {
  server.post("/auth/m2m/token", {
    schema: {
      tags: ["Auth"],
      summary: "Exchange M2M API key for a short-lived JWT",
      body: {
        type: "object",
        properties: {
          // Optional requested token lifetime; clamped server-side below.
          expires_in_seconds: { type: "number" }
        }
      }
    }
  }, async (req, reply) => {
    try {
      // The upstream auth must have resolved a full identity; otherwise refuse.
      if (!req.user?.user_id || !req.user?.tenant_id || !req.user?.email) {
        return reply.code(401).send({ error: "Unauthorized" })
      }
      // Verify the user is actually a member of the tenant it claims.
      const membership = await server.db
        .select()
        .from(authTenantUsers)
        .where(and(
          eq(authTenantUsers.user_id, req.user.user_id),
          eq(authTenantUsers.tenant_id, Number(req.user.tenant_id))
        ))
        .limit(1)
      if (!membership[0]) {
        return reply.code(403).send({ error: "User is not assigned to tenant" })
      }
      // Clamp the requested lifetime to [60s, 3600s]; default is 900s (15 min).
      // NOTE(review): a non-numeric body value would yield NaN here and make
      // jwt.sign throw (handled by the catch) — schema validation presumably
      // rejects it first; confirm Fastify validation is active on this route.
      const requestedTtl = Number((req.body as any)?.expires_in_seconds ?? 900)
      const ttlSeconds = Math.min(3600, Math.max(60, requestedTtl))
      // Sign a minimal identity claim set with the shared JWT secret.
      const token = jwt.sign(
        {
          user_id: req.user.user_id,
          email: req.user.email,
          tenant_id: req.user.tenant_id,
        },
        secrets.JWT_SECRET!,
        { expiresIn: ttlSeconds }
      )
      // OAuth-style response shape so callers can treat it like a token endpoint.
      return {
        token_type: "Bearer",
        access_token: token,
        expires_in_seconds: ttlSeconds,
        user_id: req.user.user_id,
        tenant_id: req.user.tenant_id
      }
    } catch (err) {
      console.error("POST /internal/auth/m2m/token ERROR:", err)
      return reply.code(500).send({ error: "Internal Server Error" })
    }
  })
}

View File

@@ -1,22 +1,21 @@
// routes/notifications.routes.ts
import { FastifyInstance } from 'fastify';
import { NotificationService, UserDirectory } from '../modules/notification.service';
import { eq } from "drizzle-orm";
import { authUsers } from "../../db/schema";
// Beispiel: E-Mail aus eigener User-Tabelle laden
const getUserDirectory: UserDirectory = async (server:FastifyInstance, userId, tenantId) => {
const rows = await server.db
.select({ email: authUsers.email })
.from(authUsers)
.where(eq(authUsers.id, userId))
.limit(1)
const data = rows[0]
if (!data) return null;
const { data, error } = await server.supabase
.from('auth_users')
.select('email')
.eq('id', userId)
.maybeSingle();
if (error || !data) return null;
return { email: data.email };
};
export default async function notificationsRoutes(server: FastifyInstance) {
// wichtig: server.supabase ist über app verfügbar
const svc = new NotificationService(server, getUserDirectory);
server.post('/notifications/trigger', async (req, reply) => {

View File

@@ -1,19 +1,40 @@
import { FastifyRequest, FastifyReply, FastifyInstance } from 'fastify';
import { publicLinkService } from '../../modules/publiclinks.service';
import dayjs from 'dayjs'; // Falls nicht installiert: npm install dayjs
export default async function publiclinksNonAuthenticatedRoutes(server: FastifyInstance) {
server.get("/workflows/context/:token", async (req, reply) => {
const { token } = req.params as { token: string };
// Wir lesen die PIN aus dem Header (Best Practice für Security)
const pin = req.headers['x-public-pin'] as string | undefined;
try {
const context = await publicLinkService.getLinkContext(server, token, pin);
return reply.send(context);
} catch (error: any) {
if (error.message === "Link_NotFound") return reply.code(404).send({ error: "Link nicht gefunden" });
if (error.message === "Pin_Required") return reply.code(401).send({ error: "PIN erforderlich", requirePin: true });
if (error.message === "Pin_Invalid") return reply.code(403).send({ error: "PIN falsch", requirePin: true });
// Spezifische Fehlercodes für das Frontend
if (error.message === "Link_NotFound") {
return reply.code(404).send({ error: "Link nicht gefunden oder abgelaufen" });
}
if (error.message === "Pin_Required") {
return reply.code(401).send({
error: "PIN erforderlich",
code: "PIN_REQUIRED",
requirePin: true
});
}
if (error.message === "Pin_Invalid") {
return reply.code(403).send({
error: "PIN falsch",
code: "PIN_INVALID",
requirePin: true
});
}
server.log.error(error);
return reply.code(500).send({ error: "Interner Server Fehler" });
@@ -22,31 +43,49 @@ export default async function publiclinksNonAuthenticatedRoutes(server: FastifyI
server.post("/workflows/submit/:token", async (req, reply) => {
const { token } = req.params as { token: string };
// PIN sicher aus dem Header lesen
const pin = req.headers['x-public-pin'] as string | undefined;
const body = req.body as any;
// Der Body enthält { profile, project, service, ... }
const payload = req.body;
console.log(payload)
try {
const quantity = parseFloat(body.quantity) || 0;
// Wir nutzen das vom User gewählte deliveryDate
// Falls kein Datum geschickt wurde, Fallback auf Heute
const baseDate = body.deliveryDate ? dayjs(body.deliveryDate) : dayjs();
const payload = {
...body,
// Wir mappen das deliveryDate auf die Zeitstempel
// Start ist z.B. 08:00 Uhr am gewählten Tag, Ende ist Start + Menge
startDate: baseDate.hour(8).minute(0).toDate(),
endDate: baseDate.hour(8).add(quantity, 'hour').toDate(),
deliveryDate: baseDate.format('YYYY-MM-DD')
};
// Service aufrufen (führt die 3 Schritte aus: Lieferschein -> Zeit -> History)
const result = await publicLinkService.submitFormData(server, token, payload, pin);
// 201 Created zurückgeben
return reply.code(201).send(result);
} catch (error: any) {
server.log.error(error);
return reply.code(500).send({ error: "Fehler beim Speichern", details: error.message });
console.log(error);
// Fehler-Mapping für saubere HTTP Codes
if (error.message === "Link_NotFound") {
return reply.code(404).send({ error: "Link ungültig oder nicht aktiv" });
}
if (error.message === "Pin_Required") {
return reply.code(401).send({ error: "PIN erforderlich" });
}
if (error.message === "Pin_Invalid") {
return reply.code(403).send({ error: "PIN ist falsch" });
}
if (error.message === "Profile_Missing") {
return reply.code(400).send({ error: "Kein Mitarbeiter-Profil gefunden (weder im Link noch in der Eingabe)" });
}
if (error.message === "Project not found" || error.message === "Service not found") {
return reply.code(400).send({ error: "Ausgewähltes Projekt oder Leistung existiert nicht mehr." });
}
// Fallback für alle anderen Fehler (z.B. DB Constraints)
return reply.code(500).send({
error: "Interner Fehler beim Speichern",
details: error.message
});
}
});
}

File diff suppressed because it is too large Load Diff

View File

@@ -35,7 +35,7 @@ export default async function resourceRoutesSpecial(server: FastifyInstance) {
}
// ---------------------------------------
// 📌 SELECT: select-string wird in dieser Route bewusst ignoriert
// 📌 SELECT: wir ignorieren select string (wie Supabase)
// Drizzle kann kein dynamisches Select aus String!
// Wir geben IMMER alle Spalten zurück → kompatibel zum Frontend
// ---------------------------------------

View File

@@ -124,7 +124,6 @@ export default async function staffTimeRoutes(server: FastifyInstance) {
eventtype: "invalidated",
source: "WEB",
related_event_id: id,
invalidates_event_id: id,
metadata: {
reason: reason || "Bearbeitung",
replaced_by_edit: true

View File

@@ -1,7 +1,5 @@
import { FastifyInstance } from 'fastify'
import { StaffTimeEntryConnect } from '../../types/staff'
import { asc, eq } from "drizzle-orm";
import { stafftimenetryconnects } from "../../../db/schema";
export default async function staffTimeConnectRoutes(server: FastifyInstance) {
@@ -10,21 +8,16 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
'/staff/time/:id/connects',
async (req, reply) => {
const { id } = req.params
const { started_at, stopped_at, project_id, notes } = req.body
const parsedProjectId = project_id ? Number(project_id) : null
const { started_at, stopped_at, project_id, customer_id, task_id, ticket_id, notes } = req.body
const data = await server.db
.insert(stafftimenetryconnects)
.values({
stafftimeentry: id,
started_at: new Date(started_at),
stopped_at: new Date(stopped_at),
project_id: parsedProjectId,
notes
})
.returning()
const { data, error } = await server.supabase
.from('staff_time_entry_connects')
.insert([{ time_entry_id: id, started_at, stopped_at, project_id, customer_id, task_id, ticket_id, notes }])
.select()
.maybeSingle()
return reply.send(data[0])
if (error) return reply.code(400).send({ error: error.message })
return reply.send(data)
}
)
@@ -33,12 +26,13 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
'/staff/time/:id/connects',
async (req, reply) => {
const { id } = req.params
const data = await server.db
.select()
.from(stafftimenetryconnects)
.where(eq(stafftimenetryconnects.stafftimeentry, id))
.orderBy(asc(stafftimenetryconnects.started_at))
const { data, error } = await server.supabase
.from('staff_time_entry_connects')
.select('*')
.eq('time_entry_id', id)
.order('started_at', { ascending: true })
if (error) return reply.code(400).send({ error: error.message })
return reply.send(data)
}
)
@@ -48,20 +42,15 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
'/staff/time/connects/:connectId',
async (req, reply) => {
const { connectId } = req.params
const patchData = { ...req.body } as any
if (patchData.started_at) patchData.started_at = new Date(patchData.started_at)
if (patchData.stopped_at) patchData.stopped_at = new Date(patchData.stopped_at)
if (patchData.project_id !== undefined) {
patchData.project_id = patchData.project_id ? Number(patchData.project_id) : null
}
const { data, error } = await server.supabase
.from('staff_time_entry_connects')
.update({ ...req.body, updated_at: new Date().toISOString() })
.eq('id', connectId)
.select()
.maybeSingle()
const data = await server.db
.update(stafftimenetryconnects)
.set({ ...patchData, updated_at: new Date() })
.where(eq(stafftimenetryconnects.id, connectId))
.returning()
return reply.send(data[0])
if (error) return reply.code(400).send({ error: error.message })
return reply.send(data)
}
)
@@ -70,10 +59,12 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
'/staff/time/connects/:connectId',
async (req, reply) => {
const { connectId } = req.params
await server.db
.delete(stafftimenetryconnects)
.where(eq(stafftimenetryconnects.id, connectId))
const { error } = await server.supabase
.from('staff_time_entry_connects')
.delete()
.eq('id', connectId)
if (error) return reply.code(400).send({ error: error.message })
return reply.send({ success: true })
}
)

View File

@@ -1,26 +1,18 @@
import { FastifyInstance } from "fastify"
import jwt from "jsonwebtoken"
import { secrets } from "../utils/secrets"
import { createHash, randomBytes } from "node:crypto"
import {
authTenantUsers,
authUsers,
authProfiles,
tenants,
m2mApiKeys
tenants
} from "../../db/schema"
import {and, desc, eq, inArray} from "drizzle-orm"
import {and, eq, inArray} from "drizzle-orm"
export default async function tenantRoutes(server: FastifyInstance) {
const generateApiKey = () => {
const raw = randomBytes(32).toString("base64url")
return `fedeo_m2m_${raw}`
}
const hashApiKey = (apiKey: string) =>
createHash("sha256").update(apiKey, "utf8").digest("hex")
// -------------------------------------------------------------
@@ -81,7 +73,7 @@ export default async function tenantRoutes(server: FastifyInstance) {
httpOnly: true,
sameSite: process.env.NODE_ENV === "production" ? "none" : "lax",
secure: process.env.NODE_ENV === "production",
maxAge: 60 * 60 * 6,
maxAge: 60 * 60 * 3,
})
return { token }
@@ -249,172 +241,4 @@ export default async function tenantRoutes(server: FastifyInstance) {
}
})
// -------------------------------------------------------------
// M2M API KEYS
// CRUD for tenant-scoped machine-to-machine API keys. The plaintext
// key is only ever returned once, at creation time; afterwards only
// its prefix and SHA-256 hash are stored/exposed.
// -------------------------------------------------------------

// List all API keys of the caller's tenant (metadata only — never the
// hash or the plaintext key), newest first.
server.get("/tenant/api-keys", async (req, reply) => {
  try {
    const tenantId = req.user?.tenant_id
    if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })

    const keys = await server.db
      .select({
        id: m2mApiKeys.id,
        name: m2mApiKeys.name,
        tenant_id: m2mApiKeys.tenantId,
        user_id: m2mApiKeys.userId,
        active: m2mApiKeys.active,
        key_prefix: m2mApiKeys.keyPrefix,
        created_at: m2mApiKeys.createdAt,
        updated_at: m2mApiKeys.updatedAt,
        expires_at: m2mApiKeys.expiresAt,
        last_used_at: m2mApiKeys.lastUsedAt,
      })
      .from(m2mApiKeys)
      .where(eq(m2mApiKeys.tenantId, tenantId))
      .orderBy(desc(m2mApiKeys.createdAt))

    return keys
  } catch (err) {
    console.error("/tenant/api-keys GET ERROR:", err)
    return reply.code(500).send({ error: "Internal Server Error" })
  }
})

// Create a new API key acting as `user_id` within the caller's tenant.
// Responds 201 with the key metadata plus the plaintext key (one time only).
server.post("/tenant/api-keys", async (req, reply) => {
  try {
    const tenantId = req.user?.tenant_id
    const creatorUserId = req.user?.user_id
    if (!tenantId || !creatorUserId) {
      return reply.code(401).send({ error: "Unauthorized" })
    }

    const { name, user_id, expires_at } = req.body as {
      name: string
      user_id: string
      expires_at?: string | null
    }

    if (!name || !user_id) {
      return reply.code(400).send({ error: "name and user_id are required" })
    }

    // The impersonated user must belong to this tenant — otherwise a key
    // could be minted for an arbitrary foreign user.
    const userMembership = await server.db
      .select()
      .from(authTenantUsers)
      .where(and(
        eq(authTenantUsers.tenant_id, tenantId),
        eq(authTenantUsers.user_id, user_id)
      ))
      .limit(1)

    if (!userMembership[0]) {
      return reply.code(400).send({ error: "user_id is not assigned to this tenant" })
    }

    // Only the prefix (for display) and the hash (for verification) are
    // persisted; the plaintext key never touches the database.
    const plainApiKey = generateApiKey()
    const keyPrefix = plainApiKey.slice(0, 16)
    const keyHash = hashApiKey(plainApiKey)

    const inserted = await server.db
      .insert(m2mApiKeys)
      .values({
        tenantId,
        userId: user_id,
        createdBy: creatorUserId,
        name,
        keyPrefix,
        keyHash,
        expiresAt: expires_at ? new Date(expires_at) : null,
      })
      .returning({
        id: m2mApiKeys.id,
        name: m2mApiKeys.name,
        tenant_id: m2mApiKeys.tenantId,
        user_id: m2mApiKeys.userId,
        key_prefix: m2mApiKeys.keyPrefix,
        created_at: m2mApiKeys.createdAt,
        expires_at: m2mApiKeys.expiresAt,
        active: m2mApiKeys.active,
      })

    return reply.code(201).send({
      ...inserted[0],
      api_key: plainApiKey, // only returned once
    })
  } catch (err) {
    console.error("/tenant/api-keys POST ERROR:", err)
    return reply.code(500).send({ error: "Internal Server Error" })
  }
})

// Partially update a key (name / active flag / expiry). Scoped to the
// caller's tenant; a key from another tenant yields 404.
server.patch("/tenant/api-keys/:id", async (req, reply) => {
  try {
    const tenantId = req.user?.tenant_id
    if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })

    const { id } = req.params as { id: string }
    const { name, active, expires_at } = req.body as {
      name?: string
      active?: boolean
      expires_at?: string | null
    }

    // Build the patch dynamically so omitted fields stay untouched;
    // `expires_at: null` explicitly clears the expiry.
    const updateData: any = {
      updatedAt: new Date()
    }
    if (name !== undefined) updateData.name = name
    if (active !== undefined) updateData.active = active
    if (expires_at !== undefined) updateData.expiresAt = expires_at ? new Date(expires_at) : null

    const updated = await server.db
      .update(m2mApiKeys)
      .set(updateData)
      .where(and(
        eq(m2mApiKeys.id, id),
        eq(m2mApiKeys.tenantId, tenantId)
      ))
      .returning({
        id: m2mApiKeys.id,
        name: m2mApiKeys.name,
        tenant_id: m2mApiKeys.tenantId,
        user_id: m2mApiKeys.userId,
        active: m2mApiKeys.active,
        key_prefix: m2mApiKeys.keyPrefix,
        updated_at: m2mApiKeys.updatedAt,
        expires_at: m2mApiKeys.expiresAt,
        last_used_at: m2mApiKeys.lastUsedAt,
      })

    if (!updated[0]) {
      return reply.code(404).send({ error: "API key not found" })
    }

    return updated[0]
  } catch (err) {
    console.error("/tenant/api-keys PATCH ERROR:", err)
    return reply.code(500).send({ error: "Internal Server Error" })
  }
})

// Hard-delete a key within the caller's tenant. Idempotent: deleting a
// nonexistent id still returns success.
server.delete("/tenant/api-keys/:id", async (req, reply) => {
  try {
    const tenantId = req.user?.tenant_id
    if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })

    const { id } = req.params as { id: string }

    await server.db
      .delete(m2mApiKeys)
      .where(and(
        eq(m2mApiKeys.id, id),
        eq(m2mApiKeys.tenantId, tenantId)
      ))

    return { success: true }
  } catch (err) {
    console.error("/tenant/api-keys DELETE ERROR:", err)
    return reply.code(500).send({ error: "Internal Server Error" })
  }
})
}

View File

@@ -1,344 +0,0 @@
import { FastifyInstance } from "fastify"
import { and, eq, isNull, asc, inArray } from "drizzle-orm"
// WICHTIG: Hier müssen die Schemas der Entitäten importiert werden!
import {
wikiPages,
authUsers,
// Bereits vorhanden
customers,
projects,
plants,
products,
inventoryitems,
customerinventoryitems,
customerspaces,
// NEU HINZUGEFÜGT (Basierend auf deinem DataStore)
tasks,
contacts,
contracts,
vehicles,
vendors,
spaces,
inventoryitemgroups,
services,
hourrates,
events,
productcategories,
servicecategories,
ownaccounts
} from "../../db/schema/"
// Configuration: which entity types appear in the wiki.
// Each entry maps an entity key to its Drizzle table, the column used as a
// display label, the German root-folder label shown in the tree, and whether
// the entity is addressed by an integer `id` or a string `uuid`.
const ENTITY_CONFIG: Record<string, { table: any, labelField: any, rootLabel: string, idField: 'id' | 'uuid' }> = {
  // --- PREVIOUSLY EXISTING ---
  'customers': { table: customers, labelField: customers.name, rootLabel: 'Kunden', idField: 'id' },
  'projects': { table: projects, labelField: projects.name, rootLabel: 'Projekte', idField: 'id' },
  'plants': { table: plants, labelField: plants.name, rootLabel: 'Objekte', idField: 'id' },
  'products': { table: products, labelField: products.name, rootLabel: 'Artikel', idField: 'id' },
  'inventoryitems': { table: inventoryitems, labelField: inventoryitems.name, rootLabel: 'Inventarartikel', idField: 'id' },
  'customerinventoryitems': { table: customerinventoryitems, labelField: customerinventoryitems.name, rootLabel: 'Kundeninventar', idField: 'id' },
  'customerspaces': { table: customerspaces, labelField: customerspaces.name, rootLabel: 'Kundenlagerplätze', idField: 'id' },

  // --- NEWLY ADDED (based on the frontend DataStore) ---
  'tasks': { table: tasks, labelField: tasks.name, rootLabel: 'Aufgaben', idField: 'id' },
  'contacts': { table: contacts, labelField: contacts.fullName, rootLabel: 'Kontakte', idField: 'id' },
  'contracts': { table: contracts, labelField: contracts.name, rootLabel: 'Verträge', idField: 'id' },
  'vehicles': { table: vehicles, labelField: vehicles.license_plate, rootLabel: 'Fahrzeuge', idField: 'id' },
  'vendors': { table: vendors, labelField: vendors.name, rootLabel: 'Lieferanten', idField: 'id' },
  'spaces': { table: spaces, labelField: spaces.name, rootLabel: 'Lagerplätze', idField: 'id' },
  'inventoryitemgroups': { table: inventoryitemgroups, labelField: inventoryitemgroups.name, rootLabel: 'Inventarartikelgruppen', idField: 'id' },
  'services': { table: services, labelField: services.name, rootLabel: 'Leistungen', idField: 'id' },
  'hourrates': { table: hourrates, labelField: hourrates.name, rootLabel: 'Stundensätze', idField: 'id' },
  'events': { table: events, labelField: events.name, rootLabel: 'Termine', idField: 'id' },
  'productcategories': { table: productcategories, labelField: productcategories.name, rootLabel: 'Artikelkategorien', idField: 'id' },
  'servicecategories': { table: servicecategories, labelField: servicecategories.name, rootLabel: 'Leistungskategorien', idField: 'id' },
  'ownaccounts': { table: ownaccounts, labelField: ownaccounts.name, rootLabel: 'Zusätzliche Buchungskonten', idField: 'id' },
}

// Request types

// Query parameters for GET /wiki/tree; when entityType plus one of
// entityId/entityUuid is present the tree is filtered to that entity.
interface WikiTreeQuery {
  entityType?: string
  entityId?: number
  entityUuid?: string
}

// Body for POST /wiki. entityType/entityId/entityUuid optionally attach
// the new page to a business entity from ENTITY_CONFIG.
interface WikiCreateBody {
  title: string
  parentId?: string
  isFolder?: boolean
  entityType?: string
  entityId?: number
  entityUuid?: string
}

// Body for PATCH /wiki/:id; all fields optional (partial update).
interface WikiUpdateBody {
  title?: string
  content?: any
  parentId?: string | null
  sortOrder?: number
  isFolder?: boolean
}
/**
 * Tenant-scoped wiki routes.
 *
 * Pages may be plain (global wiki) or attached to a business entity from
 * ENTITY_CONFIG; the tree endpoint synthesizes read-only "virtual" folders
 * so entity-attached pages show up grouped under their entity.
 */
export default async function wikiRoutes(server: FastifyInstance) {

  // ---------------------------------------------------------
  // 1. GET /wiki/tree
  // Loads the structure: either filtered to one entity (widget view)
  // or global (with virtual folders).
  // ---------------------------------------------------------
  server.get<{ Querystring: WikiTreeQuery }>("/wiki/tree", async (req, reply) => {
    const user = req.user
    const { entityType, entityId, entityUuid } = req.query

    // CASE A: WIDGET VIEW (one specific entity)
    // When filtering specifically we only want the real pages,
    // without any virtual folders.
    if (entityType && (entityId || entityUuid)) {
      const filters = [
        eq(wikiPages.tenantId, user.tenant_id),
        eq(wikiPages.entityType, entityType)
      ]
      if (entityId) filters.push(eq(wikiPages.entityId, Number(entityId)))
      else if (entityUuid) filters.push(eq(wikiPages.entityUuid, entityUuid))

      return server.db
        .select({
          id: wikiPages.id,
          parentId: wikiPages.parentId,
          title: wikiPages.title,
          isFolder: wikiPages.isFolder,
          sortOrder: wikiPages.sortOrder,
          entityType: wikiPages.entityType,
          updatedAt: wikiPages.updatedAt,
        })
        .from(wikiPages)
        .where(and(...filters))
        .orderBy(asc(wikiPages.sortOrder), asc(wikiPages.title))
    }

    // CASE B: GLOBAL VIEW (main wiki)
    // We load EVERYTHING and build virtual folders for the entities.

    // 1. Load all wiki pages of the tenant.
    const allPages = await server.db
      .select({
        id: wikiPages.id,
        parentId: wikiPages.parentId,
        title: wikiPages.title,
        isFolder: wikiPages.isFolder,
        sortOrder: wikiPages.sortOrder,
        entityType: wikiPages.entityType,
        entityId: wikiPages.entityId, // needed for entity mapping below
        entityUuid: wikiPages.entityUuid, // needed for entity mapping below
        updatedAt: wikiPages.updatedAt,
      })
      .from(wikiPages)
      .where(eq(wikiPages.tenantId, user.tenant_id))
      .orderBy(asc(wikiPages.sortOrder), asc(wikiPages.title))

    // Split into standard pages and entity-attached pages.
    const standardPages = allPages.filter(p => !p.entityType)
    const entityPages = allPages.filter(p => p.entityType)

    const virtualNodes: any[] = []

    // 2. Generate virtual folders.
    // Iterate over the config (customers, projects, ...); lookups per
    // entity type run in parallel.
    await Promise.all(Object.entries(ENTITY_CONFIG).map(async ([typeKey, config]) => {
      // Do we have any notes for this type at all?
      const pagesForType = entityPages.filter(p => p.entityType === typeKey)
      if (pagesForType.length === 0) return

      // Collect ids to fetch display names from the DB.
      // We distinguish between id (int) and uuid keyed entities.
      let entities: any[] = []

      if (config.idField === 'id') {
        const ids = [...new Set(pagesForType.map(p => p.entityId).filter((id): id is number => id !== null))]
        if (ids.length > 0) {
          //@ts-ignore - Drizzle typing for dynamic tables is tricky
          entities = await server.db.select({ id: config.table.id, label: config.labelField })
            .from(config.table)
            //@ts-ignore
            .where(inArray(config.table.id, ids))
        }
      } else {
        // If uuid is used as the key (e.g. IoT devices).
        const uuids = [...new Set(pagesForType.map(p => p.entityUuid).filter((uuid): uuid is string => uuid !== null))]
        if (uuids.length > 0) {
          //@ts-ignore
          entities = await server.db.select({ id: config.table.id, label: config.labelField })
            .from(config.table)
            //@ts-ignore
            .where(inArray(config.table.id, uuids))
        }
      }

      if (entities.length === 0) return

      // 3. Create the virtual root folder (e.g. "Kunden").
      const rootId = `virtual-root-${typeKey}`
      virtualNodes.push({
        id: rootId,
        parentId: null, // top of the tree
        title: config.rootLabel,
        isFolder: true,
        isVirtual: true, // flag for the frontend (read-only folder)
        sortOrder: 1000 // display at the very bottom
      })

      // 4. Create virtual per-entity folders (e.g. "Müller GmbH").
      entities.forEach(entity => {
        const entityNodeId = `virtual-entity-${typeKey}-${entity.id}`
        virtualNodes.push({
          id: entityNodeId,
          parentId: rootId,
          title: entity.label || 'Unbekannt',
          isFolder: true,
          isVirtual: true,
          sortOrder: 0
        })

        // 5. Re-parent the real notes.
        // Find all notes that belong to this entity.
        const myPages = pagesForType.filter(p =>
          (config.idField === 'id' && p.entityId === entity.id) ||
          (config.idField === 'uuid' && p.entityUuid === entity.id)
        )

        myPages.forEach(page => {
          // Only re-parent the entity's root notes.
          // Sub-pages stay where they are (their parentId already points
          // at the correct page).
          if (!page.parentId) {
            // We modify the object for the response only (not in the DB!).
            // It must be cloned, otherwise we'd change it for all references.
            const pageClone = { ...page }
            pageClone.parentId = entityNodeId
            virtualNodes.push(pageClone)
          } else {
            // Sub-pages are added as-is.
            virtualNodes.push(page)
          }
        })
      })
    }))

    // Result: normal pages + virtual structure.
    return [...standardPages, ...virtualNodes]
  })

  // ---------------------------------------------------------
  // 2. GET /wiki/:id
  // Loads ONE entry completely, INCLUDING its content.
  // ---------------------------------------------------------
  server.get<{ Params: { id: string } }>("/wiki/:id", async (req, reply) => {
    const user = req.user
    const { id } = req.params

    const page = await server.db.query.wikiPages.findFirst({
      where: and(
        eq(wikiPages.id, id),
        eq(wikiPages.tenantId, user.tenant_id)
      ),
      with: {
        author: {
          columns: { id: true } // name, if available
        }
      }
    })

    if (!page) return reply.code(404).send({ error: "Page not found" })
    return page
  })

  // ---------------------------------------------------------
  // 3. POST /wiki
  // Creates a new entry (page or folder, optionally entity-attached).
  // ---------------------------------------------------------
  server.post<{ Body: WikiCreateBody }>("/wiki", async (req, reply) => {
    const user = req.user
    const body = req.body

    if (!body.title) return reply.code(400).send({ error: "Title required" })

    const hasEntity = !!body.entityType

    const [newPage] = await server.db
      .insert(wikiPages)
      .values({
        tenantId: user.tenant_id,
        title: body.title,
        parentId: body.parentId || null,
        isFolder: body.isFolder ?? false,
        entityType: hasEntity ? body.entityType : null,
        entityId: hasEntity && body.entityId ? body.entityId : null,
        entityUuid: hasEntity && body.entityUuid ? body.entityUuid : null,
        //@ts-ignore
        createdBy: user.id,
        //@ts-ignore
        updatedBy: user.id
      })
      .returning()

    return newPage
  })

  // ---------------------------------------------------------
  // 4. PATCH /wiki/:id
  // Universal partial update (title, content, parent, order, folder flag).
  // ---------------------------------------------------------
  server.patch<{ Params: { id: string }; Body: WikiUpdateBody }>(
    "/wiki/:id",
    async (req, reply) => {
      const user = req.user
      const { id } = req.params
      const body = req.body

      // Verify the page exists and belongs to this tenant before updating.
      const existing = await server.db.query.wikiPages.findFirst({
        where: and(eq(wikiPages.id, id), eq(wikiPages.tenantId, user.tenant_id)),
        columns: { id: true }
      })

      if (!existing) return reply.code(404).send({ error: "Not found" })

      const [updatedPage] = await server.db
        .update(wikiPages)
        .set({
          title: body.title,
          content: body.content,
          parentId: body.parentId,
          sortOrder: body.sortOrder,
          isFolder: body.isFolder,
          updatedAt: new Date(),
          //@ts-ignore
          updatedBy: user.id
        })
        .where(eq(wikiPages.id, id))
        .returning()

      return updatedPage
    }
  )

  // ---------------------------------------------------------
  // 5. DELETE /wiki/:id
  // Deletes an entry (tenant-scoped).
  // ---------------------------------------------------------
  server.delete<{ Params: { id: string } }>("/wiki/:id", async (req, reply) => {
    const user = req.user
    const { id } = req.params

    const result = await server.db
      .delete(wikiPages)
      .where(and(
        eq(wikiPages.id, id),
        eq(wikiPages.tenantId, user.tenant_id)
      ))
      .returning({ id: wikiPages.id })

    if (result.length === 0) return reply.code(404).send({ error: "Not found" })
    return { success: true, deletedId: result[0].id }
  })
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,5 @@
import {diffTranslations, getDiffLabel} from "./diffTranslations";
import {diffTranslations} from "./diffTranslations";
export type DiffChange = {
key: string;
@@ -43,6 +43,8 @@ export function diffObjects(
const oldVal = obj1?.[key];
const newVal = obj2?.[key];
console.log(oldVal, key, newVal);
// Wenn beides null/undefined → ignorieren
if (
(oldVal === null || oldVal === undefined || oldVal === "" || JSON.stringify(oldVal) === "[]") &&
@@ -70,11 +72,12 @@ export function diffObjects(
if (type === "unchanged") continue;
const translation = diffTranslations[key];
let label = getDiffLabel(key);
let label = key;
let resolvedOld = oldVal;
let resolvedNew = newVal;
if (translation) {
label = translation.label;
if (translation.resolve) {
const { oldVal: resOld, newVal: resNew } = translation.resolve(
oldVal,

View File

@@ -6,149 +6,6 @@ type ValueResolver = (
ctx?: Record<string, any>
) => { oldVal: any; newVal: any };
const TOKEN_TRANSLATIONS: Record<string, string> = {
account: "Konto",
active: "Aktiv",
address: "Adresse",
amount: "Betrag",
archived: "Archiviert",
article: "Artikel",
bank: "Bank",
barcode: "Barcode",
birthday: "Geburtstag",
category: "Kategorie",
city: "Ort",
color: "Farbe",
comment: "Kommentar",
company: "Firma",
contact: "Kontakt",
contract: "Vertrag",
cost: "Kosten",
country: "Land",
created: "Erstellt",
customer: "Kunde",
date: "Datum",
default: "Standard",
deleted: "Gelöscht",
delivery: "Lieferung",
description: "Beschreibung",
document: "Dokument",
driver: "Fahrer",
due: "Fällig",
duration: "Dauer",
email: "E-Mail",
employee: "Mitarbeiter",
enabled: "Aktiviert",
end: "Ende",
event: "Ereignis",
file: "Datei",
first: "Vorname",
fixed: "Festgeschrieben",
group: "Gruppe",
hour: "Stunde",
iban: "IBAN",
id: "ID",
incoming: "Eingang",
invoice: "Rechnung",
item: "Eintrag",
language: "Sprache",
last: "Nachname",
license: "Kennzeichen",
link: "Link",
list: "Liste",
location: "Standort",
manufacturer: "Hersteller",
markup: "Verkaufsaufschlag",
message: "Nachricht",
mobile: "Mobil",
name: "Name",
note: "Notiz",
notes: "Notizen",
number: "Nummer",
order: "Bestellung",
own: "Eigen",
payment: "Zahlung",
phone: "Telefon",
plant: "Objekt",
postal: "Post",
price: "Preis",
percentage: "%",
product: "Produkt",
profile: "Profil",
project: "Projekt",
purchase: "Kauf",
quantity: "Menge",
rate: "Satz",
reference: "Referenz",
requisition: "Anfrage",
resource: "Ressource",
role: "Rolle",
serial: "Serien",
service: "Leistung",
selling: "Verkauf",
sellign: "Verkauf",
space: "Lagerplatz",
start: "Start",
statement: "Buchung",
status: "Status",
street: "Straße",
surcharge: "Aufschlag",
tax: "Steuer",
tel: "Telefon",
tenant: "Mandant",
time: "Zeit",
title: "Titel",
total: "Gesamt",
type: "Typ",
unit: "Einheit",
updated: "Aktualisiert",
user: "Benutzer",
ustid: "USt-ID",
value: "Wert",
vendor: "Lieferant",
vehicle: "Fahrzeug",
weekly: "Wöchentlich",
working: "Arbeits",
zip: "Postleitzahl",
composed: "Zusammensetzung",
material: "Material",
worker: "Arbeit",
};
function tokenizeKey(key: string): string[] {
return key
.replace(/([a-z0-9])([A-Z])/g, "$1_$2")
.replace(/[^a-zA-Z0-9]+/g, "_")
.split("_")
.filter(Boolean)
.map((p) => p.toLowerCase());
}
function capitalize(word: string) {
if (!word) return word;
return word.charAt(0).toUpperCase() + word.slice(1);
}
function fallbackLabelFromKey(key: string): string {
const parts = tokenizeKey(key);
if (!parts.length) return key;
if (parts.length > 1 && parts[parts.length - 1] === "id") {
const base = parts.slice(0, -1).map((p) => TOKEN_TRANSLATIONS[p] || capitalize(p)).join(" ");
return `${base} ID`.trim();
}
return parts
.map((p) => TOKEN_TRANSLATIONS[p] || capitalize(p))
.join(" ")
.replace(/\s+/g, " ")
.trim();
}
export function getDiffLabel(key: string): string {
return diffTranslations[key]?.label || fallbackLabelFromKey(key);
}
export const diffTranslations: Record<
string,
{ label: string; resolve?: ValueResolver }
@@ -187,7 +44,7 @@ export const diffTranslations: Record<
}),
},
resources: {
label: "Ressourcen",
label: "Resourcen",
resolve: (o, n) => ({
oldVal: Array.isArray(o) ? o.map((i: any) => i.title).join(", ") : "-",
newVal: Array.isArray(n) ? n.map((i: any) => i.title).join(", ") : "-",
@@ -229,18 +86,10 @@ export const diffTranslations: Record<
approved: { label: "Genehmigt" },
manufacturer: { label: "Hersteller" },
purchasePrice: { label: "Kaufpreis" },
markupPercentage: { label: "Verkaufsaufschlag in %" },
markup_percentage: { label: "Verkaufsaufschlag in %" },
sellingPrice: { label: "Verkaufspreis" },
selling_price: { label: "Verkaufspreis" },
sellingPriceComposed: { label: "Verkaufspreis Zusammensetzung" },
purchaseDate: { label: "Kaufdatum" },
serialNumber: { label: "Seriennummer" },
customerInventoryId: { label: "Kundeninventar-ID" },
customerinventoryitems: { label: "Kundeninventar" },
usePlanning: { label: "In Plantafel verwenden" },
currentSpace: { label: "Lagerplatz" },
customerspace: { label: "Kundenlagerplatz" },
customer: {
label: "Kunde",
@@ -259,7 +108,6 @@ export const diffTranslations: Record<
description: { label: "Beschreibung" },
categorie: { label: "Kategorie" },
category: { label: "Kategorie" },
profile: {
label: "Mitarbeiter",
@@ -299,8 +147,6 @@ export const diffTranslations: Record<
},
projecttype: { label: "Projekttyp" },
contracttype: { label: "Vertragstyp" },
billingInterval: { label: "Abrechnungsintervall" },
fixed: {
label: "Festgeschrieben",

View File

@@ -301,7 +301,7 @@ export async function buildExportZip(
else if(account.taxType === '7I') buschluessel = "18";
else buschluessel = "-";
let amountGross =/* account.amountGross ? account.amountGross : */(account.amountNet || 0) + (account.amountTax || 0);
let amountGross = account.amountGross ? account.amountGross : (account.amountNet || 0) + (account.amountTax || 0);
let shSelector = Math.sign(amountGross) === -1 ? "H" : "S";
let text = `ER ${ii.reference}: ${escapeString(ii.description)}`.substring(0,59);
const vend = ii.vendor; // durch Mapping verfügbar
@@ -325,27 +325,27 @@ export async function buildExportZip(
if(alloc.createddocument && alloc.createddocument.customer) {
const cd = alloc.createddocument;
const cust = cd.customer;
bookingLines.push(`${displayCurrency(alloc.amount,true)};"H";;;;;${cust?.customerNumber};${datevKonto};"3";${dayjs(cd.documentDate).format("DDMM")};"${cd.documentNumber}";;;"${`ZE${alloc.description}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust?.name}";"Kundennummer";"${cust?.customerNumber}";"Belegnummer";"${cd.documentNumber}";"Leistungsdatum";"${dayjs(cd.deliveryDate).format("DD.MM.YYYY")}";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
bookingLines.push(`${displayCurrency(alloc.amount,true)};"H";;;;;${cust?.customerNumber};${datevKonto};"3";${dayjs(cd.documentDate).format("DDMM")};"${cd.documentNumber}";;;"${`ZE${alloc.description}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust?.name}";"Kundennummer";"${cust?.customerNumber}";"Belegnummer";"${cd.documentNumber}";"Leistungsdatum";"${dayjs(cd.deliveryDate).format("DD.MM.YYYY")}";"Belegdatum";"${dayjs(cd.documentDate).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
} else if(alloc.incominginvoice && alloc.incominginvoice.vendor) {
const ii = alloc.incominginvoice;
const vend = ii.vendor;
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend?.vendorNumber};"";${dayjs(ii.date).format("DDMM")};"${ii.reference}";;;"${`ZA${alloc.description} ${bsText} `.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend?.name}";"Kundennummer";"${vend?.vendorNumber}";"Belegnummer";"${ii.reference}";"Leistungsdatum";"${dayjs(ii.date).format("DD.MM.YYYY")}";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend?.vendorNumber};"";${dayjs(ii.date).format("DDMM")};"${ii.reference}";;;"${`ZA${alloc.description} ${bsText} `.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend?.name}";"Kundennummer";"${vend?.vendorNumber}";"Belegnummer";"${ii.reference}";"Leistungsdatum";"${dayjs(ii.date).format("DD.MM.YYYY")}";"Belegdatum";"${dayjs(ii.date).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
} else if(alloc.account) {
const acc = alloc.account;
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${acc.number};"";${dateVal};"";;;"${`${vorzeichen} ${acc.number} - ${escapeString(acc.label)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${bs.credName || ''}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${acc.number};"";${dateVal};"";;;"${`${vorzeichen} ${acc.number} - ${escapeString(acc.label)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${bs.credName || ''}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
} else if(alloc.vendor) {
const vend = alloc.vendor;
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend.vendorNumber};"";${dateVal};"";;;"${`${vorzeichen} ${vend.vendorNumber} - ${escapeString(vend.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend.vendorNumber};"";${dateVal};"";;;"${`${vorzeichen} ${vend.vendorNumber} - ${escapeString(vend.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
} else if(alloc.customer) {
const cust = alloc.customer;
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${cust.customerNumber};"";${dateVal};"";;;"${`${vorzeichen} ${cust.customerNumber} - ${escapeString(cust.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${cust.customerNumber};"";${dateVal};"";;;"${`${vorzeichen} ${cust.customerNumber} - ${escapeString(cust.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
} else if(alloc.ownaccount) {
const own = alloc.ownaccount;
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${own.number};"";${dateVal};"";;;"${`${vorzeichen} ${own.number} - ${escapeString(own.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${own.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${own.number};"";${dateVal};"";;;"${`${vorzeichen} ${own.number} - ${escapeString(own.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${own.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
}
});

View File

@@ -1,25 +1,12 @@
import xmlbuilder from "xmlbuilder";
import {randomUUID} from "node:crypto";
import dayjs from "dayjs";
import { and, eq, inArray } from "drizzle-orm";
import { createddocuments, tenants } from "../../../db/schema";
export const createSEPAExport = async (server,idsToExport, tenant_id) => {
const data = await server.db
.select()
.from(createddocuments)
.where(and(
eq(createddocuments.tenant, tenant_id),
inArray(createddocuments.id, idsToExport)
))
const tenantRows = await server.db
.select()
.from(tenants)
.where(eq(tenants.id, tenant_id))
.limit(1)
const tenantData = tenantRows[0]
const {data,error} = await server.supabase.from("createddocuments").select().eq("tenant", tenant_id).in("id", idsToExport)
const {data:tenantData,error:tenantError} = await server.supabase.from("tenants").select().eq("id", tenant_id).single()
console.log(tenantData)
console.log(tenantError)
console.log(data)

View File

@@ -1,8 +1,11 @@
import { FastifyInstance } from "fastify"
import { PNG } from "pngjs"
import { Utils } from "@mmote/niimbluelib"
import bwipjs from "bwip-js"
import Sharp from "sharp"
import {FastifyInstance} from "fastify";
// import { PNG } from 'pngjs'
// import { ready as zplReady } from 'zpl-renderer-js'
// import { Utils } from '@mmote/niimbluelib'
// import { createCanvas } from 'canvas'
// import bwipjs from 'bwip-js'
// import Sharp from 'sharp'
// import fs from 'fs'
import { tenants } from "../../db/schema"
import { eq } from "drizzle-orm"
@@ -12,6 +15,7 @@ export const useNextNumberRangeNumber = async (
tenantId: number,
numberRange: string
) => {
// 1⃣ Tenant laden
const [tenant] = await server.db
.select()
.from(tenants)
@@ -29,20 +33,23 @@ export const useNextNumberRangeNumber = async (
const current = numberRanges[numberRange]
// 2⃣ Used Number generieren
const usedNumber =
(current.prefix || "") +
current.nextNumber +
(current.suffix || "")
// 3⃣ nextNumber erhöhen
const updatedRanges = {
// @ts-ignore
...numberRanges,
[numberRange]: {
...current,
nextNumber: current.nextNumber + 1,
},
nextNumber: current.nextNumber + 1
}
}
// 4⃣ Tenant aktualisieren
await server.db
.update(tenants)
.set({ numberRanges: updatedRanges })
@@ -51,17 +58,24 @@ export const useNextNumberRangeNumber = async (
return { usedNumber }
}
export async function encodeBase64ToNiimbot(base64Png: string, printDirection: "top" | "left" = "top") {
const buffer = Buffer.from(base64Png, "base64")
const png = PNG.sync.read(buffer)
/*
export async function encodeBase64ToNiimbot(base64Png, printDirection = 'top') {
// 1⃣ PNG dekodieren
const buffer = Buffer.from(base64Png, 'base64')
const png = PNG.sync.read(buffer) // liefert {width, height, data: Uint8Array(RGBA)}
const { width, height, data } = png
const cols = printDirection === "left" ? height : width
const rows = printDirection === "left" ? width : height
const rowsData: any[] = []
console.log(width, height, data)
const cols = printDirection === 'left' ? height : width
const rows = printDirection === 'left' ? width : height
const rowsData = []
if (cols % 8 !== 0) throw new Error("Column count must be multiple of 8")
console.log(cols)
if (cols % 8 !== 0) throw new Error('Column count must be multiple of 8')
// 2⃣ Zeilenweise durchgehen und Bits bilden
for (let row = 0; row < rows; row++) {
let isVoid = true
let blackPixelsCount = 0
@@ -70,8 +84,8 @@ export async function encodeBase64ToNiimbot(base64Png: string, printDirection: "
for (let colOct = 0; colOct < cols / 8; colOct++) {
let pixelsOctet = 0
for (let colBit = 0; colBit < 8; colBit++) {
const x = printDirection === "left" ? row : colOct * 8 + colBit
const y = printDirection === "left" ? height - 1 - (colOct * 8 + colBit) : row
const x = printDirection === 'left' ? row : colOct * 8 + colBit
const y = printDirection === 'left' ? height - 1 - (colOct * 8 + colBit) : row
const idx = (y * width + x) * 4
const lum = 0.299 * data[idx] + 0.587 * data[idx + 1] + 0.114 * data[idx + 2]
const isBlack = lum < 128
@@ -85,7 +99,7 @@ export async function encodeBase64ToNiimbot(base64Png: string, printDirection: "
}
const newPart = {
dataType: isVoid ? "void" : "pixels",
dataType: isVoid ? 'void' : 'pixels',
rowNumber: row,
repeat: 1,
rowData: isVoid ? undefined : rowData,
@@ -97,15 +111,14 @@ export async function encodeBase64ToNiimbot(base64Png: string, printDirection: "
} else {
const last = rowsData[rowsData.length - 1]
let same = newPart.dataType === last.dataType
if (same && newPart.dataType === "pixels") {
if (same && newPart.dataType === 'pixels') {
same = Utils.u8ArraysEqual(newPart.rowData, last.rowData)
}
if (same) last.repeat++
else rowsData.push(newPart)
if (row % 200 === 199) {
rowsData.push({
dataType: "check",
dataType: 'check',
rowNumber: row,
repeat: 0,
rowData: undefined,
@@ -118,69 +131,44 @@ export async function encodeBase64ToNiimbot(base64Png: string, printDirection: "
return { cols, rows, rowsData }
}
function escapeXml(value: string) {
return String(value)
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/\"/g, "&quot;")
.replace(/'/g, "&apos;")
}
export async function generateLabel(context,width,height) {
// Canvas für Hintergrund & Text
const canvas = createCanvas(width, height)
const ctx = canvas.getContext('2d')
export async function generateLabel(context: any = {}, width = 584, height = 354) {
const normalizedWidth = Math.ceil(Number(width) / 8) * 8
const normalizedHeight = Math.max(1, Number(height) || 203)
// Hintergrund weiß
ctx.fillStyle = '#FFFFFF'
ctx.fillRect(0, 0, width, height)
const idFont = Math.max(24, Math.round(normalizedHeight * 0.125))
const nameFont = Math.max(17, Math.round(normalizedHeight * 0.078))
const customerFont = Math.max(14, Math.round(normalizedHeight * 0.06))
const serialFont = Math.max(12, Math.round(normalizedHeight * 0.052))
const labelId = context.customerInventoryId || context.datamatrix || context.id || "N/A"
const labelName = context.name || context.text || "Kundeninventarartikel"
const customerName = context.customerName || ""
const serial = context.serialNumber ? `SN: ${context.serialNumber}` : ""
const nameLine1 = String(labelName).slice(0, 30)
const nameLine2 = String(labelName).slice(30, 60)
// Überschrift
ctx.fillStyle = '#000000'
ctx.font = '32px Arial'
ctx.fillText(context.text, 20, 40)
// 3) DataMatrix
const dataMatrixPng = await bwipjs.toBuffer({
bcid: "datamatrix",
text: String(labelId),
scale: normalizedWidth >= 560 ? 7 : 5,
includetext: false,
bcid: 'datamatrix',
text: context.datamatrix,
scale: 6,
})
const dataMatrixMeta = await Sharp(dataMatrixPng).metadata()
const dataMatrixWidth = dataMatrixMeta.width || 0
const dataMatrixHeight = dataMatrixMeta.height || 0
const dmLeft = Math.max(8, normalizedWidth - dataMatrixWidth - 28)
const dmTop = Math.max(8, Math.floor((normalizedHeight - dataMatrixHeight) / 2))
const textMaxWidth = Math.max(120, dmLeft - 20)
const textSvg = `
<svg width="${normalizedWidth}" height="${normalizedHeight}" xmlns="http://www.w3.org/2000/svg">
<rect width="100%" height="100%" fill="white"/>
<text x="12" y="${Math.round(normalizedHeight * 0.15)}" font-size="${idFont}" font-family="Arial, Helvetica, sans-serif" font-weight="700" fill="black">${escapeXml(String(labelId).slice(0, 26))}</text>
<text x="12" y="${Math.round(normalizedHeight * 0.29)}" font-size="${nameFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(nameLine1)}</text>
<text x="12" y="${Math.round(normalizedHeight * 0.37)}" font-size="${nameFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(nameLine2)}</text>
<text x="12" y="${Math.round(normalizedHeight * 0.49)}" font-size="${customerFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(String(customerName).slice(0, 40))}</text>
<text x="12" y="${Math.round(normalizedHeight * 0.58)}" font-size="${serialFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(String(serial).slice(0, 42))}</text>
<rect x="0" y="0" width="${textMaxWidth}" height="${normalizedHeight}" fill="none"/>
</svg>`.trim()
// Basisbild aus Canvas
const base = await Sharp(canvas.toBuffer())
.png()
.toBuffer()
const final = await Sharp({
create: {
width: normalizedWidth,
height: normalizedHeight,
channels: 3,
background: { r: 255, g: 255, b: 255 },
},
})
// Alles zusammen compositen
const final = await Sharp(base)
.composite([
{ input: Buffer.from(textSvg), top: 0, left: 0 },
{ input: dataMatrixPng, top: dmTop, left: dmLeft },
{ input: dataMatrixPng, top: 60, left: 20 },
])
.png()
.toBuffer()
return final.toString("base64")
}
fs.writeFileSync('label.png', final)
// Optional: Base64 zurückgeben (z.B. für API)
const base64 = final.toString('base64')
return base64
}*/

View File

@@ -86,13 +86,12 @@ const InstructionFormat = z.object({
});
// ---------------------------------------------------------
// MAIN FUNCTION
// MAIN FUNCTION REPLACES SUPABASE VERSION
// ---------------------------------------------------------
export const getInvoiceDataFromGPT = async function (
server: FastifyInstance,
file: any,
tenantId: number,
learningContext?: string
tenantId: number
) {
await initOpenAi();
@@ -189,13 +188,8 @@ export const getInvoiceDataFromGPT = async function (
"You extract structured invoice data.\n\n" +
`VENDORS: ${JSON.stringify(vendorList)}\n` +
`ACCOUNTS: ${JSON.stringify(accountList)}\n\n` +
(learningContext
? `HISTORICAL_PATTERNS: ${learningContext}\n\n`
: "") +
"Match issuer by name to vendor.id.\n" +
"Match invoice items to account id based on label/number.\n" +
"Use historical patterns as soft hints for vendor/account/payment mapping.\n" +
"Do not invent values when the invoice text contradicts the hints.\n" +
"Convert dates to YYYY-MM-DD.\n" +
"Keep invoice items in original order.\n",
},

View File

@@ -1,42 +1,4 @@
import { FastifyInstance } from "fastify"
import { historyitems } from "../../db/schema";
const HISTORY_ENTITY_LABELS: Record<string, string> = {
customers: "Kunden",
members: "Mitglieder",
vendors: "Lieferanten",
projects: "Projekte",
plants: "Objekte",
contacts: "Kontakte",
inventoryitems: "Inventarartikel",
customerinventoryitems: "Kundeninventar",
products: "Artikel",
profiles: "Mitarbeiter",
absencerequests: "Abwesenheiten",
events: "Termine",
tasks: "Aufgaben",
vehicles: "Fahrzeuge",
costcentres: "Kostenstellen",
ownaccounts: "zusätzliche Buchungskonten",
documentboxes: "Dokumentenboxen",
hourrates: "Stundensätze",
services: "Leistungen",
roles: "Rollen",
checks: "Überprüfungen",
spaces: "Lagerplätze",
customerspaces: "Kundenlagerplätze",
trackingtrips: "Fahrten",
createddocuments: "Dokumente",
inventoryitemgroups: "Inventarartikelgruppen",
bankstatements: "Buchungen",
incominginvoices: "Eingangsrechnungen",
files: "Dateien",
memberrelations: "Mitgliedsverhältnisse",
}
export function getHistoryEntityLabel(entity: string) {
return HISTORY_ENTITY_LABELS[entity] || entity
}
export async function insertHistoryItem(
server: FastifyInstance,
@@ -51,18 +13,15 @@ export async function insertHistoryItem(
text?: string
}
) {
const entityLabel = getHistoryEntityLabel(params.entity)
const textMap = {
created: `Neuer Eintrag in ${entityLabel} erstellt`,
updated: `Eintrag in ${entityLabel} geändert`,
unchanged: `Eintrag in ${entityLabel} unverändert`,
archived: `Eintrag in ${entityLabel} archiviert`,
deleted: `Eintrag in ${entityLabel} gelöscht`
created: `Neuer Eintrag in ${params.entity} erstellt`,
updated: `Eintrag in ${params.entity} geändert`,
archived: `Eintrag in ${params.entity} archiviert`,
deleted: `Eintrag in ${params.entity} gelöscht`
}
const columnMap: Record<string, string> = {
customers: "customer",
members: "customer",
vendors: "vendor",
projects: "project",
plants: "plant",
@@ -82,15 +41,10 @@ export async function insertHistoryItem(
roles: "role",
checks: "check",
spaces: "space",
customerspaces: "customerspace",
customerinventoryitems: "customerinventoryitem",
trackingtrips: "trackingtrip",
createddocuments: "createddocument",
inventoryitemgroups: "inventoryitemgroup",
bankstatements: "bankstatement",
incominginvoices: "incomingInvoice",
files: "file",
memberrelations: "memberrelation",
bankstatements: "bankstatement"
}
const fkColumn = columnMap[params.entity]
@@ -99,20 +53,18 @@ export async function insertHistoryItem(
return
}
const stringifyHistoryValue = (value: any) => {
if (value === undefined || value === null) return null
return typeof value === "string" ? value : JSON.stringify(value)
}
const entry = {
tenant: params.tenant_id,
createdBy: params.created_by,
created_by: params.created_by,
text: params.text || textMap[params.action],
action: params.action,
[fkColumn]: params.entityId,
oldVal: stringifyHistoryValue(params.oldVal),
newVal: stringifyHistoryValue(params.newVal)
oldVal: params.oldVal ? JSON.stringify(params.oldVal) : null,
newVal: params.newVal ? JSON.stringify(params.newVal) : null
}
await server.db.insert(historyitems).values(entry as any)
const { error } = await server.supabase.from("historyitems").insert([entry])
if (error) { // @ts-ignore
console.log(error)
}
}

View File

@@ -2,9 +2,6 @@ import {PDFDocument, StandardFonts, rgb} from "pdf-lib"
import dayjs from "dayjs"
import {renderAsCurrency, splitStringBySpace} from "./stringRendering";
import {FastifyInstance} from "fastify";
import { GetObjectCommand } from "@aws-sdk/client-s3";
import { s3 } from "./s3";
import { secrets } from "./secrets";
const getCoordinatesForPDFLib = (x:number ,y:number, page:any) => {
/*
@@ -28,21 +25,9 @@ const getCoordinatesForPDFLib = (x:number ,y:number, page:any) => {
const getBackgroundSourceBuffer = async (server:FastifyInstance, path:string) => {
console.log(path)
const {data:backgroundPDFData,error:backgroundPDFError} = await server.supabase.storage.from("files").download(path)
const { Body } = await s3.send(
new GetObjectCommand({
Bucket: secrets.S3_BUCKET,
Key: path
})
)
const chunks: Buffer[] = []
for await (const chunk of Body as any) {
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk))
}
return Buffer.concat(chunks)
return backgroundPDFData.arrayBuffer()
}
const getDuration = (time) => {

View File

@@ -3,14 +3,10 @@ import {
bankaccounts,
bankrequisitions,
bankstatements,
entitybankaccounts,
contacts,
contracts,
contracttypes,
costcentres,
createddocuments,
customerinventoryitems,
customerspaces,
customers,
files,
filetags,
@@ -20,7 +16,6 @@ import {
inventoryitemgroups,
inventoryitems,
letterheads,
memberrelations,
ownaccounts,
plants,
productcategories,
@@ -41,28 +36,17 @@ import {
export const resourceConfig = {
projects: {
searchColumns: ["name","customerRef","projectNumber","notes"],
searchColumns: ["name"],
mtoLoad: ["customer","plant","contract","projecttype"],
mtmLoad: ["tasks", "files","createddocuments"],
table: projects,
numberRangeHolder: "projectNumber"
},
customers: {
searchColumns: ["name", "nameAddition", "customerNumber", "firstname", "lastname", "notes"],
mtmLoad: ["contacts","projects","plants","createddocuments","contracts","customerinventoryitems","customerspaces"],
table: customers,
numberRangeHolder: "customerNumber",
},
members: {
searchColumns: ["name", "nameAddition", "customerNumber", "firstname", "lastname", "notes"],
searchColumns: ["name", "customerNumber", "firstname", "lastname", "notes"],
mtmLoad: ["contacts","projects","plants","createddocuments","contracts"],
table: customers,
numberRangeHolder: "customerNumber",
relationKey: "customer",
},
memberrelations: {
table: memberrelations,
searchColumns: ["type", "billingInterval"],
},
contacts: {
searchColumns: ["firstName", "lastName", "email", "phone", "notes"],
@@ -71,17 +55,12 @@ export const resourceConfig = {
},
contracts: {
table: contracts,
searchColumns: ["name", "notes", "contractNumber", "paymentType", "billingInterval", "sepaRef", "bankingName"],
searchColumns: ["name", "notes", "contractNumber", "paymentType", "sepaRef", "bankingName"],
numberRangeHolder: "contractNumber",
mtoLoad: ["customer", "contracttype"],
},
contracttypes: {
table: contracttypes,
searchColumns: ["name", "description", "paymentType", "billingInterval"],
mtoLoad: ["customer"],
},
plants: {
table: plants,
searchColumns: ["name"],
mtoLoad: ["customer"],
mtmLoad: ["projects","tasks","files"],
},
@@ -106,12 +85,6 @@ export const resourceConfig = {
table: inventoryitems,
numberRangeHolder: "articleNumber",
},
customerinventoryitems: {
table: customerinventoryitems,
numberRangeHolder: "customerInventoryId",
mtoLoad: ["customer", "customerspace", "product", "vendor"],
searchColumns: ["name", "customerInventoryId", "serialNumber", "description", "manufacturer", "manufacturerNumber"],
},
inventoryitemgroups: {
table: inventoryitemgroups
},
@@ -146,13 +119,6 @@ export const resourceConfig = {
searchColumns: ["name","space_number","type","info_data"],
numberRangeHolder: "spaceNumber",
},
customerspaces: {
table: customerspaces,
searchColumns: ["name","space_number","type","info_data","description"],
numberRangeHolder: "space_number",
mtoLoad: ["customer"],
mtmLoad: ["customerinventoryitems"],
},
ownaccounts: {
table: ownaccounts,
searchColumns: ["name","description","number"],
@@ -203,10 +169,6 @@ export const resourceConfig = {
bankrequisitions: {
table: bankrequisitions,
},
entitybankaccounts: {
table: entitybankaccounts,
searchColumns: ["description"],
},
serialexecutions: {
table: serialExecutions
}

View File

@@ -14,6 +14,8 @@ export let secrets = {
PORT: number
HOST: string
DATABASE_URL: string
SUPABASE_URL: string
SUPABASE_SERVICE_ROLE_KEY: string
S3_BUCKET: string
ENCRYPTION_KEY: string
MAILER_SMTP_HOST: string

View File

@@ -1,74 +0,0 @@
// scripts/fill-file-sizes.ts
import 'dotenv/config';
import { db } from '../../db';
import { files } from '../../db/schema';
import { eq, isNull } from 'drizzle-orm';
import { HeadObjectCommand } from "@aws-sdk/client-s3";
import { s3, initS3 } from '../utils/s3';
import { loadSecrets, secrets } from '../utils/secrets';
async function migrate() {
console.log("🚀 Starte Migration der Dateigrößen...");
// 1. Setup
await loadSecrets();
await initS3();
// 2. Alle Dateien holen, die noch keine Größe haben (oder alle, um sicherzugehen)
// Wir nehmen erstmal ALLE, um sicherzustellen, dass alles stimmt.
const allFiles = await db.select().from(files);
console.log(`📦 ${allFiles.length} Dateien in der Datenbank gefunden.`);
let successCount = 0;
let errorCount = 0;
// 3. Loop durch alle Dateien
for (const file of allFiles) {
if (!file.path) {
console.log(`⏭️ Überspringe Datei ${file.id} (Kein Pfad)`);
continue;
}
try {
// S3 fragen (HeadObject lädt nur Metadaten, nicht die ganze Datei -> Schnell)
const command = new HeadObjectCommand({
Bucket: secrets.S3_BUCKET, // Oder secrets.S3_BUCKET_NAME je nach deiner Config
Key: file.path
});
const response = await s3.send(command);
const size = response.ContentLength || 0;
// In DB speichern
await db.update(files)
.set({ size: size })
.where(eq(files.id, file.id));
process.stdout.write("."); // Fortschrittsanzeige
successCount++;
} catch (error: any) {
process.stdout.write("X");
// console.error(`\n❌ Fehler bei ${file.path}: ${error.name}`);
// Optional: Wenn Datei in S3 fehlt, könnten wir sie markieren oder loggen
if (error.name === 'NotFound') {
// console.error(` -> Datei existiert nicht im Bucket!`);
}
errorCount++;
}
}
console.log("\n\n------------------------------------------------");
console.log(`✅ Fertig!`);
console.log(`Updated: ${successCount}`);
console.log(`Fehler: ${errorCount} (Meistens Dateien, die im Bucket fehlen)`);
console.log("------------------------------------------------");
process.exit(0);
}
migrate().catch(err => {
console.error("Fataler Fehler:", err);
process.exit(1);
});

View File

@@ -1,200 +0,0 @@
import 'dotenv/config';
import { v2 as webdav } from 'webdav-server';
import { db } from '../../db';
import { tenants, files, folders } from '../../db/schema';
import { Readable } from 'stream';
import { GetObjectCommand, HeadObjectCommand } from "@aws-sdk/client-s3";
import { s3, initS3 } from '../utils/s3';
import { secrets, loadSecrets } from '../utils/secrets';
// ============================================================================
// 1. SETUP
// ============================================================================
const userManager = new webdav.SimpleUserManager();
const user = userManager.addUser('admin', 'admin', true);
const privilegeManager = new webdav.SimplePathPrivilegeManager();
privilegeManager.setRights(user, '/', [ 'all' ]);
const server = new webdav.WebDAVServer({
httpAuthentication: new webdav.HTTPDigestAuthentication(userManager, 'Default realm'),
privilegeManager: privilegeManager,
port: 3200,
headers: {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET,POST,PUT,DELETE,OPTIONS,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK',
'Access-Control-Allow-Headers': 'Authorization, Content-Type, Depth, User-Agent, X-Expected-Entity-Length, If-Modified-Since, Cache-Control, Range, Overwrite, Destination',
}
});
// ============================================================================
// 2. CACHE
// ============================================================================
const pathToS3KeyMap = new Map<string, string>();
const pathToSizeMap = new Map<string, number>();
// ============================================================================
// 3. LOGIC
// ============================================================================
async function startServer() {
console.log('------------------------------------------------');
console.log('[WebDAV] Starte Server (Filtered Mode)...');
try {
await loadSecrets();
await initS3();
console.log('[WebDAV] S3 Verbindung OK.');
console.log('[WebDAV] Lade Datenbank...');
const allTenants = await db.select().from(tenants);
const allFolders = await db.select().from(folders);
const allFiles = await db.select().from(files);
// Zähler für Statistik
let hiddenFilesCount = 0;
// --------------------------------------------------------------------
// BUILDER
// --------------------------------------------------------------------
const buildFolderContent = (tenantId: string, parentFolderId: string | null, currentWebDavPath: string) => {
const currentDir: any = {};
// 1. UNTERORDNER
const subFolders = allFolders.filter(f => f.tenant === tenantId && f.parent === parentFolderId);
subFolders.forEach(folder => {
const folderName = folder.name.replace(/\//g, '-');
const nextPath = `${currentWebDavPath}/${folderName}`;
currentDir[folderName] = buildFolderContent(tenantId, folder.id, nextPath);
});
// 2. DATEIEN
//@ts-ignore
const dirFiles = allFiles.filter(f => f.tenant === tenantId && f.folder === parentFolderId);
dirFiles.forEach(file => {
// ============================================================
// ❌ FILTER: DATEIEN OHNE GRÖSSE AUSBLENDEN
// ============================================================
const fileSize = Number(file.size || 0);
if (fileSize <= 0) {
// Datei überspringen, wenn 0 Bytes oder null
hiddenFilesCount++;
return;
}
// ============================================================
// Name bestimmen
let fileName = 'Unbenannt';
if (file.path) fileName = file.path.split('/').pop() || 'Unbenannt';
else if (file.name) fileName = file.name;
// A) Eintrag im WebDAV
currentDir[fileName] = `Ref: ${file.id}`;
// B) Maps füllen
const webDavFullPath = `${currentWebDavPath}/${fileName}`;
if (file.path) {
pathToS3KeyMap.set(webDavFullPath, file.path);
}
// C) Größe setzen (wir wissen jetzt sicher, dass sie > 0 ist)
pathToSizeMap.set(webDavFullPath, fileSize);
});
return currentDir;
};
// --------------------------------------------------------------------
// BAUM ZUSAMMENSETZEN
// --------------------------------------------------------------------
const dbTree: any = {};
allTenants.forEach(tenant => {
const tName = tenant.name.replace(/\//g, '-');
const rootPath = `/${tName}`;
//@ts-ignore
const content = buildFolderContent(tenant.id, null, rootPath);
// Leere Ordner Hinweis (optional)
if (Object.keys(content).length === 0) {
content['(Leer).txt'] = 'Keine gültigen Dateien vorhanden.';
}
dbTree[tName] = content;
});
if (Object.keys(dbTree).length === 0) {
dbTree['Status.txt'] = 'Datenbank leer.';
}
// --------------------------------------------------------------------
// REGISTRIEREN
// --------------------------------------------------------------------
const rootFS = server.rootFileSystem();
//@ts-ignore
rootFS.addSubTree(server.createExternalContext(), dbTree);
// ====================================================================
// OVERRIDE 1: DOWNLOAD
// ====================================================================
(rootFS as any)._openReadStream = async (path: webdav.Path, ctx: any, callback: any) => {
const p = path.toString();
const s3Key = pathToS3KeyMap.get(p);
if (s3Key) {
try {
const command = new GetObjectCommand({ Bucket: secrets.S3_BUCKET, Key: s3Key });
const response = await s3.send(command);
if (response.Body) return callback(null, response.Body as Readable);
} catch (e: any) {
console.error(`[S3 ERROR] ${e.message}`);
return callback(null, Readable.from([`Error: ${e.message}`]));
}
}
return callback(null, Readable.from(['System File']));
};
// ====================================================================
// OVERRIDE 2: SIZE
// ====================================================================
// Reports file sizes for PROPFIND. Sizes cached while building the tree
// are served directly; otherwise S3 is queried once and the result is
// cached for subsequent requests.
(rootFS as any)._size = async (path: webdav.Path, ctx: any, callback: any) => {
  const fullPath = path.toString();
  const knownSize = pathToSizeMap.get(fullPath);
  if (knownSize !== undefined) {
    return callback(null, knownSize);
  }
  // Fallback S3 lookup (rare: the build-time filter covers most files).
  const s3Key = pathToS3KeyMap.get(fullPath);
  if (!s3Key) {
    return callback(null, 0);
  }
  try {
    const head = await s3.send(new HeadObjectCommand({ Bucket: secrets.S3_BUCKET, Key: s3Key }));
    const actualSize = head.ContentLength || 0;
    pathToSizeMap.set(fullPath, actualSize);
    return callback(null, actualSize);
  } catch {
    // On any S3 error report size 0 rather than failing the PROPFIND.
    return callback(null, 0);
  }
};
// --------------------------------------------------------------------
// START
// --------------------------------------------------------------------
// Boot the WebDAV server and log a summary of what was exposed
// (pathToS3KeyMap entries) versus filtered out (hiddenFilesCount).
server.start(() => {
console.log('[WebDAV] 🚀 READY auf http://localhost:3200');
console.log(`[WebDAV] Sichtbare Dateien: ${pathToS3KeyMap.size}`);
console.log(`[WebDAV] Ausgeblendet (0 Bytes): ${hiddenFilesCount}`);
});
} catch (error) {
console.error('[WebDAV] 💥 ERROR:', error);
}
}
startServer();

View File

@@ -1,70 +1,37 @@
version: "3"
services:
frontend:
image: git.federspiel.tech/flfeders/fedeo/frontend:dev
web:
image: reg.federspiel.software/fedeo/software:beta
restart: always
environment:
- NUXT_PUBLIC_API_BASE=https://app.fedeo.de/backend
- NUXT_PUBLIC_PDF_LICENSE=eyJkYXRhIjoiZXlKMElqb2laR1YyWld4dmNHVnlJaXdpWVhaMUlqb3hOemt3TmpNNU9UazVMQ0prYlNJNkltRndjQzVtWldSbGJ5NWtaU0lzSW00aU9pSXpOemt3Wm1Vek5UazBZbVU0TlRRNElpd2laWGh3SWpveE56a3dOak01T1RrNUxDSmtiWFFpT2lKemNHVmphV1pwWXlJc0luQWlPaUoyYVdWM1pYSWlmUT09Iiwic2lnbmF0dXJlIjoicWU4K0ZxQUJDNUp5bEJUU094Vkd5RTJMbk9UNmpyc2EyRStsN2tNNWhkM21KK2ZvVjYwaTFKeFdhZGtqSDRNWXZxQklMc0dpdWh5d2pMbUFjRHZuWGxOcTRMcXFLRm53dzVtaG1LK3lTeDRXbzVaS1loK1VZdFBzWUZjV3oyUHVGMmJraGJrVjJ6RzRlTGtRU09wdmJKY3JUZU1rN0N1VkN6Q1UraHF5T0ZVVXllWnRmaHlmcWswZEFFL0RMR1hvTDFSQXFjNkNkYU9FTDRTdC9Idy9DQnFieTE2aisvT3RxQUlLcy9NWTR6SVk3RTI3bWo4RUx5VjhXNkdXNXhqc0VUVzNKN0RRMUVlb3RhVlNLT29kc3pVRlhUYzVlbHVuSm04ZlcwM1ErMUhtSnpmWGoyS1dwM1dnamJDazZYSHozamFML2lOdUYvZFZNaWYvc2FoR3NnPT0ifQ==
networks:
- traefik
labels:
- "traefik.enable=true"
- "traefik.docker.network=traefik"
- "traefik.port=3000"
# Middlewares
- "traefik.http.middlewares.fedeo-frontend-redirect-web-secure.redirectscheme.scheme=https"
# Web Entrypoint
- "traefik.http.routers.fedeo-frontend.middlewares=fedeo-frontend-redirect-web-secure"
- "traefik.http.routers.fedeo-frontend.rule=Host(`app.fedeo.de`) && PathPrefix(`/`)"
- "traefik.http.routers.fedeo-frontend.entrypoints=web"
# Web Secure Entrypoint
- "traefik.http.routers.fedeo-frontend-secure.rule=Host(`app.fedeo.de`) && PathPrefix(`/`)"
- "traefik.http.routers.fedeo-frontend-secure.entrypoints=web-secured" #
- "traefik.http.routers.fedeo-frontend-secure.tls.certresolver=mytlschallenge"
- INFISICAL_CLIENT_ID=abc
- INFISICAL_CLIENT_SECRET=abc
backend:
image: git.federspiel.tech/flfeders/fedeo/backend:dev
image: reg.federspiel.software/fedeo/backend:main
restart: always
environment:
- INFISICAL_CLIENT_ID=a6838bd6-9983-4bf4-9be2-ace830b9abdf
- INFISICAL_CLIENT_SECRET=4e3441acc0adbffd324aa50e668a95a556a3f55ec6bb85954e176e35a3392003
- NODE_ENV=production
networks:
- traefik
labels:
- "traefik.enable=true"
- "traefik.docker.network=traefik"
- "traefik.port=3100"
# Middlewares
- "traefik.http.middlewares.fedeo-backend-redirect-web-secure.redirectscheme.scheme=https"
- "traefik.http.middlewares.fedeo-backend-strip.stripprefix.prefixes=/backend"
# Web Entrypoint
- "traefik.http.routers.fedeo-backend.middlewares=fedeo-backend-redirect-web-secure"
- "traefik.http.routers.fedeo-backend.rule=Host(`app.fedeo.de`) && PathPrefix(`/backend`)"
- "traefik.http.routers.fedeo-backend.entrypoints=web"
# Web Secure Entrypoint
- "traefik.http.routers.fedeo-backend-secure.rule=Host(`app.fedeo.de`) && PathPrefix(`/backend`)"
- "traefik.http.routers.fedeo-backend-secure.entrypoints=web-secured" #
- "traefik.http.routers.fedeo-backend-secure.tls.certresolver=mytlschallenge"
- "traefik.http.routers.fedeo-backend-secure.middlewares=fedeo-backend-strip"
# db:
# image: postgres
# restart: always
# shm_size: 128mb
# environment:
# POSTGRES_PASSWORD: abc
# POSTGRES_USER: sandelcom
# POSTGRES_DB: sensorfy
# volumes:
# - ./pg-data:/var/lib/postgresql/data
# ports:
# - "5432:5432"
- NUXT_PUBLIC_API_BASE=
- NUXT_PUBLIC_PDF_LICENSE=
db:
image: postgres
restart: always
shm_size: 128mb
environment:
POSTGRES_PASSWORD: abc
POSTGRES_USER: sandelcom
POSTGRES_DB: sensorfy
volumes:
- ./pg-data:/var/lib/postgresql/data
ports:
- "5432:5432"
traefik:
image: traefik:v2.11
image: traefik:v2.2
restart: unless-stopped
container_name: traefik
command:
- "--api.insecure=false"
- "--api.dashboard=false"
- "--api.dashboard=true"
- "--api.debug=false"
- "--providers.docker=true"
- "--providers.docker.exposedbydefault=false"
@@ -76,18 +43,19 @@ services:
- "--accesslog.bufferingsize=5000"
- "--accesslog.fields.defaultMode=keep"
- "--accesslog.fields.headers.defaultMode=keep"
- "--certificatesresolvers.mytlschallenge.acme.tlschallenge=true" #
- "--certificatesresolvers.mytlschallenge.acme.email=moin@fedeo.de"
- "--certificatesresolvers.mytlschallenge.acme.storage=/letsencrypt/acme.json"
- "--certificatesresolvers.mytlschallenge.acme.tlschallenge=true" # <== Enable TLS-ALPN-01 to generate and renew ACME certs
- "--certificatesresolvers.mytlschallenge.acme.email=info@sandelcom.de" # <== Setting email for certs
- "--certificatesresolvers.mytlschallenge.acme.storage=/letsencrypt/acme.json" # <== Defining acme file to store cert information
ports:
- 80:80
- 8080:8080
- 443:443
volumes:
- "./traefik/letsencrypt:/letsencrypt" # <== Volume for certs (TLS)
- "/var/run/docker.sock:/var/run/docker.sock:ro"
- "./traefik/logs:/logs"
networks:
- traefik
networks:
traefik:
external: false
labels:
#### Labels define the behavior and rules of the traefik proxy for this container ####
- "traefik.enable=true" # <== Enable traefik on itself to view dashboard and assign subdomain to view it
- "traefik.http.routers.api.rule=Host(`srv1.drinkingteam.de`)" # <== Setting the domain for the dashboard
- "traefik.http.routers.api.service=api@internal" # <== Enabling the api to be a service to access

View File

@@ -1,182 +0,0 @@
<script setup>
// v-model contract: an array of assigned bank-account ids.
// 'disabled' renders the whole widget read-only.
const props = defineProps({
modelValue: {
type: Array,
default: () => []
},
disabled: {
type: Boolean,
default: false
}
})
const emit = defineEmits(["update:modelValue"])
const toast = useToast()
// All bank accounts known to the backend (populated by loadAccounts).
const accounts = ref([])
// Search field content for assigning an existing account by IBAN.
const ibanSearch = ref("")
// Controls visibility of the "create new bank account" modal.
const showCreate = ref(false)
// True while the IBAN lookup service is resolving BIC / bank name.
const resolvingIban = ref(false)
// Form state for the creation modal.
const createPayload = ref({
iban: "",
bic: "",
bankName: "",
description: ""
})
// Canonical IBAN form for comparisons: no whitespace, uppercase.
const normalizeIban = (value) => {
  const raw = String(value || "")
  return raw.replace(/\s+/g, "").toUpperCase()
}
// Refresh the full list of bank accounts from the backend.
const loadAccounts = async () => {
accounts.value = await useEntities("entitybankaccounts").select()
}
// Ids currently assigned via v-model (defensive: tolerates non-array input).
const assignedIds = computed(() => {
return Array.isArray(props.modelValue) ? props.modelValue : []
})
// Full account objects for the assigned ids (used for the badge list).
const assignedAccounts = computed(() => {
return accounts.value.filter((a) => assignedIds.value.includes(a.id))
})
// Propagate a new id list to the parent via v-model.
const updateAssigned = (ids) => {
emit("update:modelValue", ids)
}
// Assign an existing bank account by exact (normalized) IBAN match.
// Shows a toast when no match exists or the account is already assigned.
const assignByIban = async () => {
  const wanted = normalizeIban(ibanSearch.value)
  if (!wanted) return
  const account = accounts.value.find((acc) => normalizeIban(acc.iban) === wanted)
  if (!account) {
    toast.add({ title: "Kein Bankkonto mit dieser IBAN gefunden.", color: "rose" })
    return
  }
  if (assignedIds.value.includes(account.id)) {
    toast.add({ title: "Dieses Bankkonto ist bereits zugewiesen.", color: "amber" })
    return
  }
  updateAssigned([...assignedIds.value, account.id])
  ibanSearch.value = ""
}
// Unassign one bank account by id.
const removeAssigned = (accountId) => {
  const remaining = assignedIds.value.filter((existing) => existing !== accountId)
  updateAssigned(remaining)
}
// Create a new bank account from the modal form, then assign it.
// IBAN, BIC and bank name are mandatory; missing fields produce a toast.
const createAndAssign = async () => {
  const { iban, bic, bankName } = createPayload.value
  if (!iban || !bic || !bankName) {
    toast.add({ title: "IBAN, BIC und Bankinstitut sind Pflichtfelder.", color: "rose" })
    return
  }
  const created = await useEntities("entitybankaccounts").create(createPayload.value, true)
  await loadAccounts()
  updateAssigned([...assignedIds.value, created.id])
  // Reset the form and close the modal.
  createPayload.value = { iban: "", bic: "", bankName: "", description: "" }
  showCreate.value = false
}
// Auto-fill BIC and bank name in the creation form from the entered IBAN
// via the banking resolve service. Lookup failures are swallowed — the
// user can always fill the fields in manually.
const resolveCreatePayloadFromIban = async () => {
  const iban = normalizeIban(createPayload.value.iban)
  if (!iban) return
  resolvingIban.value = true
  try {
    const resolved = await useFunctions().useBankingResolveIban(iban)
    if (!resolved) return
    createPayload.value.iban = resolved.iban || iban
    if (resolved.bic) createPayload.value.bic = resolved.bic
    if (resolved.bankName) createPayload.value.bankName = resolved.bankName
  } catch (e) {
    // intentionally ignored: user can still enter fields manually
  } finally {
    resolvingIban.value = false
  }
}
loadAccounts()
</script>
<template>
<div class="flex flex-col gap-2 w-full">
<div class="flex flex-wrap gap-2" v-if="assignedAccounts.length > 0">
<UBadge
v-for="account in assignedAccounts"
:key="account.id"
color="primary"
variant="subtle"
>
{{ account.displayLabel || account.iban }}
<UButton
v-if="!disabled"
variant="ghost"
color="gray"
size="2xs"
icon="i-heroicons-x-mark"
class="ml-1"
@click="removeAssigned(account.id)"
/>
</UBadge>
</div>
<InputGroup class="w-full">
<UInput
v-model="ibanSearch"
class="flex-auto"
placeholder="IBAN eingeben und zuweisen"
:disabled="disabled"
@keydown.enter.prevent="assignByIban"
/>
<UButton :disabled="disabled" @click="assignByIban">
Zuweisen
</UButton>
<UButton :disabled="disabled" color="gray" variant="outline" @click="showCreate = true">
Neu
</UButton>
</InputGroup>
</div>
<UModal v-model="showCreate">
<UCard>
<template #header>Neue Bankverbindung erstellen</template>
<div class="space-y-3">
<UFormGroup label="IBAN">
<InputGroup>
<UInput
v-model="createPayload.iban"
@blur="resolveCreatePayloadFromIban"
@keydown.enter.prevent="resolveCreatePayloadFromIban"
/>
<UButton
color="gray"
variant="outline"
:loading="resolvingIban"
@click="resolveCreatePayloadFromIban"
>
Ermitteln
</UButton>
</InputGroup>
</UFormGroup>
<UFormGroup label="BIC">
<UInput v-model="createPayload.bic" />
</UFormGroup>
<UFormGroup label="Bankinstitut">
<UInput v-model="createPayload.bankName" />
</UFormGroup>
<UFormGroup label="Beschreibung (optional)">
<UInput v-model="createPayload.description" />
</UFormGroup>
</div>
<template #footer>
<div class="flex justify-end gap-2">
<UButton color="gray" variant="outline" @click="showCreate = false">Abbrechen</UButton>
<UButton @click="createAndAssign">Erstellen und zuweisen</UButton>
</div>
</template>
</UCard>
</UModal>
</template>

View File

@@ -1,235 +0,0 @@
<template>
<div
ref="el"
:style="style"
class="fixed z-[999] w-72 bg-white dark:bg-gray-900 shadow-2xl rounded-xl border border-gray-200 dark:border-gray-800 p-4 select-none touch-none"
>
<div class="flex items-center justify-between mb-4 cursor-move border-b pb-2 dark:border-gray-800">
<div class="flex items-center gap-2 text-gray-500">
<UIcon name="i-heroicons-calculator" />
<span class="text-xs font-bold uppercase tracking-wider">Kalkulator</span>
</div>
<div class="flex items-center gap-1">
<UTooltip text="Verlauf">
<UButton
color="gray"
variant="ghost"
:icon="showHistory ? 'i-heroicons-clock-solid' : 'i-heroicons-clock'"
size="xs"
@click="showHistory = !showHistory"
/>
</UTooltip>
<UTooltip text="Schließen (Esc)">
<UButton
color="gray"
variant="ghost"
icon="i-heroicons-x-mark"
size="xs"
@click="store.isOpen = false"
/>
</UTooltip>
</div>
</div>
<div v-if="!showHistory">
<div
class="bg-gray-100 dark:bg-gray-800 p-3 rounded-lg mb-4 text-right border border-gray-200 dark:border-gray-700 cursor-pointer group relative"
@click="copyDisplay"
>
<div class="text-[10px] text-gray-500 h-4 font-mono uppercase tracking-tighter">
Speicher: {{ Number(store.memory).toFixed(2).replace('.', ',') }}
</div>
<div class="text-2xl font-mono truncate tracking-tighter">{{ store.display }}</div>
<div class="absolute inset-0 flex items-center justify-center bg-primary-500/10 opacity-0 group-hover:opacity-100 transition-opacity rounded-lg">
<span class="text-[10px] font-bold text-primary-600 uppercase">
{{ copied ? 'Kopiert!' : 'Klicken zum Kopieren' }}
</span>
</div>
</div>
<div class="grid grid-cols-4 gap-2">
<UTooltip text="Brutto (+19%)"><UButton color="green" variant="soft" block size="xs" @click="applyTax(19)">+19%</UButton></UTooltip>
<UTooltip text="Brutto (+7%)"><UButton color="green" variant="soft" block size="xs" @click="applyTax(7)">+7%</UButton></UTooltip>
<UTooltip text="Netto (-19%)"><UButton color="rose" variant="soft" block size="xs" @click="removeTax(19)">-19%</UButton></UTooltip>
<UTooltip text="Netto (-7%)"><UButton color="rose" variant="soft" block size="xs" @click="removeTax(7)">-7%</UButton></UTooltip>
<UTooltip text="Löschen"><UButton color="gray" variant="ghost" block @click="clear">C</UButton></UTooltip>
<UTooltip text="Speicher +"><UButton color="gray" variant="ghost" block @click="addToSum">M+</UButton></UTooltip>
<UTooltip text="Speicher Reset"><UButton color="gray" variant="ghost" block @click="store.memory = 0">MC</UButton></UTooltip>
<UButton color="primary" variant="soft" @click="setOperator('/')">/</UButton>
<UButton v-for="n in [7, 8, 9]" :key="n" color="white" @click="appendNumber(n)">{{ n }}</UButton>
<UButton color="primary" variant="soft" @click="setOperator('*')">×</UButton>
<UButton v-for="n in [4, 5, 6]" :key="n" color="white" @click="appendNumber(n)">{{ n }}</UButton>
<UButton color="primary" variant="soft" @click="setOperator('-')">-</UButton>
<UButton v-for="n in [1, 2, 3]" :key="n" color="white" @click="appendNumber(n)">{{ n }}</UButton>
<UButton color="primary" variant="soft" @click="setOperator('+')">+</UButton>
<UButton color="white" class="col-span-2" @click="appendNumber(0)">0</UButton>
<UButton color="white" @click="addComma">,</UButton>
<UButton color="primary" block @click="calculate">=</UButton>
</div>
</div>
<div v-else class="h-[270px] flex flex-col animate-in fade-in duration-200">
<div class="flex-1 overflow-y-auto space-y-2 pr-1 custom-scrollbar">
<div v-if="store.history.length === 0" class="text-center text-gray-400 text-xs mt-10 italic">
Keine Berechnungen im Verlauf
</div>
<div
v-for="(item, i) in store.history" :key="i"
class="p-2 bg-gray-50 dark:bg-gray-800 rounded text-right border-l-2 border-primary-500 cursor-pointer hover:bg-primary-50 dark:hover:bg-primary-900/20 transition-colors"
@click="useHistoryItem(item.result)"
>
<div class="text-[10px] text-gray-400">{{ item.expression }} =</div>
<div class="text-sm font-bold">{{ item.result }}</div>
</div>
</div>
<UButton
color="gray"
variant="ghost"
size="xs"
block
class="mt-2"
icon="i-heroicons-trash"
@click="store.history = []"
>
Verlauf leeren
</UButton>
</div>
</div>
</template>
<script setup>
import { useDraggable, useClipboard } from '@vueuse/core'
import { useCalculatorStore } from '~/stores/calculator'
const store = useCalculatorStore()
const { copy, copied } = useClipboard()
// Root element of the floating window; made draggable via @vueuse/core.
const el = ref(null)
const { style } = useDraggable(el, {
// Start near the top-right corner of the viewport.
initialValue: { x: window.innerWidth - 350, y: 150 },
})
// When true, the next digit replaces the display instead of appending.
const shouldResetDisplay = ref(false)
// Toggles between keypad view and history view.
const showHistory = ref(false)
// Pending left operand / operator for the next '=' evaluation.
const previousValue = ref(null)
const lastOperator = ref(null)
// --- Logik ---
// Append a digit to the display, or start a fresh number when the
// display is '0' or a reset is pending (after '=' or an operator).
const appendNumber = (num) => {
  const startFresh = store.display === '0' || shouldResetDisplay.value
  if (startFresh) {
    store.display = String(num)
    shouldResetDisplay.value = false
  } else {
    store.display += String(num)
  }
}
// Append the decimal comma, but only once per number.
const addComma = () => {
  if (store.display.includes(',')) return
  store.display += ','
}
// Record the current display as the left operand and remember the
// operator; the next digit then starts the right operand.
const setOperator = (op) => {
  const leftOperand = parseFloat(store.display.replace(',', '.'))
  previousValue.value = leftOperand
  lastOperator.value = op
  shouldResetDisplay.value = true
}
// Evaluate "previousValue <operator> display", push the result into the
// history, and show it (rounded to 4 decimals, comma as decimal mark).
// Division by zero yields 0 instead of Infinity.
const calculate = () => {
  if (lastOperator.value === null) return
  const right = parseFloat(store.display.replace(',', '.'))
  const left = previousValue.value
  const operations = {
    '+': (a, b) => a + b,
    '-': (a, b) => a - b,
    '*': (a, b) => a * b,
    '/': (a, b) => (b !== 0 ? a / b : 0),
  }
  const result = operations[lastOperator.value](left, right)
  const expression = `${left} ${lastOperator.value} ${right}`
  const resultString = String(Number(result.toFixed(4))).replace('.', ',')
  store.addHistory(expression, resultString)
  store.display = resultString
  lastOperator.value = null
  shouldResetDisplay.value = true
}
// Reset display and any pending operation (does not touch memory/history).
const clear = () => {
  store.display = '0'
  previousValue.value = null
  lastOperator.value = null
}
// Gross-up the display by a VAT percentage (e.g. +19%).
const applyTax = (percent) => {
  const net = parseFloat(store.display.replace(',', '.'))
  const gross = net * (1 + percent / 100)
  store.display = gross.toFixed(2).replace('.', ',')
}
// Strip a VAT percentage from the display (gross → net).
const removeTax = (percent) => {
  const gross = parseFloat(store.display.replace(',', '.'))
  const net = gross / (1 + percent / 100)
  store.display = net.toFixed(2).replace('.', ',')
}
// M+ : accumulate the current display value into the memory register.
const addToSum = () => {
  const current = parseFloat(store.display.replace(',', '.'))
  store.memory += current
  shouldResetDisplay.value = true
}
// Copy the raw display text to the clipboard.
const copyDisplay = () => {
  copy(store.display)
}
// Re-use a history entry as the current display and return to the keypad.
const useHistoryItem = (val) => {
  store.display = val
  showHistory.value = false
}
// --- Shortcuts ---
// Keyboard bindings for the calculator: digits, comma, operators,
// Enter (=), Backspace (delete last char) and Escape (close window).
defineShortcuts({
'0': () => appendNumber(0),
'1': () => appendNumber(1),
'2': () => appendNumber(2),
'3': () => appendNumber(3),
'4': () => appendNumber(4),
'5': () => appendNumber(5),
'6': () => appendNumber(6),
'7': () => appendNumber(7),
'8': () => appendNumber(8),
'9': () => appendNumber(9),
'comma': addComma,
'plus': () => setOperator('+'),
'minus': () => setOperator('-'),
'enter': { usingInput: true, handler: calculate },
// Remove the last character; an emptied display falls back to '0'.
'backspace': () => {
store.display = store.display.length > 1 ? store.display.slice(0, -1) : '0'
},
// Escape now closes the window via the store
'escape': {
usingInput: true,
// Only active while the calculator window is actually open.
whenever: [computed(() => store.isOpen)],
handler: () => { store.isOpen = false }
}
})
</script>
<style scoped>
.custom-scrollbar::-webkit-scrollbar {
width: 4px;
}
.custom-scrollbar::-webkit-scrollbar-track {
@apply bg-transparent;
}
.custom-scrollbar::-webkit-scrollbar-thumb {
@apply bg-gray-200 dark:bg-gray-700 rounded-full;
}
</style>

View File

@@ -29,6 +29,7 @@ const props = defineProps({
const emit = defineEmits(["returnData"])
const {type} = props
defineShortcuts({
@@ -52,10 +53,11 @@ const route = useRoute()
const dataStore = useDataStore()
const modal = useModal()
const dataType = dataStore.dataTypes[type]
const openTab = ref(0)
const item = ref(JSON.parse(props.item))
// console.log(item.value)
console.log(item.value)
const oldItem = ref(null)
const generateOldItemData = () => {
@@ -64,50 +66,6 @@ const generateOldItemData = () => {
generateOldItemData()
// --- ÄNDERUNG START: Computed Property statt Watcher/Function ---
// Dies berechnet den Status automatisch neu, egal woher die Daten kommen (Init oder User-Eingabe)
const saveAllowed = computed(() => {
if (!item.value) return false
const isFilledValue = (value) => {
if (Array.isArray(value)) return value.length > 0
if (typeof value === "string") return value.trim().length > 0
return value !== null && value !== undefined && value !== false
}
let allowedCount = 0
// Nur Input-Felder berücksichtigen
const relevantColumns = dataType.templateColumns.filter(i => {
if (!i.inputType) return false
if (i.showFunction && !i.showFunction(item.value)) return false
if (i.disabledFunction && i.disabledFunction(item.value)) return false
return true
})
relevantColumns.forEach(datapoint => {
if(datapoint.required) {
if(datapoint.key.includes(".")){
const [parentKey, childKey] = datapoint.key.split('.')
// Prüfung: Existiert Parent UND ist Child "truthy" (nicht null/undefined/empty)
if(item.value[parentKey] && isFilledValue(item.value[parentKey][childKey])) {
allowedCount += 1
}
} else {
if(isFilledValue(item.value[datapoint.key])) {
allowedCount += 1
}
}
} else {
// Wenn nicht required, zählt es immer als "erlaubt"
allowedCount += 1
}
})
return allowedCount >= relevantColumns.length
})
// --- ÄNDERUNG ENDE ---
const setupCreate = () => {
dataType.templateColumns.forEach(datapoint => {
if(datapoint.key.includes(".")){
@@ -120,7 +78,10 @@ const setupCreate = () => {
} else {
item.value[datapoint.key] = {}
}
}
})
}
setupCreate()
@@ -130,45 +91,49 @@ const setupQuery = () => {
console.log(props.mode)
if(props.mode === "create" && (route.query || props.createQuery)) {
let data = !props.inModal ? route.query : props.createQuery
Object.keys(data).forEach(key => {
if (dataType.templateColumns.find(i => i.key === key)) {
if(dataType.templateColumns.find(i => i.key === key)) {
if (["customer", "contract", "plant", "contact", "project"].includes(key)) {
item.value[key] = Number(data[key])
} else {
item.value[key] = data[key]
}
} else if (key === "resources") {
} else if(key === "resources") {
/*item.value[key] = data[key]*/
JSON.parse(data[key]).forEach(async (i) => {
console.log(i)
let type = i.substring(0, 1)
let id = i.substring(2, i.length)
let type = i.substring(0,1)
let id = i.substring(2,i.length)
console.log(type)
console.log(id)
let holder = ""
if (type === "P") {
if(type === "P"){
holder = "profiles"
} else if (type === "F") {
} else if(type === "F"){
holder = "vehicles"
id = Number(id)
} else if (type === "I") {
} else if(type === "I"){
holder = "inventoryitems"
id = Number(id)
} else if (type === "G") {
} else if(type === "G"){
holder = "inventoryitemgroups"
}
if (typeof item.value[holder] === "object") {
if(typeof item.value[holder] === "object") {
item.value[holder].push(id)
} else {
item.value[holder] = [id]
}
})
}
})
// calcSaveAllowed() -> Entfernt, da computed automatisch reagiert
}
}
setupQuery()
@@ -183,14 +148,14 @@ const loadOptions = async () => {
})
for await(const option of optionsToLoad) {
if (option.option === "countrys") {
if(option.option === "countrys") {
loadedOptions.value[option.option] = useEntities("countrys").selectSpecial()
} else if (option.option === "units") {
} else if(option.option === "units") {
loadedOptions.value[option.option] = useEntities("units").selectSpecial()
} else {
loadedOptions.value[option.option] = (await useEntities(option.option).select())
if (dataType.templateColumns.find(x => x.key === option.key).selectDataTypeFilter) {
if(dataType.templateColumns.find(x => x.key === option.key).selectDataTypeFilter){
loadedOptions.value[option.option] = loadedOptions.value[option.option].filter(i => dataType.templateColumns.find(x => x.key === option.key).selectDataTypeFilter(i, item))
}
}
@@ -200,23 +165,47 @@ const loadOptions = async () => {
loadOptions()
const contentChanged = (content, datapoint) => {
if (datapoint.key.includes(".")) {
item.value[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]].html = content.html
item.value[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]].text = content.text
item.value[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]].json = content.json
if(datapoint.key.includes(".")){
item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]].html = content.html
item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]].text = content.text
item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]].json = content.json
} else {
item.value[datapoint.key].html = content.html
item.value[datapoint.key].text = content.text
item.value[datapoint.key].json = content.json
item[datapoint.key].html = content.html
item[datapoint.key].text = content.text
item[datapoint.key].json = content.json
}
}
const saveAllowed = ref(false)
const calcSaveAllowed = (item) => {
let allowedCount = 0
dataType.templateColumns.filter(i => i.inputType).forEach(datapoint => {
if(datapoint.required) {
if(datapoint.key.includes(".")){
if(item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]]) allowedCount += 1
} else {
if(item[datapoint.key]) allowedCount += 1
}
} else {
allowedCount += 1
}
})
saveAllowed.value = allowedCount >= dataType.templateColumns.filter(i => i.inputType).length
}
//calcSaveAllowed()
watch(item.value, async (newItem, oldItem) => {
calcSaveAllowed(newItem)
})
const createItem = async () => {
let ret = null
if (props.inModal) {
ret = await useEntities(type).create(item.value, true)
if(props.inModal) {
ret = await useEntities(type).create(item.value, true)
} else {
ret = await useEntities(type).create(item.value)//dataStore.createNewItem(type,item.value)
@@ -229,7 +218,7 @@ const createItem = async () => {
const updateItem = async () => {
let ret = null
if (props.inModal) {
if(props.inModal) {
ret = await useEntities(type).update(item.value.id, item.value, true)
emit('returnData', ret)
modal.close()
@@ -237,7 +226,11 @@ const updateItem = async () => {
ret = await useEntities(type).update(item.value.id, item.value)
emit('returnData', ret)
}
}
</script>
<template>
@@ -252,15 +245,16 @@ const updateItem = async () => {
<UButton
icon="i-heroicons-chevron-left"
variant="outline"
@click="router.back()"
@click="router.back()/*router.push(`/standardEntity/${type}`)*/"
>
<!-- {{dataType.label}}-->
</UButton>
</template>
<template #center>
<h1
v-if="item"
:class="['text-xl','font-medium', 'text-center']"
>{{ item.id ? `${dataType.labelSingle} bearbeiten` : `${dataType.labelSingle} erstellen` }}</h1>
>{{item.id ? `${dataType.labelSingle} bearbeiten` : `${dataType.labelSingle} erstellen` }}</h1>
</template>
<template #right>
<ArchiveButton
@@ -301,7 +295,7 @@ const updateItem = async () => {
<h1
v-if="item"
:class="['text-xl','font-medium']"
>{{ item.id ? `${dataType.labelSingle} bearbeiten` : `${dataType.labelSingle} erstellen` }}</h1>
>{{item.id ? `${dataType.labelSingle} bearbeiten` : `${dataType.labelSingle} erstellen` }}</h1>
</template>
<template #right>
<UButton
@@ -336,7 +330,11 @@ const updateItem = async () => {
v-for="(columnName,index) in dataType.inputColumns"
:class="platform === 'mobile' ? ['w-full'] : [`w-1/${dataType.inputColumns.length}`, ... index < dataType.inputColumns.length -1 ? ['mr-5'] : []]"
>
<UDivider>{{ columnName }}</UDivider>
<UDivider>{{columnName}}</UDivider>
<!--
Die Form Group darf nur in der ersten bearbeitet werden und muss dann runterkopiert werden
-->
<div
v-for="datapoint in dataType.templateColumns.filter(i => i.inputType && i.inputColumn === columnName)"
@@ -364,7 +362,7 @@ const updateItem = async () => {
:placeholder="datapoint.inputIsNumberRange ? 'Leer lassen für automatisch generierte Nummer' : ''"
>
<template #trailing v-if="datapoint.inputTrailing">
<span class="text-gray-500 dark:text-gray-400 text-xs">{{ datapoint.inputTrailing }}</span>
<span class="text-gray-500 dark:text-gray-400 text-xs">{{datapoint.inputTrailing}}</span>
</template>
</UInput>
<UToggle
@@ -438,11 +436,7 @@ const updateItem = async () => {
/>
</template>
</UPopover>
<BankAccountAssignInput
v-else-if="datapoint.inputType === 'bankaccountassign'"
v-model="item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]]"
:disabled="datapoint.disabledFunction ? datapoint.disabledFunction(item) : false"
/>
<!-- TODO: DISABLED FOR TIPTAP -->
<Tiptap
v-else-if="datapoint.inputType === 'editor'"
@updateContent="(i) => contentChanged(i,datapoint)"
@@ -469,7 +463,7 @@ const updateItem = async () => {
:placeholder="datapoint.inputIsNumberRange ? 'Leer lassen für automatisch generierte Nummer' : ''"
>
<template #trailing v-if="datapoint.inputTrailing">
{{ datapoint.inputTrailing }}
{{datapoint.inputTrailing}}
</template>
</UInput>
<UToggle
@@ -543,11 +537,7 @@ const updateItem = async () => {
/>
</template>
</UPopover>
<BankAccountAssignInput
v-else-if="datapoint.inputType === 'bankaccountassign'"
v-model="item[datapoint.key]"
:disabled="datapoint.disabledFunction ? datapoint.disabledFunction(item) : false"
/>
<!-- TODO: Color/Required for TipTap and MaterialComposing -->
<Tiptap
v-else-if="datapoint.inputType === 'editor'"
@updateContent="(i) => contentChanged(i,datapoint)"
@@ -572,8 +562,35 @@ const updateItem = async () => {
icon="i-heroicons-x-mark"
/>
</InputGroup>
<!-- <div
v-if="profileStore.ownTenant.ownFields"
>
<UDivider
class="mt-3"
>Eigene Felder</UDivider>
<UFormGroup
v-for="field in profileStore.ownTenant.ownFields.contracts"
:key="field.key"
:label="field.label"
>
<UInput
v-if="field.type === 'text'"
v-model="item.ownFields[field.key]"
/>
<USelectMenu
v-else-if="field.type === 'select'"
:options="field.options"
v-model="item.ownFields[field.key]"
/>
</UFormGroup>
</div>-->
</UFormGroup>
</div>
</div>
</div>
<UFormGroup
@@ -599,7 +616,7 @@ const updateItem = async () => {
:placeholder="datapoint.inputIsNumberRange ? 'Leer lassen für automatisch generierte Nummer' : ''"
>
<template #trailing v-if="datapoint.inputTrailing">
<span class="text-gray-500 dark:text-gray-400 text-xs">{{ datapoint.inputTrailing }}</span>
<span class="text-gray-500 dark:text-gray-400 text-xs">{{datapoint.inputTrailing}}</span>
</template>
</UInput>
<UToggle
@@ -673,11 +690,7 @@ const updateItem = async () => {
/>
</template>
</UPopover>
<BankAccountAssignInput
v-else-if="datapoint.inputType === 'bankaccountassign'"
v-model="item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]]"
:disabled="datapoint.disabledFunction ? datapoint.disabledFunction(item) : false"
/>
<!-- TODO: DISABLED FOR TIPTAP -->
<Tiptap
v-else-if="datapoint.inputType === 'editor'"
@updateContent="(i) => contentChanged(i,datapoint)"
@@ -704,7 +717,7 @@ const updateItem = async () => {
:placeholder="datapoint.inputIsNumberRange ? 'Leer lassen für automatisch generierte Nummer' : ''"
>
<template #trailing v-if="datapoint.inputTrailing">
{{ datapoint.inputTrailing }}
{{datapoint.inputTrailing}}
</template>
</UInput>
<UToggle
@@ -778,11 +791,7 @@ const updateItem = async () => {
/>
</template>
</UPopover>
<BankAccountAssignInput
v-else-if="datapoint.inputType === 'bankaccountassign'"
v-model="item[datapoint.key]"
:disabled="datapoint.disabledFunction ? datapoint.disabledFunction(item) : false"
/>
<!-- TODO: Color/Required for TipTap and MaterialComposing -->
<Tiptap
v-else-if="datapoint.inputType === 'editor'"
@updateContent="(i) => contentChanged(i,datapoint)"
@@ -791,8 +800,8 @@ const updateItem = async () => {
<MaterialComposing
v-else-if="datapoint.inputType === 'materialComposing'"
:item="item"
v-else-if="datapoint.inputType === 'materialComposing'"
:item="item"
/>
<PersonalComposing
v-else-if="datapoint.inputType === 'personalComposing'"
@@ -807,6 +816,30 @@ const updateItem = async () => {
icon="i-heroicons-x-mark"
/>
</InputGroup>
<!-- <div
v-if="profileStore.ownTenant.ownFields"
>
<UDivider
class="mt-3"
>Eigene Felder</UDivider>
<UFormGroup
v-for="field in profileStore.ownTenant.ownFields.contracts"
:key="field.key"
:label="field.label"
>
<UInput
v-if="field.type === 'text'"
v-model="item.ownFields[field.key]"
/>
<USelectMenu
v-else-if="field.type === 'select'"
:options="field.options"
v-model="item.ownFields[field.key]"
/>
</UFormGroup>
</div>-->
</UFormGroup>
</UForm>
</UDashboardPanelContent>

View File

@@ -69,12 +69,6 @@ const profileStore = useProfileStore()
const tempStore = useTempStore()
const dataType = dataStore.dataTypes[type]
const canCreate = computed(() => {
if (type === "members") {
return has("members-create") || has("customers-create")
}
return has(`${type}-create`)
})
const selectedColumns = ref(tempStore.columns[type] ? tempStore.columns[type] : dataType.templateColumns.filter(i => !i.disabledInTable))
const columns = computed(() => dataType.templateColumns.filter((column) => !column.disabledInTable && selectedColumns.value.find(i => i.key === column.key)))
@@ -144,7 +138,7 @@ const filteredRows = computed(() => {
/>
<UButton
v-if="platform !== 'mobile' && canCreate/*&& useRole().checkRight(`${type}-create`)*/"
v-if="platform !== 'mobile' && has(`${type}-create`)/*&& useRole().checkRight(`${type}-create`)*/"
@click="router.push(`/standardEntity/${type}/create`)"
class="ml-3"
>+ {{dataType.labelSingle}}</UButton>

View File

@@ -1,8 +1,6 @@
<script setup>
import EntityShowSubTimes from "~/components/EntityShowSubTimes.vue";
import WikiEntityWidget from "~/components/wiki/WikiEntityWidget.vue";
import LabelPrintModal from "~/components/LabelPrintModal.vue";
const props = defineProps({
type: {
@@ -70,7 +68,7 @@ const getAvailableQueryStringData = (keys) => {
if(props.item.customer) {
addParam("customer", props.item.customer.id)
} else if(type === "customers" || type === "members") {
} else if(type === "customers") {
addParam("customer", props.item.id)
}
@@ -137,18 +135,6 @@ const changePinned = async () => {
}
const openCustomerInventoryLabelPrint = () => {
modal.open(LabelPrintModal, {
context: {
id: props.item.id,
customerInventoryId: props.item.customerInventoryId,
name: props.item.name,
customerName: props.item.customer?.name,
serialNumber: props.item.serialNumber
}
})
}
</script>
<template>
@@ -206,14 +192,6 @@ const openCustomerInventoryLabelPrint = () => {
color="yellow"
@click="changePinned"
></UButton>
<UButton
v-if="type === 'customerinventoryitems'"
icon="i-heroicons-printer"
variant="outline"
@click="openCustomerInventoryLabelPrint"
>
Label
</UButton>
<UButton
@click="router.push(`/standardEntity/${type}/edit/${item.id}`)"
>
@@ -235,14 +213,6 @@ const openCustomerInventoryLabelPrint = () => {
>{{item ? `${dataType.labelSingle}${props.item[dataType.templateColumns.find(i => i.title).key] ? ': ' + props.item[dataType.templateColumns.find(i => i.title).key] : ''}`: '' }}</h1>
</template>
<template #right>
<UButton
v-if="type === 'customerinventoryitems'"
icon="i-heroicons-printer"
variant="outline"
@click="openCustomerInventoryLabelPrint"
>
Label
</UButton>
<UButton
@click="router.push(`/standardEntity/${type}/edit/${item.id}`)"
>
@@ -318,13 +288,6 @@ const openCustomerInventoryLabelPrint = () => {
v-else-if="tab.label === 'Zeiten'"
:platform="platform"
/>
<div v-else-if="tab.label === 'Wiki'" class="h-[600px] w-full overflow-hidden">
<WikiEntityWidget
:entity-type="type"
:entity-id="typeof props.item.id === 'number' ? props.item.id : undefined"
:entity-uuid="typeof props.item.id === 'string' ? props.item.id : undefined"
/>
</div>
<EntityShowSub
:item="props.item"
:query-string-data="getAvailableQueryStringData()"

View File

@@ -31,7 +31,6 @@ const dataStore = useDataStore()
const tempStore = useTempStore()
const router = useRouter()
const createRoute = computed(() => type.value === "tasks" ? `/tasks/create?${props.queryStringData}` : `/standardEntity/${type.value}/create?${props.queryStringData}`)
let dataType = null
@@ -81,7 +80,7 @@ setup()
</template>
<Toolbar>
<UButton
@click="router.push(createRoute)"
@click="router.push(`/standardEntity/${type}/create?${props.queryStringData}`)"
>
+ {{dataType.labelSingle}}
</UButton>

View File

@@ -60,6 +60,7 @@ const router = useRouter()
const createddocuments = ref([])
const setup = async () => {
//createddocuments.value = (await useSupabaseSelect("createddocuments")).filter(i => !i.archived)
createddocuments.value = (await useEntities("createddocuments").select()).filter(i => !i.archived)
}
setup()
@@ -77,6 +78,9 @@ const templateColumns = [
},{
key: 'state',
label: "Status"
},{
key: 'paid',
label: "Bezahlt"
},{
key: 'amount',
label: "Betrag"
@@ -279,12 +283,12 @@ const selectItem = (item) => {
{{row.state}}
</span>
</template>
<!-- <template #paid-data="{row}">
<template #paid-data="{row}">
<div v-if="(row.type === 'invoices' ||row.type === 'advanceInvoices') && row.state === 'Gebucht'">
<span v-if="useSum().getIsPaid(row,createddocuments)" class="text-primary-500">Bezahlt</span>
<span v-else class="text-rose-600">Offen</span>
</div>
</template>-->
</template>
<template #reference-data="{row}">
<span v-if="row === props.item.createddocuments[selectedItem]" class="text-primary-500 font-bold">{{row.documentNumber}}</span>
<span v-else>{{row.documentNumber}}</span>

View File

@@ -21,20 +21,13 @@ const props = defineProps({
const dataStore = useDataStore()
const dataType = dataStore.dataTypes[props.topLevelType]
const historyType = computed(() => {
const holder = dataType?.historyItemHolder
if (!holder) return props.topLevelType
const normalized = String(holder).toLowerCase()
return normalized.endsWith("s") ? normalized : `${normalized}s`
})
</script>
<template>
<UCard class="mt-5 scroll" :style="props.platform !== 'mobile' ? 'height: 80vh' : ''">
<HistoryDisplay
:type="historyType"
:type="props.topLevelType"
v-if="props.item.id"
:element-id="props.item.id"
render-headline

View File

@@ -1,5 +1,4 @@
<script setup>
import dayjs from "dayjs";
const props = defineProps({
queryStringData: {
@@ -29,33 +28,6 @@ const dataType = dataStore.dataTypes[props.topLevelType]
// const selectedColumns = ref(tempStore.columns[props.topLevelType] ? tempStore.columns[props.topLevelType] : dataType.templateColumns.filter(i => !i.disabledInTable))
// const columns = computed(() => dataType.templateColumns.filter((column) => !column.disabledInTable && selectedColumns.value.find(i => i.key === column.key)))
const getDatapointValue = (datapoint) => {
if (datapoint.key.includes(".")) {
const [parentKey, childKey] = datapoint.key.split(".")
return props.item?.[parentKey]?.[childKey]
}
return props.item?.[datapoint.key]
}
const renderDatapointValue = (datapoint) => {
const value = getDatapointValue(datapoint)
if (value === null || value === undefined || value === "") return "-"
if (datapoint.inputType === "date") {
return dayjs(value).isValid() ? dayjs(value).format("DD.MM.YYYY") : String(value)
}
if (datapoint.inputType === "datetime") {
return dayjs(value).isValid() ? dayjs(value).format("DD.MM.YYYY HH:mm") : String(value)
}
if (datapoint.inputType === "bool" || typeof value === "boolean") {
return value ? "Ja" : "Nein"
}
return `${value}${datapoint.unit ? datapoint.unit : ""}`
}
</script>
<template>
@@ -81,7 +53,8 @@ const renderDatapointValue = (datapoint) => {
<td>
<component v-if="datapoint.component" :is="datapoint.component" :row="props.item" :in-show="true"></component>
<div v-else>
<span>{{ renderDatapointValue(datapoint) }}</span>
<span v-if="datapoint.key.includes('.')">{{props.item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]]}}{{datapoint.unit}}</span>
<span v-else>{{props.item[datapoint.key]}} {{datapoint.unit}}</span>
</div>
</td>
</tr>

View File

@@ -1,6 +1,9 @@
<script setup>
import dayjs from "dayjs";
const supabase = useSupabaseClient()
const route = useRoute()
const router = useRouter()
const profileStore = useProfileStore()
const props = defineProps({
@@ -25,6 +28,8 @@ const statementallocations = ref([])
const incominginvoices = ref([])
const setup = async () => {
//statementallocations.value = (await supabase.from("statementallocations").select("*, bs_id(*)").eq("account", route.params.id).eq("tenant",profileStore.currentTenant).order("created_at",{ascending: true})).data
//incominginvoices.value = (await useSupabaseSelect("incominginvoices", "*, vendor(*)")).filter(i => i.accounts.find(x => x.account == route.params.id))
}
setup()

View File

@@ -24,7 +24,7 @@ const emit = defineEmits(["updateNeeded"]);
const router = useRouter()
const profileStore = useProfileStore()
const auth = useAuthStore()
const supabase = useSupabaseClient()
const renderedPhases = computed(() => {
if(props.topLevelType === "projects" && props.item.phases) {
@@ -58,7 +58,6 @@ const renderedPhases = computed(() => {
})
const changeActivePhase = async (key) => {
console.log(props.item)
let item = await useEntities("projects").selectSingle(props.item.id,'*')
let phaseLabel = ""
@@ -69,17 +68,26 @@ const changeActivePhase = async (key) => {
if(p.key === key) {
p.active = true
p.activated_at = dayjs().format()
p.activated_by = auth.user.id
p.activated_by = profileStore.activeProfile.id
phaseLabel = p.label
}
return p
})
console.log(item)
const res = await useEntities("projects").update(item.id, {phases:item.phases,active_phase: item.phases.find(i => i.active).label})
//const {error:updateError} = await supabase.from("projects").update({phases: item.phases}).eq("id",item.id)
/*const {error} = await supabase.from("historyitems").insert({
createdBy: profileStore.activeProfile.id,
tenant: profileStore.currentTenant,
text: `Aktive Phase zu "${phaseLabel}" gewechselt`,
project: item.id
})*/
emit("updateNeeded")
}
@@ -144,7 +152,7 @@ const changeActivePhase = async (key) => {
<div>
<p v-if="item.activated_at" class="dark:text-white text-black">Aktiviert am: {{dayjs(item.activated_at).format("DD.MM.YY HH:mm")}} Uhr</p>
<p v-if="item.activated_by" class="dark:text-white text-black">Aktiviert durch: {{item.activated_by}}</p>
<p v-if="item.activated_by" class="dark:text-white text-black">Aktiviert durch: {{profileStore.getProfileById(item.activated_by).fullName}}</p>
<p v-if="item.description" class="dark:text-white text-black">Beschreibung: {{item.description}}</p>
</div>
</UCard>

View File

@@ -1,5 +1,10 @@
<script setup>
import dayjs from "dayjs";
const supabase = useSupabaseClient()
const route = useRoute()
const router = useRouter()
const profileStore = useProfileStore()
const props = defineProps({
queryStringData: {

Some files were not shown because too many files have changed in this diff Show More